Repository: incubator-hawq
Updated Branches:
  refs/heads/HAWQ-703 9d791730a -> 7b8372399 (forced update)


HAWQ-703. Serialize HCatalog Complex Types to plain text (as Hive profile).


Project: http://git-wip-us.apache.org/repos/asf/incubator-hawq/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-hawq/commit/7b837239
Tree: http://git-wip-us.apache.org/repos/asf/incubator-hawq/tree/7b837239
Diff: http://git-wip-us.apache.org/repos/asf/incubator-hawq/diff/7b837239

Branch: refs/heads/HAWQ-703
Commit: 7b83723990939dc4dd5cf6ce325e21eed291d523
Parents: 649828f
Author: Oleksandr Diachenko <odiache...@pivotal.io>
Authored: Fri Apr 22 16:34:42 2016 -0700
Committer: Oleksandr Diachenko <odiache...@pivotal.io>
Committed: Thu Apr 28 13:57:30 2016 -0700

----------------------------------------------------------------------
 .../java/org/apache/hawq/pxf/api/Metadata.java  |  33 ++++--
 .../hawq/pxf/api/utilities/EnumHawqType.java    |  77 +++++++++++++
 .../hive/utilities/EnumHiveToHawqTypes.java     |  70 ++++++++++++
 .../plugins/hive/utilities/HiveUtilities.java   | 109 ++++++-------------
 .../plugins/hive/HiveMetadataFetcherTest.java   |  12 +-
 .../hive/utilities/HiveUtilitiesTest.java       |  18 +--
 .../hawq/pxf/service/MetadataResponse.java      |   1 +
 .../pxf/service/MetadataResponseFormatter.java  |   3 +-
 .../service/MetadataResponseFormatterTest.java  |  57 ++++++----
 src/backend/catalog/external/externalmd.c       |   4 +
 src/backend/utils/adt/pxf_functions.c           |   9 +-
 src/bin/psql/describe.c                         |  32 +++++-
 src/include/catalog/external/itemmd.h           |   3 +
 src/include/catalog/pg_proc.h                   |   4 +-
 src/include/catalog/pg_proc.sql                 |   2 +-
 15 files changed, 305 insertions(+), 129 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/incubator-hawq/blob/7b837239/pxf/pxf-api/src/main/java/org/apache/hawq/pxf/api/Metadata.java
----------------------------------------------------------------------
diff --git a/pxf/pxf-api/src/main/java/org/apache/hawq/pxf/api/Metadata.java b/pxf/pxf-api/src/main/java/org/apache/hawq/pxf/api/Metadata.java
index 4fc510d..a375ad8 100644
--- a/pxf/pxf-api/src/main/java/org/apache/hawq/pxf/api/Metadata.java
+++ b/pxf/pxf-api/src/main/java/org/apache/hawq/pxf/api/Metadata.java
@@ -23,6 +23,7 @@ package org.apache.hawq.pxf.api;
 import java.util.ArrayList;
 import java.util.List;
 
+import org.apache.hawq.pxf.api.utilities.EnumHawqType;
 import org.apache.commons.lang.StringUtils;
 
 /**
@@ -67,25 +68,39 @@ public class Metadata {
     }
 
     /**
-     * Class representing item field - name and type.
+     * Class representing an item field - name, type, source type, modifiers.
+     * Type - the type of the field as exposed by PXF
+     * Source type - the type of the field in the underlying source
+     * Modifiers - additional attributes which describe the type or the field
      */
     public static class Field {
         private String name;
-        private String type; // TODO: change to enum
+        private EnumHawqType type; // field type which PXF exposes
+        private String sourceType; // field type PXF reads from
         private String[] modifiers; // type modifiers, optional field
 
-        public Field(String name, String type) {
+        public Field(String name, EnumHawqType type) {
 
-            if (StringUtils.isBlank(name) || StringUtils.isBlank(type)) {
-                throw new IllegalArgumentException("Field name and type cannot be empty");
+            if (StringUtils.isBlank(name) || StringUtils.isBlank(type.getTypeName())) {
+                throw new IllegalArgumentException("Field name and type cannot be empty.");
             }
 
             this.name = name;
             this.type = type;
         }
 
-        public Field(String name, String type, String[] modifiers) {
+        public Field(String name, EnumHawqType type, String sourceType) {
             this(name, type);
+            this.sourceType = sourceType;
+        }
+
+        public Field(String name, EnumHawqType type, String[] modifiers) {
+            this(name, type);
+            this.modifiers = modifiers;
+        }
+
+        public Field(String name, EnumHawqType type, String sourceType, String[] modifiers) {
+            this(name, type, sourceType);
             this.modifiers = modifiers;
         }
 
@@ -93,10 +108,14 @@ public class Metadata {
             return name;
         }
 
-        public String getType() {
+        public EnumHawqType getType() {
             return type;
         }
 
+        public String getSourceType() {
+            return sourceType;
+        }
+
         public String[] getModifiers() {
             return modifiers;
         }
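
For readers following the API change, a minimal usage sketch (illustration only, not part of the patch; the column names and the driver class are made up) of how the reworked Field constructors are meant to be used:

    import org.apache.hawq.pxf.api.Metadata;
    import org.apache.hawq.pxf.api.utilities.EnumHawqType;

    public class FieldApiSketch {
        public static void main(String[] args) {
            // A Hive "double" column exposed to HAWQ as float8, keeping the source type for display.
            Metadata.Field salary =
                    new Metadata.Field("salary", EnumHawqType.Float8Type, "double");

            // A parameterized column: decimal(10,2) is exposed as numeric with two modifiers.
            Metadata.Field price =
                    new Metadata.Field("price", EnumHawqType.NumericType, "decimal(10,2)",
                            new String[] {"10", "2"});

            System.out.println(salary.getName() + ": " + salary.getType().getTypeName()
                    + " (source: " + salary.getSourceType() + ")");
            System.out.println(price.getName() + ": " + price.getType().getTypeName());
        }
    }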

http://git-wip-us.apache.org/repos/asf/incubator-hawq/blob/7b837239/pxf/pxf-api/src/main/java/org/apache/hawq/pxf/api/utilities/EnumHawqType.java
----------------------------------------------------------------------
diff --git a/pxf/pxf-api/src/main/java/org/apache/hawq/pxf/api/utilities/EnumHawqType.java b/pxf/pxf-api/src/main/java/org/apache/hawq/pxf/api/utilities/EnumHawqType.java
new file mode 100644
index 0000000..61189d9
--- /dev/null
+++ b/pxf/pxf-api/src/main/java/org/apache/hawq/pxf/api/utilities/EnumHawqType.java
@@ -0,0 +1,77 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.hawq.pxf.api.utilities;
+
+import java.io.IOException;
+import org.codehaus.jackson.JsonGenerator;
+import org.codehaus.jackson.map.JsonSerializer;
+import org.codehaus.jackson.map.annotate.JsonSerialize;
+import org.codehaus.jackson.map.SerializerProvider;
+import org.codehaus.jackson.JsonProcessingException;
+
+class EnumHawqTypeSerializer extends JsonSerializer<EnumHawqType> {
+
+    @Override
+    public void serialize(EnumHawqType value, JsonGenerator generator,
+              SerializerProvider provider) throws IOException,
+              JsonProcessingException {
+      generator.writeString(value.getTypeName());
+    }
+  }
+
+@JsonSerialize(using = EnumHawqTypeSerializer.class)
+public enum EnumHawqType {
+    Int2Type("int2"),
+    Int4Type("int4"),
+    Int8Type("int8"),
+    Float4Type("float4"),
+    Float8Type("float8"),
+    TextType("text"),
+    VarcharType("varchar", (byte) 1),
+    ByteaType("bytea"),
+    DateType("date"),
+    TimestampType("timestamp"),
+    BoolType("bool"),
+    NumericType("numeric", (byte) 2),
+    BpcharType("bpchar", (byte) 1);
+
+    private String typeName;
+    private byte modifiersNum;
+
+    EnumHawqType(String typeName) {
+        this.typeName = typeName;
+    }
+
+    EnumHawqType(String typeName, byte modifiersNum) {
+        this(typeName);
+        this.modifiersNum = modifiersNum;
+    }
+
+    public String getTypeName() {
+        return this.typeName;
+    }
+
+    public byte getModifiersNum() {
+        return this.modifiersNum;
+    }
+}
+
+
+
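
A quick sketch (illustration only, not part of the patch) of what the custom serializer buys: with annotations enabled, Jackson writes the HAWQ type name rather than the enum constant name. The driver class below is hypothetical; it assumes the Jackson 1.x ObjectMapper already used by the service module.

    import org.apache.hawq.pxf.api.utilities.EnumHawqType;
    import org.codehaus.jackson.map.ObjectMapper;
    import org.codehaus.jackson.map.SerializationConfig;

    public class EnumHawqTypeJsonSketch {
        public static void main(String[] args) throws Exception {
            ObjectMapper mapper = new ObjectMapper();
            // Annotation support makes Jackson honor @JsonSerialize on the enum.
            mapper.configure(SerializationConfig.Feature.USE_ANNOTATIONS, true);
            // Expected to print "varchar" (a JSON string), not "VarcharType".
            System.out.println(mapper.writeValueAsString(EnumHawqType.VarcharType));
        }
    }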

http://git-wip-us.apache.org/repos/asf/incubator-hawq/blob/7b837239/pxf/pxf-hive/src/main/java/org/apache/hawq/pxf/plugins/hive/utilities/EnumHiveToHawqTypes.java
----------------------------------------------------------------------
diff --git a/pxf/pxf-hive/src/main/java/org/apache/hawq/pxf/plugins/hive/utilities/EnumHiveToHawqTypes.java b/pxf/pxf-hive/src/main/java/org/apache/hawq/pxf/plugins/hive/utilities/EnumHiveToHawqTypes.java
new file mode 100644
index 0000000..9e0e236
--- /dev/null
+++ b/pxf/pxf-hive/src/main/java/org/apache/hawq/pxf/plugins/hive/utilities/EnumHiveToHawqTypes.java
@@ -0,0 +1,70 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.hawq.pxf.plugins.hive.utilities;
+
+import org.apache.hawq.pxf.api.utilities.EnumHawqType;
+import org.apache.hawq.pxf.api.UnsupportedTypeException;
+
+public enum EnumHiveToHawqTypes {
+
+    TinyintType("tinyint", EnumHawqType.Int2Type),
+    SmallintType("smallint", EnumHawqType.Int2Type),
+    IntType("int", EnumHawqType.Int4Type),
+    BigintType("bigint", EnumHawqType.Int8Type),
+    BooleanType("boolean", EnumHawqType.BoolType),
+    FloatType("float", EnumHawqType.Float4Type),
+    DoubleType("double", EnumHawqType.Float8Type),
+    StringType("string", EnumHawqType.TextType),
+    BinaryType("binary", EnumHawqType.ByteaType),
+    TimestampType("timestamp", EnumHawqType.TimestampType),
+    DateType("date", EnumHawqType.DateType),
+    DecimalType("decimal", EnumHawqType.NumericType),
+    VarcharType("varchar", EnumHawqType.VarcharType),
+    CharType("char", EnumHawqType.BpcharType);
+
+    private String typeName;
+    private EnumHawqType hawqType;
+
+    EnumHiveToHawqTypes(String typeName, EnumHawqType hawqType) {
+        this.typeName = typeName;
+        this.hawqType = hawqType;
+    }
+
+    public String getTypeName() {
+        return this.typeName;
+    }
+
+    public EnumHawqType getHawqType() {
+        return this.hawqType;
+    }
+
+    public static EnumHawqType getHawqType(String hiveTypeName) {
+        for (EnumHiveToHawqTypes t : values()) {
+            if (t.getTypeName().equals(hiveTypeName)) {
+                return t.getHawqType();
+            } else if ((t.getHawqType().getModifiersNum() > 0) && t.getTypeName().startsWith(hiveTypeName)) {
+                return t.getHawqType();
+            }
+        }
+        throw new UnsupportedTypeException("Unable to map Hive's type: " + hiveTypeName
+                + " to HAWQ's type");
+    }
+
+}
\ No newline at end of file
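
For illustration only (not part of the patch; the driver class and the complex type literal are made up), a sketch of how the lookup behaves for mapped and unmapped Hive type names:

    import org.apache.hawq.pxf.api.UnsupportedTypeException;
    import org.apache.hawq.pxf.api.utilities.EnumHawqType;
    import org.apache.hawq.pxf.plugins.hive.utilities.EnumHiveToHawqTypes;

    public class HiveToHawqLookupSketch {
        public static void main(String[] args) {
            // Plain type names resolve through the mapping table.
            EnumHawqType bigint = EnumHiveToHawqTypes.getHawqType("bigint");    // Int8Type
            EnumHawqType varchar = EnumHiveToHawqTypes.getHawqType("varchar");  // VarcharType
            System.out.println(bigint.getTypeName() + ", " + varchar.getTypeName());

            // Anything outside the table is rejected with UnsupportedTypeException.
            try {
                EnumHiveToHawqTypes.getHawqType("map<string,int>");
            } catch (UnsupportedTypeException e) {
                System.out.println(e.getMessage());
            }
        }
    }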

http://git-wip-us.apache.org/repos/asf/incubator-hawq/blob/7b837239/pxf/pxf-hive/src/main/java/org/apache/hawq/pxf/plugins/hive/utilities/HiveUtilities.java
----------------------------------------------------------------------
diff --git a/pxf/pxf-hive/src/main/java/org/apache/hawq/pxf/plugins/hive/utilities/HiveUtilities.java b/pxf/pxf-hive/src/main/java/org/apache/hawq/pxf/plugins/hive/utilities/HiveUtilities.java
index 7dfe410..cc3b6c6 100644
--- a/pxf/pxf-hive/src/main/java/org/apache/hawq/pxf/plugins/hive/utilities/HiveUtilities.java
+++ b/pxf/pxf-hive/src/main/java/org/apache/hawq/pxf/plugins/hive/utilities/HiveUtilities.java
@@ -20,9 +20,12 @@ package org.apache.hawq.pxf.plugins.hive.utilities;
  */
 
 
+import java.util.Arrays;
 import java.util.List;
 import java.util.ArrayList;
 
+import org.apache.hawq.pxf.api.utilities.EnumHawqType;
+import org.apache.hawq.pxf.plugins.hive.utilities.EnumHiveToHawqTypes;
 import org.apache.commons.lang.StringUtils;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
@@ -32,7 +35,6 @@ import org.apache.hadoop.hive.metastore.TableType;
 import org.apache.hadoop.hive.metastore.api.FieldSchema;
 import org.apache.hadoop.hive.metastore.api.MetaException;
 import org.apache.hadoop.hive.metastore.api.Table;
-
 import org.apache.hawq.pxf.api.Metadata;
 import org.apache.hawq.pxf.api.UnsupportedTypeException;
 
@@ -88,20 +90,20 @@ public class HiveUtilities {
      * Unsupported types will result in an exception.
      * <br>
      * The supported mappings are:<ul>
-     * <li>{@code tinyint -> int2}</li>
-     * <li>{@code smallint -> int2}</li>
-     * <li>{@code int -> int4}</li>
-     * <li>{@code bigint -> int8}</li>
-     * <li>{@code boolean -> bool}</li>
-     * <li>{@code float -> float4}</li>
-     * <li>{@code double -> float8}</li>
-     * <li>{@code string -> text}</li>
-     * <li>{@code binary -> bytea}</li>
-     * <li>{@code timestamp -> timestamp}</li>
-     * <li>{@code date -> date}</li>
-     * <li>{@code decimal(precision, scale) -> numeric(precision, scale)}</li>
-     * <li>{@code varchar(size) -> varchar(size)}</li>
-     * <li>{@code char(size) -> bpchar(size)}</li>
+         * <li>{@code tinyint -> int2}</li>
+         * <li>{@code smallint -> int2}</li>
+         * <li>{@code int -> int4}</li>
+         * <li>{@code bigint -> int8}</li>
+         * <li>{@code boolean -> bool}</li>
+         * <li>{@code float -> float4}</li>
+         * <li>{@code double -> float8}</li>
+         * <li>{@code string -> text}</li>
+         * <li>{@code binary -> bytea}</li>
+         * <li>{@code timestamp -> timestamp}</li>
+         * <li>{@code date -> date}</li>
+         * <li>{@code decimal(precision, scale) -> numeric(precision, scale)}</li>
+         * <li>{@code varchar(size) -> varchar(size)}</li>
+         * <li>{@code char(size) -> bpchar(size)}</li>
      * </ul>
      *
      * @param hiveColumn hive column schema
@@ -110,72 +112,25 @@ public class HiveUtilities {
      */
     public static Metadata.Field mapHiveType(FieldSchema hiveColumn) throws UnsupportedTypeException {
         String fieldName = hiveColumn.getName();
-        String hiveType = hiveColumn.getType();
-        String mappedType;
-        String[] modifiers = null;
-
-        // check parameterized types:
-        if (hiveType.startsWith("varchar(") ||
-                hiveType.startsWith("char(")) {
-            String[] toks = hiveType.split("[(,)]");
-            if (toks.length != 2) {
-                throw new UnsupportedTypeException( "HAWQ does not support type " + hiveType + " (Field " + fieldName + "), " +
-                        "expected type of the form <type name>(<parameter>)");
-            }
-            mappedType = toks[0];
-            if (mappedType.equals("char")) {
-                mappedType = "bpchar";
-            }
-            modifiers = new String[] {toks[1]};
-        } else if (hiveType.startsWith("decimal(")) {
-            String[] toks = hiveType.split("[(,)]");
-            if (toks.length != 3) {
-                throw new UnsupportedTypeException( "HAWQ does not support type " + hiveType + " (Field " + fieldName + "), " +
-                        "expected type of the form <type name>(<parameter>,<parameter>)");
-            }
-            mappedType = "numeric";
-            modifiers = new String[] {toks[1], toks[2]};
-        } else {
-
-            switch (hiveType) {
-            case "tinyint":
-            case "smallint":
-               mappedType = "int2";
-               break;
-            case "int":
-               mappedType = "int4";
-               break;
-            case "bigint":
-               mappedType = "int8";
-               break;
-            case "boolean":
-               mappedType = "bool";
-               break;
-            case "timestamp":
-            case "date":
-                mappedType = hiveType;
-                break;
-            case "float":
-                mappedType = "float4";
-                break;
-            case "double":
-                mappedType = "float8";
-                break;
-            case "string":
-                mappedType = "text";
-                break;
-            case "binary":
-                mappedType = "bytea";
-                break;
-            default:
-                throw new UnsupportedTypeException(
-                        "HAWQ does not support type " + hiveType + " (Field " + fieldName + ")");
-            }
+        String hiveType = hiveColumn.getType(); // Type name and modifiers if any
+        String[] tokens = hiveType.split("[(,)]");
+        String hiveTypeName = tokens[0]; // Type name
+        String[] modifiers = Arrays.copyOfRange(tokens, 1, tokens.length); // Modifiers
+        EnumHawqType hawqType = EnumHiveToHawqTypes.getHawqType(hiveTypeName);
+
+        if (hawqType.getModifiersNum() > 0 && modifiers.length != hawqType.getModifiersNum()) {
+            throw new UnsupportedTypeException(
+                    "HAWQ does not support type " + hiveType + " (Field "
+                            + fieldName + "), "
+                            + "expected number of modifiers: "
+                            + hawqType.getModifiersNum()
+                            + ", actual number of modifiers: " + modifiers.length);
         }
+
         if (!verifyModifers(modifiers)) {
             throw new UnsupportedTypeException("HAWQ does not support type " + hiveType + " (Field " + fieldName + "), modifiers should be integers");
         }
-        return new Metadata.Field(fieldName, mappedType, modifiers);
+        return new Metadata.Field(fieldName, hawqType, hiveType, modifiers);
     }
 
     /**
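
For illustration only (not part of the patch; the driver class and column are made up), a sketch of what the rewritten mapHiveType() is expected to produce for a parameterized Hive type:

    import org.apache.hadoop.hive.metastore.api.FieldSchema;
    import org.apache.hawq.pxf.api.Metadata;
    import org.apache.hawq.pxf.plugins.hive.utilities.HiveUtilities;

    public class MapHiveTypeSketch {
        public static void main(String[] args) throws Exception {
            // "decimal(10,2)" is split on "[(,)]" into the type name and its two modifiers.
            FieldSchema column = new FieldSchema("amount", "decimal(10,2)", null);
            Metadata.Field field = HiveUtilities.mapHiveType(column);
            System.out.println(field.getType().getTypeName());                    // numeric
            System.out.println(field.getSourceType());                            // decimal(10,2)
            System.out.println(java.util.Arrays.toString(field.getModifiers()));  // [10, 2]
        }
    }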

http://git-wip-us.apache.org/repos/asf/incubator-hawq/blob/7b837239/pxf/pxf-hive/src/test/java/org/apache/hawq/pxf/plugins/hive/HiveMetadataFetcherTest.java
----------------------------------------------------------------------
diff --git a/pxf/pxf-hive/src/test/java/org/apache/hawq/pxf/plugins/hive/HiveMetadataFetcherTest.java b/pxf/pxf-hive/src/test/java/org/apache/hawq/pxf/plugins/hive/HiveMetadataFetcherTest.java
index 1323eea..d9d97fc 100644
--- a/pxf/pxf-hive/src/test/java/org/apache/hawq/pxf/plugins/hive/HiveMetadataFetcherTest.java
+++ b/pxf/pxf-hive/src/test/java/org/apache/hawq/pxf/plugins/hive/HiveMetadataFetcherTest.java
@@ -149,10 +149,10 @@ public class HiveMetadataFetcherTest {
         assertEquals(2, resultFields.size());
         Metadata.Field field = resultFields.get(0);
         assertEquals("field1", field.getName());
-        assertEquals("text", field.getType()); // converted type
+        assertEquals("text", field.getType().getTypeName()); // converted type
         field = resultFields.get(1);
         assertEquals("field2", field.getName());
-        assertEquals("int4", field.getType());
+        assertEquals("int4", field.getType().getTypeName());
     }
 
     @Test
@@ -204,10 +204,10 @@ public class HiveMetadataFetcherTest {
             assertEquals(2, resultFields.size());
             Metadata.Field field = resultFields.get(0);
             assertEquals("field1", field.getName());
-            assertEquals("text", field.getType()); // converted type
+            assertEquals("text", field.getType().getTypeName()); // converted type
             field = resultFields.get(1);
             assertEquals("field2", field.getName());
-            assertEquals("int4", field.getType());
+            assertEquals("int4", field.getType().getTypeName());
         }
     }
 
@@ -258,10 +258,10 @@ public class HiveMetadataFetcherTest {
         assertEquals(2, resultFields.size());
         Metadata.Field field = resultFields.get(0);
         assertEquals("field1", field.getName());
-        assertEquals("text", field.getType()); // converted type
+        assertEquals("text", field.getType().getTypeName()); // converted type
         field = resultFields.get(1);
         assertEquals("field2", field.getName());
-        assertEquals("int4", field.getType());
+        assertEquals("int4", field.getType().getTypeName());
     }
 
     private void prepareConstruction() throws Exception {

http://git-wip-us.apache.org/repos/asf/incubator-hawq/blob/7b837239/pxf/pxf-hive/src/test/java/org/apache/hawq/pxf/plugins/hive/utilities/HiveUtilitiesTest.java
----------------------------------------------------------------------
diff --git a/pxf/pxf-hive/src/test/java/org/apache/hawq/pxf/plugins/hive/utilities/HiveUtilitiesTest.java b/pxf/pxf-hive/src/test/java/org/apache/hawq/pxf/plugins/hive/utilities/HiveUtilitiesTest.java
index 466dedb..d13880e 100644
--- a/pxf/pxf-hive/src/test/java/org/apache/hawq/pxf/plugins/hive/utilities/HiveUtilitiesTest.java
+++ b/pxf/pxf-hive/src/test/java/org/apache/hawq/pxf/plugins/hive/utilities/HiveUtilitiesTest.java
@@ -24,9 +24,9 @@ import static org.junit.Assert.*;
 
 import org.apache.hadoop.hive.metastore.api.FieldSchema;
 import org.junit.Test;
-
 import org.apache.hawq.pxf.api.Metadata;
 import org.apache.hawq.pxf.api.UnsupportedTypeException;
+import org.apache.hawq.pxf.plugins.hive.utilities.EnumHiveToHawqTypes;
 
 public class HiveUtilitiesTest {
 
@@ -63,8 +63,7 @@ public class HiveUtilitiesTest {
             HiveUtilities.mapHiveType(hiveColumn);
             fail("unsupported type");
         } catch (UnsupportedTypeException e) {
-            assertEquals("HAWQ does not support type " + hiveColumn.getType() + " (Field " + hiveColumn.getName() + ")",
-                    e.getMessage());
+            assertEquals("Unable to map Hive's type: " + hiveColumn.getType() + " to HAWQ's type", e.getMessage());
         }
     }
 
@@ -85,12 +84,13 @@ public class HiveUtilitiesTest {
          */
         for (String[] line: typesMappings) {
             String hiveType = line[0];
-            String expectedType = line[1];
+            String hawqTypeName = line[1];
             hiveColumn = new FieldSchema("field" + hiveType, hiveType, null);
             Metadata.Field result = HiveUtilities.mapHiveType(hiveColumn);
             assertEquals("field" + hiveType, result.getName());
-            assertEquals(expectedType, result.getType());
-            assertNull(result.getModifiers());
+            assertEquals(hawqTypeName, result.getType().getTypeName());
+            assertEquals(result.getModifiers().length, 0);
+            //assertNull(result.getModifiers());
         }
     }
 
@@ -109,7 +109,7 @@ public class HiveUtilitiesTest {
             hiveColumn = new FieldSchema("field" + hiveType, hiveType, null);
             Metadata.Field result = HiveUtilities.mapHiveType(hiveColumn);
             assertEquals("field" + hiveType, result.getName());
-            assertEquals(expectedType, result.getType());
+            assertEquals(expectedType, result.getType().getTypeName());
             assertArrayEquals(expectedModifiers, result.getModifiers());
         }
     }
@@ -124,7 +124,7 @@ public class HiveUtilitiesTest {
             fail("should fail with bad numeric type error");
         } catch (UnsupportedTypeException e) {
             String errorMsg = "HAWQ does not support type " + badHiveType + " (Field badNumeric), " +
-                "expected type of the form <type name>(<parameter>,<parameter>)";
+                "expected number of modifiers: 2, actual number of modifiers: 1";
             assertEquals(errorMsg, e.getMessage());
         }
 
@@ -135,7 +135,7 @@ public class HiveUtilitiesTest {
             fail("should fail with bad char type error");
         } catch (UnsupportedTypeException e) {
             String errorMsg = "HAWQ does not support type " + badHiveType + " (Field badChar), " +
-                "expected type of the form <type name>(<parameter>)";
+                    "expected number of modifiers: 1, actual number of modifiers: 3";
             assertEquals(errorMsg, e.getMessage());
         }
 

http://git-wip-us.apache.org/repos/asf/incubator-hawq/blob/7b837239/pxf/pxf-service/src/main/java/org/apache/hawq/pxf/service/MetadataResponse.java
----------------------------------------------------------------------
diff --git a/pxf/pxf-service/src/main/java/org/apache/hawq/pxf/service/MetadataResponse.java b/pxf/pxf-service/src/main/java/org/apache/hawq/pxf/service/MetadataResponse.java
index ff73499..396b653 100644
--- a/pxf/pxf-service/src/main/java/org/apache/hawq/pxf/service/MetadataResponse.java
+++ b/pxf/pxf-service/src/main/java/org/apache/hawq/pxf/service/MetadataResponse.java
@@ -65,6 +65,7 @@ public class MetadataResponse implements StreamingOutput {
             WebApplicationException {
         DataOutputStream dos = new DataOutputStream(output);
         ObjectMapper mapper = new ObjectMapper();
+        mapper.configure(org.codehaus.jackson.map.SerializationConfig.Feature.USE_ANNOTATIONS, true);
         mapper.setSerializationInclusion(Inclusion.NON_EMPTY); // ignore empty fields
 
         if(metadataList == null || metadataList.isEmpty()) {
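
For context (illustration only, not part of the patch): USE_ANNOTATIONS is what makes Jackson pick up the @JsonSerialize annotation on EnumHawqType, so a field serializes with its HAWQ type name plus the new sourceType attribute. A hypothetical stand-alone sketch, mirroring the new MetadataResponseFormatterTest case further down in this diff:

    import org.apache.hawq.pxf.api.Metadata;
    import org.apache.hawq.pxf.api.utilities.EnumHawqType;
    import org.codehaus.jackson.map.ObjectMapper;
    import org.codehaus.jackson.map.SerializationConfig;
    import org.codehaus.jackson.map.annotate.JsonSerialize.Inclusion;

    public class MetadataFieldJsonSketch {
        public static void main(String[] args) throws Exception {
            ObjectMapper mapper = new ObjectMapper();
            mapper.configure(SerializationConfig.Feature.USE_ANNOTATIONS, true);
            mapper.setSerializationInclusion(Inclusion.NON_EMPTY); // ignore empty fields

            Metadata.Field field = new Metadata.Field("field1", EnumHawqType.Float8Type, "double");
            // Expected shape, matching the new formatter test:
            // {"name":"field1","type":"float8","sourceType":"double"}
            System.out.println(mapper.writeValueAsString(field));
        }
    }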

http://git-wip-us.apache.org/repos/asf/incubator-hawq/blob/7b837239/pxf/pxf-service/src/main/java/org/apache/hawq/pxf/service/MetadataResponseFormatter.java
----------------------------------------------------------------------
diff --git a/pxf/pxf-service/src/main/java/org/apache/hawq/pxf/service/MetadataResponseFormatter.java b/pxf/pxf-service/src/main/java/org/apache/hawq/pxf/service/MetadataResponseFormatter.java
index 92d11de..025797b 100644
--- a/pxf/pxf-service/src/main/java/org/apache/hawq/pxf/service/MetadataResponseFormatter.java
+++ b/pxf/pxf-service/src/main/java/org/apache/hawq/pxf/service/MetadataResponseFormatter.java
@@ -84,7 +84,8 @@ public class MetadataResponseFormatter {
                 for (Metadata.Field field : metadata.getFields()) {
                     result.append("Field #").append(++i).append(": [")
                             .append("Name: ").append(field.getName())
-                            .append(", Type: ").append(field.getType()).append("] ");
+                            .append(", Type: ").append(field.getType().getTypeName())
+                            .append(", Source type: ").append(field.getSourceType()).append("] ");
                 }
             }
             LOG.debug(result);

http://git-wip-us.apache.org/repos/asf/incubator-hawq/blob/7b837239/pxf/pxf-service/src/test/java/org/apache/hawq/pxf/service/MetadataResponseFormatterTest.java
----------------------------------------------------------------------
diff --git a/pxf/pxf-service/src/test/java/org/apache/hawq/pxf/service/MetadataResponseFormatterTest.java b/pxf/pxf-service/src/test/java/org/apache/hawq/pxf/service/MetadataResponseFormatterTest.java
index 0182835..16c457b 100644
--- a/pxf/pxf-service/src/test/java/org/apache/hawq/pxf/service/MetadataResponseFormatterTest.java
+++ b/pxf/pxf-service/src/test/java/org/apache/hawq/pxf/service/MetadataResponseFormatterTest.java
@@ -26,7 +26,7 @@ import java.util.List;
 
 import static org.junit.Assert.*;
 import org.apache.hawq.pxf.api.Metadata;
-
+import org.apache.hawq.pxf.api.utilities.EnumHawqType;
 import org.junit.Test;
 
 public class MetadataResponseFormatterTest {
@@ -49,14 +49,14 @@ public class MetadataResponseFormatterTest {
         List<Metadata.Field> fields = new ArrayList<Metadata.Field>();
         Metadata.Item itemName = new Metadata.Item("default", "table1");
         Metadata metadata = new Metadata(itemName, fields);
-        fields.add(new Metadata.Field("field1", "int"));
-        fields.add(new Metadata.Field("field2", "text"));
+        fields.add(new Metadata.Field("field1", EnumHawqType.Int8Type));
+        fields.add(new Metadata.Field("field2", EnumHawqType.TextType));
         metadataList.add(metadata);
 
         response = MetadataResponseFormatter.formatResponse(metadataList, "path.file");
         StringBuilder expected = new StringBuilder("{\"PXFMetadata\":[{");
         expected.append("\"item\":{\"path\":\"default\",\"name\":\"table1\"},")
-                .append("\"fields\":[{\"name\":\"field1\",\"type\":\"int\"},{\"name\":\"field2\",\"type\":\"text\"}]}]}");
+                .append("\"fields\":[{\"name\":\"field1\",\"type\":\"int8\"},{\"name\":\"field2\",\"type\":\"text\"}]}]}");
 
         assertEquals(expected.toString(), convertResponseToString(response));
     }
@@ -67,14 +67,14 @@ public class MetadataResponseFormatterTest {
         List<Metadata.Field> fields = new ArrayList<Metadata.Field>();
         Metadata.Item itemName = new Metadata.Item("default", "table1");
         Metadata metadata = new Metadata(itemName, fields);
-        fields.add(new Metadata.Field("field1", "int", null));
-        fields.add(new Metadata.Field("field2", "text", new String[] {}));
+        fields.add(new Metadata.Field("field1", EnumHawqType.Int8Type));
+        fields.add(new Metadata.Field("field2", EnumHawqType.TextType, new String[] {}));
         metadataList.add(metadata);
 
         response = MetadataResponseFormatter.formatResponse(metadataList, "path.file");
         StringBuilder expected = new StringBuilder("{\"PXFMetadata\":[{");
         expected.append("\"item\":{\"path\":\"default\",\"name\":\"table1\"},")
-                .append("\"fields\":[{\"name\":\"field1\",\"type\":\"int\"},{\"name\":\"field2\",\"type\":\"text\"}]}]}");
+                .append("\"fields\":[{\"name\":\"field1\",\"type\":\"int8\"},{\"name\":\"field2\",\"type\":\"text\"}]}]}");
 
         assertEquals(expected.toString(), convertResponseToString(response));
     }
@@ -85,10 +85,10 @@ public class MetadataResponseFormatterTest {
         List<Metadata.Field> fields = new ArrayList<Metadata.Field>();
         Metadata.Item itemName = new Metadata.Item("default", "table1");
         Metadata metadata = new Metadata(itemName, fields);
-        fields.add(new Metadata.Field("field1", "int"));
-        fields.add(new Metadata.Field("field2", "numeric",
+        fields.add(new Metadata.Field("field1", EnumHawqType.Int8Type));
+        fields.add(new Metadata.Field("field2", EnumHawqType.NumericType,
                 new String[] {"1349", "1789"}));
-        fields.add(new Metadata.Field("field3", "char",
+        fields.add(new Metadata.Field("field3", EnumHawqType.BpcharType,
                 new String[] {"50"}));
         metadataList.add(metadata);
 
@@ -96,9 +96,28 @@ public class MetadataResponseFormatterTest {
         StringBuilder expected = new StringBuilder("{\"PXFMetadata\":[{");
         expected.append("\"item\":{\"path\":\"default\",\"name\":\"table1\"},")
                 .append("\"fields\":[")
-                .append("{\"name\":\"field1\",\"type\":\"int\"},")
+                .append("{\"name\":\"field1\",\"type\":\"int8\"},")
                 .append("{\"name\":\"field2\",\"type\":\"numeric\",\"modifiers\":[\"1349\",\"1789\"]},")
-                .append("{\"name\":\"field3\",\"type\":\"char\",\"modifiers\":[\"50\"]}")
+                .append("{\"name\":\"field3\",\"type\":\"bpchar\",\"modifiers\":[\"50\"]}")
+                .append("]}]}");
+
+        assertEquals(expected.toString(), convertResponseToString(response));
+    }
+
+    @Test
+    public void formatResponseStringWithSourceType() throws Exception {
+        List<Metadata> metadataList = new ArrayList<Metadata>();
+        List<Metadata.Field> fields = new ArrayList<Metadata.Field>();
+        Metadata.Item itemName = new Metadata.Item("default", "table1");
+        Metadata metadata = new Metadata(itemName, fields);
+        fields.add(new Metadata.Field("field1", EnumHawqType.Float8Type, "double"));
+        metadataList.add(metadata);
+
+        response = MetadataResponseFormatter.formatResponse(metadataList, "path.file");
+        StringBuilder expected = new StringBuilder("{\"PXFMetadata\":[{");
+        expected.append("\"item\":{\"path\":\"default\",\"name\":\"table1\"},")
+                .append("\"fields\":[")
+                .append("{\"name\":\"field1\",\"type\":\"float8\",\"sourceType\":\"double\"}")
                 .append("]}]}");
 
         assertEquals(expected.toString(), convertResponseToString(response));
@@ -146,7 +165,7 @@ public class MetadataResponseFormatterTest {
         List<Metadata.Field> fields = new ArrayList<Metadata.Field>();
         Metadata.Item itemName = new Metadata.Item("default", "table1");
         Metadata metadata = new Metadata(itemName, fields);
-        fields.add(new Metadata.Field("field1", "int"));
+        fields.add(new Metadata.Field("field1", EnumHawqType.Int8Type));
         metadataList.add(null);
         metadataList.add(metadata);
         try {
@@ -165,8 +184,8 @@ public class MetadataResponseFormatterTest {
             List<Metadata.Field> fields = new ArrayList<Metadata.Field>();
             Metadata.Item itemName = new Metadata.Item("default", "table"+i);
             Metadata metadata = new Metadata(itemName, fields);
-            fields.add(new Metadata.Field("field1", "int"));
-            fields.add(new Metadata.Field("field2", "text"));
+            fields.add(new Metadata.Field("field1", EnumHawqType.Int8Type));
+            fields.add(new Metadata.Field("field2", EnumHawqType.TextType));
             metdataList.add(metadata);
         }
         response = MetadataResponseFormatter.formatResponse(metdataList, "path.file");
@@ -179,7 +198,7 @@ public class MetadataResponseFormatterTest {
                 expected.append(",");
             }
             expected.append("{\"item\":{\"path\":\"default\",\"name\":\"table").append(i).append("\"},");
-            expected.append("\"fields\":[{\"name\":\"field1\",\"type\":\"int\"},{\"name\":\"field2\",\"type\":\"text\"}]}");
+            expected.append("\"fields\":[{\"name\":\"field1\",\"type\":\"int8\"},{\"name\":\"field2\",\"type\":\"text\"}]}");
         }
         expected.append("]}");
 
@@ -193,8 +212,8 @@ public class MetadataResponseFormatterTest {
             List<Metadata.Field> fields = new ArrayList<Metadata.Field>();
             Metadata.Item itemName = new Metadata.Item("default"+i, "table"+i);
             Metadata metadata = new Metadata(itemName, fields);
-            fields.add(new Metadata.Field("field1", "int"));
-            fields.add(new Metadata.Field("field2", "text"));
+            fields.add(new Metadata.Field("field1", EnumHawqType.Int8Type));
+            fields.add(new Metadata.Field("field2", EnumHawqType.TextType));
             metdataList.add(metadata);
         }
         response = MetadataResponseFormatter.formatResponse(metdataList, "path.file");
@@ -206,7 +225,7 @@ public class MetadataResponseFormatterTest {
                 expected.append(",");
             }
             expected.append("{\"item\":{\"path\":\"default").append(i).append("\",\"name\":\"table").append(i).append("\"},");
-            expected.append("\"fields\":[{\"name\":\"field1\",\"type\":\"int\"},{\"name\":\"field2\",\"type\":\"text\"}]}");
+            expected.append("\"fields\":[{\"name\":\"field1\",\"type\":\"int8\"},{\"name\":\"field2\",\"type\":\"text\"}]}");
         }
         expected.append("]}");
 

http://git-wip-us.apache.org/repos/asf/incubator-hawq/blob/7b837239/src/backend/catalog/external/externalmd.c
----------------------------------------------------------------------
diff --git a/src/backend/catalog/external/externalmd.c b/src/backend/catalog/external/externalmd.c
index e65d741..926605f 100644
--- a/src/backend/catalog/external/externalmd.c
+++ b/src/backend/catalog/external/externalmd.c
@@ -125,6 +125,10 @@ static PxfItem *ParsePxfItem(struct json_object *pxfMD, char* profile)
 
                struct json_object *fieldType = json_object_object_get(jsonCol, "type");
                pxfField->type = pstrdup(json_object_get_string(fieldType));
+
+               struct json_object *sourceFieldType = json_object_object_get(jsonCol, "sourceType");
+               pxfField->sourceType = pstrdup(json_object_get_string(sourceFieldType));
+
                pxfField->nTypeModifiers = 0;
                
                elog(DEBUG1, "Parsing field %s, type %s", pxfField->name, pxfField->type);

http://git-wip-us.apache.org/repos/asf/incubator-hawq/blob/7b837239/src/backend/utils/adt/pxf_functions.c
----------------------------------------------------------------------
diff --git a/src/backend/utils/adt/pxf_functions.c b/src/backend/utils/adt/pxf_functions.c
index ee19a8b..806565a 100644
--- a/src/backend/utils/adt/pxf_functions.c
+++ b/src/backend/utils/adt/pxf_functions.c
@@ -86,8 +86,8 @@ Datum pxf_get_item_fields(PG_FUNCTION_ARGS)
        FuncCallContext *funcctx;
        HeapTuple tuple;
        Datum result;
-       Datum values[4];
-       bool nulls[4];
+       Datum values[5];
+       bool nulls[5];
 
        ItemContext *item_context;
 
@@ -126,7 +126,7 @@ Datum pxf_get_item_fields(PG_FUNCTION_ARGS)
                 * build tupdesc for result tuples. This must match this function's
                 * pg_proc entry!
                 */
-               tupdesc = CreateTemplateTupleDesc(4, false);
+               tupdesc = CreateTemplateTupleDesc(5, false);
                TupleDescInitEntry(tupdesc, (AttrNumber) 1, "path",
                TEXTOID, -1, 0);
                TupleDescInitEntry(tupdesc, (AttrNumber) 2, "itemname",
@@ -135,6 +135,8 @@ Datum pxf_get_item_fields(PG_FUNCTION_ARGS)
                TEXTOID, -1, 0);
                TupleDescInitEntry(tupdesc, (AttrNumber) 4, "fieldtype",
                TEXTOID, -1, 0);
+               TupleDescInitEntry(tupdesc, (AttrNumber) 5, "sourcefieldtype",
+               TEXTOID, -1, 0);
 
                funcctx->tuple_desc = BlessTupleDesc(tupdesc);
                MemoryContextSwitchTo(oldcontext);
@@ -169,6 +171,7 @@ Datum pxf_get_item_fields(PG_FUNCTION_ARGS)
        values[1] = CStringGetTextDatum(item->name);
        values[2] = CStringGetTextDatum(field->name);
        values[3] = CStringGetTextDatum(field->type);
+       values[4] = CStringGetTextDatum(field->sourceType);
 
        tuple = heap_form_tuple(funcctx->tuple_desc, values, nulls);
        result = HeapTupleGetDatum(tuple);

http://git-wip-us.apache.org/repos/asf/incubator-hawq/blob/7b837239/src/bin/psql/describe.c
----------------------------------------------------------------------
diff --git a/src/bin/psql/describe.c b/src/bin/psql/describe.c
index f1de41b..ab2aa8b 100644
--- a/src/bin/psql/describe.c
+++ b/src/bin/psql/describe.c
@@ -4263,8 +4263,13 @@ describePxfTable(const char *profile, const char *pattern, bool verbose)
        printQueryOpt myopt = pset.popt;
        printTableContent cont;
        int                     cols = 0;
+       if (verbose)
+       {
+               cols = 3;
+       } else
+               cols = 2;
        int                     total_numrows = 0;
-       char       *headers[2];
+       char       *headers[cols];
        bool            printTableInitialized = false;
 
        char *previous_path = NULL;
@@ -4274,11 +4279,15 @@ describePxfTable(const char *profile, const char *pattern, bool verbose)
        char *itemname;
        char *fieldname;
        char *fieldtype;
+       char *sourcefieldtype;
        int total_fields = 0; //needed to know how much memory allocate for current table
 
        initPQExpBuffer(&buf);
 
-       printfPQExpBuffer(&buf, "SELECT t.*, COUNT() OVER(PARTITION BY path, itemname) as total_fields FROM\n"
+       printfPQExpBuffer(&buf, "SELECT t.path, t.itemname, t.fieldname, t.fieldtype,");
+       if (verbose)
+               appendPQExpBuffer(&buf, " sourcefieldtype, ");
+       appendPQExpBuffer(&buf,"COUNT() OVER(PARTITION BY path, itemname) as total_fields FROM\n"
                        "pxf_get_item_fields('%s', '%s') t\n", profile, pattern);
 
        res = PSQLexec(buf.data, false);
@@ -4294,7 +4303,9 @@ describePxfTable(const char *profile, const char *pattern, bool verbose)
        /* Header */
        headers[0] = gettext_noop("Column");
        headers[1] = gettext_noop("Type");
-       cols = 2;
+       if (verbose)
+               headers[2] = gettext_noop("Source type");
+
 
        for (int i = 0; i < total_numrows; i++)
        {
@@ -4303,7 +4314,14 @@ describePxfTable(const char *profile, const char *pattern, bool verbose)
                itemname = PQgetvalue(res, i, 1);
                fieldname = PQgetvalue(res, i, 2);
                fieldtype = PQgetvalue(res, i, 3);
-               total_fields = PQgetvalue(res, i, 4);
+               if (verbose)
+               {
+                       sourcefieldtype = PQgetvalue(res, i, 4);
+                       total_fields = PQgetvalue(res, i, 5);
+               } else
+               {
+                       total_fields = PQgetvalue(res, i, 4);
+               }
 
                /* First row for current table */
                if (previous_itemname == NULL
@@ -4340,6 +4358,12 @@ describePxfTable(const char *profile, const char *pattern, bool verbose)
                /* Type */
                printTableAddCell(&cont, fieldtype, false, false);
 
+               if (verbose)
+               {
+                       /*Source type */
+                       printTableAddCell(&cont, sourcefieldtype, false, false);
+               }
+
                previous_path = path;
                previous_itemname = itemname;
 

http://git-wip-us.apache.org/repos/asf/incubator-hawq/blob/7b837239/src/include/catalog/external/itemmd.h
----------------------------------------------------------------------
diff --git a/src/include/catalog/external/itemmd.h b/src/include/catalog/external/itemmd.h
index a841d63..e6dad63 100644
--- a/src/include/catalog/external/itemmd.h
+++ b/src/include/catalog/external/itemmd.h
@@ -41,6 +41,9 @@ typedef struct PxfField
        /* type name */
        char *type;
        
+       /*source type name */
+       char *sourceType;
+
        /* type modifiers, e.g. max length or precision */
        int typeModifiers[2];
        

http://git-wip-us.apache.org/repos/asf/incubator-hawq/blob/7b837239/src/include/catalog/pg_proc.h
----------------------------------------------------------------------
diff --git a/src/include/catalog/pg_proc.h b/src/include/catalog/pg_proc.h
index f3c5e77..e818909 100644
--- a/src/include/catalog/pg_proc.h
+++ b/src/include/catalog/pg_proc.h
@@ -10129,8 +10129,8 @@ DESCR("bitmap(internal)");
 DATA(insert OID = 3011 ( bmoptions  PGNSP PGUID 12 f f t f s 2 17 f "1009 16" _null_ _null_ _null_ bmoptions - _null_ n ));
 DESCR("btree(internal)");
 
-/* pxf_get_item_fields(text, text, OUT text, OUT text, OUT text, OUT text) => SETOF pg_catalog.record */
-DATA(insert OID = 9996 ( pxf_get_item_fields  PGNSP PGUID 12 f f t t v 2 2249 f "25 25" "{25,25,25,25,25,25}" "{i,i,o,o,o,o}" "{profile,pattern,path,itemname,fieldname,fieldtype}" pxf_get_item_fields - _null_ r ));
+/* pxf_get_item_fields(text, text, OUT text, OUT text, OUT text, OUT text, OUT text) => SETOF pg_catalog.record */
+DATA(insert OID = 9996 ( pxf_get_item_fields  PGNSP PGUID 12 f f t t v 2 2249 f "25 25" "{25,25,25,25,25,25,25}" "{i,i,o,o,o,o,o}" "{profile,pattern,path,itemname,fieldname,fieldtype,sourcefieldtype}" pxf_get_item_fields - _null_ r ));
 DESCR("Returns the metadata fields of external object from PXF");
 
 /* raises deprecation error */

http://git-wip-us.apache.org/repos/asf/incubator-hawq/blob/7b837239/src/include/catalog/pg_proc.sql
----------------------------------------------------------------------
diff --git a/src/include/catalog/pg_proc.sql b/src/include/catalog/pg_proc.sql
index fc475e2..987b802 100644
--- a/src/include/catalog/pg_proc.sql
+++ b/src/include/catalog/pg_proc.sql
@@ -5348,7 +5348,7 @@
 
  CREATE FUNCTION bmoptions(_text, bool) RETURNS bytea LANGUAGE internal STABLE STRICT AS 'bmoptions' WITH (OID=3011, DESCRIPTION="btree(internal)");
 
- CREATE FUNCTION pxf_get_item_fields(IN profile text, IN pattern text, OUT path text, OUT itemname text, OUT fieldname text, OUT fieldtype text) RETURNS SETOF pg_catalog.record LANGUAGE internal VOLATILE STRICT AS 'pxf_get_object_fields' WITH (OID=9996, DESCRIPTION="Returns the metadata fields of external object from PXF");
+ CREATE FUNCTION pxf_get_item_fields(IN profile text, IN pattern text, OUT path text, OUT itemname text, OUT fieldname text, OUT fieldtype text, OUT sourcefieldtype text) RETURNS SETOF pg_catalog.record LANGUAGE internal VOLATILE STRICT AS 'pxf_get_object_fields' WITH (OID=9996, DESCRIPTION="Returns the metadata fields of external object from PXF");
 
 -- raises deprecation error
  CREATE FUNCTION gp_deprecated() RETURNS void LANGUAGE internal IMMUTABLE AS 'gp_deprecated' WITH (OID=9997, DESCRIPTION="raises function deprecation error");

