HAWQ-1228. Fixed complex types for HiveText.

Project: http://git-wip-us.apache.org/repos/asf/incubator-hawq/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-hawq/commit/607184c3
Tree: http://git-wip-us.apache.org/repos/asf/incubator-hawq/tree/607184c3
Diff: http://git-wip-us.apache.org/repos/asf/incubator-hawq/diff/607184c3

Branch: refs/heads/HAWQ-1228
Commit: 607184c365d872ce9b567ee2366122aac903c4a7
Parents: 8ba2e16
Author: Oleksandr Diachenko <odiache...@pivotal.io>
Authored: Mon Jan 23 18:32:21 2017 -0800
Committer: Oleksandr Diachenko <odiache...@pivotal.io>
Committed: Mon Jan 23 18:32:21 2017 -0800

----------------------------------------------------------------------
 .../java/org/apache/hawq/pxf/api/Metadata.java  | 18 +++-
 .../org/apache/hawq/pxf/api/MetadataTest.java   |  2 +-
 .../plugins/hive/HiveColumnarSerdeResolver.java |  5 --
 .../pxf/plugins/hive/HiveDataFragmenter.java    | 25 +++---
 .../pxf/plugins/hive/HiveMetadataFetcher.java   | 63 +++----------
 .../hawq/pxf/plugins/hive/HiveResolver.java     | 66 +++++++-------
 .../plugins/hive/HiveStringPassResolver.java    | 13 +--
 .../hawq/pxf/plugins/hive/HiveUserData.java     | 20 ++++-
 .../hive/utilities/EnumHiveToHawqType.java      | 31 +++++--
 .../plugins/hive/utilities/HiveUtilities.java   | 94 ++++++++++++++++++--
 .../pxf/plugins/hive/HiveORCAccessorTest.java   |  2 +-
 .../pxf/service/MetadataResponseFormatter.java  |  3 +-
 .../apache/hawq/pxf/service/ProfileFactory.java |  9 +-
 .../service/MetadataResponseFormatterTest.java  | 16 ++--
 src/backend/catalog/external/externalmd.c       | 65 +++++++++-----
 15 files changed, 268 insertions(+), 164 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/incubator-hawq/blob/607184c3/pxf/pxf-api/src/main/java/org/apache/hawq/pxf/api/Metadata.java
----------------------------------------------------------------------
diff --git a/pxf/pxf-api/src/main/java/org/apache/hawq/pxf/api/Metadata.java 
b/pxf/pxf-api/src/main/java/org/apache/hawq/pxf/api/Metadata.java
index a920e4f..8701813 100644
--- a/pxf/pxf-api/src/main/java/org/apache/hawq/pxf/api/Metadata.java
+++ b/pxf/pxf-api/src/main/java/org/apache/hawq/pxf/api/Metadata.java
@@ -78,6 +78,7 @@ public class Metadata {
     public static class Field {
         private String name;
         private EnumHawqType type; // field type which PXF exposes
+        private boolean isComplexType; // whether source field's type is complex
         private String sourceType; // field type PXF reads from
         private String[] modifiers; // type modifiers, optional field
 
@@ -93,12 +94,17 @@ public class Metadata {
             this.sourceType = sourceType;
         }
 
-        public Field(String name, EnumHawqType type, String sourceType,
-                String[] modifiers) {
+        public Field(String name, EnumHawqType type, String sourceType, String[] modifiers) {
             this(name, type, sourceType);
             this.modifiers = modifiers;
         }
 
+        public Field(String name, EnumHawqType type, boolean isComplexType, String sourceType, String[] modifiers) {
+            this(name, type, sourceType);
+            this.modifiers = modifiers;
+            this.isComplexType = isComplexType;
+        }
+
         public String getName() {
             return name;
         }
@@ -114,6 +120,14 @@ public class Metadata {
         public String[] getModifiers() {
             return modifiers;
         }
+
+        public boolean isComplexType() {
+            return isComplexType;
+        }
+
+        public void setComplexType(boolean isComplexType) {
+            this.isComplexType = isComplexType;
+        }
     }
 
     /**

http://git-wip-us.apache.org/repos/asf/incubator-hawq/blob/607184c3/pxf/pxf-api/src/test/java/org/apache/hawq/pxf/api/MetadataTest.java
----------------------------------------------------------------------
diff --git 
a/pxf/pxf-api/src/test/java/org/apache/hawq/pxf/api/MetadataTest.java 
b/pxf/pxf-api/src/test/java/org/apache/hawq/pxf/api/MetadataTest.java
index 327a15b..9244ba2 100644
--- a/pxf/pxf-api/src/test/java/org/apache/hawq/pxf/api/MetadataTest.java
+++ b/pxf/pxf-api/src/test/java/org/apache/hawq/pxf/api/MetadataTest.java
@@ -32,7 +32,7 @@ public class MetadataTest {
     @Test
     public void createFieldEmptyNameType() {
         try {
-            Metadata.Field field = new Metadata.Field(null, null, null, null);
+            Metadata.Field field = new Metadata.Field(null, null, false, null, null);
             fail("Empty name, type and source type shouldn't be allowed.");
         } catch (IllegalArgumentException e) {
             assertEquals("Field name, type and source type cannot be empty", e.getMessage());

http://git-wip-us.apache.org/repos/asf/incubator-hawq/blob/607184c3/pxf/pxf-hive/src/main/java/org/apache/hawq/pxf/plugins/hive/HiveColumnarSerdeResolver.java
----------------------------------------------------------------------
diff --git 
a/pxf/pxf-hive/src/main/java/org/apache/hawq/pxf/plugins/hive/HiveColumnarSerdeResolver.java
 
b/pxf/pxf-hive/src/main/java/org/apache/hawq/pxf/plugins/hive/HiveColumnarSerdeResolver.java
index 157f723..5ef4edc 100644
--- 
a/pxf/pxf-hive/src/main/java/org/apache/hawq/pxf/plugins/hive/HiveColumnarSerdeResolver.java
+++ 
b/pxf/pxf-hive/src/main/java/org/apache/hawq/pxf/plugins/hive/HiveColumnarSerdeResolver.java
@@ -237,9 +237,4 @@ public class HiveColumnarSerdeResolver extends HiveResolver 
{
         }
         firstColumn = false;
     }
-
-    @Override
-    void parseDelimiterChar(InputData input) {
-        delimiter = 44; //,
-    }
 }

http://git-wip-us.apache.org/repos/asf/incubator-hawq/blob/607184c3/pxf/pxf-hive/src/main/java/org/apache/hawq/pxf/plugins/hive/HiveDataFragmenter.java
----------------------------------------------------------------------
diff --git 
a/pxf/pxf-hive/src/main/java/org/apache/hawq/pxf/plugins/hive/HiveDataFragmenter.java
 
b/pxf/pxf-hive/src/main/java/org/apache/hawq/pxf/plugins/hive/HiveDataFragmenter.java
index 97f278d..6217ce2 100644
--- 
a/pxf/pxf-hive/src/main/java/org/apache/hawq/pxf/plugins/hive/HiveDataFragmenter.java
+++ 
b/pxf/pxf-hive/src/main/java/org/apache/hawq/pxf/plugins/hive/HiveDataFragmenter.java
@@ -162,6 +162,10 @@ public class HiveDataFragmenter extends Fragmenter {
 
         Table tbl = HiveUtilities.getHiveTable(client, tblDesc);
 
+        Metadata metadata = new Metadata(tblDesc);
+        HiveUtilities.getSchema(tbl, metadata);
+        boolean hasComplexTypes = HiveUtilities.hasComplexTypes(metadata);
+
         verifySchema(tbl);
 
         List<Partition> partitions = null;
@@ -227,7 +231,7 @@ public class HiveDataFragmenter extends Fragmenter {
 
         if (partitions.isEmpty()) {
             props = getSchema(tbl);
-            fetchMetaDataForSimpleTable(descTable, props);
+            fetchMetaDataForSimpleTable(descTable, props, hasComplexTypes);
         } else {
             List<FieldSchema> partitionKeys = tbl.getPartitionKeys();
 
@@ -238,7 +242,7 @@ public class HiveDataFragmenter extends Fragmenter {
                         tblDesc.getPath(), tblDesc.getName(),
                         partitionKeys);
                 fetchMetaDataForPartitionedTable(descPartition, props,
-                        partition, partitionKeys, tblDesc.getName());
+                        partition, partitionKeys, tblDesc.getName(), 
hasComplexTypes);
             }
         }
     }
@@ -254,29 +258,30 @@ public class HiveDataFragmenter extends Fragmenter {
     }
 
     private void fetchMetaDataForSimpleTable(StorageDescriptor stdsc,
-                                             Properties props) throws 
Exception {
-        fetchMetaDataForSimpleTable(stdsc, props, null);
+                                             Properties props, boolean 
hasComplexTypes) throws Exception {
+        fetchMetaDataForSimpleTable(stdsc, props, null, hasComplexTypes);
     }
 
     private void fetchMetaDataForSimpleTable(StorageDescriptor stdsc,
-                                             Properties props, String 
tableName)
+                                             Properties props, String 
tableName, boolean hasComplexTypes)
             throws Exception {
         fetchMetaData(new HiveTablePartition(stdsc, props, null, null,
-                tableName));
+                tableName), hasComplexTypes);
     }
 
     private void fetchMetaDataForPartitionedTable(StorageDescriptor stdsc,
                                                   Properties props,
                                                   Partition partition,
                                                   List<FieldSchema> 
partitionKeys,
-                                                  String tableName)
+                                                  String tableName,
+                                                  boolean hasComplexTypes)
             throws Exception {
         fetchMetaData(new HiveTablePartition(stdsc, props, partition,
-                partitionKeys, tableName));
+                partitionKeys, tableName), hasComplexTypes);
     }
 
     /* Fills a table partition */
-    private void fetchMetaData(HiveTablePartition tablePartition)
+    private void fetchMetaData(HiveTablePartition tablePartition, boolean hasComplexTypes)
             throws Exception {
         InputFormat<?, ?> fformat = makeInputFormat(
                 tablePartition.storageDesc.getInputFormat(), jobConf);
@@ -284,7 +289,7 @@ public class HiveDataFragmenter extends Fragmenter {
         if (inputData.getProfile() != null) {
             // evaluate optimal profile based on file format if profile was 
explicitly specified in url
             // if user passed accessor+fragmenter+resolver - use them
-            profile = ProfileFactory.get(fformat);
+            profile = ProfileFactory.get(fformat, hasComplexTypes);
         }
         String fragmenterForProfile = null;
         if (profile != null) {

http://git-wip-us.apache.org/repos/asf/incubator-hawq/blob/607184c3/pxf/pxf-hive/src/main/java/org/apache/hawq/pxf/plugins/hive/HiveMetadataFetcher.java
----------------------------------------------------------------------
diff --git 
a/pxf/pxf-hive/src/main/java/org/apache/hawq/pxf/plugins/hive/HiveMetadataFetcher.java
 
b/pxf/pxf-hive/src/main/java/org/apache/hawq/pxf/plugins/hive/HiveMetadataFetcher.java
index 90943fc..6dcd329 100644
--- 
a/pxf/pxf-hive/src/main/java/org/apache/hawq/pxf/plugins/hive/HiveMetadataFetcher.java
+++ 
b/pxf/pxf-hive/src/main/java/org/apache/hawq/pxf/plugins/hive/HiveMetadataFetcher.java
@@ -101,32 +101,28 @@ public class HiveMetadataFetcher extends MetadataFetcher {
             try {
                 Metadata metadata = new Metadata(tblDesc);
                 Table tbl = HiveUtilities.getHiveTable(client, tblDesc);
-                getSchema(tbl, metadata);
+                HiveUtilities.getSchema(tbl, metadata);
+                boolean hasComplexTypes = 
HiveUtilities.hasComplexTypes(metadata);
                 metadataList.add(metadata);
                 List<Partition> tablePartitions = 
client.listPartitionsByFilter(tblDesc.getPath(), tblDesc.getName(), "", (short) 
-1);
                 Set<OutputFormat> formats = new HashSet<OutputFormat>();
                 //If table has partitions - find out all formats
                 for (Partition tablePartition : tablePartitions) {
                     String inputFormat = 
tablePartition.getSd().getInputFormat();
-                    OutputFormat outputFormat = getOutputFormat(inputFormat);
+                    OutputFormat outputFormat = getOutputFormat(inputFormat, 
hasComplexTypes);
                     formats.add(outputFormat);
                 }
                 //If table has no partitions - get single format of table
                 if (tablePartitions.size() == 0 ) {
                     String inputFormat = tbl.getSd().getInputFormat();
-                    OutputFormat outputFormat = getOutputFormat(inputFormat);
+                    OutputFormat outputFormat = getOutputFormat(inputFormat, 
hasComplexTypes);
                     formats.add(outputFormat);
                 }
                 metadata.setOutputFormats(formats);
-                if (tbl != null && tbl.getSd() != null && 
tbl.getSd().getSerdeInfo() != null) {
-                    Map<String, String> outputParameters = new HashMap<String, 
String>();
-                    Map<String, String> serdeParameters = 
tbl.getSd().getSerdeInfo().getParameters();
-                    //outputParameters.put(DELIM_COLLECTION, 
String.valueOf((int) 
serdeParameters.get(serdeConstants.COLLECTION_DELIM).charAt(0)));
-                    //outputParameters.put(DELIM_MAPKEY, String.valueOf((int) 
serdeParameters.get(serdeConstants.MAPKEY_DELIM).charAt(0)));
-                    //outputParameters.put(DELIM_LINE, String.valueOf((int) 
serdeParameters.get(serdeConstants.LINE_DELIM).charAt(0)));
-                    outputParameters.put(DELIM_FIELD, String.valueOf((int) 
serdeParameters.get(serdeConstants.FIELD_DELIM).charAt(0)));
-                    metadata.setOutputParameters(outputParameters);
-                }
+                Map<String, String> outputParameters = new HashMap<String, 
String>();
+                if (HiveUtilities.getDelimiter(tbl.getSd()) != null)
+                    outputParameters.put(DELIM_FIELD, 
HiveUtilities.getDelimiter(tbl.getSd()));
+                metadata.setOutputParameters(outputParameters);
             } catch (UnsupportedTypeException | UnsupportedOperationException 
e) {
                 if(ignoreErrors) {
                     LOG.warn("Metadata fetch for " + tblDesc.toString() + " 
failed. " + e.getMessage());
@@ -140,50 +136,11 @@ public class HiveMetadataFetcher extends MetadataFetcher {
         return metadataList;
     }
 
-
-    /**
-     * Populates the given metadata object with the given table's fields and 
partitions,
-     * The partition fields are added at the end of the table schema.
-     * Throws an exception if the table contains unsupported field types.
-     * Supported HCatalog types: TINYINT,
-     * SMALLINT, INT, BIGINT, BOOLEAN, FLOAT, DOUBLE, STRING, BINARY, 
TIMESTAMP,
-     * DATE, DECIMAL, VARCHAR, CHAR.
-     *
-     * @param tbl Hive table
-     * @param metadata schema of given table
-     */
-    private void getSchema(Table tbl, Metadata metadata) {
-
-        int hiveColumnsSize = tbl.getSd().getColsSize();
-        int hivePartitionsSize = tbl.getPartitionKeysSize();
-
-        if (LOG.isDebugEnabled()) {
-            LOG.debug("Hive table: " + hiveColumnsSize + " fields, " + 
hivePartitionsSize + " partitions.");
-        }
-
-        // check hive fields
-        try {
-            List<FieldSchema> hiveColumns = tbl.getSd().getCols();
-            for (FieldSchema hiveCol : hiveColumns) {
-                metadata.addField(HiveUtilities.mapHiveType(hiveCol));
-            }
-            // check partition fields
-            List<FieldSchema> hivePartitions = tbl.getPartitionKeys();
-            for (FieldSchema hivePart : hivePartitions) {
-                metadata.addField(HiveUtilities.mapHiveType(hivePart));
-            }
-        } catch (UnsupportedTypeException e) {
-            String errorMsg = "Failed to retrieve metadata for table " + 
metadata.getItem() + ". " +
-                    e.getMessage();
-            throw new UnsupportedTypeException(errorMsg);
-        }
-    }
-
-    private OutputFormat getOutputFormat(String inputFormat) {
+    private OutputFormat getOutputFormat(String inputFormat, boolean 
hasComplexTypes) {
         OutputFormat outputFormat = null;
         try {
             InputFormat<?, ?> fformat = 
HiveDataFragmenter.makeInputFormat(inputFormat, jobConf);
-            String profile = ProfileFactory.get(fformat);
+            String profile = ProfileFactory.get(fformat, hasComplexTypes);
             String outputFormatString = 
ProfilesConf.getProfilePluginsMap(profile).get("X-GP-OUTPUTFORMAT");
             outputFormat = OutputFormat.valueOf(outputFormatString);
         } catch (Exception e) {

http://git-wip-us.apache.org/repos/asf/incubator-hawq/blob/607184c3/pxf/pxf-hive/src/main/java/org/apache/hawq/pxf/plugins/hive/HiveResolver.java
----------------------------------------------------------------------
diff --git 
a/pxf/pxf-hive/src/main/java/org/apache/hawq/pxf/plugins/hive/HiveResolver.java 
b/pxf/pxf-hive/src/main/java/org/apache/hawq/pxf/plugins/hive/HiveResolver.java
index 16b08d7..3e76187 100644
--- 
a/pxf/pxf-hive/src/main/java/org/apache/hawq/pxf/plugins/hive/HiveResolver.java
+++ 
b/pxf/pxf-hive/src/main/java/org/apache/hawq/pxf/plugins/hive/HiveResolver.java
@@ -621,44 +621,46 @@ public class HiveResolver extends Plugin implements 
ReadResolver {
         String userDelim = input.getUserProperty(InputData.DELIMITER_KEY);
 
         if (userDelim == null) {
-            throw new IllegalArgumentException(InputData.DELIMITER_KEY + " is 
a required option");
-        }
-
-        final int VALID_LENGTH = 1;
-        final int VALID_LENGTH_HEX = 4;
-
-        if (userDelim.startsWith("\\x")) { // hexadecimal sequence
-
-            if (userDelim.length() != VALID_LENGTH_HEX) {
+            /* No DELIMITER in URL, try to get it from fragment's user data*/
+            HiveUserData hiveUserData = null;
+            try {
+                hiveUserData = HiveUtilities.parseHiveUserData(input);
+            } catch (UserDataException ude) {
+                throw new IllegalArgumentException(InputData.DELIMITER_KEY + " 
is a required option");
+            }
+            if (hiveUserData.getDelimiter() == null) {
+                throw new IllegalArgumentException(InputData.DELIMITER_KEY + " 
is a required option");
+            }
+            delimiter = (char) 
Integer.valueOf(hiveUserData.getDelimiter()).intValue();
+        } else {
+            final int VALID_LENGTH = 1;
+            final int VALID_LENGTH_HEX = 4;
+            if (userDelim.startsWith("\\x")) { // hexadecimal sequence
+                if (userDelim.length() != VALID_LENGTH_HEX) {
+                    throw new IllegalArgumentException(
+                            "Invalid hexdecimal value for delimiter (got"
+                                    + userDelim + ")");
+                }
+                delimiter = (char) Integer.parseInt(
+                        userDelim.substring(2, VALID_LENGTH_HEX), 16);
+                if (!CharUtils.isAscii(delimiter)) {
+                    throw new IllegalArgumentException(
+                            "Invalid delimiter value. Must be a single ASCII 
character, or a hexadecimal sequence (got non ASCII "
+                                    + delimiter + ")");
+                }
+                return;
+            }
+            if (userDelim.length() != VALID_LENGTH) {
                 throw new IllegalArgumentException(
-                        "Invalid hexdecimal value for delimiter (got"
+                        "Invalid delimiter value. Must be a single ASCII 
character, or a hexadecimal sequence (got "
                                 + userDelim + ")");
             }
-
-            delimiter = (char) Integer.parseInt(
-                    userDelim.substring(2, VALID_LENGTH_HEX), 16);
-
-            if (!CharUtils.isAscii(delimiter)) {
+            if (!CharUtils.isAscii(userDelim.charAt(0))) {
                 throw new IllegalArgumentException(
                         "Invalid delimiter value. Must be a single ASCII 
character, or a hexadecimal sequence (got non ASCII "
-                                + delimiter + ")");
+                                + userDelim + ")");
             }
-
-            return;
+            delimiter = userDelim.charAt(0);
         }
-
-        if (userDelim.length() != VALID_LENGTH) {
-            throw new IllegalArgumentException(
-                    "Invalid delimiter value. Must be a single ASCII 
character, or a hexadecimal sequence (got "
-                            + userDelim + ")");
-        }
-
-        if (!CharUtils.isAscii(userDelim.charAt(0))) {
-            throw new IllegalArgumentException(
-                    "Invalid delimiter value. Must be a single ASCII 
character, or a hexadecimal sequence (got non ASCII "
-                            + userDelim + ")");
-        }
-
-        delimiter = userDelim.charAt(0);
     }
 }

http://git-wip-us.apache.org/repos/asf/incubator-hawq/blob/607184c3/pxf/pxf-hive/src/main/java/org/apache/hawq/pxf/plugins/hive/HiveStringPassResolver.java
----------------------------------------------------------------------
diff --git 
a/pxf/pxf-hive/src/main/java/org/apache/hawq/pxf/plugins/hive/HiveStringPassResolver.java
 
b/pxf/pxf-hive/src/main/java/org/apache/hawq/pxf/plugins/hive/HiveStringPassResolver.java
index 65bce98..acbff27 100644
--- 
a/pxf/pxf-hive/src/main/java/org/apache/hawq/pxf/plugins/hive/HiveStringPassResolver.java
+++ 
b/pxf/pxf-hive/src/main/java/org/apache/hawq/pxf/plugins/hive/HiveStringPassResolver.java
@@ -87,22 +87,11 @@ public class HiveStringPassResolver extends HiveResolver {
     public List<OneField> getFields(OneRow onerow) throws Exception {
         if (((ProtocolData) inputData).outputFormat() == OutputFormat.TEXT) {
             String line = (onerow.getData()).toString();
-            String replacedLine = replaceComplexSpecCharacters(line);
             /* We follow Hive convention. Partition fields are always added at 
the end of the record */
-            return Collections.singletonList(new OneField(VARCHAR.getOID(), 
replacedLine + parts));
+            return Collections.singletonList(new OneField(VARCHAR.getOID(), 
line + parts));
         } else {
             return super.getFields(onerow);
         }
     }
 
-    private String replaceComplexSpecCharacters(String line) throws 
UserDataException {
-        HiveUserData hiveUserData = HiveUtilities.parseHiveUserData(inputData);
-        char collectionDelimChar = 
(char)Integer.valueOf(hiveUserData.getCollectionDelim()).intValue();
-        char mapKeyDelimChar = 
(char)Integer.valueOf(hiveUserData.getMapKeyDelim()).intValue();
-        String replacedLine = line;
-        replacedLine = line.replace(Character.toString(collectionDelimChar), 
collectionDelim);
-        replacedLine = 
replacedLine.replace(Character.toString(mapKeyDelimChar), mapkeyDelim);
-        return replacedLine;
-    }
-
 }

http://git-wip-us.apache.org/repos/asf/incubator-hawq/blob/607184c3/pxf/pxf-hive/src/main/java/org/apache/hawq/pxf/plugins/hive/HiveUserData.java
----------------------------------------------------------------------
diff --git 
a/pxf/pxf-hive/src/main/java/org/apache/hawq/pxf/plugins/hive/HiveUserData.java 
b/pxf/pxf-hive/src/main/java/org/apache/hawq/pxf/plugins/hive/HiveUserData.java
index 07159ca..2437b60 100644
--- 
a/pxf/pxf-hive/src/main/java/org/apache/hawq/pxf/plugins/hive/HiveUserData.java
+++ 
b/pxf/pxf-hive/src/main/java/org/apache/hawq/pxf/plugins/hive/HiveUserData.java
@@ -27,15 +27,17 @@ public class HiveUserData {
             String propertiesString, String partitionKeys,
             boolean filterInFragmenter,
             String collectionDelim,
-            String mapKeyDelim) {
+            String mapKeyDelim,
+            String delimiter) {
 
         this.inputFormatName = inputFormatName;
         this.serdeClassName = serdeClassName;
         this.propertiesString = propertiesString;
         this.partitionKeys = partitionKeys;
         this.filterInFragmenter = filterInFragmenter;
-        this.collectionDelim = collectionDelim;
-        this.mapKeyDelim = mapKeyDelim;
+        this.collectionDelim = (collectionDelim == null ? "0" : collectionDelim);
+        this.mapKeyDelim = (mapKeyDelim == null ? "0" : mapKeyDelim);
+        this.delimiter = (delimiter == null ? "0" : delimiter);
     }
 
     public String getInputFormatName() {
@@ -74,6 +76,14 @@ public class HiveUserData {
         this.mapKeyDelim = mapKeyDelim;
     }
 
+    public String getDelimiter() {
+        return delimiter;
+    }
+
+    public void setDelimiter(String delimiter) {
+        this.delimiter = delimiter;
+    }
+
     private String inputFormatName;
     private String serdeClassName;
     private String propertiesString;
@@ -81,6 +91,7 @@ public class HiveUserData {
     private boolean filterInFragmenter;
     private String collectionDelim;
     private String mapKeyDelim;
+    private String delimiter;
 
     @Override
     public String toString() {
@@ -90,6 +101,7 @@ public class HiveUserData {
                 + partitionKeys + HiveUserData.HIVE_UD_DELIM
                 + filterInFragmenter + HiveUserData.HIVE_UD_DELIM
                 + collectionDelim + HiveUserData.HIVE_UD_DELIM
-                + mapKeyDelim;
+                + mapKeyDelim + HiveUserData.HIVE_UD_DELIM
+                + delimiter;
     }
 }

http://git-wip-us.apache.org/repos/asf/incubator-hawq/blob/607184c3/pxf/pxf-hive/src/main/java/org/apache/hawq/pxf/plugins/hive/utilities/EnumHiveToHawqType.java
----------------------------------------------------------------------
diff --git 
a/pxf/pxf-hive/src/main/java/org/apache/hawq/pxf/plugins/hive/utilities/EnumHiveToHawqType.java
 
b/pxf/pxf-hive/src/main/java/org/apache/hawq/pxf/plugins/hive/utilities/EnumHiveToHawqType.java
index d91e949..ea65a66 100644
--- 
a/pxf/pxf-hive/src/main/java/org/apache/hawq/pxf/plugins/hive/utilities/EnumHiveToHawqType.java
+++ 
b/pxf/pxf-hive/src/main/java/org/apache/hawq/pxf/plugins/hive/utilities/EnumHiveToHawqType.java
@@ -42,37 +42,48 @@ public enum EnumHiveToHawqType {
     FloatType("float", EnumHawqType.Float4Type),
     DoubleType("double", EnumHawqType.Float8Type),
     StringType("string", EnumHawqType.TextType),
-    BinaryType("binary", EnumHawqType.ByteaType),
+    BinaryType("binary", EnumHawqType.ByteaType, true),
     TimestampType("timestamp", EnumHawqType.TimestampType),
     DateType("date", EnumHawqType.DateType),
     DecimalType("decimal", EnumHawqType.NumericType, "[(,)]"),
     VarcharType("varchar", EnumHawqType.VarcharType, "[(,)]"),
     CharType("char", EnumHawqType.BpcharType, "[(,)]"),
-    ArrayType("array", EnumHawqType.TextType, "[<,>]"),
-    MapType("map", EnumHawqType.TextType, "[<,>]"),
-    StructType("struct", EnumHawqType.TextType, "[<,>]"),
-    UnionType("uniontype", EnumHawqType.TextType, "[<,>]");
+    ArrayType("array", EnumHawqType.TextType, "[<,>]", true),
+    MapType("map", EnumHawqType.TextType, "[<,>]", true),
+    StructType("struct", EnumHawqType.TextType, "[<,>]", true),
+    UnionType("uniontype", EnumHawqType.TextType, "[<,>]", true);
 
     private String typeName;
     private EnumHawqType hawqType;
     private String splitExpression;
     private byte size;
+    private boolean isComplexType;
 
     EnumHiveToHawqType(String typeName, EnumHawqType hawqType) {
         this.typeName = typeName;
         this.hawqType = hawqType;
     }
-    
+
     EnumHiveToHawqType(String typeName, EnumHawqType hawqType, byte size) {
         this(typeName, hawqType);
         this.size = size;
     }
 
+    EnumHiveToHawqType(String typeName, EnumHawqType hawqType, boolean isComplexType) {
+        this(typeName, hawqType);
+        this.isComplexType = isComplexType;
+    }
+
     EnumHiveToHawqType(String typeName, EnumHawqType hawqType, String 
splitExpression) {
         this(typeName, hawqType);
         this.splitExpression = splitExpression;
     }
 
+    EnumHiveToHawqType(String typeName, EnumHawqType hawqType, String splitExpression, boolean isComplexType) {
+        this(typeName, hawqType, splitExpression);
+        this.isComplexType = isComplexType;
+    }
+
     /**
      * 
      * @return name of type
@@ -216,4 +227,12 @@ public enum EnumHiveToHawqType {
         return size;
     }
 
+    public boolean isComplexType() {
+        return isComplexType;
+    }
+
+    public void setComplexType(boolean isComplexType) {
+        this.isComplexType = isComplexType;
+    }
+
 }
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/incubator-hawq/blob/607184c3/pxf/pxf-hive/src/main/java/org/apache/hawq/pxf/plugins/hive/utilities/HiveUtilities.java
----------------------------------------------------------------------
diff --git 
a/pxf/pxf-hive/src/main/java/org/apache/hawq/pxf/plugins/hive/utilities/HiveUtilities.java
 
b/pxf/pxf-hive/src/main/java/org/apache/hawq/pxf/plugins/hive/utilities/HiveUtilities.java
index f8d12ab..d94bf12 100644
--- 
a/pxf/pxf-hive/src/main/java/org/apache/hawq/pxf/plugins/hive/utilities/HiveUtilities.java
+++ 
b/pxf/pxf-hive/src/main/java/org/apache/hawq/pxf/plugins/hive/utilities/HiveUtilities.java
@@ -35,10 +35,12 @@ import org.apache.hadoop.hive.metastore.HiveMetaStoreClient;
 import org.apache.hadoop.hive.metastore.TableType;
 import org.apache.hadoop.hive.metastore.api.FieldSchema;
 import org.apache.hadoop.hive.metastore.api.MetaException;
+import org.apache.hadoop.hive.metastore.api.StorageDescriptor;
 import org.apache.hadoop.hive.metastore.api.Table;
 import org.apache.hadoop.hive.serde.serdeConstants;
 import org.apache.hawq.pxf.api.Fragmenter;
 import org.apache.hawq.pxf.api.Metadata;
+import org.apache.hawq.pxf.api.Metadata.Field;
 import org.apache.hawq.pxf.api.UnsupportedTypeException;
 import org.apache.hawq.pxf.api.UserDataException;
 import org.apache.hawq.pxf.api.utilities.EnumHawqType;
@@ -100,7 +102,8 @@ public class HiveUtilities {
     static final String STR_RC_FILE_INPUT_FORMAT = 
"org.apache.hadoop.hive.ql.io.RCFileInputFormat";
     static final String STR_TEXT_FILE_INPUT_FORMAT = 
"org.apache.hadoop.mapred.TextInputFormat";
     static final String STR_ORC_FILE_INPUT_FORMAT = 
"org.apache.hadoop.hive.ql.io.orc.OrcInputFormat";
-    private static final int EXPECTED_NUM_OF_TOKS = 7;
+    private static final int EXPECTED_NUM_OF_TOKS = 8;
+    private static final String DEFAULT_DELIMITER = "44";
 
     /**
      * Initializes the HiveMetaStoreClient
@@ -195,7 +198,7 @@ public class HiveUtilities {
         } else
             hiveTypeName = hiveType;
 
-        return new Metadata.Field(fieldName, hawqType, hiveTypeName, 
modifiers);
+        return new Metadata.Field(fieldName, hawqType, 
hiveToHawqType.isComplexType(), hiveTypeName, modifiers);
     }
 
     /**
@@ -452,28 +455,29 @@ public class HiveUtilities {
         String serdeClassName = 
partData.storageDesc.getSerdeInfo().getSerializationLib();
         String propertiesString = serializeProperties(partData.properties);
         String partitionKeys = serializePartitionKeys(partData);
-        String collectionDelim = String.valueOf((int) 
partData.storageDesc.getSerdeInfo().getParameters().get(serdeConstants.COLLECTION_DELIM).charAt(0));
-        String mapKeyDelim = String.valueOf((int) 
partData.storageDesc.getSerdeInfo().getParameters().get(serdeConstants.MAPKEY_DELIM).charAt(0));
+        String collectionDelim = getCollectionDelim(partData.storageDesc);
+        String mapKeyDelim = getMapKeyDelim(partData.storageDesc);
+        String delimiter = getDelimiter(partData.storageDesc);
 
         if (HiveInputFormatFragmenter.class.isAssignableFrom(fragmenterClass)) 
{
             assertFileType(inputFormatName, partData);
         }
 
-        hiveUserData = new HiveUserData(inputFormatName, serdeClassName, 
propertiesString, partitionKeys, filterInFragmenter, collectionDelim, 
mapKeyDelim);
+        hiveUserData = new HiveUserData(inputFormatName, serdeClassName, 
propertiesString, partitionKeys, filterInFragmenter, collectionDelim, 
mapKeyDelim, delimiter);
 
         return hiveUserData.toString().getBytes();
     }
 
     public static HiveUserData parseHiveUserData(InputData input, 
PXF_HIVE_SERDES... supportedSerdes) throws UserDataException{
         String userData = new String(input.getFragmentUserData());
-        String[] toks = userData.split(HiveUserData.HIVE_UD_DELIM);
+        String[] toks = userData.split(HiveUserData.HIVE_UD_DELIM, 
EXPECTED_NUM_OF_TOKS);
 
         if (toks.length != (EXPECTED_NUM_OF_TOKS)) {
             throw new UserDataException("HiveInputFormatFragmenter expected "
                     + EXPECTED_NUM_OF_TOKS + " tokens, but got " + 
toks.length);
         }
 
-        HiveUserData hiveUserData = new HiveUserData(toks[0], toks[1], 
toks[2], toks[3], Boolean.valueOf(toks[4]), toks[5], toks[6]);
+        HiveUserData hiveUserData = new HiveUserData(toks[0], toks[1], 
toks[2], toks[3], Boolean.valueOf(toks[4]), toks[5], toks[6], toks[7]);
 
             if (supportedSerdes.length > 0) {
                 /* Make sure this serde is supported */
@@ -482,4 +486,80 @@ public class HiveUtilities {
 
         return hiveUserData;
     }
+
+    private static String getSerdeParameter(StorageDescriptor sd, String 
parameterKey) {
+        String parameterValue = null;
+        if (sd != null && sd.getSerdeInfo() != null && 
sd.getSerdeInfo().getParameters() != null && 
sd.getSerdeInfo().getParameters().get(parameterKey) != null) {
+            parameterValue = String.valueOf((int) 
sd.getSerdeInfo().getParameters().get(parameterKey).charAt(0));
+        }
+        return parameterValue;
+    }
+
+    public static String getCollectionDelim(StorageDescriptor sd) {
+        String collectionDelim = getSerdeParameter(sd, 
serdeConstants.COLLECTION_DELIM);
+        return collectionDelim;
+    }
+
+    public static String getMapKeyDelim(StorageDescriptor sd) {
+        String mapKeyDelim = getSerdeParameter(sd, 
serdeConstants.MAPKEY_DELIM);
+        return mapKeyDelim;
+    }
+
+    public static String getDelimiter(StorageDescriptor sd) {
+        String delimiter = getSerdeParameter(sd, serdeConstants.FIELD_DELIM);
+        if (delimiter == null)
+            delimiter = DEFAULT_DELIMITER;
+        return delimiter;
+    }
+
+    public static boolean hasComplexTypes(Metadata metadata) {
+        boolean hasComplexTypes = false;
+        List<Field> fields = metadata.getFields();
+        for (Field field: fields) {
+            if (field.isComplexType()) {
+                hasComplexTypes = true;
+                break;
+            }
+        }
+
+        return hasComplexTypes;
+    }
+
+    /**
+     * Populates the given metadata object with the given table's fields and 
partitions,
+     * The partition fields are added at the end of the table schema.
+     * Throws an exception if the table contains unsupported field types.
+     * Supported HCatalog types: TINYINT,
+     * SMALLINT, INT, BIGINT, BOOLEAN, FLOAT, DOUBLE, STRING, BINARY, 
TIMESTAMP,
+     * DATE, DECIMAL, VARCHAR, CHAR.
+     *
+     * @param tbl Hive table
+     * @param metadata schema of given table
+     */
+    public static void getSchema(Table tbl, Metadata metadata) {
+
+        int hiveColumnsSize = tbl.getSd().getColsSize();
+        int hivePartitionsSize = tbl.getPartitionKeysSize();
+
+        if (LOG.isDebugEnabled()) {
+            LOG.debug("Hive table: " + hiveColumnsSize + " fields, " + 
hivePartitionsSize + " partitions.");
+        }
+
+        // check hive fields
+        try {
+            List<FieldSchema> hiveColumns = tbl.getSd().getCols();
+            for (FieldSchema hiveCol : hiveColumns) {
+                metadata.addField(HiveUtilities.mapHiveType(hiveCol));
+            }
+            // check partition fields
+            List<FieldSchema> hivePartitions = tbl.getPartitionKeys();
+            for (FieldSchema hivePart : hivePartitions) {
+                metadata.addField(HiveUtilities.mapHiveType(hivePart));
+            }
+        } catch (UnsupportedTypeException e) {
+            String errorMsg = "Failed to retrieve metadata for table " + 
metadata.getItem() + ". " +
+                    e.getMessage();
+            throw new UnsupportedTypeException(errorMsg);
+        }
+    }
 }

http://git-wip-us.apache.org/repos/asf/incubator-hawq/blob/607184c3/pxf/pxf-hive/src/test/java/org/apache/hawq/pxf/plugins/hive/HiveORCAccessorTest.java
----------------------------------------------------------------------
diff --git 
a/pxf/pxf-hive/src/test/java/org/apache/hawq/pxf/plugins/hive/HiveORCAccessorTest.java
 
b/pxf/pxf-hive/src/test/java/org/apache/hawq/pxf/plugins/hive/HiveORCAccessorTest.java
index 30233a4..5feec4f 100644
--- 
a/pxf/pxf-hive/src/test/java/org/apache/hawq/pxf/plugins/hive/HiveORCAccessorTest.java
+++ 
b/pxf/pxf-hive/src/test/java/org/apache/hawq/pxf/plugins/hive/HiveORCAccessorTest.java
@@ -64,7 +64,7 @@ public class HiveORCAccessorTest {
         
PowerMockito.whenNew(JobConf.class).withAnyArguments().thenReturn(jobConf);
 
         PowerMockito.mockStatic(HiveUtilities.class);
-        
PowerMockito.when(HiveUtilities.parseHiveUserData(any(InputData.class), 
any(PXF_HIVE_SERDES[].class))).thenReturn(new HiveUserData("", "", null, 
HiveDataFragmenter.HIVE_NO_PART_TBL, true, "2", "3"));
+        
PowerMockito.when(HiveUtilities.parseHiveUserData(any(InputData.class), 
any(PXF_HIVE_SERDES[].class))).thenReturn(new HiveUserData("", "", null, 
HiveDataFragmenter.HIVE_NO_PART_TBL, true, "2", "3", "1"));
 
         PowerMockito.mockStatic(HdfsUtilities.class);
 

http://git-wip-us.apache.org/repos/asf/incubator-hawq/blob/607184c3/pxf/pxf-service/src/main/java/org/apache/hawq/pxf/service/MetadataResponseFormatter.java
----------------------------------------------------------------------
diff --git 
a/pxf/pxf-service/src/main/java/org/apache/hawq/pxf/service/MetadataResponseFormatter.java
 
b/pxf/pxf-service/src/main/java/org/apache/hawq/pxf/service/MetadataResponseFormatter.java
index 8225ec5..d2b0b5c 100644
--- 
a/pxf/pxf-service/src/main/java/org/apache/hawq/pxf/service/MetadataResponseFormatter.java
+++ 
b/pxf/pxf-service/src/main/java/org/apache/hawq/pxf/service/MetadataResponseFormatter.java
@@ -86,7 +86,8 @@ public class MetadataResponseFormatter {
                     result.append("Field #").append(++i).append(": [")
                             .append("Name: ").append(field.getName())
                             .append(", Type: 
").append(field.getType().getTypeName())
-                            .append(", Source type: 
").append(field.getSourceType()).append("] ");
+                            .append(", Source type: 
").append(field.getSourceType())
+                            .append(", Source type is complex: 
").append(field.isComplexType()).append("] ");
                 }
             }
             LOG.debug(result);

http://git-wip-us.apache.org/repos/asf/incubator-hawq/blob/607184c3/pxf/pxf-service/src/main/java/org/apache/hawq/pxf/service/ProfileFactory.java
----------------------------------------------------------------------
diff --git 
a/pxf/pxf-service/src/main/java/org/apache/hawq/pxf/service/ProfileFactory.java 
b/pxf/pxf-service/src/main/java/org/apache/hawq/pxf/service/ProfileFactory.java
index 092f89e..580cd39 100644
--- 
a/pxf/pxf-service/src/main/java/org/apache/hawq/pxf/service/ProfileFactory.java
+++ 
b/pxf/pxf-service/src/main/java/org/apache/hawq/pxf/service/ProfileFactory.java
@@ -32,9 +32,14 @@ public class ProfileFactory {
     private static final String HIVE_ORC_PROFILE = "HiveORC";
     private static final String HIVE_PROFILE = "Hive";
 
-    public static String get(InputFormat inputFormat) {
+/*    public static String get(InputFormat inputFormat) {
+        String profileName = get(inputFormat, false);
+        return profileName;
+    }
+*/
+    public static String get(InputFormat inputFormat, boolean hasComplexTypes) 
{
         String profileName = null;
-        if (inputFormat instanceof TextInputFormat) {
+        if (inputFormat instanceof TextInputFormat && !hasComplexTypes) {
             profileName = HIVE_TEXT_PROFILE;
         } else if (inputFormat instanceof RCFileInputFormat) {
             profileName = HIVE_RC_PROFILE;

http://git-wip-us.apache.org/repos/asf/incubator-hawq/blob/607184c3/pxf/pxf-service/src/test/java/org/apache/hawq/pxf/service/MetadataResponseFormatterTest.java
----------------------------------------------------------------------
diff --git 
a/pxf/pxf-service/src/test/java/org/apache/hawq/pxf/service/MetadataResponseFormatterTest.java
 
b/pxf/pxf-service/src/test/java/org/apache/hawq/pxf/service/MetadataResponseFormatterTest.java
index 21bf423..546b42d 100644
--- 
a/pxf/pxf-service/src/test/java/org/apache/hawq/pxf/service/MetadataResponseFormatterTest.java
+++ 
b/pxf/pxf-service/src/test/java/org/apache/hawq/pxf/service/MetadataResponseFormatterTest.java
@@ -57,7 +57,7 @@ public class MetadataResponseFormatterTest {
         response = MetadataResponseFormatter.formatResponse(metadataList, 
"path.file");
         StringBuilder expected = new StringBuilder("{\"PXFMetadata\":[{");
         expected.append("\"item\":{\"path\":\"default\",\"name\":\"table1\"},")
-                
.append("\"fields\":[{\"name\":\"field1\",\"type\":\"int8\",\"sourceType\":\"bigint\"},{\"name\":\"field2\",\"type\":\"text\",\"sourceType\":\"string\"}]}]}");
+                
.append("\"fields\":[{\"name\":\"field1\",\"type\":\"int8\",\"sourceType\":\"bigint\",\"complexType\":false},{\"name\":\"field2\",\"type\":\"text\",\"sourceType\":\"string\",\"complexType\":false}]}]}");
 
         assertEquals(expected.toString(), convertResponseToString(response));
     }
@@ -75,7 +75,7 @@ public class MetadataResponseFormatterTest {
         response = MetadataResponseFormatter.formatResponse(metadataList, 
"path.file");
         StringBuilder expected = new StringBuilder("{\"PXFMetadata\":[{");
         expected.append("\"item\":{\"path\":\"default\",\"name\":\"table1\"},")
-                
.append("\"fields\":[{\"name\":\"field1\",\"type\":\"int8\",\"sourceType\":\"bigint\"},{\"name\":\"field2\",\"type\":\"text\",\"sourceType\":\"string\"}]}]}");
+                
.append("\"fields\":[{\"name\":\"field1\",\"type\":\"int8\",\"sourceType\":\"bigint\",\"complexType\":false},{\"name\":\"field2\",\"type\":\"text\",\"sourceType\":\"string\",\"complexType\":false}]}]}");
 
         assertEquals(expected.toString(), convertResponseToString(response));
     }
@@ -97,9 +97,9 @@ public class MetadataResponseFormatterTest {
         StringBuilder expected = new StringBuilder("{\"PXFMetadata\":[{");
         expected.append("\"item\":{\"path\":\"default\",\"name\":\"table1\"},")
                 .append("\"fields\":[")
-                
.append("{\"name\":\"field1\",\"type\":\"int8\",\"sourceType\":\"bigint\"},")
-                
.append("{\"name\":\"field2\",\"type\":\"numeric\",\"sourceType\":\"decimal\",\"modifiers\":[\"1349\",\"1789\"]},")
-                
.append("{\"name\":\"field3\",\"type\":\"bpchar\",\"sourceType\":\"char\",\"modifiers\":[\"50\"]}")
+                
.append("{\"name\":\"field1\",\"type\":\"int8\",\"sourceType\":\"bigint\",\"complexType\":false},")
+                
.append("{\"name\":\"field2\",\"type\":\"numeric\",\"sourceType\":\"decimal\",\"modifiers\":[\"1349\",\"1789\"],\"complexType\":false},")
+                
.append("{\"name\":\"field3\",\"type\":\"bpchar\",\"sourceType\":\"char\",\"modifiers\":[\"50\"],\"complexType\":false}")
                 .append("]}]}");
 
         assertEquals(expected.toString(), convertResponseToString(response));
@@ -118,7 +118,7 @@ public class MetadataResponseFormatterTest {
         StringBuilder expected = new StringBuilder("{\"PXFMetadata\":[{");
         expected.append("\"item\":{\"path\":\"default\",\"name\":\"table1\"},")
                 .append("\"fields\":[")
-                
.append("{\"name\":\"field1\",\"type\":\"float8\",\"sourceType\":\"double\"}")
+                
.append("{\"name\":\"field1\",\"type\":\"float8\",\"sourceType\":\"double\",\"complexType\":false}")
                 .append("]}]}");
 
         assertEquals(expected.toString(), convertResponseToString(response));
@@ -199,7 +199,7 @@ public class MetadataResponseFormatterTest {
                 expected.append(",");
             }
             
expected.append("{\"item\":{\"path\":\"default\",\"name\":\"table").append(i).append("\"},");
-            
expected.append("\"fields\":[{\"name\":\"field1\",\"type\":\"int8\",\"sourceType\":\"bigint\"},{\"name\":\"field2\",\"type\":\"text\",\"sourceType\":\"string\"}]}");
+            
expected.append("\"fields\":[{\"name\":\"field1\",\"type\":\"int8\",\"sourceType\":\"bigint\",\"complexType\":false},{\"name\":\"field2\",\"type\":\"text\",\"sourceType\":\"string\",\"complexType\":false}]}");
         }
         expected.append("]}");
 
@@ -226,7 +226,7 @@ public class MetadataResponseFormatterTest {
                 expected.append(",");
             }
             
expected.append("{\"item\":{\"path\":\"default").append(i).append("\",\"name\":\"table").append(i).append("\"},");
-            
expected.append("\"fields\":[{\"name\":\"field1\",\"type\":\"int8\",\"sourceType\":\"bigint\"},{\"name\":\"field2\",\"type\":\"text\",\"sourceType\":\"string\"}]}");
+            
expected.append("\"fields\":[{\"name\":\"field1\",\"type\":\"int8\",\"sourceType\":\"bigint\",\"complexType\":false},{\"name\":\"field2\",\"type\":\"text\",\"sourceType\":\"string\",\"complexType\":false}]}");
         }
         expected.append("]}");
 

http://git-wip-us.apache.org/repos/asf/incubator-hawq/blob/607184c3/src/backend/catalog/external/externalmd.c
----------------------------------------------------------------------
diff --git a/src/backend/catalog/external/externalmd.c 
b/src/backend/catalog/external/externalmd.c
index 4150f42..0e572ae 100644
--- a/src/backend/catalog/external/externalmd.c
+++ b/src/backend/catalog/external/externalmd.c
@@ -59,6 +59,7 @@ static void LoadColumns(Oid relid, List *columns);
 static int ComputeTypeMod(Oid typeOid, const char *colname, int *typemod, int 
nTypeMod);
 static Datum GetFormatTypeForProfile(const List *outputFormats);
 static Datum GetFormatOptionsForProfile(const List *outputFormats, int 
delimiter);
+static Datum GetLocationForFormat(char *profile, List *outputFormats, char 
*pxf_service_address, char *path, char *name, int delimiter);
 
 const int maxNumTypeModifiers = 2;
 /*
@@ -469,27 +470,8 @@ static void LoadExtTable(Oid relid, PxfItem *pxfItem)
                values[i] = (Datum) 0;
        }
 
-       /* location - should be an array of text with one element:
-        * pxf://<ip:port/namaservice>/<hive db>.<hive table>?Profile=Hive */
-       StringInfoData locationStr;
-       initStringInfo(&locationStr);
-       appendStringInfo(&locationStr, 
"pxf://%s/%s.%s?Profile=%s&delimiter=%cx%02x",
-                       pxf_service_address, pxfItem->path, pxfItem->name, 
pxfItem->profile, '\\', pxfItem->delimiter);
-       Size len = VARHDRSZ + locationStr.len;
-       /* +1 leaves room for sprintf's trailing null */
-       text *t = (text *) palloc(len + 1);
-       SET_VARSIZE(t, len);
-       sprintf((char *) VARDATA(t), "%s", locationStr.data);
-       ArrayBuildState *astate = NULL;
-       astate = accumArrayResult(astate, PointerGetDatum(t),
-                                                         false, TEXTOID,
-                                                         CurrentMemoryContext);
-       pfree(locationStr.data);
-       Assert(NULL != astate);
-       Datum location = makeArrayResult(astate, CurrentMemoryContext);
-
        values[Anum_pg_exttable_reloid - 1] = ObjectIdGetDatum(relid);
-       values[Anum_pg_exttable_location - 1] = location;
+       values[Anum_pg_exttable_location - 1] = 
GetLocationForFormat(pxfItem->profile, pxfItem->outputFormats, 
pxf_service_address, pxfItem->path, pxfItem->name, pxfItem->delimiter);
        values[Anum_pg_exttable_fmttype - 1] = 
GetFormatTypeForProfile(pxfItem->outputFormats);
        values[Anum_pg_exttable_fmtopts - 1] = 
GetFormatOptionsForProfile(pxfItem->outputFormats, pxfItem->delimiter);
        nulls[Anum_pg_exttable_command - 1] = true;
@@ -664,11 +646,13 @@ static Datum GetFormatOptionsForProfile(const List 
*outputFormats, int delimiter
 {
        StringInfoData formatStr;
        initStringInfo(&formatStr);
+
        /* "delimiter 'delimiter' null '\\N' escape '\\'"*/
        char formatArr[35] = { 0x64, 0x65, 0x6c, 0x69, 0x6d, 0x69, 0x74, 0x65,
                        0x72, 0x20, 0x27, delimiter, 0x27, 0x20, 0x6e, 0x75, 
0x6c, 0x6c,
                        0x20, 0x27, 0x5c, 0x4e, 0x27, 0x20, 0x65, 0x73, 0x63, 
0x61, 0x70,
                        0x65, 0x20, 0x27, 0x5c, 0x27, 0x00 };
+
        if (list_length(outputFormats) == 1 && 
strcmp(lfirst(list_head(outputFormats)),"TEXT") == 0)
        {
                appendStringInfo(&formatStr, "%s", formatArr);
@@ -680,3 +664,44 @@ static Datum GetFormatOptionsForProfile(const List 
*outputFormats, int delimiter
        return format_opts;
 }
 
+/* location - should be an array of text with one element:
+ * pxf://<ip:port/namaservice>/<hive db>.<hive table>?Profile=Hive */
+static Datum GetLocationForFormat(char *profile, List *outputFormats, char 
*pxf_service_address, char *path, char *name, int delimiter)
+{
+       StringInfoData locationStr;
+       initStringInfo(&locationStr);
+       appendStringInfo(&locationStr, "pxf://%s/%s.%s?Profile=%s", 
pxf_service_address, path, name, profile);
+       bool hasTextOutputFormat = false;
+       ListCell *lc = NULL;
+       foreach (lc, outputFormats)
+       {
+               char *outputFormat = (char *) lfirst(lc);
+               if (strcmp(outputFormat, "TEXT") == 0)
+               {
+                       hasTextOutputFormat = true;
+                       break;
+               }
+       }
+       if (delimiter)
+       {
+               appendStringInfo(&locationStr, "&delimiter=%cx%02x", '\\', 
delimiter);
+       } else if (hasTextOutputFormat)
+       {
+               ereport(ERROR,
+                       (errcode(ERRCODE_NULL_VALUE_NOT_ALLOWED),
+                        errmsg("delimiter attribute is mandatory for output 
format \"TEXT\"")));
+       }
+       Size len = VARHDRSZ + locationStr.len;
+       /* +1 leaves room for sprintf's trailing null */
+       text *t = (text *) palloc(len + 1);
+       SET_VARSIZE(t, len);
+       sprintf((char *) VARDATA(t), "%s", locationStr.data);
+       ArrayBuildState *astate = NULL;
+       astate = accumArrayResult(astate, PointerGetDatum(t),
+                                                         false, TEXTOID,
+                                                         CurrentMemoryContext);
+       pfree(locationStr.data);
+       Assert(NULL != astate);
+       Datum location = makeArrayResult(astate, CurrentMemoryContext);
+       return location;
+}

Reply via email to