[ https://issues.apache.org/jira/browse/HAWQ-703?page=com.atlassian.jira.plugin.system.issuetabpanels:comment-tabpanel&focusedCommentId=15269931#comment-15269931 ]

ASF GitHub Bot commented on HAWQ-703:
-------------------------------------

Github user sansanichfb commented on a diff in the pull request:

    https://github.com/apache/incubator-hawq/pull/633#discussion_r61978916
  
    --- Diff: pxf/pxf-service/src/test/java/org/apache/hawq/pxf/service/MetadataResponseFormatterTest.java ---
    @@ -85,26 +86,45 @@ public void formatResponseStringWithModifiers() throws Exception {
             List<Metadata.Field> fields = new ArrayList<Metadata.Field>();
             Metadata.Item itemName = new Metadata.Item("default", "table1");
             Metadata metadata = new Metadata(itemName, fields);
    -        fields.add(new Metadata.Field("field1", "int"));
    -        fields.add(new Metadata.Field("field2", "numeric",
    +        fields.add(new Metadata.Field("field1", EnumHawqType.Int8Type, "bigint"));
    +        fields.add(new Metadata.Field("field2", EnumHawqType.NumericType, "decimal",
                     new String[] {"1349", "1789"}));
    -        fields.add(new Metadata.Field("field3", "char",
    +        fields.add(new Metadata.Field("field3", EnumHawqType.BpcharType, "char",
                     new String[] {"50"}));
             metadataList.add(metadata);
     
             response = MetadataResponseFormatter.formatResponse(metadataList, "path.file");
             StringBuilder expected = new StringBuilder("{\"PXFMetadata\":[{");
             expected.append("\"item\":{\"path\":\"default\",\"name\":\"table1\"},")
                     .append("\"fields\":[")
    -                .append("{\"name\":\"field1\",\"type\":\"int\"},")
    -                .append("{\"name\":\"field2\",\"type\":\"numeric\",\"modifiers\":[\"1349\",\"1789\"]},")
    -                .append("{\"name\":\"field3\",\"type\":\"char\",\"modifiers\":[\"50\"]}")
    +                .append("{\"name\":\"field1\",\"type\":\"int8\",\"sourceType\":\"bigint\"},")
    +                .append("{\"name\":\"field2\",\"type\":\"numeric\",\"sourceType\":\"decimal\",\"modifiers\":[\"1349\",\"1789\"]},")
    +                .append("{\"name\":\"field3\",\"type\":\"bpchar\",\"sourceType\":\"char\",\"modifiers\":[\"50\"]}")
                     .append("]}]}");
     
             assertEquals(expected.toString(), convertResponseToString(response));
         }
     
         @Test
    +    public void formatResponseStringWithSourceType() throws Exception {
    +        List<Metadata> metadataList = new ArrayList<Metadata>();
    +        List<Metadata.Field> fields = new ArrayList<Metadata.Field>();
    +        Metadata.Item itemName = new Metadata.Item("default", "table1");
    +        Metadata metadata = new Metadata(itemName, fields);
    +        fields.add(new Metadata.Field("field1", EnumHawqType.Float8Type, "double"));
    +        metadataList.add(metadata);
    +
    +        response = MetadataResponseFormatter.formatResponse(metadataList, "path.file");
    +        StringBuilder expected = new StringBuilder("{\"PXFMetadata\":[{");
    +        expected.append("\"item\":{\"path\":\"default\",\"name\":\"table1\"},")
    +                .append("\"fields\":[")
    +                .append("{\"name\":\"field1\",\"type\":\"float8\",\"sourceType\":\"double\"}")
    +                .append("]}]}");
    +
    +//        assertEquals(expected.toString(), convertResponseToString(response));
    --- End diff ---
    
    yes, sure
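
    A minimal sketch of how the new test might read once that commented-out
    assertion is enabled (reusing only the classes, fields, and helpers already
    shown in the diff above; nothing beyond the diff is assumed):

        @Test
        public void formatResponseStringWithSourceType() throws Exception {
            List<Metadata> metadataList = new ArrayList<Metadata>();
            List<Metadata.Field> fields = new ArrayList<Metadata.Field>();
            Metadata.Item itemName = new Metadata.Item("default", "table1");
            Metadata metadata = new Metadata(itemName, fields);
            // HAWQ float8 column backed by a Hive "double" source column
            fields.add(new Metadata.Field("field1", EnumHawqType.Float8Type, "double"));
            metadataList.add(metadata);

            response = MetadataResponseFormatter.formatResponse(metadataList, "path.file");
            StringBuilder expected = new StringBuilder("{\"PXFMetadata\":[{");
            expected.append("\"item\":{\"path\":\"default\",\"name\":\"table1\"},")
                    .append("\"fields\":[")
                    .append("{\"name\":\"field1\",\"type\":\"float8\",\"sourceType\":\"double\"}")
                    .append("]}]}");

            // assertion from the diff, uncommented
            assertEquals(expected.toString(), convertResponseToString(response));
        }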


> Serialize HCatalog Complex Types to plain text (as Hive profile)
> ----------------------------------------------------------------
>
>                 Key: HAWQ-703
>                 URL: https://issues.apache.org/jira/browse/HAWQ-703
>             Project: Apache HAWQ
>          Issue Type: New Feature
>          Components: Hcatalog, PXF
>            Reporter: Oleksandr Diachenko
>            Assignee: Oleksandr Diachenko
>
> As of now, a user is able to create an external HAWQ table on top of a Hive 
> table with complex types (LIST, MAP, STRUCT, UNION), and those types are 
> serialized as TEXT.
> But if the user relies on the HCatalog integration feature and queries Hive 
> tables having complex type columns through HAWQ, he/she gets an error.
> The goal is to serialize complex types to text when using HCatalog 
> integration.
> Changes required:
> - PXF:
>  -- add sourceType field to getMetadata endpoint.
> - HAWQ:
>  -- master:
>  --- update stored procedure pxf_get_item_fields, add sourceType field to 
> response.
>  -- psql:
>  --- update describe logic for Hive tables to show sourceType column in \d(\d+)
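
A minimal sketch of what such a field could look like once sourceType is part of
the metadata, mirroring the test code in the diff above; EnumHawqType.TextType
and the "array<int>" source type are illustrative assumptions, not taken from
the patch:

    // hypothetical: a Hive LIST column surfaced to HAWQ as text,
    // with the original Hive type carried along in sourceType
    List<Metadata.Field> fields = new ArrayList<Metadata.Field>();
    fields.add(new Metadata.Field("tags", EnumHawqType.TextType, "array<int>"));
    Metadata metadata = new Metadata(new Metadata.Item("default", "table1"), fields);
    // expected JSON fragment (sketch):
    // {"name":"tags","type":"text","sourceType":"array<int>"}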



--
This message was sent by Atlassian JIRA
(v6.3.4#6332)
