Author: heyongqiang
Date: Thu Dec  8 06:41:02 2011
New Revision: 1211767

URL: http://svn.apache.org/viewvc?rev=1211767&view=rev
Log:
HIVE-1003 [jira] optimize metadata only queries
(Namit Jain via Yongqiang He)

Summary:
testing

Queries like:

select max(ds) from T

where ds is a partitioning column, should be optimized.
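
A minimal sketch of the scenario, for context (the table definition below is
illustrative and not part of this patch):

create table T (key string, value string) partitioned by (ds string);

-- ds values are stored in the metastore as partition names, so max(ds)
-- can be answered from partition metadata without scanning partition data
select max(ds) from T;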

Test Plan: EMPTY

Reviewers: JIRA, heyongqiang

Reviewed By: heyongqiang

CC: njain, heyongqiang

Differential Revision: 105

Added:
    hive/trunk/ql/src/test/queries/clientpositive/partition_schema1.q
    hive/trunk/ql/src/test/results/clientpositive/partition_schema1.q.out
Modified:
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/DDLTask.java
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/metadata/MetaDataFormatUtils.java
    hive/trunk/ql/src/test/results/clientpositive/alter_table_serde.q.out
    hive/trunk/ql/src/test/results/clientpositive/exim_04_evolved_parts.q.out

Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/DDLTask.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/DDLTask.java?rev=1211767&r1=1211766&r2=1211767&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/DDLTask.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/DDLTask.java Thu Dec  8 06:41:02 2011
@@ -2556,14 +2556,17 @@ public class DDLTask extends Task<DDLWor
       outStream = fs.create(resFile);
 
       if (colPath.equals(tableName)) {
+        List<FieldSchema> cols = (part == null) ? tbl.getCols() : part.getCols();
+
         if (!descTbl.isFormatted()) {
-          List<FieldSchema> cols = tbl.getCols();
           if (tableName.equals(colPath)) {
             cols.addAll(tbl.getPartCols());
           }
           outStream.writeBytes(MetaDataFormatUtils.displayColsUnformatted(cols));
         } else {
-          outStream.writeBytes(MetaDataFormatUtils.getAllColumnsInformation(tbl));
+          outStream.writeBytes(
+            MetaDataFormatUtils.getAllColumnsInformation(cols,
+              tbl.isPartitioned() ? tbl.getPartCols() : null));
         }
       } else {
         List<FieldSchema> cols = Hive.getFieldsFromDeserializer(colPath, tbl.getDeserializer());

Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/metadata/MetaDataFormatUtils.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/metadata/MetaDataFormatUtils.java?rev=1211767&r1=1211766&r2=1211767&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/metadata/MetaDataFormatUtils.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/metadata/MetaDataFormatUtils.java Thu Dec  8 06:41:02 2011
@@ -48,22 +48,6 @@ public final class MetaDataFormatUtils {
   private MetaDataFormatUtils() {
   }
 
-  public static String getAllColumnsInformation(Table table) {
-
-    StringBuilder columnInformation = new StringBuilder(DEFAULT_STRINGBUILDER_SIZE);
-    formatColumnsHeader(columnInformation);
-    formatAllFields(columnInformation, table.getCols());
-
-    // Partitions
-    if (table.isPartitioned()) {
-      columnInformation.append(LINE_DELIM).append("# Partition Information")
-          .append(LINE_DELIM);
-      formatColumnsHeader(columnInformation);
-      formatAllFields(columnInformation, table.getPartCols());
-    }
-    return columnInformation.toString();
-  }
-
   private static void formatColumnsHeader(StringBuilder columnInformation) {
     columnInformation.append("# "); // Easy for shell scripts to ignore
     formatOutput(getColumnsHeader(), columnInformation);
@@ -77,6 +61,21 @@ public final class MetaDataFormatUtils {
     return columnInformation.toString();
   }
 
+  public static String getAllColumnsInformation(List<FieldSchema> cols, List<FieldSchema> partCols) {
+    StringBuilder columnInformation = new StringBuilder(DEFAULT_STRINGBUILDER_SIZE);
+    formatColumnsHeader(columnInformation);
+    formatAllFields(columnInformation, cols);
+
+    if ((partCols != null) && (!partCols.isEmpty())) {
+      columnInformation.append(LINE_DELIM).append("# Partition Information")
+        .append(LINE_DELIM);
+      formatColumnsHeader(columnInformation);
+      formatAllFields(columnInformation, partCols);
+    }
+
+    return columnInformation.toString();
+  }
+
   private static void formatAllFields(StringBuilder tableInfo, List<FieldSchema> cols) {
     for (FieldSchema col : cols) {
       formatFieldSchemas(tableInfo, col);

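With the column list now passed in explicitly, the formatted describe path can render a
partition's own stored schema instead of always reading the table's current columns, with
the partition keys still listed under a "# Partition Information" header. A rough HiveQL
sketch of the intent (table t1 is illustrative; the new partition_schema1.q test below
exercises the unformatted variant of the same behavior):

create table t1 (key string, value string) partitioned by (dt string);
alter table t1 add partition (dt='100');
alter table t1 add columns (x string);

-- expected: the table-level describe lists key, value, x, while the
-- pre-existing partition still lists only its creation-time columns
-- key, value; dt appears under "# Partition Information" in both
describe formatted t1;
describe formatted t1 partition (dt='100');
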
Added: hive/trunk/ql/src/test/queries/clientpositive/partition_schema1.q
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/queries/clientpositive/partition_schema1.q?rev=1211767&view=auto
==============================================================================
--- hive/trunk/ql/src/test/queries/clientpositive/partition_schema1.q (added)
+++ hive/trunk/ql/src/test/queries/clientpositive/partition_schema1.q Thu Dec  8 06:41:02 2011
@@ -0,0 +1,12 @@
+
+create table partition_schema1(key string, value string) partitioned by (dt string);
+
+insert overwrite table partition_schema1 partition(dt='100') select * from src1;
+desc partition_schema1 partition(dt='100');
+
+alter table partition_schema1 add columns (x string);
+
+desc partition_schema1;
+desc partition_schema1 partition (dt='100');
+
+

Modified: hive/trunk/ql/src/test/results/clientpositive/alter_table_serde.q.out
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/results/clientpositive/alter_table_serde.q.out?rev=1211767&r1=1211766&r2=1211767&view=diff
==============================================================================
--- hive/trunk/ql/src/test/results/clientpositive/alter_table_serde.q.out (original)
+++ hive/trunk/ql/src/test/results/clientpositive/alter_table_serde.q.out Thu Dec  8 06:41:02 2011
@@ -13,7 +13,7 @@ id    int     
 query  string  
 name   string  
                 
-Detailed Table Information     Table(tableName:test_table, dbName:default, 
owner:xiaol, createTime:1317799666, lastAccessTime:0, retention:0, 
sd:StorageDescriptor(cols:[FieldSchema(name:id, type:int, comment:null), 
FieldSchema(name:query, type:string, comment:null), FieldSchema(name:name, 
type:string, comment:null)], 
location:pfile:/Users/xiaol/Tools/hive-trunk/build/ql/test/data/warehouse/test_table,
 inputFormat:org.apache.hadoop.mapred.TextInputFormat, 
outputFormat:org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat, 
compressed:false, numBuckets:-1, serdeInfo:SerDeInfo(name:null, 
serializationLib:org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe, 
parameters:{serialization.format=1}), bucketCols:[], sortCols:[], 
parameters:{}), partitionKeys:[], 
parameters:{transient_lastDdlTime=1317799666}, viewOriginalText:null, 
viewExpandedText:null, tableType:MANAGED_TABLE)     
+Detailed Table Information     Table(tableName:test_table, dbName:default, 
owner:njain, createTime:1322718206, lastAccessTime:0, retention:0, 
sd:StorageDescriptor(cols:[FieldSchema(name:id, type:int, comment:null), 
FieldSchema(name:query, type:string, comment:null), FieldSchema(name:name, 
type:string, comment:null)], 
location:pfile:/data/users/njain/hive1/build/ql/test/data/warehouse/test_table, 
inputFormat:org.apache.hadoop.mapred.TextInputFormat, 
outputFormat:org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat, 
compressed:false, numBuckets:-1, serdeInfo:SerDeInfo(name:null, 
serializationLib:org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe, 
parameters:{serialization.format=1}), bucketCols:[], sortCols:[], 
parameters:{}), partitionKeys:[], 
parameters:{transient_lastDdlTime=1322718206}, viewOriginalText:null, 
viewExpandedText:null, tableType:MANAGED_TABLE)   
 PREHOOK: query: alter table test_table set serde 
'org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe'
 PREHOOK: type: ALTERTABLE_SERIALIZER
 PREHOOK: Input: default@test_table
@@ -30,7 +30,7 @@ id    int     from deserializer
 query  string  from deserializer
 name   string  from deserializer
                 
-Detailed Table Information     Table(tableName:test_table, dbName:default, 
owner:xiaol, createTime:1317799666, lastAccessTime:0, retention:0, 
sd:StorageDescriptor(cols:[FieldSchema(name:id, type:int, comment:from 
deserializer), FieldSchema(name:query, type:string, comment:from deserializer), 
FieldSchema(name:name, type:string, comment:from deserializer)], 
location:pfile:/Users/xiaol/Tools/hive-trunk/build/ql/test/data/warehouse/test_table,
 inputFormat:org.apache.hadoop.mapred.TextInputFormat, 
outputFormat:org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat, 
compressed:false, numBuckets:-1, serdeInfo:SerDeInfo(name:null, 
serializationLib:org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe, 
parameters:{serialization.format=1}), bucketCols:[], sortCols:[], 
parameters:{}), partitionKeys:[], parameters:{last_modified_by=xiaol, 
last_modified_time=1317799666, transient_lastDdlTime=1317799666}, 
viewOriginalText:null, viewExpandedText:null, tableType:MANAGED_TABLE)     
+Detailed Table Information     Table(tableName:test_table, dbName:default, 
owner:njain, createTime:1322718206, lastAccessTime:0, retention:0, 
sd:StorageDescriptor(cols:[FieldSchema(name:id, type:int, comment:from 
deserializer), FieldSchema(name:query, type:string, comment:from deserializer), 
FieldSchema(name:name, type:string, comment:from deserializer)], 
location:pfile:/data/users/njain/hive1/build/ql/test/data/warehouse/test_table, 
inputFormat:org.apache.hadoop.mapred.TextInputFormat, 
outputFormat:org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat, 
compressed:false, numBuckets:-1, serdeInfo:SerDeInfo(name:null, 
serializationLib:org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe, 
parameters:{serialization.format=1}), bucketCols:[], sortCols:[], 
parameters:{}), partitionKeys:[], parameters:{last_modified_by=njain, 
last_modified_time=1322718206, transient_lastDdlTime=1322718206}, 
viewOriginalText:null, viewExpandedText:null, tableType:MANAGED_TABLE)   
 PREHOOK: query: alter table test_table set serdeproperties ('field.delim' = 
',')
 PREHOOK: type: ALTERTABLE_SERDEPROPERTIES
 PREHOOK: Input: default@test_table
@@ -47,7 +47,7 @@ id    int     from deserializer
 query  string  from deserializer
 name   string  from deserializer
                 
-Detailed Table Information     Table(tableName:test_table, dbName:default, 
owner:xiaol, createTime:1317799666, lastAccessTime:0, retention:0, 
sd:StorageDescriptor(cols:[FieldSchema(name:id, type:int, comment:from 
deserializer), FieldSchema(name:query, type:string, comment:from deserializer), 
FieldSchema(name:name, type:string, comment:from deserializer)], 
location:pfile:/Users/xiaol/Tools/hive-trunk/build/ql/test/data/warehouse/test_table,
 inputFormat:org.apache.hadoop.mapred.TextInputFormat, 
outputFormat:org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat, 
compressed:false, numBuckets:-1, serdeInfo:SerDeInfo(name:null, 
serializationLib:org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe, 
parameters:{serialization.format=1, field.delim=,}), bucketCols:[], 
sortCols:[], parameters:{}), partitionKeys:[], 
parameters:{last_modified_by=xiaol, last_modified_time=1317799666, 
transient_lastDdlTime=1317799666}, viewOriginalText:null, 
viewExpandedText:null, tableType:MANAGED_TABLE)
        
+Detailed Table Information     Table(tableName:test_table, dbName:default, 
owner:njain, createTime:1322718206, lastAccessTime:0, retention:0, 
sd:StorageDescriptor(cols:[FieldSchema(name:id, type:int, comment:from 
deserializer), FieldSchema(name:query, type:string, comment:from deserializer), 
FieldSchema(name:name, type:string, comment:from deserializer)], 
location:pfile:/data/users/njain/hive1/build/ql/test/data/warehouse/test_table, 
inputFormat:org.apache.hadoop.mapred.TextInputFormat, 
outputFormat:org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat, 
compressed:false, numBuckets:-1, serdeInfo:SerDeInfo(name:null, 
serializationLib:org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe, 
parameters:{serialization.format=1, field.delim=,}), bucketCols:[], 
sortCols:[], parameters:{}), partitionKeys:[], 
parameters:{last_modified_by=njain, last_modified_time=1322718206, 
transient_lastDdlTime=1322718206}, viewOriginalText:null, 
viewExpandedText:null, tableType:MANAGED_TABLE)    
 PREHOOK: query: drop table test_table
 PREHOOK: type: DROPTABLE
 PREHOOK: Input: default@test_table
@@ -79,7 +79,7 @@ query string  
 name   string  
 dt     string  
                 
-Detailed Partition Information Partition(values:[2011], dbName:default, 
tableName:test_table, createTime:1317799667, lastAccessTime:0, 
sd:StorageDescriptor(cols:[FieldSchema(name:id, type:int, comment:null), 
FieldSchema(name:query, type:string, comment:null), FieldSchema(name:name, 
type:string, comment:null)], 
location:pfile:/Users/xiaol/Tools/hive-trunk/build/ql/test/data/warehouse/test_table/dt=2011,
 inputFormat:org.apache.hadoop.mapred.TextInputFormat, 
outputFormat:org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat, 
compressed:false, numBuckets:-1, serdeInfo:SerDeInfo(name:null, 
serializationLib:org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe, 
parameters:{serialization.format=1}), bucketCols:[], sortCols:[], 
parameters:{}), parameters:{transient_lastDdlTime=1317799667})     
+Detailed Partition Information Partition(values:[2011], dbName:default, 
tableName:test_table, createTime:1322718208, lastAccessTime:0, 
sd:StorageDescriptor(cols:[FieldSchema(name:id, type:int, comment:null), 
FieldSchema(name:query, type:string, comment:null), FieldSchema(name:name, 
type:string, comment:null), FieldSchema(name:dt, type:string, comment:null)], 
location:pfile:/data/users/njain/hive1/build/ql/test/data/warehouse/test_table/dt=2011,
 inputFormat:org.apache.hadoop.mapred.TextInputFormat, 
outputFormat:org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat, 
compressed:false, numBuckets:-1, serdeInfo:SerDeInfo(name:null, 
serializationLib:org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe, 
parameters:{serialization.format=1}), bucketCols:[], sortCols:[], 
parameters:{}), parameters:{transient_lastDdlTime=1322718208})  
 PREHOOK: query: alter table test_table set serde 
'org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe'
 PREHOOK: type: ALTERTABLE_SERIALIZER
 PREHOOK: Input: default@test_table
@@ -92,12 +92,12 @@ PREHOOK: query: describe extended test_t
 PREHOOK: type: DESCTABLE
 POSTHOOK: query: describe extended test_table partition (dt='2011')
 POSTHOOK: type: DESCTABLE
-id     int     from deserializer
-query  string  from deserializer
-name   string  from deserializer
+id     int     
+query  string  
+name   string  
 dt     string  
                 
-Detailed Partition Information Partition(values:[2011], dbName:default, 
tableName:test_table, createTime:1317799667, lastAccessTime:0, 
sd:StorageDescriptor(cols:[FieldSchema(name:id, type:int, comment:null), 
FieldSchema(name:query, type:string, comment:null), FieldSchema(name:name, 
type:string, comment:null)], 
location:pfile:/Users/xiaol/Tools/hive-trunk/build/ql/test/data/warehouse/test_table/dt=2011,
 inputFormat:org.apache.hadoop.mapred.TextInputFormat, 
outputFormat:org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat, 
compressed:false, numBuckets:-1, serdeInfo:SerDeInfo(name:null, 
serializationLib:org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe, 
parameters:{serialization.format=1}), bucketCols:[], sortCols:[], 
parameters:{}), parameters:{transient_lastDdlTime=1317799667})     
+Detailed Partition Information Partition(values:[2011], dbName:default, 
tableName:test_table, createTime:1322718208, lastAccessTime:0, 
sd:StorageDescriptor(cols:[FieldSchema(name:id, type:int, comment:null), 
FieldSchema(name:query, type:string, comment:null), FieldSchema(name:name, 
type:string, comment:null), FieldSchema(name:dt, type:string, comment:null)], 
location:pfile:/data/users/njain/hive1/build/ql/test/data/warehouse/test_table/dt=2011,
 inputFormat:org.apache.hadoop.mapred.TextInputFormat, 
outputFormat:org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat, 
compressed:false, numBuckets:-1, serdeInfo:SerDeInfo(name:null, 
serializationLib:org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe, 
parameters:{serialization.format=1}), bucketCols:[], sortCols:[], 
parameters:{}), parameters:{transient_lastDdlTime=1322718208})  
 PREHOOK: query: alter table test_table set serdeproperties ('field.delim' = 
',')
 PREHOOK: type: ALTERTABLE_SERDEPROPERTIES
 PREHOOK: Input: default@test_table
@@ -110,12 +110,12 @@ PREHOOK: query: describe extended test_t
 PREHOOK: type: DESCTABLE
 POSTHOOK: query: describe extended test_table partition (dt='2011')
 POSTHOOK: type: DESCTABLE
-id     int     from deserializer
-query  string  from deserializer
-name   string  from deserializer
+id     int     
+query  string  
+name   string  
 dt     string  
                 
-Detailed Partition Information Partition(values:[2011], dbName:default, 
tableName:test_table, createTime:1317799667, lastAccessTime:0, 
sd:StorageDescriptor(cols:[FieldSchema(name:id, type:int, comment:null), 
FieldSchema(name:query, type:string, comment:null), FieldSchema(name:name, 
type:string, comment:null)], 
location:pfile:/Users/xiaol/Tools/hive-trunk/build/ql/test/data/warehouse/test_table/dt=2011,
 inputFormat:org.apache.hadoop.mapred.TextInputFormat, 
outputFormat:org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat, 
compressed:false, numBuckets:-1, serdeInfo:SerDeInfo(name:null, 
serializationLib:org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe, 
parameters:{serialization.format=1}), bucketCols:[], sortCols:[], 
parameters:{}), parameters:{transient_lastDdlTime=1317799667})     
+Detailed Partition Information Partition(values:[2011], dbName:default, 
tableName:test_table, createTime:1322718208, lastAccessTime:0, 
sd:StorageDescriptor(cols:[FieldSchema(name:id, type:int, comment:null), 
FieldSchema(name:query, type:string, comment:null), FieldSchema(name:name, 
type:string, comment:null), FieldSchema(name:dt, type:string, comment:null)], 
location:pfile:/data/users/njain/hive1/build/ql/test/data/warehouse/test_table/dt=2011,
 inputFormat:org.apache.hadoop.mapred.TextInputFormat, 
outputFormat:org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat, 
compressed:false, numBuckets:-1, serdeInfo:SerDeInfo(name:null, 
serializationLib:org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe, 
parameters:{serialization.format=1}), bucketCols:[], sortCols:[], 
parameters:{}), parameters:{transient_lastDdlTime=1322718208})  
 PREHOOK: query: -- test partitions
 
 alter table test_table partition(dt='2011') set serde 
'org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe'
@@ -133,12 +133,12 @@ PREHOOK: query: describe extended test_t
 PREHOOK: type: DESCTABLE
 POSTHOOK: query: describe extended test_table partition (dt='2011')
 POSTHOOK: type: DESCTABLE
-id     int     from deserializer
-query  string  from deserializer
-name   string  from deserializer
+id     int     
+query  string  
+name   string  
 dt     string  
                 
-Detailed Partition Information Partition(values:[2011], dbName:default, 
tableName:test_table, createTime:1317799667, lastAccessTime:0, 
sd:StorageDescriptor(cols:[FieldSchema(name:id, type:int, comment:null), 
FieldSchema(name:query, type:string, comment:null), FieldSchema(name:name, 
type:string, comment:null)], 
location:pfile:/Users/xiaol/Tools/hive-trunk/build/ql/test/data/warehouse/test_table/dt=2011,
 inputFormat:org.apache.hadoop.mapred.TextInputFormat, 
outputFormat:org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat, 
compressed:false, numBuckets:-1, serdeInfo:SerDeInfo(name:null, 
serializationLib:org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe, 
parameters:{serialization.format=1}), bucketCols:[], sortCols:[], 
parameters:{}), parameters:{last_modified_by=xiaol, 
last_modified_time=1317799667, transient_lastDdlTime=1317799667})    
+Detailed Partition Information Partition(values:[2011], dbName:default, 
tableName:test_table, createTime:1322718208, lastAccessTime:0, 
sd:StorageDescriptor(cols:[FieldSchema(name:id, type:int, comment:null), 
FieldSchema(name:query, type:string, comment:null), FieldSchema(name:name, 
type:string, comment:null), FieldSchema(name:dt, type:string, comment:null)], 
location:pfile:/data/users/njain/hive1/build/ql/test/data/warehouse/test_table/dt=2011,
 inputFormat:org.apache.hadoop.mapred.TextInputFormat, 
outputFormat:org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat, 
compressed:false, numBuckets:-1, serdeInfo:SerDeInfo(name:null, 
serializationLib:org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe, 
parameters:{serialization.format=1}), bucketCols:[], sortCols:[], 
parameters:{}), parameters:{last_modified_by=njain, 
last_modified_time=1322718208, transient_lastDdlTime=1322718208}) 
 PREHOOK: query: alter table test_table partition(dt='2011') set 
serdeproperties ('field.delim' = ',')
 PREHOOK: type: ALTERPARTITION_SERDEPROPERTIES
 PREHOOK: Input: default@test_table
@@ -152,12 +152,12 @@ PREHOOK: query: describe extended test_t
 PREHOOK: type: DESCTABLE
 POSTHOOK: query: describe extended test_table partition (dt='2011')
 POSTHOOK: type: DESCTABLE
-id     int     from deserializer
-query  string  from deserializer
-name   string  from deserializer
+id     int     
+query  string  
+name   string  
 dt     string  
                 
-Detailed Partition Information Partition(values:[2011], dbName:default, 
tableName:test_table, createTime:1317799667, lastAccessTime:0, 
sd:StorageDescriptor(cols:[FieldSchema(name:id, type:int, comment:null), 
FieldSchema(name:query, type:string, comment:null), FieldSchema(name:name, 
type:string, comment:null)], 
location:pfile:/Users/xiaol/Tools/hive-trunk/build/ql/test/data/warehouse/test_table/dt=2011,
 inputFormat:org.apache.hadoop.mapred.TextInputFormat, 
outputFormat:org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat, 
compressed:false, numBuckets:-1, serdeInfo:SerDeInfo(name:null, 
serializationLib:org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe, 
parameters:{serialization.format=1, field.delim=,}), bucketCols:[], 
sortCols:[], parameters:{}), parameters:{last_modified_by=xiaol, 
last_modified_time=1317799668, transient_lastDdlTime=1317799668})     
+Detailed Partition Information Partition(values:[2011], dbName:default, 
tableName:test_table, createTime:1322718208, lastAccessTime:0, 
sd:StorageDescriptor(cols:[FieldSchema(name:id, type:int, comment:null), 
FieldSchema(name:query, type:string, comment:null), FieldSchema(name:name, 
type:string, comment:null), FieldSchema(name:dt, type:string, comment:null)], 
location:pfile:/data/users/njain/hive1/build/ql/test/data/warehouse/test_table/dt=2011,
 inputFormat:org.apache.hadoop.mapred.TextInputFormat, 
outputFormat:org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat, 
compressed:false, numBuckets:-1, serdeInfo:SerDeInfo(name:null, 
serializationLib:org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe, 
parameters:{serialization.format=1, field.delim=,}), bucketCols:[], 
sortCols:[], parameters:{}), parameters:{last_modified_by=njain, 
last_modified_time=1322718209, transient_lastDdlTime=1322718209})  
 PREHOOK: query: drop table test_table
 PREHOOK: type: DROPTABLE
 PREHOOK: Input: default@test_table

Modified: hive/trunk/ql/src/test/results/clientpositive/exim_04_evolved_parts.q.out
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/results/clientpositive/exim_04_evolved_parts.q.out?rev=1211767&r1=1211766&r2=1211767&view=diff
==============================================================================
--- hive/trunk/ql/src/test/results/clientpositive/exim_04_evolved_parts.q.out (original)
+++ hive/trunk/ql/src/test/results/clientpositive/exim_04_evolved_parts.q.out Thu Dec  8 06:41:02 2011
@@ -67,12 +67,12 @@ PREHOOK: query: export table exim_employ
 PREHOOK: type: EXPORT
 PREHOOK: Input: default@exim_employee@emp_country=in/emp_state=ka
 PREHOOK: Input: default@exim_employee@emp_country=in/emp_state=tn
-PREHOOK: Output: 
pfile:/Users/krishnak/Projects/hdp/sources/hive-git-apache/build/ql/test/data/exports/exim_employee
+PREHOOK: Output: 
pfile:/data/users/njain/hive1/build/ql/test/data/exports/exim_employee
 POSTHOOK: query: export table exim_employee to 
'ql/test/data/exports/exim_employee'
 POSTHOOK: type: EXPORT
 POSTHOOK: Input: default@exim_employee@emp_country=in/emp_state=ka
 POSTHOOK: Input: default@exim_employee@emp_country=in/emp_state=tn
-POSTHOOK: Output: 
pfile:/Users/krishnak/Projects/hdp/sources/hive-git-apache/build/ql/test/data/exports/exim_employee
+POSTHOOK: Output: 
pfile:/data/users/njain/hive1/build/ql/test/data/exports/exim_employee
 PREHOOK: query: drop table exim_employee
 PREHOOK: type: DROPTABLE
 PREHOOK: Input: default@exim_employee
@@ -108,20 +108,19 @@ emp_dept  int     from deserializer
 emp_country    string  2-char code
 emp_state      string  2-char code
                 
-Detailed Table Information     Table(tableName:exim_employee, dbName:importer, 
owner:krishnak, createTime:1297311791, lastAccessTime:0, retention:0, 
sd:StorageDescriptor(cols:[FieldSchema(name:emp_id, type:int, comment:from 
deserializer), FieldSchema(name:emp_name, type:string, comment:from 
deserializer), FieldSchema(name:emp_dob, type:string, comment:from 
deserializer), FieldSchema(name:emp_sex, type:string, comment:from 
deserializer), FieldSchema(name:emp_dept, type:int, comment:from 
deserializer)], 
location:pfile:/Users/krishnak/Projects/hdp/sources/hive-git-apache/build/ql/test/data/warehouse/importer.db/exim_employee,
 inputFormat:org.apache.hadoop.hive.ql.io.BucketizedHiveInputFormat, 
outputFormat:org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat, 
compressed:false, numBuckets:5, serdeInfo:SerDeInfo(name:null, 
serializationLib:org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe, 
parameters:{serialization.format=2}), bucketCols:[emp_sex, emp_dept], 
sortCols:[Orde
 r(col:emp_id, order:0)], parameters:{}), 
partitionKeys:[FieldSchema(name:emp_country, type:string, comment:2-char code), 
FieldSchema(name:emp_state, type:string, comment:2-char code)], 
parameters:{last_modified_by=krishnak, last_modified_time=1297311789, 
transient_lastDdlTime=1297311791, comment=employee table}, 
viewOriginalText:null, viewExpandedText:null, tableType:MANAGED_TABLE)       
+Detailed Table Information     Table(tableName:exim_employee, dbName:importer, 
owner:njain, createTime:1322718291, lastAccessTime:0, retention:0, 
sd:StorageDescriptor(cols:[FieldSchema(name:emp_id, type:int, comment:from 
deserializer), FieldSchema(name:emp_name, type:string, comment:from 
deserializer), FieldSchema(name:emp_dob, type:string, comment:from 
deserializer), FieldSchema(name:emp_sex, type:string, comment:from 
deserializer), FieldSchema(name:emp_dept, type:int, comment:from 
deserializer)], 
location:pfile:/data/users/njain/hive1/build/ql/test/data/warehouse/importer.db/exim_employee,
 inputFormat:org.apache.hadoop.hive.ql.io.BucketizedHiveInputFormat, 
outputFormat:org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat, 
compressed:false, numBuckets:5, serdeInfo:SerDeInfo(name:null, 
serializationLib:org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe, 
parameters:{serialization.format=2}), bucketCols:[emp_sex, emp_dept], 
sortCols:[Order(col:emp_id, order:0)], paramet
 ers:{}), partitionKeys:[FieldSchema(name:emp_country, type:string, 
comment:2-char code), FieldSchema(name:emp_state, type:string, comment:2-char 
code)], parameters:{last_modified_by=njain, last_modified_time=1322718289, 
transient_lastDdlTime=1322718291, comment=employee table}, 
viewOriginalText:null, viewExpandedText:null, tableType:MANAGED_TABLE)  
 PREHOOK: query: describe extended exim_employee partition (emp_country='in', 
emp_state='tn')
 PREHOOK: type: DESCTABLE
 POSTHOOK: query: describe extended exim_employee partition (emp_country='in', 
emp_state='tn')
 POSTHOOK: type: DESCTABLE
-emp_id int     from deserializer
-emp_name       string  from deserializer
-emp_dob        string  from deserializer
-emp_sex        string  from deserializer
-emp_dept       int     from deserializer
+emp_id int     employee id
+emp_name       string  
+emp_dob        string  employee date of birth
+emp_sex        string  M/F
 emp_country    string  2-char code
 emp_state      string  2-char code
                 
-Detailed Partition Information Partition(values:[in, tn], dbName:importer, 
tableName:exim_employee, createTime:1297311791, lastAccessTime:0, 
sd:StorageDescriptor(cols:[FieldSchema(name:emp_id, type:int, comment:employee 
id), FieldSchema(name:emp_name, type:string, comment:), 
FieldSchema(name:emp_dob, type:string, comment:employee date of birth), 
FieldSchema(name:emp_sex, type:string, comment:M/F)], 
location:pfile:/Users/krishnak/Projects/hdp/sources/hive-git-apache/build/ql/test/data/warehouse/importer.db/exim_employee/emp_country=in/emp_state=tn,
 inputFormat:org.apache.hadoop.hive.ql.io.RCFileInputFormat, 
outputFormat:org.apache.hadoop.hive.ql.io.RCFileOutputFormat, compressed:false, 
numBuckets:10, serdeInfo:SerDeInfo(name:null, 
serializationLib:org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe, 
parameters:{serialization.format=1}), bucketCols:[emp_sex], 
sortCols:[Order(col:emp_id, order:1)], parameters:{}), 
parameters:{transient_lastDdlTime=1297311791})   
+Detailed Partition Information Partition(values:[in, tn], dbName:importer, 
tableName:exim_employee, createTime:1322718291, lastAccessTime:0, 
sd:StorageDescriptor(cols:[FieldSchema(name:emp_id, type:int, comment:employee 
id), FieldSchema(name:emp_name, type:string, comment:null), 
FieldSchema(name:emp_dob, type:string, comment:employee date of birth), 
FieldSchema(name:emp_sex, type:string, comment:M/F), 
FieldSchema(name:emp_country, type:string, comment:2-char code), 
FieldSchema(name:emp_state, type:string, comment:2-char code)], 
location:pfile:/data/users/njain/hive1/build/ql/test/data/warehouse/importer.db/exim_employee/emp_country=in/emp_state=tn,
 inputFormat:org.apache.hadoop.hive.ql.io.RCFileInputFormat, 
outputFormat:org.apache.hadoop.hive.ql.io.RCFileOutputFormat, compressed:false, 
numBuckets:10, serdeInfo:SerDeInfo(name:null, 
serializationLib:org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe, 
parameters:{serialization.format=1}), bucketCols:[emp_sex], sortCols:[Order
 (col:emp_id, order:1)], parameters:{}), 
parameters:{transient_lastDdlTime=1322718291}) 
 PREHOOK: query: describe extended exim_employee partition (emp_country='in', 
emp_state='ka')
 PREHOOK: type: DESCTABLE
 POSTHOOK: query: describe extended exim_employee partition (emp_country='in', 
emp_state='ka')
@@ -134,14 +133,14 @@ emp_dept  int     from deserializer
 emp_country    string  2-char code
 emp_state      string  2-char code
                 
-Detailed Partition Information Partition(values:[in, ka], dbName:importer, 
tableName:exim_employee, createTime:1297311791, lastAccessTime:0, 
sd:StorageDescriptor(cols:[FieldSchema(name:emp_id, type:int, comment:from 
deserializer), FieldSchema(name:emp_name, type:string, comment:from 
deserializer), FieldSchema(name:emp_dob, type:string, comment:from 
deserializer), FieldSchema(name:emp_sex, type:string, comment:from 
deserializer), FieldSchema(name:emp_dept, type:int, comment:from 
deserializer)], 
location:pfile:/Users/krishnak/Projects/hdp/sources/hive-git-apache/build/ql/test/data/warehouse/importer.db/exim_employee/emp_country=in/emp_state=ka,
 inputFormat:org.apache.hadoop.hive.ql.io.BucketizedHiveInputFormat, 
outputFormat:org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat, 
compressed:false, numBuckets:5, serdeInfo:SerDeInfo(name:null, 
serializationLib:org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe, 
parameters:{serialization.format=2}), bucketCols:[emp_sex, e
 mp_dept], sortCols:[Order(col:emp_id, order:0)], parameters:{}), 
parameters:{transient_lastDdlTime=1297311792})        
+Detailed Partition Information Partition(values:[in, ka], dbName:importer, 
tableName:exim_employee, createTime:1322718291, lastAccessTime:0, 
sd:StorageDescriptor(cols:[FieldSchema(name:emp_id, type:int, comment:from 
deserializer), FieldSchema(name:emp_name, type:string, comment:from 
deserializer), FieldSchema(name:emp_dob, type:string, comment:from 
deserializer), FieldSchema(name:emp_sex, type:string, comment:from 
deserializer), FieldSchema(name:emp_dept, type:int, comment:from deserializer), 
FieldSchema(name:emp_country, type:string, comment:2-char code), 
FieldSchema(name:emp_state, type:string, comment:2-char code)], 
location:pfile:/data/users/njain/hive1/build/ql/test/data/warehouse/importer.db/exim_employee/emp_country=in/emp_state=ka,
 inputFormat:org.apache.hadoop.hive.ql.io.BucketizedHiveInputFormat, 
outputFormat:org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat, 
compressed:false, numBuckets:5, serdeInfo:SerDeInfo(name:null, 
serializationLib:org.apache.hadoop.h
 ive.serde2.lazybinary.LazyBinarySerDe, parameters:{serialization.format=2}), 
bucketCols:[emp_sex, emp_dept], sortCols:[Order(col:emp_id, order:0)], 
parameters:{}), parameters:{transient_lastDdlTime=1322718291})     
 PREHOOK: query: show table extended like exim_employee
 PREHOOK: type: SHOW_TABLESTATUS
 POSTHOOK: query: show table extended like exim_employee
 POSTHOOK: type: SHOW_TABLESTATUS
 tableName:exim_employee
-owner:krishnak
-location:pfile:/Users/krishnak/Projects/hdp/sources/hive-git-apache/build/ql/test/data/warehouse/importer.db/exim_employee
+owner:njain
+location:pfile:/data/users/njain/hive1/build/ql/test/data/warehouse/importer.db/exim_employee
 inputformat:org.apache.hadoop.hive.ql.io.BucketizedHiveInputFormat
 outputformat:org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
 columns:struct columns { i32 emp_id, string emp_name, string emp_dob, string 
emp_sex, i32 emp_dept}
@@ -152,18 +151,18 @@ totalFileSize:0
 maxFileSize:0
 minFileSize:0
 lastAccessTime:0
-lastUpdateTime:1297311791000
+lastUpdateTime:1322718291000
 
 PREHOOK: query: select * from exim_employee
 PREHOOK: type: QUERY
 PREHOOK: Input: importer@exim_employee@emp_country=in/emp_state=ka
 PREHOOK: Input: importer@exim_employee@emp_country=in/emp_state=tn
-PREHOOK: Output: 
file:/var/folders/67/67R3POPtF90VG63KSmCbcU++F0U/-Tmp-/krishnak/hive_2011-02-09_20-23-13_239_2893413516045547407/-mr-10000
+PREHOOK: Output: 
file:/tmp/njain/hive_2011-11-30_21-44-52_709_1135127561865027481/-mr-10000
 POSTHOOK: query: select * from exim_employee
 POSTHOOK: type: QUERY
 POSTHOOK: Input: importer@exim_employee@emp_country=in/emp_state=ka
 POSTHOOK: Input: importer@exim_employee@emp_country=in/emp_state=tn
-POSTHOOK: Output: 
file:/var/folders/67/67R3POPtF90VG63KSmCbcU++F0U/-Tmp-/krishnak/hive_2011-02-09_20-23-13_239_2893413516045547407/-mr-10000
+POSTHOOK: Output: 
file:/tmp/njain/hive_2011-11-30_21-44-52_709_1135127561865027481/-mr-10000
 PREHOOK: query: drop table exim_employee
 PREHOOK: type: DROPTABLE
 PREHOOK: Input: importer@exim_employee

Added: hive/trunk/ql/src/test/results/clientpositive/partition_schema1.q.out
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/results/clientpositive/partition_schema1.q.out?rev=1211767&view=auto
==============================================================================
--- hive/trunk/ql/src/test/results/clientpositive/partition_schema1.q.out (added)
+++ hive/trunk/ql/src/test/results/clientpositive/partition_schema1.q.out Thu Dec  8 06:41:02 2011
@@ -0,0 +1,53 @@
+PREHOOK: query: create table partition_schema1(key string, value string) 
partitioned by (dt string)
+PREHOOK: type: CREATETABLE
+POSTHOOK: query: create table partition_schema1(key string, value string) 
partitioned by (dt string)
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: default@partition_schema1
+PREHOOK: query: insert overwrite table partition_schema1 partition(dt='100') 
select * from src1
+PREHOOK: type: QUERY
+PREHOOK: Input: default@src1
+PREHOOK: Output: default@partition_schema1@dt=100
+POSTHOOK: query: insert overwrite table partition_schema1 partition(dt='100') 
select * from src1
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@src1
+POSTHOOK: Output: default@partition_schema1@dt=100
+POSTHOOK: Lineage: partition_schema1 PARTITION(dt=100).key SIMPLE 
[(src1)src1.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: partition_schema1 PARTITION(dt=100).value SIMPLE 
[(src1)src1.FieldSchema(name:value, type:string, comment:default), ]
+PREHOOK: query: desc partition_schema1 partition(dt='100')
+PREHOOK: type: DESCTABLE
+POSTHOOK: query: desc partition_schema1 partition(dt='100')
+POSTHOOK: type: DESCTABLE
+POSTHOOK: Lineage: partition_schema1 PARTITION(dt=100).key SIMPLE 
[(src1)src1.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: partition_schema1 PARTITION(dt=100).value SIMPLE 
[(src1)src1.FieldSchema(name:value, type:string, comment:default), ]
+key    string  
+value  string  
+dt     string  
+PREHOOK: query: alter table partition_schema1 add columns (x string)
+PREHOOK: type: ALTERTABLE_ADDCOLS
+PREHOOK: Input: default@partition_schema1
+PREHOOK: Output: default@partition_schema1
+POSTHOOK: query: alter table partition_schema1 add columns (x string)
+POSTHOOK: type: ALTERTABLE_ADDCOLS
+POSTHOOK: Input: default@partition_schema1
+POSTHOOK: Output: default@partition_schema1
+POSTHOOK: Lineage: partition_schema1 PARTITION(dt=100).key SIMPLE 
[(src1)src1.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: partition_schema1 PARTITION(dt=100).value SIMPLE 
[(src1)src1.FieldSchema(name:value, type:string, comment:default), ]
+PREHOOK: query: desc partition_schema1
+PREHOOK: type: DESCTABLE
+POSTHOOK: query: desc partition_schema1
+POSTHOOK: type: DESCTABLE
+POSTHOOK: Lineage: partition_schema1 PARTITION(dt=100).key SIMPLE 
[(src1)src1.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: partition_schema1 PARTITION(dt=100).value SIMPLE 
[(src1)src1.FieldSchema(name:value, type:string, comment:default), ]
+key    string  
+value  string  
+x      string  
+dt     string  
+PREHOOK: query: desc partition_schema1 partition (dt='100')
+PREHOOK: type: DESCTABLE
+POSTHOOK: query: desc partition_schema1 partition (dt='100')
+POSTHOOK: type: DESCTABLE
+POSTHOOK: Lineage: partition_schema1 PARTITION(dt=100).key SIMPLE 
[(src1)src1.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: partition_schema1 PARTITION(dt=100).value SIMPLE 
[(src1)src1.FieldSchema(name:value, type:string, comment:default), ]
+key    string  
+value  string  
+dt     string  

