http://git-wip-us.apache.org/repos/asf/hive/blob/22541610/ql/src/test/results/clientpositive/list_bucket_dml_6.q.java1.8.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/list_bucket_dml_6.q.java1.8.out b/ql/src/test/results/clientpositive/list_bucket_dml_6.q.java1.8.out
deleted file mode 100644
index 1960d41..0000000
--- a/ql/src/test/results/clientpositive/list_bucket_dml_6.q.java1.8.out
+++ /dev/null
@@ -1,1119 +0,0 @@
-PREHOOK: query: -- list bucketing DML: dynamic partition. multiple skewed columns. merge.
--- The following explains the merge example used in this test case
--- DML will generate 2 partitions
--- ds=2008-04-08/hr=a1
--- ds=2008-04-08/hr=b1
--- without merge, each partition has more files
--- ds=2008-04-08/hr=a1 has 2 files
--- ds=2008-04-08/hr=b1 has 6 files
--- with merge, each partition has fewer files
--- ds=2008-04-08/hr=a1 has 1 file
--- ds=2008-04-08/hr=b1 has 4 files
--- The following shows file size and name in each directory
--- hr=a1/HIVE_DEFAULT_LIST_BUCKETING_DIR_NAME/HIVE_DEFAULT_LIST_BUCKETING_DIR_NAME:
--- without merge
--- 155 000000_0
--- 155 000001_0
--- with merge
--- 254 000000_0
--- hr=b1/key=103/value=val_103:
--- without merge
--- 99 000000_0
--- 99 000001_0
--- with merge
--- 142 000001_0
--- hr=b1/HIVE_DEFAULT_LIST_BUCKETING_DIR_NAME/HIVE_DEFAULT_LIST_BUCKETING_DIR_NAME:
--- without merge
--- 5181 000000_0
--- 5181 000001_0
--- with merge
--- 5181 000000_0
--- 5181 000001_0
--- hr=b1/key=484/value=val_484
--- without merge
--- 87 000000_0
--- 87 000001_0
--- with merge
--- 118 000002_0
-
--- INCLUDE_HADOOP_MAJOR_VERSIONS(0.23)
--- SORT_QUERY_RESULTS
--- JAVA_VERSION_SPECIFIC_OUTPUT
-
--- create a skewed table
-create table list_bucketing_dynamic_part (key String, value String)
- partitioned by (ds String, hr String)
- skewed by (key, value) on (('484','val_484'),('51','val_14'),('103','val_103'))
- stored as DIRECTORIES
- STORED AS RCFILE
-PREHOOK: type: CREATETABLE
-PREHOOK: Output: database:default
-PREHOOK: Output: default@list_bucketing_dynamic_part
-POSTHOOK: query: -- list bucketing DML: dynamic partition. multiple skewed columns. merge.
--- The following explains the merge example used in this test case
--- DML will generate 2 partitions
--- ds=2008-04-08/hr=a1
--- ds=2008-04-08/hr=b1
--- without merge, each partition has more files
--- ds=2008-04-08/hr=a1 has 2 files
--- ds=2008-04-08/hr=b1 has 6 files
--- with merge, each partition has fewer files
--- ds=2008-04-08/hr=a1 has 1 file
--- ds=2008-04-08/hr=b1 has 4 files
--- The following shows file size and name in each directory
--- hr=a1/HIVE_DEFAULT_LIST_BUCKETING_DIR_NAME/HIVE_DEFAULT_LIST_BUCKETING_DIR_NAME:
--- without merge
--- 155 000000_0
--- 155 000001_0
--- with merge
--- 254 000000_0
--- hr=b1/key=103/value=val_103:
--- without merge
--- 99 000000_0
--- 99 000001_0
--- with merge
--- 142 000001_0
--- hr=b1/HIVE_DEFAULT_LIST_BUCKETING_DIR_NAME/HIVE_DEFAULT_LIST_BUCKETING_DIR_NAME:
--- without merge
--- 5181 000000_0
--- 5181 000001_0
--- with merge
--- 5181 000000_0
--- 5181 000001_0
--- hr=b1/key=484/value=val_484
--- without merge
--- 87 000000_0
--- 87 000001_0
--- with merge
--- 118 000002_0
-
--- INCLUDE_HADOOP_MAJOR_VERSIONS(0.23)
--- SORT_QUERY_RESULTS
--- JAVA_VERSION_SPECIFIC_OUTPUT
-
--- create a skewed table
-create table list_bucketing_dynamic_part (key String, value String)
- partitioned by (ds String, hr String)
- skewed by (key, value) on (('484','val_484'),('51','val_14'),('103','val_103'))
- stored as DIRECTORIES
- STORED AS RCFILE
-POSTHOOK: type: CREATETABLE
-POSTHOOK: Output: database:default
-POSTHOOK: Output: default@list_bucketing_dynamic_part
-PREHOOK: query: -- list bucketing DML without merge. use bucketize to generate a few small files.
-explain extended
-insert overwrite table list_bucketing_dynamic_part partition (ds = '2008-04-08', hr)
-select key, value, if(key % 100 == 0, 'a1', 'b1') from srcpart where ds = '2008-04-08'
-PREHOOK: type: QUERY
-POSTHOOK: query: -- list bucketing DML without merge. use bucketize to generate a few small files.
-explain extended
-insert overwrite table list_bucketing_dynamic_part partition (ds = '2008-04-08', hr)
-select key, value, if(key % 100 == 0, 'a1', 'b1') from srcpart where ds = '2008-04-08'
-POSTHOOK: type: QUERY
-ABSTRACT SYNTAX TREE:
-
-TOK_QUERY
- TOK_FROM
- TOK_TABREF
- TOK_TABNAME
- srcpart
- TOK_INSERT
- TOK_DESTINATION
- TOK_TAB
- TOK_TABNAME
- list_bucketing_dynamic_part
- TOK_PARTSPEC
- TOK_PARTVAL
- ds
- '2008-04-08'
- TOK_PARTVAL
- hr
- TOK_SELECT
- TOK_SELEXPR
- TOK_TABLE_OR_COL
- key
- TOK_SELEXPR
- TOK_TABLE_OR_COL
- value
- TOK_SELEXPR
- TOK_FUNCTION
- if
- ==
- %
- TOK_TABLE_OR_COL
- key
- 100
- 0
- 'a1'
- 'b1'
- TOK_WHERE
- =
- TOK_TABLE_OR_COL
- ds
- '2008-04-08'
-
-
-STAGE DEPENDENCIES:
- Stage-1 is a root stage
- Stage-0 depends on stages: Stage-1
- Stage-2 depends on stages: Stage-0
-
-STAGE PLANS:
- Stage: Stage-1
- Map Reduce
- Map Operator Tree:
- TableScan
- alias: srcpart
- Statistics: Num rows: 1000 Data size: 10624 Basic stats: COMPLETE Column stats: NONE
- GatherStats: false
- Select Operator
- expressions: key (type: string), value (type: string), if(((UDFToDouble(key) % 100.0) = 0.0), 'a1', 'b1') (type: string)
- outputColumnNames: _col0, _col1, _col2
- Statistics: Num rows: 1000 Data size: 10624 Basic stats: COMPLETE Column stats: NONE
- File Output Operator
- compressed: false
- GlobalTableId: 1
-#### A masked pattern was here ####
- NumFilesPerFileSink: 1
- Static Partition Specification: ds=2008-04-08/
- Statistics: Num rows: 1000 Data size: 10624 Basic stats: COMPLETE Column stats: NONE
-#### A masked pattern was here ####
- table:
- input format: org.apache.hadoop.hive.ql.io.RCFileInputFormat
- output format: org.apache.hadoop.hive.ql.io.RCFileOutputFormat
- properties:
- bucket_count -1
- columns key,value
- columns.comments
- columns.types string:string
-#### A masked pattern was here ####
- name default.list_bucketing_dynamic_part
- partition_columns ds/hr
- partition_columns.types string:string
- serialization.ddl struct list_bucketing_dynamic_part { string key, string value}
- serialization.format 1
- serialization.lib org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
-#### A masked pattern was here ####
- serde: org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
- name: default.list_bucketing_dynamic_part
- TotalFiles: 1
- GatherStats: true
- MultiFileSpray: false
- Path -> Alias:
-#### A masked pattern was here ####
- Path -> Partition:
-#### A masked pattern was here ####
- Partition
- base file name: hr=11
- input format: org.apache.hadoop.mapred.TextInputFormat
- output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
- partition values:
- ds 2008-04-08
- hr 11
- properties:
- COLUMN_STATS_ACCURATE true
- bucket_count -1
- columns key,value
- columns.comments 'default','default'
- columns.types string:string
-#### A masked pattern was here ####
- name default.srcpart
- numFiles 1
- numRows 500
- partition_columns ds/hr
- partition_columns.types string:string
- rawDataSize 5312
- serialization.ddl struct srcpart { string key, string value}
- serialization.format 1
- serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
- totalSize 5812
-#### A masked pattern was here ####
- serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-
- input format: org.apache.hadoop.mapred.TextInputFormat
- output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
- properties:
- bucket_count -1
- columns key,value
- columns.comments 'default','default'
- columns.types string:string
-#### A masked pattern was here ####
- name default.srcpart
- partition_columns ds/hr
- partition_columns.types string:string
- serialization.ddl struct srcpart { string key, string value}
- serialization.format 1
- serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-#### A masked pattern was here ####
- serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
- name: default.srcpart
- name: default.srcpart
-#### A masked pattern was here ####
- Partition
- base file name: hr=12
- input format: org.apache.hadoop.mapred.TextInputFormat
- output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
- partition values:
- ds 2008-04-08
- hr 12
- properties:
- COLUMN_STATS_ACCURATE true
- bucket_count -1
- columns key,value
- columns.comments 'default','default'
- columns.types string:string
-#### A masked pattern was here ####
- name default.srcpart
- numFiles 1
- numRows 500
- partition_columns ds/hr
- partition_columns.types string:string
- rawDataSize 5312
- serialization.ddl struct srcpart { string key, string value}
- serialization.format 1
- serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
- totalSize 5812
-#### A masked pattern was here ####
- serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-
- input format: org.apache.hadoop.mapred.TextInputFormat
- output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
- properties:
- bucket_count -1
- columns key,value
- columns.comments 'default','default'
- columns.types string:string
-#### A masked pattern was here ####
- name default.srcpart
- partition_columns ds/hr
- partition_columns.types string:string
- serialization.ddl struct srcpart { string key, string value}
- serialization.format 1
- serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-#### A masked pattern was here ####
- serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
- name: default.srcpart
- name: default.srcpart
- Truncated Path -> Alias:
- /srcpart/ds=2008-04-08/hr=11 [srcpart]
- /srcpart/ds=2008-04-08/hr=12 [srcpart]
-
- Stage: Stage-0
- Move Operator
- tables:
- partition:
- ds 2008-04-08
- hr
- replace: true
-#### A masked pattern was here ####
- table:
- input format: org.apache.hadoop.hive.ql.io.RCFileInputFormat
- output format: org.apache.hadoop.hive.ql.io.RCFileOutputFormat
- properties:
- bucket_count -1
- columns key,value
- columns.comments
- columns.types string:string
-#### A masked pattern was here ####
- name default.list_bucketing_dynamic_part
- partition_columns ds/hr
- partition_columns.types string:string
- serialization.ddl struct list_bucketing_dynamic_part { string key, string value}
- serialization.format 1
- serialization.lib org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
-#### A masked pattern was here ####
- serde: org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
- name: default.list_bucketing_dynamic_part
-
- Stage: Stage-2
- Stats-Aggr Operator
-#### A masked pattern was here ####
-
-PREHOOK: query: insert overwrite table list_bucketing_dynamic_part partition (ds = '2008-04-08', hr)
-select key, value, if(key % 100 == 0, 'a1', 'b1') from srcpart where ds = '2008-04-08'
-PREHOOK: type: QUERY
-PREHOOK: Input: default@srcpart
-PREHOOK: Input: default@srcpart@ds=2008-04-08/hr=11
-PREHOOK: Input: default@srcpart@ds=2008-04-08/hr=12
-PREHOOK: Output: default@list_bucketing_dynamic_part@ds=2008-04-08
-POSTHOOK: query: insert overwrite table list_bucketing_dynamic_part partition (ds = '2008-04-08', hr)
-select key, value, if(key % 100 == 0, 'a1', 'b1') from srcpart where ds = '2008-04-08'
-POSTHOOK: type: QUERY
-POSTHOOK: Input: default@srcpart
-POSTHOOK: Input: default@srcpart@ds=2008-04-08/hr=11
-POSTHOOK: Input: default@srcpart@ds=2008-04-08/hr=12
-POSTHOOK: Output: default@list_bucketing_dynamic_part@ds=2008-04-08/hr=a1
-POSTHOOK: Output: default@list_bucketing_dynamic_part@ds=2008-04-08/hr=b1
-POSTHOOK: Lineage: list_bucketing_dynamic_part PARTITION(ds=2008-04-08,hr=a1).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ]
-POSTHOOK: Lineage: list_bucketing_dynamic_part PARTITION(ds=2008-04-08,hr=a1).value SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ]
-POSTHOOK: Lineage: list_bucketing_dynamic_part PARTITION(ds=2008-04-08,hr=b1).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ]
-POSTHOOK: Lineage: list_bucketing_dynamic_part PARTITION(ds=2008-04-08,hr=b1).value SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ]
-PREHOOK: query: -- check DML result
-show partitions list_bucketing_dynamic_part
-PREHOOK: type: SHOWPARTITIONS
-PREHOOK: Input: default@list_bucketing_dynamic_part
-POSTHOOK: query: -- check DML result
-show partitions list_bucketing_dynamic_part
-POSTHOOK: type: SHOWPARTITIONS
-POSTHOOK: Input: default@list_bucketing_dynamic_part
-ds=2008-04-08/hr=a1
-ds=2008-04-08/hr=b1
-PREHOOK: query: desc formatted list_bucketing_dynamic_part partition (ds='2008-04-08', hr='a1')
-PREHOOK: type: DESCTABLE
-PREHOOK: Input: default@list_bucketing_dynamic_part
-POSTHOOK: query: desc formatted list_bucketing_dynamic_part partition (ds='2008-04-08', hr='a1')
-POSTHOOK: type: DESCTABLE
-POSTHOOK: Input: default@list_bucketing_dynamic_part
-# col_name data_type comment
-
-key string
-value string
-
-# Partition Information
-# col_name data_type comment
-
-ds string
-hr string
-
-# Detailed Partition Information
-Partition Value: [2008-04-08, a1]
-Database: default
-Table: list_bucketing_dynamic_part
-#### A masked pattern was here ####
-Partition Parameters:
- COLUMN_STATS_ACCURATE true
- numFiles 2
- numRows 16
- rawDataSize 136
- totalSize 310
-#### A masked pattern was here ####
-
-# Storage Information
-SerDe Library: org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
-InputFormat: org.apache.hadoop.hive.ql.io.RCFileInputFormat
-OutputFormat: org.apache.hadoop.hive.ql.io.RCFileOutputFormat
-Compressed: No
-Num Buckets: -1
-Bucket Columns: []
-Sort Columns: []
-Stored As SubDirectories: Yes
-Skewed Columns: [key, value]
-Skewed Values: [[484, val_484], [51, val_14], [103, val_103]]
-Storage Desc Params:
- serialization.format 1
-PREHOOK: query: desc formatted list_bucketing_dynamic_part partition (ds='2008-04-08', hr='b1')
-PREHOOK: type: DESCTABLE
-PREHOOK: Input: default@list_bucketing_dynamic_part
-POSTHOOK: query: desc formatted list_bucketing_dynamic_part partition (ds='2008-04-08', hr='b1')
-POSTHOOK: type: DESCTABLE
-POSTHOOK: Input: default@list_bucketing_dynamic_part
-# col_name data_type comment
-
-key string
-value string
-
-# Partition Information
-# col_name data_type comment
-
-ds string
-hr string
-
-# Detailed Partition Information
-Partition Value: [2008-04-08, b1]
-Database: default
-Table: list_bucketing_dynamic_part
-#### A masked pattern was here ####
-Partition Parameters:
- COLUMN_STATS_ACCURATE true
- numFiles 6
- numRows 984
- rawDataSize 9488
- totalSize 10734
-#### A masked pattern was here ####
-
-# Storage Information
-SerDe Library: org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
-InputFormat: org.apache.hadoop.hive.ql.io.RCFileInputFormat
-OutputFormat: org.apache.hadoop.hive.ql.io.RCFileOutputFormat
-Compressed: No
-Num Buckets: -1
-Bucket Columns: []
-Sort Columns: []
-Stored As SubDirectories: Yes
-Skewed Columns: [key, value]
-Skewed Values: [[484, val_484], [51, val_14], [103, val_103]]
-#### A masked pattern was here ####
-Skewed Value to Truncated Path: {[484, val_484]=/list_bucketing_dynamic_part/ds=2008-04-08/hr=b1/key=484/value=val_484, [103, val_103]=/list_bucketing_dynamic_part/ds=2008-04-08/hr=b1/key=103/value=val_103}
-Storage Desc Params:
- serialization.format 1
-PREHOOK: query: -- list bucketing DML with merge. use bucketize to generate a few small files.
-explain extended
-insert overwrite table list_bucketing_dynamic_part partition (ds = '2008-04-08', hr)
-select key, value, if(key % 100 == 0, 'a1', 'b1') from srcpart where ds = '2008-04-08'
-PREHOOK: type: QUERY
-POSTHOOK: query: -- list bucketing DML with merge. use bucketize to generate a few small files.
-explain extended
-insert overwrite table list_bucketing_dynamic_part partition (ds = '2008-04-08', hr)
-select key, value, if(key % 100 == 0, 'a1', 'b1') from srcpart where ds = '2008-04-08'
-POSTHOOK: type: QUERY
-ABSTRACT SYNTAX TREE:
-
-TOK_QUERY
- TOK_FROM
- TOK_TABREF
- TOK_TABNAME
- srcpart
- TOK_INSERT
- TOK_DESTINATION
- TOK_TAB
- TOK_TABNAME
- list_bucketing_dynamic_part
- TOK_PARTSPEC
- TOK_PARTVAL
- ds
- '2008-04-08'
- TOK_PARTVAL
- hr
- TOK_SELECT
- TOK_SELEXPR
- TOK_TABLE_OR_COL
- key
- TOK_SELEXPR
- TOK_TABLE_OR_COL
- value
- TOK_SELEXPR
- TOK_FUNCTION
- if
- ==
- %
- TOK_TABLE_OR_COL
- key
- 100
- 0
- 'a1'
- 'b1'
- TOK_WHERE
- =
- TOK_TABLE_OR_COL
- ds
- '2008-04-08'
-
-
-STAGE DEPENDENCIES:
- Stage-1 is a root stage
- Stage-7 depends on stages: Stage-1 , consists of Stage-4, Stage-3, Stage-5
- Stage-4
- Stage-0 depends on stages: Stage-4, Stage-3, Stage-6
- Stage-2 depends on stages: Stage-0
- Stage-3
- Stage-5
- Stage-6 depends on stages: Stage-5
-
-STAGE PLANS:
- Stage: Stage-1
- Map Reduce
- Map Operator Tree:
- TableScan
- alias: srcpart
- Statistics: Num rows: 1000 Data size: 10624 Basic stats: COMPLETE Column stats: NONE
- GatherStats: false
- Select Operator
- expressions: key (type: string), value (type: string), if(((UDFToDouble(key) % 100.0) = 0.0), 'a1', 'b1') (type: string)
- outputColumnNames: _col0, _col1, _col2
- Statistics: Num rows: 1000 Data size: 10624 Basic stats: COMPLETE Column stats: NONE
- File Output Operator
- compressed: false
- GlobalTableId: 1
-#### A masked pattern was here ####
- NumFilesPerFileSink: 1
- Static Partition Specification: ds=2008-04-08/
- Statistics: Num rows: 1000 Data size: 10624 Basic stats: COMPLETE Column stats: NONE
-#### A masked pattern was here ####
- table:
- input format: org.apache.hadoop.hive.ql.io.RCFileInputFormat
- output format: org.apache.hadoop.hive.ql.io.RCFileOutputFormat
- properties:
- bucket_count -1
- columns key,value
- columns.comments
- columns.types string:string
-#### A masked pattern was here ####
- name default.list_bucketing_dynamic_part
- partition_columns hr
- partition_columns.types string
- serialization.ddl struct list_bucketing_dynamic_part { string key, string value}
- serialization.format 1
- serialization.lib org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
-#### A masked pattern was here ####
- serde: org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
- name: default.list_bucketing_dynamic_part
- TotalFiles: 1
- GatherStats: true
- MultiFileSpray: false
- Path -> Alias:
-#### A masked pattern was here ####
- Path -> Partition:
-#### A masked pattern was here ####
- Partition
- base file name: hr=11
- input format: org.apache.hadoop.mapred.TextInputFormat
- output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
- partition values:
- ds 2008-04-08
- hr 11
- properties:
- COLUMN_STATS_ACCURATE true
- bucket_count -1
- columns key,value
- columns.comments 'default','default'
- columns.types string:string
-#### A masked pattern was here ####
- name default.srcpart
- numFiles 1
- numRows 500
- partition_columns ds/hr
- partition_columns.types string:string
- rawDataSize 5312
- serialization.ddl struct srcpart { string key, string value}
- serialization.format 1
- serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
- totalSize 5812
-#### A masked pattern was here ####
- serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-
- input format: org.apache.hadoop.mapred.TextInputFormat
- output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
- properties:
- bucket_count -1
- columns key,value
- columns.comments 'default','default'
- columns.types string:string
-#### A masked pattern was here ####
- name default.srcpart
- partition_columns ds/hr
- partition_columns.types string:string
- serialization.ddl struct srcpart { string key, string value}
- serialization.format 1
- serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-#### A masked pattern was here ####
- serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
- name: default.srcpart
- name: default.srcpart
-#### A masked pattern was here ####
- Partition
- base file name: hr=12
- input format: org.apache.hadoop.mapred.TextInputFormat
- output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
- partition values:
- ds 2008-04-08
- hr 12
- properties:
- COLUMN_STATS_ACCURATE true
- bucket_count -1
- columns key,value
- columns.comments 'default','default'
- columns.types string:string
-#### A masked pattern was here ####
- name default.srcpart
- numFiles 1
- numRows 500
- partition_columns ds/hr
- partition_columns.types string:string
- rawDataSize 5312
- serialization.ddl struct srcpart { string key, string value}
- serialization.format 1
- serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
- totalSize 5812
-#### A masked pattern was here ####
- serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-
- input format: org.apache.hadoop.mapred.TextInputFormat
- output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
- properties:
- bucket_count -1
- columns key,value
- columns.comments 'default','default'
- columns.types string:string
-#### A masked pattern was here ####
- name default.srcpart
- partition_columns ds/hr
- partition_columns.types string:string
- serialization.ddl struct srcpart { string key, string value}
- serialization.format 1
- serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-#### A masked pattern was here ####
- serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
- name: default.srcpart
- name: default.srcpart
- Truncated Path -> Alias:
- /srcpart/ds=2008-04-08/hr=11 [srcpart]
- /srcpart/ds=2008-04-08/hr=12 [srcpart]
-
- Stage: Stage-7
- Conditional Operator
-
- Stage: Stage-4
- Move Operator
- files:
- hdfs directory: true
-#### A masked pattern was here ####
-
- Stage: Stage-0
- Move Operator
- tables:
- partition:
- ds 2008-04-08
- hr
- replace: true
-#### A masked pattern was here ####
- table:
- input format: org.apache.hadoop.hive.ql.io.RCFileInputFormat
- output format: org.apache.hadoop.hive.ql.io.RCFileOutputFormat
- properties:
- bucket_count -1
- columns key,value
- columns.comments
- columns.types string:string
-#### A masked pattern was here ####
- name default.list_bucketing_dynamic_part
- partition_columns hr
- partition_columns.types string
- serialization.ddl struct list_bucketing_dynamic_part { string key, string value}
- serialization.format 1
- serialization.lib org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
-#### A masked pattern was here ####
- serde: org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
- name: default.list_bucketing_dynamic_part
-
- Stage: Stage-2
- Stats-Aggr Operator
-#### A masked pattern was here ####
-
- Stage: Stage-3
- Merge File Operator
- Map Operator Tree:
- RCFile Merge Operator
- merge level: block
- Path -> Alias:
-#### A masked pattern was here ####
- Path -> Partition:
-#### A masked pattern was here ####
- Partition
- input format: org.apache.hadoop.hive.ql.io.rcfile.merge.RCFileBlockMergeInputFormat
- output format: org.apache.hadoop.hive.ql.io.RCFileOutputFormat
- properties:
- bucket_count -1
- columns key,value
- columns.comments
- columns.types string:string
-#### A masked pattern was here ####
- name default.list_bucketing_dynamic_part
- partition_columns hr
- partition_columns.types string
- serialization.ddl struct list_bucketing_dynamic_part { string key, string value}
- serialization.format 1
- serialization.lib org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
-#### A masked pattern was here ####
- serde: org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
-
- input format: org.apache.hadoop.hive.ql.io.RCFileInputFormat
- output format: org.apache.hadoop.hive.ql.io.RCFileOutputFormat
- properties:
- bucket_count -1
- columns key,value
- columns.comments
- columns.types string:string
-#### A masked pattern was here ####
- name default.list_bucketing_dynamic_part
- partition_columns hr
- partition_columns.types string
- serialization.ddl struct list_bucketing_dynamic_part { string key, string value}
- serialization.format 1
- serialization.lib org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
-#### A masked pattern was here ####
- serde: org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
- name: default.list_bucketing_dynamic_part
- name: default.list_bucketing_dynamic_part
- input format: org.apache.hadoop.hive.ql.io.RCFileInputFormat
- Truncated Path -> Alias:
-#### A masked pattern was here ####
-
- Stage: Stage-5
- Merge File Operator
- Map Operator Tree:
- RCFile Merge Operator
- merge level: block
- Path -> Alias:
-#### A masked pattern was here ####
- Path -> Partition:
-#### A masked pattern was here ####
- Partition
- input format: org.apache.hadoop.hive.ql.io.rcfile.merge.RCFileBlockMergeInputFormat
- output format: org.apache.hadoop.hive.ql.io.RCFileOutputFormat
- properties:
- bucket_count -1
- columns key,value
- columns.comments
- columns.types string:string
-#### A masked pattern was here ####
- name default.list_bucketing_dynamic_part
- partition_columns hr
- partition_columns.types string
- serialization.ddl struct list_bucketing_dynamic_part { string key, string value}
- serialization.format 1
- serialization.lib org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
-#### A masked pattern was here ####
- serde: org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
-
- input format: org.apache.hadoop.hive.ql.io.RCFileInputFormat
- output format: org.apache.hadoop.hive.ql.io.RCFileOutputFormat
- properties:
- bucket_count -1
- columns key,value
- columns.comments
- columns.types string:string
-#### A masked pattern was here ####
- name default.list_bucketing_dynamic_part
- partition_columns hr
- partition_columns.types string
- serialization.ddl struct list_bucketing_dynamic_part { string key, string value}
- serialization.format 1
- serialization.lib org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
-#### A masked pattern was here ####
- serde: org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
- name: default.list_bucketing_dynamic_part
- name: default.list_bucketing_dynamic_part
- input format: org.apache.hadoop.hive.ql.io.RCFileInputFormat
- Truncated Path -> Alias:
-#### A masked pattern was here ####
-
- Stage: Stage-6
- Move Operator
- files:
- hdfs directory: true
-#### A masked pattern was here ####
-
-PREHOOK: query: insert overwrite table list_bucketing_dynamic_part partition (ds = '2008-04-08', hr)
-select key, value, if(key % 100 == 0, 'a1', 'b1') from srcpart where ds = '2008-04-08'
-PREHOOK: type: QUERY
-PREHOOK: Input: default@srcpart
-PREHOOK: Input: default@srcpart@ds=2008-04-08/hr=11
-PREHOOK: Input: default@srcpart@ds=2008-04-08/hr=12
-PREHOOK: Output: default@list_bucketing_dynamic_part@ds=2008-04-08
-POSTHOOK: query: insert overwrite table list_bucketing_dynamic_part partition (ds = '2008-04-08', hr)
-select key, value, if(key % 100 == 0, 'a1', 'b1') from srcpart where ds = '2008-04-08'
-POSTHOOK: type: QUERY
-POSTHOOK: Input: default@srcpart
-POSTHOOK: Input: default@srcpart@ds=2008-04-08/hr=11
-POSTHOOK: Input: default@srcpart@ds=2008-04-08/hr=12
-POSTHOOK: Output: default@list_bucketing_dynamic_part@ds=2008-04-08/hr=a1
-POSTHOOK: Output: default@list_bucketing_dynamic_part@ds=2008-04-08/hr=b1
-POSTHOOK: Lineage: list_bucketing_dynamic_part PARTITION(ds=2008-04-08,hr=a1).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ]
-POSTHOOK: Lineage: list_bucketing_dynamic_part PARTITION(ds=2008-04-08,hr=a1).value SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ]
-POSTHOOK: Lineage: list_bucketing_dynamic_part PARTITION(ds=2008-04-08,hr=b1).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ]
-POSTHOOK: Lineage: list_bucketing_dynamic_part PARTITION(ds=2008-04-08,hr=b1).value SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ]
-PREHOOK: query: -- check DML result
-show partitions list_bucketing_dynamic_part
-PREHOOK: type: SHOWPARTITIONS
-PREHOOK: Input: default@list_bucketing_dynamic_part
-POSTHOOK: query: -- check DML result
-show partitions list_bucketing_dynamic_part
-POSTHOOK: type: SHOWPARTITIONS
-POSTHOOK: Input: default@list_bucketing_dynamic_part
-ds=2008-04-08/hr=a1
-ds=2008-04-08/hr=b1
-PREHOOK: query: desc formatted list_bucketing_dynamic_part partition (ds='2008-04-08', hr='a1')
-PREHOOK: type: DESCTABLE
-PREHOOK: Input: default@list_bucketing_dynamic_part
-POSTHOOK: query: desc formatted list_bucketing_dynamic_part partition (ds='2008-04-08', hr='a1')
-POSTHOOK: type: DESCTABLE
-POSTHOOK: Input: default@list_bucketing_dynamic_part
-# col_name data_type comment
-
-key string
-value string
-
-# Partition Information
-# col_name data_type comment
-
-ds string
-hr string
-
-# Detailed Partition Information
-Partition Value: [2008-04-08, a1]
-Database: default
-Table: list_bucketing_dynamic_part
-#### A masked pattern was here ####
-Partition Parameters:
- COLUMN_STATS_ACCURATE true
- numFiles 1
- numRows 16
- rawDataSize 136
- totalSize 254
-#### A masked pattern was here ####
-
-# Storage Information
-SerDe Library: org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
-InputFormat: org.apache.hadoop.hive.ql.io.RCFileInputFormat
-OutputFormat: org.apache.hadoop.hive.ql.io.RCFileOutputFormat
-Compressed: No
-Num Buckets: -1
-Bucket Columns: []
-Sort Columns: []
-Stored As SubDirectories: Yes
-Skewed Columns: [key, value]
-Skewed Values: [[484, val_484], [51, val_14], [103, val_103]]
-Storage Desc Params:
- serialization.format 1
-PREHOOK: query: desc formatted list_bucketing_dynamic_part partition (ds='2008-04-08', hr='b1')
-PREHOOK: type: DESCTABLE
-PREHOOK: Input: default@list_bucketing_dynamic_part
-POSTHOOK: query: desc formatted list_bucketing_dynamic_part partition (ds='2008-04-08', hr='b1')
-POSTHOOK: type: DESCTABLE
-POSTHOOK: Input: default@list_bucketing_dynamic_part
-# col_name data_type comment
-
-key string
-value string
-
-# Partition Information
-# col_name data_type comment
-
-ds string
-hr string
-
-# Detailed Partition Information
-Partition Value: [2008-04-08, b1]
-Database: default
-Table: list_bucketing_dynamic_part
-#### A masked pattern was here ####
-Partition Parameters:
- COLUMN_STATS_ACCURATE true
- numFiles 4
- numRows 984
- rawDataSize 9488
- totalSize 10622
-#### A masked pattern was here ####
-
-# Storage Information
-SerDe Library: org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
-InputFormat: org.apache.hadoop.hive.ql.io.RCFileInputFormat
-OutputFormat: org.apache.hadoop.hive.ql.io.RCFileOutputFormat
-Compressed: No
-Num Buckets: -1
-Bucket Columns: []
-Sort Columns: []
-Stored As SubDirectories: Yes
-Skewed Columns: [key, value]
-Skewed Values: [[484, val_484], [51, val_14], [103, val_103]]
-#### A masked pattern was here ####
-Skewed Value to Truncated Path: {[484, val_484]=/list_bucketing_dynamic_part/ds=2008-04-08/hr=b1/key=484/value=val_484, [103, val_103]=/list_bucketing_dynamic_part/ds=2008-04-08/hr=b1/key=103/value=val_103}
-Storage Desc Params:
- serialization.format 1
-PREHOOK: query: select count(1) from srcpart where ds = '2008-04-08'
-PREHOOK: type: QUERY
-PREHOOK: Input: default@srcpart
-PREHOOK: Input: default@srcpart@ds=2008-04-08/hr=11
-PREHOOK: Input: default@srcpart@ds=2008-04-08/hr=12
-#### A masked pattern was here ####
-POSTHOOK: query: select count(1) from srcpart where ds = '2008-04-08'
-POSTHOOK: type: QUERY
-POSTHOOK: Input: default@srcpart
-POSTHOOK: Input: default@srcpart@ds=2008-04-08/hr=11
-POSTHOOK: Input: default@srcpart@ds=2008-04-08/hr=12
-#### A masked pattern was here ####
-1000
-PREHOOK: query: select count(*) from list_bucketing_dynamic_part
-PREHOOK: type: QUERY
-PREHOOK: Input: default@list_bucketing_dynamic_part
-PREHOOK: Input: default@list_bucketing_dynamic_part@ds=2008-04-08/hr=a1
-PREHOOK: Input: default@list_bucketing_dynamic_part@ds=2008-04-08/hr=b1
-#### A masked pattern was here ####
-POSTHOOK: query: select count(*) from list_bucketing_dynamic_part
-POSTHOOK: type: QUERY
-POSTHOOK: Input: default@list_bucketing_dynamic_part
-POSTHOOK: Input: default@list_bucketing_dynamic_part@ds=2008-04-08/hr=a1
-POSTHOOK: Input: default@list_bucketing_dynamic_part@ds=2008-04-08/hr=b1
-#### A masked pattern was here ####
-1000
-PREHOOK: query: explain extended
-select * from list_bucketing_dynamic_part where key = '484' and value = 'val_484'
-PREHOOK: type: QUERY
-POSTHOOK: query: explain extended
-select * from list_bucketing_dynamic_part where key = '484' and value = 'val_484'
-POSTHOOK: type: QUERY
-ABSTRACT SYNTAX TREE:
-
-TOK_QUERY
- TOK_FROM
- TOK_TABREF
- TOK_TABNAME
- list_bucketing_dynamic_part
- TOK_INSERT
- TOK_DESTINATION
- TOK_DIR
- TOK_TMP_FILE
- TOK_SELECT
- TOK_SELEXPR
- TOK_ALLCOLREF
- TOK_WHERE
- and
- =
- TOK_TABLE_OR_COL
- key
- '484'
- =
- TOK_TABLE_OR_COL
- value
- 'val_484'
-
-
-STAGE DEPENDENCIES:
- Stage-0 is a root stage
-
-STAGE PLANS:
- Stage: Stage-0
- Fetch Operator
- limit: -1
- Partition Description:
- Partition
- input format: org.apache.hadoop.hive.ql.io.RCFileInputFormat
- output format: org.apache.hadoop.hive.ql.io.RCFileOutputFormat
- partition values:
- ds 2008-04-08
- hr a1
- properties:
- COLUMN_STATS_ACCURATE true
- bucket_count -1
- columns key,value
- columns.comments
- columns.types string:string
-#### A masked pattern was here ####
- name default.list_bucketing_dynamic_part
- numFiles 1
- numRows 16
- partition_columns ds/hr
- partition_columns.types string:string
- rawDataSize 136
- serialization.ddl struct list_bucketing_dynamic_part { string key, string value}
- serialization.format 1
- serialization.lib org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
- totalSize 254
-#### A masked pattern was here ####
- serde: org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
-
- input format: org.apache.hadoop.hive.ql.io.RCFileInputFormat
- output format: org.apache.hadoop.hive.ql.io.RCFileOutputFormat
- properties:
- bucket_count -1
- columns key,value
- columns.comments
- columns.types string:string
-#### A masked pattern was here ####
- name default.list_bucketing_dynamic_part
- partition_columns ds/hr
- partition_columns.types string:string
- serialization.ddl struct list_bucketing_dynamic_part { string key, string value}
- serialization.format 1
- serialization.lib org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
-#### A masked pattern was here ####
- serde: org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
- name: default.list_bucketing_dynamic_part
- name: default.list_bucketing_dynamic_part
- Partition
- input format: org.apache.hadoop.hive.ql.io.RCFileInputFormat
- output format: org.apache.hadoop.hive.ql.io.RCFileOutputFormat
- partition values:
- ds 2008-04-08
- hr b1
- properties:
- COLUMN_STATS_ACCURATE true
- bucket_count -1
- columns key,value
- columns.comments
- columns.types string:string
-#### A masked pattern was here ####
- name default.list_bucketing_dynamic_part
- numFiles 4
- numRows 984
- partition_columns ds/hr
- partition_columns.types string:string
- rawDataSize 9488
- serialization.ddl struct list_bucketing_dynamic_part { string key, string value}
- serialization.format 1
- serialization.lib org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
- totalSize 10622
-#### A masked pattern was here ####
- serde: org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
-
- input format: org.apache.hadoop.hive.ql.io.RCFileInputFormat
- output format: org.apache.hadoop.hive.ql.io.RCFileOutputFormat
- properties:
- bucket_count -1
- columns key,value
- columns.comments
- columns.types string:string
-#### A masked pattern was here ####
- name default.list_bucketing_dynamic_part
- partition_columns ds/hr
- partition_columns.types string:string
- serialization.ddl struct list_bucketing_dynamic_part { string key, string value}
- serialization.format 1
- serialization.lib org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
-#### A masked pattern was here ####
- serde: org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
- name: default.list_bucketing_dynamic_part
- name: default.list_bucketing_dynamic_part
- Processor Tree:
- TableScan
- alias: list_bucketing_dynamic_part
- Statistics: Num rows: 1000 Data size: 9624 Basic stats: COMPLETE Column stats: NONE
- GatherStats: false
- Filter Operator
- isSamplingPred: false
- predicate: ((key = '484') and (value = 'val_484')) (type: boolean)
- Statistics: Num rows: 250 Data size: 2406 Basic stats: COMPLETE Column stats: NONE
- Select Operator
- expressions: '484' (type: string), 'val_484' (type: string), ds (type: string), hr (type: string)
- outputColumnNames: _col0, _col1, _col2, _col3
- Statistics: Num rows: 250 Data size: 2406 Basic stats: COMPLETE Column stats: NONE
- ListSink
-
-PREHOOK: query: select * from list_bucketing_dynamic_part where key = '484' and value = 'val_484'
-PREHOOK: type: QUERY
-PREHOOK: Input: default@list_bucketing_dynamic_part
-PREHOOK: Input: default@list_bucketing_dynamic_part@ds=2008-04-08/hr=a1
-PREHOOK: Input: default@list_bucketing_dynamic_part@ds=2008-04-08/hr=b1
-#### A masked pattern was here ####
-POSTHOOK: query: select * from list_bucketing_dynamic_part where key = '484' and value = 'val_484'
-POSTHOOK: type: QUERY
-POSTHOOK: Input: default@list_bucketing_dynamic_part
-POSTHOOK: Input: default@list_bucketing_dynamic_part@ds=2008-04-08/hr=a1
-POSTHOOK: Input: default@list_bucketing_dynamic_part@ds=2008-04-08/hr=b1
-#### A masked pattern was here ####
-484 val_484 2008-04-08 b1
-484 val_484 2008-04-08 b1
-PREHOOK: query: select * from srcpart where ds = '2008-04-08' and key = '484' and value = 'val_484'
-PREHOOK: type: QUERY
-PREHOOK: Input: default@srcpart
-PREHOOK: Input: default@srcpart@ds=2008-04-08/hr=11
-PREHOOK: Input: default@srcpart@ds=2008-04-08/hr=12
-#### A masked pattern was here ####
-POSTHOOK: query: select * from srcpart where ds = '2008-04-08' and key = '484' and value = 'val_484'
-POSTHOOK: type: QUERY
-POSTHOOK: Input: default@srcpart
-POSTHOOK: Input: default@srcpart@ds=2008-04-08/hr=11
-POSTHOOK: Input: default@srcpart@ds=2008-04-08/hr=12
-#### A masked pattern was here ####
-484 val_484 2008-04-08 11
-484 val_484 2008-04-08 12
-PREHOOK: query: -- clean up
-drop table list_bucketing_dynamic_part
-PREHOOK: type: DROPTABLE
-PREHOOK: Input: default@list_bucketing_dynamic_part
-PREHOOK: Output: default@list_bucketing_dynamic_part
-POSTHOOK: query: -- clean up
-drop table list_bucketing_dynamic_part
-POSTHOOK: type: DROPTABLE
-POSTHOOK: Input: default@list_bucketing_dynamic_part
-POSTHOOK: Output: default@list_bucketing_dynamic_part
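
For context, the statements exercised by this deleted golden file can be read off the PREHOOK/POSTHOOK echoes above. A minimal sketch follows; the set commands are assumptions about the driving list_bucket_dml_6.q file (not part of this diff), while the DDL and DML are quoted from the echoes:

    -- assumed session settings (not shown in this golden file)
    set hive.exec.dynamic.partition=true;
    set hive.exec.dynamic.partition.mode=nonstrict;
    set hive.merge.mapfiles=true;      -- assumed: enables the merge stages seen in the second plan
    set hive.merge.mapredfiles=true;

    -- skewed, list-bucketed target table (quoted from the output above)
    create table list_bucketing_dynamic_part (key String, value String)
        partitioned by (ds String, hr String)
        skewed by (key, value) on (('484','val_484'),('51','val_14'),('103','val_103'))
        stored as DIRECTORIES
        STORED AS RCFILE;

    -- dynamic-partition insert that produces hr=a1 and hr=b1 (quoted from the output above)
    insert overwrite table list_bucketing_dynamic_part partition (ds = '2008-04-08', hr)
    select key, value, if(key % 100 == 0, 'a1', 'b1') from srcpart where ds = '2008-04-08';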