http://git-wip-us.apache.org/repos/asf/hive/blob/6f5c1135/ql/src/test/results/clientpositive/dynpart_sort_optimization.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/dynpart_sort_optimization.q.out b/ql/src/test/results/clientpositive/dynpart_sort_optimization.q.out
deleted file mode 100644
index d24ee16..0000000
--- a/ql/src/test/results/clientpositive/dynpart_sort_optimization.q.out
+++ /dev/null
@@ -1,3041 +0,0 @@
-PREHOOK: query: create table over1k(
-           t tinyint,
-           si smallint,
-           i int,
-           b bigint,
-           f float,
-           d double,
-           bo boolean,
-           s string,
-           ts timestamp,
-           dec decimal(4,2),
-           bin binary)
-       row format delimited
-       fields terminated by '|'
-PREHOOK: type: CREATETABLE
-PREHOOK: Output: database:default
-PREHOOK: Output: default@over1k
-POSTHOOK: query: create table over1k(
-           t tinyint,
-           si smallint,
-           i int,
-           b bigint,
-           f float,
-           d double,
-           bo boolean,
-           s string,
-           ts timestamp,
-           dec decimal(4,2),
-           bin binary)
-       row format delimited
-       fields terminated by '|'
-POSTHOOK: type: CREATETABLE
-POSTHOOK: Output: database:default
-POSTHOOK: Output: default@over1k
-PREHOOK: query: load data local inpath '../../data/files/over1k' into table over1k
-PREHOOK: type: LOAD
-#### A masked pattern was here ####
-PREHOOK: Output: default@over1k
-POSTHOOK: query: load data local inpath '../../data/files/over1k' into table over1k
-POSTHOOK: type: LOAD
-#### A masked pattern was here ####
-POSTHOOK: Output: default@over1k
-PREHOOK: query: create table over1k_part(
-           si smallint,
-           i int,
-           b bigint,
-           f float)
-       partitioned by (ds string, t tinyint)
-PREHOOK: type: CREATETABLE
-PREHOOK: Output: database:default
-PREHOOK: Output: default@over1k_part
-POSTHOOK: query: create table over1k_part(
-           si smallint,
-           i int,
-           b bigint,
-           f float)
-       partitioned by (ds string, t tinyint)
-POSTHOOK: type: CREATETABLE
-POSTHOOK: Output: database:default
-POSTHOOK: Output: default@over1k_part
-PREHOOK: query: create table over1k_part_limit like over1k_part
-PREHOOK: type: CREATETABLE
-PREHOOK: Output: database:default
-PREHOOK: Output: default@over1k_part_limit
-POSTHOOK: query: create table over1k_part_limit like over1k_part
-POSTHOOK: type: CREATETABLE
-POSTHOOK: Output: database:default
-POSTHOOK: Output: default@over1k_part_limit
-PREHOOK: query: create table over1k_part_buck(
-           si smallint,
-           i int,
-           b bigint,
-           f float)
-       partitioned by (t tinyint)
-       clustered by (si) into 4 buckets
-PREHOOK: type: CREATETABLE
-PREHOOK: Output: database:default
-PREHOOK: Output: default@over1k_part_buck
-POSTHOOK: query: create table over1k_part_buck(
-           si smallint,
-           i int,
-           b bigint,
-           f float)
-       partitioned by (t tinyint)
-       clustered by (si) into 4 buckets
-POSTHOOK: type: CREATETABLE
-POSTHOOK: Output: database:default
-POSTHOOK: Output: default@over1k_part_buck
-PREHOOK: query: create table over1k_part_buck_sort(
-           si smallint,
-           i int,
-           b bigint,
-           f float)
-       partitioned by (t tinyint)
-       clustered by (si) 
-       sorted by (f) into 4 buckets
-PREHOOK: type: CREATETABLE
-PREHOOK: Output: database:default
-PREHOOK: Output: default@over1k_part_buck_sort
-POSTHOOK: query: create table over1k_part_buck_sort(
-           si smallint,
-           i int,
-           b bigint,
-           f float)
-       partitioned by (t tinyint)
-       clustered by (si) 
-       sorted by (f) into 4 buckets
-POSTHOOK: type: CREATETABLE
-POSTHOOK: Output: database:default
-POSTHOOK: Output: default@over1k_part_buck_sort
-PREHOOK: query: -- map-only jobs converted to map-reduce job by hive.optimize.sort.dynamic.partition optimization
-explain insert overwrite table over1k_part partition(ds="foo", t) select si,i,b,f,t from over1k where t is null or t=27
-PREHOOK: type: QUERY
-POSTHOOK: query: -- map-only jobs converted to map-reduce job by hive.optimize.sort.dynamic.partition optimization
-explain insert overwrite table over1k_part partition(ds="foo", t) select si,i,b,f,t from over1k where t is null or t=27
-POSTHOOK: type: QUERY
-STAGE DEPENDENCIES:
-  Stage-1 is a root stage
-  Stage-0 depends on stages: Stage-1
-  Stage-2 depends on stages: Stage-0
-
-STAGE PLANS:
-  Stage: Stage-1
-    Map Reduce
-      Map Operator Tree:
-          TableScan
-            alias: over1k
-            Statistics: Num rows: 4443 Data size: 106636 Basic stats: COMPLETE 
Column stats: NONE
-            Filter Operator
-              predicate: (t is null or (t = 27)) (type: boolean)
-              Statistics: Num rows: 4442 Data size: 106611 Basic stats: 
COMPLETE Column stats: NONE
-              Select Operator
-                expressions: si (type: smallint), i (type: int), b (type: 
bigint), f (type: float), t (type: tinyint)
-                outputColumnNames: _col0, _col1, _col2, _col3, _col4
-                Statistics: Num rows: 4442 Data size: 106611 Basic stats: 
COMPLETE Column stats: NONE
-                Reduce Output Operator
-                  key expressions: _col4 (type: tinyint)
-                  sort order: +
-                  Map-reduce partition columns: _col4 (type: tinyint)
-                  Statistics: Num rows: 4442 Data size: 106611 Basic stats: 
COMPLETE Column stats: NONE
-                  value expressions: _col0 (type: smallint), _col1 (type: 
int), _col2 (type: bigint), _col3 (type: float)
-      Reduce Operator Tree:
-        Select Operator
-          expressions: VALUE._col0 (type: smallint), VALUE._col1 (type: int), 
VALUE._col2 (type: bigint), VALUE._col3 (type: float), KEY._col4 (type: tinyint)
-          outputColumnNames: _col0, _col1, _col2, _col3, _col4
-          Statistics: Num rows: 4442 Data size: 106611 Basic stats: COMPLETE 
Column stats: NONE
-          File Output Operator
-            compressed: false
-            Statistics: Num rows: 4442 Data size: 106611 Basic stats: COMPLETE 
Column stats: NONE
-            table:
-                input format: org.apache.hadoop.mapred.TextInputFormat
-                output format: 
org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-                serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-                name: default.over1k_part
-
-  Stage: Stage-0
-    Move Operator
-      tables:
-          partition:
-            ds foo
-            t 
-          replace: true
-          table:
-              input format: org.apache.hadoop.mapred.TextInputFormat
-              output format: 
org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-              serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-              name: default.over1k_part
-
-  Stage: Stage-2
-    Stats-Aggr Operator
-
-PREHOOK: query: explain insert overwrite table over1k_part_limit partition(ds="foo", t) select si,i,b,f,t from over1k where t is null or t=27 limit 10
-PREHOOK: type: QUERY
-POSTHOOK: query: explain insert overwrite table over1k_part_limit partition(ds="foo", t) select si,i,b,f,t from over1k where t is null or t=27 limit 10
-POSTHOOK: type: QUERY
-STAGE DEPENDENCIES:
-  Stage-1 is a root stage
-  Stage-2 depends on stages: Stage-1
-  Stage-0 depends on stages: Stage-2
-  Stage-3 depends on stages: Stage-0
-
-STAGE PLANS:
-  Stage: Stage-1
-    Map Reduce
-      Map Operator Tree:
-          TableScan
-            alias: over1k
-            Statistics: Num rows: 4443 Data size: 106636 Basic stats: COMPLETE 
Column stats: NONE
-            Filter Operator
-              predicate: (t is null or (t = 27)) (type: boolean)
-              Statistics: Num rows: 4442 Data size: 106611 Basic stats: 
COMPLETE Column stats: NONE
-              Select Operator
-                expressions: si (type: smallint), i (type: int), b (type: 
bigint), f (type: float), t (type: tinyint)
-                outputColumnNames: _col0, _col1, _col2, _col3, _col4
-                Statistics: Num rows: 4442 Data size: 106611 Basic stats: 
COMPLETE Column stats: NONE
-                Limit
-                  Number of rows: 10
-                  Statistics: Num rows: 10 Data size: 240 Basic stats: 
COMPLETE Column stats: NONE
-                  Reduce Output Operator
-                    sort order: 
-                    Statistics: Num rows: 10 Data size: 240 Basic stats: 
COMPLETE Column stats: NONE
-                    TopN Hash Memory Usage: 0.1
-                    value expressions: _col0 (type: smallint), _col1 (type: 
int), _col2 (type: bigint), _col3 (type: float), _col4 (type: tinyint)
-      Reduce Operator Tree:
-        Select Operator
-          expressions: VALUE._col0 (type: smallint), VALUE._col1 (type: int), 
VALUE._col2 (type: bigint), VALUE._col3 (type: float), VALUE._col4 (type: 
tinyint)
-          outputColumnNames: _col0, _col1, _col2, _col3, _col4
-          Statistics: Num rows: 10 Data size: 240 Basic stats: COMPLETE Column 
stats: NONE
-          Limit
-            Number of rows: 10
-            Statistics: Num rows: 10 Data size: 240 Basic stats: COMPLETE 
Column stats: NONE
-            File Output Operator
-              compressed: false
-              table:
-                  input format: 
org.apache.hadoop.mapred.SequenceFileInputFormat
-                  output format: 
org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
-                  serde: 
org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe
-
-  Stage: Stage-2
-    Map Reduce
-      Map Operator Tree:
-          TableScan
-            Reduce Output Operator
-              key expressions: _col4 (type: tinyint)
-              sort order: +
-              Map-reduce partition columns: _col4 (type: tinyint)
-              Statistics: Num rows: 10 Data size: 240 Basic stats: COMPLETE 
Column stats: NONE
-              value expressions: _col0 (type: smallint), _col1 (type: int), 
_col2 (type: bigint), _col3 (type: float)
-      Reduce Operator Tree:
-        Select Operator
-          expressions: VALUE._col0 (type: smallint), VALUE._col1 (type: int), 
VALUE._col2 (type: bigint), VALUE._col3 (type: float), KEY._col4 (type: tinyint)
-          outputColumnNames: _col0, _col1, _col2, _col3, _col4
-          Statistics: Num rows: 10 Data size: 240 Basic stats: COMPLETE Column 
stats: NONE
-          File Output Operator
-            compressed: false
-            Statistics: Num rows: 10 Data size: 240 Basic stats: COMPLETE 
Column stats: NONE
-            table:
-                input format: org.apache.hadoop.mapred.TextInputFormat
-                output format: 
org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-                serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-                name: default.over1k_part_limit
-
-  Stage: Stage-0
-    Move Operator
-      tables:
-          partition:
-            ds foo
-            t 
-          replace: true
-          table:
-              input format: org.apache.hadoop.mapred.TextInputFormat
-              output format: 
org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-              serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-              name: default.over1k_part_limit
-
-  Stage: Stage-3
-    Stats-Aggr Operator
-
-PREHOOK: query: explain insert overwrite table over1k_part_buck partition(t) select si,i,b,f,t from over1k where t is null or t=27
-PREHOOK: type: QUERY
-POSTHOOK: query: explain insert overwrite table over1k_part_buck partition(t) select si,i,b,f,t from over1k where t is null or t=27
-POSTHOOK: type: QUERY
-STAGE DEPENDENCIES:
-  Stage-1 is a root stage
-  Stage-0 depends on stages: Stage-1
-  Stage-2 depends on stages: Stage-0
-
-STAGE PLANS:
-  Stage: Stage-1
-    Map Reduce
-      Map Operator Tree:
-          TableScan
-            alias: over1k
-            Statistics: Num rows: 4443 Data size: 106636 Basic stats: COMPLETE 
Column stats: NONE
-            Filter Operator
-              predicate: (t is null or (t = 27)) (type: boolean)
-              Statistics: Num rows: 4442 Data size: 106611 Basic stats: 
COMPLETE Column stats: NONE
-              Select Operator
-                expressions: si (type: smallint), i (type: int), b (type: 
bigint), f (type: float), t (type: tinyint)
-                outputColumnNames: _col0, _col1, _col2, _col3, _col4
-                Statistics: Num rows: 4442 Data size: 106611 Basic stats: 
COMPLETE Column stats: NONE
-                Reduce Output Operator
-                  key expressions: _col4 (type: tinyint), '_bucket_number' 
(type: string)
-                  sort order: ++
-                  Map-reduce partition columns: _col4 (type: tinyint)
-                  Statistics: Num rows: 4442 Data size: 106611 Basic stats: 
COMPLETE Column stats: NONE
-                  value expressions: _col0 (type: smallint), _col1 (type: 
int), _col2 (type: bigint), _col3 (type: float)
-      Reduce Operator Tree:
-        Select Operator
-          expressions: VALUE._col0 (type: smallint), VALUE._col1 (type: int), 
VALUE._col2 (type: bigint), VALUE._col3 (type: float), KEY._col4 (type: 
tinyint), KEY.'_bucket_number' (type: string)
-          outputColumnNames: _col0, _col1, _col2, _col3, _col4, 
'_bucket_number'
-          Statistics: Num rows: 4442 Data size: 106611 Basic stats: COMPLETE 
Column stats: NONE
-          File Output Operator
-            compressed: false
-            Statistics: Num rows: 4442 Data size: 106611 Basic stats: COMPLETE 
Column stats: NONE
-            table:
-                input format: org.apache.hadoop.mapred.TextInputFormat
-                output format: 
org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-                serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-                name: default.over1k_part_buck
-
-  Stage: Stage-0
-    Move Operator
-      tables:
-          partition:
-            t 
-          replace: true
-          table:
-              input format: org.apache.hadoop.mapred.TextInputFormat
-              output format: 
org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-              serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-              name: default.over1k_part_buck
-
-  Stage: Stage-2
-    Stats-Aggr Operator
-
-PREHOOK: query: explain insert overwrite table over1k_part_buck_sort partition(t) select si,i,b,f,t from over1k where t is null or t=27
-PREHOOK: type: QUERY
-POSTHOOK: query: explain insert overwrite table over1k_part_buck_sort partition(t) select si,i,b,f,t from over1k where t is null or t=27
-POSTHOOK: type: QUERY
-STAGE DEPENDENCIES:
-  Stage-1 is a root stage
-  Stage-0 depends on stages: Stage-1
-  Stage-2 depends on stages: Stage-0
-
-STAGE PLANS:
-  Stage: Stage-1
-    Map Reduce
-      Map Operator Tree:
-          TableScan
-            alias: over1k
-            Statistics: Num rows: 4443 Data size: 106636 Basic stats: COMPLETE 
Column stats: NONE
-            Filter Operator
-              predicate: (t is null or (t = 27)) (type: boolean)
-              Statistics: Num rows: 4442 Data size: 106611 Basic stats: 
COMPLETE Column stats: NONE
-              Select Operator
-                expressions: si (type: smallint), i (type: int), b (type: 
bigint), f (type: float), t (type: tinyint)
-                outputColumnNames: _col0, _col1, _col2, _col3, _col4
-                Statistics: Num rows: 4442 Data size: 106611 Basic stats: 
COMPLETE Column stats: NONE
-                Reduce Output Operator
-                  key expressions: _col4 (type: tinyint), '_bucket_number' 
(type: string), _col3 (type: float)
-                  sort order: +++
-                  Map-reduce partition columns: _col4 (type: tinyint)
-                  Statistics: Num rows: 4442 Data size: 106611 Basic stats: 
COMPLETE Column stats: NONE
-                  value expressions: _col0 (type: smallint), _col1 (type: 
int), _col2 (type: bigint)
-      Reduce Operator Tree:
-        Select Operator
-          expressions: VALUE._col0 (type: smallint), VALUE._col1 (type: int), 
VALUE._col2 (type: bigint), KEY._col3 (type: float), KEY._col4 (type: tinyint), 
KEY.'_bucket_number' (type: string)
-          outputColumnNames: _col0, _col1, _col2, _col3, _col4, 
'_bucket_number'
-          Statistics: Num rows: 4442 Data size: 106611 Basic stats: COMPLETE 
Column stats: NONE
-          File Output Operator
-            compressed: false
-            Statistics: Num rows: 4442 Data size: 106611 Basic stats: COMPLETE 
Column stats: NONE
-            table:
-                input format: org.apache.hadoop.mapred.TextInputFormat
-                output format: 
org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-                serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-                name: default.over1k_part_buck_sort
-
-  Stage: Stage-0
-    Move Operator
-      tables:
-          partition:
-            t 
-          replace: true
-          table:
-              input format: org.apache.hadoop.mapred.TextInputFormat
-              output format: 
org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-              serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-              name: default.over1k_part_buck_sort
-
-  Stage: Stage-2
-    Stats-Aggr Operator
-
-PREHOOK: query: insert overwrite table over1k_part partition(ds="foo", t) select si,i,b,f,t from over1k where t is null or t=27
-PREHOOK: type: QUERY
-PREHOOK: Input: default@over1k
-PREHOOK: Output: default@over1k_part@ds=foo
-POSTHOOK: query: insert overwrite table over1k_part partition(ds="foo", t) select si,i,b,f,t from over1k where t is null or t=27
-POSTHOOK: type: QUERY
-POSTHOOK: Input: default@over1k
-POSTHOOK: Output: default@over1k_part@ds=foo/t=27
-POSTHOOK: Output: default@over1k_part@ds=foo/t=__HIVE_DEFAULT_PARTITION__
-POSTHOOK: Lineage: over1k_part PARTITION(ds=foo,t=27).b SIMPLE [(over1k)over1k.FieldSchema(name:b, type:bigint, comment:null), ]
-POSTHOOK: Lineage: over1k_part PARTITION(ds=foo,t=27).f SIMPLE [(over1k)over1k.FieldSchema(name:f, type:float, comment:null), ]
-POSTHOOK: Lineage: over1k_part PARTITION(ds=foo,t=27).i SIMPLE [(over1k)over1k.FieldSchema(name:i, type:int, comment:null), ]
-POSTHOOK: Lineage: over1k_part PARTITION(ds=foo,t=27).si SIMPLE [(over1k)over1k.FieldSchema(name:si, type:smallint, comment:null), ]
-POSTHOOK: Lineage: over1k_part PARTITION(ds=foo,t=__HIVE_DEFAULT_PARTITION__).b SIMPLE [(over1k)over1k.FieldSchema(name:b, type:bigint, comment:null), ]
-POSTHOOK: Lineage: over1k_part PARTITION(ds=foo,t=__HIVE_DEFAULT_PARTITION__).f SIMPLE [(over1k)over1k.FieldSchema(name:f, type:float, comment:null), ]
-POSTHOOK: Lineage: over1k_part PARTITION(ds=foo,t=__HIVE_DEFAULT_PARTITION__).i SIMPLE [(over1k)over1k.FieldSchema(name:i, type:int, comment:null), ]
-POSTHOOK: Lineage: over1k_part PARTITION(ds=foo,t=__HIVE_DEFAULT_PARTITION__).si SIMPLE [(over1k)over1k.FieldSchema(name:si, type:smallint, comment:null), ]
-PREHOOK: query: insert overwrite table over1k_part_limit partition(ds="foo", t) select si,i,b,f,t from over1k where t is null or t=27 limit 10
-PREHOOK: type: QUERY
-PREHOOK: Input: default@over1k
-PREHOOK: Output: default@over1k_part_limit@ds=foo
-POSTHOOK: query: insert overwrite table over1k_part_limit partition(ds="foo", t) select si,i,b,f,t from over1k where t is null or t=27 limit 10
-POSTHOOK: type: QUERY
-POSTHOOK: Input: default@over1k
-POSTHOOK: Output: default@over1k_part_limit@ds=foo/t=27
-POSTHOOK: Output: default@over1k_part_limit@ds=foo/t=__HIVE_DEFAULT_PARTITION__
-POSTHOOK: Lineage: over1k_part_limit PARTITION(ds=foo,t=27).b SIMPLE [(over1k)over1k.FieldSchema(name:b, type:bigint, comment:null), ]
-POSTHOOK: Lineage: over1k_part_limit PARTITION(ds=foo,t=27).f SIMPLE [(over1k)over1k.FieldSchema(name:f, type:float, comment:null), ]
-POSTHOOK: Lineage: over1k_part_limit PARTITION(ds=foo,t=27).i SIMPLE [(over1k)over1k.FieldSchema(name:i, type:int, comment:null), ]
-POSTHOOK: Lineage: over1k_part_limit PARTITION(ds=foo,t=27).si SIMPLE [(over1k)over1k.FieldSchema(name:si, type:smallint, comment:null), ]
-POSTHOOK: Lineage: over1k_part_limit PARTITION(ds=foo,t=__HIVE_DEFAULT_PARTITION__).b SIMPLE [(over1k)over1k.FieldSchema(name:b, type:bigint, comment:null), ]
-POSTHOOK: Lineage: over1k_part_limit PARTITION(ds=foo,t=__HIVE_DEFAULT_PARTITION__).f SIMPLE [(over1k)over1k.FieldSchema(name:f, type:float, comment:null), ]
-POSTHOOK: Lineage: over1k_part_limit PARTITION(ds=foo,t=__HIVE_DEFAULT_PARTITION__).i SIMPLE [(over1k)over1k.FieldSchema(name:i, type:int, comment:null), ]
-POSTHOOK: Lineage: over1k_part_limit PARTITION(ds=foo,t=__HIVE_DEFAULT_PARTITION__).si SIMPLE [(over1k)over1k.FieldSchema(name:si, type:smallint, comment:null), ]
-PREHOOK: query: insert overwrite table over1k_part_buck partition(t) select si,i,b,f,t from over1k where t is null or t=27
-PREHOOK: type: QUERY
-PREHOOK: Input: default@over1k
-PREHOOK: Output: default@over1k_part_buck
-POSTHOOK: query: insert overwrite table over1k_part_buck partition(t) select si,i,b,f,t from over1k where t is null or t=27
-POSTHOOK: type: QUERY
-POSTHOOK: Input: default@over1k
-POSTHOOK: Output: default@over1k_part_buck@t=27
-POSTHOOK: Output: default@over1k_part_buck@t=__HIVE_DEFAULT_PARTITION__
-POSTHOOK: Lineage: over1k_part_buck PARTITION(t=27).b SIMPLE [(over1k)over1k.FieldSchema(name:b, type:bigint, comment:null), ]
-POSTHOOK: Lineage: over1k_part_buck PARTITION(t=27).f SIMPLE [(over1k)over1k.FieldSchema(name:f, type:float, comment:null), ]
-POSTHOOK: Lineage: over1k_part_buck PARTITION(t=27).i SIMPLE [(over1k)over1k.FieldSchema(name:i, type:int, comment:null), ]
-POSTHOOK: Lineage: over1k_part_buck PARTITION(t=27).si SIMPLE [(over1k)over1k.FieldSchema(name:si, type:smallint, comment:null), ]
-POSTHOOK: Lineage: over1k_part_buck PARTITION(t=__HIVE_DEFAULT_PARTITION__).b SIMPLE [(over1k)over1k.FieldSchema(name:b, type:bigint, comment:null), ]
-POSTHOOK: Lineage: over1k_part_buck PARTITION(t=__HIVE_DEFAULT_PARTITION__).f SIMPLE [(over1k)over1k.FieldSchema(name:f, type:float, comment:null), ]
-POSTHOOK: Lineage: over1k_part_buck PARTITION(t=__HIVE_DEFAULT_PARTITION__).i SIMPLE [(over1k)over1k.FieldSchema(name:i, type:int, comment:null), ]
-POSTHOOK: Lineage: over1k_part_buck PARTITION(t=__HIVE_DEFAULT_PARTITION__).si SIMPLE [(over1k)over1k.FieldSchema(name:si, type:smallint, comment:null), ]
-PREHOOK: query: insert overwrite table over1k_part_buck_sort partition(t) select si,i,b,f,t from over1k where t is null or t=27
-PREHOOK: type: QUERY
-PREHOOK: Input: default@over1k
-PREHOOK: Output: default@over1k_part_buck_sort
-POSTHOOK: query: insert overwrite table over1k_part_buck_sort partition(t) select si,i,b,f,t from over1k where t is null or t=27
-POSTHOOK: type: QUERY
-POSTHOOK: Input: default@over1k
-POSTHOOK: Output: default@over1k_part_buck_sort@t=27
-POSTHOOK: Output: default@over1k_part_buck_sort@t=__HIVE_DEFAULT_PARTITION__
-POSTHOOK: Lineage: over1k_part_buck_sort PARTITION(t=27).b SIMPLE [(over1k)over1k.FieldSchema(name:b, type:bigint, comment:null), ]
-POSTHOOK: Lineage: over1k_part_buck_sort PARTITION(t=27).f SIMPLE [(over1k)over1k.FieldSchema(name:f, type:float, comment:null), ]
-POSTHOOK: Lineage: over1k_part_buck_sort PARTITION(t=27).i SIMPLE [(over1k)over1k.FieldSchema(name:i, type:int, comment:null), ]
-POSTHOOK: Lineage: over1k_part_buck_sort PARTITION(t=27).si SIMPLE [(over1k)over1k.FieldSchema(name:si, type:smallint, comment:null), ]
-POSTHOOK: Lineage: over1k_part_buck_sort PARTITION(t=__HIVE_DEFAULT_PARTITION__).b SIMPLE [(over1k)over1k.FieldSchema(name:b, type:bigint, comment:null), ]
-POSTHOOK: Lineage: over1k_part_buck_sort PARTITION(t=__HIVE_DEFAULT_PARTITION__).f SIMPLE [(over1k)over1k.FieldSchema(name:f, type:float, comment:null), ]
-POSTHOOK: Lineage: over1k_part_buck_sort PARTITION(t=__HIVE_DEFAULT_PARTITION__).i SIMPLE [(over1k)over1k.FieldSchema(name:i, type:int, comment:null), ]
-POSTHOOK: Lineage: over1k_part_buck_sort PARTITION(t=__HIVE_DEFAULT_PARTITION__).si SIMPLE [(over1k)over1k.FieldSchema(name:si, type:smallint, comment:null), ]
-PREHOOK: query: -- map-reduce jobs modified by hive.optimize.sort.dynamic.partition optimization
-explain insert into table over1k_part partition(ds="foo", t) select si,i,b,f,t from over1k where t is null or t=27
-PREHOOK: type: QUERY
-POSTHOOK: query: -- map-reduce jobs modified by hive.optimize.sort.dynamic.partition optimization
-explain insert into table over1k_part partition(ds="foo", t) select si,i,b,f,t from over1k where t is null or t=27
-POSTHOOK: type: QUERY
-STAGE DEPENDENCIES:
-  Stage-1 is a root stage
-  Stage-0 depends on stages: Stage-1
-  Stage-2 depends on stages: Stage-0
-
-STAGE PLANS:
-  Stage: Stage-1
-    Map Reduce
-      Map Operator Tree:
-          TableScan
-            alias: over1k
-            Statistics: Num rows: 4443 Data size: 106636 Basic stats: COMPLETE 
Column stats: NONE
-            Filter Operator
-              predicate: (t is null or (t = 27)) (type: boolean)
-              Statistics: Num rows: 4442 Data size: 106611 Basic stats: 
COMPLETE Column stats: NONE
-              Select Operator
-                expressions: si (type: smallint), i (type: int), b (type: 
bigint), f (type: float), t (type: tinyint)
-                outputColumnNames: _col0, _col1, _col2, _col3, _col4
-                Statistics: Num rows: 4442 Data size: 106611 Basic stats: 
COMPLETE Column stats: NONE
-                Reduce Output Operator
-                  key expressions: _col4 (type: tinyint)
-                  sort order: +
-                  Map-reduce partition columns: _col4 (type: tinyint)
-                  Statistics: Num rows: 4442 Data size: 106611 Basic stats: 
COMPLETE Column stats: NONE
-                  value expressions: _col0 (type: smallint), _col1 (type: 
int), _col2 (type: bigint), _col3 (type: float)
-      Reduce Operator Tree:
-        Select Operator
-          expressions: VALUE._col0 (type: smallint), VALUE._col1 (type: int), 
VALUE._col2 (type: bigint), VALUE._col3 (type: float), KEY._col4 (type: tinyint)
-          outputColumnNames: _col0, _col1, _col2, _col3, _col4
-          Statistics: Num rows: 4442 Data size: 106611 Basic stats: COMPLETE 
Column stats: NONE
-          File Output Operator
-            compressed: false
-            Statistics: Num rows: 4442 Data size: 106611 Basic stats: COMPLETE 
Column stats: NONE
-            table:
-                input format: org.apache.hadoop.mapred.TextInputFormat
-                output format: 
org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-                serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-                name: default.over1k_part
-
-  Stage: Stage-0
-    Move Operator
-      tables:
-          partition:
-            ds foo
-            t 
-          replace: false
-          table:
-              input format: org.apache.hadoop.mapred.TextInputFormat
-              output format: 
org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-              serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-              name: default.over1k_part
-
-  Stage: Stage-2
-    Stats-Aggr Operator
-
-PREHOOK: query: explain insert into table over1k_part_limit partition(ds="foo", t) select si,i,b,f,t from over1k where t is null or t=27 limit 10
-PREHOOK: type: QUERY
-POSTHOOK: query: explain insert into table over1k_part_limit partition(ds="foo", t) select si,i,b,f,t from over1k where t is null or t=27 limit 10
-POSTHOOK: type: QUERY
-STAGE DEPENDENCIES:
-  Stage-1 is a root stage
-  Stage-2 depends on stages: Stage-1
-  Stage-0 depends on stages: Stage-2
-  Stage-3 depends on stages: Stage-0
-
-STAGE PLANS:
-  Stage: Stage-1
-    Map Reduce
-      Map Operator Tree:
-          TableScan
-            alias: over1k
-            Statistics: Num rows: 4443 Data size: 106636 Basic stats: COMPLETE 
Column stats: NONE
-            Filter Operator
-              predicate: (t is null or (t = 27)) (type: boolean)
-              Statistics: Num rows: 4442 Data size: 106611 Basic stats: 
COMPLETE Column stats: NONE
-              Select Operator
-                expressions: si (type: smallint), i (type: int), b (type: 
bigint), f (type: float), t (type: tinyint)
-                outputColumnNames: _col0, _col1, _col2, _col3, _col4
-                Statistics: Num rows: 4442 Data size: 106611 Basic stats: 
COMPLETE Column stats: NONE
-                Limit
-                  Number of rows: 10
-                  Statistics: Num rows: 10 Data size: 240 Basic stats: 
COMPLETE Column stats: NONE
-                  Reduce Output Operator
-                    sort order: 
-                    Statistics: Num rows: 10 Data size: 240 Basic stats: 
COMPLETE Column stats: NONE
-                    TopN Hash Memory Usage: 0.1
-                    value expressions: _col0 (type: smallint), _col1 (type: 
int), _col2 (type: bigint), _col3 (type: float), _col4 (type: tinyint)
-      Reduce Operator Tree:
-        Select Operator
-          expressions: VALUE._col0 (type: smallint), VALUE._col1 (type: int), 
VALUE._col2 (type: bigint), VALUE._col3 (type: float), VALUE._col4 (type: 
tinyint)
-          outputColumnNames: _col0, _col1, _col2, _col3, _col4
-          Statistics: Num rows: 10 Data size: 240 Basic stats: COMPLETE Column 
stats: NONE
-          Limit
-            Number of rows: 10
-            Statistics: Num rows: 10 Data size: 240 Basic stats: COMPLETE 
Column stats: NONE
-            File Output Operator
-              compressed: false
-              table:
-                  input format: 
org.apache.hadoop.mapred.SequenceFileInputFormat
-                  output format: 
org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
-                  serde: 
org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe
-
-  Stage: Stage-2
-    Map Reduce
-      Map Operator Tree:
-          TableScan
-            Reduce Output Operator
-              key expressions: _col4 (type: tinyint)
-              sort order: +
-              Map-reduce partition columns: _col4 (type: tinyint)
-              Statistics: Num rows: 10 Data size: 240 Basic stats: COMPLETE 
Column stats: NONE
-              value expressions: _col0 (type: smallint), _col1 (type: int), 
_col2 (type: bigint), _col3 (type: float)
-      Reduce Operator Tree:
-        Select Operator
-          expressions: VALUE._col0 (type: smallint), VALUE._col1 (type: int), 
VALUE._col2 (type: bigint), VALUE._col3 (type: float), KEY._col4 (type: tinyint)
-          outputColumnNames: _col0, _col1, _col2, _col3, _col4
-          Statistics: Num rows: 10 Data size: 240 Basic stats: COMPLETE Column 
stats: NONE
-          File Output Operator
-            compressed: false
-            Statistics: Num rows: 10 Data size: 240 Basic stats: COMPLETE 
Column stats: NONE
-            table:
-                input format: org.apache.hadoop.mapred.TextInputFormat
-                output format: 
org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-                serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-                name: default.over1k_part_limit
-
-  Stage: Stage-0
-    Move Operator
-      tables:
-          partition:
-            ds foo
-            t 
-          replace: false
-          table:
-              input format: org.apache.hadoop.mapred.TextInputFormat
-              output format: 
org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-              serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-              name: default.over1k_part_limit
-
-  Stage: Stage-3
-    Stats-Aggr Operator
-
-PREHOOK: query: explain insert into table over1k_part_buck partition(t) select si,i,b,f,t from over1k where t is null or t=27
-PREHOOK: type: QUERY
-POSTHOOK: query: explain insert into table over1k_part_buck partition(t) select si,i,b,f,t from over1k where t is null or t=27
-POSTHOOK: type: QUERY
-STAGE DEPENDENCIES:
-  Stage-1 is a root stage
-  Stage-0 depends on stages: Stage-1
-  Stage-2 depends on stages: Stage-0
-
-STAGE PLANS:
-  Stage: Stage-1
-    Map Reduce
-      Map Operator Tree:
-          TableScan
-            alias: over1k
-            Statistics: Num rows: 4443 Data size: 106636 Basic stats: COMPLETE 
Column stats: NONE
-            Filter Operator
-              predicate: (t is null or (t = 27)) (type: boolean)
-              Statistics: Num rows: 4442 Data size: 106611 Basic stats: 
COMPLETE Column stats: NONE
-              Select Operator
-                expressions: si (type: smallint), i (type: int), b (type: 
bigint), f (type: float), t (type: tinyint)
-                outputColumnNames: _col0, _col1, _col2, _col3, _col4
-                Statistics: Num rows: 4442 Data size: 106611 Basic stats: 
COMPLETE Column stats: NONE
-                Reduce Output Operator
-                  key expressions: _col4 (type: tinyint), '_bucket_number' 
(type: string)
-                  sort order: ++
-                  Map-reduce partition columns: _col4 (type: tinyint)
-                  Statistics: Num rows: 4442 Data size: 106611 Basic stats: 
COMPLETE Column stats: NONE
-                  value expressions: _col0 (type: smallint), _col1 (type: 
int), _col2 (type: bigint), _col3 (type: float)
-      Reduce Operator Tree:
-        Select Operator
-          expressions: VALUE._col0 (type: smallint), VALUE._col1 (type: int), 
VALUE._col2 (type: bigint), VALUE._col3 (type: float), KEY._col4 (type: 
tinyint), KEY.'_bucket_number' (type: string)
-          outputColumnNames: _col0, _col1, _col2, _col3, _col4, 
'_bucket_number'
-          Statistics: Num rows: 4442 Data size: 106611 Basic stats: COMPLETE 
Column stats: NONE
-          File Output Operator
-            compressed: false
-            Statistics: Num rows: 4442 Data size: 106611 Basic stats: COMPLETE 
Column stats: NONE
-            table:
-                input format: org.apache.hadoop.mapred.TextInputFormat
-                output format: 
org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-                serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-                name: default.over1k_part_buck
-
-  Stage: Stage-0
-    Move Operator
-      tables:
-          partition:
-            t 
-          replace: false
-          table:
-              input format: org.apache.hadoop.mapred.TextInputFormat
-              output format: 
org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-              serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-              name: default.over1k_part_buck
-
-  Stage: Stage-2
-    Stats-Aggr Operator
-
-PREHOOK: query: explain insert into table over1k_part_buck_sort partition(t) select si,i,b,f,t from over1k where t is null or t=27
-PREHOOK: type: QUERY
-POSTHOOK: query: explain insert into table over1k_part_buck_sort partition(t) select si,i,b,f,t from over1k where t is null or t=27
-POSTHOOK: type: QUERY
-STAGE DEPENDENCIES:
-  Stage-1 is a root stage
-  Stage-0 depends on stages: Stage-1
-  Stage-2 depends on stages: Stage-0
-
-STAGE PLANS:
-  Stage: Stage-1
-    Map Reduce
-      Map Operator Tree:
-          TableScan
-            alias: over1k
-            Statistics: Num rows: 4443 Data size: 106636 Basic stats: COMPLETE 
Column stats: NONE
-            Filter Operator
-              predicate: (t is null or (t = 27)) (type: boolean)
-              Statistics: Num rows: 4442 Data size: 106611 Basic stats: 
COMPLETE Column stats: NONE
-              Select Operator
-                expressions: si (type: smallint), i (type: int), b (type: 
bigint), f (type: float), t (type: tinyint)
-                outputColumnNames: _col0, _col1, _col2, _col3, _col4
-                Statistics: Num rows: 4442 Data size: 106611 Basic stats: 
COMPLETE Column stats: NONE
-                Reduce Output Operator
-                  key expressions: _col4 (type: tinyint), '_bucket_number' 
(type: string), _col3 (type: float)
-                  sort order: +++
-                  Map-reduce partition columns: _col4 (type: tinyint)
-                  Statistics: Num rows: 4442 Data size: 106611 Basic stats: 
COMPLETE Column stats: NONE
-                  value expressions: _col0 (type: smallint), _col1 (type: 
int), _col2 (type: bigint)
-      Reduce Operator Tree:
-        Select Operator
-          expressions: VALUE._col0 (type: smallint), VALUE._col1 (type: int), 
VALUE._col2 (type: bigint), KEY._col3 (type: float), KEY._col4 (type: tinyint), 
KEY.'_bucket_number' (type: string)
-          outputColumnNames: _col0, _col1, _col2, _col3, _col4, 
'_bucket_number'
-          Statistics: Num rows: 4442 Data size: 106611 Basic stats: COMPLETE 
Column stats: NONE
-          File Output Operator
-            compressed: false
-            Statistics: Num rows: 4442 Data size: 106611 Basic stats: COMPLETE 
Column stats: NONE
-            table:
-                input format: org.apache.hadoop.mapred.TextInputFormat
-                output format: 
org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-                serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-                name: default.over1k_part_buck_sort
-
-  Stage: Stage-0
-    Move Operator
-      tables:
-          partition:
-            t 
-          replace: false
-          table:
-              input format: org.apache.hadoop.mapred.TextInputFormat
-              output format: 
org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-              serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-              name: default.over1k_part_buck_sort
-
-  Stage: Stage-2
-    Stats-Aggr Operator
-
-PREHOOK: query: insert into table over1k_part partition(ds="foo", t) select si,i,b,f,t from over1k where t is null or t=27
-PREHOOK: type: QUERY
-PREHOOK: Input: default@over1k
-PREHOOK: Output: default@over1k_part@ds=foo
-POSTHOOK: query: insert into table over1k_part partition(ds="foo", t) select si,i,b,f,t from over1k where t is null or t=27
-POSTHOOK: type: QUERY
-POSTHOOK: Input: default@over1k
-POSTHOOK: Output: default@over1k_part@ds=foo/t=27
-POSTHOOK: Output: default@over1k_part@ds=foo/t=__HIVE_DEFAULT_PARTITION__
-POSTHOOK: Lineage: over1k_part PARTITION(ds=foo,t=27).b SIMPLE [(over1k)over1k.FieldSchema(name:b, type:bigint, comment:null), ]
-POSTHOOK: Lineage: over1k_part PARTITION(ds=foo,t=27).f SIMPLE [(over1k)over1k.FieldSchema(name:f, type:float, comment:null), ]
-POSTHOOK: Lineage: over1k_part PARTITION(ds=foo,t=27).i SIMPLE [(over1k)over1k.FieldSchema(name:i, type:int, comment:null), ]
-POSTHOOK: Lineage: over1k_part PARTITION(ds=foo,t=27).si SIMPLE [(over1k)over1k.FieldSchema(name:si, type:smallint, comment:null), ]
-POSTHOOK: Lineage: over1k_part PARTITION(ds=foo,t=__HIVE_DEFAULT_PARTITION__).b SIMPLE [(over1k)over1k.FieldSchema(name:b, type:bigint, comment:null), ]
-POSTHOOK: Lineage: over1k_part PARTITION(ds=foo,t=__HIVE_DEFAULT_PARTITION__).f SIMPLE [(over1k)over1k.FieldSchema(name:f, type:float, comment:null), ]
-POSTHOOK: Lineage: over1k_part PARTITION(ds=foo,t=__HIVE_DEFAULT_PARTITION__).i SIMPLE [(over1k)over1k.FieldSchema(name:i, type:int, comment:null), ]
-POSTHOOK: Lineage: over1k_part PARTITION(ds=foo,t=__HIVE_DEFAULT_PARTITION__).si SIMPLE [(over1k)over1k.FieldSchema(name:si, type:smallint, comment:null), ]
-PREHOOK: query: insert into table over1k_part_limit partition(ds="foo", t) select si,i,b,f,t from over1k where t is null or t=27 limit 10
-PREHOOK: type: QUERY
-PREHOOK: Input: default@over1k
-PREHOOK: Output: default@over1k_part_limit@ds=foo
-POSTHOOK: query: insert into table over1k_part_limit partition(ds="foo", t) select si,i,b,f,t from over1k where t is null or t=27 limit 10
-POSTHOOK: type: QUERY
-POSTHOOK: Input: default@over1k
-POSTHOOK: Output: default@over1k_part_limit@ds=foo/t=27
-POSTHOOK: Output: default@over1k_part_limit@ds=foo/t=__HIVE_DEFAULT_PARTITION__
-POSTHOOK: Lineage: over1k_part_limit PARTITION(ds=foo,t=27).b SIMPLE [(over1k)over1k.FieldSchema(name:b, type:bigint, comment:null), ]
-POSTHOOK: Lineage: over1k_part_limit PARTITION(ds=foo,t=27).f SIMPLE [(over1k)over1k.FieldSchema(name:f, type:float, comment:null), ]
-POSTHOOK: Lineage: over1k_part_limit PARTITION(ds=foo,t=27).i SIMPLE [(over1k)over1k.FieldSchema(name:i, type:int, comment:null), ]
-POSTHOOK: Lineage: over1k_part_limit PARTITION(ds=foo,t=27).si SIMPLE [(over1k)over1k.FieldSchema(name:si, type:smallint, comment:null), ]
-POSTHOOK: Lineage: over1k_part_limit PARTITION(ds=foo,t=__HIVE_DEFAULT_PARTITION__).b SIMPLE [(over1k)over1k.FieldSchema(name:b, type:bigint, comment:null), ]
-POSTHOOK: Lineage: over1k_part_limit PARTITION(ds=foo,t=__HIVE_DEFAULT_PARTITION__).f SIMPLE [(over1k)over1k.FieldSchema(name:f, type:float, comment:null), ]
-POSTHOOK: Lineage: over1k_part_limit PARTITION(ds=foo,t=__HIVE_DEFAULT_PARTITION__).i SIMPLE [(over1k)over1k.FieldSchema(name:i, type:int, comment:null), ]
-POSTHOOK: Lineage: over1k_part_limit PARTITION(ds=foo,t=__HIVE_DEFAULT_PARTITION__).si SIMPLE [(over1k)over1k.FieldSchema(name:si, type:smallint, comment:null), ]
-PREHOOK: query: insert into table over1k_part_buck partition(t) select si,i,b,f,t from over1k where t is null or t=27
-PREHOOK: type: QUERY
-PREHOOK: Input: default@over1k
-PREHOOK: Output: default@over1k_part_buck
-POSTHOOK: query: insert into table over1k_part_buck partition(t) select si,i,b,f,t from over1k where t is null or t=27
-POSTHOOK: type: QUERY
-POSTHOOK: Input: default@over1k
-POSTHOOK: Output: default@over1k_part_buck@t=27
-POSTHOOK: Output: default@over1k_part_buck@t=__HIVE_DEFAULT_PARTITION__
-POSTHOOK: Lineage: over1k_part_buck PARTITION(t=27).b SIMPLE [(over1k)over1k.FieldSchema(name:b, type:bigint, comment:null), ]
-POSTHOOK: Lineage: over1k_part_buck PARTITION(t=27).f SIMPLE [(over1k)over1k.FieldSchema(name:f, type:float, comment:null), ]
-POSTHOOK: Lineage: over1k_part_buck PARTITION(t=27).i SIMPLE [(over1k)over1k.FieldSchema(name:i, type:int, comment:null), ]
-POSTHOOK: Lineage: over1k_part_buck PARTITION(t=27).si SIMPLE [(over1k)over1k.FieldSchema(name:si, type:smallint, comment:null), ]
-POSTHOOK: Lineage: over1k_part_buck PARTITION(t=__HIVE_DEFAULT_PARTITION__).b SIMPLE [(over1k)over1k.FieldSchema(name:b, type:bigint, comment:null), ]
-POSTHOOK: Lineage: over1k_part_buck PARTITION(t=__HIVE_DEFAULT_PARTITION__).f SIMPLE [(over1k)over1k.FieldSchema(name:f, type:float, comment:null), ]
-POSTHOOK: Lineage: over1k_part_buck PARTITION(t=__HIVE_DEFAULT_PARTITION__).i SIMPLE [(over1k)over1k.FieldSchema(name:i, type:int, comment:null), ]
-POSTHOOK: Lineage: over1k_part_buck PARTITION(t=__HIVE_DEFAULT_PARTITION__).si SIMPLE [(over1k)over1k.FieldSchema(name:si, type:smallint, comment:null), ]
-PREHOOK: query: insert into table over1k_part_buck_sort partition(t) select si,i,b,f,t from over1k where t is null or t=27
-PREHOOK: type: QUERY
-PREHOOK: Input: default@over1k
-PREHOOK: Output: default@over1k_part_buck_sort
-POSTHOOK: query: insert into table over1k_part_buck_sort partition(t) select si,i,b,f,t from over1k where t is null or t=27
-POSTHOOK: type: QUERY
-POSTHOOK: Input: default@over1k
-POSTHOOK: Output: default@over1k_part_buck_sort@t=27
-POSTHOOK: Output: default@over1k_part_buck_sort@t=__HIVE_DEFAULT_PARTITION__
-POSTHOOK: Lineage: over1k_part_buck_sort PARTITION(t=27).b SIMPLE [(over1k)over1k.FieldSchema(name:b, type:bigint, comment:null), ]
-POSTHOOK: Lineage: over1k_part_buck_sort PARTITION(t=27).f SIMPLE [(over1k)over1k.FieldSchema(name:f, type:float, comment:null), ]
-POSTHOOK: Lineage: over1k_part_buck_sort PARTITION(t=27).i SIMPLE [(over1k)over1k.FieldSchema(name:i, type:int, comment:null), ]
-POSTHOOK: Lineage: over1k_part_buck_sort PARTITION(t=27).si SIMPLE [(over1k)over1k.FieldSchema(name:si, type:smallint, comment:null), ]
-POSTHOOK: Lineage: over1k_part_buck_sort PARTITION(t=__HIVE_DEFAULT_PARTITION__).b SIMPLE [(over1k)over1k.FieldSchema(name:b, type:bigint, comment:null), ]
-POSTHOOK: Lineage: over1k_part_buck_sort PARTITION(t=__HIVE_DEFAULT_PARTITION__).f SIMPLE [(over1k)over1k.FieldSchema(name:f, type:float, comment:null), ]
-POSTHOOK: Lineage: over1k_part_buck_sort PARTITION(t=__HIVE_DEFAULT_PARTITION__).i SIMPLE [(over1k)over1k.FieldSchema(name:i, type:int, comment:null), ]
-POSTHOOK: Lineage: over1k_part_buck_sort PARTITION(t=__HIVE_DEFAULT_PARTITION__).si SIMPLE [(over1k)over1k.FieldSchema(name:si, type:smallint, comment:null), ]
-PREHOOK: query: desc formatted over1k_part partition(ds="foo",t=27)
-PREHOOK: type: DESCTABLE
-PREHOOK: Input: default@over1k_part
-POSTHOOK: query: desc formatted over1k_part partition(ds="foo",t=27)
-POSTHOOK: type: DESCTABLE
-POSTHOOK: Input: default@over1k_part
-# col_name             data_type               comment             
-                
-si                     smallint                                    
-i                      int                                         
-b                      bigint                                      
-f                      float                                       
-                
-# Partition Information                 
-# col_name             data_type               comment             
-                
-ds                     string                                      
-t                      tinyint                                     
-                
-# Detailed Partition Information                
-Partition Value:       [foo, 27]                
-Database:              default                  
-Table:                 over1k_part              
-#### A masked pattern was here ####
-Partition Parameters:           
-       COLUMN_STATS_ACCURATE   {\"BASIC_STATS\":\"true\"}
-       numFiles                2                   
-       numRows                 32                  
-       rawDataSize             830                 
-       totalSize               862                 
-#### A masked pattern was here ####
-                
-# Storage Information           
-SerDe Library:         org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe      
 
-InputFormat:           org.apache.hadoop.mapred.TextInputFormat         
-OutputFormat:          
org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat       
-Compressed:            No                       
-Num Buckets:           -1                       
-Bucket Columns:        []                       
-Sort Columns:          []                       
-Storage Desc Params:            
-       serialization.format    1                   
-PREHOOK: query: desc formatted over1k_part partition(ds="foo",t="__HIVE_DEFAULT_PARTITION__")
-PREHOOK: type: DESCTABLE
-PREHOOK: Input: default@over1k_part
-POSTHOOK: query: desc formatted over1k_part partition(ds="foo",t="__HIVE_DEFAULT_PARTITION__")
-POSTHOOK: type: DESCTABLE
-POSTHOOK: Input: default@over1k_part
-# col_name             data_type               comment             
-                
-si                     smallint                                    
-i                      int                                         
-b                      bigint                                      
-f                      float                                       
-                
-# Partition Information                 
-# col_name             data_type               comment             
-                
-ds                     string                                      
-t                      tinyint                                     
-                
-# Detailed Partition Information                
-Partition Value:       [foo, __HIVE_DEFAULT_PARTITION__]        
-Database:              default                  
-Table:                 over1k_part              
-#### A masked pattern was here ####
-Partition Parameters:           
-       COLUMN_STATS_ACCURATE   {\"BASIC_STATS\":\"true\"}
-       numFiles                2                   
-       numRows                 6                   
-       rawDataSize             156                 
-       totalSize               162                 
-#### A masked pattern was here ####
-                
-# Storage Information           
-SerDe Library:         org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe      
 
-InputFormat:           org.apache.hadoop.mapred.TextInputFormat         
-OutputFormat:          
org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat       
-Compressed:            No                       
-Num Buckets:           -1                       
-Bucket Columns:        []                       
-Sort Columns:          []                       
-Storage Desc Params:            
-       serialization.format    1                   
-PREHOOK: query: desc formatted over1k_part_limit partition(ds="foo",t=27)
-PREHOOK: type: DESCTABLE
-PREHOOK: Input: default@over1k_part_limit
-POSTHOOK: query: desc formatted over1k_part_limit partition(ds="foo",t=27)
-POSTHOOK: type: DESCTABLE
-POSTHOOK: Input: default@over1k_part_limit
-# col_name             data_type               comment             
-                
-si                     smallint                                    
-i                      int                                         
-b                      bigint                                      
-f                      float                                       
-                
-# Partition Information                 
-# col_name             data_type               comment             
-                
-ds                     string                                      
-t                      tinyint                                     
-                
-# Detailed Partition Information                
-Partition Value:       [foo, 27]                
-Database:              default                  
-Table:                 over1k_part_limit        
-#### A masked pattern was here ####
-Partition Parameters:           
-       COLUMN_STATS_ACCURATE   {\"BASIC_STATS\":\"true\"}
-       numFiles                2                   
-       numRows                 14                  
-       rawDataSize             362                 
-       totalSize               376                 
-#### A masked pattern was here ####
-                
-# Storage Information           
-SerDe Library:         org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe      
 
-InputFormat:           org.apache.hadoop.mapred.TextInputFormat         
-OutputFormat:          
org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat       
-Compressed:            No                       
-Num Buckets:           -1                       
-Bucket Columns:        []                       
-Sort Columns:          []                       
-Storage Desc Params:            
-       serialization.format    1                   
-PREHOOK: query: desc formatted over1k_part_limit partition(ds="foo",t="__HIVE_DEFAULT_PARTITION__")
-PREHOOK: type: DESCTABLE
-PREHOOK: Input: default@over1k_part_limit
-POSTHOOK: query: desc formatted over1k_part_limit partition(ds="foo",t="__HIVE_DEFAULT_PARTITION__")
-POSTHOOK: type: DESCTABLE
-POSTHOOK: Input: default@over1k_part_limit
-# col_name             data_type               comment             
-                
-si                     smallint                                    
-i                      int                                         
-b                      bigint                                      
-f                      float                                       
-                
-# Partition Information                 
-# col_name             data_type               comment             
-                
-ds                     string                                      
-t                      tinyint                                     
-                
-# Detailed Partition Information                
-Partition Value:       [foo, __HIVE_DEFAULT_PARTITION__]        
-Database:              default                  
-Table:                 over1k_part_limit        
-#### A masked pattern was here ####
-Partition Parameters:           
-       COLUMN_STATS_ACCURATE   {\"BASIC_STATS\":\"true\"}
-       numFiles                2                   
-       numRows                 6                   
-       rawDataSize             156                 
-       totalSize               162                 
-#### A masked pattern was here ####
-                
-# Storage Information           
-SerDe Library:         org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe      
 
-InputFormat:           org.apache.hadoop.mapred.TextInputFormat         
-OutputFormat:          
org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat       
-Compressed:            No                       
-Num Buckets:           -1                       
-Bucket Columns:        []                       
-Sort Columns:          []                       
-Storage Desc Params:            
-       serialization.format    1                   
-PREHOOK: query: desc formatted over1k_part_buck partition(t=27)
-PREHOOK: type: DESCTABLE
-PREHOOK: Input: default@over1k_part_buck
-POSTHOOK: query: desc formatted over1k_part_buck partition(t=27)
-POSTHOOK: type: DESCTABLE
-POSTHOOK: Input: default@over1k_part_buck
-# col_name             data_type               comment             
-                
-si                     smallint                                    
-i                      int                                         
-b                      bigint                                      
-f                      float                                       
-                
-# Partition Information                 
-# col_name             data_type               comment             
-                
-t                      tinyint                                     
-                
-# Detailed Partition Information                
-Partition Value:       [27]                     
-Database:              default                  
-Table:                 over1k_part_buck         
-#### A masked pattern was here ####
-Partition Parameters:           
-       COLUMN_STATS_ACCURATE   {\"BASIC_STATS\":\"true\"}
-       numFiles                8                   
-       numRows                 32                  
-       rawDataSize             830                 
-       totalSize               862                 
-#### A masked pattern was here ####
-                
-# Storage Information           
-SerDe Library:         org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe      
 
-InputFormat:           org.apache.hadoop.mapred.TextInputFormat         
-OutputFormat:          
org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat       
-Compressed:            No                       
-Num Buckets:           4                        
-Bucket Columns:        [si]                     
-Sort Columns:          []                       
-Storage Desc Params:            
-       serialization.format    1                   
-PREHOOK: query: desc formatted over1k_part_buck partition(t="__HIVE_DEFAULT_PARTITION__")
-PREHOOK: type: DESCTABLE
-PREHOOK: Input: default@over1k_part_buck
-POSTHOOK: query: desc formatted over1k_part_buck partition(t="__HIVE_DEFAULT_PARTITION__")
-POSTHOOK: type: DESCTABLE
-POSTHOOK: Input: default@over1k_part_buck
-# col_name             data_type               comment             
-                
-si                     smallint                                    
-i                      int                                         
-b                      bigint                                      
-f                      float                                       
-                
-# Partition Information                 
-# col_name             data_type               comment             
-                
-t                      tinyint                                     
-                
-# Detailed Partition Information                
-Partition Value:       [__HIVE_DEFAULT_PARTITION__]     
-Database:              default                  
-Table:                 over1k_part_buck         
-#### A masked pattern was here ####
-Partition Parameters:           
-       COLUMN_STATS_ACCURATE   {\"BASIC_STATS\":\"true\"}
-       numFiles                8                   
-       numRows                 6                   
-       rawDataSize             156                 
-       totalSize               162                 
-#### A masked pattern was here ####
-                
-# Storage Information           
-SerDe Library:         org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe      
 
-InputFormat:           org.apache.hadoop.mapred.TextInputFormat         
-OutputFormat:          
org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat       
-Compressed:            No                       
-Num Buckets:           4                        
-Bucket Columns:        [si]                     
-Sort Columns:          []                       
-Storage Desc Params:            
-       serialization.format    1                   
-PREHOOK: query: desc formatted over1k_part_buck_sort partition(t=27)
-PREHOOK: type: DESCTABLE
-PREHOOK: Input: default@over1k_part_buck_sort
-POSTHOOK: query: desc formatted over1k_part_buck_sort partition(t=27)
-POSTHOOK: type: DESCTABLE
-POSTHOOK: Input: default@over1k_part_buck_sort
-# col_name             data_type               comment             
-                
-si                     smallint                                    
-i                      int                                         
-b                      bigint                                      
-f                      float                                       
-                
-# Partition Information                 
-# col_name             data_type               comment             
-                
-t                      tinyint                                     
-                
-# Detailed Partition Information                
-Partition Value:       [27]                     
-Database:              default                  
-Table:                 over1k_part_buck_sort    
-#### A masked pattern was here ####
-Partition Parameters:           
-       COLUMN_STATS_ACCURATE   {\"BASIC_STATS\":\"true\"}
-       numFiles                8                   
-       numRows                 32                  
-       rawDataSize             830                 
-       totalSize               862                 
-#### A masked pattern was here ####
-                
-# Storage Information           
-SerDe Library:         org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe      
 
-InputFormat:           org.apache.hadoop.mapred.TextInputFormat         
-OutputFormat:          
org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat       
-Compressed:            No                       
-Num Buckets:           4                        
-Bucket Columns:        [si]                     
-Sort Columns:          [Order(col:f, order:1)]  
-Storage Desc Params:            
-       serialization.format    1                   
-PREHOOK: query: desc formatted over1k_part_buck_sort 
partition(t="__HIVE_DEFAULT_PARTITION__")
-PREHOOK: type: DESCTABLE
-PREHOOK: Input: default@over1k_part_buck_sort
-POSTHOOK: query: desc formatted over1k_part_buck_sort 
partition(t="__HIVE_DEFAULT_PARTITION__")
-POSTHOOK: type: DESCTABLE
-POSTHOOK: Input: default@over1k_part_buck_sort
-# col_name             data_type               comment             
-                
-si                     smallint                                    
-i                      int                                         
-b                      bigint                                      
-f                      float                                       
-                
-# Partition Information                 
-# col_name             data_type               comment             
-                
-t                      tinyint                                     
-                
-# Detailed Partition Information                
-Partition Value:       [__HIVE_DEFAULT_PARTITION__]     
-Database:              default                  
-Table:                 over1k_part_buck_sort    
-#### A masked pattern was here ####
-Partition Parameters:           
-       COLUMN_STATS_ACCURATE   {\"BASIC_STATS\":\"true\"}
-       numFiles                8                   
-       numRows                 6                   
-       rawDataSize             156                 
-       totalSize               162                 
-#### A masked pattern was here ####
-                
-# Storage Information           
-SerDe Library:         org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe      
 
-InputFormat:           org.apache.hadoop.mapred.TextInputFormat         
-OutputFormat:          
org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat       
-Compressed:            No                       
-Num Buckets:           4                        
-Bucket Columns:        [si]                     
-Sort Columns:          [Order(col:f, order:1)]  
-Storage Desc Params:            
-       serialization.format    1                   
-PREHOOK: query: select count(*) from over1k_part
-PREHOOK: type: QUERY
-PREHOOK: Input: default@over1k_part
-PREHOOK: Input: default@over1k_part@ds=foo/t=27
-PREHOOK: Input: default@over1k_part@ds=foo/t=__HIVE_DEFAULT_PARTITION__
-#### A masked pattern was here ####
-POSTHOOK: query: select count(*) from over1k_part
-POSTHOOK: type: QUERY
-POSTHOOK: Input: default@over1k_part
-POSTHOOK: Input: default@over1k_part@ds=foo/t=27
-POSTHOOK: Input: default@over1k_part@ds=foo/t=__HIVE_DEFAULT_PARTITION__
-#### A masked pattern was here ####
-38
-PREHOOK: query: select count(*) from over1k_part_limit
-PREHOOK: type: QUERY
-PREHOOK: Input: default@over1k_part_limit
-PREHOOK: Input: default@over1k_part_limit@ds=foo/t=27
-PREHOOK: Input: default@over1k_part_limit@ds=foo/t=__HIVE_DEFAULT_PARTITION__
-#### A masked pattern was here ####
-POSTHOOK: query: select count(*) from over1k_part_limit
-POSTHOOK: type: QUERY
-POSTHOOK: Input: default@over1k_part_limit
-POSTHOOK: Input: default@over1k_part_limit@ds=foo/t=27
-POSTHOOK: Input: default@over1k_part_limit@ds=foo/t=__HIVE_DEFAULT_PARTITION__
-#### A masked pattern was here ####
-20
-PREHOOK: query: select count(*) from over1k_part_buck
-PREHOOK: type: QUERY
-PREHOOK: Input: default@over1k_part_buck
-PREHOOK: Input: default@over1k_part_buck@t=27
-PREHOOK: Input: default@over1k_part_buck@t=__HIVE_DEFAULT_PARTITION__
-#### A masked pattern was here ####
-POSTHOOK: query: select count(*) from over1k_part_buck
-POSTHOOK: type: QUERY
-POSTHOOK: Input: default@over1k_part_buck
-POSTHOOK: Input: default@over1k_part_buck@t=27
-POSTHOOK: Input: default@over1k_part_buck@t=__HIVE_DEFAULT_PARTITION__
-#### A masked pattern was here ####
-38
-PREHOOK: query: select count(*) from over1k_part_buck_sort
-PREHOOK: type: QUERY
-PREHOOK: Input: default@over1k_part_buck_sort
-PREHOOK: Input: default@over1k_part_buck_sort@t=27
-PREHOOK: Input: default@over1k_part_buck_sort@t=__HIVE_DEFAULT_PARTITION__
-#### A masked pattern was here ####
-POSTHOOK: query: select count(*) from over1k_part_buck_sort
-POSTHOOK: type: QUERY
-POSTHOOK: Input: default@over1k_part_buck_sort
-POSTHOOK: Input: default@over1k_part_buck_sort@t=27
-POSTHOOK: Input: default@over1k_part_buck_sort@t=__HIVE_DEFAULT_PARTITION__
-#### A masked pattern was here ####
-38
-PREHOOK: query: -- tests for HIVE-6883
-create table over1k_part2(
-           si smallint,
-           i int,
-           b bigint,
-           f float)
-       partitioned by (ds string, t tinyint)
-PREHOOK: type: CREATETABLE
-PREHOOK: Output: database:default
-PREHOOK: Output: default@over1k_part2
-POSTHOOK: query: -- tests for HIVE-6883
-create table over1k_part2(
-           si smallint,
-           i int,
-           b bigint,
-           f float)
-       partitioned by (ds string, t tinyint)
-POSTHOOK: type: CREATETABLE
-POSTHOOK: Output: database:default
-POSTHOOK: Output: default@over1k_part2
-PREHOOK: query: explain insert overwrite table over1k_part2 
partition(ds="foo",t) select si,i,b,f,t from over1k where t is null or t=27 
order by i
-PREHOOK: type: QUERY
-POSTHOOK: query: explain insert overwrite table over1k_part2 
partition(ds="foo",t) select si,i,b,f,t from over1k where t is null or t=27 
order by i
-POSTHOOK: type: QUERY
-STAGE DEPENDENCIES:
-  Stage-1 is a root stage
-  Stage-0 depends on stages: Stage-1
-  Stage-2 depends on stages: Stage-0
-
-STAGE PLANS:
-  Stage: Stage-1
-    Map Reduce
-      Map Operator Tree:
-          TableScan
-            alias: over1k
-            Statistics: Num rows: 4443 Data size: 106636 Basic stats: COMPLETE 
Column stats: NONE
-            Filter Operator
-              predicate: (t is null or (t = 27)) (type: boolean)
-              Statistics: Num rows: 4442 Data size: 106611 Basic stats: 
COMPLETE Column stats: NONE
-              Select Operator
-                expressions: si (type: smallint), i (type: int), b (type: 
bigint), f (type: float), t (type: tinyint)
-                outputColumnNames: _col0, _col1, _col2, _col3, _col4
-                Statistics: Num rows: 4442 Data size: 106611 Basic stats: 
COMPLETE Column stats: NONE
-                Reduce Output Operator
-                  key expressions: _col1 (type: int)
-                  sort order: +
-                  Statistics: Num rows: 4442 Data size: 106611 Basic stats: 
COMPLETE Column stats: NONE
-                  value expressions: _col0 (type: smallint), _col2 (type: 
bigint), _col3 (type: float), _col4 (type: tinyint)
-      Reduce Operator Tree:
-        Select Operator
-          expressions: VALUE._col0 (type: smallint), KEY.reducesinkkey0 (type: 
int), VALUE._col1 (type: bigint), VALUE._col2 (type: float), VALUE._col3 (type: 
tinyint)
-          outputColumnNames: _col0, _col1, _col2, _col3, _col4
-          Statistics: Num rows: 4442 Data size: 106611 Basic stats: COMPLETE 
Column stats: NONE
-          File Output Operator
-            compressed: false
-            Statistics: Num rows: 4442 Data size: 106611 Basic stats: COMPLETE 
Column stats: NONE
-            table:
-                input format: org.apache.hadoop.mapred.TextInputFormat
-                output format: 
org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-                serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-                name: default.over1k_part2
-
-  Stage: Stage-0
-    Move Operator
-      tables:
-          partition:
-            ds foo
-            t 
-          replace: true
-          table:
-              input format: org.apache.hadoop.mapred.TextInputFormat
-              output format: 
org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-              serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-              name: default.over1k_part2
-
-  Stage: Stage-2
-    Stats-Aggr Operator
-
-PREHOOK: query: explain insert overwrite table over1k_part2 
partition(ds="foo",t) select si,i,b,f,t from over1k where t is null or t=27 
order by i
-PREHOOK: type: QUERY
-POSTHOOK: query: explain insert overwrite table over1k_part2 
partition(ds="foo",t) select si,i,b,f,t from over1k where t is null or t=27 
order by i
-POSTHOOK: type: QUERY
-STAGE DEPENDENCIES:
-  Stage-1 is a root stage
-  Stage-0 depends on stages: Stage-1
-  Stage-2 depends on stages: Stage-0
-
-STAGE PLANS:
-  Stage: Stage-1
-    Map Reduce
-      Map Operator Tree:
-          TableScan
-            alias: over1k
-            Statistics: Num rows: 4443 Data size: 106636 Basic stats: COMPLETE 
Column stats: NONE
-            Filter Operator
-              predicate: (t is null or (t = 27)) (type: boolean)
-              Statistics: Num rows: 4442 Data size: 106611 Basic stats: 
COMPLETE Column stats: NONE
-              Select Operator
-                expressions: si (type: smallint), i (type: int), b (type: 
bigint), f (type: float), t (type: tinyint)
-                outputColumnNames: _col0, _col1, _col2, _col3, _col4
-                Statistics: Num rows: 4442 Data size: 106611 Basic stats: 
COMPLETE Column stats: NONE
-                Reduce Output Operator
-                  key expressions: _col4 (type: tinyint), _col1 (type: int)
-                  sort order: ++
-                  Map-reduce partition columns: _col4 (type: tinyint)
-                  Statistics: Num rows: 4442 Data size: 106611 Basic stats: 
COMPLETE Column stats: NONE
-                  value expressions: _col0 (type: smallint), _col2 (type: 
bigint), _col3 (type: float)
-      Reduce Operator Tree:
-        Select Operator
-          expressions: VALUE._col0 (type: smallint), KEY._col1 (type: int), 
VALUE._col2 (type: bigint), VALUE._col3 (type: float), KEY._col4 (type: tinyint)
-          outputColumnNames: _col0, _col1, _col2, _col3, _col4
-          Statistics: Num rows: 4442 Data size: 106611 Basic stats: COMPLETE 
Column stats: NONE
-          File Output Operator
-            compressed: false
-            Statistics: Num rows: 4442 Data size: 106611 Basic stats: COMPLETE 
Column stats: NONE
-            table:
-                input format: org.apache.hadoop.mapred.TextInputFormat
-                output format: 
org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-                serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-                name: default.over1k_part2
-
-  Stage: Stage-0
-    Move Operator
-      tables:
-          partition:
-            ds foo
-            t 
-          replace: true
-          table:
-              input format: org.apache.hadoop.mapred.TextInputFormat
-              output format: 
org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-              serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-              name: default.over1k_part2
-
-  Stage: Stage-2
-    Stats-Aggr Operator
-
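The two plans above differ in the shuffle that feeds the final File Output Operator: in the first, the Reduce Output Operator keys only on _col1 (the ORDER BY column i), while in the second the dynamic partition column _col4 (t) is prepended to the key and used as the map-reduce partition column, so each reducer receives whole partitions already sorted. A minimal HiveQL sketch of how such a pair of plans might be reproduced follows; the property names are assumptions based on the optimization this test exercises, not something shown in the diff itself:

    -- hypothetical reproduction, not part of the deleted .q.out
    set hive.exec.dynamic.partition=true;
    set hive.exec.dynamic.partition.mode=nonstrict;

    -- plan without the rewrite (reduce key: i only)
    set hive.optimize.sort.dynamic.partition=false;
    explain insert overwrite table over1k_part2 partition(ds="foo",t)
      select si,i,b,f,t from over1k where t is null or t=27 order by i;

    -- plan with the rewrite (reduce key: t, i; partitioned by t)
    set hive.optimize.sort.dynamic.partition=true;
    explain insert overwrite table over1k_part2 partition(ds="foo",t)
      select si,i,b,f,t from over1k where t is null or t=27 order by i;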
-PREHOOK: query: explain insert overwrite table over1k_part2 
partition(ds="foo",t) select si,i,b,f,t from (select * from over1k order by i 
limit 10) tmp where t is null or t=27
-PREHOOK: type: QUERY
-POSTHOOK: query: explain insert overwrite table over1k_part2 
partition(ds="foo",t) select si,i,b,f,t from (select * from over1k order by i 
limit 10) tmp where t is null or t=27
-POSTHOOK: type: QUERY
-STAGE DEPENDENCIES:
-  Stage-1 is a root stage
-  Stage-2 depends on stages: Stage-1
-  Stage-0 depends on stages: Stage-2
-  Stage-3 depends on stages: Stage-0
-
-STAGE PLANS:
-  Stage: Stage-1
-    Map Reduce
-      Map Operator Tree:
-          TableScan
-            alias: over1k
-            Statistics: Num rows: 4443 Data size: 106636 Basic stats: COMPLETE 
Column stats: NONE
-            Select Operator
-              expressions: t (type: tinyint), si (type: smallint), i (type: 
int), b (type: bigint), f (type: float)
-              outputColumnNames: _col0, _col1, _col2, _col3, _col4
-              Statistics: Num rows: 4443 Data size: 106636 Basic stats: 
COMPLETE Column stats: NONE
-              Reduce Output Operator
-                key expressions: _col2 (type: int)
-                sort order: +
-                Statistics: Num rows: 4443 Data size: 106636 Basic stats: 
COMPLETE Column stats: NONE
-                TopN Hash Memory Usage: 0.1
-                value expressions: _col0 (type: tinyint), _col1 (type: 
smallint), _col3 (type: bigint), _col4 (type: float)
-      Reduce Operator Tree:
-        Select Operator
-          expressions: VALUE._col0 (type: tinyint), VALUE._col1 (type: 
smallint), KEY.reducesinkkey0 (type: int), VALUE._col2 (type: bigint), 
VALUE._col3 (type: float)
-          outputColumnNames: _col0, _col1, _col2, _col3, _col4
-          Statistics: Num rows: 4443 Data size: 106636 Basic stats: COMPLETE 
Column stats: NONE
-          Limit
-            Number of rows: 10
-            Statistics: Num rows: 10 Data size: 240 Basic stats: COMPLETE 
Column stats: NONE
-            Filter Operator
-              predicate: (_col0 is null or (_col0 = 27)) (type: boolean)
-              Statistics: Num rows: 10 Data size: 240 Basic stats: COMPLETE 
Column stats: NONE
-              Select Operator
-                expressions: _col1 (type: smallint), _col2 (type: int), _col3 
(type: bigint), _col4 (type: float), _col0 (type: tinyint)
-                outputColumnNames: _col0, _col1, _col2, _col3, _col4
-                Statistics: Num rows: 10 Data size: 240 Basic stats: COMPLETE 
Column stats: NONE
-                File Output Operator
-                  compressed: false
-                  table:
-                      input format: 
org.apache.hadoop.mapred.SequenceFileInputFormat
-                      output format: 
org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
-                      serde: 
org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe
-
-  Stage: Stage-2
-    Map Reduce
-      Map Operator Tree:
-          TableScan
-            Reduce Output Operator
-              key expressions: _col4 (type: tinyint)
-              sort order: +
-              Map-reduce partition columns: _col4 (type: tinyint)
-              Statistics: Num rows: 10 Data size: 240 Basic stats: COMPLETE 
Column stats: NONE
-              value expressions: _col0 (type: smallint), _col1 (type: int), 
_col2 (type: bigint), _col3 (type: float)
-      Reduce Operator Tree:
-        Select Operator
-          expressions: VALUE._col0 (type: smallint), VALUE._col1 (type: int), 
VALUE._col2 (type: bigint), VALUE._col3 (type: float), KEY._col4 (type: tinyint)
-          outputColumnNames: _col0, _col1, _col2, _col3, _col4
-          Statistics: Num rows: 10 Data size: 240 Basic stats: COMPLETE Column 
stats: NONE
-          File Output Operator
-            compressed: false
-            Statistics: Num rows: 10 Data size: 240 Basic stats: COMPLETE 
Column stats: NONE
-            table:
-                input format: org.apache.hadoop.mapred.TextInputFormat
-                output format: 
org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-                serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-                name: default.over1k_part2
-
-  Stage: Stage-0
-    Move Operator
-      tables:
-          partition:
-            ds foo
-            t 
-          replace: true
-          table:
-              input format: org.apache.hadoop.mapred.TextInputFormat
-              output format: 
org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-              serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-              name: default.over1k_part2
-
-  Stage: Stage-3
-    Stats-Aggr Operator
-
-PREHOOK: query: explain insert overwrite table over1k_part2 
partition(ds="foo",t) select si,i,b,f,t from over1k where t is null or t=27 
group by si,i,b,f,t
-PREHOOK: type: QUERY
-POSTHOOK: query: explain insert overwrite table over1k_part2 
partition(ds="foo",t) select si,i,b,f,t from over1k where t is null or t=27 
group by si,i,b,f,t
-POSTHOOK: type: QUERY
-STAGE DEPENDENCIES:
-  Stage-1 is a root stage
-  Stage-0 depends on stages: Stage-1
-  Stage-2 depends on stages: Stage-0
-
-STAGE PLANS:
-  Stage: Stage-1
-    Map Reduce
-      Map Operator Tree:
-          TableScan
-            alias: over1k
-            Statistics: Num rows: 4443 Data size: 106636 Basic stats: COMPLETE 
Column stats: NONE
-            Filter Operator
-              predicate: (t is null or (t = 27)) (type: boolean)
-              Statistics: Num rows: 4442 Data size: 106611 Basic stats: 
COMPLETE Column stats: NONE
-              Group By Operator
-                keys: t (type: tinyint), si (type: smallint), i (type: int), b 
(type: bigint), f (type: float)
-                mode: hash
-                outputColumnNames: _col0, _col1, _col2, _col3, _col4
-                Statistics: Num rows: 4442 Data size: 106611 Basic stats: 
COMPLETE Column stats: NONE
-                Reduce Output Operator
-                  key expressions: _col0 (type: tinyint), _col1 (type: 
smallint), _col2 (type: int), _col3 (type: bigint), _col4 (type: float)
-                  sort order: +++++
-                  Map-reduce partition columns: _col0 (type: tinyint), _col1 
(type: smallint), _col2 (type: int), _col3 (type: bigint), _col4 (type: float)
-                  Statistics: Num rows: 4442 Data size: 106611 Basic stats: 
COMPLETE Column stats: NONE
-      Reduce Operator Tree:
-        Group By Operator
-          keys: KEY._col0 (type: tinyint), KEY._col1 (type: smallint), 
KEY._col2 (type: int), KEY._col3 (type: bigint), KEY._col4 (type: float)
-          mode: mergepartial
-          outputColumnNames: _col0, _col1, _col2, _col3, _col4
-          Statistics: Num rows: 2221 Data size: 53305 Basic stats: COMPLETE 
Column stats: NONE
-          Select Operator
-            expressions: _col1 (type: smallint), _col2 (type: int), _col3 
(type: bigint), _col4 (type: float), _col0 (type: tinyint)
-            outputColumnNames: _col0, _col1, _col2, _col3, _col4
-            Statistics: Num rows: 2221 Data size: 53305 Basic stats: COMPLETE 
Column stats: NONE
-            File Output Operator
-              compressed: false
-              Statistics: Num rows: 2221 Data size: 53305 Basic stats: 
COMPLETE Column stats: NONE
-              table:
-                  input format: org.apache.hadoop.mapred.TextInputFormat
-                  output format: 
org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-                  serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-                  name: default.over1k_part2
-
-  Stage: Stage-0
-    Move Operator
-      tables:
-          partition:
-            ds foo
-            t 
-          replace: true
-          table:
-              input format: org.apache.hadoop.mapred.TextInputFormat
-              output format: 
org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-              serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-              name: default.over1k_part2
-
-  Stage: Stage-2
-    Stats-Aggr Operator
-
-PREHOOK: query: -- tests for HIVE-8162, only partition column 't' should be in 
last RS operator
-explain insert overwrite table over1k_part2 partition(ds="foo",t) select 
si,i,b,f,t from over1k where t is null or t=27 group by si,i,b,f,t
-PREHOOK: type: QUERY
-POSTHOOK: query: -- tests for HIVE-8162, only partition column 't' should be 
in last RS operator
-explain insert overwrite table over1k_part2 partition(ds="foo",t) select 
si,i,b,f,t from over1k where t is null or t=27 group by si,i,b,f,t
-POSTHOOK: type: QUERY
-STAGE DEPENDENCIES:
-  Stage-1 is a root stage
-  Stage-0 depends on stages: Stage-1
-  Stage-2 depends on stages: Stage-0
-
-STAGE PLANS:
-  Stage: Stage-1
-    Map Reduce
-      Map Operator Tree:
-          TableScan
-            alias: over1k
-            Statistics: Num rows: 4443 Data size: 106636 Basic stats: COMPLETE 
Column stats: NONE
-            Filter Operator
-              predicate: (t is null or (t = 27)) (type: boolean)
-              Statistics: Num rows: 4442 Data size: 106611 Basic stats: 
COMPLETE Column stats: NONE
-              Group By Operator
-                keys: t (type: tinyint), si (type: smallint), i (type: int), b 
(type: bigint), f (type: float)
-                mode: hash
-                outputColumnNames: _col0, _col1, _col2, _col3, _col4
-                Statistics: Num rows: 4442 Data size: 106611 Basic stats: 
COMPLETE Column stats: NONE
-                Reduce Output Operator
-                  key expressions: _col0 (type: tinyint), _col1 (type: 
smallint), _col2 (type: int), _col3 (type: bigint), _col4 (type: float)
-                  sort order: +++++
-                  Map-reduce partition columns: _col0 (type: tinyint)
-                  Statistics: Num rows: 4442 Data size: 106611 Basic stats: 
COMPLETE Column stats: NONE
-      Reduce Operator Tree:
-        Group By Operator
-          keys: KEY._col0 (type: tinyint), KEY._col1 (type: smallint), 
KEY._col2 (type: int), KEY._col3 (type: bigint), KEY._col4 (type: float)
-          mode: mergepartial
-          outputColumnNames: _col0, _col1, _col2, _col3, _col4
-          Statistics: Num rows: 2221 Data size: 53305 Basic stats: COMPLETE 
Column stats: NONE
-          Select Operator
-            expressions: _col1 (type: smallint), _col2 (type: int), _col3 
(type: bigint), _col4 (type: float), _col0 (type: tinyint)
-            outputColumnNames: _col0, _col1, _col2, _col3, _col4
-            Statistics: Num rows: 2221 Data size: 53305 Basic stats: COMPLETE 
Column stats: NONE
-            File Output Operator
-              compressed: false
-              Statistics: Num rows: 2221 Data size: 53305 Basic stats: 
COMPLETE Column stats: NONE
-              table:
-                  input format: org.apache.hadoop.mapred.TextInputFormat
-                  output format: 
org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-                  serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-                  name: default.over1k_part2
-
-  Stage: Stage-0
-    Move Operator
-      tables:
-          partition:
-            ds foo
-            t 
-          replace: true
-          table:
-              input format: org.apache.hadoop.mapred.TextInputFormat
-              output format: 
org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-              serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-              name: default.over1k_part2
-
-  Stage: Stage-2
-    Stats-Aggr Operator
-
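Per the HIVE-8162 comment above, the check is that only the dynamic partition column t appears in the final Reduce Output Operator's map-reduce partition columns, which this plan confirms ("Map-reduce partition columns: _col0 (type: tinyint)") even though all five group-by keys remain in the sort key. A hedged HiveQL sketch of re-running that scenario, assuming the same sort-dynamic-partition property as in the sketch above:

    -- hypothetical re-run, not part of the deleted .q.out
    set hive.optimize.sort.dynamic.partition=true;
    -- expectation per HIVE-8162: the last Reduce Output Operator lists only
    -- the dynamic partition column t under "Map-reduce partition columns"
    explain insert overwrite table over1k_part2 partition(ds="foo",t)
      select si,i,b,f,t from over1k where t is null or t=27 group by si,i,b,f,t;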
-PREHOOK: query: insert overwrite table over1k_part2 partition(ds="foo",t) 
select si,i,b,f,t from over1k where t is null or t=27 order by i
-PREHOOK: type: QUERY
-PREHOOK: Input: default@over1k
-PREHOOK: Output: default@over1k_part2@ds=foo
-POSTHOOK: query: insert overwrite table over1k_part2 partition(ds="foo",t) 
select si,i,b,f,t from over1k where t is null or t=27 order by i
-POSTHOOK: type: QUERY
-POSTHOOK: Input: default@over1k
-POSTHOOK: Output: default@over1k_part2@ds=foo/t=27
-POSTHOOK: Output: default@over1k_part2@ds=foo/t=__HIVE_DEFAULT_PARTITION__
-POSTHOOK: Lineage: over1k_part2 PARTITION(ds=foo,t=27).b SIMPLE 
[(over1k)over1k.FieldSchema(name:b, type:bigint, comment:null), ]
-POSTHOOK: Lineage: over1k_part2 PARTITION(ds=foo,t=27).f SIMPLE 
[(over1k)over1k.FieldSchema(name:f, type:float, comment:null), ]
-POSTHOOK: Lineage: over1k_part2 PARTITION(ds=foo,t=27).i SIMPLE 
[(over1k)over1k.FieldSchema(name:i, type:int, comment:null), ]
-POSTHOOK: Lineage: over1k_part2 PARTITION(ds=foo,t=27).si SIMPLE 
[(over1k)over1k.FieldSchema(name:si, type:smallint, comment:null), ]
-POSTHOOK: Lineage: over1k_part2 
PARTITION(ds=foo,t=__HIVE_DEFAULT_PARTITION__).b SIMPLE 
[(over1k)over1k.FieldSchema(name:b, type:bigint, comment:null), ]
-POSTHOOK: Lineage: over1k_part2 
PARTITION(ds=foo,t=__HIVE_DEFAULT_PARTITION__).f SIMPLE 
[(over1k)over1k.FieldSchema(name:f, type:float, comment:null), ]
-POSTHOOK: Lineage: over1k_part2 
PARTITION(ds=foo,t=__HIVE_DEFAULT_PARTITION__).i SIMPLE 
[(over1k)over1k.FieldSchema(name:i, type:int, comment:null), ]
-POSTHOOK: Lineage: over1k_part2 
PARTITION(ds=foo,t=__HIVE_DEFAULT_PARTITION__).si SIMPLE 
[(over1k)over1k.FieldSchema(name:si, type:smallint, comment:null), ]
-PREHOOK: query: desc formatted over1k_part2 partition(ds="foo",t=27)
-PREHOOK: type: DESCTABLE
-PREHOOK: Input: default@over1k_part2
-POSTHOOK: query: desc formatted over1k_part2 partition(ds="foo",t=27)
-POSTHOOK: type: DESCTABLE
-POSTHOOK: Input: default@over1k_part2
-# col_name             data_type               comment             
-                
-si                     smallint                                    
-i                      int                                         
-b                      bigint                                      
-f                      float                                       
-                
-# Partition Information                 
-# col_name             data_type               comment             
-                
-ds                     string                                      
-t                      tinyint                                     
-                
-# Detailed Partition Information                
-Partition Value:       [foo, 27]                
-Database:              default                  
-Table:                 over1k_part2             
-#### A masked pattern was here ####
-Partition Parameters:           
-       COLUMN_STATS_ACCURATE   {\"BASIC_STATS\":\"true\"}
-       numFiles                1                   
-       numRows                 16                  
-       rawDataSize             415                 
-       totalSize               431                 
-#### A masked pattern was here ####
-                
-# Storage Information           
-SerDe Library:         org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe      
 
-InputFormat:           org.apache.hadoop.mapred.TextInputFormat         
-OutputFormat:          
org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat       
-Compressed:            No                       
-Num Buckets:           -1                       
-Bucket Columns:        []                       
-Sort Columns:          []                       
-Storage Desc Params:            
-       serialization.format    1                   
-PREHOOK: query: desc formatted over1k_part2 
partition(ds="foo",t="__HIVE_DEFAULT_PARTITION__")
-PREHOOK: type: DESCTABLE
-PREHOOK: Input: default@over1k_part2
-POSTHOOK: query: desc formatted over1k_part2 
partition(ds="foo",t="__HIVE_DEFAULT_PARTITION__")
-POSTHOOK: type: DESCTABLE
-POSTHOOK: Input: default@over1k_part2
-# col_name             data_type               comment             
-                
-si                     smallint                                    
-i                      int                                         
-b                      bigint                                      
-f                      float                                       
-                
-# Partition Information                 
-# col_name             data_type               comment             
-                
-ds                     string                                      
-t                      tinyint                                     
-                
-# Detailed Partition Information                
-Partition Value:       [foo, __HIVE_DEFAULT_PARTITION__]        
-Database:              default                  
-Table:                 over1k_part2             
-#### A masked pattern was here ####
-Partition Parameters:           
-       COLUMN_STATS_ACCURATE   {\"BASIC_STATS\":\"true\"}
-       numFiles                1                   
-       numRows                 3                   
-       rawDataSize             78                  
-       totalSize               81                  
-#### A masked pattern was here ####
-                
-# Storage Information           
-SerDe Library:         org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe      
 
-InputFormat:           org.apache.hadoop.mapred.TextInputFormat         
-OutputFormat:          
org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat       
-Compressed:            No                       
-Num Buckets:           -1                       
-Bucket Columns:        []                       
-Sort Columns:          []                       
-Storage Desc Params:            
-       serialization.format    1                   
-PREHOOK: query: -- SORT_BEFORE_DIFF
-select * from over1k_part2
-PREHOOK: type: QUERY
-PREHOOK: Input: default@over1k_part2
-PREHOOK: Input: default@over1k_part2@ds=foo/t=27
-PREHOOK: Input: default@over1k_part2@ds=foo/t=__HIVE_DEFAULT_PARTITION__
-#### A masked pattern was here ####
-POSTHOOK: query: -- SORT_BEFORE_DIFF
-select * from over1k_part2
-POSTHOOK: type: QUERY
-POSTHOOK: Input: default@over1k_part2
-POSTHOOK: Input: default@over1k_part2@ds=foo/t=27
-POSTHOOK: Input: default@over1k_part2@ds=foo/t=__HIVE_DEFAULT_PARTITION__
-#### A masked pattern was here ####
-405    65536   4294967508      82.24   foo     27
-457    65570   4294967464      81.58   foo     27
-256    65599   4294967383      89.55   foo     27
-335    65617   4294967381      64.87   foo     27
-261    65619   4294967401      88.78   foo     27
-278    65622   4294967516      25.67   foo     27
-482    65624   4294967313      78.98   foo     27
-503    65628   4294967371      95.07   foo     27
-335    65636   4294967505      37.14   foo     27
-367    65675   4294967518      12.32   foo     27
-340    65677   4294967461      98.96   foo     27
-490    65680   4294967347      57.46   foo     27
-287    65708   4294967542      83.33   foo     27
-329    65778   4294967451      6.63    foo     27
-401    65779   4294967402      97.39   foo     27
-262    65787   4294967371      57.35   foo     27
-409    65536   4294967490      46.97   foo     NULL
-374    65560   4294967516      65.43   foo     NULL
-473    65720   4294967324      80.74   foo     NULL
-PREHOOK: query: select count(*) from over1k_part2
-PREHOOK: type: QUERY
-PREHOOK: Input: default@over1k_part2
-PREHOOK: Input: default@over1k_part2@ds=foo/t=27
-PREHOOK: Input: default@over1k_part2@ds=foo/t=__HIVE_DEFAULT_PARTITION__
-#### A masked pattern was here ####
-POSTHOOK: query: select count(*) from over1k_part2
-POSTHOOK: type: QUERY
-POSTHOOK: Input: default@over1k_part2
-POSTHOOK: Input: default@over1k_part2@ds=foo/t=27
-POSTHOOK: Input: default@over1k_part2@ds=foo/t=__HIVE_DEFAULT_PARTITION__
-#### A masked pattern was here ####
-19
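The count of 19 is consistent with the partition metadata shown earlier for this insert: numRows 16 for ds=foo/t=27 plus numRows 3 for ds=foo/t=__HIVE_DEFAULT_PARTITION__ (16 + 3 = 19), and with the 19 rows listed by the SORT_BEFORE_DIFF select above.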
-PREHOOK: query: insert overwrite table over1k_part2 partition(ds="foo",t) 
select si,i,b,f,t from over1k where t is null or t=27 order by i
-PREHOOK: type: QUERY
-PREHOOK: Input: default@over1k
-PREHOOK: Output: default@over1k_part2@ds=foo
-POSTHOOK: query: insert overwrite table over1k_part2 partition(ds="foo",t) 
select si,i,b,f,t from over1k where t is null or t=27 order by i
-POSTHOOK: type: QUERY
-POSTHOOK: Input: default@over1k
-POSTHOOK: Output: default@over1k_part2@ds=foo/t=27
-POSTHOOK: Output: default@over1k_part2@ds=foo/t=__HIVE_DEFAULT_PARTITION__
-POSTHOOK: Lineage: over1k_part2 PARTITION(ds=foo,t=27).b SIMPLE 
[(over1k)over1k.FieldSchema(name:b, type:bigint, comment:null), ]
-POSTHOOK: Lineage: over1k_part2 PARTITION(ds=foo,t=27).f SIMPLE 
[(over1k)over1k.FieldSchema(name:f, type:float, comment:null), ]
-POSTHOOK: Lineage: over1k_part2 PARTITION(ds=foo,t=27).i SIMPLE 
[(over1k)over1k.FieldSchema(name:i, type:int, comment:null), ]
-POSTHOOK: Lineage: over1k_part2 PARTITION(ds=foo,t=27).si SIMPLE 
[(over1k)over1k.FieldSchema(name:si, type:smallint, comment:null), ]
-POSTHOOK: Lineage: over1k_part2 
PARTITION(ds=foo,t=__HIVE_DEFAULT_PARTITION__).b SIMPLE 
[(over1k)over1k.FieldSchema(name:b, type:bigint, comment:null), ]
-POSTHOOK: Lineage: over1k_part2 
PARTITION(ds=foo,t=__HIVE_DEFAULT_PARTITION__).f SIMPLE 
[(over1k)over1k.FieldSchema(name:f, type:float, comment:null), ]
-POSTHOOK: Lineage: over1k_part2 
PARTITION(ds=foo,t=__HIVE_DEFAULT_PARTITION__).i SIMPLE 
[(over1k)over1k.FieldSchema(name:i, type:int, comment:null), ]
-POSTHOOK: Lineage: over1k_part2 
PARTITION(ds=foo,t=__HIVE_DEFAULT_PARTITION__).si SIMPLE 
[(over1k)over1k.FieldSchema(name:si, type:smallint, comment:null), ]
-PREHOOK: query: desc formatted over1k_part2 partition(ds="foo",t=27)
-PREHOOK: type: DESCTABLE
-PREHOOK: Input: default@over1k_part2
-POSTHOOK: query: desc formatted over1k_part2 partition(ds="foo",t=27)
-POSTHOOK: type: DESCTABLE
-POSTHOOK: Input: default@over1k_part2
-# col_name             data_type               comment             
-                
-si                     smallint                                    
-i                      int                                         
-b                      bigint                                      
-f                      float                                       
-                
-# Partition Information                 
-# c

<TRUNCATED>
