http://git-wip-us.apache.org/repos/asf/hive/blob/091fd962/ql/src/test/results/clientpositive/llap/insert_into_default_keyword.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/llap/insert_into_default_keyword.q.out b/ql/src/test/results/clientpositive/llap/insert_into_default_keyword.q.out
deleted file mode 100644
index addef62..0000000
--- a/ql/src/test/results/clientpositive/llap/insert_into_default_keyword.q.out
+++ /dev/null
@@ -1,3078 +0,0 @@
-PREHOOK: query: DROP TABLE insert_into1
-PREHOOK: type: DROPTABLE
-POSTHOOK: query: DROP TABLE insert_into1
-POSTHOOK: type: DROPTABLE
-PREHOOK: query: CREATE TABLE insert_into1 (key int, value string)
-    clustered by (key) into 2 buckets stored as orc TBLPROPERTIES ('transactional'='true')
-PREHOOK: type: CREATETABLE
-PREHOOK: Output: database:default
-PREHOOK: Output: default@insert_into1
-POSTHOOK: query: CREATE TABLE insert_into1 (key int, value string)
-    clustered by (key) into 2 buckets stored as orc TBLPROPERTIES ('transactional'='true')
-POSTHOOK: type: CREATETABLE
-POSTHOOK: Output: database:default
-POSTHOOK: Output: default@insert_into1
-PREHOOK: query: EXPLAIN INSERT INTO TABLE insert_into1 values(default, DEFAULT)
-PREHOOK: type: QUERY
-POSTHOOK: query: EXPLAIN INSERT INTO TABLE insert_into1 values(default, DEFAULT)
-POSTHOOK: type: QUERY
-STAGE DEPENDENCIES:
-  Stage-1 is a root stage
-  Stage-2 depends on stages: Stage-1
-  Stage-0 depends on stages: Stage-2
-  Stage-3 depends on stages: Stage-0
-
-STAGE PLANS:
-  Stage: Stage-1
-    Tez
-#### A masked pattern was here ####
-      Edges:
-        Reducer 2 <- Map 1 (CUSTOM_SIMPLE_EDGE)
-        Reducer 3 <- Reducer 2 (CUSTOM_SIMPLE_EDGE)
-#### A masked pattern was here ####
-      Vertices:
-        Map 1 
-            Map Operator Tree:
-                TableScan
-                  alias: _dummy_table
-                  Row Limit Per Split: 1
-                  Statistics: Num rows: 1 Data size: 10 Basic stats: COMPLETE 
Column stats: COMPLETE
-                  Select Operator
-                    expressions: array(const struct(null,null)) (type: 
array<struct<col1:void,col2:void>>)
-                    outputColumnNames: _col0
-                    Statistics: Num rows: 1 Data size: 48 Basic stats: 
COMPLETE Column stats: COMPLETE
-                    UDTF Operator
-                      Statistics: Num rows: 1 Data size: 48 Basic stats: 
COMPLETE Column stats: COMPLETE
-                      function name: inline
-                      Select Operator
-                        expressions: null (type: void)
-                        outputColumnNames: _col0
-                        Statistics: Num rows: 1 Data size: 8 Basic stats: 
COMPLETE Column stats: COMPLETE
-                        Reduce Output Operator
-                          sort order: 
-                          Map-reduce partition columns: UDFToInteger(_col0) 
(type: int)
-                          Statistics: Num rows: 1 Data size: 8 Basic stats: 
COMPLETE Column stats: COMPLETE
-                          value expressions: _col0 (type: void)
-            Execution mode: llap
-            LLAP IO: no inputs
-        Reducer 2 
-            Execution mode: llap
-            Reduce Operator Tree:
-              Select Operator
-                expressions: UDFToInteger(VALUE._col0) (type: int), 
UDFToString(VALUE._col0) (type: string)
-                outputColumnNames: _col0, _col1
-                Statistics: Num rows: 1 Data size: 188 Basic stats: COMPLETE 
Column stats: COMPLETE
-                File Output Operator
-                  compressed: false
-                  Statistics: Num rows: 1 Data size: 188 Basic stats: COMPLETE 
Column stats: COMPLETE
-                  table:
-                      input format: 
org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
-                      output format: 
org.apache.hadoop.hive.ql.io.orc.OrcOutputFormat
-                      serde: org.apache.hadoop.hive.ql.io.orc.OrcSerde
-                      name: default.insert_into1
-                  Write Type: INSERT
-                Select Operator
-                  expressions: _col0 (type: int), _col1 (type: string)
-                  outputColumnNames: key, value
-                  Statistics: Num rows: 1 Data size: 188 Basic stats: COMPLETE 
Column stats: COMPLETE
-                  Group By Operator
-                    aggregations: compute_stats(key, 'hll'), 
compute_stats(value, 'hll')
-                    mode: hash
-                    outputColumnNames: _col0, _col1
-                    Statistics: Num rows: 1 Data size: 864 Basic stats: 
COMPLETE Column stats: COMPLETE
-                    Reduce Output Operator
-                      sort order: 
-                      Statistics: Num rows: 1 Data size: 864 Basic stats: 
COMPLETE Column stats: COMPLETE
-                      value expressions: _col0 (type: struct<columntype:string,min:bigint,max:bigint,countnulls:bigint,bitvector:binary>), _col1 (type: struct<columntype:string,maxlength:bigint,sumlength:bigint,count:bigint,countnulls:bigint,bitvector:binary>)
-        Reducer 3 
-            Execution mode: llap
-            Reduce Operator Tree:
-              Group By Operator
-                aggregations: compute_stats(VALUE._col0), 
compute_stats(VALUE._col1)
-                mode: mergepartial
-                outputColumnNames: _col0, _col1
-                Statistics: Num rows: 1 Data size: 880 Basic stats: COMPLETE 
Column stats: COMPLETE
-                File Output Operator
-                  compressed: false
-                  Statistics: Num rows: 1 Data size: 880 Basic stats: COMPLETE 
Column stats: COMPLETE
-                  table:
-                      input format: 
org.apache.hadoop.mapred.SequenceFileInputFormat
-                      output format: 
org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
-                      serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-
-  Stage: Stage-2
-    Dependency Collection
-
-  Stage: Stage-0
-    Move Operator
-      tables:
-          replace: false
-          table:
-              input format: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
-              output format: org.apache.hadoop.hive.ql.io.orc.OrcOutputFormat
-              serde: org.apache.hadoop.hive.ql.io.orc.OrcSerde
-              name: default.insert_into1
-          Write Type: INSERT
-
-  Stage: Stage-3
-    Stats Work
-      Basic Stats Work:
-      Column Stats Desc:
-          Columns: key, value
-          Column Types: int, string
-          Table: default.insert_into1
-
-PREHOOK: query: INSERT INTO TABLE insert_into1 values(default, DEFAULT)
-PREHOOK: type: QUERY
-PREHOOK: Input: _dummy_database@_dummy_table
-PREHOOK: Output: default@insert_into1
-POSTHOOK: query: INSERT INTO TABLE insert_into1 values(default, DEFAULT)
-POSTHOOK: type: QUERY
-POSTHOOK: Input: _dummy_database@_dummy_table
-POSTHOOK: Output: default@insert_into1
-POSTHOOK: Lineage: insert_into1.key EXPRESSION []
-POSTHOOK: Lineage: insert_into1.value EXPRESSION []
-PREHOOK: query: SELECT * from insert_into1
-PREHOOK: type: QUERY
-PREHOOK: Input: default@insert_into1
-#### A masked pattern was here ####
-POSTHOOK: query: SELECT * from insert_into1
-POSTHOOK: type: QUERY
-POSTHOOK: Input: default@insert_into1
-#### A masked pattern was here ####
-NULL   NULL
-PREHOOK: query: TRUNCATE table insert_into1
-PREHOOK: type: TRUNCATETABLE
-PREHOOK: Output: default@insert_into1
-POSTHOOK: query: TRUNCATE table insert_into1
-POSTHOOK: type: TRUNCATETABLE
-POSTHOOK: Output: default@insert_into1
-PREHOOK: query: EXPLAIN INSERT INTO TABLE insert_into1 values(234, dEfAULt)
-PREHOOK: type: QUERY
-POSTHOOK: query: EXPLAIN INSERT INTO TABLE insert_into1 values(234, dEfAULt)
-POSTHOOK: type: QUERY
-STAGE DEPENDENCIES:
-  Stage-1 is a root stage
-  Stage-2 depends on stages: Stage-1
-  Stage-0 depends on stages: Stage-2
-  Stage-3 depends on stages: Stage-0
-
-STAGE PLANS:
-  Stage: Stage-1
-    Tez
-#### A masked pattern was here ####
-      Edges:
-        Reducer 2 <- Map 1 (CUSTOM_SIMPLE_EDGE)
-        Reducer 3 <- Reducer 2 (CUSTOM_SIMPLE_EDGE)
-#### A masked pattern was here ####
-      Vertices:
-        Map 1 
-            Map Operator Tree:
-                TableScan
-                  alias: _dummy_table
-                  Row Limit Per Split: 1
-                  Statistics: Num rows: 1 Data size: 10 Basic stats: COMPLETE 
Column stats: COMPLETE
-                  Select Operator
-                    expressions: array(const struct(234,null)) (type: 
array<struct<col1:int,col2:void>>)
-                    outputColumnNames: _col0
-                    Statistics: Num rows: 1 Data size: 48 Basic stats: 
COMPLETE Column stats: COMPLETE
-                    UDTF Operator
-                      Statistics: Num rows: 1 Data size: 48 Basic stats: 
COMPLETE Column stats: COMPLETE
-                      function name: inline
-                      Select Operator
-                        expressions: col1 (type: int), null (type: void)
-                        outputColumnNames: _col0, _col1
-                        Statistics: Num rows: 1 Data size: 4 Basic stats: 
COMPLETE Column stats: COMPLETE
-                        Reduce Output Operator
-                          sort order: 
-                          Map-reduce partition columns: _col0 (type: int)
-                          Statistics: Num rows: 1 Data size: 4 Basic stats: 
COMPLETE Column stats: COMPLETE
-                          value expressions: _col0 (type: int), _col1 (type: 
void)
-            Execution mode: llap
-            LLAP IO: no inputs
-        Reducer 2 
-            Execution mode: llap
-            Reduce Operator Tree:
-              Select Operator
-                expressions: VALUE._col0 (type: int), UDFToString(VALUE._col1) 
(type: string)
-                outputColumnNames: _col0, _col1
-                Statistics: Num rows: 1 Data size: 184 Basic stats: COMPLETE 
Column stats: COMPLETE
-                File Output Operator
-                  compressed: false
-                  Statistics: Num rows: 1 Data size: 184 Basic stats: COMPLETE 
Column stats: COMPLETE
-                  table:
-                      input format: 
org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
-                      output format: 
org.apache.hadoop.hive.ql.io.orc.OrcOutputFormat
-                      serde: org.apache.hadoop.hive.ql.io.orc.OrcSerde
-                      name: default.insert_into1
-                  Write Type: INSERT
-                Select Operator
-                  expressions: _col0 (type: int), _col1 (type: string)
-                  outputColumnNames: key, value
-                  Statistics: Num rows: 1 Data size: 184 Basic stats: COMPLETE 
Column stats: COMPLETE
-                  Group By Operator
-                    aggregations: compute_stats(key, 'hll'), 
compute_stats(value, 'hll')
-                    mode: hash
-                    outputColumnNames: _col0, _col1
-                    Statistics: Num rows: 1 Data size: 864 Basic stats: 
COMPLETE Column stats: COMPLETE
-                    Reduce Output Operator
-                      sort order: 
-                      Statistics: Num rows: 1 Data size: 864 Basic stats: 
COMPLETE Column stats: COMPLETE
-                      value expressions: _col0 (type: struct<columntype:string,min:bigint,max:bigint,countnulls:bigint,bitvector:binary>), _col1 (type: struct<columntype:string,maxlength:bigint,sumlength:bigint,count:bigint,countnulls:bigint,bitvector:binary>)
-        Reducer 3 
-            Execution mode: llap
-            Reduce Operator Tree:
-              Group By Operator
-                aggregations: compute_stats(VALUE._col0), 
compute_stats(VALUE._col1)
-                mode: mergepartial
-                outputColumnNames: _col0, _col1
-                Statistics: Num rows: 1 Data size: 880 Basic stats: COMPLETE 
Column stats: COMPLETE
-                File Output Operator
-                  compressed: false
-                  Statistics: Num rows: 1 Data size: 880 Basic stats: COMPLETE 
Column stats: COMPLETE
-                  table:
-                      input format: 
org.apache.hadoop.mapred.SequenceFileInputFormat
-                      output format: 
org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
-                      serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-
-  Stage: Stage-2
-    Dependency Collection
-
-  Stage: Stage-0
-    Move Operator
-      tables:
-          replace: false
-          table:
-              input format: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
-              output format: org.apache.hadoop.hive.ql.io.orc.OrcOutputFormat
-              serde: org.apache.hadoop.hive.ql.io.orc.OrcSerde
-              name: default.insert_into1
-          Write Type: INSERT
-
-  Stage: Stage-3
-    Stats Work
-      Basic Stats Work:
-      Column Stats Desc:
-          Columns: key, value
-          Column Types: int, string
-          Table: default.insert_into1
-
-PREHOOK: query: INSERT INTO TABLE insert_into1 values(234, dEfAULt)
-PREHOOK: type: QUERY
-PREHOOK: Input: _dummy_database@_dummy_table
-PREHOOK: Output: default@insert_into1
-POSTHOOK: query: INSERT INTO TABLE insert_into1 values(234, dEfAULt)
-POSTHOOK: type: QUERY
-POSTHOOK: Input: _dummy_database@_dummy_table
-POSTHOOK: Output: default@insert_into1
-POSTHOOK: Lineage: insert_into1.key SCRIPT []
-POSTHOOK: Lineage: insert_into1.value EXPRESSION []
-PREHOOK: query: SELECT * from insert_into1
-PREHOOK: type: QUERY
-PREHOOK: Input: default@insert_into1
-#### A masked pattern was here ####
-POSTHOOK: query: SELECT * from insert_into1
-POSTHOOK: type: QUERY
-POSTHOOK: Input: default@insert_into1
-#### A masked pattern was here ####
-234    NULL
-PREHOOK: query: TRUNCATE table insert_into1
-PREHOOK: type: TRUNCATETABLE
-PREHOOK: Output: default@insert_into1
-POSTHOOK: query: TRUNCATE table insert_into1
-POSTHOOK: type: TRUNCATETABLE
-POSTHOOK: Output: default@insert_into1
-PREHOOK: query: explain insert into insert_into1 values(default, 3),(2,default)
-PREHOOK: type: QUERY
-POSTHOOK: query: explain insert into insert_into1 values(default, 3),(2,default)
-POSTHOOK: type: QUERY
-STAGE DEPENDENCIES:
-  Stage-1 is a root stage
-  Stage-2 depends on stages: Stage-1
-  Stage-0 depends on stages: Stage-2
-  Stage-3 depends on stages: Stage-0
-
-STAGE PLANS:
-  Stage: Stage-1
-    Tez
-#### A masked pattern was here ####
-      Edges:
-        Reducer 2 <- Map 1 (CUSTOM_SIMPLE_EDGE)
-        Reducer 3 <- Reducer 2 (CUSTOM_SIMPLE_EDGE)
-#### A masked pattern was here ####
-      Vertices:
-        Map 1 
-            Map Operator Tree:
-                TableScan
-                  alias: _dummy_table
-                  Row Limit Per Split: 1
-                  Statistics: Num rows: 1 Data size: 10 Basic stats: COMPLETE 
Column stats: COMPLETE
-                  Select Operator
-                    expressions: array(const struct(null,3),const 
struct(2,null)) (type: array<struct<col1:int,col2:int>>)
-                    outputColumnNames: _col0
-                    Statistics: Num rows: 1 Data size: 56 Basic stats: 
COMPLETE Column stats: COMPLETE
-                    UDTF Operator
-                      Statistics: Num rows: 1 Data size: 56 Basic stats: 
COMPLETE Column stats: COMPLETE
-                      function name: inline
-                      Select Operator
-                        expressions: col1 (type: int), col2 (type: int)
-                        outputColumnNames: _col0, _col1
-                        Statistics: Num rows: 1 Data size: 8 Basic stats: 
COMPLETE Column stats: COMPLETE
-                        Reduce Output Operator
-                          sort order: 
-                          Map-reduce partition columns: _col0 (type: int)
-                          Statistics: Num rows: 1 Data size: 8 Basic stats: 
COMPLETE Column stats: COMPLETE
-                          value expressions: _col0 (type: int), _col1 (type: 
int)
-            Execution mode: llap
-            LLAP IO: no inputs
-        Reducer 2 
-            Execution mode: llap
-            Reduce Operator Tree:
-              Select Operator
-                expressions: VALUE._col0 (type: int), UDFToString(VALUE._col1) 
(type: string)
-                outputColumnNames: _col0, _col1
-                Statistics: Num rows: 1 Data size: 184 Basic stats: COMPLETE 
Column stats: COMPLETE
-                File Output Operator
-                  compressed: false
-                  Statistics: Num rows: 1 Data size: 184 Basic stats: COMPLETE 
Column stats: COMPLETE
-                  table:
-                      input format: 
org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
-                      output format: 
org.apache.hadoop.hive.ql.io.orc.OrcOutputFormat
-                      serde: org.apache.hadoop.hive.ql.io.orc.OrcSerde
-                      name: default.insert_into1
-                  Write Type: INSERT
-                Select Operator
-                  expressions: _col0 (type: int), _col1 (type: string)
-                  outputColumnNames: key, value
-                  Statistics: Num rows: 1 Data size: 184 Basic stats: COMPLETE 
Column stats: COMPLETE
-                  Group By Operator
-                    aggregations: compute_stats(key, 'hll'), 
compute_stats(value, 'hll')
-                    mode: hash
-                    outputColumnNames: _col0, _col1
-                    Statistics: Num rows: 1 Data size: 864 Basic stats: 
COMPLETE Column stats: COMPLETE
-                    Reduce Output Operator
-                      sort order: 
-                      Statistics: Num rows: 1 Data size: 864 Basic stats: 
COMPLETE Column stats: COMPLETE
-                      value expressions: _col0 (type: struct<columntype:string,min:bigint,max:bigint,countnulls:bigint,bitvector:binary>), _col1 (type: struct<columntype:string,maxlength:bigint,sumlength:bigint,count:bigint,countnulls:bigint,bitvector:binary>)
-        Reducer 3 
-            Execution mode: llap
-            Reduce Operator Tree:
-              Group By Operator
-                aggregations: compute_stats(VALUE._col0), 
compute_stats(VALUE._col1)
-                mode: mergepartial
-                outputColumnNames: _col0, _col1
-                Statistics: Num rows: 1 Data size: 880 Basic stats: COMPLETE 
Column stats: COMPLETE
-                File Output Operator
-                  compressed: false
-                  Statistics: Num rows: 1 Data size: 880 Basic stats: COMPLETE 
Column stats: COMPLETE
-                  table:
-                      input format: 
org.apache.hadoop.mapred.SequenceFileInputFormat
-                      output format: 
org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
-                      serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-
-  Stage: Stage-2
-    Dependency Collection
-
-  Stage: Stage-0
-    Move Operator
-      tables:
-          replace: false
-          table:
-              input format: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
-              output format: org.apache.hadoop.hive.ql.io.orc.OrcOutputFormat
-              serde: org.apache.hadoop.hive.ql.io.orc.OrcSerde
-              name: default.insert_into1
-          Write Type: INSERT
-
-  Stage: Stage-3
-    Stats Work
-      Basic Stats Work:
-      Column Stats Desc:
-          Columns: key, value
-          Column Types: int, string
-          Table: default.insert_into1
-
-PREHOOK: query: insert into insert_into1 values(default, 3),(2,default)
-PREHOOK: type: QUERY
-PREHOOK: Input: _dummy_database@_dummy_table
-PREHOOK: Output: default@insert_into1
-POSTHOOK: query: insert into insert_into1 values(default, 3),(2,default)
-POSTHOOK: type: QUERY
-POSTHOOK: Input: _dummy_database@_dummy_table
-POSTHOOK: Output: default@insert_into1
-POSTHOOK: Lineage: insert_into1.key SCRIPT []
-POSTHOOK: Lineage: insert_into1.value SCRIPT []
-PREHOOK: query: select * from insert_into1
-PREHOOK: type: QUERY
-PREHOOK: Input: default@insert_into1
-#### A masked pattern was here ####
-POSTHOOK: query: select * from insert_into1
-POSTHOOK: type: QUERY
-POSTHOOK: Input: default@insert_into1
-#### A masked pattern was here ####
-2      NULL
-NULL   3
-PREHOOK: query: TRUNCATE table insert_into1
-PREHOOK: type: TRUNCATETABLE
-PREHOOK: Output: default@insert_into1
-POSTHOOK: query: TRUNCATE table insert_into1
-POSTHOOK: type: TRUNCATETABLE
-POSTHOOK: Output: default@insert_into1
-PREHOOK: query: EXPLAIN INSERT INTO TABLE insert_into1(key) values(default)
-PREHOOK: type: QUERY
-POSTHOOK: query: EXPLAIN INSERT INTO TABLE insert_into1(key) values(default)
-POSTHOOK: type: QUERY
-STAGE DEPENDENCIES:
-  Stage-1 is a root stage
-  Stage-2 depends on stages: Stage-1
-  Stage-0 depends on stages: Stage-2
-  Stage-3 depends on stages: Stage-0
-
-STAGE PLANS:
-  Stage: Stage-1
-    Tez
-#### A masked pattern was here ####
-      Edges:
-        Reducer 2 <- Map 1 (CUSTOM_SIMPLE_EDGE)
-        Reducer 3 <- Reducer 2 (CUSTOM_SIMPLE_EDGE)
-#### A masked pattern was here ####
-      Vertices:
-        Map 1 
-            Map Operator Tree:
-                TableScan
-                  alias: _dummy_table
-                  Row Limit Per Split: 1
-                  Statistics: Num rows: 1 Data size: 10 Basic stats: COMPLETE 
Column stats: COMPLETE
-                  Select Operator
-                    expressions: array(const struct(null)) (type: 
array<struct<col1:void>>)
-                    outputColumnNames: _col0
-                    Statistics: Num rows: 1 Data size: 48 Basic stats: 
COMPLETE Column stats: COMPLETE
-                    UDTF Operator
-                      Statistics: Num rows: 1 Data size: 48 Basic stats: 
COMPLETE Column stats: COMPLETE
-                      function name: inline
-                      Select Operator
-                        expressions: null (type: void), null (type: string)
-                        outputColumnNames: _col0, _col1
-                        Statistics: Num rows: 1 Data size: 88 Basic stats: 
COMPLETE Column stats: COMPLETE
-                        Reduce Output Operator
-                          sort order: 
-                          Map-reduce partition columns: UDFToInteger(_col0) 
(type: int)
-                          Statistics: Num rows: 1 Data size: 88 Basic stats: 
COMPLETE Column stats: COMPLETE
-                          value expressions: _col0 (type: void), _col1 (type: 
string)
-            Execution mode: llap
-            LLAP IO: no inputs
-        Reducer 2 
-            Execution mode: llap
-            Reduce Operator Tree:
-              Select Operator
-                expressions: UDFToInteger(VALUE._col0) (type: int), 
VALUE._col1 (type: string)
-                outputColumnNames: _col0, _col1
-                Statistics: Num rows: 1 Data size: 88 Basic stats: COMPLETE 
Column stats: COMPLETE
-                File Output Operator
-                  compressed: false
-                  Statistics: Num rows: 1 Data size: 88 Basic stats: COMPLETE 
Column stats: COMPLETE
-                  table:
-                      input format: 
org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
-                      output format: 
org.apache.hadoop.hive.ql.io.orc.OrcOutputFormat
-                      serde: org.apache.hadoop.hive.ql.io.orc.OrcSerde
-                      name: default.insert_into1
-                  Write Type: INSERT
-                Select Operator
-                  expressions: _col0 (type: int), _col1 (type: string)
-                  outputColumnNames: key, value
-                  Statistics: Num rows: 1 Data size: 88 Basic stats: COMPLETE 
Column stats: COMPLETE
-                  Group By Operator
-                    aggregations: compute_stats(key, 'hll'), 
compute_stats(value, 'hll')
-                    mode: hash
-                    outputColumnNames: _col0, _col1
-                    Statistics: Num rows: 1 Data size: 864 Basic stats: 
COMPLETE Column stats: COMPLETE
-                    Reduce Output Operator
-                      sort order: 
-                      Statistics: Num rows: 1 Data size: 864 Basic stats: 
COMPLETE Column stats: COMPLETE
-                      value expressions: _col0 (type: struct<columntype:string,min:bigint,max:bigint,countnulls:bigint,bitvector:binary>), _col1 (type: struct<columntype:string,maxlength:bigint,sumlength:bigint,count:bigint,countnulls:bigint,bitvector:binary>)
-        Reducer 3 
-            Execution mode: llap
-            Reduce Operator Tree:
-              Group By Operator
-                aggregations: compute_stats(VALUE._col0), 
compute_stats(VALUE._col1)
-                mode: mergepartial
-                outputColumnNames: _col0, _col1
-                Statistics: Num rows: 1 Data size: 880 Basic stats: COMPLETE 
Column stats: COMPLETE
-                File Output Operator
-                  compressed: false
-                  Statistics: Num rows: 1 Data size: 880 Basic stats: COMPLETE 
Column stats: COMPLETE
-                  table:
-                      input format: 
org.apache.hadoop.mapred.SequenceFileInputFormat
-                      output format: 
org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
-                      serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-
-  Stage: Stage-2
-    Dependency Collection
-
-  Stage: Stage-0
-    Move Operator
-      tables:
-          replace: false
-          table:
-              input format: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
-              output format: org.apache.hadoop.hive.ql.io.orc.OrcOutputFormat
-              serde: org.apache.hadoop.hive.ql.io.orc.OrcSerde
-              name: default.insert_into1
-          Write Type: INSERT
-
-  Stage: Stage-3
-    Stats Work
-      Basic Stats Work:
-      Column Stats Desc:
-          Columns: key, value
-          Column Types: int, string
-          Table: default.insert_into1
-
-PREHOOK: query: INSERT INTO TABLE insert_into1(key) values(default)
-PREHOOK: type: QUERY
-PREHOOK: Input: _dummy_database@_dummy_table
-PREHOOK: Output: default@insert_into1
-POSTHOOK: query: INSERT INTO TABLE insert_into1(key) values(default)
-POSTHOOK: type: QUERY
-POSTHOOK: Input: _dummy_database@_dummy_table
-POSTHOOK: Output: default@insert_into1
-POSTHOOK: Lineage: insert_into1.key EXPRESSION []
-POSTHOOK: Lineage: insert_into1.value SIMPLE []
-PREHOOK: query: select * from insert_into1
-PREHOOK: type: QUERY
-PREHOOK: Input: default@insert_into1
-#### A masked pattern was here ####
-POSTHOOK: query: select * from insert_into1
-POSTHOOK: type: QUERY
-POSTHOOK: Input: default@insert_into1
-#### A masked pattern was here ####
-NULL   NULL
-PREHOOK: query: TRUNCATE table insert_into1
-PREHOOK: type: TRUNCATETABLE
-PREHOOK: Output: default@insert_into1
-POSTHOOK: query: TRUNCATE table insert_into1
-POSTHOOK: type: TRUNCATETABLE
-POSTHOOK: Output: default@insert_into1
-PREHOOK: query: EXPLAIN INSERT INTO TABLE insert_into1(key, value) values(2,default)
-PREHOOK: type: QUERY
-POSTHOOK: query: EXPLAIN INSERT INTO TABLE insert_into1(key, value) values(2,default)
-POSTHOOK: type: QUERY
-STAGE DEPENDENCIES:
-  Stage-1 is a root stage
-  Stage-2 depends on stages: Stage-1
-  Stage-0 depends on stages: Stage-2
-  Stage-3 depends on stages: Stage-0
-
-STAGE PLANS:
-  Stage: Stage-1
-    Tez
-#### A masked pattern was here ####
-      Edges:
-        Reducer 2 <- Map 1 (CUSTOM_SIMPLE_EDGE)
-        Reducer 3 <- Reducer 2 (CUSTOM_SIMPLE_EDGE)
-#### A masked pattern was here ####
-      Vertices:
-        Map 1 
-            Map Operator Tree:
-                TableScan
-                  alias: _dummy_table
-                  Row Limit Per Split: 1
-                  Statistics: Num rows: 1 Data size: 10 Basic stats: COMPLETE 
Column stats: COMPLETE
-                  Select Operator
-                    expressions: array(const struct(2,null)) (type: 
array<struct<col1:int,col2:void>>)
-                    outputColumnNames: _col0
-                    Statistics: Num rows: 1 Data size: 48 Basic stats: 
COMPLETE Column stats: COMPLETE
-                    UDTF Operator
-                      Statistics: Num rows: 1 Data size: 48 Basic stats: 
COMPLETE Column stats: COMPLETE
-                      function name: inline
-                      Select Operator
-                        expressions: col1 (type: int), null (type: void)
-                        outputColumnNames: _col0, _col1
-                        Statistics: Num rows: 1 Data size: 4 Basic stats: 
COMPLETE Column stats: COMPLETE
-                        Reduce Output Operator
-                          sort order: 
-                          Map-reduce partition columns: _col0 (type: int)
-                          Statistics: Num rows: 1 Data size: 4 Basic stats: 
COMPLETE Column stats: COMPLETE
-                          value expressions: _col0 (type: int), _col1 (type: 
void)
-            Execution mode: llap
-            LLAP IO: no inputs
-        Reducer 2 
-            Execution mode: llap
-            Reduce Operator Tree:
-              Select Operator
-                expressions: VALUE._col0 (type: int), UDFToString(VALUE._col1) 
(type: string)
-                outputColumnNames: _col0, _col1
-                Statistics: Num rows: 1 Data size: 184 Basic stats: COMPLETE 
Column stats: COMPLETE
-                File Output Operator
-                  compressed: false
-                  Statistics: Num rows: 1 Data size: 184 Basic stats: COMPLETE 
Column stats: COMPLETE
-                  table:
-                      input format: 
org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
-                      output format: 
org.apache.hadoop.hive.ql.io.orc.OrcOutputFormat
-                      serde: org.apache.hadoop.hive.ql.io.orc.OrcSerde
-                      name: default.insert_into1
-                  Write Type: INSERT
-                Select Operator
-                  expressions: _col0 (type: int), _col1 (type: string)
-                  outputColumnNames: key, value
-                  Statistics: Num rows: 1 Data size: 184 Basic stats: COMPLETE 
Column stats: COMPLETE
-                  Group By Operator
-                    aggregations: compute_stats(key, 'hll'), 
compute_stats(value, 'hll')
-                    mode: hash
-                    outputColumnNames: _col0, _col1
-                    Statistics: Num rows: 1 Data size: 864 Basic stats: 
COMPLETE Column stats: COMPLETE
-                    Reduce Output Operator
-                      sort order: 
-                      Statistics: Num rows: 1 Data size: 864 Basic stats: 
COMPLETE Column stats: COMPLETE
-                      value expressions: _col0 (type: struct<columntype:string,min:bigint,max:bigint,countnulls:bigint,bitvector:binary>), _col1 (type: struct<columntype:string,maxlength:bigint,sumlength:bigint,count:bigint,countnulls:bigint,bitvector:binary>)
-        Reducer 3 
-            Execution mode: llap
-            Reduce Operator Tree:
-              Group By Operator
-                aggregations: compute_stats(VALUE._col0), 
compute_stats(VALUE._col1)
-                mode: mergepartial
-                outputColumnNames: _col0, _col1
-                Statistics: Num rows: 1 Data size: 880 Basic stats: COMPLETE 
Column stats: COMPLETE
-                File Output Operator
-                  compressed: false
-                  Statistics: Num rows: 1 Data size: 880 Basic stats: COMPLETE 
Column stats: COMPLETE
-                  table:
-                      input format: 
org.apache.hadoop.mapred.SequenceFileInputFormat
-                      output format: 
org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
-                      serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-
-  Stage: Stage-2
-    Dependency Collection
-
-  Stage: Stage-0
-    Move Operator
-      tables:
-          replace: false
-          table:
-              input format: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
-              output format: org.apache.hadoop.hive.ql.io.orc.OrcOutputFormat
-              serde: org.apache.hadoop.hive.ql.io.orc.OrcSerde
-              name: default.insert_into1
-          Write Type: INSERT
-
-  Stage: Stage-3
-    Stats Work
-      Basic Stats Work:
-      Column Stats Desc:
-          Columns: key, value
-          Column Types: int, string
-          Table: default.insert_into1
-
-PREHOOK: query: INSERT INTO TABLE insert_into1(key, value) values(2,default)
-PREHOOK: type: QUERY
-PREHOOK: Input: _dummy_database@_dummy_table
-PREHOOK: Output: default@insert_into1
-POSTHOOK: query: INSERT INTO TABLE insert_into1(key, value) values(2,default)
-POSTHOOK: type: QUERY
-POSTHOOK: Input: _dummy_database@_dummy_table
-POSTHOOK: Output: default@insert_into1
-POSTHOOK: Lineage: insert_into1.key SCRIPT []
-POSTHOOK: Lineage: insert_into1.value EXPRESSION []
-PREHOOK: query: select * from insert_into1
-PREHOOK: type: QUERY
-PREHOOK: Input: default@insert_into1
-#### A masked pattern was here ####
-POSTHOOK: query: select * from insert_into1
-POSTHOOK: type: QUERY
-POSTHOOK: Input: default@insert_into1
-#### A masked pattern was here ####
-2      NULL
-PREHOOK: query: TRUNCATE table insert_into1
-PREHOOK: type: TRUNCATETABLE
-PREHOOK: Output: default@insert_into1
-POSTHOOK: query: TRUNCATE table insert_into1
-POSTHOOK: type: TRUNCATETABLE
-POSTHOOK: Output: default@insert_into1
-PREHOOK: query: DROP TABLE insert_into1
-PREHOOK: type: DROPTABLE
-PREHOOK: Input: default@insert_into1
-PREHOOK: Output: default@insert_into1
-POSTHOOK: query: DROP TABLE insert_into1
-POSTHOOK: type: DROPTABLE
-POSTHOOK: Input: default@insert_into1
-POSTHOOK: Output: default@insert_into1
-PREHOOK: query: CREATE TABLE insert_into1 (key int DEFAULT 1, value string)
-    clustered by (key) into 2 buckets stored as orc TBLPROPERTIES ('transactional'='true')
-PREHOOK: type: CREATETABLE
-PREHOOK: Output: database:default
-PREHOOK: Output: default@insert_into1
-POSTHOOK: query: CREATE TABLE insert_into1 (key int DEFAULT 1, value string)
-    clustered by (key) into 2 buckets stored as orc TBLPROPERTIES ('transactional'='true')
-POSTHOOK: type: CREATETABLE
-POSTHOOK: Output: database:default
-POSTHOOK: Output: default@insert_into1
-PREHOOK: query: EXPLAIN INSERT INTO TABLE insert_into1 values(default, DEFAULT)
-PREHOOK: type: QUERY
-POSTHOOK: query: EXPLAIN INSERT INTO TABLE insert_into1 values(default, DEFAULT)
-POSTHOOK: type: QUERY
-STAGE DEPENDENCIES:
-  Stage-1 is a root stage
-  Stage-2 depends on stages: Stage-1
-  Stage-0 depends on stages: Stage-2
-  Stage-3 depends on stages: Stage-0
-
-STAGE PLANS:
-  Stage: Stage-1
-    Tez
-#### A masked pattern was here ####
-      Edges:
-        Reducer 2 <- Map 1 (CUSTOM_SIMPLE_EDGE)
-        Reducer 3 <- Reducer 2 (CUSTOM_SIMPLE_EDGE)
-#### A masked pattern was here ####
-      Vertices:
-        Map 1 
-            Map Operator Tree:
-                TableScan
-                  alias: _dummy_table
-                  Row Limit Per Split: 1
-                  Statistics: Num rows: 1 Data size: 10 Basic stats: COMPLETE 
Column stats: COMPLETE
-                  Select Operator
-                    expressions: array(const struct(1,null)) (type: 
array<struct<col1:int,col2:void>>)
-                    outputColumnNames: _col0
-                    Statistics: Num rows: 1 Data size: 48 Basic stats: 
COMPLETE Column stats: COMPLETE
-                    UDTF Operator
-                      Statistics: Num rows: 1 Data size: 48 Basic stats: 
COMPLETE Column stats: COMPLETE
-                      function name: inline
-                      Select Operator
-                        expressions: col1 (type: int), null (type: void)
-                        outputColumnNames: _col0, _col1
-                        Statistics: Num rows: 1 Data size: 4 Basic stats: 
COMPLETE Column stats: COMPLETE
-                        Reduce Output Operator
-                          sort order: 
-                          Map-reduce partition columns: _col0 (type: int)
-                          Statistics: Num rows: 1 Data size: 4 Basic stats: 
COMPLETE Column stats: COMPLETE
-                          value expressions: _col0 (type: int), _col1 (type: 
void)
-            Execution mode: llap
-            LLAP IO: no inputs
-        Reducer 2 
-            Execution mode: llap
-            Reduce Operator Tree:
-              Select Operator
-                expressions: VALUE._col0 (type: int), UDFToString(VALUE._col1) 
(type: string)
-                outputColumnNames: _col0, _col1
-                Statistics: Num rows: 1 Data size: 184 Basic stats: COMPLETE 
Column stats: COMPLETE
-                File Output Operator
-                  compressed: false
-                  Statistics: Num rows: 1 Data size: 184 Basic stats: COMPLETE 
Column stats: COMPLETE
-                  table:
-                      input format: 
org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
-                      output format: 
org.apache.hadoop.hive.ql.io.orc.OrcOutputFormat
-                      serde: org.apache.hadoop.hive.ql.io.orc.OrcSerde
-                      name: default.insert_into1
-                  Write Type: INSERT
-                Select Operator
-                  expressions: _col0 (type: int), _col1 (type: string)
-                  outputColumnNames: key, value
-                  Statistics: Num rows: 1 Data size: 184 Basic stats: COMPLETE 
Column stats: COMPLETE
-                  Group By Operator
-                    aggregations: compute_stats(key, 'hll'), 
compute_stats(value, 'hll')
-                    mode: hash
-                    outputColumnNames: _col0, _col1
-                    Statistics: Num rows: 1 Data size: 864 Basic stats: 
COMPLETE Column stats: COMPLETE
-                    Reduce Output Operator
-                      sort order: 
-                      Statistics: Num rows: 1 Data size: 864 Basic stats: 
COMPLETE Column stats: COMPLETE
-                      value expressions: _col0 (type: struct<columntype:string,min:bigint,max:bigint,countnulls:bigint,bitvector:binary>), _col1 (type: struct<columntype:string,maxlength:bigint,sumlength:bigint,count:bigint,countnulls:bigint,bitvector:binary>)
-        Reducer 3 
-            Execution mode: llap
-            Reduce Operator Tree:
-              Group By Operator
-                aggregations: compute_stats(VALUE._col0), 
compute_stats(VALUE._col1)
-                mode: mergepartial
-                outputColumnNames: _col0, _col1
-                Statistics: Num rows: 1 Data size: 880 Basic stats: COMPLETE 
Column stats: COMPLETE
-                File Output Operator
-                  compressed: false
-                  Statistics: Num rows: 1 Data size: 880 Basic stats: COMPLETE 
Column stats: COMPLETE
-                  table:
-                      input format: 
org.apache.hadoop.mapred.SequenceFileInputFormat
-                      output format: 
org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
-                      serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-
-  Stage: Stage-2
-    Dependency Collection
-
-  Stage: Stage-0
-    Move Operator
-      tables:
-          replace: false
-          table:
-              input format: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
-              output format: org.apache.hadoop.hive.ql.io.orc.OrcOutputFormat
-              serde: org.apache.hadoop.hive.ql.io.orc.OrcSerde
-              name: default.insert_into1
-          Write Type: INSERT
-
-  Stage: Stage-3
-    Stats Work
-      Basic Stats Work:
-      Column Stats Desc:
-          Columns: key, value
-          Column Types: int, string
-          Table: default.insert_into1
-
-PREHOOK: query: INSERT INTO TABLE insert_into1 values(default, DEFAULT)
-PREHOOK: type: QUERY
-PREHOOK: Input: _dummy_database@_dummy_table
-PREHOOK: Output: default@insert_into1
-POSTHOOK: query: INSERT INTO TABLE insert_into1 values(default, DEFAULT)
-POSTHOOK: type: QUERY
-POSTHOOK: Input: _dummy_database@_dummy_table
-POSTHOOK: Output: default@insert_into1
-POSTHOOK: Lineage: insert_into1.key SCRIPT []
-POSTHOOK: Lineage: insert_into1.value EXPRESSION []
-PREHOOK: query: SELECT * from insert_into1
-PREHOOK: type: QUERY
-PREHOOK: Input: default@insert_into1
-#### A masked pattern was here ####
-POSTHOOK: query: SELECT * from insert_into1
-POSTHOOK: type: QUERY
-POSTHOOK: Input: default@insert_into1
-#### A masked pattern was here ####
-1      NULL
-PREHOOK: query: TRUNCATE table insert_into1
-PREHOOK: type: TRUNCATETABLE
-PREHOOK: Output: default@insert_into1
-POSTHOOK: query: TRUNCATE table insert_into1
-POSTHOOK: type: TRUNCATETABLE
-POSTHOOK: Output: default@insert_into1
-PREHOOK: query: EXPLAIN INSERT INTO TABLE insert_into1 values(234, dEfAULt)
-PREHOOK: type: QUERY
-POSTHOOK: query: EXPLAIN INSERT INTO TABLE insert_into1 values(234, dEfAULt)
-POSTHOOK: type: QUERY
-STAGE DEPENDENCIES:
-  Stage-1 is a root stage
-  Stage-2 depends on stages: Stage-1
-  Stage-0 depends on stages: Stage-2
-  Stage-3 depends on stages: Stage-0
-
-STAGE PLANS:
-  Stage: Stage-1
-    Tez
-#### A masked pattern was here ####
-      Edges:
-        Reducer 2 <- Map 1 (CUSTOM_SIMPLE_EDGE)
-        Reducer 3 <- Reducer 2 (CUSTOM_SIMPLE_EDGE)
-#### A masked pattern was here ####
-      Vertices:
-        Map 1 
-            Map Operator Tree:
-                TableScan
-                  alias: _dummy_table
-                  Row Limit Per Split: 1
-                  Statistics: Num rows: 1 Data size: 10 Basic stats: COMPLETE 
Column stats: COMPLETE
-                  Select Operator
-                    expressions: array(const struct(234,null)) (type: 
array<struct<col1:int,col2:void>>)
-                    outputColumnNames: _col0
-                    Statistics: Num rows: 1 Data size: 48 Basic stats: 
COMPLETE Column stats: COMPLETE
-                    UDTF Operator
-                      Statistics: Num rows: 1 Data size: 48 Basic stats: 
COMPLETE Column stats: COMPLETE
-                      function name: inline
-                      Select Operator
-                        expressions: col1 (type: int), null (type: void)
-                        outputColumnNames: _col0, _col1
-                        Statistics: Num rows: 1 Data size: 4 Basic stats: 
COMPLETE Column stats: COMPLETE
-                        Reduce Output Operator
-                          sort order: 
-                          Map-reduce partition columns: _col0 (type: int)
-                          Statistics: Num rows: 1 Data size: 4 Basic stats: 
COMPLETE Column stats: COMPLETE
-                          value expressions: _col0 (type: int), _col1 (type: 
void)
-            Execution mode: llap
-            LLAP IO: no inputs
-        Reducer 2 
-            Execution mode: llap
-            Reduce Operator Tree:
-              Select Operator
-                expressions: VALUE._col0 (type: int), UDFToString(VALUE._col1) 
(type: string)
-                outputColumnNames: _col0, _col1
-                Statistics: Num rows: 1 Data size: 184 Basic stats: COMPLETE 
Column stats: COMPLETE
-                File Output Operator
-                  compressed: false
-                  Statistics: Num rows: 1 Data size: 184 Basic stats: COMPLETE 
Column stats: COMPLETE
-                  table:
-                      input format: 
org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
-                      output format: 
org.apache.hadoop.hive.ql.io.orc.OrcOutputFormat
-                      serde: org.apache.hadoop.hive.ql.io.orc.OrcSerde
-                      name: default.insert_into1
-                  Write Type: INSERT
-                Select Operator
-                  expressions: _col0 (type: int), _col1 (type: string)
-                  outputColumnNames: key, value
-                  Statistics: Num rows: 1 Data size: 184 Basic stats: COMPLETE 
Column stats: COMPLETE
-                  Group By Operator
-                    aggregations: compute_stats(key, 'hll'), 
compute_stats(value, 'hll')
-                    mode: hash
-                    outputColumnNames: _col0, _col1
-                    Statistics: Num rows: 1 Data size: 864 Basic stats: 
COMPLETE Column stats: COMPLETE
-                    Reduce Output Operator
-                      sort order: 
-                      Statistics: Num rows: 1 Data size: 864 Basic stats: 
COMPLETE Column stats: COMPLETE
-                      value expressions: _col0 (type: struct<columntype:string,min:bigint,max:bigint,countnulls:bigint,bitvector:binary>), _col1 (type: struct<columntype:string,maxlength:bigint,sumlength:bigint,count:bigint,countnulls:bigint,bitvector:binary>)
-        Reducer 3 
-            Execution mode: llap
-            Reduce Operator Tree:
-              Group By Operator
-                aggregations: compute_stats(VALUE._col0), 
compute_stats(VALUE._col1)
-                mode: mergepartial
-                outputColumnNames: _col0, _col1
-                Statistics: Num rows: 1 Data size: 880 Basic stats: COMPLETE 
Column stats: COMPLETE
-                File Output Operator
-                  compressed: false
-                  Statistics: Num rows: 1 Data size: 880 Basic stats: COMPLETE 
Column stats: COMPLETE
-                  table:
-                      input format: 
org.apache.hadoop.mapred.SequenceFileInputFormat
-                      output format: 
org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
-                      serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-
-  Stage: Stage-2
-    Dependency Collection
-
-  Stage: Stage-0
-    Move Operator
-      tables:
-          replace: false
-          table:
-              input format: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
-              output format: org.apache.hadoop.hive.ql.io.orc.OrcOutputFormat
-              serde: org.apache.hadoop.hive.ql.io.orc.OrcSerde
-              name: default.insert_into1
-          Write Type: INSERT
-
-  Stage: Stage-3
-    Stats Work
-      Basic Stats Work:
-      Column Stats Desc:
-          Columns: key, value
-          Column Types: int, string
-          Table: default.insert_into1
-
-PREHOOK: query: INSERT INTO TABLE insert_into1 values(234, dEfAULt)
-PREHOOK: type: QUERY
-PREHOOK: Input: _dummy_database@_dummy_table
-PREHOOK: Output: default@insert_into1
-POSTHOOK: query: INSERT INTO TABLE insert_into1 values(234, dEfAULt)
-POSTHOOK: type: QUERY
-POSTHOOK: Input: _dummy_database@_dummy_table
-POSTHOOK: Output: default@insert_into1
-POSTHOOK: Lineage: insert_into1.key SCRIPT []
-POSTHOOK: Lineage: insert_into1.value EXPRESSION []
-PREHOOK: query: SELECT * from insert_into1
-PREHOOK: type: QUERY
-PREHOOK: Input: default@insert_into1
-#### A masked pattern was here ####
-POSTHOOK: query: SELECT * from insert_into1
-POSTHOOK: type: QUERY
-POSTHOOK: Input: default@insert_into1
-#### A masked pattern was here ####
-234    NULL
-PREHOOK: query: TRUNCATE table insert_into1
-PREHOOK: type: TRUNCATETABLE
-PREHOOK: Output: default@insert_into1
-POSTHOOK: query: TRUNCATE table insert_into1
-POSTHOOK: type: TRUNCATETABLE
-POSTHOOK: Output: default@insert_into1
-PREHOOK: query: explain insert into insert_into1 values(default, 3),(2,default)
-PREHOOK: type: QUERY
-POSTHOOK: query: explain insert into insert_into1 values(default, 3),(2,default)
-POSTHOOK: type: QUERY
-STAGE DEPENDENCIES:
-  Stage-1 is a root stage
-  Stage-2 depends on stages: Stage-1
-  Stage-0 depends on stages: Stage-2
-  Stage-3 depends on stages: Stage-0
-
-STAGE PLANS:
-  Stage: Stage-1
-    Tez
-#### A masked pattern was here ####
-      Edges:
-        Reducer 2 <- Map 1 (CUSTOM_SIMPLE_EDGE)
-        Reducer 3 <- Reducer 2 (CUSTOM_SIMPLE_EDGE)
-#### A masked pattern was here ####
-      Vertices:
-        Map 1 
-            Map Operator Tree:
-                TableScan
-                  alias: _dummy_table
-                  Row Limit Per Split: 1
-                  Statistics: Num rows: 1 Data size: 10 Basic stats: COMPLETE 
Column stats: COMPLETE
-                  Select Operator
-                    expressions: array(const struct(1,3),const struct(2,null)) 
(type: array<struct<col1:int,col2:int>>)
-                    outputColumnNames: _col0
-                    Statistics: Num rows: 1 Data size: 56 Basic stats: 
COMPLETE Column stats: COMPLETE
-                    UDTF Operator
-                      Statistics: Num rows: 1 Data size: 56 Basic stats: 
COMPLETE Column stats: COMPLETE
-                      function name: inline
-                      Select Operator
-                        expressions: col1 (type: int), col2 (type: int)
-                        outputColumnNames: _col0, _col1
-                        Statistics: Num rows: 1 Data size: 8 Basic stats: 
COMPLETE Column stats: COMPLETE
-                        Reduce Output Operator
-                          sort order: 
-                          Map-reduce partition columns: _col0 (type: int)
-                          Statistics: Num rows: 1 Data size: 8 Basic stats: 
COMPLETE Column stats: COMPLETE
-                          value expressions: _col0 (type: int), _col1 (type: 
int)
-            Execution mode: llap
-            LLAP IO: no inputs
-        Reducer 2 
-            Execution mode: llap
-            Reduce Operator Tree:
-              Select Operator
-                expressions: VALUE._col0 (type: int), UDFToString(VALUE._col1) 
(type: string)
-                outputColumnNames: _col0, _col1
-                Statistics: Num rows: 1 Data size: 184 Basic stats: COMPLETE 
Column stats: COMPLETE
-                File Output Operator
-                  compressed: false
-                  Statistics: Num rows: 1 Data size: 184 Basic stats: COMPLETE 
Column stats: COMPLETE
-                  table:
-                      input format: 
org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
-                      output format: 
org.apache.hadoop.hive.ql.io.orc.OrcOutputFormat
-                      serde: org.apache.hadoop.hive.ql.io.orc.OrcSerde
-                      name: default.insert_into1
-                  Write Type: INSERT
-                Select Operator
-                  expressions: _col0 (type: int), _col1 (type: string)
-                  outputColumnNames: key, value
-                  Statistics: Num rows: 1 Data size: 184 Basic stats: COMPLETE 
Column stats: COMPLETE
-                  Group By Operator
-                    aggregations: compute_stats(key, 'hll'), 
compute_stats(value, 'hll')
-                    mode: hash
-                    outputColumnNames: _col0, _col1
-                    Statistics: Num rows: 1 Data size: 864 Basic stats: 
COMPLETE Column stats: COMPLETE
-                    Reduce Output Operator
-                      sort order: 
-                      Statistics: Num rows: 1 Data size: 864 Basic stats: 
COMPLETE Column stats: COMPLETE
-                      value expressions: _col0 (type: struct<columntype:string,min:bigint,max:bigint,countnulls:bigint,bitvector:binary>), _col1 (type: struct<columntype:string,maxlength:bigint,sumlength:bigint,count:bigint,countnulls:bigint,bitvector:binary>)
-        Reducer 3 
-            Execution mode: llap
-            Reduce Operator Tree:
-              Group By Operator
-                aggregations: compute_stats(VALUE._col0), 
compute_stats(VALUE._col1)
-                mode: mergepartial
-                outputColumnNames: _col0, _col1
-                Statistics: Num rows: 1 Data size: 880 Basic stats: COMPLETE 
Column stats: COMPLETE
-                File Output Operator
-                  compressed: false
-                  Statistics: Num rows: 1 Data size: 880 Basic stats: COMPLETE 
Column stats: COMPLETE
-                  table:
-                      input format: 
org.apache.hadoop.mapred.SequenceFileInputFormat
-                      output format: 
org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
-                      serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-
-  Stage: Stage-2
-    Dependency Collection
-
-  Stage: Stage-0
-    Move Operator
-      tables:
-          replace: false
-          table:
-              input format: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
-              output format: org.apache.hadoop.hive.ql.io.orc.OrcOutputFormat
-              serde: org.apache.hadoop.hive.ql.io.orc.OrcSerde
-              name: default.insert_into1
-          Write Type: INSERT
-
-  Stage: Stage-3
-    Stats Work
-      Basic Stats Work:
-      Column Stats Desc:
-          Columns: key, value
-          Column Types: int, string
-          Table: default.insert_into1
-
-PREHOOK: query: insert into insert_into1 values(default, 3),(2,default)
-PREHOOK: type: QUERY
-PREHOOK: Input: _dummy_database@_dummy_table
-PREHOOK: Output: default@insert_into1
-POSTHOOK: query: insert into insert_into1 values(default, 3),(2,default)
-POSTHOOK: type: QUERY
-POSTHOOK: Input: _dummy_database@_dummy_table
-POSTHOOK: Output: default@insert_into1
-POSTHOOK: Lineage: insert_into1.key SCRIPT []
-POSTHOOK: Lineage: insert_into1.value SCRIPT []
-PREHOOK: query: select * from insert_into1
-PREHOOK: type: QUERY
-PREHOOK: Input: default@insert_into1
-#### A masked pattern was here ####
-POSTHOOK: query: select * from insert_into1
-POSTHOOK: type: QUERY
-POSTHOOK: Input: default@insert_into1
-#### A masked pattern was here ####
-1      3
-2      NULL
-PREHOOK: query: TRUNCATE table insert_into1
-PREHOOK: type: TRUNCATETABLE
-PREHOOK: Output: default@insert_into1
-POSTHOOK: query: TRUNCATE table insert_into1
-POSTHOOK: type: TRUNCATETABLE
-POSTHOOK: Output: default@insert_into1
-PREHOOK: query: EXPLAIN INSERT INTO TABLE insert_into1(key) values(default)
-PREHOOK: type: QUERY
-POSTHOOK: query: EXPLAIN INSERT INTO TABLE insert_into1(key) values(default)
-POSTHOOK: type: QUERY
-STAGE DEPENDENCIES:
-  Stage-1 is a root stage
-  Stage-2 depends on stages: Stage-1
-  Stage-0 depends on stages: Stage-2
-  Stage-3 depends on stages: Stage-0
-
-STAGE PLANS:
-  Stage: Stage-1
-    Tez
-#### A masked pattern was here ####
-      Edges:
-        Reducer 2 <- Map 1 (CUSTOM_SIMPLE_EDGE)
-        Reducer 3 <- Reducer 2 (CUSTOM_SIMPLE_EDGE)
-#### A masked pattern was here ####
-      Vertices:
-        Map 1 
-            Map Operator Tree:
-                TableScan
-                  alias: _dummy_table
-                  Row Limit Per Split: 1
-                  Statistics: Num rows: 1 Data size: 10 Basic stats: COMPLETE 
Column stats: COMPLETE
-                  Select Operator
-                    expressions: array(const struct(1)) (type: 
array<struct<col1:int>>)
-                    outputColumnNames: _col0
-                    Statistics: Num rows: 1 Data size: 48 Basic stats: 
COMPLETE Column stats: COMPLETE
-                    UDTF Operator
-                      Statistics: Num rows: 1 Data size: 48 Basic stats: 
COMPLETE Column stats: COMPLETE
-                      function name: inline
-                      Select Operator
-                        expressions: col1 (type: int), null (type: string)
-                        outputColumnNames: _col0, _col1
-                        Statistics: Num rows: 1 Data size: 84 Basic stats: 
COMPLETE Column stats: COMPLETE
-                        Reduce Output Operator
-                          sort order: 
-                          Map-reduce partition columns: _col0 (type: int)
-                          Statistics: Num rows: 1 Data size: 84 Basic stats: 
COMPLETE Column stats: COMPLETE
-                          value expressions: _col0 (type: int), _col1 (type: 
string)
-            Execution mode: llap
-            LLAP IO: no inputs
-        Reducer 2 
-            Execution mode: llap
-            Reduce Operator Tree:
-              Select Operator
-                expressions: VALUE._col0 (type: int), VALUE._col1 (type: 
string)
-                outputColumnNames: _col0, _col1
-                Statistics: Num rows: 1 Data size: 84 Basic stats: COMPLETE 
Column stats: COMPLETE
-                File Output Operator
-                  compressed: false
-                  Statistics: Num rows: 1 Data size: 84 Basic stats: COMPLETE 
Column stats: COMPLETE
-                  table:
-                      input format: 
org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
-                      output format: 
org.apache.hadoop.hive.ql.io.orc.OrcOutputFormat
-                      serde: org.apache.hadoop.hive.ql.io.orc.OrcSerde
-                      name: default.insert_into1
-                  Write Type: INSERT
-                Select Operator
-                  expressions: _col0 (type: int), _col1 (type: string)
-                  outputColumnNames: key, value
-                  Statistics: Num rows: 1 Data size: 84 Basic stats: COMPLETE 
Column stats: COMPLETE
-                  Group By Operator
-                    aggregations: compute_stats(key, 'hll'), 
compute_stats(value, 'hll')
-                    mode: hash
-                    outputColumnNames: _col0, _col1
-                    Statistics: Num rows: 1 Data size: 864 Basic stats: 
COMPLETE Column stats: COMPLETE
-                    Reduce Output Operator
-                      sort order: 
-                      Statistics: Num rows: 1 Data size: 864 Basic stats: 
COMPLETE Column stats: COMPLETE
-                      value expressions: _col0 (type: 
struct<columntype:string,min:bigint,max:bigint,countnulls:bigint,bitvector:binary>),
 _col1 (type: 
struct<columntype:string,maxlength:bigint,sumlength:bigint,count:bigint,countnulls:bigint,bitvector:binary>)
-        Reducer 3 
-            Execution mode: llap
-            Reduce Operator Tree:
-              Group By Operator
-                aggregations: compute_stats(VALUE._col0), 
compute_stats(VALUE._col1)
-                mode: mergepartial
-                outputColumnNames: _col0, _col1
-                Statistics: Num rows: 1 Data size: 880 Basic stats: COMPLETE 
Column stats: COMPLETE
-                File Output Operator
-                  compressed: false
-                  Statistics: Num rows: 1 Data size: 880 Basic stats: COMPLETE 
Column stats: COMPLETE
-                  table:
-                      input format: 
org.apache.hadoop.mapred.SequenceFileInputFormat
-                      output format: 
org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
-                      serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-
-  Stage: Stage-2
-    Dependency Collection
-
-  Stage: Stage-0
-    Move Operator
-      tables:
-          replace: false
-          table:
-              input format: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
-              output format: org.apache.hadoop.hive.ql.io.orc.OrcOutputFormat
-              serde: org.apache.hadoop.hive.ql.io.orc.OrcSerde
-              name: default.insert_into1
-          Write Type: INSERT
-
-  Stage: Stage-3
-    Stats Work
-      Basic Stats Work:
-      Column Stats Desc:
-          Columns: key, value
-          Column Types: int, string
-          Table: default.insert_into1
-
-PREHOOK: query: INSERT INTO TABLE insert_into1(key) values(default)
-PREHOOK: type: QUERY
-PREHOOK: Input: _dummy_database@_dummy_table
-PREHOOK: Output: default@insert_into1
-POSTHOOK: query: INSERT INTO TABLE insert_into1(key) values(default)
-POSTHOOK: type: QUERY
-POSTHOOK: Input: _dummy_database@_dummy_table
-POSTHOOK: Output: default@insert_into1
-POSTHOOK: Lineage: insert_into1.key SCRIPT []
-POSTHOOK: Lineage: insert_into1.value SIMPLE []
-PREHOOK: query: select * from insert_into1
-PREHOOK: type: QUERY
-PREHOOK: Input: default@insert_into1
-#### A masked pattern was here ####
-POSTHOOK: query: select * from insert_into1
-POSTHOOK: type: QUERY
-POSTHOOK: Input: default@insert_into1
-#### A masked pattern was here ####
-1      NULL
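
The plan above shows the DEFAULT keyword for key already folded to the constant 1 (array(const struct(1))) and the unlisted value column filled with null, which is exactly the "1 NULL" row the SELECT returns. A minimal standalone sketch of the behaviour this test exercises, assuming a transactional table whose key column declares DEFAULT 1 (the CREATE TABLE for this part of the output is not shown in this excerpt, and the table name below is hypothetical):

    -- Hedged sketch: DEFAULT keyword with a partial column list.
    -- Assumes ACID/transactional support is enabled and key declares DEFAULT 1.
    CREATE TABLE t_default_sketch (key int DEFAULT 1, value string)
        clustered by (key) into 2 buckets stored as orc
        TBLPROPERTIES ('transactional'='true');
    INSERT INTO TABLE t_default_sketch(key) VALUES (DEFAULT);
    -- key takes its declared default (1); value, which is not listed and
    -- declares no default here, comes back NULL.
    SELECT * FROM t_default_sketch;   -- expected row: 1  NULL
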
-PREHOOK: query: TRUNCATE table insert_into1
-PREHOOK: type: TRUNCATETABLE
-PREHOOK: Output: default@insert_into1
-POSTHOOK: query: TRUNCATE table insert_into1
-POSTHOOK: type: TRUNCATETABLE
-POSTHOOK: Output: default@insert_into1
-PREHOOK: query: EXPLAIN INSERT INTO TABLE insert_into1(key, value) 
values(2,default)
-PREHOOK: type: QUERY
-POSTHOOK: query: EXPLAIN INSERT INTO TABLE insert_into1(key, value) 
values(2,default)
-POSTHOOK: type: QUERY
-STAGE DEPENDENCIES:
-  Stage-1 is a root stage
-  Stage-2 depends on stages: Stage-1
-  Stage-0 depends on stages: Stage-2
-  Stage-3 depends on stages: Stage-0
-
-STAGE PLANS:
-  Stage: Stage-1
-    Tez
-#### A masked pattern was here ####
-      Edges:
-        Reducer 2 <- Map 1 (CUSTOM_SIMPLE_EDGE)
-        Reducer 3 <- Reducer 2 (CUSTOM_SIMPLE_EDGE)
-#### A masked pattern was here ####
-      Vertices:
-        Map 1 
-            Map Operator Tree:
-                TableScan
-                  alias: _dummy_table
-                  Row Limit Per Split: 1
-                  Statistics: Num rows: 1 Data size: 10 Basic stats: COMPLETE 
Column stats: COMPLETE
-                  Select Operator
-                    expressions: array(const struct(2,null)) (type: 
array<struct<col1:int,col2:void>>)
-                    outputColumnNames: _col0
-                    Statistics: Num rows: 1 Data size: 48 Basic stats: 
COMPLETE Column stats: COMPLETE
-                    UDTF Operator
-                      Statistics: Num rows: 1 Data size: 48 Basic stats: 
COMPLETE Column stats: COMPLETE
-                      function name: inline
-                      Select Operator
-                        expressions: col1 (type: int), null (type: void)
-                        outputColumnNames: _col0, _col1
-                        Statistics: Num rows: 1 Data size: 4 Basic stats: 
COMPLETE Column stats: COMPLETE
-                        Reduce Output Operator
-                          sort order: 
-                          Map-reduce partition columns: _col0 (type: int)
-                          Statistics: Num rows: 1 Data size: 4 Basic stats: 
COMPLETE Column stats: COMPLETE
-                          value expressions: _col0 (type: int), _col1 (type: 
void)
-            Execution mode: llap
-            LLAP IO: no inputs
-        Reducer 2 
-            Execution mode: llap
-            Reduce Operator Tree:
-              Select Operator
-                expressions: VALUE._col0 (type: int), UDFToString(VALUE._col1) 
(type: string)
-                outputColumnNames: _col0, _col1
-                Statistics: Num rows: 1 Data size: 184 Basic stats: COMPLETE 
Column stats: COMPLETE
-                File Output Operator
-                  compressed: false
-                  Statistics: Num rows: 1 Data size: 184 Basic stats: COMPLETE 
Column stats: COMPLETE
-                  table:
-                      input format: 
org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
-                      output format: 
org.apache.hadoop.hive.ql.io.orc.OrcOutputFormat
-                      serde: org.apache.hadoop.hive.ql.io.orc.OrcSerde
-                      name: default.insert_into1
-                  Write Type: INSERT
-                Select Operator
-                  expressions: _col0 (type: int), _col1 (type: string)
-                  outputColumnNames: key, value
-                  Statistics: Num rows: 1 Data size: 184 Basic stats: COMPLETE 
Column stats: COMPLETE
-                  Group By Operator
-                    aggregations: compute_stats(key, 'hll'), 
compute_stats(value, 'hll')
-                    mode: hash
-                    outputColumnNames: _col0, _col1
-                    Statistics: Num rows: 1 Data size: 864 Basic stats: 
COMPLETE Column stats: COMPLETE
-                    Reduce Output Operator
-                      sort order: 
-                      Statistics: Num rows: 1 Data size: 864 Basic stats: 
COMPLETE Column stats: COMPLETE
-                      value expressions: _col0 (type: 
struct<columntype:string,min:bigint,max:bigint,countnulls:bigint,bitvector:binary>),
 _col1 (type: 
struct<columntype:string,maxlength:bigint,sumlength:bigint,count:bigint,countnulls:bigint,bitvector:binary>)
-        Reducer 3 
-            Execution mode: llap
-            Reduce Operator Tree:
-              Group By Operator
-                aggregations: compute_stats(VALUE._col0), 
compute_stats(VALUE._col1)
-                mode: mergepartial
-                outputColumnNames: _col0, _col1
-                Statistics: Num rows: 1 Data size: 880 Basic stats: COMPLETE 
Column stats: COMPLETE
-                File Output Operator
-                  compressed: false
-                  Statistics: Num rows: 1 Data size: 880 Basic stats: COMPLETE 
Column stats: COMPLETE
-                  table:
-                      input format: 
org.apache.hadoop.mapred.SequenceFileInputFormat
-                      output format: 
org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
-                      serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-
-  Stage: Stage-2
-    Dependency Collection
-
-  Stage: Stage-0
-    Move Operator
-      tables:
-          replace: false
-          table:
-              input format: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
-              output format: org.apache.hadoop.hive.ql.io.orc.OrcOutputFormat
-              serde: org.apache.hadoop.hive.ql.io.orc.OrcSerde
-              name: default.insert_into1
-          Write Type: INSERT
-
-  Stage: Stage-3
-    Stats Work
-      Basic Stats Work:
-      Column Stats Desc:
-          Columns: key, value
-          Column Types: int, string
-          Table: default.insert_into1
-
-PREHOOK: query: INSERT INTO TABLE insert_into1(key, value) values(2,default)
-PREHOOK: type: QUERY
-PREHOOK: Input: _dummy_database@_dummy_table
-PREHOOK: Output: default@insert_into1
-POSTHOOK: query: INSERT INTO TABLE insert_into1(key, value) values(2,default)
-POSTHOOK: type: QUERY
-POSTHOOK: Input: _dummy_database@_dummy_table
-POSTHOOK: Output: default@insert_into1
-POSTHOOK: Lineage: insert_into1.key SCRIPT []
-POSTHOOK: Lineage: insert_into1.value EXPRESSION []
-PREHOOK: query: select * from insert_into1
-PREHOOK: type: QUERY
-PREHOOK: Input: default@insert_into1
-#### A masked pattern was here ####
-POSTHOOK: query: select * from insert_into1
-POSTHOOK: type: QUERY
-POSTHOOK: Input: default@insert_into1
-#### A masked pattern was here ####
-2      NULL
-PREHOOK: query: TRUNCATE table insert_into1
-PREHOOK: type: TRUNCATETABLE
-PREHOOK: Output: default@insert_into1
-POSTHOOK: query: TRUNCATE table insert_into1
-POSTHOOK: type: TRUNCATETABLE
-POSTHOOK: Output: default@insert_into1
-PREHOOK: query: EXPLAIN INSERT INTO TABLE insert_into1(value, key) 
values(2,default)
-PREHOOK: type: QUERY
-POSTHOOK: query: EXPLAIN INSERT INTO TABLE insert_into1(value, key) 
values(2,default)
-POSTHOOK: type: QUERY
-STAGE DEPENDENCIES:
-  Stage-1 is a root stage
-  Stage-2 depends on stages: Stage-1
-  Stage-0 depends on stages: Stage-2
-  Stage-3 depends on stages: Stage-0
-
-STAGE PLANS:
-  Stage: Stage-1
-    Tez
-#### A masked pattern was here ####
-      Edges:
-        Reducer 2 <- Map 1 (CUSTOM_SIMPLE_EDGE)
-        Reducer 3 <- Reducer 2 (CUSTOM_SIMPLE_EDGE)
-#### A masked pattern was here ####
-      Vertices:
-        Map 1 
-            Map Operator Tree:
-                TableScan
-                  alias: _dummy_table
-                  Row Limit Per Split: 1
-                  Statistics: Num rows: 1 Data size: 10 Basic stats: COMPLETE 
Column stats: COMPLETE
-                  Select Operator
-                    expressions: array(const struct(2,1)) (type: 
array<struct<col1:int,col2:int>>)
-                    outputColumnNames: _col0
-                    Statistics: Num rows: 1 Data size: 48 Basic stats: 
COMPLETE Column stats: COMPLETE
-                    UDTF Operator
-                      Statistics: Num rows: 1 Data size: 48 Basic stats: 
COMPLETE Column stats: COMPLETE
-                      function name: inline
-                      Select Operator
-                        expressions: col2 (type: int), col1 (type: int)
-                        outputColumnNames: _col0, _col1
-                        Statistics: Num rows: 1 Data size: 8 Basic stats: 
COMPLETE Column stats: COMPLETE
-                        Reduce Output Operator
-                          sort order: 
-                          Map-reduce partition columns: _col0 (type: int)
-                          Statistics: Num rows: 1 Data size: 8 Basic stats: 
COMPLETE Column stats: COMPLETE
-                          value expressions: _col0 (type: int), _col1 (type: 
int)
-            Execution mode: llap
-            LLAP IO: no inputs
-        Reducer 2 
-            Execution mode: llap
-            Reduce Operator Tree:
-              Select Operator
-                expressions: VALUE._col0 (type: int), UDFToString(VALUE._col1) 
(type: string)
-                outputColumnNames: _col0, _col1
-                Statistics: Num rows: 1 Data size: 184 Basic stats: COMPLETE 
Column stats: COMPLETE
-                File Output Operator
-                  compressed: false
-                  Statistics: Num rows: 1 Data size: 184 Basic stats: COMPLETE 
Column stats: COMPLETE
-                  table:
-                      input format: 
org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
-                      output format: 
org.apache.hadoop.hive.ql.io.orc.OrcOutputFormat
-                      serde: org.apache.hadoop.hive.ql.io.orc.OrcSerde
-                      name: default.insert_into1
-                  Write Type: INSERT
-                Select Operator
-                  expressions: _col0 (type: int), _col1 (type: string)
-                  outputColumnNames: key, value
-                  Statistics: Num rows: 1 Data size: 184 Basic stats: COMPLETE 
Column stats: COMPLETE
-                  Group By Operator
-                    aggregations: compute_stats(key, 'hll'), 
compute_stats(value, 'hll')
-                    mode: hash
-                    outputColumnNames: _col0, _col1
-                    Statistics: Num rows: 1 Data size: 864 Basic stats: 
COMPLETE Column stats: COMPLETE
-                    Reduce Output Operator
-                      sort order: 
-                      Statistics: Num rows: 1 Data size: 864 Basic stats: 
COMPLETE Column stats: COMPLETE
-                      value expressions: _col0 (type: 
struct<columntype:string,min:bigint,max:bigint,countnulls:bigint,bitvector:binary>),
 _col1 (type: 
struct<columntype:string,maxlength:bigint,sumlength:bigint,count:bigint,countnulls:bigint,bitvector:binary>)
-        Reducer 3 
-            Execution mode: llap
-            Reduce Operator Tree:
-              Group By Operator
-                aggregations: compute_stats(VALUE._col0), 
compute_stats(VALUE._col1)
-                mode: mergepartial
-                outputColumnNames: _col0, _col1
-                Statistics: Num rows: 1 Data size: 880 Basic stats: COMPLETE 
Column stats: COMPLETE
-                File Output Operator
-                  compressed: false
-                  Statistics: Num rows: 1 Data size: 880 Basic stats: COMPLETE 
Column stats: COMPLETE
-                  table:
-                      input format: 
org.apache.hadoop.mapred.SequenceFileInputFormat
-                      output format: 
org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
-                      serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-
-  Stage: Stage-2
-    Dependency Collection
-
-  Stage: Stage-0
-    Move Operator
-      tables:
-          replace: false
-          table:
-              input format: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
-              output format: org.apache.hadoop.hive.ql.io.orc.OrcOutputFormat
-              serde: org.apache.hadoop.hive.ql.io.orc.OrcSerde
-              name: default.insert_into1
-          Write Type: INSERT
-
-  Stage: Stage-3
-    Stats Work
-      Basic Stats Work:
-      Column Stats Desc:
-          Columns: key, value
-          Column Types: int, string
-          Table: default.insert_into1
-
-PREHOOK: query: INSERT INTO TABLE insert_into1(value, key) values(2,default)
-PREHOOK: type: QUERY
-PREHOOK: Input: _dummy_database@_dummy_table
-PREHOOK: Output: default@insert_into1
-POSTHOOK: query: INSERT INTO TABLE insert_into1(value, key) values(2,default)
-POSTHOOK: type: QUERY
-POSTHOOK: Input: _dummy_database@_dummy_table
-POSTHOOK: Output: default@insert_into1
-POSTHOOK: Lineage: insert_into1.key SCRIPT []
-POSTHOOK: Lineage: insert_into1.value SCRIPT []
-PREHOOK: query: select * from insert_into1
-PREHOOK: type: QUERY
-PREHOOK: Input: default@insert_into1
-#### A masked pattern was here ####
-POSTHOOK: query: select * from insert_into1
-POSTHOOK: type: QUERY
-POSTHOOK: Input: default@insert_into1
-#### A masked pattern was here ####
-1      2
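
With an explicit column list given in a different order than the table schema, the VALUES tuple is bound positionally to the listed columns: the plan's const struct(2,1) and the col2, col1 reordering show 2 bound to value (cast to string) and DEFAULT for key folded to 1 before the columns are put back into table order, which gives the "1 2" row above. A hedged continuation of the hypothetical t_default_sketch table from the earlier sketch:

    -- Hedged sketch: column list reordered relative to the table schema.
    TRUNCATE TABLE t_default_sketch;
    INSERT INTO TABLE t_default_sketch(value, key) VALUES (2, DEFAULT);
    -- value <- '2' (int literal cast to string), key <- its declared default (1)
    SELECT * FROM t_default_sketch;   -- expected row: 1  2
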
-PREHOOK: query: TRUNCATE table insert_into1
-PREHOOK: type: TRUNCATETABLE
-PREHOOK: Output: default@insert_into1
-POSTHOOK: query: TRUNCATE table insert_into1
-POSTHOOK: type: TRUNCATETABLE
-POSTHOOK: Output: default@insert_into1
-PREHOOK: query: EXPLAIN INSERT INTO TABLE insert_into1(key, value) 
values(2,default),(DEFAULT, default)
-PREHOOK: type: QUERY
-POSTHOOK: query: EXPLAIN INSERT INTO TABLE insert_into1(key, value) 
values(2,default),(DEFAULT, default)
-POSTHOOK: type: QUERY
-STAGE DEPENDENCIES:
-  Stage-1 is a root stage
-  Stage-2 depends on stages: Stage-1
-  Stage-0 depends on stages: Stage-2
-  Stage-3 depends on stages: Stage-0
-
-STAGE PLANS:
-  Stage: Stage-1
-    Tez
-#### A masked pattern was here ####
-      Edges:
-        Reducer 2 <- Map 1 (CUSTOM_SIMPLE_EDGE)
-        Reducer 3 <- Reducer 2 (CUSTOM_SIMPLE_EDGE)
-#### A masked pattern was here ####
-      Vertices:
-        Map 1 
-            Map Operator Tree:
-                TableScan
-                  alias: _dummy_table
-                  Row Limit Per Split: 1
-                  Statistics: Num rows: 1 Data size: 10 Basic stats: COMPLETE 
Column stats: COMPLETE
-                  Select Operator
-                    expressions: array(const struct(2,null),const 
struct(1,null)) (type: array<struct<col1:int,col2:void>>)
-                    outputColumnNames: _col0
-                    Statistics: Num rows: 1 Data size: 56 Basic stats: 
COMPLETE Column stats: COMPLETE
-                    UDTF Operator
-                      Statistics: Num rows: 1 Data size: 56 Basic stats: 
COMPLETE Column stats: COMPLETE
-                      function name: inline
-                      Select Operator
-                        expressions: col1 (type: int), null (type: void)
-                        outputColumnNames: _col0, _col1
-                        Statistics: Num rows: 1 Data size: 4 Basic stats: 
COMPLETE Column stats: COMPLETE
-                        Reduce Output Operator
-                          sort order: 
-                          Map-reduce partition columns: _col0 (type: int)
-                          Statistics: Num rows: 1 Data size: 4 Basic stats: 
COMPLETE Column stats: COMPLETE
-                          value expressions: _col0 (type: int), _col1 (type: 
void)
-            Execution mode: llap
-            LLAP IO: no inputs
-        Reducer 2 
-            Execution mode: llap
-            Reduce Operator Tree:
-              Select Operator
-                expressions: VALUE._col0 (type: int), UDFToString(VALUE._col1) 
(type: string)
-                outputColumnNames: _col0, _col1
-                Statistics: Num rows: 1 Data size: 184 Basic stats: COMPLETE 
Column stats: COMPLETE
-                File Output Operator
-                  compressed: false
-                  Statistics: Num rows: 1 Data size: 184 Basic stats: COMPLETE 
Column stats: COMPLETE
-                  table:
-                      input format: 
org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
-                      output format: 
org.apache.hadoop.hive.ql.io.orc.OrcOutputFormat
-                      serde: org.apache.hadoop.hive.ql.io.orc.OrcSerde
-                      name: default.insert_into1
-                  Write Type: INSERT
-                Select Operator
-                  expressions: _col0 (type: int), _col1 (type: string)
-                  outputColumnNames: key, value
-                  Statistics: Num rows: 1 Data size: 184 Basic stats: COMPLETE 
Column stats: COMPLETE
-                  Group By Operator
-                    aggregations: compute_stats(key, 'hll'), 
compute_stats(value, 'hll')
-                    mode: hash
-                    outputColumnNames: _col0, _col1
-                    Statistics: Num rows: 1 Data size: 864 Basic stats: 
COMPLETE Column stats: COMPLETE
-                    Reduce Output Operator
-                      sort order: 
-                      Statistics: Num rows: 1 Data size: 864 Basic stats: 
COMPLETE Column stats: COMPLETE
-                      value expressions: _col0 (type: 
struct<columntype:string,min:bigint,max:bigint,countnulls:bigint,bitvector:binary>),
 _col1 (type: 
struct<columntype:string,maxlength:bigint,sumlength:bigint,count:bigint,countnulls:bigint,bitvector:binary>)
-        Reducer 3 
-            Execution mode: llap
-            Reduce Operator Tree:
-              Group By Operator
-                aggregations: compute_stats(VALUE._col0), 
compute_stats(VALUE._col1)
-                mode: mergepartial
-                outputColumnNames: _col0, _col1
-                Statistics: Num rows: 1 Data size: 880 Basic stats: COMPLETE 
Column stats: COMPLETE
-                File Output Operator
-                  compressed: false
-                  Statistics: Num rows: 1 Data size: 880 Basic stats: COMPLETE 
Column stats: COMPLETE
-                  table:
-                      input format: 
org.apache.hadoop.mapred.SequenceFileInputFormat
-                      output format: 
org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
-                      serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-
-  Stage: Stage-2
-    Dependency Collection
-
-  Stage: Stage-0
-    Move Operator
-      tables:
-          replace: false
-          table:
-              input format: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
-              output format: org.apache.hadoop.hive.ql.io.orc.OrcOutputFormat
-              serde: org.apache.hadoop.hive.ql.io.orc.OrcSerde
-              name: default.insert_into1
-          Write Type: INSERT
-
-  Stage: Stage-3
-    Stats Work
-      Basic Stats Work:
-      Column Stats Desc:
-          Columns: key, value
-          Column Types: int, string
-          Table: default.insert_into1
-
-PREHOOK: query: INSERT INTO TABLE insert_into1(key, value) 
values(2,default),(DEFAULT, default)
-PREHOOK: type: QUERY
-PREHOOK: Input: _dummy_database@_dummy_table
-PREHOOK: Output: default@insert_into1
-POSTHOOK: query: INSERT INTO TABLE insert_into1(key, value) 
values(2,default),(DEFAULT, default)
-POSTHOOK: type: QUERY
-POSTHOOK: Input: _dummy_database@_dummy_table
-POSTHOOK: Output: default@insert_into1
-POSTHOOK: Lineage: insert_into1.key SCRIPT []
-POSTHOOK: Lineage: insert_into1.value EXPRESSION []
-PREHOOK: query: select * from insert_into1
-PREHOOK: type: QUERY
-PREHOOK: Input: default@insert_into1
-#### A masked pattern was here ####
-POSTHOOK: query: select * from insert_into1
-POSTHOOK: type: QUERY
-POSTHOOK: Input: default@insert_into1
-#### A masked pattern was here ####
-1      NULL
-2      NULL
-PREHOOK: query: TRUNCATE table insert_into1
-PREHOOK: type: TRUNCATETABLE
-PREHOOK: Output: default@insert_into1
-POSTHOOK: query: TRUNCATE table insert_into1
-POSTHOOK: type: TRUNCATETABLE
-POSTHOOK: Output: default@insert_into1
-PREHOOK: query: DROP TABLE insert_into1
-PREHOOK: type: DROPTABLE
-PREHOOK: Input: default@insert_into1
-PREHOOK: Output: default@insert_into1
-POSTHOOK: query: DROP TABLE insert_into1
-POSTHOOK: type: DROPTABLE
-POSTHOOK: Input: default@insert_into1
-POSTHOOK: Output: default@insert_into1
-PREHOOK: query: CREATE TABLE insert_into1 (key int DEFAULT 1, value string, i 
int)
-    clustered by (i) into 2 buckets stored as orc TBLPROPERTIES 
('transactional'='true')
-PREHOOK: type: CREATETABLE
-PREHOOK: Output: database:default
-PREHOOK: Output: default@insert_into1
-POSTHOOK: query: CREATE TABLE insert_into1 (key int DEFAULT 1, value string, i 
int)
-    clustered by (i) into 2 buckets stored as orc TBLPROPERTIES 
('transactional'='true')
-POSTHOOK: type: CREATETABLE
-POSTHOOK: Output: database:default
-POSTHOOK: Output: default@insert_into1
-PREHOOK: query: INSERT INTO insert_into1 values(2,1, 45)
-PREHOOK: type: QUERY
-PREHOOK: Input: _dummy_database@_dummy_table
-PREHOOK: Output: default@insert_into1
-POSTHOOK: query: INSERT INTO insert_into1 values(2,1, 45)
-POSTHOOK: type: QUERY
-POSTHOOK: Input: _dummy_database@_dummy_table
-POSTHOOK: Output: default@insert_into1
-POSTHOOK: Lineage: insert_into1.i SCRIPT []
-POSTHOOK: Lineage: insert_into1.key SCRIPT []
-POSTHOOK: Lineage: insert_into1.value SCRIPT []
-PREHOOK: query: EXPLAIN UPDATE insert_into1 set key = DEFAULT where value=1
-PREHOOK: type: QUERY
-POSTHOOK: query: EXPLAIN UPDATE insert_into1 set key = DEFAULT where value=1
-POSTHOOK: type: QUERY
-STAGE DEPENDENCIES:
-  Stage-1 is a root stage
-  Stage-2 depends on stages: Stage-1
-  Stage-0 depends on stages: Stage-2
-  Stage-3 depends on stages: Stage-0
-
-STAGE PLANS:
-  Stage: Stage-1
-    Tez
-#### A masked pattern was here ####
-      Edges:
-        Reducer 2 <- Map 1 (SIMPLE_EDGE)
-#### A masked pattern was here ####
-      Vertices:
-        Map 1 
-            Map Operator Tree:
-                TableScan
-                  alias: insert_into1
-                  Statistics: Num rows: 25 Data size: 4700 Basic stats: 
COMPLETE Column stats: NONE
-                  Filter Operator
-                    predicate: (value = 1) (type: boolean)
-                    Statistics: Num rows: 5 Data size: 940 Basic stats: 
COMPLETE Column stats: NONE
-                    Select Operator
-                      expressions: ROW__ID (type: 
struct<writeid:bigint,bucketid:int,rowid:bigint>), value (type: string), i 
(type: int)
-                      outputColumnNames: _col0, _col2, _col3
-                      Statistics: Num rows: 5 Data size: 940 Basic stats: 
COMPLETE Column stats: NONE
-                      Reduce Output Operator
-                        key expressions: _col0 (type: 
struct<writeid:bigint,bucketid:int,rowid:bigint>)
-                        sort order: +
-                        Map-reduce partition columns: UDFToInteger(_col0) 
(type: int)
-                        Statistics: Num rows: 5 Data size: 940 Basic stats: 
COMPLETE Column stats: NONE
-                        value expressions: _col2 (type: string), _col3 (type: 
int)
-            Execution mode: llap
-            LLAP IO: may be used (ACID table)
-        Reducer 2 
-            Execution mode: llap
-            Reduce Operator Tree:
-              Select Operator
-                expressions: KEY.reducesinkkey0 (type: 
struct<writeid:bigint,bucketid:int,rowid:bigint>), 1 (type: int), VALUE._col0 
(type: string), VALUE._col1 (type: int)
-                outputColumnNames: _col0, _col1, _col2, _col3
-                Statistics: Num rows: 5 Data size: 940 Basic stats: COMPLETE 
Column stats: NONE
-                File Output Operator
-                  compressed: false
-                  Statistics: Num rows: 5 Data size: 940 Basic stats: COMPLETE 
Column stats: NONE
-                  table:
-                      input format: 
org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
-                      output format: 
org.apache.hadoop.hive.ql.io.orc.OrcOutputFormat
-                      serde: org.apache.hadoop.hive.ql.io.orc.OrcSerde
-                      name: default.insert_into1
-                  Write Type: UPDATE
-
-  Stage: Stage-2
-    Dependency Collection
-
-  Stage: Stage-0
-    Move Operator
-      tables:
-          replace: false
-          table:
-              input format: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
-              output format: org.apache.hadoop.hive.ql.io.orc.OrcOutputFormat
-              serde: org.apache.hadoop.hive.ql.io.orc.OrcSerde
-              name: default.insert_into1
-          Write Type: UPDATE
-
-  Stage: Stage-3
-    Stats Work
-      Basic Stats Work:
-
-PREHOOK: query: UPDATE insert_into1 set key = DEFAULT where value=1
-PREHOOK: type: QUERY
-PREHOOK: Input: default@insert_into1
-PREHOOK: Output: default@insert_into1
-POSTHOOK: query: UPDATE insert_into1 set key = DEFAULT where value=1
-POSTHOOK: type: QUERY
-POSTHOOK: Input: default@insert_into1
-POSTHOOK: Output: default@insert_into1
-PREHOOK: query: SELECT * from insert_into1
-PREHOOK: type: QUERY
-PREHOOK: Input: default@insert_into1
-#### A masked pattern was here ####
-POSTHOOK: query: SELECT * from insert_into1
-POSTHOOK: type: QUERY
-POSTHOOK: Input: default@insert_into1
-#### A masked pattern was here ####
-1      1       45
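
In this last part the DEFAULT constraint is visible in the CREATE TABLE above (key int DEFAULT 1), and UPDATE ... SET key = DEFAULT substitutes that declared default as a literal in the plan (1 (type: int)), so the matching row's key is rewritten to 1 and the SELECT returns "1 1 45". A compact sketch of the same flow, with a hypothetical table name but otherwise following the definitions shown in this output:

    -- Hedged sketch: DEFAULT on the right-hand side of an UPDATE (ACID table).
    CREATE TABLE u_default_sketch (key int DEFAULT 1, value string, i int)
        clustered by (i) into 2 buckets stored as orc
        TBLPROPERTIES ('transactional'='true');
    INSERT INTO u_default_sketch VALUES (2, 1, 45);
    UPDATE u_default_sketch SET key = DEFAULT WHERE value = 1;
    -- key is rewritten to its declared default (1); value and i are untouched.
    SELECT * FROM u_default_sketch;   -- expected row: 1  1  45
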
-PREHOOK: query: TRUNCATE table insert_into1
-PREHOOK: type: TRUNCATETABLE
-PREHOOK: Output: default@insert_into1
-POSTHOOK: query: TRUNCATE table insert_into1
-POSTHOOK: type: TRUNCATETABLE
-POSTHOOK: Output: default@insert_into1
-PREHOOK: query: INSERT INTO insert_into1 values(2,1, 45)
-PREHOOK: type: QUERY
-PREHOOK: Input: _dummy_database@_dummy_table
-PREHOOK: Output: default@insert_into1
-POSTHOOK: query: INSERT INTO insert_into1 values(2,1, 45)
-POSTHOOK: type: QUERY
-POSTHOOK: Input: _dummy_database@_dummy_table
-POSTHOOK: Output: default@insert_into1
-POSTHOOK: Lineage: insert_into1.i SCRIPT []
-POSTHOOK: Lineage: insert_into1.key SCRIPT []
-POSTHOOK: Lineage: insert_into1.value SCRIPT []
-PREHOOK: query: EXPLAIN UPDATE insert_into1 set key = DEFAULT, value=DEFAULT 
where value=1
-PREHOOK: type: QUERY
-POSTHOOK: query: EXPLAIN UPDATE insert_into1 set key = DEFAULT, value=DEFAULT 
where value=1
-POSTHOOK: type: QUERY
-STAGE DEPENDENCIES:
-  Stage-1 is a root stage
-  Stage-2 depends on stages: Stage-1
-  Stage-0 depends on stages: Stage-2
-  Stage-3 depends on stages: Stage-0
-
-STAGE PLANS:
-  Stage: Stage-1
-    Tez
-#### A masked pattern was here ####
-      Edges:
-        Reducer 2 <- Map 1 (SIMPLE_EDGE)
-#### A masked pattern was here ####
-      Vertices:
-        Map 1 
-            Map Operator Tree:
-                TableScan
-                  alias: insert_into1
-                  Statistics: Num rows: 25 Data size: 4700 Basic stats: 
COMPLETE Column stats: NONE
-                  Filter Operator
-                    predicate: (value = 1) (type: boolean)
-                    Statistics: Num rows: 5 Data size: 940 Basic stats: 
COMPLETE Column stats: NONE
-                    Select Operator
-                      expressions: ROW__ID (type: 
struct<writeid:bigint,bucketid:int,rowid:bigint>), i (type: int)
-                      outputColumnNames: _col0, _col3
-                      Statistics: Num rows: 5 Data size: 940 Basic stats: 
COMPLETE Column stats: NONE
-                      Reduce Output Operator
-                        key expressions: _col0 (type: 
struct<writeid:bigint,bucketid:int,rowid:bigint>)
-                        sort order: +
-                        Map-reduce partition columns: UDFToInteger(_col0) 
(type: int)
-                        Statistics: Num rows: 5 Data size: 940 Basic stats: 
COMPLETE Column stats: NONE
-                        value expressions: _col3 (type: int)
-            Execution mode: llap
-            LLAP IO: may be used (ACID table)
-        Reducer 2 
-            Execution mode: llap
-            Reduce Operator Tree:
-              Select Operator
-                expressions: KEY.reducesinkkey0 (type: 
struct<writeid:bigint,bucketid:int,rowid:bigint>), 1 (type: int), null (type: 
string), VALUE._col0 (type: int)
-                outputColumnNames: _col0, _col1, _col2, _col3
-                Statistics: Num rows: 5 Data size: 940 Basic stats: COMPLETE 
Column stats: NONE
-                File Output Operator
-                  compressed: false
-                  Statistics: Num rows: 5 Data size: 940 Basic stats: COMPLETE 
Column stats: NONE
-                  table:
-                      input format: 
org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
-                      output format: 
org.apache.hadoop.hive.ql.io.orc.OrcOutputFormat
-                      serde: org.apache.hadoop.hive.ql.io.orc.OrcSerde
-                      name: default.insert_into1
-                  Write Type: UPDATE
-
-  Stage: Stage-2
-    Dependency Collection
-
-  Stage: Stage-0
-    Move Operator
-      tables:
-          replace: false
-          table:
-              input format: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
-              output format: org.apache.hadoop.hive.ql.io.orc.OrcOutputFormat
-              serde: org.apache.hadoop.hive.ql.io.orc.OrcSerde
-              name: default.insert_into1
-          Write Type: UPDATE
-
-  Stage: Stage-3
-    Stats Work
-      Basic Stats Work:
-
-PREHOOK: query: UPDATE insert_into1 set key = DEFAULT, value=DEFAULT where 
value=1
-PREHOOK: type: QUERY
-PREHOOK: Input: default@insert_into1
-PREHOOK: Output: default@insert_into1
-POSTHOOK: query: UPDATE insert_into1 set key = DEFAULT, value=DEFAULT where 
value=1
-POSTHOOK: type: QUERY
-POSTHOOK: Input: default@insert_into1
-POSTHOOK: Output: default@insert_into1
-PREHOOK: query: SELECT * from insert_into1
-PREHOOK: type: QUERY
-PREHOOK: Input: default@insert_into1
-#### A masked pattern was here ####
-POSTHOOK: query

<TRUNCATED>
