http://git-wip-us.apache.org/repos/asf/hive/blob/c9246f44/ql/src/test/results/clientpositive/pcs.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/pcs.q.out b/ql/src/test/results/clientpositive/pcs.q.out
new file mode 100644
index 0000000..5cf0dff
--- /dev/null
+++ b/ql/src/test/results/clientpositive/pcs.q.out
@@ -0,0 +1,2249 @@
+PREHOOK: query: drop table pcs_t1
+PREHOOK: type: DROPTABLE
+POSTHOOK: query: drop table pcs_t1
+POSTHOOK: type: DROPTABLE
+PREHOOK: query: drop table pcs_t2
+PREHOOK: type: DROPTABLE
+POSTHOOK: query: drop table pcs_t2
+POSTHOOK: type: DROPTABLE
+PREHOOK: query: create table pcs_t1 (key int, value string) partitioned by (ds string)
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+PREHOOK: Output: default@pcs_t1
+POSTHOOK: query: create table pcs_t1 (key int, value string) partitioned by (ds string)
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@pcs_t1
+PREHOOK: query: insert overwrite table pcs_t1 partition (ds='2000-04-08') select * from src where key < 20 order by key
+PREHOOK: type: QUERY
+PREHOOK: Input: default@src
+PREHOOK: Output: default@pcs_t1@ds=2000-04-08
+POSTHOOK: query: insert overwrite table pcs_t1 partition (ds='2000-04-08') select * from src where key < 20 order by key
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@src
+POSTHOOK: Output: default@pcs_t1@ds=2000-04-08
+POSTHOOK: Lineage: pcs_t1 PARTITION(ds=2000-04-08).key EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: pcs_t1 PARTITION(ds=2000-04-08).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
+PREHOOK: query: insert overwrite table pcs_t1 partition (ds='2000-04-09') select * from src where key < 20 order by key
+PREHOOK: type: QUERY
+PREHOOK: Input: default@src
+PREHOOK: Output: default@pcs_t1@ds=2000-04-09
+POSTHOOK: query: insert overwrite table pcs_t1 partition (ds='2000-04-09') select * from src where key < 20 order by key
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@src
+POSTHOOK: Output: default@pcs_t1@ds=2000-04-09
+POSTHOOK: Lineage: pcs_t1 PARTITION(ds=2000-04-09).key EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: pcs_t1 PARTITION(ds=2000-04-09).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
+PREHOOK: query: insert overwrite table pcs_t1 partition (ds='2000-04-10') select * from src where key < 20 order by key
+PREHOOK: type: QUERY
+PREHOOK: Input: default@src
+PREHOOK: Output: default@pcs_t1@ds=2000-04-10
+POSTHOOK: query: insert overwrite table pcs_t1 partition (ds='2000-04-10') select * from src where key < 20 order by key
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@src
+POSTHOOK: Output: default@pcs_t1@ds=2000-04-10
+POSTHOOK: Lineage: pcs_t1 PARTITION(ds=2000-04-10).key EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: pcs_t1 PARTITION(ds=2000-04-10).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
+PREHOOK: query: analyze table pcs_t1 partition(ds) compute statistics
+PREHOOK: type: QUERY
+PREHOOK: Input: default@pcs_t1
+PREHOOK: Input: default@pcs_t1@ds=2000-04-08
+PREHOOK: Input: default@pcs_t1@ds=2000-04-09
+PREHOOK: Input: default@pcs_t1@ds=2000-04-10
+PREHOOK: Output: default@pcs_t1
+PREHOOK: Output: default@pcs_t1@ds=2000-04-08
+PREHOOK: Output: default@pcs_t1@ds=2000-04-09
+PREHOOK: Output: default@pcs_t1@ds=2000-04-10
+POSTHOOK: query: analyze table pcs_t1 partition(ds) compute statistics
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@pcs_t1
+POSTHOOK: Input: default@pcs_t1@ds=2000-04-08
+POSTHOOK: Input: default@pcs_t1@ds=2000-04-09
+POSTHOOK: Input: default@pcs_t1@ds=2000-04-10
+POSTHOOK: Output: default@pcs_t1
+POSTHOOK: Output: default@pcs_t1@ds=2000-04-08
+POSTHOOK: Output: default@pcs_t1@ds=2000-04-09
+POSTHOOK: Output: default@pcs_t1@ds=2000-04-10
+PREHOOK: query: analyze table pcs_t1 partition(ds) compute statistics for columns
+PREHOOK: type: QUERY
+PREHOOK: Input: default@pcs_t1
+PREHOOK: Input: default@pcs_t1@ds=2000-04-08
+PREHOOK: Input: default@pcs_t1@ds=2000-04-09
+PREHOOK: Input: default@pcs_t1@ds=2000-04-10
+#### A masked pattern was here ####
+POSTHOOK: query: analyze table pcs_t1 partition(ds) compute statistics for columns
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@pcs_t1
+POSTHOOK: Input: default@pcs_t1@ds=2000-04-08
+POSTHOOK: Input: default@pcs_t1@ds=2000-04-09
+POSTHOOK: Input: default@pcs_t1@ds=2000-04-10
+#### A masked pattern was here ####
+PREHOOK: query: explain extended select key, value, ds from pcs_t1 where (ds='2000-04-08' and key=1) or (ds='2000-04-09' and key=2) order by key, value, ds
+PREHOOK: type: QUERY
+POSTHOOK: query: explain extended select key, value, ds from pcs_t1 where (ds='2000-04-08' and key=1) or (ds='2000-04-09' and key=2) order by key, value, ds
+POSTHOOK: type: QUERY
+ABSTRACT SYNTAX TREE:
+  
+TOK_QUERY
+   TOK_FROM
+      TOK_TABREF
+         TOK_TABNAME
+            pcs_t1
+   TOK_INSERT
+      TOK_DESTINATION
+         TOK_DIR
+            TOK_TMP_FILE
+      TOK_SELECT
+         TOK_SELEXPR
+            TOK_TABLE_OR_COL
+               key
+         TOK_SELEXPR
+            TOK_TABLE_OR_COL
+               value
+         TOK_SELEXPR
+            TOK_TABLE_OR_COL
+               ds
+      TOK_WHERE
+         or
+            and
+               =
+                  TOK_TABLE_OR_COL
+                     ds
+                  '2000-04-08'
+               =
+                  TOK_TABLE_OR_COL
+                     key
+                  1
+            and
+               =
+                  TOK_TABLE_OR_COL
+                     ds
+                  '2000-04-09'
+               =
+                  TOK_TABLE_OR_COL
+                     key
+                  2
+      TOK_ORDERBY
+         TOK_TABSORTCOLNAMEASC
+            TOK_TABLE_OR_COL
+               key
+         TOK_TABSORTCOLNAMEASC
+            TOK_TABLE_OR_COL
+               value
+         TOK_TABSORTCOLNAMEASC
+            TOK_TABLE_OR_COL
+               ds
+
+
+STAGE DEPENDENCIES:
+  Stage-1 is a root stage
+  Stage-0 depends on stages: Stage-1
+
+STAGE PLANS:
+  Stage: Stage-1
+    Map Reduce
+      Map Operator Tree:
+          TableScan
+            alias: pcs_t1
+            Statistics: Num rows: 40 Data size: 320 Basic stats: COMPLETE Column stats: NONE
+            GatherStats: false
+            Filter Operator
+              isSamplingPred: false
+              predicate: (struct(key,ds)) IN (const struct(1,'2000-04-08'), const struct(2,'2000-04-09')) (type: boolean)
+              Statistics: Num rows: 20 Data size: 160 Basic stats: COMPLETE Column stats: NONE
+              Select Operator
+                expressions: key (type: int), value (type: string), ds (type: string)
+                outputColumnNames: _col0, _col1, _col2
+                Statistics: Num rows: 20 Data size: 160 Basic stats: COMPLETE Column stats: NONE
+                Reduce Output Operator
+                  key expressions: _col0 (type: int), _col1 (type: string), _col2 (type: string)
+                  sort order: +++
+                  Statistics: Num rows: 20 Data size: 160 Basic stats: COMPLETE Column stats: NONE
+                  tag: -1
+                  auto parallelism: false
+      Path -> Alias:
+#### A masked pattern was here ####
+      Path -> Partition:
+#### A masked pattern was here ####
+          Partition
+            base file name: ds=2000-04-08
+            input format: org.apache.hadoop.mapred.TextInputFormat
+            output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+            partition values:
+              ds 2000-04-08
+            properties:
+              COLUMN_STATS_ACCURATE true
+              bucket_count -1
+              columns key,value
+              columns.comments 
+              columns.types int:string
+#### A masked pattern was here ####
+              name default.pcs_t1
+              numFiles 1
+              numRows 20
+              partition_columns ds
+              partition_columns.types string
+              rawDataSize 160
+              serialization.ddl struct pcs_t1 { i32 key, string value}
+              serialization.format 1
+              serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+              totalSize 180
+#### A masked pattern was here ####
+            serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+          
+              input format: org.apache.hadoop.mapred.TextInputFormat
+              output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+              properties:
+                bucket_count -1
+                columns key,value
+                columns.comments 
+                columns.types int:string
+#### A masked pattern was here ####
+                name default.pcs_t1
+                partition_columns ds
+                partition_columns.types string
+                serialization.ddl struct pcs_t1 { i32 key, string value}
+                serialization.format 1
+                serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+#### A masked pattern was here ####
+              serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+              name: default.pcs_t1
+            name: default.pcs_t1
+#### A masked pattern was here ####
+          Partition
+            base file name: ds=2000-04-09
+            input format: org.apache.hadoop.mapred.TextInputFormat
+            output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+            partition values:
+              ds 2000-04-09
+            properties:
+              COLUMN_STATS_ACCURATE true
+              bucket_count -1
+              columns key,value
+              columns.comments 
+              columns.types int:string
+#### A masked pattern was here ####
+              name default.pcs_t1
+              numFiles 1
+              numRows 20
+              partition_columns ds
+              partition_columns.types string
+              rawDataSize 160
+              serialization.ddl struct pcs_t1 { i32 key, string value}
+              serialization.format 1
+              serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+              totalSize 180
+#### A masked pattern was here ####
+            serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+          
+              input format: org.apache.hadoop.mapred.TextInputFormat
+              output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+              properties:
+                bucket_count -1
+                columns key,value
+                columns.comments 
+                columns.types int:string
+#### A masked pattern was here ####
+                name default.pcs_t1
+                partition_columns ds
+                partition_columns.types string
+                serialization.ddl struct pcs_t1 { i32 key, string value}
+                serialization.format 1
+                serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+#### A masked pattern was here ####
+              serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+              name: default.pcs_t1
+            name: default.pcs_t1
+      Truncated Path -> Alias:
+        /pcs_t1/ds=2000-04-08 [pcs_t1]
+        /pcs_t1/ds=2000-04-09 [pcs_t1]
+      Needs Tagging: false
+      Reduce Operator Tree:
+        Select Operator
+          expressions: KEY.reducesinkkey0 (type: int), KEY.reducesinkkey1 (type: string), KEY.reducesinkkey2 (type: string)
+          outputColumnNames: _col0, _col1, _col2
+          Statistics: Num rows: 20 Data size: 160 Basic stats: COMPLETE Column stats: NONE
+          File Output Operator
+            compressed: false
+            GlobalTableId: 0
+#### A masked pattern was here ####
+            NumFilesPerFileSink: 1
+            Statistics: Num rows: 20 Data size: 160 Basic stats: COMPLETE Column stats: NONE
+#### A masked pattern was here ####
+            table:
+                input format: org.apache.hadoop.mapred.TextInputFormat
+                output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+                properties:
+                  columns _col0,_col1,_col2
+                  columns.types int:string:string
+                  escape.delim \
+                  hive.serialization.extend.additional.nesting.levels true
+                  serialization.escape.crlf true
+                  serialization.format 1
+                  serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+            TotalFiles: 1
+            GatherStats: false
+            MultiFileSpray: false
+
+  Stage: Stage-0
+    Fetch Operator
+      limit: -1
+      Processor Tree:
+        ListSink
+
+PREHOOK: query: select key, value, ds from pcs_t1 where (ds='2000-04-08' and key=1) or (ds='2000-04-09' and key=2) order by key, value, ds
+PREHOOK: type: QUERY
+PREHOOK: Input: default@pcs_t1
+PREHOOK: Input: default@pcs_t1@ds=2000-04-08
+PREHOOK: Input: default@pcs_t1@ds=2000-04-09
+#### A masked pattern was here ####
+POSTHOOK: query: select key, value, ds from pcs_t1 where (ds='2000-04-08' and key=1) or (ds='2000-04-09' and key=2) order by key, value, ds
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@pcs_t1
+POSTHOOK: Input: default@pcs_t1@ds=2000-04-08
+POSTHOOK: Input: default@pcs_t1@ds=2000-04-09
+#### A masked pattern was here ####
+2      val_2   2000-04-09
+PREHOOK: query: explain extended select ds from pcs_t1 where struct(ds, key) in (struct('2000-04-08',1), struct('2000-04-09',2))
+PREHOOK: type: QUERY
+POSTHOOK: query: explain extended select ds from pcs_t1 where struct(ds, key) in (struct('2000-04-08',1), struct('2000-04-09',2))
+POSTHOOK: type: QUERY
+ABSTRACT SYNTAX TREE:
+  
+TOK_QUERY
+   TOK_FROM
+      TOK_TABREF
+         TOK_TABNAME
+            pcs_t1
+   TOK_INSERT
+      TOK_DESTINATION
+         TOK_DIR
+            TOK_TMP_FILE
+      TOK_SELECT
+         TOK_SELEXPR
+            TOK_TABLE_OR_COL
+               ds
+      TOK_WHERE
+         TOK_FUNCTION
+            in
+            TOK_FUNCTION
+               struct
+               TOK_TABLE_OR_COL
+                  ds
+               TOK_TABLE_OR_COL
+                  key
+            TOK_FUNCTION
+               struct
+               '2000-04-08'
+               1
+            TOK_FUNCTION
+               struct
+               '2000-04-09'
+               2
+
+
+STAGE DEPENDENCIES:
+  Stage-1 is a root stage
+  Stage-0 depends on stages: Stage-1
+
+STAGE PLANS:
+  Stage: Stage-1
+    Map Reduce
+      Map Operator Tree:
+          TableScan
+            alias: pcs_t1
+            Statistics: Num rows: 40 Data size: 320 Basic stats: COMPLETE Column stats: NONE
+            GatherStats: false
+            Filter Operator
+              isSamplingPred: false
+              predicate: (struct(ds,key)) IN (const struct('2000-04-08',1), const struct('2000-04-09',2)) (type: boolean)
+              Statistics: Num rows: 20 Data size: 160 Basic stats: COMPLETE Column stats: NONE
+              Select Operator
+                expressions: ds (type: string)
+                outputColumnNames: _col0
+                Statistics: Num rows: 20 Data size: 160 Basic stats: COMPLETE Column stats: NONE
+                File Output Operator
+                  compressed: false
+                  GlobalTableId: 0
+#### A masked pattern was here ####
+                  NumFilesPerFileSink: 1
+                  Statistics: Num rows: 20 Data size: 160 Basic stats: COMPLETE Column stats: NONE
+#### A masked pattern was here ####
+                  table:
+                      input format: org.apache.hadoop.mapred.TextInputFormat
+                      output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+                      properties:
+                        columns _col0
+                        columns.types string
+                        escape.delim \
+                        hive.serialization.extend.additional.nesting.levels true
+                        serialization.escape.crlf true
+                        serialization.format 1
+                        serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                      serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                  TotalFiles: 1
+                  GatherStats: false
+                  MultiFileSpray: false
+      Path -> Alias:
+#### A masked pattern was here ####
+      Path -> Partition:
+#### A masked pattern was here ####
+          Partition
+            base file name: ds=2000-04-08
+            input format: org.apache.hadoop.mapred.TextInputFormat
+            output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+            partition values:
+              ds 2000-04-08
+            properties:
+              COLUMN_STATS_ACCURATE true
+              bucket_count -1
+              columns key,value
+              columns.comments 
+              columns.types int:string
+#### A masked pattern was here ####
+              name default.pcs_t1
+              numFiles 1
+              numRows 20
+              partition_columns ds
+              partition_columns.types string
+              rawDataSize 160
+              serialization.ddl struct pcs_t1 { i32 key, string value}
+              serialization.format 1
+              serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+              totalSize 180
+#### A masked pattern was here ####
+            serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+          
+              input format: org.apache.hadoop.mapred.TextInputFormat
+              output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+              properties:
+                bucket_count -1
+                columns key,value
+                columns.comments 
+                columns.types int:string
+#### A masked pattern was here ####
+                name default.pcs_t1
+                partition_columns ds
+                partition_columns.types string
+                serialization.ddl struct pcs_t1 { i32 key, string value}
+                serialization.format 1
+                serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+#### A masked pattern was here ####
+              serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+              name: default.pcs_t1
+            name: default.pcs_t1
+#### A masked pattern was here ####
+          Partition
+            base file name: ds=2000-04-09
+            input format: org.apache.hadoop.mapred.TextInputFormat
+            output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+            partition values:
+              ds 2000-04-09
+            properties:
+              COLUMN_STATS_ACCURATE true
+              bucket_count -1
+              columns key,value
+              columns.comments 
+              columns.types int:string
+#### A masked pattern was here ####
+              name default.pcs_t1
+              numFiles 1
+              numRows 20
+              partition_columns ds
+              partition_columns.types string
+              rawDataSize 160
+              serialization.ddl struct pcs_t1 { i32 key, string value}
+              serialization.format 1
+              serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+              totalSize 180
+#### A masked pattern was here ####
+            serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+          
+              input format: org.apache.hadoop.mapred.TextInputFormat
+              output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+              properties:
+                bucket_count -1
+                columns key,value
+                columns.comments 
+                columns.types int:string
+#### A masked pattern was here ####
+                name default.pcs_t1
+                partition_columns ds
+                partition_columns.types string
+                serialization.ddl struct pcs_t1 { i32 key, string value}
+                serialization.format 1
+                serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+#### A masked pattern was here ####
+              serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+              name: default.pcs_t1
+            name: default.pcs_t1
+      Truncated Path -> Alias:
+        /pcs_t1/ds=2000-04-08 [pcs_t1]
+        /pcs_t1/ds=2000-04-09 [pcs_t1]
+
+  Stage: Stage-0
+    Fetch Operator
+      limit: -1
+      Processor Tree:
+        ListSink
+
+PREHOOK: query: select ds from pcs_t1 where struct(ds, key) in (struct('2000-04-08',1), struct('2000-04-09',2))
+PREHOOK: type: QUERY
+PREHOOK: Input: default@pcs_t1
+PREHOOK: Input: default@pcs_t1@ds=2000-04-08
+PREHOOK: Input: default@pcs_t1@ds=2000-04-09
+#### A masked pattern was here ####
+POSTHOOK: query: select ds from pcs_t1 where struct(ds, key) in (struct('2000-04-08',1), struct('2000-04-09',2))
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@pcs_t1
+POSTHOOK: Input: default@pcs_t1@ds=2000-04-08
+POSTHOOK: Input: default@pcs_t1@ds=2000-04-09
+#### A masked pattern was here ####
+2000-04-09
+PREHOOK: query: explain extended select ds from pcs_t1 where struct(ds, key+2) in (struct('2000-04-08',3), struct('2000-04-09',4))
+PREHOOK: type: QUERY
+POSTHOOK: query: explain extended select ds from pcs_t1 where struct(ds, key+2) in (struct('2000-04-08',3), struct('2000-04-09',4))
+POSTHOOK: type: QUERY
+ABSTRACT SYNTAX TREE:
+  
+TOK_QUERY
+   TOK_FROM
+      TOK_TABREF
+         TOK_TABNAME
+            pcs_t1
+   TOK_INSERT
+      TOK_DESTINATION
+         TOK_DIR
+            TOK_TMP_FILE
+      TOK_SELECT
+         TOK_SELEXPR
+            TOK_TABLE_OR_COL
+               ds
+      TOK_WHERE
+         TOK_FUNCTION
+            in
+            TOK_FUNCTION
+               struct
+               TOK_TABLE_OR_COL
+                  ds
+               +
+                  TOK_TABLE_OR_COL
+                     key
+                  2
+            TOK_FUNCTION
+               struct
+               '2000-04-08'
+               3
+            TOK_FUNCTION
+               struct
+               '2000-04-09'
+               4
+
+
+STAGE DEPENDENCIES:
+  Stage-1 is a root stage
+  Stage-0 depends on stages: Stage-1
+
+STAGE PLANS:
+  Stage: Stage-1
+    Map Reduce
+      Map Operator Tree:
+          TableScan
+            alias: pcs_t1
+            Statistics: Num rows: 40 Data size: 320 Basic stats: COMPLETE Column stats: NONE
+            GatherStats: false
+            Filter Operator
+              isSamplingPred: false
+              predicate: (struct(ds,(key + 2))) IN (const struct('2000-04-08',3), const struct('2000-04-09',4)) (type: boolean)
+              Statistics: Num rows: 20 Data size: 160 Basic stats: COMPLETE Column stats: NONE
+              Select Operator
+                expressions: ds (type: string)
+                outputColumnNames: _col0
+                Statistics: Num rows: 20 Data size: 160 Basic stats: COMPLETE Column stats: NONE
+                File Output Operator
+                  compressed: false
+                  GlobalTableId: 0
+#### A masked pattern was here ####
+                  NumFilesPerFileSink: 1
+                  Statistics: Num rows: 20 Data size: 160 Basic stats: COMPLETE Column stats: NONE
+#### A masked pattern was here ####
+                  table:
+                      input format: org.apache.hadoop.mapred.TextInputFormat
+                      output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+                      properties:
+                        columns _col0
+                        columns.types string
+                        escape.delim \
+                        hive.serialization.extend.additional.nesting.levels true
+                        serialization.escape.crlf true
+                        serialization.format 1
+                        serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                      serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                  TotalFiles: 1
+                  GatherStats: false
+                  MultiFileSpray: false
+      Path -> Alias:
+#### A masked pattern was here ####
+      Path -> Partition:
+#### A masked pattern was here ####
+          Partition
+            base file name: ds=2000-04-08
+            input format: org.apache.hadoop.mapred.TextInputFormat
+            output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+            partition values:
+              ds 2000-04-08
+            properties:
+              COLUMN_STATS_ACCURATE true
+              bucket_count -1
+              columns key,value
+              columns.comments 
+              columns.types int:string
+#### A masked pattern was here ####
+              name default.pcs_t1
+              numFiles 1
+              numRows 20
+              partition_columns ds
+              partition_columns.types string
+              rawDataSize 160
+              serialization.ddl struct pcs_t1 { i32 key, string value}
+              serialization.format 1
+              serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+              totalSize 180
+#### A masked pattern was here ####
+            serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+          
+              input format: org.apache.hadoop.mapred.TextInputFormat
+              output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+              properties:
+                bucket_count -1
+                columns key,value
+                columns.comments 
+                columns.types int:string
+#### A masked pattern was here ####
+                name default.pcs_t1
+                partition_columns ds
+                partition_columns.types string
+                serialization.ddl struct pcs_t1 { i32 key, string value}
+                serialization.format 1
+                serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+#### A masked pattern was here ####
+              serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+              name: default.pcs_t1
+            name: default.pcs_t1
+#### A masked pattern was here ####
+          Partition
+            base file name: ds=2000-04-09
+            input format: org.apache.hadoop.mapred.TextInputFormat
+            output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+            partition values:
+              ds 2000-04-09
+            properties:
+              COLUMN_STATS_ACCURATE true
+              bucket_count -1
+              columns key,value
+              columns.comments 
+              columns.types int:string
+#### A masked pattern was here ####
+              name default.pcs_t1
+              numFiles 1
+              numRows 20
+              partition_columns ds
+              partition_columns.types string
+              rawDataSize 160
+              serialization.ddl struct pcs_t1 { i32 key, string value}
+              serialization.format 1
+              serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+              totalSize 180
+#### A masked pattern was here ####
+            serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+          
+              input format: org.apache.hadoop.mapred.TextInputFormat
+              output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+              properties:
+                bucket_count -1
+                columns key,value
+                columns.comments 
+                columns.types int:string
+#### A masked pattern was here ####
+                name default.pcs_t1
+                partition_columns ds
+                partition_columns.types string
+                serialization.ddl struct pcs_t1 { i32 key, string value}
+                serialization.format 1
+                serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+#### A masked pattern was here ####
+              serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+              name: default.pcs_t1
+            name: default.pcs_t1
+      Truncated Path -> Alias:
+        /pcs_t1/ds=2000-04-08 [pcs_t1]
+        /pcs_t1/ds=2000-04-09 [pcs_t1]
+
+  Stage: Stage-0
+    Fetch Operator
+      limit: -1
+      Processor Tree:
+        ListSink
+
+PREHOOK: query: select ds from pcs_t1 where struct(ds, key+2) in (struct('2000-04-08',3), struct('2000-04-09',4))
+PREHOOK: type: QUERY
+PREHOOK: Input: default@pcs_t1
+PREHOOK: Input: default@pcs_t1@ds=2000-04-08
+PREHOOK: Input: default@pcs_t1@ds=2000-04-09
+#### A masked pattern was here ####
+POSTHOOK: query: select ds from pcs_t1 where struct(ds, key+2) in (struct('2000-04-08',3), struct('2000-04-09',4))
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@pcs_t1
+POSTHOOK: Input: default@pcs_t1@ds=2000-04-08
+POSTHOOK: Input: default@pcs_t1@ds=2000-04-09
+#### A masked pattern was here ####
+2000-04-09
+PREHOOK: query: explain extended select /*+ MAPJOIN(pcs_t1) */ a.ds, b.key from pcs_t1 a join pcs_t1 b  on a.ds=b.ds where struct(a.ds, a.key, b.ds) in (struct('2000-04-08',1, '2000-04-09'), struct('2000-04-09',2, '2000-04-08'))
+PREHOOK: type: QUERY
+POSTHOOK: query: explain extended select /*+ MAPJOIN(pcs_t1) */ a.ds, b.key from pcs_t1 a join pcs_t1 b  on a.ds=b.ds where struct(a.ds, a.key, b.ds) in (struct('2000-04-08',1, '2000-04-09'), struct('2000-04-09',2, '2000-04-08'))
+POSTHOOK: type: QUERY
+ABSTRACT SYNTAX TREE:
+  
+TOK_QUERY
+   TOK_FROM
+      TOK_JOIN
+         TOK_TABREF
+            TOK_TABNAME
+               pcs_t1
+            a
+         TOK_TABREF
+            TOK_TABNAME
+               pcs_t1
+            b
+         =
+            .
+               TOK_TABLE_OR_COL
+                  a
+               ds
+            .
+               TOK_TABLE_OR_COL
+                  b
+               ds
+   TOK_INSERT
+      TOK_DESTINATION
+         TOK_DIR
+            TOK_TMP_FILE
+      TOK_SELECT
+         TOK_HINTLIST
+            TOK_HINT
+               TOK_MAPJOIN
+               TOK_HINTARGLIST
+                  pcs_t1
+         TOK_SELEXPR
+            .
+               TOK_TABLE_OR_COL
+                  a
+               ds
+         TOK_SELEXPR
+            .
+               TOK_TABLE_OR_COL
+                  b
+               key
+      TOK_WHERE
+         TOK_FUNCTION
+            in
+            TOK_FUNCTION
+               struct
+               .
+                  TOK_TABLE_OR_COL
+                     a
+                  ds
+               .
+                  TOK_TABLE_OR_COL
+                     a
+                  key
+               .
+                  TOK_TABLE_OR_COL
+                     b
+                  ds
+            TOK_FUNCTION
+               struct
+               '2000-04-08'
+               1
+               '2000-04-09'
+            TOK_FUNCTION
+               struct
+               '2000-04-09'
+               2
+               '2000-04-08'
+
+
+STAGE DEPENDENCIES:
+  Stage-1 is a root stage
+  Stage-0 depends on stages: Stage-1
+
+STAGE PLANS:
+  Stage: Stage-1
+    Map Reduce
+      Map Operator Tree:
+          TableScan
+            alias: a
+            Statistics: Num rows: 40 Data size: 320 Basic stats: COMPLETE Column stats: NONE
+            GatherStats: false
+            Reduce Output Operator
+              key expressions: ds (type: string)
+              sort order: +
+              Map-reduce partition columns: ds (type: string)
+              Statistics: Num rows: 40 Data size: 320 Basic stats: COMPLETE Column stats: NONE
+              tag: 0
+              value expressions: key (type: int)
+              auto parallelism: false
+          TableScan
+            alias: b
+            Statistics: Num rows: 40 Data size: 320 Basic stats: COMPLETE Column stats: NONE
+            GatherStats: false
+            Reduce Output Operator
+              key expressions: ds (type: string)
+              sort order: +
+              Map-reduce partition columns: ds (type: string)
+              Statistics: Num rows: 40 Data size: 320 Basic stats: COMPLETE Column stats: NONE
+              tag: 1
+              value expressions: key (type: int)
+              auto parallelism: false
+      Path -> Alias:
+#### A masked pattern was here ####
+      Path -> Partition:
+#### A masked pattern was here ####
+          Partition
+            base file name: ds=2000-04-08
+            input format: org.apache.hadoop.mapred.TextInputFormat
+            output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+            partition values:
+              ds 2000-04-08
+            properties:
+              COLUMN_STATS_ACCURATE true
+              bucket_count -1
+              columns key,value
+              columns.comments 
+              columns.types int:string
+#### A masked pattern was here ####
+              name default.pcs_t1
+              numFiles 1
+              numRows 20
+              partition_columns ds
+              partition_columns.types string
+              rawDataSize 160
+              serialization.ddl struct pcs_t1 { i32 key, string value}
+              serialization.format 1
+              serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+              totalSize 180
+#### A masked pattern was here ####
+            serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+          
+              input format: org.apache.hadoop.mapred.TextInputFormat
+              output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+              properties:
+                bucket_count -1
+                columns key,value
+                columns.comments 
+                columns.types int:string
+#### A masked pattern was here ####
+                name default.pcs_t1
+                partition_columns ds
+                partition_columns.types string
+                serialization.ddl struct pcs_t1 { i32 key, string value}
+                serialization.format 1
+                serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+#### A masked pattern was here ####
+              serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+              name: default.pcs_t1
+            name: default.pcs_t1
+#### A masked pattern was here ####
+          Partition
+            base file name: ds=2000-04-09
+            input format: org.apache.hadoop.mapred.TextInputFormat
+            output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+            partition values:
+              ds 2000-04-09
+            properties:
+              COLUMN_STATS_ACCURATE true
+              bucket_count -1
+              columns key,value
+              columns.comments 
+              columns.types int:string
+#### A masked pattern was here ####
+              name default.pcs_t1
+              numFiles 1
+              numRows 20
+              partition_columns ds
+              partition_columns.types string
+              rawDataSize 160
+              serialization.ddl struct pcs_t1 { i32 key, string value}
+              serialization.format 1
+              serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+              totalSize 180
+#### A masked pattern was here ####
+            serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+          
+              input format: org.apache.hadoop.mapred.TextInputFormat
+              output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+              properties:
+                bucket_count -1
+                columns key,value
+                columns.comments 
+                columns.types int:string
+#### A masked pattern was here ####
+                name default.pcs_t1
+                partition_columns ds
+                partition_columns.types string
+                serialization.ddl struct pcs_t1 { i32 key, string value}
+                serialization.format 1
+                serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+#### A masked pattern was here ####
+              serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+              name: default.pcs_t1
+            name: default.pcs_t1
+      Truncated Path -> Alias:
+        /pcs_t1/ds=2000-04-08 [a, b]
+        /pcs_t1/ds=2000-04-09 [a, b]
+      Needs Tagging: true
+      Reduce Operator Tree:
+        Join Operator
+          condition map:
+               Inner Join 0 to 1
+          keys:
+            0 ds (type: string)
+            1 ds (type: string)
+          outputColumnNames: _col0, _col2, _col6, _col8
+          Statistics: Num rows: 44 Data size: 352 Basic stats: COMPLETE Column stats: NONE
+          Filter Operator
+            isSamplingPred: false
+            predicate: (struct(_col2,_col0,_col8)) IN (const struct('2000-04-08',1,'2000-04-09'), const struct('2000-04-09',2,'2000-04-08')) (type: boolean)
+            Statistics: Num rows: 22 Data size: 176 Basic stats: COMPLETE Column stats: NONE
+            Select Operator
+              expressions: _col2 (type: string), _col6 (type: int)
+              outputColumnNames: _col0, _col1
+              Statistics: Num rows: 22 Data size: 176 Basic stats: COMPLETE Column stats: NONE
+              File Output Operator
+                compressed: false
+                GlobalTableId: 0
+#### A masked pattern was here ####
+                NumFilesPerFileSink: 1
+                Statistics: Num rows: 22 Data size: 176 Basic stats: COMPLETE Column stats: NONE
+#### A masked pattern was here ####
+                table:
+                    input format: org.apache.hadoop.mapred.TextInputFormat
+                    output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+                    properties:
+                      columns _col0,_col1
+                      columns.types string:int
+                      escape.delim \
+                      hive.serialization.extend.additional.nesting.levels true
+                      serialization.escape.crlf true
+                      serialization.format 1
+                      serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                    serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                TotalFiles: 1
+                GatherStats: false
+                MultiFileSpray: false
+
+  Stage: Stage-0
+    Fetch Operator
+      limit: -1
+      Processor Tree:
+        ListSink
+
+PREHOOK: query: select /*+ MAPJOIN(pcs_t1) */ a.ds, b.key from pcs_t1 a join pcs_t1 b  on a.ds=b.ds where struct(a.ds, a.key, b.ds) in (struct('2000-04-08',1, '2000-04-09'), struct('2000-04-09',2, '2000-04-08'))
+PREHOOK: type: QUERY
+PREHOOK: Input: default@pcs_t1
+PREHOOK: Input: default@pcs_t1@ds=2000-04-08
+PREHOOK: Input: default@pcs_t1@ds=2000-04-09
+#### A masked pattern was here ####
+POSTHOOK: query: select /*+ MAPJOIN(pcs_t1) */ a.ds, b.key from pcs_t1 a join pcs_t1 b  on a.ds=b.ds where struct(a.ds, a.key, b.ds) in (struct('2000-04-08',1, '2000-04-09'), struct('2000-04-09',2, '2000-04-08'))
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@pcs_t1
+POSTHOOK: Input: default@pcs_t1@ds=2000-04-08
+POSTHOOK: Input: default@pcs_t1@ds=2000-04-09
+#### A masked pattern was here ####
+PREHOOK: query: explain extended select ds from pcs_t1 where struct(ds, key+key) in (struct('2000-04-08',1), struct('2000-04-09',2))
+PREHOOK: type: QUERY
+POSTHOOK: query: explain extended select ds from pcs_t1 where struct(ds, key+key) in (struct('2000-04-08',1), struct('2000-04-09',2))
+POSTHOOK: type: QUERY
+ABSTRACT SYNTAX TREE:
+  
+TOK_QUERY
+   TOK_FROM
+      TOK_TABREF
+         TOK_TABNAME
+            pcs_t1
+   TOK_INSERT
+      TOK_DESTINATION
+         TOK_DIR
+            TOK_TMP_FILE
+      TOK_SELECT
+         TOK_SELEXPR
+            TOK_TABLE_OR_COL
+               ds
+      TOK_WHERE
+         TOK_FUNCTION
+            in
+            TOK_FUNCTION
+               struct
+               TOK_TABLE_OR_COL
+                  ds
+               +
+                  TOK_TABLE_OR_COL
+                     key
+                  TOK_TABLE_OR_COL
+                     key
+            TOK_FUNCTION
+               struct
+               '2000-04-08'
+               1
+            TOK_FUNCTION
+               struct
+               '2000-04-09'
+               2
+
+
+STAGE DEPENDENCIES:
+  Stage-1 is a root stage
+  Stage-0 depends on stages: Stage-1
+
+STAGE PLANS:
+  Stage: Stage-1
+    Map Reduce
+      Map Operator Tree:
+          TableScan
+            alias: pcs_t1
+            Statistics: Num rows: 40 Data size: 320 Basic stats: COMPLETE Column stats: NONE
+            GatherStats: false
+            Filter Operator
+              isSamplingPred: false
+              predicate: (struct(ds,(key + key))) IN (const struct('2000-04-08',1), const struct('2000-04-09',2)) (type: boolean)
+              Statistics: Num rows: 20 Data size: 160 Basic stats: COMPLETE Column stats: NONE
+              Select Operator
+                expressions: ds (type: string)
+                outputColumnNames: _col0
+                Statistics: Num rows: 20 Data size: 160 Basic stats: COMPLETE Column stats: NONE
+                File Output Operator
+                  compressed: false
+                  GlobalTableId: 0
+#### A masked pattern was here ####
+                  NumFilesPerFileSink: 1
+                  Statistics: Num rows: 20 Data size: 160 Basic stats: COMPLETE Column stats: NONE
+#### A masked pattern was here ####
+                  table:
+                      input format: org.apache.hadoop.mapred.TextInputFormat
+                      output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+                      properties:
+                        columns _col0
+                        columns.types string
+                        escape.delim \
+                        hive.serialization.extend.additional.nesting.levels true
+                        serialization.escape.crlf true
+                        serialization.format 1
+                        serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                      serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                  TotalFiles: 1
+                  GatherStats: false
+                  MultiFileSpray: false
+      Path -> Alias:
+#### A masked pattern was here ####
+      Path -> Partition:
+#### A masked pattern was here ####
+          Partition
+            base file name: ds=2000-04-08
+            input format: org.apache.hadoop.mapred.TextInputFormat
+            output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+            partition values:
+              ds 2000-04-08
+            properties:
+              COLUMN_STATS_ACCURATE true
+              bucket_count -1
+              columns key,value
+              columns.comments 
+              columns.types int:string
+#### A masked pattern was here ####
+              name default.pcs_t1
+              numFiles 1
+              numRows 20
+              partition_columns ds
+              partition_columns.types string
+              rawDataSize 160
+              serialization.ddl struct pcs_t1 { i32 key, string value}
+              serialization.format 1
+              serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+              totalSize 180
+#### A masked pattern was here ####
+            serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+          
+              input format: org.apache.hadoop.mapred.TextInputFormat
+              output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+              properties:
+                bucket_count -1
+                columns key,value
+                columns.comments 
+                columns.types int:string
+#### A masked pattern was here ####
+                name default.pcs_t1
+                partition_columns ds
+                partition_columns.types string
+                serialization.ddl struct pcs_t1 { i32 key, string value}
+                serialization.format 1
+                serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+#### A masked pattern was here ####
+              serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+              name: default.pcs_t1
+            name: default.pcs_t1
+#### A masked pattern was here ####
+          Partition
+            base file name: ds=2000-04-09
+            input format: org.apache.hadoop.mapred.TextInputFormat
+            output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+            partition values:
+              ds 2000-04-09
+            properties:
+              COLUMN_STATS_ACCURATE true
+              bucket_count -1
+              columns key,value
+              columns.comments 
+              columns.types int:string
+#### A masked pattern was here ####
+              name default.pcs_t1
+              numFiles 1
+              numRows 20
+              partition_columns ds
+              partition_columns.types string
+              rawDataSize 160
+              serialization.ddl struct pcs_t1 { i32 key, string value}
+              serialization.format 1
+              serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+              totalSize 180
+#### A masked pattern was here ####
+            serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+          
+              input format: org.apache.hadoop.mapred.TextInputFormat
+              output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+              properties:
+                bucket_count -1
+                columns key,value
+                columns.comments 
+                columns.types int:string
+#### A masked pattern was here ####
+                name default.pcs_t1
+                partition_columns ds
+                partition_columns.types string
+                serialization.ddl struct pcs_t1 { i32 key, string value}
+                serialization.format 1
+                serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+#### A masked pattern was here ####
+              serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+              name: default.pcs_t1
+            name: default.pcs_t1
+      Truncated Path -> Alias:
+        /pcs_t1/ds=2000-04-08 [pcs_t1]
+        /pcs_t1/ds=2000-04-09 [pcs_t1]
+
+  Stage: Stage-0
+    Fetch Operator
+      limit: -1
+      Processor Tree:
+        ListSink
+
+PREHOOK: query: select ds from pcs_t1 where struct(ds, key+key) in (struct('2000-04-08',1), struct('2000-04-09',2))
+PREHOOK: type: QUERY
+PREHOOK: Input: default@pcs_t1
+PREHOOK: Input: default@pcs_t1@ds=2000-04-08
+PREHOOK: Input: default@pcs_t1@ds=2000-04-09
+#### A masked pattern was here ####
+POSTHOOK: query: select ds from pcs_t1 where struct(ds, key+key) in (struct('2000-04-08',1), struct('2000-04-09',2))
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@pcs_t1
+POSTHOOK: Input: default@pcs_t1@ds=2000-04-08
+POSTHOOK: Input: default@pcs_t1@ds=2000-04-09
+#### A masked pattern was here ####
+PREHOOK: query: explain select lag(key) over (partition by key) as c1
+from pcs_t1 where struct(ds, key) in (struct('2000-04-08',1), struct('2000-04-09',2))
+PREHOOK: type: QUERY
+POSTHOOK: query: explain select lag(key) over (partition by key) as c1
+from pcs_t1 where struct(ds, key) in (struct('2000-04-08',1), struct('2000-04-09',2))
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+  Stage-1 is a root stage
+  Stage-0 depends on stages: Stage-1
+
+STAGE PLANS:
+  Stage: Stage-1
+    Map Reduce
+      Map Operator Tree:
+          TableScan
+            alias: pcs_t1
+            Statistics: Num rows: 40 Data size: 320 Basic stats: COMPLETE Column stats: NONE
+            Filter Operator
+              predicate: (struct(ds,key)) IN (const struct('2000-04-08',1), const struct('2000-04-09',2)) (type: boolean)
+              Statistics: Num rows: 20 Data size: 160 Basic stats: COMPLETE Column stats: NONE
+              Reduce Output Operator
+                key expressions: key (type: int)
+                sort order: +
+                Map-reduce partition columns: key (type: int)
+                Statistics: Num rows: 20 Data size: 160 Basic stats: COMPLETE Column stats: NONE
+      Reduce Operator Tree:
+        Select Operator
+          expressions: KEY.reducesinkkey0 (type: int)
+          outputColumnNames: _col0
+          Statistics: Num rows: 20 Data size: 160 Basic stats: COMPLETE Column stats: NONE
+          PTF Operator
+            Function definitions:
+                Input definition
+                  input alias: ptf_0
+                  output shape: _col0: int
+                  type: WINDOWING
+                Windowing table definition
+                  input alias: ptf_1
+                  name: windowingtablefunction
+                  order by: _col0
+                  partition by: _col0
+                  raw input shape:
+                  window functions:
+                      window function definition
+                        alias: lag_window_0
+                        arguments: _col0
+                        name: lag
+                        window function: GenericUDAFLagEvaluator
+                        window frame: PRECEDING(MAX)~FOLLOWING(MAX)
+                        isPivotResult: true
+            Statistics: Num rows: 20 Data size: 160 Basic stats: COMPLETE Column stats: NONE
+            Select Operator
+              expressions: lag_window_0 (type: int)
+              outputColumnNames: _col0
+              Statistics: Num rows: 20 Data size: 160 Basic stats: COMPLETE Column stats: NONE
+              File Output Operator
+                compressed: false
+                Statistics: Num rows: 20 Data size: 160 Basic stats: COMPLETE Column stats: NONE
+                table:
+                    input format: org.apache.hadoop.mapred.TextInputFormat
+                    output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+                    serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+
+  Stage: Stage-0
+    Fetch Operator
+      limit: -1
+      Processor Tree:
+        ListSink
+
+PREHOOK: query: select lag(key) over (partition by key) as c1
+from pcs_t1 where struct(ds, key) in (struct('2000-04-08',1), struct('2000-04-09',2))
+PREHOOK: type: QUERY
+PREHOOK: Input: default@pcs_t1
+PREHOOK: Input: default@pcs_t1@ds=2000-04-08
+PREHOOK: Input: default@pcs_t1@ds=2000-04-09
+#### A masked pattern was here ####
+POSTHOOK: query: select lag(key) over (partition by key) as c1
+from pcs_t1 where struct(ds, key) in (struct('2000-04-08',1), struct('2000-04-09',2))
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@pcs_t1
+POSTHOOK: Input: default@pcs_t1@ds=2000-04-08
+POSTHOOK: Input: default@pcs_t1@ds=2000-04-09
+#### A masked pattern was here ####
+NULL
+PREHOOK: query: EXPLAIN EXTENDED
+SELECT * FROM (
+  SELECT X.* FROM pcs_t1 X WHERE struct(X.ds, X.key) in (struct('2000-04-08',1), struct('2000-04-09',2))
+  UNION ALL
+  SELECT Y.* FROM pcs_t1 Y WHERE struct(Y.ds, Y.key) in (struct('2000-04-08',1), struct('2000-04-09',2))
+) A
+WHERE A.ds = '2008-04-08'
+SORT BY A.key, A.value, A.ds
+PREHOOK: type: QUERY
+POSTHOOK: query: EXPLAIN EXTENDED
+SELECT * FROM (
+  SELECT X.* FROM pcs_t1 X WHERE struct(X.ds, X.key) in (struct('2000-04-08',1), struct('2000-04-09',2))
+  UNION ALL
+  SELECT Y.* FROM pcs_t1 Y WHERE struct(Y.ds, Y.key) in (struct('2000-04-08',1), struct('2000-04-09',2))
+) A
+WHERE A.ds = '2008-04-08'
+SORT BY A.key, A.value, A.ds
+POSTHOOK: type: QUERY
+ABSTRACT SYNTAX TREE:
+  
+TOK_QUERY
+   TOK_FROM
+      TOK_SUBQUERY
+         TOK_UNIONALL
+            TOK_QUERY
+               TOK_FROM
+                  TOK_TABREF
+                     TOK_TABNAME
+                        pcs_t1
+                     X
+               TOK_INSERT
+                  TOK_DESTINATION
+                     TOK_DIR
+                        TOK_TMP_FILE
+                  TOK_SELECT
+                     TOK_SELEXPR
+                        TOK_ALLCOLREF
+                           TOK_TABNAME
+                              X
+                  TOK_WHERE
+                     TOK_FUNCTION
+                        in
+                        TOK_FUNCTION
+                           struct
+                           .
+                              TOK_TABLE_OR_COL
+                                 X
+                              ds
+                           .
+                              TOK_TABLE_OR_COL
+                                 X
+                              key
+                        TOK_FUNCTION
+                           struct
+                           '2000-04-08'
+                           1
+                        TOK_FUNCTION
+                           struct
+                           '2000-04-09'
+                           2
+            TOK_QUERY
+               TOK_FROM
+                  TOK_TABREF
+                     TOK_TABNAME
+                        pcs_t1
+                     Y
+               TOK_INSERT
+                  TOK_DESTINATION
+                     TOK_DIR
+                        TOK_TMP_FILE
+                  TOK_SELECT
+                     TOK_SELEXPR
+                        TOK_ALLCOLREF
+                           TOK_TABNAME
+                              Y
+                  TOK_WHERE
+                     TOK_FUNCTION
+                        in
+                        TOK_FUNCTION
+                           struct
+                           .
+                              TOK_TABLE_OR_COL
+                                 Y
+                              ds
+                           .
+                              TOK_TABLE_OR_COL
+                                 Y
+                              key
+                        TOK_FUNCTION
+                           struct
+                           '2000-04-08'
+                           1
+                        TOK_FUNCTION
+                           struct
+                           '2000-04-09'
+                           2
+         A
+   TOK_INSERT
+      TOK_DESTINATION
+         TOK_DIR
+            TOK_TMP_FILE
+      TOK_SELECT
+         TOK_SELEXPR
+            TOK_ALLCOLREF
+      TOK_WHERE
+         =
+            .
+               TOK_TABLE_OR_COL
+                  A
+               ds
+            '2008-04-08'
+      TOK_SORTBY
+         TOK_TABSORTCOLNAMEASC
+            .
+               TOK_TABLE_OR_COL
+                  A
+               key
+         TOK_TABSORTCOLNAMEASC
+            .
+               TOK_TABLE_OR_COL
+                  A
+               value
+         TOK_TABSORTCOLNAMEASC
+            .
+               TOK_TABLE_OR_COL
+                  A
+               ds
+
+
+STAGE DEPENDENCIES:
+  Stage-1 is a root stage
+  Stage-0 depends on stages: Stage-1
+
+STAGE PLANS:
+  Stage: Stage-1
+    Map Reduce
+      Map Operator Tree:
+          TableScan
+            alias: x
+            Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
+            GatherStats: false
+            Filter Operator
+              isSamplingPred: false
+              predicate: ((struct(ds,key)) IN (const struct('2000-04-08',1), const struct('2000-04-09',2)) and (ds = '2008-04-08')) (type: boolean)
+              Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
+              Select Operator
+                expressions: key (type: int), value (type: string)
+                outputColumnNames: _col0, _col1
+                Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
+                Union
+                  Statistics: Num rows: 2 Data size: 0 Basic stats: PARTIAL Column stats: NONE
+                  Select Operator
+                    expressions: _col0 (type: int), _col1 (type: string)
+                    outputColumnNames: _col0, _col1
+                    Statistics: Num rows: 2 Data size: 0 Basic stats: PARTIAL Column stats: NONE
+                    Reduce Output Operator
+                      key expressions: _col0 (type: int), _col1 (type: string), '2008-04-08' (type: string)
+                      sort order: +++
+                      Statistics: Num rows: 2 Data size: 0 Basic stats: PARTIAL Column stats: NONE
+                      tag: -1
+                      auto parallelism: false
+          TableScan
+            alias: y
+            Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
+            GatherStats: false
+            Filter Operator
+              isSamplingPred: false
+              predicate: ((struct(ds,key)) IN (const struct('2000-04-08',1), const struct('2000-04-09',2)) and (ds = '2008-04-08')) (type: boolean)
+              Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
+              Select Operator
+                expressions: key (type: int), value (type: string)
+                outputColumnNames: _col0, _col1
+                Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
+                Union
+                  Statistics: Num rows: 2 Data size: 0 Basic stats: PARTIAL Column stats: NONE
+                  Select Operator
+                    expressions: _col0 (type: int), _col1 (type: string)
+                    outputColumnNames: _col0, _col1
+                    Statistics: Num rows: 2 Data size: 0 Basic stats: PARTIAL Column stats: NONE
+                    Reduce Output Operator
+                      key expressions: _col0 (type: int), _col1 (type: string), '2008-04-08' (type: string)
+                      sort order: +++
+                      Statistics: Num rows: 2 Data size: 0 Basic stats: PARTIAL Column stats: NONE
+                      tag: -1
+                      auto parallelism: false
+      Needs Tagging: false
+      Reduce Operator Tree:
+        Select Operator
+          expressions: KEY.reducesinkkey0 (type: int), KEY.reducesinkkey1 (type: string), '2008-04-08' (type: string)
+          outputColumnNames: _col0, _col1, _col2
+          Statistics: Num rows: 2 Data size: 0 Basic stats: PARTIAL Column stats: NONE
+          File Output Operator
+            compressed: false
+            GlobalTableId: 0
+#### A masked pattern was here ####
+            NumFilesPerFileSink: 1
+            Statistics: Num rows: 2 Data size: 0 Basic stats: PARTIAL Column stats: NONE
+#### A masked pattern was here ####
+            table:
+                input format: org.apache.hadoop.mapred.TextInputFormat
+                output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+                properties:
+                  columns _col0,_col1,_col2
+                  columns.types int:string:string
+                  escape.delim \
+                  hive.serialization.extend.additional.nesting.levels true
+                  serialization.escape.crlf true
+                  serialization.format 1
+                  serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+            TotalFiles: 1
+            GatherStats: false
+            MultiFileSpray: false
+
+  Stage: Stage-0
+    Fetch Operator
+      limit: -1
+      Processor Tree:
+        ListSink
+
+PREHOOK: query: SELECT * FROM (
+  SELECT X.* FROM pcs_t1 X WHERE struct(X.ds, X.key) in (struct('2000-04-08',1), struct('2000-04-09',2))
+  UNION ALL
+  SELECT Y.* FROM pcs_t1 Y WHERE struct(Y.ds, Y.key) in (struct('2000-04-08',1), struct('2000-04-09',2))
+) A
+WHERE A.ds = '2008-04-08'
+SORT BY A.key, A.value, A.ds
+PREHOOK: type: QUERY
+PREHOOK: Input: default@pcs_t1
+#### A masked pattern was here ####
+POSTHOOK: query: SELECT * FROM (
+  SELECT X.* FROM pcs_t1 X WHERE struct(X.ds, X.key) in (struct('2000-04-08',1), struct('2000-04-09',2))
+  UNION ALL
+  SELECT Y.* FROM pcs_t1 Y WHERE struct(Y.ds, Y.key) in (struct('2000-04-08',1), struct('2000-04-09',2))
+) A
+WHERE A.ds = '2008-04-08'
+SORT BY A.key, A.value, A.ds
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@pcs_t1
+#### A masked pattern was here ####
+PREHOOK: query: explain extended select ds from pcs_t1 where struct(case when ds='2000-04-08' then 10 else 20 end) in (struct(10),struct(11))
+PREHOOK: type: QUERY
+POSTHOOK: query: explain extended select ds from pcs_t1 where struct(case when ds='2000-04-08' then 10 else 20 end) in (struct(10),struct(11))
+POSTHOOK: type: QUERY
+ABSTRACT SYNTAX TREE:
+  
+TOK_QUERY
+   TOK_FROM
+      TOK_TABREF
+         TOK_TABNAME
+            pcs_t1
+   TOK_INSERT
+      TOK_DESTINATION
+         TOK_DIR
+            TOK_TMP_FILE
+      TOK_SELECT
+         TOK_SELEXPR
+            TOK_TABLE_OR_COL
+               ds
+      TOK_WHERE
+         TOK_FUNCTION
+            in
+            TOK_FUNCTION
+               struct
+               TOK_FUNCTION
+                  when
+                  =
+                     TOK_TABLE_OR_COL
+                        ds
+                     '2000-04-08'
+                  10
+                  20
+            TOK_FUNCTION
+               struct
+               10
+            TOK_FUNCTION
+               struct
+               11
+
+
+STAGE DEPENDENCIES:
+  Stage-0 is a root stage
+
+STAGE PLANS:
+  Stage: Stage-0
+    Fetch Operator
+      limit: -1
+      Partition Description:
+          Partition
+            input format: org.apache.hadoop.mapred.TextInputFormat
+            output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+            partition values:
+              ds 2000-04-08
+            properties:
+              COLUMN_STATS_ACCURATE true
+              bucket_count -1
+              columns key,value
+              columns.comments 
+              columns.types int:string
+#### A masked pattern was here ####
+              name default.pcs_t1
+              numFiles 1
+              numRows 20
+              partition_columns ds
+              partition_columns.types string
+              rawDataSize 160
+              serialization.ddl struct pcs_t1 { i32 key, string value}
+              serialization.format 1
+              serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+              totalSize 180
+#### A masked pattern was here ####
+            serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+          
+              input format: org.apache.hadoop.mapred.TextInputFormat
+              output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+              properties:
+                bucket_count -1
+                columns key,value
+                columns.comments 
+                columns.types int:string
+#### A masked pattern was here ####
+                name default.pcs_t1
+                partition_columns ds
+                partition_columns.types string
+                serialization.ddl struct pcs_t1 { i32 key, string value}
+                serialization.format 1
+                serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+#### A masked pattern was here ####
+              serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+              name: default.pcs_t1
+            name: default.pcs_t1
+      Processor Tree:
+        TableScan
+          alias: pcs_t1
+          Statistics: Num rows: 20 Data size: 160 Basic stats: COMPLETE Column stats: NONE
+          GatherStats: false
+          Filter Operator
+            isSamplingPred: false
+            predicate: (const struct(10)) IN (const struct(10), const struct(11)) (type: boolean)
+            Statistics: Num rows: 10 Data size: 80 Basic stats: COMPLETE Column stats: NONE
+            Select Operator
+              expressions: ds (type: string)
+              outputColumnNames: _col0
+              Statistics: Num rows: 10 Data size: 80 Basic stats: COMPLETE Column stats: NONE
+              ListSink
+
+PREHOOK: query: select ds from pcs_t1 where struct(case when ds='2000-04-08' then 10 else 20 end) in (struct(10),struct(11))
+PREHOOK: type: QUERY
+PREHOOK: Input: default@pcs_t1
+PREHOOK: Input: default@pcs_t1@ds=2000-04-08
+#### A masked pattern was here ####
+POSTHOOK: query: select ds from pcs_t1 where struct(case when ds='2000-04-08' then 10 else 20 end) in (struct(10),struct(11))
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@pcs_t1
+POSTHOOK: Input: default@pcs_t1@ds=2000-04-08
+#### A masked pattern was here ####
+2000-04-08
+2000-04-08
+2000-04-08
+2000-04-08
+2000-04-08
+2000-04-08
+2000-04-08
+2000-04-08
+2000-04-08
+2000-04-08
+2000-04-08
+2000-04-08
+2000-04-08
+2000-04-08
+2000-04-08
+2000-04-08
+2000-04-08
+2000-04-08
+2000-04-08
+2000-04-08
+PREHOOK: query: explain extended select ds from pcs_t1 where struct(ds, key, rand(100)) in (struct('2000-04-08',1,0.2), struct('2000-04-09',2,0.3))
+PREHOOK: type: QUERY
+POSTHOOK: query: explain extended select ds from pcs_t1 where struct(ds, key, rand(100)) in (struct('2000-04-08',1,0.2), struct('2000-04-09',2,0.3))
+POSTHOOK: type: QUERY
+ABSTRACT SYNTAX TREE:
+  
+TOK_QUERY
+   TOK_FROM
+      TOK_TABREF
+         TOK_TABNAME
+            pcs_t1
+   TOK_INSERT
+      TOK_DESTINATION
+         TOK_DIR
+            TOK_TMP_FILE
+      TOK_SELECT
+         TOK_SELEXPR
+            TOK_TABLE_OR_COL
+               ds
+      TOK_WHERE
+         TOK_FUNCTION
+            in
+            TOK_FUNCTION
+               struct
+               TOK_TABLE_OR_COL
+                  ds
+               TOK_TABLE_OR_COL
+                  key
+               TOK_FUNCTION
+                  rand
+                  100
+            TOK_FUNCTION
+               struct
+               '2000-04-08'
+               1
+               0.2
+            TOK_FUNCTION
+               struct
+               '2000-04-09'
+               2
+               0.3
+
+
+STAGE DEPENDENCIES:
+  Stage-1 is a root stage
+  Stage-0 depends on stages: Stage-1
+
+STAGE PLANS:
+  Stage: Stage-1
+    Map Reduce
+      Map Operator Tree:
+          TableScan
+            alias: pcs_t1
+            Statistics: Num rows: 40 Data size: 320 Basic stats: COMPLETE Column stats: NONE
+            GatherStats: false
+            Filter Operator
+              isSamplingPred: false
+              predicate: (struct(ds,key,rand(100))) IN (const struct('2000-04-08',1,0.2), const struct('2000-04-09',2,0.3)) (type: boolean)
+              Statistics: Num rows: 20 Data size: 160 Basic stats: COMPLETE Column stats: NONE
+              Select Operator
+                expressions: ds (type: string)
+                outputColumnNames: _col0
+                Statistics: Num rows: 20 Data size: 160 Basic stats: COMPLETE Column stats: NONE
+                File Output Operator
+                  compressed: false
+                  GlobalTableId: 0
+#### A masked pattern was here ####
+                  NumFilesPerFileSink: 1
+                  Statistics: Num rows: 20 Data size: 160 Basic stats: COMPLETE Column stats: NONE
+#### A masked pattern was here ####
+                  table:
+                      input format: org.apache.hadoop.mapred.TextInputFormat
+                      output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+                      properties:
+                        columns _col0
+                        columns.types string
+                        escape.delim \
+                        hive.serialization.extend.additional.nesting.levels true
+                        serialization.escape.crlf true
+                        serialization.format 1
+                        serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                      serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                  TotalFiles: 1
+                  GatherStats: false
+                  MultiFileSpray: false
+      Path -> Alias:
+#### A masked pattern was here ####
+      Path -> Partition:
+#### A masked pattern was here ####
+          Partition
+            base file name: ds=2000-04-08
+            input format: org.apache.hadoop.mapred.TextInputFormat
+            output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+            partition values:
+              ds 2000-04-08
+            properties:
+              COLUMN_STATS_ACCURATE true
+              bucket_count -1
+              columns key,value
+              columns.comments 
+              columns.types int:string
+#### A masked pattern was here ####
+              name default.pcs_t1
+              numFiles 1
+              numRows 20
+              partition_columns ds
+              partition_columns.types string
+              rawDataSize 160
+              serialization.ddl struct pcs_t1 { i32 key, string value}
+              serialization.format 1
+              serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+              totalSize 180
+#### A masked pattern was here ####
+            serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+          
+              input format: org.apache.hadoop.mapred.TextInputFormat
+              output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+              properties:
+                bucket_count -1
+                columns key,value
+                columns.comments 
+                columns.types int:string
+#### A masked pattern was here ####
+                name default.pcs_t1
+                partition_columns ds
+                partition_columns.types string
+                serialization.ddl struct pcs_t1 { i32 key, string value}
+                serialization.format 1
+                serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+#### A masked pattern was here ####
+              serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+              name: default.pcs_t1
+            name: default.pcs_t1
+#### A masked pattern was here ####
+          Partition
+            base file name: ds=2000-04-09
+            input format: org.apache.hadoop.mapred.TextInputFormat
+            output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+            partition values:
+              ds 2000-04-09
+            properties:
+              COLUMN_STATS_ACCURATE true
+              bucket_count -1
+              columns key,value
+              columns.comments 
+              columns.types int:string
+#### A masked pattern was here ####
+              name default.pcs_t1
+              numFiles 1
+              numRows 20
+              partition_columns ds
+              partition_columns.types string
+              rawDataSize 160
+              serialization.ddl struct pcs_t1 { i32 key, string value}
+              serialization.format 1
+              serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+              totalSize 180
+#### A masked pattern was here ####
+            serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+          
+              input format: org.apache.hadoop.mapred.TextInputFormat
+              output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+              properties:
+                bucket_count -1
+                columns key,value
+                columns.comments 
+                columns.types int:string
+#### A masked pattern was here ####
+                name default.pcs_t1
+                partition_columns ds
+                partition_columns.types string
+                serialization.ddl struct pcs_t1 { i32 key, string value}
+                serialization.format 1
+                serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+#### A masked pattern was here ####
+              serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+              name: default.pcs_t1
+            name: default.pcs_t1
+      Truncated Path -> Alias:
+        /pcs_t1/ds=2000-04-08 [pcs_t1]
+        /pcs_t1/ds=2000-04-09 [pcs_t1]
+
+  Stage: Stage-0
+    Fetch Operator
+      limit: -1
+      Processor Tree:
+        ListSink
+
+PREHOOK: query: explain extended select ds from pcs_t1 where struct(ds='2000-04-08' or key = 2, key) in (struct(true,2), struct(false,3))
+PREHOOK: type: QUERY
+POSTHOOK: query: explain extended select ds from pcs_t1 where struct(ds='2000-04-08' or key = 2, key) in (struct(true,2), struct(false,3))
+POSTHOOK: type: QUERY
+ABSTRACT SYNTAX TREE:
+  
+TOK_QUERY
+   TOK_FROM
+      TOK_TABREF
+         TOK_TABNAME
+            pcs_t1
+   TOK_INSERT
+      TOK_DESTINATION
+         TOK_DIR
+            TOK_TMP_FILE
+      TOK_SELECT
+         TOK_SELEXPR
+            TOK_TABLE_OR_COL
+               ds
+      TOK_WHERE
+         TOK_FUNCTION
+            in
+            TOK_FUNCTION
+               struct
+               or
+                  =
+                     TOK_TABLE_OR_COL
+                        ds
+                     '2000-04-08'
+                  =
+                     TOK_TABLE_OR_COL
+                        key
+                     2
+               TOK_TABLE_OR_COL
+                  key
+            TOK_FUNCTION
+               struct
+               true
+               2
+            TOK_FUNCTION
+               struct
+               false
+               3
+
+
+STAGE DEPENDENCIES:
+  Stage-0 is a root stage
+
+STAGE PLANS:
+  Stage: Stage-0
+    Fetch Operator
+      limit: -1
+      Partition Description:
+          Partition
+            input format: org.apache.hadoop.mapred.TextInputFormat
+            output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+            partition values:
+              ds 2000-04-08
+            properties:
+              COLUMN_STATS_ACCURATE true
+              bucket_count -1
+              columns key,value
+              columns.comments 
+              columns.types int:string
+#### A masked pattern was here ####
+              name default.pcs_t1
+              numFiles 1
+              numRows 20
+              partition_columns ds
+              partition_columns.types string
+              rawDataSize 160
+              serialization.ddl struct pcs_t1 { i32 key, string value}
+              serialization.format 1
+              serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+              totalSize 180
+#### A masked pattern was here ####
+            serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+          
+              input format: org.apache.hadoop.mapred.TextInputFormat
+              output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+              properties:
+                bucket_count -1
+                columns key,value
+                columns.comments 
+                columns.types int:string
+#### A masked pattern was here ####
+                name default.pcs_t1
+                partition_columns ds
+                partition_columns.types string
+                serialization.ddl struct pcs_t1 { i32 key, string value}
+                serialization.format 1
+                serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+#### A masked pattern was here ####
+              serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+              name: default.pcs_t1
+            name: default.pcs_t1
+          Partition
+            input format: org.apache.hadoop.mapred.TextInputFormat
+            output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+            partition values:
+              ds 2000-04-09
+            properties:
+              COLUMN_STATS_ACCURATE true
+              bucket_count -1
+              columns key,value
+              columns.comments 
+              columns.types int:string
+#### A masked pattern was here ####
+              name default.pcs_t1
+              numFiles 1
+              numRows 20
+              partition_columns ds
+              partition_columns.types string
+              rawDataSize 160
+              serialization.ddl struct pcs_t1 { i32 key, string value}
+              serialization.format 1
+              serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+              totalSize 180
+#### A masked pattern was here ####
+            serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+          
+              input format: org.apache.hadoop.mapred.TextInputFormat
+              output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+              properties:
+                bucket_count -1
+                columns key,value
+                columns.comments 
+                columns.types int:string
+#### A masked pattern was here ####
+                name default.pcs_t1
+                partition_columns ds
+                partition_columns.types string
+                serialization.ddl struct pcs_t1 { i32 key, string value}
+                serialization.format 1
+                serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+#### A masked pattern was here ####
+              serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+              name: default.pcs_t1
+            name: default.pcs_t1
+          Partition
+            input format: org.apache.hadoop.mapred.TextInputFormat
+            output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+            partition values:
+              ds 2000-04-10
+            properties:
+              COLUMN_STATS_ACCURATE true
+              bucket_count -1
+              columns key,value
+              columns.comments 
+              columns.types int:string
+#### A masked pattern was here ####
+              name default.pcs_t1
+              numFiles 1
+              numRows 20
+              partition_columns ds
+              partition_columns.types string
+              rawDataSize 160
+              serialization.ddl struct pcs_t1 { i32 key, string value}
+              serialization.format 1
+              serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+              totalSize 180
+#### A masked pattern was here ####
+            serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+          
+              input format: org.apache.hadoop.mapred.TextInputFormat
+              output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+              properties:
+                bucket_count -1
+                columns key,value
+                columns.comments 
+                columns.types int:string
+#### A masked pattern was here ####
+                name default.pcs_t1
+                partition_columns ds
+                partition_columns.types string
+                serialization.ddl struct pcs_t1 { i32 key, string value}
+                serialization.format 1
+                serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+#### A masked pattern was here ####
+              serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+              name: default.pcs_t1
+            name: default.pcs_t1
+      Processor Tree:
+        TableScan
+          alias: pcs_t1
+          Statistics: Num rows: 60 Data size: 480 Basic stats: COMPLETE Column stats: NONE
+          GatherStats: false
+          Filter Operator
+            isSamplingPred: false
+            predicate: (struct(((ds = '2000-04-08') or (key = 2)),key)) IN (const struct(true,2), const struct(false,3)) (type: boolean)
+            Statistics: Num rows: 30 Data size: 240 Basic stats: COMPLETE Column stats: NONE
+            Select Operator
+              expressions: ds (type: string)
+              outputColumnNames: _col0
+              Statistics: Num rows: 30 Data size: 240 Basic stats: COMPLETE Column stats: NONE
+              ListSink
+
+PREHOOK: query: select ds from pcs_t1 where struct(ds='2000-04-08' or key = 2, key) in (struct(true,2), struct(false,3))
+PREHOOK: type: QUERY
+PREHOOK: Input: default@pcs_t1
+PREHOOK: Input: default@pcs_t1@ds=2000-04-08
+PREHOOK: Input: default@pcs_t1@ds=2000-04-09
+PREHOOK: Input: default@pcs_t1@ds=2000-04-10
+#### A masked pattern was here ####
+POSTHOOK: query: select ds from pcs_t1 where struct(ds='2000-04-08' or key = 2, key) in (struct(true,2), struct(false,3))
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@pcs_t1
+POSTHOOK: Input: default@pcs_t1@ds=2000-04-08
+POSTHOOK: Input: default@pcs_t1@ds=2000-04-09
+POSTHOOK: Input: default@pcs_t1@ds=2000-04-10
+#### A masked pattern was here ####
+2000-04-08
+2000-04-09
+2000-04-10
+PREHOOK: query: explain extended select ds from pcs_t1 where key = 3 or (struct(ds='2000-04-08' or key = 2, key) in (struct(true,2), struct(false,3)) and key+5 > 0)
+PREHOOK: type: QUERY
+POSTHOOK: query: explain extended select ds from pcs_t1 where key = 3 or (struct(ds='2000-04-08' or key = 2, key) in (struct(true,2), struct(false,3)) and key+5 > 0)
+POSTHOOK: type: QUERY
+ABSTRACT SYNTAX TREE:
+  
+TOK_QUERY
+   TOK_FROM
+      TOK_TABREF
+         TOK_TABNAME
+            pcs_t1
+   TOK_INSERT
+      TOK_DESTINATION
+         TOK_DIR
+            TOK_TMP_FILE
+      TOK_SELECT
+         TOK_SELEXPR
+            TOK_TABLE_OR_COL
+               ds
+      TOK_WHERE
+         or
+            =
+               TOK_TABLE_OR_COL
+                  key
+               3
+            and
+               TOK_FUNCTION
+                  in
+                  TOK_FUNCTION
+                     struct
+                     or
+                        =
+                           TOK_TABLE_OR_COL
+                              ds
+                           '2000-04-08'
+                        =
+                           TOK_TABLE_OR_COL
+                              key
+                           2
+                     TOK_TABLE_OR_COL
+                        key
+                  TOK_FUNCTION
+                     struct
+                     true
+                     2
+                  TOK_FUNCTION
+                     struct
+                     false
+                     3
+               >
+                  +
+                     TOK_TABLE_OR_COL
+                        key
+                     5
+                  0
+
+
+STAGE DEPENDENCIES:
+  Stage-0 is a root stage
+
+STAGE PLANS:
+  Stage: Stage-0
+    Fetch Operator
+      limit: -1
+      Partition Description:
+          Partition
+            input format: org.apache.hadoop.mapred.TextInputFormat
+            output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+            partition values:
+              ds 2000-04-08
+            properties:
+              COLUMN_STATS_ACCURATE true
+              bucket_count -1
+              columns key,value
+              columns.comments 
+              columns.types int:string
+#### A masked pattern was here ####
+              name default.pcs_t1
+              numFiles 1
+              numRows 20
+              partition_columns ds
+              partition_columns.types string
+              rawDataSize 160
+              serialization.ddl struct pcs_t1 { i32 key, string value}
+              serialization.format 1
+              serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+              totalSize 180
+#### A masked pattern was here ####
+            serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+          
+              input format: org.apache.hadoop.mapred.TextInputFormat
+              output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+              properties:
+                bucket_count -1
+                columns key,value
+                columns.comments 
+                columns.types int:string
+#### A masked pattern was here ####
+                name default.pcs_t1
+                partition_columns ds
+                partition_columns.types string
+                serialization.ddl struct pcs_t1 { i32 key, string value}
+                serialization.format 1
+                serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+#### A masked pattern was here ####
+              serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+              name: default.pcs_t1
+            name: default.pcs_t1
+          Partition
+            input format: org.apache.hadoop.mapred.TextInputFormat
+            output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+            partition values:
+              ds 2000-04-09
+            properties:
+              COLUMN_STATS_ACCURATE true
+              bucket_count -1
+              columns key,value
+              columns.comments 
+              columns.types int:string
+#### A masked pattern was here ####
+              name default.pcs_t1
+              numFiles 1
+              numRows 20
+              partition_columns ds
+              partition_columns.types string
+              rawDataSize 160
+              serialization.ddl struct pcs_t1 { i32 key, string value}
+              serialization.format 1
+              serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+              totalSize 180
+#### A masked pattern was here ####
+            serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+          
+              input format: org.apache.hadoop.mapred.TextInputFormat
+              output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+              properties:
+                bucket_count -1
+                columns key,value
+                columns.comments 
+                columns.types int:string
+#### A masked pattern was here ####
+                name default.pcs_t1
+                partition_columns ds
+                partition_columns.types string
+                serialization.ddl struct pcs_t1 { i32 key, string value}
+                serialization.format 1
+                serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+#### A masked pattern was here ####
+              serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+              name: default.pcs_t1
+            name: default.pcs_t1
+          Partition
+            input format: org.apache.hadoop.mapred.TextInputFormat
+            output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+            partition values:
+              ds 2000-04-10
+            properties:
+              COLUMN_STATS_ACCURATE true
+              bucket_count -1
+              columns key,value
+              columns.comments 
+              columns.types int:string
+#### A masked pattern was here ####
+              name default.pcs_t1
+              numFiles 1
+              numRows 20
+              partition_columns ds
+              partition_columns.types string
+              rawDataSize 160
+              serialization.ddl struct pcs_t1 { i32 key, string value}
+              serialization.format 1
+              serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+              totalSize 180
+#### A masked pattern was here ####
+            serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+          
+              input format: org.apache.hadoop.mapred.TextInputFormat
+              output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+              properties:
+                bucket_count -1
+                columns key,value
+                columns.comments 
+                columns.types int:string
+#### A masked pattern was here ####
+                name default.pcs_t1
+                partition_columns ds
+                partition_columns.types string
+                serialization.ddl struct pcs_t1 { i32 key, string value}
+                serialization.format 1
+                serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+#### A masked pattern was here ####
+              serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+              name: default.pcs_t1
+            name: default.pcs_t1
+      Processor Tree:
+        TableScan
+          alias: pcs_t1
+          Statistics: Num rows: 60 Data size: 480 Basic stats: COMPLETE Column stats: NONE
+          GatherStats: false
+          Filter Operator
+            isSamplingPred: false
+            predicate: ((key = 3) or ((struct(((ds = '2000-04-08') or (key = 2)),key)) IN (const struct(true,2), const struct(false,3)) and ((key + 5) > 0))) (type: boolean)
+            Statistics: Num rows: 40 Data size: 320 Basic stats: COMP

<TRUNCATED>
