http://git-wip-us.apache.org/repos/asf/hive/blob/6f5c1135/ql/src/test/results/clientpositive/join32_lessSize.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/join32_lessSize.q.out b/ql/src/test/results/clientpositive/join32_lessSize.q.out
deleted file mode 100644
index f9f3506..0000000
--- a/ql/src/test/results/clientpositive/join32_lessSize.q.out
+++ /dev/null
@@ -1,2651 +0,0 @@
-PREHOOK: query: -- SORT_QUERY_RESULTS
-
-CREATE TABLE dest_j1(key STRING, value STRING, val2 STRING) STORED AS TEXTFILE
-PREHOOK: type: CREATETABLE
-PREHOOK: Output: database:default
-PREHOOK: Output: default@dest_j1
-POSTHOOK: query: -- SORT_QUERY_RESULTS
-
-CREATE TABLE dest_j1(key STRING, value STRING, val2 STRING) STORED AS TEXTFILE
-POSTHOOK: type: CREATETABLE
-POSTHOOK: Output: database:default
-POSTHOOK: Output: default@dest_j1
-PREHOOK: query: CREATE TABLE dest_j2(key STRING, value STRING, val2 STRING) STORED AS TEXTFILE
-PREHOOK: type: CREATETABLE
-PREHOOK: Output: database:default
-PREHOOK: Output: default@dest_j2
-POSTHOOK: query: CREATE TABLE dest_j2(key STRING, value STRING, val2 STRING) STORED AS TEXTFILE
-POSTHOOK: type: CREATETABLE
-POSTHOOK: Output: database:default
-POSTHOOK: Output: default@dest_j2
-PREHOOK: query: -- Since the inputs are small, it should be automatically converted to mapjoin
-
-EXPLAIN EXTENDED
-INSERT OVERWRITE TABLE dest_j1
-SELECT x.key, z.value, y.value
-FROM src1 x JOIN src y ON (x.key = y.key) 
-JOIN srcpart z ON (x.value = z.value and z.ds='2008-04-08' and z.hr=11)
-PREHOOK: type: QUERY
-POSTHOOK: query: -- Since the inputs are small, it should be automatically converted to mapjoin
-
-EXPLAIN EXTENDED
-INSERT OVERWRITE TABLE dest_j1
-SELECT x.key, z.value, y.value
-FROM src1 x JOIN src y ON (x.key = y.key) 
-JOIN srcpart z ON (x.value = z.value and z.ds='2008-04-08' and z.hr=11)
-POSTHOOK: type: QUERY
-STAGE DEPENDENCIES:
-  Stage-9 is a root stage
-  Stage-7 depends on stages: Stage-9
-  Stage-8 depends on stages: Stage-7
-  Stage-6 depends on stages: Stage-8
-  Stage-0 depends on stages: Stage-6
-  Stage-3 depends on stages: Stage-0
-
-STAGE PLANS:
-  Stage: Stage-9
-    Map Reduce Local Work
-      Alias -> Map Local Tables:
-        $hdt$_2:x 
-          Fetch Operator
-            limit: -1
-      Alias -> Map Local Operator Tree:
-        $hdt$_2:x 
-          TableScan
-            alias: x
-            Statistics: Num rows: 25 Data size: 191 Basic stats: COMPLETE Column stats: NONE
-            GatherStats: false
-            Filter Operator
-              isSamplingPred: false
-              predicate: (key is not null and value is not null) (type: boolean)
-              Statistics: Num rows: 25 Data size: 191 Basic stats: COMPLETE Column stats: NONE
-              Select Operator
-                expressions: key (type: string), value (type: string)
-                outputColumnNames: _col0, _col1
-                Statistics: Num rows: 25 Data size: 191 Basic stats: COMPLETE Column stats: NONE
-                HashTable Sink Operator
-                  keys:
-                    0 _col0 (type: string)
-                    1 _col1 (type: string)
-                  Position of Big Table: 0
-
-  Stage: Stage-7
-    Map Reduce
-      Map Operator Tree:
-          TableScan
-            alias: z
-            Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE
-            GatherStats: false
-            Filter Operator
-              isSamplingPred: false
-              predicate: value is not null (type: boolean)
-              Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE
-              Select Operator
-                expressions: value (type: string)
-                outputColumnNames: _col0
-                Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE
-                Map Join Operator
-                  condition map:
-                       Inner Join 0 to 1
-                  keys:
-                    0 _col0 (type: string)
-                    1 _col1 (type: string)
-                  outputColumnNames: _col0, _col3
-                  Position of Big Table: 0
-                  Statistics: Num rows: 550 Data size: 5843 Basic stats: COMPLETE Column stats: NONE
-                  File Output Operator
-                    compressed: false
-                    GlobalTableId: 0
-#### A masked pattern was here ####
-                    NumFilesPerFileSink: 1
-                    table:
-                        input format: org.apache.hadoop.mapred.SequenceFileInputFormat
-                        output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
-                        properties:
-                          columns _col0,_col3
-                          columns.types string,string
-                          escape.delim \
-                          serialization.lib org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe
-                        serde: org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe
-                    TotalFiles: 1
-                    GatherStats: false
-                    MultiFileSpray: false
-      Local Work:
-        Map Reduce Local Work
-      Path -> Alias:
-#### A masked pattern was here ####
-      Path -> Partition:
-#### A masked pattern was here ####
-          Partition
-            base file name: src1
-            input format: org.apache.hadoop.mapred.TextInputFormat
-            output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-            properties:
-              COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
-              bucket_count -1
-              columns key,value
-              columns.comments 'default','default'
-              columns.types string:string
-#### A masked pattern was here ####
-              name default.src1
-              numFiles 1
-              numRows 25
-              rawDataSize 191
-              serialization.ddl struct src1 { string key, string value}
-              serialization.format 1
-              serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-              totalSize 216
-#### A masked pattern was here ####
-            serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-          
-              input format: org.apache.hadoop.mapred.TextInputFormat
-              output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-              properties:
-                COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
-                bucket_count -1
-                columns key,value
-                columns.comments 'default','default'
-                columns.types string:string
-#### A masked pattern was here ####
-                name default.src1
-                numFiles 1
-                numRows 25
-                rawDataSize 191
-                serialization.ddl struct src1 { string key, string value}
-                serialization.format 1
-                serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-                totalSize 216
-#### A masked pattern was here ####
-              serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-              name: default.src1
-            name: default.src1
-#### A masked pattern was here ####
-          Partition
-            base file name: hr=11
-            input format: org.apache.hadoop.mapred.TextInputFormat
-            output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-            partition values:
-              ds 2008-04-08
-              hr 11
-            properties:
-              COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
-              bucket_count -1
-              columns key,value
-              columns.comments 'default','default'
-              columns.types string:string
-#### A masked pattern was here ####
-              name default.srcpart
-              numFiles 1
-              numRows 500
-              partition_columns ds/hr
-              partition_columns.types string:string
-              rawDataSize 5312
-              serialization.ddl struct srcpart { string key, string value}
-              serialization.format 1
-              serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-              totalSize 5812
-#### A masked pattern was here ####
-            serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-          
-              input format: org.apache.hadoop.mapred.TextInputFormat
-              output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-              properties:
-                bucket_count -1
-                columns key,value
-                columns.comments 'default','default'
-                columns.types string:string
-#### A masked pattern was here ####
-                name default.srcpart
-                partition_columns ds/hr
-                partition_columns.types string:string
-                serialization.ddl struct srcpart { string key, string value}
-                serialization.format 1
-                serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-#### A masked pattern was here ####
-              serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-              name: default.srcpart
-            name: default.srcpart
-      Truncated Path -> Alias:
-        /srcpart/ds=2008-04-08/hr=11 [$hdt$_1:z]
-
-  Stage: Stage-8
-    Map Reduce Local Work
-      Alias -> Map Local Tables:
-        $hdt$_0:y 
-          Fetch Operator
-            limit: -1
-      Alias -> Map Local Operator Tree:
-        $hdt$_0:y 
-          TableScan
-            alias: y
-            Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE
-            GatherStats: false
-            Filter Operator
-              isSamplingPred: false
-              predicate: key is not null (type: boolean)
-              Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE
-              Select Operator
-                expressions: key (type: string), value (type: string)
-                outputColumnNames: _col0, _col1
-                Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE
-                HashTable Sink Operator
-                  keys:
-                    0 _col3 (type: string)
-                    1 _col0 (type: string)
-                  Position of Big Table: 0
-
-  Stage: Stage-6
-    Map Reduce
-      Map Operator Tree:
-          TableScan
-            GatherStats: false
-            Map Join Operator
-              condition map:
-                   Inner Join 0 to 1
-              keys:
-                0 _col3 (type: string)
-                1 _col0 (type: string)
-              outputColumnNames: _col0, _col3, _col6
-              Position of Big Table: 0
-              Statistics: Num rows: 605 Data size: 6427 Basic stats: COMPLETE Column stats: NONE
-              Select Operator
-                expressions: _col3 (type: string), _col0 (type: string), _col6 (type: string)
-                outputColumnNames: _col0, _col1, _col2
-                Statistics: Num rows: 605 Data size: 6427 Basic stats: COMPLETE Column stats: NONE
-                File Output Operator
-                  compressed: false
-                  GlobalTableId: 1
-#### A masked pattern was here ####
-                  NumFilesPerFileSink: 1
-                  Statistics: Num rows: 605 Data size: 6427 Basic stats: COMPLETE Column stats: NONE
-#### A masked pattern was here ####
-                  table:
-                      input format: org.apache.hadoop.mapred.TextInputFormat
-                      output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-                      properties:
-                        COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
-                        bucket_count -1
-                        columns key,value,val2
-                        columns.comments 
-                        columns.types string:string:string
-#### A masked pattern was here ####
-                        name default.dest_j1
-                        numFiles 0
-                        numRows 0
-                        rawDataSize 0
-                        serialization.ddl struct dest_j1 { string key, string value, string val2}
-                        serialization.format 1
-                        serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-                        totalSize 0
-#### A masked pattern was here ####
-                      serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-                      name: default.dest_j1
-                  TotalFiles: 1
-                  GatherStats: true
-                  MultiFileSpray: false
-      Local Work:
-        Map Reduce Local Work
-      Path -> Alias:
-#### A masked pattern was here ####
-      Path -> Partition:
-#### A masked pattern was here ####
-          Partition
-            base file name: -mr-10002
-            input format: org.apache.hadoop.mapred.SequenceFileInputFormat
-            output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
-            properties:
-              columns _col0,_col3
-              columns.types string,string
-              escape.delim \
-              serialization.lib org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe
-            serde: org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe
-          
-              input format: org.apache.hadoop.mapred.SequenceFileInputFormat
-              output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
-              properties:
-                columns _col0,_col3
-                columns.types string,string
-                escape.delim \
-                serialization.lib org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe
-              serde: org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe
-#### A masked pattern was here ####
-          Partition
-            base file name: src
-            input format: org.apache.hadoop.mapred.TextInputFormat
-            output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-            properties:
-              COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
-              bucket_count -1
-              columns key,value
-              columns.comments 'default','default'
-              columns.types string:string
-#### A masked pattern was here ####
-              name default.src
-              numFiles 1
-              numRows 500
-              rawDataSize 5312
-              serialization.ddl struct src { string key, string value}
-              serialization.format 1
-              serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-              totalSize 5812
-#### A masked pattern was here ####
-            serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-          
-              input format: org.apache.hadoop.mapred.TextInputFormat
-              output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-              properties:
-                COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
-                bucket_count -1
-                columns key,value
-                columns.comments 'default','default'
-                columns.types string:string
-#### A masked pattern was here ####
-                name default.src
-                numFiles 1
-                numRows 500
-                rawDataSize 5312
-                serialization.ddl struct src { string key, string value}
-                serialization.format 1
-                serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-                totalSize 5812
-#### A masked pattern was here ####
-              serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-              name: default.src
-            name: default.src
-      Truncated Path -> Alias:
-#### A masked pattern was here ####
-
-  Stage: Stage-0
-    Move Operator
-      tables:
-          replace: true
-#### A masked pattern was here ####
-          table:
-              input format: org.apache.hadoop.mapred.TextInputFormat
-              output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-              properties:
-                COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
-                bucket_count -1
-                columns key,value,val2
-                columns.comments 
-                columns.types string:string:string
-#### A masked pattern was here ####
-                name default.dest_j1
-                numFiles 0
-                numRows 0
-                rawDataSize 0
-                serialization.ddl struct dest_j1 { string key, string value, string val2}
-                serialization.format 1
-                serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-                totalSize 0
-#### A masked pattern was here ####
-              serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-              name: default.dest_j1
-
-  Stage: Stage-3
-    Stats-Aggr Operator
-#### A masked pattern was here ####
-
-PREHOOK: query: INSERT OVERWRITE TABLE dest_j1
-SELECT x.key, z.value, y.value
-FROM src1 x JOIN src y ON (x.key = y.key) 
-JOIN srcpart z ON (x.value = z.value and z.ds='2008-04-08' and z.hr=11)
-PREHOOK: type: QUERY
-PREHOOK: Input: default@src
-PREHOOK: Input: default@src1
-PREHOOK: Input: default@srcpart
-PREHOOK: Input: default@srcpart@ds=2008-04-08/hr=11
-PREHOOK: Output: default@dest_j1
-POSTHOOK: query: INSERT OVERWRITE TABLE dest_j1
-SELECT x.key, z.value, y.value
-FROM src1 x JOIN src y ON (x.key = y.key) 
-JOIN srcpart z ON (x.value = z.value and z.ds='2008-04-08' and z.hr=11)
-POSTHOOK: type: QUERY
-POSTHOOK: Input: default@src
-POSTHOOK: Input: default@src1
-POSTHOOK: Input: default@srcpart
-POSTHOOK: Input: default@srcpart@ds=2008-04-08/hr=11
-POSTHOOK: Output: default@dest_j1
-POSTHOOK: Lineage: dest_j1.key SIMPLE [(src1)x.FieldSchema(name:key, type:string, comment:default), ]
-POSTHOOK: Lineage: dest_j1.val2 SIMPLE [(src)y.FieldSchema(name:value, type:string, comment:default), ]
-POSTHOOK: Lineage: dest_j1.value SIMPLE [(srcpart)z.FieldSchema(name:value, type:string, comment:default), ]
-PREHOOK: query: select * from dest_j1
-PREHOOK: type: QUERY
-PREHOOK: Input: default@dest_j1
-#### A masked pattern was here ####
-POSTHOOK: query: select * from dest_j1
-POSTHOOK: type: QUERY
-POSTHOOK: Input: default@dest_j1
-#### A masked pattern was here ####
-146    val_146 val_146
-146    val_146 val_146
-146    val_146 val_146
-146    val_146 val_146
-150    val_150 val_150
-213    val_213 val_213
-213    val_213 val_213
-213    val_213 val_213
-213    val_213 val_213
-238    val_238 val_238
-238    val_238 val_238
-238    val_238 val_238
-238    val_238 val_238
-255    val_255 val_255
-255    val_255 val_255
-255    val_255 val_255
-255    val_255 val_255
-273    val_273 val_273
-273    val_273 val_273
-273    val_273 val_273
-273    val_273 val_273
-273    val_273 val_273
-273    val_273 val_273
-273    val_273 val_273
-273    val_273 val_273
-273    val_273 val_273
-278    val_278 val_278
-278    val_278 val_278
-278    val_278 val_278
-278    val_278 val_278
-311    val_311 val_311
-311    val_311 val_311
-311    val_311 val_311
-311    val_311 val_311
-311    val_311 val_311
-311    val_311 val_311
-311    val_311 val_311
-311    val_311 val_311
-311    val_311 val_311
-401    val_401 val_401
-401    val_401 val_401
-401    val_401 val_401
-401    val_401 val_401
-401    val_401 val_401
-401    val_401 val_401
-401    val_401 val_401
-401    val_401 val_401
-401    val_401 val_401
-401    val_401 val_401
-401    val_401 val_401
-401    val_401 val_401
-401    val_401 val_401
-401    val_401 val_401
-401    val_401 val_401
-401    val_401 val_401
-401    val_401 val_401
-401    val_401 val_401
-401    val_401 val_401
-401    val_401 val_401
-401    val_401 val_401
-401    val_401 val_401
-401    val_401 val_401
-401    val_401 val_401
-401    val_401 val_401
-406    val_406 val_406
-406    val_406 val_406
-406    val_406 val_406
-406    val_406 val_406
-406    val_406 val_406
-406    val_406 val_406
-406    val_406 val_406
-406    val_406 val_406
-406    val_406 val_406
-406    val_406 val_406
-406    val_406 val_406
-406    val_406 val_406
-406    val_406 val_406
-406    val_406 val_406
-406    val_406 val_406
-406    val_406 val_406
-66     val_66  val_66
-98     val_98  val_98
-98     val_98  val_98
-98     val_98  val_98
-98     val_98  val_98
-PREHOOK: query: EXPLAIN EXTENDED
-INSERT OVERWRITE TABLE dest_j1
-SELECT x.key, z.value, y.value
-FROM src w JOIN src1 x ON (x.value = w.value) 
-JOIN src y ON (x.key = y.key) 
-JOIN src1 z ON (x.key = z.key)
-PREHOOK: type: QUERY
-POSTHOOK: query: EXPLAIN EXTENDED
-INSERT OVERWRITE TABLE dest_j1
-SELECT x.key, z.value, y.value
-FROM src w JOIN src1 x ON (x.value = w.value) 
-JOIN src y ON (x.key = y.key) 
-JOIN src1 z ON (x.key = z.key)
-POSTHOOK: type: QUERY
-STAGE DEPENDENCIES:
-  Stage-13 is a root stage
-  Stage-10 depends on stages: Stage-13
-  Stage-12 depends on stages: Stage-10
-  Stage-9 depends on stages: Stage-12
-  Stage-11 depends on stages: Stage-9
-  Stage-8 depends on stages: Stage-11
-  Stage-0 depends on stages: Stage-8
-  Stage-4 depends on stages: Stage-0
-
-STAGE PLANS:
-  Stage: Stage-13
-    Map Reduce Local Work
-      Alias -> Map Local Tables:
-        $hdt$_2:x 
-          Fetch Operator
-            limit: -1
-      Alias -> Map Local Operator Tree:
-        $hdt$_2:x 
-          TableScan
-            alias: x
-            Statistics: Num rows: 25 Data size: 191 Basic stats: COMPLETE Column stats: NONE
-            GatherStats: false
-            Filter Operator
-              isSamplingPred: false
-              predicate: (value is not null and key is not null) (type: boolean)
-              Statistics: Num rows: 25 Data size: 191 Basic stats: COMPLETE Column stats: NONE
-              Select Operator
-                expressions: key (type: string), value (type: string)
-                outputColumnNames: _col0, _col1
-                Statistics: Num rows: 25 Data size: 191 Basic stats: COMPLETE Column stats: NONE
-                HashTable Sink Operator
-                  keys:
-                    0 _col0 (type: string)
-                    1 _col0 (type: string)
-                  Position of Big Table: 1
-
-  Stage: Stage-10
-    Map Reduce
-      Map Operator Tree:
-          TableScan
-            alias: z
-            Statistics: Num rows: 25 Data size: 191 Basic stats: COMPLETE Column stats: NONE
-            GatherStats: false
-            Filter Operator
-              isSamplingPred: false
-              predicate: key is not null (type: boolean)
-              Statistics: Num rows: 25 Data size: 191 Basic stats: COMPLETE Column stats: NONE
-              Select Operator
-                expressions: key (type: string), value (type: string)
-                outputColumnNames: _col0, _col1
-                Statistics: Num rows: 25 Data size: 191 Basic stats: COMPLETE Column stats: NONE
-                Map Join Operator
-                  condition map:
-                       Inner Join 0 to 1
-                  keys:
-                    0 _col0 (type: string)
-                    1 _col0 (type: string)
-                  outputColumnNames: _col0, _col1, _col3
-                  Position of Big Table: 1
-                  Statistics: Num rows: 27 Data size: 210 Basic stats: COMPLETE Column stats: NONE
-                  File Output Operator
-                    compressed: false
-                    GlobalTableId: 0
-#### A masked pattern was here ####
-                    NumFilesPerFileSink: 1
-                    table:
-                        input format: org.apache.hadoop.mapred.SequenceFileInputFormat
-                        output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
-                        properties:
-                          columns _col0,_col1,_col3
-                          columns.types string,string,string
-                          escape.delim \
-                          serialization.lib org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe
-                        serde: org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe
-                    TotalFiles: 1
-                    GatherStats: false
-                    MultiFileSpray: false
-      Local Work:
-        Map Reduce Local Work
-      Path -> Alias:
-#### A masked pattern was here ####
-      Path -> Partition:
-#### A masked pattern was here ####
-          Partition
-            base file name: src1
-            input format: org.apache.hadoop.mapred.TextInputFormat
-            output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-            properties:
-              COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
-              bucket_count -1
-              columns key,value
-              columns.comments 'default','default'
-              columns.types string:string
-#### A masked pattern was here ####
-              name default.src1
-              numFiles 1
-              numRows 25
-              rawDataSize 191
-              serialization.ddl struct src1 { string key, string value}
-              serialization.format 1
-              serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-              totalSize 216
-#### A masked pattern was here ####
-            serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-          
-              input format: org.apache.hadoop.mapred.TextInputFormat
-              output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-              properties:
-                COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
-                bucket_count -1
-                columns key,value
-                columns.comments 'default','default'
-                columns.types string:string
-#### A masked pattern was here ####
-                name default.src1
-                numFiles 1
-                numRows 25
-                rawDataSize 191
-                serialization.ddl struct src1 { string key, string value}
-                serialization.format 1
-                serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-                totalSize 216
-#### A masked pattern was here ####
-              serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-              name: default.src1
-            name: default.src1
-      Truncated Path -> Alias:
-        /src1 [$hdt$_3:z]
-
-  Stage: Stage-12
-    Map Reduce Local Work
-      Alias -> Map Local Tables:
-        $hdt$_1:w 
-          Fetch Operator
-            limit: -1
-      Alias -> Map Local Operator Tree:
-        $hdt$_1:w 
-          TableScan
-            alias: w
-            Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE
-            GatherStats: false
-            Filter Operator
-              isSamplingPred: false
-              predicate: value is not null (type: boolean)
-              Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE
-              Select Operator
-                expressions: value (type: string)
-                outputColumnNames: _col0
-                Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE
-                HashTable Sink Operator
-                  keys:
-                    0 _col1 (type: string)
-                    1 _col0 (type: string)
-                  Position of Big Table: 0
-
-  Stage: Stage-9
-    Map Reduce
-      Map Operator Tree:
-          TableScan
-            GatherStats: false
-            Map Join Operator
-              condition map:
-                   Inner Join 0 to 1
-              keys:
-                0 _col1 (type: string)
-                1 _col0 (type: string)
-              outputColumnNames: _col0, _col3
-              Position of Big Table: 0
-              Statistics: Num rows: 550 Data size: 5843 Basic stats: COMPLETE Column stats: NONE
-              File Output Operator
-                compressed: false
-                GlobalTableId: 0
-#### A masked pattern was here ####
-                NumFilesPerFileSink: 1
-                table:
-                    input format: org.apache.hadoop.mapred.SequenceFileInputFormat
-                    output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
-                    properties:
-                      columns _col0,_col3
-                      columns.types string,string
-                      escape.delim \
-                      serialization.lib org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe
-                    serde: org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe
-                TotalFiles: 1
-                GatherStats: false
-                MultiFileSpray: false
-      Local Work:
-        Map Reduce Local Work
-      Path -> Alias:
-#### A masked pattern was here ####
-      Path -> Partition:
-#### A masked pattern was here ####
-          Partition
-            base file name: -mr-10002
-            input format: org.apache.hadoop.mapred.SequenceFileInputFormat
-            output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
-            properties:
-              columns _col0,_col1,_col3
-              columns.types string,string,string
-              escape.delim \
-              serialization.lib org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe
-            serde: org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe
-          
-              input format: org.apache.hadoop.mapred.SequenceFileInputFormat
-              output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
-              properties:
-                columns _col0,_col1,_col3
-                columns.types string,string,string
-                escape.delim \
-                serialization.lib org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe
-              serde: org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe
-#### A masked pattern was here ####
-          Partition
-            base file name: src
-            input format: org.apache.hadoop.mapred.TextInputFormat
-            output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-            properties:
-              COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
-              bucket_count -1
-              columns key,value
-              columns.comments 'default','default'
-              columns.types string:string
-#### A masked pattern was here ####
-              name default.src
-              numFiles 1
-              numRows 500
-              rawDataSize 5312
-              serialization.ddl struct src { string key, string value}
-              serialization.format 1
-              serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-              totalSize 5812
-#### A masked pattern was here ####
-            serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-          
-              input format: org.apache.hadoop.mapred.TextInputFormat
-              output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-              properties:
-                COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
-                bucket_count -1
-                columns key,value
-                columns.comments 'default','default'
-                columns.types string:string
-#### A masked pattern was here ####
-                name default.src
-                numFiles 1
-                numRows 500
-                rawDataSize 5312
-                serialization.ddl struct src { string key, string value}
-                serialization.format 1
-                serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-                totalSize 5812
-#### A masked pattern was here ####
-              serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-              name: default.src
-            name: default.src
-      Truncated Path -> Alias:
-#### A masked pattern was here ####
-
-  Stage: Stage-11
-    Map Reduce Local Work
-      Alias -> Map Local Tables:
-        $hdt$_0:y 
-          Fetch Operator
-            limit: -1
-      Alias -> Map Local Operator Tree:
-        $hdt$_0:y 
-          TableScan
-            alias: y
-            Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE
-            GatherStats: false
-            Filter Operator
-              isSamplingPred: false
-              predicate: key is not null (type: boolean)
-              Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE
-              Select Operator
-                expressions: key (type: string), value (type: string)
-                outputColumnNames: _col0, _col1
-                Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE
-                HashTable Sink Operator
-                  keys:
-                    0 _col0 (type: string)
-                    1 _col0 (type: string)
-                  Position of Big Table: 0
-
-  Stage: Stage-8
-    Map Reduce
-      Map Operator Tree:
-          TableScan
-            GatherStats: false
-            Map Join Operator
-              condition map:
-                   Inner Join 0 to 1
-              keys:
-                0 _col0 (type: string)
-                1 _col0 (type: string)
-              outputColumnNames: _col0, _col3, _col6
-              Position of Big Table: 0
-              Statistics: Num rows: 605 Data size: 6427 Basic stats: COMPLETE Column stats: NONE
-              Select Operator
-                expressions: _col0 (type: string), _col3 (type: string), _col6 (type: string)
-                outputColumnNames: _col0, _col1, _col2
-                Statistics: Num rows: 605 Data size: 6427 Basic stats: COMPLETE Column stats: NONE
-                File Output Operator
-                  compressed: false
-                  GlobalTableId: 1
-#### A masked pattern was here ####
-                  NumFilesPerFileSink: 1
-                  Statistics: Num rows: 605 Data size: 6427 Basic stats: COMPLETE Column stats: NONE
-#### A masked pattern was here ####
-                  table:
-                      input format: org.apache.hadoop.mapred.TextInputFormat
-                      output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-                      properties:
-                        COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
-                        bucket_count -1
-                        columns key,value,val2
-                        columns.comments 
-                        columns.types string:string:string
-#### A masked pattern was here ####
-                        name default.dest_j1
-                        numFiles 1
-                        numRows 85
-                        rawDataSize 1600
-                        serialization.ddl struct dest_j1 { string key, string value, string val2}
-                        serialization.format 1
-                        serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-                        totalSize 1685
-#### A masked pattern was here ####
-                      serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-                      name: default.dest_j1
-                  TotalFiles: 1
-                  GatherStats: true
-                  MultiFileSpray: false
-      Local Work:
-        Map Reduce Local Work
-      Path -> Alias:
-#### A masked pattern was here ####
-      Path -> Partition:
-#### A masked pattern was here ####
-          Partition
-            base file name: -mr-10003
-            input format: org.apache.hadoop.mapred.SequenceFileInputFormat
-            output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
-            properties:
-              columns _col0,_col3
-              columns.types string,string
-              escape.delim \
-              serialization.lib org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe
-            serde: org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe
-          
-              input format: org.apache.hadoop.mapred.SequenceFileInputFormat
-              output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
-              properties:
-                columns _col0,_col3
-                columns.types string,string
-                escape.delim \
-                serialization.lib org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe
-              serde: org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe
-#### A masked pattern was here ####
-          Partition
-            base file name: src
-            input format: org.apache.hadoop.mapred.TextInputFormat
-            output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-            properties:
-              COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
-              bucket_count -1
-              columns key,value
-              columns.comments 'default','default'
-              columns.types string:string
-#### A masked pattern was here ####
-              name default.src
-              numFiles 1
-              numRows 500
-              rawDataSize 5312
-              serialization.ddl struct src { string key, string value}
-              serialization.format 1
-              serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-              totalSize 5812
-#### A masked pattern was here ####
-            serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-          
-              input format: org.apache.hadoop.mapred.TextInputFormat
-              output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-              properties:
-                COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
-                bucket_count -1
-                columns key,value
-                columns.comments 'default','default'
-                columns.types string:string
-#### A masked pattern was here ####
-                name default.src
-                numFiles 1
-                numRows 500
-                rawDataSize 5312
-                serialization.ddl struct src { string key, string value}
-                serialization.format 1
-                serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-                totalSize 5812
-#### A masked pattern was here ####
-              serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-              name: default.src
-            name: default.src
-      Truncated Path -> Alias:
-#### A masked pattern was here ####
-
-  Stage: Stage-0
-    Move Operator
-      tables:
-          replace: true
-#### A masked pattern was here ####
-          table:
-              input format: org.apache.hadoop.mapred.TextInputFormat
-              output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-              properties:
-                COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
-                bucket_count -1
-                columns key,value,val2
-                columns.comments 
-                columns.types string:string:string
-#### A masked pattern was here ####
-                name default.dest_j1
-                numFiles 1
-                numRows 85
-                rawDataSize 1600
-                serialization.ddl struct dest_j1 { string key, string value, string val2}
-                serialization.format 1
-                serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-                totalSize 1685
-#### A masked pattern was here ####
-              serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-              name: default.dest_j1
-
-  Stage: Stage-4
-    Stats-Aggr Operator
-#### A masked pattern was here ####
-
-PREHOOK: query: INSERT OVERWRITE TABLE dest_j1
-SELECT x.key, z.value, y.value
-FROM src w JOIN src1 x ON (x.value = w.value) 
-JOIN src y ON (x.key = y.key) 
-JOIN src1 z ON (x.key = z.key)
-PREHOOK: type: QUERY
-PREHOOK: Input: default@src
-PREHOOK: Input: default@src1
-PREHOOK: Output: default@dest_j1
-POSTHOOK: query: INSERT OVERWRITE TABLE dest_j1
-SELECT x.key, z.value, y.value
-FROM src w JOIN src1 x ON (x.value = w.value) 
-JOIN src y ON (x.key = y.key) 
-JOIN src1 z ON (x.key = z.key)
-POSTHOOK: type: QUERY
-POSTHOOK: Input: default@src
-POSTHOOK: Input: default@src1
-POSTHOOK: Output: default@dest_j1
-POSTHOOK: Lineage: dest_j1.key SIMPLE [(src1)x.FieldSchema(name:key, type:string, comment:default), ]
-POSTHOOK: Lineage: dest_j1.val2 SIMPLE [(src)y.FieldSchema(name:value, type:string, comment:default), ]
-POSTHOOK: Lineage: dest_j1.value SIMPLE [(src1)z.FieldSchema(name:value, type:string, comment:default), ]
-PREHOOK: query: select * from dest_j1
-PREHOOK: type: QUERY
-PREHOOK: Input: default@dest_j1
-#### A masked pattern was here ####
-POSTHOOK: query: select * from dest_j1
-POSTHOOK: type: QUERY
-POSTHOOK: Input: default@dest_j1
-#### A masked pattern was here ####
-146    val_146 val_146
-146    val_146 val_146
-146    val_146 val_146
-146    val_146 val_146
-150    val_150 val_150
-213    val_213 val_213
-213    val_213 val_213
-213    val_213 val_213
-213    val_213 val_213
-238    val_238 val_238
-238    val_238 val_238
-238    val_238 val_238
-238    val_238 val_238
-255    val_255 val_255
-255    val_255 val_255
-255    val_255 val_255
-255    val_255 val_255
-273    val_273 val_273
-273    val_273 val_273
-273    val_273 val_273
-273    val_273 val_273
-273    val_273 val_273
-273    val_273 val_273
-273    val_273 val_273
-273    val_273 val_273
-273    val_273 val_273
-278    val_278 val_278
-278    val_278 val_278
-278    val_278 val_278
-278    val_278 val_278
-311    val_311 val_311
-311    val_311 val_311
-311    val_311 val_311
-311    val_311 val_311
-311    val_311 val_311
-311    val_311 val_311
-311    val_311 val_311
-311    val_311 val_311
-311    val_311 val_311
-401    val_401 val_401
-401    val_401 val_401
-401    val_401 val_401
-401    val_401 val_401
-401    val_401 val_401
-401    val_401 val_401
-401    val_401 val_401
-401    val_401 val_401
-401    val_401 val_401
-401    val_401 val_401
-401    val_401 val_401
-401    val_401 val_401
-401    val_401 val_401
-401    val_401 val_401
-401    val_401 val_401
-401    val_401 val_401
-401    val_401 val_401
-401    val_401 val_401
-401    val_401 val_401
-401    val_401 val_401
-401    val_401 val_401
-401    val_401 val_401
-401    val_401 val_401
-401    val_401 val_401
-401    val_401 val_401
-406    val_406 val_406
-406    val_406 val_406
-406    val_406 val_406
-406    val_406 val_406
-406    val_406 val_406
-406    val_406 val_406
-406    val_406 val_406
-406    val_406 val_406
-406    val_406 val_406
-406    val_406 val_406
-406    val_406 val_406
-406    val_406 val_406
-406    val_406 val_406
-406    val_406 val_406
-406    val_406 val_406
-406    val_406 val_406
-66     val_66  val_66
-98     val_98  val_98
-98     val_98  val_98
-98     val_98  val_98
-98     val_98  val_98
-PREHOOK: query: EXPLAIN EXTENDED
-INSERT OVERWRITE TABLE dest_j2
-SELECT res.key, z.value, res.value
-FROM (select x.key, x.value from src1 x JOIN src y ON (x.key = y.key)) res 
-JOIN srcpart z ON (res.value = z.value and z.ds='2008-04-08' and z.hr=11)
-PREHOOK: type: QUERY
-POSTHOOK: query: EXPLAIN EXTENDED
-INSERT OVERWRITE TABLE dest_j2
-SELECT res.key, z.value, res.value
-FROM (select x.key, x.value from src1 x JOIN src y ON (x.key = y.key)) res 
-JOIN srcpart z ON (res.value = z.value and z.ds='2008-04-08' and z.hr=11)
-POSTHOOK: type: QUERY
-STAGE DEPENDENCIES:
-  Stage-8 is a root stage
-  Stage-6 depends on stages: Stage-8
-  Stage-7 depends on stages: Stage-6
-  Stage-5 depends on stages: Stage-7
-  Stage-0 depends on stages: Stage-5
-  Stage-2 depends on stages: Stage-0
-
-STAGE PLANS:
-  Stage: Stage-8
-    Map Reduce Local Work
-      Alias -> Map Local Tables:
-        $hdt$_1:$hdt$_2:x 
-          Fetch Operator
-            limit: -1
-      Alias -> Map Local Operator Tree:
-        $hdt$_1:$hdt$_2:x 
-          TableScan
-            alias: x
-            Statistics: Num rows: 25 Data size: 191 Basic stats: COMPLETE Column stats: NONE
-            GatherStats: false
-            Filter Operator
-              isSamplingPred: false
-              predicate: (key is not null and value is not null) (type: boolean)
-              Statistics: Num rows: 25 Data size: 191 Basic stats: COMPLETE Column stats: NONE
-              Select Operator
-                expressions: key (type: string), value (type: string)
-                outputColumnNames: _col0, _col1
-                Statistics: Num rows: 25 Data size: 191 Basic stats: COMPLETE Column stats: NONE
-                HashTable Sink Operator
-                  keys:
-                    0 _col0 (type: string)
-                    1 _col0 (type: string)
-                  Position of Big Table: 0
-
-  Stage: Stage-6
-    Map Reduce
-      Map Operator Tree:
-          TableScan
-            alias: y
-            Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE
-            GatherStats: false
-            Filter Operator
-              isSamplingPred: false
-              predicate: key is not null (type: boolean)
-              Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE
-              Select Operator
-                expressions: key (type: string)
-                outputColumnNames: _col0
-                Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE
-                Map Join Operator
-                  condition map:
-                       Inner Join 0 to 1
-                  keys:
-                    0 _col0 (type: string)
-                    1 _col0 (type: string)
-                  outputColumnNames: _col1, _col2
-                  Position of Big Table: 0
-                  Statistics: Num rows: 550 Data size: 5843 Basic stats: COMPLETE Column stats: NONE
-                  Select Operator
-                    expressions: _col1 (type: string), _col2 (type: string)
-                    outputColumnNames: _col0, _col1
-                    Statistics: Num rows: 550 Data size: 5843 Basic stats: COMPLETE Column stats: NONE
-                    File Output Operator
-                      compressed: false
-                      GlobalTableId: 0
-#### A masked pattern was here ####
-                      NumFilesPerFileSink: 1
-                      table:
-                          input format: org.apache.hadoop.mapred.SequenceFileInputFormat
-                          output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
-                          properties:
-                            columns _col0,_col1
-                            columns.types string,string
-                            escape.delim \
-                            serialization.lib org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe
-                          serde: org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe
-                      TotalFiles: 1
-                      GatherStats: false
-                      MultiFileSpray: false
-      Local Work:
-        Map Reduce Local Work
-      Path -> Alias:
-#### A masked pattern was here ####
-      Path -> Partition:
-#### A masked pattern was here ####
-          Partition
-            base file name: src
-            input format: org.apache.hadoop.mapred.TextInputFormat
-            output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-            properties:
-              COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
-              bucket_count -1
-              columns key,value
-              columns.comments 'default','default'
-              columns.types string:string
-#### A masked pattern was here ####
-              name default.src
-              numFiles 1
-              numRows 500
-              rawDataSize 5312
-              serialization.ddl struct src { string key, string value}
-              serialization.format 1
-              serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-              totalSize 5812
-#### A masked pattern was here ####
-            serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-          
-              input format: org.apache.hadoop.mapred.TextInputFormat
-              output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-              properties:
-                COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
-                bucket_count -1
-                columns key,value
-                columns.comments 'default','default'
-                columns.types string:string
-#### A masked pattern was here ####
-                name default.src
-                numFiles 1
-                numRows 500
-                rawDataSize 5312
-                serialization.ddl struct src { string key, string value}
-                serialization.format 1
-                serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-                totalSize 5812
-#### A masked pattern was here ####
-              serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-              name: default.src
-            name: default.src
-#### A masked pattern was here ####
-          Partition
-            base file name: src1
-            input format: org.apache.hadoop.mapred.TextInputFormat
-            output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-            properties:
-              COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
-              bucket_count -1
-              columns key,value
-              columns.comments 'default','default'
-              columns.types string:string
-#### A masked pattern was here ####
-              name default.src1
-              numFiles 1
-              numRows 25
-              rawDataSize 191
-              serialization.ddl struct src1 { string key, string value}
-              serialization.format 1
-              serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-              totalSize 216
-#### A masked pattern was here ####
-            serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-          
-              input format: org.apache.hadoop.mapred.TextInputFormat
-              output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-              properties:
-                COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
-                bucket_count -1
-                columns key,value
-                columns.comments 'default','default'
-                columns.types string:string
-#### A masked pattern was here ####
-                name default.src1
-                numFiles 1
-                numRows 25
-                rawDataSize 191
-                serialization.ddl struct src1 { string key, string value}
-                serialization.format 1
-                serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-                totalSize 216
-#### A masked pattern was here ####
-              serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-              name: default.src1
-            name: default.src1
-      Truncated Path -> Alias:
-        /src [$hdt$_1:$hdt$_1:y]
-
-  Stage: Stage-7
-    Map Reduce Local Work
-      Alias -> Map Local Tables:
-        $hdt$_0:z 
-          Fetch Operator
-            limit: -1
-            Partition Description:
-                Partition
-                  base file name: hr=11
-                  input format: org.apache.hadoop.mapred.TextInputFormat
-                  output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-                  partition values:
-                    ds 2008-04-08
-                    hr 11
-                  properties:
-                    COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
-                    bucket_count -1
-                    columns key,value
-                    columns.comments 'default','default'
-                    columns.types string:string
-#### A masked pattern was here ####
-                    name default.srcpart
-                    numFiles 1
-                    numRows 500
-                    partition_columns ds/hr
-                    partition_columns.types string:string
-                    rawDataSize 5312
-                    serialization.ddl struct srcpart { string key, string value}
-                    serialization.format 1
-                    serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-                    totalSize 5812
-#### A masked pattern was here ####
-                  serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-                
-                    input format: org.apache.hadoop.mapred.TextInputFormat
-                    output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-                    properties:
-                      bucket_count -1
-                      columns key,value
-                      columns.comments 'default','default'
-                      columns.types string:string
-#### A masked pattern was here ####
-                      name default.srcpart
-                      partition_columns ds/hr
-                      partition_columns.types string:string
-                      serialization.ddl struct srcpart { string key, string value}
-                      serialization.format 1
-                      serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-#### A masked pattern was here ####
-                    serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-                    name: default.srcpart
-                  name: default.srcpart
-      Alias -> Map Local Operator Tree:
-        $hdt$_0:z 
-          TableScan
-            alias: z
-            Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE
-            GatherStats: false
-            Filter Operator
-              isSamplingPred: false
-              predicate: value is not null (type: boolean)
-              Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE
-              Select Operator
-                expressions: value (type: string)
-                outputColumnNames: _col0
-                Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE
-                HashTable Sink Operator
-                  keys:
-                    0 _col0 (type: string)
-                    1 _col1 (type: string)
-                  Position of Big Table: 1
-
-  Stage: Stage-5
-    Map Reduce
-      Map Operator Tree:
-          TableScan
-            GatherStats: false
-            Map Join Operator
-              condition map:
-                   Inner Join 0 to 1
-              keys:
-                0 _col0 (type: string)
-                1 _col1 (type: string)
-              outputColumnNames: _col0, _col3, _col4
-              Position of Big Table: 1
-              Statistics: Num rows: 605 Data size: 6427 Basic stats: COMPLETE Column stats: NONE
-              Select Operator
-                expressions: _col3 (type: string), _col0 (type: string), _col4 (type: string)
-                outputColumnNames: _col0, _col1, _col2
-                Statistics: Num rows: 605 Data size: 6427 Basic stats: COMPLETE Column stats: NONE
-                File Output Operator
-                  compressed: false
-                  GlobalTableId: 1
-#### A masked pattern was here ####
-                  NumFilesPerFileSink: 1
-                  Statistics: Num rows: 605 Data size: 6427 Basic stats: COMPLETE Column stats: NONE
-#### A masked pattern was here ####
-                  table:
-                      input format: org.apache.hadoop.mapred.TextInputFormat
-                      output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-                      properties:
-                        COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
-                        bucket_count -1
-                        columns key,value,val2
-                        columns.comments 
-                        columns.types string:string:string
-#### A masked pattern was here ####
-                        name default.dest_j2
-                        numFiles 0
-                        numRows 0
-                        rawDataSize 0
-                        serialization.ddl struct dest_j2 { string key, string value, string val2}
-                        serialization.format 1
-                        serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-                        totalSize 0
-#### A masked pattern was here ####
-                      serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-                      name: default.dest_j2
-                  TotalFiles: 1
-                  GatherStats: true
-                  MultiFileSpray: false
-      Local Work:
-        Map Reduce Local Work
-      Path -> Alias:
-#### A masked pattern was here ####
-      Path -> Partition:
-#### A masked pattern was here ####
-          Partition
-            base file name: -mr-10002
-            input format: org.apache.hadoop.mapred.SequenceFileInputFormat
-            output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
-            properties:
-              columns _col0,_col1
-              columns.types string,string
-              escape.delim \
-              serialization.lib org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe
-            serde: org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe
-          
-              input format: org.apache.hadoop.mapred.SequenceFileInputFormat
-              output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
-              properties:
-                columns _col0,_col1
-                columns.types string,string
-                escape.delim \
-                serialization.lib org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe
-              serde: org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe
-#### A masked pattern was here ####
-          Partition
-            base file name: hr=11
-            input format: org.apache.hadoop.mapred.TextInputFormat
-            output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-            partition values:
-              ds 2008-04-08
-              hr 11
-            properties:
-              COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
-              bucket_count -1
-              columns key,value
-              columns.comments 'default','default'
-              columns.types string:string
-#### A masked pattern was here ####
-              name default.srcpart
-              numFiles 1
-              numRows 500
-              partition_columns ds/hr
-              partition_columns.types string:string
-              rawDataSize 5312
-              serialization.ddl struct srcpart { string key, string value}
-              serialization.format 1
-              serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-              totalSize 5812
-#### A masked pattern was here ####
-            serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-          
-              input format: org.apache.hadoop.mapred.TextInputFormat
-              output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-              properties:
-                bucket_count -1
-                columns key,value
-                columns.comments 'default','default'
-                columns.types string:string
-#### A masked pattern was here ####
-                name default.srcpart
-                partition_columns ds/hr
-                partition_columns.types string:string
-                serialization.ddl struct srcpart { string key, string value}
-                serialization.format 1
-                serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-#### A masked pattern was here ####
-              serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-              name: default.srcpart
-            name: default.srcpart
-      Truncated Path -> Alias:
-#### A masked pattern was here ####
-
-  Stage: Stage-0
-    Move Operator
-      tables:
-          replace: true
-#### A masked pattern was here ####
-          table:
-              input format: org.apache.hadoop.mapred.TextInputFormat
-              output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-              properties:
-                COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
-                bucket_count -1
-                columns key,value,val2
-                columns.comments 
-                columns.types string:string:string
-#### A masked pattern was here ####
-                name default.dest_j2
-                numFiles 0
-                numRows 0
-                rawDataSize 0
-                serialization.ddl struct dest_j2 { string key, string value, string val2}
-                serialization.format 1
-                serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-                totalSize 0
-#### A masked pattern was here ####
-              serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-              name: default.dest_j2
-
-  Stage: Stage-2
-    Stats-Aggr Operator
-#### A masked pattern was here ####
-
-PREHOOK: query: INSERT OVERWRITE TABLE dest_j2
-SELECT res.key, z.value, res.value
-FROM (select x.key, x.value from src1 x JOIN src y ON (x.key = y.key)) res 
-JOIN srcpart z ON (res.value = z.value and z.ds='2008-04-08' and z.hr=11)
-PREHOOK: type: QUERY
-PREHOOK: Input: default@src
-PREHOOK: Input: default@src1
-PREHOOK: Input: default@srcpart
-PREHOOK: Input: default@srcpart@ds=2008-04-08/hr=11
-PREHOOK: Output: default@dest_j2
-POSTHOOK: query: INSERT OVERWRITE TABLE dest_j2
-SELECT res.key, z.value, res.value
-FROM (select x.key, x.value from src1 x JOIN src y ON (x.key = y.key)) res 
-JOIN srcpart z ON (res.value = z.value and z.ds='2008-04-08' and z.hr=11)
-POSTHOOK: type: QUERY
-POSTHOOK: Input: default@src
-POSTHOOK: Input: default@src1
-POSTHOOK: Input: default@srcpart
-POSTHOOK: Input: default@srcpart@ds=2008-04-08/hr=11
-POSTHOOK: Output: default@dest_j2
-POSTHOOK: Lineage: dest_j2.key SIMPLE [(src1)x.FieldSchema(name:key, type:string, comment:default), ]
-POSTHOOK: Lineage: dest_j2.val2 SIMPLE [(src1)x.FieldSchema(name:value, type:string, comment:default), ]
-POSTHOOK: Lineage: dest_j2.value SIMPLE [(srcpart)z.FieldSchema(name:value, type:string, comment:default), ]
-PREHOOK: query: select * from dest_j2
-PREHOOK: type: QUERY
-PREHOOK: Input: default@dest_j2
-#### A masked pattern was here ####
-POSTHOOK: query: select * from dest_j2
-POSTHOOK: type: QUERY
-POSTHOOK: Input: default@dest_j2
-#### A masked pattern was here ####
-146    val_146 val_146
-146    val_146 val_146
-146    val_146 val_146
-146    val_146 val_146
-150    val_150 val_150
-213    val_213 val_213
-213    val_213 val_213
-213    val_213 val_213
-213    val_213 val_213
-238    val_238 val_238
-238    val_238 val_238
-238    val_238 val_238
-238    val_238 val_238
-255    val_255 val_255
-255    val_255 val_255
-255    val_255 val_255
-255    val_255 val_255
-273    val_273 val_273
-273    val_273 val_273
-273    val_273 val_273
-273    val_273 val_273
-273    val_273 val_273
-273    val_273 val_273
-273    val_273 val_273
-273    val_273 val_273
-273    val_273 val_273
-278    val_278 val_278
-278    val_278 val_278
-278    val_278 val_278
-278    val_278 val_278
-311    val_311 val_311
-311    val_311 val_311
-311    val_311 val_311
-311    val_311 val_311
-311    val_311 val_311
-311    val_311 val_311
-311    val_311 val_311
-311    val_311 val_311
-311    val_311 val_311
-401    val_401 val_401
-401    val_401 val_401
-401    val_401 val_401
-401    val_401 val_401
-401    val_401 val_401
-401    val_401 val_401
-401    val_401 val_401
-401    val_401 val_401
-401    val_401 val_401
-401    val_401 val_401
-401    val_401 val_401
-401    val_401 val_401
-401    val_401 val_401
-401    val_401 val_401
-401    val_401 val_401
-401    val_401 val_401
-401    val_401 val_401
-401    val_401 val_401
-401    val_401 val_401
-401    val_401 val_401
-401    val_401 val_401
-401    val_401 val_401
-401    val_401 val_401
-401    val_401 val_401
-401    val_401 val_401
-406    val_406 val_406
-406    val_406 val_406
-406    val_406 val_406
-406    val_406 val_406
-406    val_406 val_406
-406    val_406 val_406
-406    val_406 val_406
-406    val_406 val_406
-406    val_406 val_406
-406    val_406 val_406
-406    val_406 val_406
-406    val_406 val_406
-406    val_406 val_406
-406    val_406 val_406
-406    val_406 val_406
-406    val_406 val_406
-66     val_66  val_66
-98     val_98  val_98
-98     val_98  val_98
-98     val_98  val_98
-98     val_98  val_98
-PREHOOK: query: EXPLAIN EXTENDED
-INSERT OVERWRITE TABLE dest_j2
-SELECT res.key, z.value, res.value
-FROM (select x.key, x.value from src1 x LEFT OUTER JOIN src y ON (x.key = y.key)) res
-JOIN srcpart z ON (res.value = z.value and z.ds='2008-04-08' and z.hr=11)
-PREHOOK: type: QUERY
-POSTHOOK: query: EXPLAIN EXTENDED
-INSERT OVERWRITE TABLE dest_j2
-SELECT res.key, z.value, res.value
-FROM (select x.key, x.value from src1 x LEFT OUTER JOIN src y ON (x.key = y.key)) res
-JOIN srcpart z ON (res.value = z.value and z.ds='2008-04-08' and z.hr=11)
-POSTHOOK: type: QUERY
-STAGE DEPENDENCIES:
-  Stage-8 is a root stage
-  Stage-6 depends on stages: Stage-8
-  Stage-7 depends on stages: Stage-6
-  Stage-5 depends on stages: Stage-7
-  Stage-0 depends on stages: Stage-5
-  Stage-2 depends on stages: Stage-0
-
-STAGE PLANS:
-  Stage: Stage-8
-    Map Reduce Local Work
-      Alias -> Map Local Tables:
-        $hdt$_1:$hdt$_1:y 
-          Fetch Operator
-            limit: -1
-      Alias -> Map Local Operator Tree:
-        $hdt$_1:$hdt$_1:y 
-          TableScan
-            alias: y
-            Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE
-            GatherStats: false
-            Select Operator
-              expressions: key (type: string)
-              outputColumnNames: _col0
-              Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE
-              HashTable Sink Operator
-                keys:
-                  0 _col0 (type: string)
-                  1 _col0 (type: string)
-                Position of Big Table: 1
-
-  Stage: Stage-6
-    Map Reduce
-      Map Operator Tree:
-          TableScan
-            alias: x
-            Statistics: Num rows: 25 Data size: 191 Basic stats: COMPLETE Column stats: NONE
-            GatherStats: false
-            Filter Operator
-              isSamplingPred: false
-              predicate: value is not null (type: boolean)
-              Statistics: Num rows: 25 Data size: 191 Basic stats: COMPLETE Column stats: NONE
-              Select Operator
-                expressions: key (type: string), value (type: string)
-                outputColumnNames: _col0, _col1
-                Statistics: Num rows: 25 Data size: 191 Basic stats: COMPLETE Column stats: NONE
-                Map Join Operator
-                  condition map:
-                       Right Outer Join0 to 1
-                  keys:
-                    0 _col0 (type: string)
-                    1 _col0 (type: string)
-                  outputColumnNames: _col1, _col2
-                  Position of Big Table: 1
-                  Statistics: Num rows: 550 Data size: 5843 Basic stats: COMPLETE Column stats: NONE
-                  Select Operator
-                    expressions: _col1 (type: string), _col2 (type: string)
-                    outputColumnNames: _col0, _col1
-                    Statistics: Num rows: 550 Data size: 5843 Basic stats: COMPLETE Column stats: NONE
-                    File Output Operator
-                      compressed: false
-                      GlobalTableId: 0
-#### A masked pattern was here ####
-                      NumFilesPerFileSink: 1
-                      table:
-                          input format: org.apache.hadoop.mapred.SequenceFileInputFormat
-                          output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
-                          properties:
-                            columns _col0,_col1
-                            columns.types string,string
-                            escape.delim \
-                            serialization.lib org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe
-                          serde: org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe
-                      TotalFiles: 1
-                      GatherStats: false
-                      MultiFileSpray: false
-      Local Work:
-        Map Reduce Local Work
-      Path -> Alias:
-#### A masked pattern was here ####
-      Path -> Partition:
-#### A masked pattern was here ####
-          Partition
-            base file name: src
-            input format: org.apache.hadoop.mapred.TextInputFormat
-            output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-            properties:
-              COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
-              bucket_count -1
-              columns key,value
-              columns.comments 'default','default'
-              columns.types string:string
-#### A masked pattern was here ####
-              name default.src
-              numFiles 1
-              numRows 500
-              rawDataSize 5312
-              serialization.ddl struct src { string key, string value}
-              serialization.format 1
-              serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-              totalSize 5812
-#### A masked pattern was here ####
-            serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-          
-              input format: org.apache.hadoop.mapred.TextInputFormat
-              output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-              properties:
-                COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
-                bucket_count -1
-                columns key,value
-                columns.comments 'default','default'
-                columns.types string:string
-#### A masked pattern was here ####
-                name default.src
-                numFiles 1
-                numRows 500
-                rawDataSize 5312
-                serialization.ddl struct src { string key, string value}
-                serialization.format 1
-                serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-                totalSize 5812
-#### A masked pattern was here ####
-              serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-              name: default.src
-            name: default.src
-#### A masked pattern was here ####
-          Partition
-            base file name: src1
-            input format: org.apache.hadoop.mapred.TextInputFormat
-            output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-            properties:
-              COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
-              bucket_count -1
-              columns key,value
-              columns.comments 'default','default'
-              columns.types string:string
-#### A masked pattern was here ####
-              name default.src1
-              numFiles 1
-              numRows 25
-              rawDataSize 191
-              serialization.ddl struct src1 { string key, string value}
-              serialization.format 1
-              serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-              totalSize 216
-#### A masked pattern was here ####
-            serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-          
-              input format: org.apache.hadoop.mapred.TextInputFormat
-              output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-              properties:
-                COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
-                bucket_count -1
-                columns key,value
-                columns.comments 'default','default'
-                columns.types string:string
-#### A masked pattern was here ####
-                name default.src1
-                numFiles 1
-                numRows 25
-                rawDataSize 191
-                serialization.ddl struct src1 { string key, string value}
-                serialization.format 1
-                serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-                totalSize 216
-#### A masked pattern was here ####
-              serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-              name: default.src1
-            name: default.src1
-      Truncated Path -> Alias:
-        /src1 [$hdt$_1:$hdt$_2:x]
-
-  Stage: Stage-7
-    Map Reduce Local Work
-      Alias -> Map Local Tables:
-        $hdt$_0:z 
-          Fetch Operator
-            limit: -1
-            Partition Description:
-                Partition
-                  base file name: hr=11
-                  input format: org.apache.hadoop.mapred.TextInputFormat
-                  output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-                  partition values:
-                    ds 2008-04-08
-                    hr 11
-                  properties:
-                    COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
-                    bucket_count -1
-                    columns key,value
-                    columns.comments 'default','default'
-                    columns.types string:string
-#### A masked pattern was here ####
-                    name default.srcpart
-                    numFiles 1
-                    numRows 500
-                    partition_columns ds/hr
-                    partition_columns.types string:string
-                    rawDataSize 5312
-                    serialization.ddl struct srcpart { string key, string value}
-                    serialization.format 1
-                    serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-                    totalSize 5812
-#### A masked pattern was here ####
-                  serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-                
-                    input format: org.apache.hadoop.mapred.TextInputFormat
-                    output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-                    properties:
-                      bucket_count -1
-                      columns key,value
-                      columns.comments 'default','default'
-                      columns.types string:string
-#### A masked pattern was here ####
-                      name default.srcpart
-                      partition_columns ds/hr
-                      partition_columns.types string:string
-                      serialization.ddl struct srcpart { string key, string value}
-                      serialization.format 1
-                      serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-#### A masked pattern was here ####
-                    serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-                    name: default.srcpart
-                  name: default.srcpart
-      Alias -> Map Local Operator Tree:
-        $hdt$_0:z 
-          TableScan
-            alias: z
-            Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE
-            GatherStats: false
-            Filter Operator
-              isSamplingPred: false
-              predicate: value is not null (type: boolean)
-              Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE
-              Select Operator
-                expressions: value (type: string)
-                outputColumnNames: _col0
-                Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE
-                HashTable Sink Operator
-                  keys:
-                    0 _col0 (type: string)
-                    1 _col1 (type: string)
-                  Position of Big Table: 1
-
-  Stage: Stage-5
-    Map Reduce
-      Map Operator Tree:
-          TableScan
-            GatherStats: false
-            Map Join Operator
-              condition map:
-                   Inner Join 0 to 1
-              keys:
-                0 _col0 (type: string)
-                1 _col1 (type: string)
-              outputColumnNames: _col0, _col3, _col4
-              Position of Big Table: 1
-              Statistics: Num rows: 605 Data size: 6427 Basic stats: COMPLETE Column stats: NONE
-              Select Operator
-                expressions: _col3 (type: string), _col0 (type: string), _col4 (type: string)
-                outputColumnNames: _col0, _col1, _col2
-                Statistics: Num rows: 605 Data size: 6427 Basic stats: COMPLETE Column stats: NONE
-                File Output Operator
-                  compressed: false
-                  GlobalTableId: 1
-#### A masked pattern was here ####
-                  NumFilesPerFileSink: 1
-                  Statistics: Num rows: 605 Data size: 6427 Basic stats: COMPLETE Column stats: NONE
-#### A masked pattern was here ####
-                  table:
-                      input format: org.apache.hadoop.mapred.TextInputFormat
-                      output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-                      properties:
-                        COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
-                        bucket_count -1
-                        columns key,value,val2
-                        columns.comments 
-                        columns.types string:string:string
-#### A masked pattern was here ####
-                        name default.dest_j2
-                        numFiles 1
-                        numRows 85
-                        rawDataSize 1600
-                        serialization.ddl struct dest_j2 { string key, string value, string val2}
-                        serialization.format 1
-                        serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-                        totalSize 1685
-#### A masked pattern was here ####
-                      serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-                      name: default.dest_j2
-                  TotalFiles: 1
-                  GatherStats: true
-                  MultiFileSpray: false
-      Local Work:
-        Map Reduce Local Work
-      Path -> Alias:
-#### A masked pattern was here ####
-      Path -> Partition:
-#### A masked pattern was here ####
-          Partition
-            base file name: -mr-10002
-            input format: org.apache.hadoop.mapred.SequenceFileInputFormat
-            output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
-            properties:
-              columns _col0,_col1
-              columns.types string,string
-              escape.delim \
-              serialization.lib org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe
-            serde: org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe
-          
-              input format: org.apache.hadoop.mapred.SequenceFileInputFormat
-              output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
-              properties:
-                columns _col0,_col1
-                columns.types string,string
-                escape.delim \
-                serialization.lib org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe
-              serde: org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe
-#### A masked pattern was here ####
-          Partition
-            base file name: hr=11
-            input format: org.apache.hadoop.mapred.TextInputFormat
-            output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-            partition values:
-              ds 2008-04-08
-              hr 11
-            properties:
-              COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
-              bucket_count -1
-              columns key,value
-              columns.comments 'default','default'
-              columns.types string:string
-#### A masked pattern was here ####
-              name default.srcpart
-              numFiles 1
-              numRows 500
-              partition_columns ds/hr
-              partition_columns.types string:string
-              rawDataSize 5312
-              serialization.ddl struct srcpart { string key, string value}
-              serialization.format 1
-              serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-              totalSize 5812
-#### A masked pattern was here ####
-            serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-          
-              input format: org.apache.hadoop.mapred.TextInputFormat
-              output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-              properties:
-                bucket_count -1
-                columns key,value
-                columns.comments 'default','default'
-                columns.types string:string
-#### A masked pattern was here ####
-                name default.srcpart
-                partition_columns ds/hr
-                partition_columns.types string:string
-                serialization.ddl struct srcpart { string key, string value}
-                serialization.format 1
-                serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-#### A masked pattern was here ####
-              serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-              name: default.srcpart
-            name: default.srcpart
-      Truncated Path -> Alias:
-#### A masked pattern was here ####
-
-  Stage: Stage-0
-    Move Operator
-      tables:
-          replace: true
-#### A masked pattern was here ####
-          table:
-              input format: org.apache.hadoop.mapred.TextInputFormat
-              output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-              properties:
-                COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
-                bucket_count -1
-                columns key,value,val2
-                columns.comments 
-                columns.types string:string:string
-#### A masked pattern was here ####
-                name default.dest_j2
-                numFiles 1
-                numRows 85
-                rawDataSize 1600
-                serialization.ddl struct dest_j2 { string key, string value, string val2}
-                serialization.format 1
-                serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-                totalSize 1685
-#### A masked pattern was here ####
-              serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-              name: default.dest_j2
-
-  Stage: Stage-2
-    Stats-Aggr Operator
-#### A masked pattern was here ####
-
-PREHOOK: query: INSERT OVERWRITE TABLE dest_j2
-SELECT res.key, z.value, res.value
-FROM (select x.key, x.value from src1 x LEFT OUTER JOIN src y ON (x.key = y.key)) res
-JOIN srcpart z ON (res.value = z.value and z.ds='2008-04-08' and z.hr=11)
-PREHOOK: type: QUERY
-PREHOOK: Input: default@src
-PREHOOK: Input: default@src1
-PREHOOK: Input: default@srcpart
-PREHOOK: Input: default@srcpart@ds=2008-04-08/hr=11
-PREHOOK: Output: default@dest_j2
-POSTHOOK: query: INSERT OVERWRITE TABLE dest_j2
-SELECT res.key, z.value, res.value
-FROM (select x.key, x.value from src1 x LEFT OUTER JOIN src y ON (x.key = y.key)) res
-JOIN srcpart z ON (res.value = z.value and z.ds='2008-04-08' and z.hr=11)
-POSTHOOK: type: QUERY
-POSTHOOK: Input: default@src
-POSTHOOK: Input: default@src1
-POSTHOOK: Input: default@srcpart
-POSTHOOK: Input: default@srcpart@ds=2008-04-08/hr=11
-POSTHOOK: Output: default@dest_j2
-POSTHOOK: Lineage: dest_j2.key SIMPLE [(src1)x.FieldSchema(name:key, type:string, comment:default), ]
-POSTHOOK: Lineage: dest_j2.val2 SIMPLE [(src1)x.FieldSchema(name:value, type:string, comment:default), ]
-POSTHOOK: Lineage: dest_j2.value SIMPLE [(srcpart)z.FieldSchema(name:value, type:string, comment:default), ]
-PREHOOK: query: select * from dest_j2
-PREHOOK: type: QUERY
-PREHOOK: Input: default@dest_j2
-#### A masked pattern was here ####
-POSTHOOK: query: select * from dest_j2
-POSTHOOK: type: QUERY
-POSTHOOK: Input: default@dest_j2
-#### A masked pattern was here ####
-       val_165 val_165
-       val_165 val_165
-       val_193 val_193
-       val_193 val_193
-       val_193 val_193
-       val_265 val_265
-       val_265 val_265
-       val_27  val_27
-       val_409 val_409
-       val_409 val_409
-       val_409 val_409
-       val_484 val_484
-146    val_146 val_146
-146    val_146 val_146
-146    val_146 val_146
-146    val_146 val_146
-150    val_150 val_150
-213    val_213 val_213
-213    val_213 val_213
-213    val_213 val_213
-213    val_213 val_213
-238    val_238 val_238
-238    val_238 val_238
-238    val_238 val_238
-238    val_238 val_238
-255    val_255 val_255
-255    val_255 val_255
-255    val_255 val_255
-255    val_255 val_255
-273    val_273 val_273
-273    val_273 val_273
-273    val_273 val_273
-273    val_273 val_273
-273    val_273 val_273
-273    val_273 val_273
-273    val_273 val_273
-273    val_273 val_273
-273    val_273 val_273
-278    val_278 val_278
-278    val_278 val_278
-278    val_278 val_278
-278    val_278 val_278
-311    val_311 val_311
-311    val_311 val_311
-311    val_311 val_311
-311    val_311 val_311
-311    val_311 val_311
-311    val_311 val_311
-311    val_311 val_311
-311    val_311 val_311
-311    val_311 val_311
-401    val_401 val_401
-401    val_401 val_401
-401    val_401 val_401
-401    val_401 val_401
-401    val_401 val_401
-401    val_401 val_401
-401    val_401 val_401
-401    val_401 val_401
-401    val_401 val_401
-401    val_401 val_401
-401    val_401 val_401
-401    val_401 val_401
-401    val_401 val_401
-401    val_401 val_401
-401    val_401 val_401
-401    val_401 val_401
-401    val_401 val_401
-401    val_401 val_401
-401    val_401 val_401
-401    val_401 val_401
-401    val_401 val_401
-401    val_401 val_401
-401    val_401 val_401
-401    val_401 val_401
-401    val_401 val_401
-406    val_406 val_406
-406    val_406 val_406
-406    val_406 val_406
-406    val_406 val_406
-406    val_406 val_406
-406    val_406 val_406
-406    val_406 val_406
-406    val_406 val_406
-406    val_406 val_406
-406    val_406 val_406
-406    val_406 val_406
-406    val_406 val_406
-406    val_406 val_406
-406    val_406 val_406
-406    val_406 val_406
-406    val_406 val_406
-66     val_66  val_66
-98     val_98  val_98
-98     val_98  val_98
-98     val_98  val_98
-98     val_98  val_98
-PREHOOK: query: EXPLAIN
-INSERT OVERWRITE TABLE dest_j2
-SELECT res.key, x.value, res.value  
-FROM (select x.key, x.value from src1 x JOIN src y ON (x.key = y.key)) res 
-JOIN srcpart x ON (res.value = x.value and x.ds='2008-04-08' and x.hr=11)
-PREHOOK: type: QUERY
-POSTHOOK: query: EXPLAIN
-INSERT OVERWRITE TABLE dest_j2
-SELECT res.key, x.value, res.value  
-FROM (select x.key, x.value from src1 x JOIN src y ON (x.key = y.key)) res 
-JOIN srcpart x ON (res.value = x.value and x.ds='2008-04-08' and x.hr=11)
-POSTHOOK: type: QUERY
-STAGE DEPENDENCIES:
-  Stage-8 is a root stage
-  Stage-6 depends on stages: Stage-8
-  Stage-7 depends on stages: Stage-6
-  Stage-5 depends on stages: Stage-7
-  Stage-0 depends on stages: Stage-5
-  Stage-2 depends on stages: Stage-0
-
-STAGE PLANS:
-  Stage: Stage-8
-    Map Reduce Local Work
-      Alias -> Map Local Tables:
-        $hdt$_1:$hdt$_2:x 
-          Fetch Operator
-            limit: -1
-      Alias -> Map Local Operator Tree:
-        $hdt$_1:$hdt$_2:x 
-          TableScan
-            alias: x
-            Statistics: Num rows: 25 Data size: 191 Basic stats: COMPLETE Column stats: NONE
-            Filter Operator
-              predicate: (key is not null and value is not null) (type: boolean)
-              Statistics: Num rows: 25 Data size: 191 Basic stats: COMPLETE Column stats: NONE
-              Select Operator
-                expressions: key (type: string), value (type: string)
-                outputColumnNames: _col0, _col1
-                Statistics: Num rows: 25 Data size: 191 Basic stats: COMPLETE Column stats: NONE
-                HashTable Sink Operator
-                  keys:
-                    0 _col0 (type: string)
-                    1 _col0 (type: string)
-
-  Stage: Stage-6
-    Map Reduce
-      Map Operator Tree:
-   

<TRUNCATED>
