http://git-wip-us.apache.org/repos/asf/hive/blob/9b7c0eaa/ql/src/test/results/clientpositive/join_cond_pushdown_3.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/join_cond_pushdown_3.q.out b/ql/src/test/results/clientpositive/join_cond_pushdown_3.q.out
index a3641d6..9077fc0 100644
--- a/ql/src/test/results/clientpositive/join_cond_pushdown_3.q.out
+++ b/ql/src/test/results/clientpositive/join_cond_pushdown_3.q.out
@@ -287,7 +287,7 @@ STAGE PLANS:
       Processor Tree:
         ListSink
 
-Warning: Shuffle Join JOIN[16][tables = [$hdt$_0, $hdt$_1]] in Stage 'Stage-1:MAPRED' is a cross product
+Warning: Shuffle Join JOIN[15][tables = [$hdt$_1, $hdt$_2, $hdt$_0]] in Stage 'Stage-2:MAPRED' is a cross product
 PREHOOK: query: explain select *
 from part p1 join part p2 join part p3 
 where p2.p_partkey = 1 and p3.p_name = p2.p_name
@@ -297,12 +297,12 @@ from part p1 join part p2 join part p3
 where p2.p_partkey = 1 and p3.p_name = p2.p_name
 POSTHOOK: type: QUERY
 STAGE DEPENDENCIES:
-  Stage-2 is a root stage
-  Stage-1 depends on stages: Stage-2
-  Stage-0 depends on stages: Stage-1
+  Stage-1 is a root stage
+  Stage-2 depends on stages: Stage-1
+  Stage-0 depends on stages: Stage-2
 
 STAGE PLANS:
-  Stage: Stage-2
+  Stage: Stage-1
     Map Reduce
       Map Operator Tree:
           TableScan
@@ -346,21 +346,22 @@ STAGE PLANS:
             1 _col1 (type: string)
          outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7, _col8, _col10, _col11, _col12, _col13, _col14, _col15, _col16, _col17
          Statistics: Num rows: 14 Data size: 1730 Basic stats: COMPLETE Column stats: NONE
-          Select Operator
-            expressions: _col0 (type: int), _col1 (type: string), _col2 (type: string), _col3 (type: string), _col4 (type: string), _col5 (type: int), _col6 (type: string), _col7 (type: double), _col8 (type: string), 1 (type: int), _col10 (type: string), _col11 (type: string), _col12 (type: string), _col13 (type: string), _col14 (type: int), _col15 (type: string), _col16 (type: double), _col17 (type: string)
-            outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7, _col8, _col9, _col10, _col11, _col12, _col13, _col14, _col15, _col16, _col17
-            Statistics: Num rows: 14 Data size: 1730 Basic stats: COMPLETE Column stats: NONE
-            File Output Operator
-              compressed: false
-              table:
-                  input format: org.apache.hadoop.mapred.SequenceFileInputFormat
-                  output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
-                  serde: org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe
+          File Output Operator
+            compressed: false
+            table:
+                input format: org.apache.hadoop.mapred.SequenceFileInputFormat
+                output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+                serde: org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe
 
-  Stage: Stage-1
+  Stage: Stage-2
     Map Reduce
       Map Operator Tree:
           TableScan
+            Reduce Output Operator
+              sort order: 
+              Statistics: Num rows: 14 Data size: 1730 Basic stats: COMPLETE Column stats: NONE
+              value expressions: _col0 (type: int), _col1 (type: string), _col2 (type: string), _col3 (type: string), _col4 (type: string), _col5 (type: int), _col6 (type: string), _col7 (type: double), _col8 (type: string), _col10 (type: string), _col11 (type: string), _col12 (type: string), _col13 (type: string), _col14 (type: int), _col15 (type: string), _col16 (type: double), _col17 (type: string)
+          TableScan
             alias: p1
            Statistics: Num rows: 26 Data size: 3147 Basic stats: COMPLETE Column stats: NONE
             Select Operator
@@ -371,11 +372,6 @@ STAGE PLANS:
                 sort order: 
                Statistics: Num rows: 26 Data size: 3147 Basic stats: COMPLETE Column stats: NONE
                value expressions: _col0 (type: int), _col1 (type: string), _col2 (type: string), _col3 (type: string), _col4 (type: string), _col5 (type: int), _col6 (type: string), _col7 (type: double), _col8 (type: string)
-          TableScan
-            Reduce Output Operator
-              sort order: 
-              Statistics: Num rows: 14 Data size: 1730 Basic stats: COMPLETE Column stats: NONE
-              value expressions: _col0 (type: int), _col1 (type: string), _col2 (type: string), _col3 (type: string), _col4 (type: string), _col5 (type: int), _col6 (type: string), _col7 (type: double), _col8 (type: string), _col9 (type: int), _col10 (type: string), _col11 (type: string), _col12 (type: string), _col13 (type: string), _col14 (type: int), _col15 (type: string), _col16 (type: double), _col17 (type: string)
       Reduce Operator Tree:
         Join Operator
           condition map:
@@ -383,10 +379,10 @@ STAGE PLANS:
           keys:
             0 
             1 
-          outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7, _col8, _col9, _col10, _col11, _col12, _col13, _col14, _col15, _col16, _col17, _col18, _col19, _col20, _col21, _col22, _col23, _col24, _col25, _col26
+          outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7, _col8, _col10, _col11, _col12, _col13, _col14, _col15, _col16, _col17, _col18, _col19, _col20, _col21, _col22, _col23, _col24, _col25, _col26
          Statistics: Num rows: 28 Data size: 3461 Basic stats: COMPLETE Column stats: NONE
           Select Operator
-            expressions: _col0 (type: int), _col1 (type: string), _col2 (type: string), _col3 (type: string), _col4 (type: string), _col5 (type: int), _col6 (type: string), _col7 (type: double), _col8 (type: string), _col18 (type: int), _col19 (type: string), _col20 (type: string), _col21 (type: string), _col22 (type: string), _col23 (type: int), _col24 (type: string), _col25 (type: double), _col26 (type: string), _col9 (type: int), _col10 (type: string), _col11 (type: string), _col12 (type: string), _col13 (type: string), _col14 (type: int), _col15 (type: string), _col16 (type: double), _col17 (type: string)
+            expressions: _col18 (type: int), _col19 (type: string), _col20 (type: string), _col21 (type: string), _col22 (type: string), _col23 (type: int), _col24 (type: string), _col25 (type: double), _col26 (type: string), 1 (type: int), _col10 (type: string), _col11 (type: string), _col12 (type: string), _col13 (type: string), _col14 (type: int), _col15 (type: string), _col16 (type: double), _col17 (type: string), _col0 (type: int), _col1 (type: string), _col2 (type: string), _col3 (type: string), _col4 (type: string), _col5 (type: int), _col6 (type: string), _col7 (type: double), _col8 (type: string)
            outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7, _col8, _col9, _col10, _col11, _col12, _col13, _col14, _col15, _col16, _col17, _col18, _col19, _col20, _col21, _col22, _col23, _col24, _col25, _col26
            Statistics: Num rows: 28 Data size: 3461 Basic stats: COMPLETE Column stats: NONE
             File Output Operator

http://git-wip-us.apache.org/repos/asf/hive/blob/9b7c0eaa/ql/src/test/results/clientpositive/join_cond_pushdown_4.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/join_cond_pushdown_4.q.out b/ql/src/test/results/clientpositive/join_cond_pushdown_4.q.out
index a1dd24e..55e7678 100644
--- a/ql/src/test/results/clientpositive/join_cond_pushdown_4.q.out
+++ b/ql/src/test/results/clientpositive/join_cond_pushdown_4.q.out
@@ -145,13 +145,13 @@ where p2.p_name = p3.p_name and p1.p_partkey = p4.p_partkey
             and p1.p_partkey = p2.p_partkey
 POSTHOOK: type: QUERY
 STAGE DEPENDENCIES:
-  Stage-3 is a root stage
-  Stage-2 depends on stages: Stage-3
-  Stage-1 depends on stages: Stage-2
-  Stage-0 depends on stages: Stage-1
+  Stage-1 is a root stage
+  Stage-2 depends on stages: Stage-1
+  Stage-3 depends on stages: Stage-2
+  Stage-0 depends on stages: Stage-3
 
 STAGE PLANS:
-  Stage: Stage-3
+  Stage: Stage-1
     Map Reduce
       Map Operator Tree:
           TableScan
@@ -206,6 +206,13 @@ STAGE PLANS:
     Map Reduce
       Map Operator Tree:
           TableScan
+            Reduce Output Operator
+              key expressions: _col10 (type: string)
+              sort order: +
+              Map-reduce partition columns: _col10 (type: string)
+              Statistics: Num rows: 7 Data size: 931 Basic stats: COMPLETE Column stats: NONE
+              value expressions: _col0 (type: int), _col1 (type: string), _col2 (type: string), _col3 (type: string), _col4 (type: string), _col5 (type: int), _col6 (type: string), _col7 (type: double), _col8 (type: string), _col9 (type: int), _col11 (type: string), _col12 (type: string), _col13 (type: string), _col14 (type: int), _col15 (type: string), _col16 (type: double), _col17 (type: string)
+          TableScan
             alias: p1
            Statistics: Num rows: 26 Data size: 3147 Basic stats: COMPLETE Column stats: NONE
             Filter Operator
@@ -221,20 +228,13 @@ STAGE PLANS:
                   Map-reduce partition columns: _col1 (type: string)
                   Statistics: Num rows: 13 Data size: 1573 Basic stats: COMPLETE Column stats: NONE
                   value expressions: _col0 (type: int), _col2 (type: string), _col3 (type: string), _col4 (type: string), _col5 (type: int), _col6 (type: string), _col7 (type: double), _col8 (type: string)
-          TableScan
-            Reduce Output Operator
-              key expressions: _col10 (type: string)
-              sort order: +
-              Map-reduce partition columns: _col10 (type: string)
-              Statistics: Num rows: 7 Data size: 931 Basic stats: COMPLETE Column stats: NONE
-              value expressions: _col0 (type: int), _col1 (type: string), _col2 (type: string), _col3 (type: string), _col4 (type: string), _col5 (type: int), _col6 (type: string), _col7 (type: double), _col8 (type: string), _col9 (type: int), _col11 (type: string), _col12 (type: string), _col13 (type: string), _col14 (type: int), _col15 (type: string), _col16 (type: double), _col17 (type: string)
       Reduce Operator Tree:
         Join Operator
           condition map:
                Inner Join 0 to 1
           keys:
-            0 _col1 (type: string)
-            1 _col10 (type: string)
+            0 _col10 (type: string)
+            1 _col1 (type: string)
          outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7, _col8, _col9, _col10, _col11, _col12, _col13, _col14, _col15, _col16, _col17, _col18, _col19, _col20, _col21, _col22, _col23, _col24, _col25, _col26
          Statistics: Num rows: 14 Data size: 1730 Basic stats: COMPLETE Column stats: NONE
           File Output Operator
@@ -244,10 +244,17 @@ STAGE PLANS:
                output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
                 serde: org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe
 
-  Stage: Stage-1
+  Stage: Stage-3
     Map Reduce
       Map Operator Tree:
           TableScan
+            Reduce Output Operator
+              key expressions: _col0 (type: int)
+              sort order: +
+              Map-reduce partition columns: _col0 (type: int)
+              Statistics: Num rows: 14 Data size: 1730 Basic stats: COMPLETE Column stats: NONE
+              value expressions: _col1 (type: string), _col2 (type: string), _col3 (type: string), _col4 (type: string), _col5 (type: int), _col6 (type: string), _col7 (type: double), _col8 (type: string), _col9 (type: int), _col10 (type: string), _col11 (type: string), _col12 (type: string), _col13 (type: string), _col14 (type: int), _col15 (type: string), _col16 (type: double), _col17 (type: string), _col18 (type: int), _col19 (type: string), _col20 (type: string), _col21 (type: string), _col22 (type: string), _col23 (type: int), _col24 (type: string), _col25 (type: double), _col26 (type: string)
+          TableScan
             alias: p1
            Statistics: Num rows: 26 Data size: 3147 Basic stats: COMPLETE Column stats: NONE
             Filter Operator
@@ -263,33 +270,22 @@ STAGE PLANS:
                   Map-reduce partition columns: _col0 (type: int)
                   Statistics: Num rows: 13 Data size: 1573 Basic stats: COMPLETE Column stats: NONE
                   value expressions: _col1 (type: string), _col2 (type: string), _col3 (type: string), _col4 (type: string), _col5 (type: int), _col6 (type: string), _col7 (type: double), _col8 (type: string)
-          TableScan
-            Reduce Output Operator
-              key expressions: _col9 (type: int)
-              sort order: +
-              Map-reduce partition columns: _col9 (type: int)
-              Statistics: Num rows: 14 Data size: 1730 Basic stats: COMPLETE Column stats: NONE
-              value expressions: _col0 (type: int), _col1 (type: string), _col2 (type: string), _col3 (type: string), _col4 (type: string), _col5 (type: int), _col6 (type: string), _col7 (type: double), _col8 (type: string), _col10 (type: string), _col11 (type: string), _col12 (type: string), _col13 (type: string), _col14 (type: int), _col15 (type: string), _col16 (type: double), _col17 (type: string), _col18 (type: int), _col19 (type: string), _col20 (type: string), _col21 (type: string), _col22 (type: string), _col23 (type: int), _col24 (type: string), _col25 (type: double), _col26 (type: string)
       Reduce Operator Tree:
         Join Operator
           condition map:
                Inner Join 0 to 1
           keys:
             0 _col0 (type: int)
-            1 _col9 (type: int)
+            1 _col0 (type: int)
          outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7, _col8, _col9, _col10, _col11, _col12, _col13, _col14, _col15, _col16, _col17, _col18, _col19, _col20, _col21, _col22, _col23, _col24, _col25, _col26, _col27, _col28, _col29, _col30, _col31, _col32, _col33, _col34, _col35
          Statistics: Num rows: 15 Data size: 1903 Basic stats: COMPLETE Column stats: NONE
-          Select Operator
-            expressions: _col18 (type: int), _col19 (type: string), _col20 (type: string), _col21 (type: string), _col22 (type: string), _col23 (type: int), _col24 (type: string), _col25 (type: double), _col26 (type: string), _col27 (type: int), _col28 (type: string), _col29 (type: string), _col30 (type: string), _col31 (type: string), _col32 (type: int), _col33 (type: string), _col34 (type: double), _col35 (type: string), _col9 (type: int), _col10 (type: string), _col11 (type: string), _col12 (type: string), _col13 (type: string), _col14 (type: int), _col15 (type: string), _col16 (type: double), _col17 (type: string), _col0 (type: int), _col1 (type: string), _col2 (type: string), _col3 (type: string), _col4 (type: string), _col5 (type: int), _col6 (type: string), _col7 (type: double), _col8 (type: string)
-            outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7, _col8, _col9, _col10, _col11, _col12, _col13, _col14, _col15, _col16, _col17, _col18, _col19, _col20, _col21, _col22, _col23, _col24, _col25, _col26, _col27, _col28, _col29, _col30, _col31, _col32, _col33, _col34, _col35
+          File Output Operator
+            compressed: false
             Statistics: Num rows: 15 Data size: 1903 Basic stats: COMPLETE Column stats: NONE
-            File Output Operator
-              compressed: false
-              Statistics: Num rows: 15 Data size: 1903 Basic stats: COMPLETE Column stats: NONE
-              table:
-                  input format: org.apache.hadoop.mapred.TextInputFormat
-                  output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-                  serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+            table:
+                input format: org.apache.hadoop.mapred.TextInputFormat
+                output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+                serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
 
   Stage: Stage-0
     Fetch Operator

http://git-wip-us.apache.org/repos/asf/hive/blob/9b7c0eaa/ql/src/test/results/clientpositive/mapjoin_mapjoin.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/mapjoin_mapjoin.q.out b/ql/src/test/results/clientpositive/mapjoin_mapjoin.q.out
index 3fba77c..9775f30 100644
--- a/ql/src/test/results/clientpositive/mapjoin_mapjoin.q.out
+++ b/ql/src/test/results/clientpositive/mapjoin_mapjoin.q.out
@@ -56,18 +56,18 @@ TOK_QUERY
 
 
 STAGE DEPENDENCIES:
-  Stage-6 is a root stage
-  Stage-4 depends on stages: Stage-6
-  Stage-0 depends on stages: Stage-4
+  Stage-7 is a root stage
+  Stage-5 depends on stages: Stage-7
+  Stage-0 depends on stages: Stage-5
 
 STAGE PLANS:
-  Stage: Stage-6
+  Stage: Stage-7
     Map Reduce Local Work
       Alias -> Map Local Tables:
         $hdt$_0:src 
           Fetch Operator
             limit: -1
-        $hdt$_1:$hdt$_2:src1 
+        $hdt$_2:src1 
           Fetch Operator
             limit: -1
       Alias -> Map Local Operator Tree:
@@ -86,10 +86,10 @@ STAGE PLANS:
                Statistics: Num rows: 250 Data size: 2656 Basic stats: COMPLETE Column stats: NONE
                 HashTable Sink Operator
                   keys:
-                    0 _col0 (type: string)
-                    1 _col1 (type: string)
-                  Position of Big Table: 1
-        $hdt$_1:$hdt$_2:src1 
+                    0 _col1 (type: string)
+                    1 _col0 (type: string)
+                  Position of Big Table: 0
+        $hdt$_2:src1 
           TableScan
             alias: src1
            Statistics: Num rows: 25 Data size: 191 Basic stats: COMPLETE Column stats: NONE
@@ -108,7 +108,7 @@ STAGE PLANS:
                     1 _col0 (type: string)
                   Position of Big Table: 0
 
-  Stage: Stage-4
+  Stage: Stage-5
     Map Reduce
       Map Operator Tree:
           TableScan
@@ -136,36 +136,32 @@ STAGE PLANS:
                     condition map:
                          Inner Join 0 to 1
                     keys:
-                      0 _col0 (type: string)
-                      1 _col1 (type: string)
-                    outputColumnNames: _col1
-                    Position of Big Table: 1
+                      0 _col1 (type: string)
+                      1 _col0 (type: string)
+                    outputColumnNames: _col0
+                    Position of Big Table: 0
                    Statistics: Num rows: 605 Data size: 6427 Basic stats: COMPLETE Column stats: NONE
-                    Select Operator
-                      expressions: _col1 (type: string)
-                      outputColumnNames: _col0
+                    File Output Operator
+                      compressed: false
+                      GlobalTableId: 0
+#### A masked pattern was here ####
+                      NumFilesPerFileSink: 1
                       Statistics: Num rows: 605 Data size: 6427 Basic stats: COMPLETE Column stats: NONE
-                      File Output Operator
-                        compressed: false
-                        GlobalTableId: 0
-#### A masked pattern was here ####
-                        NumFilesPerFileSink: 1
-                        Statistics: Num rows: 605 Data size: 6427 Basic stats: COMPLETE Column stats: NONE
-#### A masked pattern was here ####
-                        table:
-                            input format: org.apache.hadoop.mapred.TextInputFormat
-                            output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-                            properties:
-                              columns _col0
-                              columns.types string
-                              escape.delim \
-                              hive.serialization.extend.additional.nesting.levels true
-                              serialization.format 1
-                              serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-                            serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-                        TotalFiles: 1
-                        GatherStats: false
-                        MultiFileSpray: false
+#### A masked pattern was here ####
+                      table:
+                          input format: org.apache.hadoop.mapred.TextInputFormat
+                          output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+                          properties:
+                            columns _col0
+                            columns.types string
+                            escape.delim \
+                            hive.serialization.extend.additional.nesting.levels true
+                            serialization.format 1
+                            serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                          serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                      TotalFiles: 1
+                      GatherStats: false
+                      MultiFileSpray: false
       Local Work:
         Map Reduce Local Work
       Path -> Alias:
@@ -444,10 +440,10 @@ STAGE PLANS:
               name: default.srcpart
             name: default.srcpart
       Truncated Path -> Alias:
-        /srcpart/ds=2008-04-08/hr=11 [$hdt$_1:$hdt$_1:srcpart]
-        /srcpart/ds=2008-04-08/hr=12 [$hdt$_1:$hdt$_1:srcpart]
-        /srcpart/ds=2008-04-09/hr=11 [$hdt$_1:$hdt$_1:srcpart]
-        /srcpart/ds=2008-04-09/hr=12 [$hdt$_1:$hdt$_1:srcpart]
+        /srcpart/ds=2008-04-08/hr=11 [$hdt$_1:srcpart]
+        /srcpart/ds=2008-04-08/hr=12 [$hdt$_1:srcpart]
+        /srcpart/ds=2008-04-09/hr=11 [$hdt$_1:srcpart]
+        /srcpart/ds=2008-04-09/hr=12 [$hdt$_1:srcpart]
 
   Stage: Stage-0
     Fetch Operator
@@ -462,18 +458,18 @@ POSTHOOK: query: explain
 select srcpart.key from srcpart join src on (srcpart.value=src.value) join src1 on (srcpart.key=src1.key) where srcpart.value > 'val_450'
 POSTHOOK: type: QUERY
 STAGE DEPENDENCIES:
-  Stage-6 is a root stage
-  Stage-4 depends on stages: Stage-6
-  Stage-0 depends on stages: Stage-4
+  Stage-7 is a root stage
+  Stage-5 depends on stages: Stage-7
+  Stage-0 depends on stages: Stage-5
 
 STAGE PLANS:
-  Stage: Stage-6
+  Stage: Stage-7
     Map Reduce Local Work
       Alias -> Map Local Tables:
         $hdt$_0:src 
           Fetch Operator
             limit: -1
-        $hdt$_1:$hdt$_2:src1 
+        $hdt$_2:src1 
           Fetch Operator
             limit: -1
       Alias -> Map Local Operator Tree:
@@ -490,9 +486,9 @@ STAGE PLANS:
                Statistics: Num rows: 166 Data size: 1763 Basic stats: COMPLETE Column stats: NONE
                 HashTable Sink Operator
                   keys:
-                    0 _col0 (type: string)
-                    1 _col1 (type: string)
-        $hdt$_1:$hdt$_2:src1 
+                    0 _col1 (type: string)
+                    1 _col0 (type: string)
+        $hdt$_2:src1 
           TableScan
             alias: src1
            Statistics: Num rows: 25 Data size: 191 Basic stats: COMPLETE Column stats: NONE
@@ -508,7 +504,7 @@ STAGE PLANS:
                     0 _col0 (type: string)
                     1 _col0 (type: string)
 
-  Stage: Stage-4
+  Stage: Stage-5
     Map Reduce
       Map Operator Tree:
           TableScan
@@ -533,21 +529,17 @@ STAGE PLANS:
                     condition map:
                          Inner Join 0 to 1
                     keys:
-                      0 _col0 (type: string)
-                      1 _col1 (type: string)
-                    outputColumnNames: _col1
+                      0 _col1 (type: string)
+                      1 _col0 (type: string)
+                    outputColumnNames: _col0
                    Statistics: Num rows: 201 Data size: 2146 Basic stats: COMPLETE Column stats: NONE
-                    Select Operator
-                      expressions: _col1 (type: string)
-                      outputColumnNames: _col0
+                    File Output Operator
+                      compressed: false
                       Statistics: Num rows: 201 Data size: 2146 Basic stats: COMPLETE Column stats: NONE
-                      File Output Operator
-                        compressed: false
-                        Statistics: Num rows: 201 Data size: 2146 Basic stats: COMPLETE Column stats: NONE
-                        table:
-                            input format: org.apache.hadoop.mapred.TextInputFormat
-                            output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-                            serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                      table:
+                          input format: org.apache.hadoop.mapred.TextInputFormat
+                          output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+                          serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
       Local Work:
         Map Reduce Local Work
 

http://git-wip-us.apache.org/repos/asf/hive/blob/9b7c0eaa/ql/src/test/results/clientpositive/spark/auto_join13.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/spark/auto_join13.q.out b/ql/src/test/results/clientpositive/spark/auto_join13.q.out
index c81d296..0ffe9e3 100644
--- a/ql/src/test/results/clientpositive/spark/auto_join13.q.out
+++ b/ql/src/test/results/clientpositive/spark/auto_join13.q.out
@@ -30,22 +30,22 @@ STAGE PLANS:
     Spark
 #### A masked pattern was here ####
       Vertices:
-        Map 1 
+        Map 3 
             Map Operator Tree:
                 TableScan
                   alias: src
                  Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE
                   Filter Operator
-                    predicate: ((UDFToDouble(key) < 200.0) and UDFToDouble(key) is not null) (type: boolean)
-                    Statistics: Num rows: 83 Data size: 881 Basic stats: COMPLETE Column stats: NONE
+                    predicate: (UDFToDouble(key) < 100.0) (type: boolean)
+                    Statistics: Num rows: 166 Data size: 1763 Basic stats: COMPLETE Column stats: NONE
                     Select Operator
                       expressions: key (type: string)
                       outputColumnNames: _col0
-                      Statistics: Num rows: 83 Data size: 881 Basic stats: COMPLETE Column stats: NONE
+                      Statistics: Num rows: 166 Data size: 1763 Basic stats: COMPLETE Column stats: NONE
                       Spark HashTable Sink Operator
                         keys:
-                          0 UDFToDouble(_col0) (type: double)
-                          1 (UDFToDouble(_col2) + UDFToDouble(_col0)) (type: double)
+                          0 _col0 (type: string)
+                          1 _col0 (type: string)
             Local Work:
               Map Reduce Local Work
         Map 4 
@@ -54,26 +54,26 @@ STAGE PLANS:
                   alias: src
                  Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE
                   Filter Operator
-                    predicate: (UDFToDouble(key) < 100.0) (type: boolean)
-                    Statistics: Num rows: 166 Data size: 1763 Basic stats: COMPLETE Column stats: NONE
+                    predicate: ((UDFToDouble(key) < 200.0) and UDFToDouble(key) is not null) (type: boolean)
+                    Statistics: Num rows: 83 Data size: 881 Basic stats: COMPLETE Column stats: NONE
                     Select Operator
                       expressions: key (type: string)
                       outputColumnNames: _col0
-                      Statistics: Num rows: 166 Data size: 1763 Basic stats: COMPLETE Column stats: NONE
+                      Statistics: Num rows: 83 Data size: 881 Basic stats: COMPLETE Column stats: NONE
                       Spark HashTable Sink Operator
                         keys:
-                          0 _col0 (type: string)
-                          1 _col0 (type: string)
+                          0 (UDFToDouble(_col2) + UDFToDouble(_col0)) (type: double)
+                          1 UDFToDouble(_col0) (type: double)
             Local Work:
               Map Reduce Local Work
 
   Stage: Stage-1
     Spark
       Edges:
-        Reducer 3 <- Map 2 (GROUP, 1)
+        Reducer 2 <- Map 1 (GROUP, 1)
 #### A masked pattern was here ####
       Vertices:
-        Map 2 
+        Map 1 
             Map Operator Tree:
                 TableScan
                   alias: src
@@ -93,7 +93,7 @@ STAGE PLANS:
                           1 _col0 (type: string)
                         outputColumnNames: _col0, _col1, _col2
                         input vertices:
-                          1 Map 4
+                          1 Map 3
                        Statistics: Num rows: 182 Data size: 1939 Basic stats: COMPLETE Column stats: NONE
                         Filter Operator
                          predicate: (UDFToDouble(_col2) + UDFToDouble(_col0)) is not null (type: boolean)
@@ -102,14 +102,14 @@ STAGE PLANS:
                             condition map:
                                  Inner Join 0 to 1
                             keys:
-                              0 UDFToDouble(_col0) (type: double)
-                              1 (UDFToDouble(_col2) + UDFToDouble(_col0)) (type: double)
-                            outputColumnNames: _col2, _col3
+                              0 (UDFToDouble(_col2) + UDFToDouble(_col0)) (type: double)
+                              1 UDFToDouble(_col0) (type: double)
+                            outputColumnNames: _col1, _col2
                             input vertices:
-                              0 Map 1
+                              1 Map 4
                            Statistics: Num rows: 100 Data size: 1065 Basic stats: COMPLETE Column stats: NONE
                             Select Operator
-                              expressions: hash(_col3,_col2) (type: int)
+                              expressions: hash(_col2,_col1) (type: int)
                               outputColumnNames: _col0
                              Statistics: Num rows: 100 Data size: 1065 Basic stats: COMPLETE Column stats: NONE
                               Group By Operator
@@ -123,7 +123,7 @@ STAGE PLANS:
                                   value expressions: _col0 (type: bigint)
             Local Work:
               Map Reduce Local Work
-        Reducer 3 
+        Reducer 2 
             Reduce Operator Tree:
               Group By Operator
                 aggregations: sum(VALUE._col0)

http://git-wip-us.apache.org/repos/asf/hive/blob/9b7c0eaa/ql/src/test/results/clientpositive/spark/auto_join_without_localtask.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/spark/auto_join_without_localtask.q.out b/ql/src/test/results/clientpositive/spark/auto_join_without_localtask.q.out
index c3d5225..62f9053 100644
--- a/ql/src/test/results/clientpositive/spark/auto_join_without_localtask.q.out
+++ b/ql/src/test/results/clientpositive/spark/auto_join_without_localtask.q.out
@@ -316,9 +316,9 @@ STAGE PLANS:
   Stage: Stage-1
     Spark
       Edges:
-        Reducer 5 <- Map 4 (PARTITION-LEVEL SORT, 2), Map 6 (PARTITION-LEVEL SORT, 2)
-        Reducer 2 <- Map 1 (PARTITION-LEVEL SORT, 2), Reducer 5 (PARTITION-LEVEL SORT, 2)
-        Reducer 3 <- Reducer 2 (SORT, 1)
+        Reducer 2 <- Map 1 (PARTITION-LEVEL SORT, 2), Map 5 (PARTITION-LEVEL SORT, 2)
+        Reducer 3 <- Map 6 (PARTITION-LEVEL SORT, 2), Reducer 2 (PARTITION-LEVEL SORT, 2)
+        Reducer 4 <- Reducer 3 (SORT, 1)
 #### A masked pattern was here ####
       Vertices:
         Map 1 
@@ -327,23 +327,6 @@ STAGE PLANS:
                   alias: a
                  Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE
                   Filter Operator
-                    predicate: value is not null (type: boolean)
-                    Statistics: Num rows: 250 Data size: 2656 Basic stats: COMPLETE Column stats: NONE
-                    Select Operator
-                      expressions: value (type: string)
-                      outputColumnNames: _col0
-                      Statistics: Num rows: 250 Data size: 2656 Basic stats: COMPLETE Column stats: NONE
-                      Reduce Output Operator
-                        key expressions: _col0 (type: string)
-                        sort order: +
-                        Map-reduce partition columns: _col0 (type: string)
-                        Statistics: Num rows: 250 Data size: 2656 Basic stats: COMPLETE Column stats: NONE
-        Map 4 
-            Map Operator Tree:
-                TableScan
-                  alias: a
-                  Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE
-                  Filter Operator
                     predicate: ((UDFToDouble(key) > 100.0) and value is not null) (type: boolean)
                     Statistics: Num rows: 83 Data size: 881 Basic stats: COMPLETE Column stats: NONE
                     Select Operator
@@ -356,7 +339,7 @@ STAGE PLANS:
                         Map-reduce partition columns: _col0 (type: string)
                         Statistics: Num rows: 83 Data size: 881 Basic stats: COMPLETE Column stats: NONE
                         value expressions: _col1 (type: string)
-        Map 6 
+        Map 5 
             Map Operator Tree:
                 TableScan
                   alias: a
@@ -373,6 +356,23 @@ STAGE PLANS:
                         sort order: +
                         Map-reduce partition columns: _col0 (type: string)
                         Statistics: Num rows: 166 Data size: 1763 Basic stats: COMPLETE Column stats: NONE
+        Map 6 
+            Map Operator Tree:
+                TableScan
+                  alias: a
+                  Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE
+                  Filter Operator
+                    predicate: value is not null (type: boolean)
+                    Statistics: Num rows: 250 Data size: 2656 Basic stats: COMPLETE Column stats: NONE
+                    Select Operator
+                      expressions: value (type: string)
+                      outputColumnNames: _col0
+                      Statistics: Num rows: 250 Data size: 2656 Basic stats: COMPLETE Column stats: NONE
+                      Reduce Output Operator
+                        key expressions: _col0 (type: string)
+                        sort order: +
+                        Map-reduce partition columns: _col0 (type: string)
+                        Statistics: Num rows: 250 Data size: 2656 Basic stats: COMPLETE Column stats: NONE
         Reducer 2 
             Reduce Operator Tree:
               Join Operator
@@ -380,18 +380,30 @@ STAGE PLANS:
                      Inner Join 0 to 1
                 keys:
                   0 _col0 (type: string)
-                  1 _col1 (type: string)
-                outputColumnNames: _col1, _col2
+                  1 _col0 (type: string)
+                outputColumnNames: _col0, _col1
+                Statistics: Num rows: 182 Data size: 1939 Basic stats: COMPLETE Column stats: NONE
+                Reduce Output Operator
+                  key expressions: _col1 (type: string)
+                  sort order: +
+                  Map-reduce partition columns: _col1 (type: string)
+                  Statistics: Num rows: 182 Data size: 1939 Basic stats: COMPLETE Column stats: NONE
+                  value expressions: _col0 (type: string)
+        Reducer 3 
+            Reduce Operator Tree:
+              Join Operator
+                condition map:
+                     Inner Join 0 to 1
+                keys:
+                  0 _col1 (type: string)
+                  1 _col0 (type: string)
+                outputColumnNames: _col0, _col1
                 Statistics: Num rows: 275 Data size: 2921 Basic stats: COMPLETE Column stats: NONE
-                Select Operator
-                  expressions: _col1 (type: string), _col2 (type: string)
-                  outputColumnNames: _col0, _col1
+                Reduce Output Operator
+                  key expressions: _col0 (type: string), _col1 (type: string)
+                  sort order: ++
                   Statistics: Num rows: 275 Data size: 2921 Basic stats: COMPLETE Column stats: NONE
-                  Reduce Output Operator
-                    key expressions: _col0 (type: string), _col1 (type: string)
-                    sort order: ++
-                    Statistics: Num rows: 275 Data size: 2921 Basic stats: COMPLETE Column stats: NONE
-        Reducer 3 
+        Reducer 4 
             Reduce Operator Tree:
               Select Operator
                expressions: KEY.reducesinkkey0 (type: string), KEY.reducesinkkey1 (type: string)
@@ -407,22 +419,6 @@ STAGE PLANS:
                         input format: org.apache.hadoop.mapred.TextInputFormat
                        output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
                        serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-        Reducer 5 
-            Reduce Operator Tree:
-              Join Operator
-                condition map:
-                     Inner Join 0 to 1
-                keys:
-                  0 _col0 (type: string)
-                  1 _col0 (type: string)
-                outputColumnNames: _col0, _col1
-                Statistics: Num rows: 182 Data size: 1939 Basic stats: COMPLETE Column stats: NONE
-                Reduce Output Operator
-                  key expressions: _col1 (type: string)
-                  sort order: +
-                  Map-reduce partition columns: _col1 (type: string)
-                  Statistics: Num rows: 182 Data size: 1939 Basic stats: COMPLETE Column stats: NONE
-                  value expressions: _col0 (type: string)
 
   Stage: Stage-0
     Fetch Operator

http://git-wip-us.apache.org/repos/asf/hive/blob/9b7c0eaa/ql/src/test/results/clientpositive/spark/join13.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/spark/join13.q.out b/ql/src/test/results/clientpositive/spark/join13.q.out
index 3eb7720..4a045f7 100644
--- a/ql/src/test/results/clientpositive/spark/join13.q.out
+++ b/ql/src/test/results/clientpositive/spark/join13.q.out
@@ -32,8 +32,8 @@ STAGE PLANS:
   Stage: Stage-1
     Spark
       Edges:
-        Reducer 4 <- Map 3 (PARTITION-LEVEL SORT, 2), Map 5 (PARTITION-LEVEL SORT, 2)
-        Reducer 2 <- Map 1 (PARTITION-LEVEL SORT, 2), Reducer 4 (PARTITION-LEVEL SORT, 2)
+        Reducer 2 <- Map 1 (PARTITION-LEVEL SORT, 2), Map 4 (PARTITION-LEVEL SORT, 2)
+        Reducer 3 <- Map 5 (PARTITION-LEVEL SORT, 2), Reducer 2 (PARTITION-LEVEL SORT, 2)
 #### A masked pattern was here ####
       Vertices:
         Map 1 
@@ -42,23 +42,6 @@ STAGE PLANS:
                   alias: src
                  Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE
                   Filter Operator
-                    predicate: ((UDFToDouble(key) < 200.0) and UDFToDouble(key) is not null) (type: boolean)
-                    Statistics: Num rows: 83 Data size: 881 Basic stats: COMPLETE Column stats: NONE
-                    Select Operator
-                      expressions: key (type: string)
-                      outputColumnNames: _col0
-                      Statistics: Num rows: 83 Data size: 881 Basic stats: COMPLETE Column stats: NONE
-                      Reduce Output Operator
-                        key expressions: UDFToDouble(_col0) (type: double)
-                        sort order: +
-                        Map-reduce partition columns: UDFToDouble(_col0) (type: double)
-                        Statistics: Num rows: 83 Data size: 881 Basic stats: COMPLETE Column stats: NONE
-        Map 3 
-            Map Operator Tree:
-                TableScan
-                  alias: src
-                  Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE
-                  Filter Operator
                     predicate: (UDFToDouble(key) < 100.0) (type: boolean)
                     Statistics: Num rows: 166 Data size: 1763 Basic stats: COMPLETE Column stats: NONE
                     Select Operator
@@ -71,7 +54,7 @@ STAGE PLANS:
                         Map-reduce partition columns: _col0 (type: string)
                         Statistics: Num rows: 166 Data size: 1763 Basic stats: COMPLETE Column stats: NONE
                         value expressions: _col1 (type: string)
-        Map 5 
+        Map 4 
             Map Operator Tree:
                 TableScan
                   alias: src
@@ -88,33 +71,29 @@ STAGE PLANS:
                         sort order: +
                         Map-reduce partition columns: _col0 (type: string)
                         Statistics: Num rows: 166 Data size: 1763 Basic stats: COMPLETE Column stats: NONE
+        Map 5 
+            Map Operator Tree:
+                TableScan
+                  alias: src
+                  Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE
+                  Filter Operator
+                    predicate: ((UDFToDouble(key) < 200.0) and UDFToDouble(key) is not null) (type: boolean)
+                    Statistics: Num rows: 83 Data size: 881 Basic stats: COMPLETE Column stats: NONE
+                    Select Operator
+                      expressions: key (type: string)
+                      outputColumnNames: _col0
+                      Statistics: Num rows: 83 Data size: 881 Basic stats: COMPLETE Column stats: NONE
+                      Reduce Output Operator
+                        key expressions: UDFToDouble(_col0) (type: double)
+                        sort order: +
+                        Map-reduce partition columns: UDFToDouble(_col0) (type: double)
+                        Statistics: Num rows: 83 Data size: 881 Basic stats: COMPLETE Column stats: NONE
         Reducer 2 
             Reduce Operator Tree:
               Join Operator
                 condition map:
                      Inner Join 0 to 1
                 keys:
-                  0 UDFToDouble(_col0) (type: double)
-                  1 (UDFToDouble(_col2) + UDFToDouble(_col0)) (type: double)
-                outputColumnNames: _col2, _col3
-                Statistics: Num rows: 100 Data size: 1065 Basic stats: COMPLETE Column stats: NONE
-                Select Operator
-                  expressions: _col3 (type: string), _col2 (type: string)
-                  outputColumnNames: _col0, _col1
-                  Statistics: Num rows: 100 Data size: 1065 Basic stats: COMPLETE Column stats: NONE
-                  File Output Operator
-                    compressed: false
-                    Statistics: Num rows: 100 Data size: 1065 Basic stats: COMPLETE Column stats: NONE
-                    table:
-                        input format: org.apache.hadoop.mapred.TextInputFormat
-                        output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-                        serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-        Reducer 4 
-            Reduce Operator Tree:
-              Join Operator
-                condition map:
-                     Inner Join 0 to 1
-                keys:
                   0 _col0 (type: string)
                   1 _col0 (type: string)
                 outputColumnNames: _col0, _col1, _col2
@@ -128,6 +107,27 @@ STAGE PLANS:
                     Map-reduce partition columns: (UDFToDouble(_col2) + UDFToDouble(_col0)) (type: double)
                     Statistics: Num rows: 91 Data size: 969 Basic stats: COMPLETE Column stats: NONE
                     value expressions: _col1 (type: string), _col2 (type: string)
+        Reducer 3 
+            Reduce Operator Tree:
+              Join Operator
+                condition map:
+                     Inner Join 0 to 1
+                keys:
+                  0 (UDFToDouble(_col2) + UDFToDouble(_col0)) (type: double)
+                  1 UDFToDouble(_col0) (type: double)
+                outputColumnNames: _col1, _col2
+                Statistics: Num rows: 100 Data size: 1065 Basic stats: COMPLETE Column stats: NONE
+                Select Operator
+                  expressions: _col2 (type: string), _col1 (type: string)
+                  outputColumnNames: _col0, _col1
+                  Statistics: Num rows: 100 Data size: 1065 Basic stats: COMPLETE Column stats: NONE
+                  File Output Operator
+                    compressed: false
+                    Statistics: Num rows: 100 Data size: 1065 Basic stats: COMPLETE Column stats: NONE
+                    table:
+                        input format: org.apache.hadoop.mapred.TextInputFormat
+                        output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+                        serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
 
   Stage: Stage-0
     Fetch Operator

http://git-wip-us.apache.org/repos/asf/hive/blob/9b7c0eaa/ql/src/test/results/clientpositive/spark/join32.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/spark/join32.q.out b/ql/src/test/results/clientpositive/spark/join32.q.out
index a9d50b4..d0095cd 100644
--- a/ql/src/test/results/clientpositive/spark/join32.q.out
+++ b/ql/src/test/results/clientpositive/spark/join32.q.out
@@ -110,25 +110,25 @@ STAGE PLANS:
     Spark
 #### A masked pattern was here ####
       Vertices:
-        Map 1 
+        Map 2 
             Map Operator Tree:
                 TableScan
-                  alias: y
-                  Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE
+                  alias: x
+                  Statistics: Num rows: 25 Data size: 191 Basic stats: COMPLETE Column stats: NONE
                   GatherStats: false
                   Filter Operator
                     isSamplingPred: false
-                    predicate: key is not null (type: boolean)
-                    Statistics: Num rows: 250 Data size: 2656 Basic stats: COMPLETE Column stats: NONE
+                    predicate: (value is not null and key is not null) (type: boolean)
+                    Statistics: Num rows: 7 Data size: 53 Basic stats: COMPLETE Column stats: NONE
                     Select Operator
                       expressions: key (type: string), value (type: string)
                       outputColumnNames: _col0, _col1
-                      Statistics: Num rows: 250 Data size: 2656 Basic stats: COMPLETE Column stats: NONE
+                      Statistics: Num rows: 7 Data size: 53 Basic stats: COMPLETE Column stats: NONE
                       Spark HashTable Sink Operator
                         keys:
                           0 _col0 (type: string)
-                          1 _col3 (type: string)
-                        Position of Big Table: 1
+                          1 _col1 (type: string)
+                        Position of Big Table: 0
             Local Work:
               Map Reduce Local Work
             Path -> Alias:
@@ -136,7 +136,7 @@ STAGE PLANS:
             Path -> Partition:
 #### A masked pattern was here ####
                 Partition
-                  base file name: src
+                  base file name: src1
                   input format: org.apache.hadoop.mapred.TextInputFormat
                   output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
                   properties:
@@ -146,14 +146,14 @@ STAGE PLANS:
                     columns.comments 'default','default'
                     columns.types string:string
 #### A masked pattern was here ####
-                    name default.src
+                    name default.src1
                     numFiles 1
-                    numRows 500
-                    rawDataSize 5312
-                    serialization.ddl struct src { string key, string value}
+                    numRows 25
+                    rawDataSize 191
+                    serialization.ddl struct src1 { string key, string value}
                     serialization.format 1
                     serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-                    totalSize 5812
+                    totalSize 216
 #### A masked pattern was here ####
                   serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
                 
@@ -166,38 +166,38 @@ STAGE PLANS:
                       columns.comments 'default','default'
                       columns.types string:string
 #### A masked pattern was here ####
-                      name default.src
+                      name default.src1
                       numFiles 1
-                      numRows 500
-                      rawDataSize 5312
-                      serialization.ddl struct src { string key, string value}
+                      numRows 25
+                      rawDataSize 191
+                      serialization.ddl struct src1 { string key, string value}
                       serialization.format 1
                       serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-                      totalSize 5812
+                      totalSize 216
 #### A masked pattern was here ####
                     serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-                    name: default.src
-                  name: default.src
+                    name: default.src1
+                  name: default.src1
             Truncated Path -> Alias:
-              /src [y]
+              /src1 [x]
         Map 3 
             Map Operator Tree:
                 TableScan
-                  alias: x
-                  Statistics: Num rows: 25 Data size: 191 Basic stats: COMPLETE Column stats: NONE
+                  alias: y
+                  Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE
                   GatherStats: false
                   Filter Operator
                     isSamplingPred: false
-                    predicate: (value is not null and key is not null) (type: boolean)
-                    Statistics: Num rows: 7 Data size: 53 Basic stats: COMPLETE Column stats: NONE
+                    predicate: key is not null (type: boolean)
+                    Statistics: Num rows: 250 Data size: 2656 Basic stats: COMPLETE Column stats: NONE
                     Select Operator
                       expressions: key (type: string), value (type: string)
                       outputColumnNames: _col0, _col1
-                      Statistics: Num rows: 7 Data size: 53 Basic stats: COMPLETE Column stats: NONE
+                      Statistics: Num rows: 250 Data size: 2656 Basic stats: COMPLETE Column stats: NONE
                       Spark HashTable Sink Operator
                         keys:
-                          0 _col0 (type: string)
-                          1 _col1 (type: string)
+                          0 _col3 (type: string)
+                          1 _col0 (type: string)
                         Position of Big Table: 0
             Local Work:
               Map Reduce Local Work
@@ -206,7 +206,7 @@ STAGE PLANS:
             Path -> Partition:
 #### A masked pattern was here ####
                 Partition
-                  base file name: src1
+                  base file name: src
                   input format: org.apache.hadoop.mapred.TextInputFormat
                   output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
                   properties:
@@ -216,14 +216,14 @@ STAGE PLANS:
                     columns.comments 'default','default'
                     columns.types string:string
 #### A masked pattern was here ####
-                    name default.src1
+                    name default.src
                     numFiles 1
-                    numRows 25
-                    rawDataSize 191
-                    serialization.ddl struct src1 { string key, string value}
+                    numRows 500
+                    rawDataSize 5312
+                    serialization.ddl struct src { string key, string value}
                     serialization.format 1
                     serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-                    totalSize 216
+                    totalSize 5812
 #### A masked pattern was here ####
                   serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
                 
@@ -236,26 +236,26 @@ STAGE PLANS:
                       columns.comments 'default','default'
                       columns.types string:string
 #### A masked pattern was here ####
-                      name default.src1
+                      name default.src
                       numFiles 1
-                      numRows 25
-                      rawDataSize 191
-                      serialization.ddl struct src1 { string key, string value}
+                      numRows 500
+                      rawDataSize 5312
+                      serialization.ddl struct src { string key, string value}
                       serialization.format 1
                       serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-                      totalSize 216
+                      totalSize 5812
 #### A masked pattern was here ####
                     serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-                    name: default.src1
-                  name: default.src1
+                    name: default.src
+                  name: default.src
             Truncated Path -> Alias:
-              /src1 [x]
+              /src [y]
 
   Stage: Stage-1
     Spark
 #### A masked pattern was here ####
       Vertices:
-        Map 2 
+        Map 1 
             Map Operator Tree:
                 TableScan
                   alias: z
@@ -277,22 +277,22 @@ STAGE PLANS:
                           1 _col1 (type: string)
                         outputColumnNames: _col0, _col3
                         input vertices:
-                          1 Map 3
+                          1 Map 2
                         Position of Big Table: 0
                        Statistics: Num rows: 275 Data size: 2921 Basic stats: COMPLETE Column stats: NONE
                         Map Join Operator
                           condition map:
                                Inner Join 0 to 1
                           keys:
-                            0 _col0 (type: string)
-                            1 _col3 (type: string)
-                          outputColumnNames: _col1, _col2, _col5
+                            0 _col3 (type: string)
+                            1 _col0 (type: string)
+                          outputColumnNames: _col0, _col3, _col6
                           input vertices:
-                            0 Map 1
-                          Position of Big Table: 1
+                            1 Map 3
+                          Position of Big Table: 0
                          Statistics: Num rows: 302 Data size: 3213 Basic stats: COMPLETE Column stats: NONE
                           Select Operator
-                            expressions: _col5 (type: string), _col2 (type: string), _col1 (type: string)
+                            expressions: _col3 (type: string), _col0 (type: string), _col6 (type: string)
                             outputColumnNames: _col0, _col1, _col2
                            Statistics: Num rows: 302 Data size: 3213 Basic stats: COMPLETE Column stats: NONE
                             File Output Operator

http://git-wip-us.apache.org/repos/asf/hive/blob/9b7c0eaa/ql/src/test/results/clientpositive/spark/join32_lessSize.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/spark/join32_lessSize.q.out b/ql/src/test/results/clientpositive/spark/join32_lessSize.q.out
index dac9610..de28ca5 100644
--- a/ql/src/test/results/clientpositive/spark/join32_lessSize.q.out
+++ b/ql/src/test/results/clientpositive/spark/join32_lessSize.q.out
@@ -118,25 +118,25 @@ STAGE PLANS:
     Spark
 #### A masked pattern was here ####
       Vertices:
-        Map 1 
+        Map 2 
             Map Operator Tree:
                 TableScan
-                  alias: y
-                  Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE
+                  alias: x
+                  Statistics: Num rows: 25 Data size: 191 Basic stats: COMPLETE Column stats: NONE
                   GatherStats: false
                   Filter Operator
                     isSamplingPred: false
-                    predicate: key is not null (type: boolean)
-                    Statistics: Num rows: 250 Data size: 2656 Basic stats: COMPLETE Column stats: NONE
+                    predicate: (value is not null and key is not null) (type: boolean)
+                    Statistics: Num rows: 7 Data size: 53 Basic stats: COMPLETE Column stats: NONE
                     Select Operator
                       expressions: key (type: string), value (type: string)
                       outputColumnNames: _col0, _col1
-                      Statistics: Num rows: 250 Data size: 2656 Basic stats: COMPLETE Column stats: NONE
+                      Statistics: Num rows: 7 Data size: 53 Basic stats: COMPLETE Column stats: NONE
                       Spark HashTable Sink Operator
                         keys:
                           0 _col0 (type: string)
-                          1 _col3 (type: string)
-                        Position of Big Table: 1
+                          1 _col1 (type: string)
+                        Position of Big Table: 0
             Local Work:
               Map Reduce Local Work
             Path -> Alias:
@@ -144,7 +144,7 @@ STAGE PLANS:
             Path -> Partition:
 #### A masked pattern was here ####
                 Partition
-                  base file name: src
+                  base file name: src1
                   input format: org.apache.hadoop.mapred.TextInputFormat
                   output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
                   properties:
@@ -154,14 +154,14 @@ STAGE PLANS:
                     columns.comments 'default','default'
                     columns.types string:string
 #### A masked pattern was here ####
-                    name default.src
+                    name default.src1
                     numFiles 1
-                    numRows 500
-                    rawDataSize 5312
-                    serialization.ddl struct src { string key, string value}
+                    numRows 25
+                    rawDataSize 191
+                    serialization.ddl struct src1 { string key, string value}
                     serialization.format 1
                     serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-                    totalSize 5812
+                    totalSize 216
 #### A masked pattern was here ####
                   serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
                 
@@ -174,38 +174,38 @@ STAGE PLANS:
                       columns.comments 'default','default'
                       columns.types string:string
 #### A masked pattern was here ####
-                      name default.src
+                      name default.src1
                       numFiles 1
-                      numRows 500
-                      rawDataSize 5312
-                      serialization.ddl struct src { string key, string value}
+                      numRows 25
+                      rawDataSize 191
+                      serialization.ddl struct src1 { string key, string value}
                       serialization.format 1
                       serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-                      totalSize 5812
+                      totalSize 216
 #### A masked pattern was here ####
                     serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-                    name: default.src
-                  name: default.src
+                    name: default.src1
+                  name: default.src1
             Truncated Path -> Alias:
-              /src [y]
+              /src1 [x]
         Map 3 
             Map Operator Tree:
                 TableScan
-                  alias: x
-                  Statistics: Num rows: 25 Data size: 191 Basic stats: COMPLETE Column stats: NONE
+                  alias: y
+                  Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE
                   GatherStats: false
                   Filter Operator
                     isSamplingPred: false
-                    predicate: (value is not null and key is not null) (type: boolean)
-                    Statistics: Num rows: 7 Data size: 53 Basic stats: COMPLETE Column stats: NONE
+                    predicate: key is not null (type: boolean)
+                    Statistics: Num rows: 250 Data size: 2656 Basic stats: COMPLETE Column stats: NONE
                     Select Operator
                       expressions: key (type: string), value (type: string)
                       outputColumnNames: _col0, _col1
-                      Statistics: Num rows: 7 Data size: 53 Basic stats: COMPLETE Column stats: NONE
+                      Statistics: Num rows: 250 Data size: 2656 Basic stats: COMPLETE Column stats: NONE
                       Spark HashTable Sink Operator
                         keys:
-                          0 _col0 (type: string)
-                          1 _col1 (type: string)
+                          0 _col3 (type: string)
+                          1 _col0 (type: string)
                         Position of Big Table: 0
             Local Work:
               Map Reduce Local Work
@@ -214,7 +214,7 @@ STAGE PLANS:
             Path -> Partition:
 #### A masked pattern was here ####
                 Partition
-                  base file name: src1
+                  base file name: src
                   input format: org.apache.hadoop.mapred.TextInputFormat
                   output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
                   properties:
@@ -224,14 +224,14 @@ STAGE PLANS:
                     columns.comments 'default','default'
                     columns.types string:string
 #### A masked pattern was here ####
-                    name default.src1
+                    name default.src
                     numFiles 1
-                    numRows 25
-                    rawDataSize 191
-                    serialization.ddl struct src1 { string key, string value}
+                    numRows 500
+                    rawDataSize 5312
+                    serialization.ddl struct src { string key, string value}
                     serialization.format 1
                     serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-                    totalSize 216
+                    totalSize 5812
 #### A masked pattern was here ####
                   serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
                 
@@ -244,26 +244,26 @@ STAGE PLANS:
                       columns.comments 'default','default'
                       columns.types string:string
 #### A masked pattern was here ####
-                      name default.src1
+                      name default.src
                       numFiles 1
-                      numRows 25
-                      rawDataSize 191
-                      serialization.ddl struct src1 { string key, string value}
+                      numRows 500
+                      rawDataSize 5312
+                      serialization.ddl struct src { string key, string value}
                       serialization.format 1
                       serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-                      totalSize 216
+                      totalSize 5812
 #### A masked pattern was here ####
                     serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-                    name: default.src1
-                  name: default.src1
+                    name: default.src
+                  name: default.src
             Truncated Path -> Alias:
-              /src1 [x]
+              /src [y]
 
   Stage: Stage-1
     Spark
 #### A masked pattern was here ####
       Vertices:
-        Map 2 
+        Map 1 
             Map Operator Tree:
                 TableScan
                   alias: z
@@ -285,22 +285,22 @@ STAGE PLANS:
                           1 _col1 (type: string)
                         outputColumnNames: _col0, _col3
                         input vertices:
-                          1 Map 3
+                          1 Map 2
                         Position of Big Table: 0
                         Statistics: Num rows: 275 Data size: 2921 Basic stats: COMPLETE Column stats: NONE
                         Map Join Operator
                           condition map:
                                Inner Join 0 to 1
                           keys:
-                            0 _col0 (type: string)
-                            1 _col3 (type: string)
-                          outputColumnNames: _col1, _col2, _col5
+                            0 _col3 (type: string)
+                            1 _col0 (type: string)
+                          outputColumnNames: _col0, _col3, _col6
                           input vertices:
-                            0 Map 1
-                          Position of Big Table: 1
+                            1 Map 3
+                          Position of Big Table: 0
                           Statistics: Num rows: 302 Data size: 3213 Basic stats: COMPLETE Column stats: NONE
                           Select Operator
-                            expressions: _col5 (type: string), _col2 (type: string), _col1 (type: string)
+                            expressions: _col3 (type: string), _col0 (type: string), _col6 (type: string)
                             outputColumnNames: _col0, _col1, _col2
                             Statistics: Num rows: 302 Data size: 3213 Basic stats: COMPLETE Column stats: NONE
                             File Output Operator
@@ -624,7 +624,7 @@ STAGE PLANS:
     Spark
 #### A masked pattern was here ####
       Vertices:
-        Map 3 
+        Map 1 
             Map Operator Tree:
                 TableScan
                   alias: x
@@ -699,25 +699,36 @@ STAGE PLANS:
     Spark
 #### A masked pattern was here ####
       Vertices:
-        Map 1 
+        Map 2 
             Map Operator Tree:
                 TableScan
-                  alias: w
-                  Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE
+                  alias: x
+                  Statistics: Num rows: 25 Data size: 191 Basic stats: COMPLETE Column stats: NONE
                   GatherStats: false
                   Filter Operator
                     isSamplingPred: false
                     predicate: key is not null (type: boolean)
-                    Statistics: Num rows: 250 Data size: 2656 Basic stats: COMPLETE Column stats: NONE
+                    Statistics: Num rows: 13 Data size: 99 Basic stats: COMPLETE Column stats: NONE
                     Select Operator
                       expressions: key (type: string), value (type: string)
                       outputColumnNames: _col0, _col1
-                      Statistics: Num rows: 250 Data size: 2656 Basic stats: COMPLETE Column stats: NONE
-                      Spark HashTable Sink Operator
+                      Statistics: Num rows: 13 Data size: 99 Basic stats: COMPLETE Column stats: NONE
+                      Map Join Operator
+                        condition map:
+                             Inner Join 0 to 1
                         keys:
                           0 _col0 (type: string)
-                          1 _col1 (type: string)
+                          1 _col0 (type: string)
+                        outputColumnNames: _col0, _col1, _col3
+                        input vertices:
+                          0 Map 1
                         Position of Big Table: 1
+                        Statistics: Num rows: 14 Data size: 108 Basic stats: COMPLETE Column stats: NONE
+                        Spark HashTable Sink Operator
+                          keys:
+                            0 _col1 (type: string)
+                            1 _col0 (type: string)
+                          Position of Big Table: 1
             Local Work:
               Map Reduce Local Work
             Path -> Alias:
@@ -725,7 +736,7 @@ STAGE PLANS:
             Path -> Partition:
 #### A masked pattern was here ####
                 Partition
-                  base file name: src
+                  base file name: src1
                   input format: org.apache.hadoop.mapred.TextInputFormat
                   output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
                   properties:
@@ -735,14 +746,14 @@ STAGE PLANS:
                     columns.comments 'default','default'
                     columns.types string:string
 #### A masked pattern was here ####
-                    name default.src
+                    name default.src1
                     numFiles 1
-                    numRows 500
-                    rawDataSize 5312
-                    serialization.ddl struct src { string key, string value}
+                    numRows 25
+                    rawDataSize 191
+                    serialization.ddl struct src1 { string key, string value}
                     serialization.format 1
                     serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-                    totalSize 5812
+                    totalSize 216
 #### A masked pattern was here ####
                   serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
                 
@@ -755,50 +766,39 @@ STAGE PLANS:
                       columns.comments 'default','default'
                       columns.types string:string
 #### A masked pattern was here ####
-                      name default.src
+                      name default.src1
                       numFiles 1
-                      numRows 500
-                      rawDataSize 5312
-                      serialization.ddl struct src { string key, string value}
+                      numRows 25
+                      rawDataSize 191
+                      serialization.ddl struct src1 { string key, string value}
                       serialization.format 1
                       serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-                      totalSize 5812
+                      totalSize 216
 #### A masked pattern was here ####
                     serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-                    name: default.src
-                  name: default.src
+                    name: default.src1
+                  name: default.src1
             Truncated Path -> Alias:
-              /src [w]
+              /src1 [x]
         Map 4 
             Map Operator Tree:
                 TableScan
-                  alias: x
-                  Statistics: Num rows: 25 Data size: 191 Basic stats: COMPLETE Column stats: NONE
+                  alias: w
+                  Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE
                   GatherStats: false
                   Filter Operator
                     isSamplingPred: false
                     predicate: key is not null (type: boolean)
-                    Statistics: Num rows: 13 Data size: 99 Basic stats: COMPLETE Column stats: NONE
+                    Statistics: Num rows: 250 Data size: 2656 Basic stats: COMPLETE Column stats: NONE
                     Select Operator
                       expressions: key (type: string), value (type: string)
                       outputColumnNames: _col0, _col1
-                      Statistics: Num rows: 13 Data size: 99 Basic stats: COMPLETE Column stats: NONE
-                      Map Join Operator
-                        condition map:
-                             Inner Join 0 to 1
+                      Statistics: Num rows: 250 Data size: 2656 Basic stats: COMPLETE Column stats: NONE
+                      Spark HashTable Sink Operator
                         keys:
                           0 _col0 (type: string)
                           1 _col0 (type: string)
-                        outputColumnNames: _col0, _col1, _col3
-                        input vertices:
-                          0 Map 3
-                        Position of Big Table: 1
-                        Statistics: Num rows: 14 Data size: 108 Basic stats: COMPLETE Column stats: NONE
-                        Spark HashTable Sink Operator
-                          keys:
-                            0 _col0 (type: string)
-                            1 _col1 (type: string)
-                          Position of Big Table: 0
+                        Position of Big Table: 0
             Local Work:
               Map Reduce Local Work
             Path -> Alias:
@@ -806,7 +806,7 @@ STAGE PLANS:
             Path -> Partition:
 #### A masked pattern was here ####
                 Partition
-                  base file name: src1
+                  base file name: src
                   input format: org.apache.hadoop.mapred.TextInputFormat
                   output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
                   properties:
@@ -816,14 +816,14 @@ STAGE PLANS:
                     columns.comments 'default','default'
                     columns.types string:string
 #### A masked pattern was here ####
-                    name default.src1
+                    name default.src
                     numFiles 1
-                    numRows 25
-                    rawDataSize 191
-                    serialization.ddl struct src1 { string key, string value}
+                    numRows 500
+                    rawDataSize 5312
+                    serialization.ddl struct src { string key, string value}
                     serialization.format 1
                     serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-                    totalSize 216
+                    totalSize 5812
 #### A masked pattern was here ####
                   serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
                 
@@ -836,26 +836,26 @@ STAGE PLANS:
                       columns.comments 'default','default'
                       columns.types string:string
 #### A masked pattern was here ####
-                      name default.src1
+                      name default.src
                       numFiles 1
-                      numRows 25
-                      rawDataSize 191
-                      serialization.ddl struct src1 { string key, string value}
+                      numRows 500
+                      rawDataSize 5312
+                      serialization.ddl struct src { string key, string value}
                       serialization.format 1
                       serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-                      totalSize 216
+                      totalSize 5812
 #### A masked pattern was here ####
                     serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
-                    name: default.src1
-                  name: default.src1
+                    name: default.src
+                  name: default.src
             Truncated Path -> Alias:
-              /src1 [x]
+              /src [w]
 
   Stage: Stage-1
     Spark
 #### A masked pattern was here ####
       Vertices:
-        Map 2 
+        Map 3 
             Map Operator Tree:
                 TableScan
                   alias: w
@@ -873,26 +873,26 @@ STAGE PLANS:
                         condition map:
                              Inner Join 0 to 1
                         keys:
-                          0 _col0 (type: string)
-                          1 _col1 (type: string)
-                        outputColumnNames: _col1, _col4
+                          0 _col1 (type: string)
+                          1 _col0 (type: string)
+                        outputColumnNames: _col0, _col3
                         input vertices:
-                          1 Map 4
-                        Position of Big Table: 0
+                          0 Map 2
+                        Position of Big Table: 1
                         Statistics: Num rows: 275 Data size: 2921 Basic stats: COMPLETE Column stats: NONE
                         Map Join Operator
                           condition map:
                                Inner Join 0 to 1
                           keys:
                             0 _col0 (type: string)
-                            1 _col1 (type: string)
-                          outputColumnNames: _col1, _col3, _col6
+                            1 _col0 (type: string)
+                          outputColumnNames: _col0, _col3, _col6
                           input vertices:
-                            0 Map 1
-                          Position of Big Table: 1
+                            1 Map 4
+                          Position of Big Table: 0
                           Statistics: Num rows: 302 Data size: 3213 Basic stats: COMPLETE Column stats: NONE
                           Select Operator
-                            expressions: _col3 (type: string), _col6 (type: string), _col1 (type: string)
+                            expressions: _col0 (type: string), _col3 (type: string), _col6 (type: string)
                             outputColumnNames: _col0, _col1, _col2
                             Statistics: Num rows: 302 Data size: 3213 Basic stats: COMPLETE Column stats: NONE
                             File Output Operator
@@ -1029,7 +1029,7 @@ POSTHOOK: Input: default@src1
 POSTHOOK: Output: default@dest_j1
 POSTHOOK: Lineage: dest_j1.key EXPRESSION [(src1)x.FieldSchema(name:key, type:string, comment:default), ]
 POSTHOOK: Lineage: dest_j1.val2 SIMPLE [(src)w.FieldSchema(name:value, type:string, comment:default), ]
-POSTHOOK: Lineage: dest_j1.value SIMPLE [(src1)x.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: dest_j1.value EXPRESSION [(src1)x.FieldSchema(name:value, type:string, comment:default), ]
 PREHOOK: query: select * from dest_j1
 PREHOOK: type: QUERY
 PREHOOK: Input: default@dest_j1
