http://git-wip-us.apache.org/repos/asf/hive/blob/42527189/ql/src/test/results/clientpositive/spark/bucket5.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/spark/bucket5.q.out b/ql/src/test/results/clientpositive/spark/bucket5.q.out
index e3e18a5..27f0740 100644
--- a/ql/src/test/results/clientpositive/spark/bucket5.q.out
+++ b/ql/src/test/results/clientpositive/spark/bucket5.q.out
@@ -59,9 +59,9 @@ STAGE PLANS:
                      value expressions: _col0 (type: string), _col1 (type: string)
                       auto parallelism: false
             Path -> Alias:
-#### A masked pattern was here ####
+              hdfs://### HDFS PATH ### [src]
             Path -> Partition:
-#### A masked pattern was here ####
+              hdfs://### HDFS PATH ### 
                 Partition
                   base file name: src
                   input format: org.apache.hadoop.mapred.TextInputFormat
@@ -74,6 +74,7 @@ STAGE PLANS:
                     columns.comments 'default','default'
                     columns.types string:string
 #### A masked pattern was here ####
+                    location hdfs://### HDFS PATH ###
                     name default.src
                     numFiles 1
                     numRows 500
@@ -95,6 +96,7 @@ STAGE PLANS:
                       columns.comments 'default','default'
                       columns.types string:string
 #### A masked pattern was here ####
+                      location hdfs://### HDFS PATH ###
                       name default.src
                       numFiles 1
                       numRows 500
@@ -129,9 +131,9 @@ STAGE PLANS:
                       value expressions: _col1 (type: string)
                       auto parallelism: false
             Path -> Alias:
-#### A masked pattern was here ####
+              hdfs://### HDFS PATH ### [src]
             Path -> Partition:
-#### A masked pattern was here ####
+              hdfs://### HDFS PATH ### 
                 Partition
                   base file name: src
                   input format: org.apache.hadoop.mapred.TextInputFormat
@@ -144,6 +146,7 @@ STAGE PLANS:
                     columns.comments 'default','default'
                     columns.types string:string
 #### A masked pattern was here ####
+                    location hdfs://### HDFS PATH ###
                     name default.src
                     numFiles 1
                     numRows 500
@@ -165,6 +168,7 @@ STAGE PLANS:
                       columns.comments 'default','default'
                       columns.types string:string
 #### A masked pattern was here ####
+                      location hdfs://### HDFS PATH ###
                       name default.src
                       numFiles 1
                       numRows 500
@@ -189,10 +193,10 @@ STAGE PLANS:
                 File Output Operator
                   compressed: false
                   GlobalTableId: 1
-#### A masked pattern was here ####
+                  directory: hdfs://### HDFS PATH ###
                   NumFilesPerFileSink: 1
                  Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE
-#### A masked pattern was here ####
+                  Stats Publishing Key Prefix: hdfs://### HDFS PATH ###
                   table:
                       input format: org.apache.hadoop.mapred.TextInputFormat
                      output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
@@ -206,6 +210,7 @@ STAGE PLANS:
                         columns.comments 
                         columns.types int:string
 #### A masked pattern was here ####
+                        location hdfs://### HDFS PATH ###
                         name default.bucketed_table
                         numFiles 0
                         numRows 0
@@ -230,10 +235,10 @@ STAGE PLANS:
                 File Output Operator
                   compressed: false
                   GlobalTableId: 2
-#### A masked pattern was here ####
+                  directory: hdfs://### HDFS PATH ###
                   NumFilesPerFileSink: 1
                  Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE
-#### A masked pattern was here ####
+                  Stats Publishing Key Prefix: hdfs://### HDFS PATH ###
                   table:
                       input format: org.apache.hadoop.mapred.TextInputFormat
                      output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
@@ -245,6 +250,7 @@ STAGE PLANS:
                         columns.comments 
                         columns.types int:string
 #### A masked pattern was here ####
+                        location hdfs://### HDFS PATH ###
                         name default.unbucketed_table
                         numFiles 0
                         numRows 0
@@ -264,7 +270,7 @@ STAGE PLANS:
     Move Operator
       tables:
           replace: true
-#### A masked pattern was here ####
+          source: hdfs://### HDFS PATH ###
           table:
               input format: org.apache.hadoop.mapred.TextInputFormat
              output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
@@ -278,6 +284,7 @@ STAGE PLANS:
                 columns.comments 
                 columns.types int:string
 #### A masked pattern was here ####
+                location hdfs://### HDFS PATH ###
                 name default.bucketed_table
                 numFiles 0
                 numRows 0
@@ -293,13 +300,13 @@ STAGE PLANS:
   Stage: Stage-3
     Stats Work
       Basic Stats Work:
-#### A masked pattern was here ####
+          Stats Aggregation Key Prefix: hdfs://### HDFS PATH ###
 
   Stage: Stage-1
     Move Operator
       tables:
           replace: true
-#### A masked pattern was here ####
+          source: hdfs://### HDFS PATH ###
           table:
               input format: org.apache.hadoop.mapred.TextInputFormat
              output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
@@ -311,6 +318,7 @@ STAGE PLANS:
                 columns.comments 
                 columns.types int:string
 #### A masked pattern was here ####
+                location hdfs://### HDFS PATH ###
                 name default.unbucketed_table
                 numFiles 0
                 numRows 0
@@ -326,7 +334,7 @@ STAGE PLANS:
   Stage: Stage-4
     Stats Work
       Basic Stats Work:
-#### A masked pattern was here ####
+          Stats Aggregation Key Prefix: hdfs://### HDFS PATH ###
 
 PREHOOK: query: FROM src
 INSERT OVERWRITE TABLE bucketed_table SELECT key, value
@@ -384,11 +392,11 @@ Storage Desc Params:
 PREHOOK: query: SELECT * FROM bucketed_table TABLESAMPLE (BUCKET 1 OUT OF 2) s LIMIT 10
 PREHOOK: type: QUERY
 PREHOOK: Input: default@bucketed_table
-#### A masked pattern was here ####
+PREHOOK: Output: hdfs://### HDFS PATH ###
 POSTHOOK: query: SELECT * FROM bucketed_table TABLESAMPLE (BUCKET 1 OUT OF 2) s LIMIT 10
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@bucketed_table
-#### A masked pattern was here ####
+POSTHOOK: Output: hdfs://### HDFS PATH ###
 0      val_0
 0      val_0
 0      val_0
@@ -402,11 +410,11 @@ POSTHOOK: Input: default@bucketed_table
 PREHOOK: query: SELECT * FROM bucketed_table TABLESAMPLE (BUCKET 2 OUT OF 2) s LIMIT 10
 PREHOOK: type: QUERY
 PREHOOK: Input: default@bucketed_table
-#### A masked pattern was here ####
+PREHOOK: Output: hdfs://### HDFS PATH ###
 POSTHOOK: query: SELECT * FROM bucketed_table TABLESAMPLE (BUCKET 2 OUT OF 2) s LIMIT 10
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@bucketed_table
-#### A masked pattern was here ####
+POSTHOOK: Output: hdfs://### HDFS PATH ###
 5      val_5
 5      val_5
 5      val_5
@@ -420,18 +428,18 @@ POSTHOOK: Input: default@bucketed_table
 PREHOOK: query: SELECT COUNT(DISTINCT INPUT__FILE__NAME) FROM bucketed_table
 PREHOOK: type: QUERY
 PREHOOK: Input: default@bucketed_table
-#### A masked pattern was here ####
+PREHOOK: Output: hdfs://### HDFS PATH ###
 POSTHOOK: query: SELECT COUNT(DISTINCT INPUT__FILE__NAME) FROM bucketed_table
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@bucketed_table
-#### A masked pattern was here ####
+POSTHOOK: Output: hdfs://### HDFS PATH ###
 2
 PREHOOK: query: SELECT COUNT(DISTINCT INPUT__FILE__NAME) FROM unbucketed_table
 PREHOOK: type: QUERY
 PREHOOK: Input: default@unbucketed_table
-#### A masked pattern was here ####
+PREHOOK: Output: hdfs://### HDFS PATH ###
 POSTHOOK: query: SELECT COUNT(DISTINCT INPUT__FILE__NAME) FROM unbucketed_table
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@unbucketed_table
-#### A masked pattern was here ####
+POSTHOOK: Output: hdfs://### HDFS PATH ###
 2

http://git-wip-us.apache.org/repos/asf/hive/blob/42527189/ql/src/test/results/clientpositive/spark/bucket6.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/spark/bucket6.q.out b/ql/src/test/results/clientpositive/spark/bucket6.q.out
index b5d3347..77632fa 100644
--- a/ql/src/test/results/clientpositive/spark/bucket6.q.out
+++ b/ql/src/test/results/clientpositive/spark/bucket6.q.out
@@ -89,11 +89,11 @@ POSTHOOK: Lineage: src_bucket.value SIMPLE [(srcpart)srcpart.FieldSchema(name:va
 PREHOOK: query: select * from src_bucket limit 100
 PREHOOK: type: QUERY
 PREHOOK: Input: default@src_bucket
-#### A masked pattern was here ####
+PREHOOK: Output: hdfs://### HDFS PATH ###
 POSTHOOK: query: select * from src_bucket limit 100
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@src_bucket
-#### A masked pattern was here ####
+POSTHOOK: Output: hdfs://### HDFS PATH ###
 0      val_0
 0      val_0
 0      val_0

http://git-wip-us.apache.org/repos/asf/hive/blob/42527189/ql/src/test/results/clientpositive/spark/bucketizedhiveinputformat.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/spark/bucketizedhiveinputformat.q.out b/ql/src/test/results/clientpositive/spark/bucketizedhiveinputformat.q.out
index c4c2bce..12d9325 100644
--- a/ql/src/test/results/clientpositive/spark/bucketizedhiveinputformat.q.out
+++ b/ql/src/test/results/clientpositive/spark/bucketizedhiveinputformat.q.out
@@ -69,36 +69,36 @@ POSTHOOK: Output: default@t3
 PREHOOK: query: SELECT COUNT(1) FROM T2
 PREHOOK: type: QUERY
 PREHOOK: Input: default@t2
-#### A masked pattern was here ####
+PREHOOK: Output: hdfs://### HDFS PATH ###
 POSTHOOK: query: SELECT COUNT(1) FROM T2
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@t2
-#### A masked pattern was here ####
+POSTHOOK: Output: hdfs://### HDFS PATH ###
 5000000
 PREHOOK: query: SELECT COUNT(1) FROM T3
 PREHOOK: type: QUERY
 PREHOOK: Input: default@t3
-#### A masked pattern was here ####
+PREHOOK: Output: hdfs://### HDFS PATH ###
 POSTHOOK: query: SELECT COUNT(1) FROM T3
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@t3
-#### A masked pattern was here ####
+POSTHOOK: Output: hdfs://### HDFS PATH ###
 1000
 PREHOOK: query: SELECT COUNT(1) FROM T2
 PREHOOK: type: QUERY
 PREHOOK: Input: default@t2
-#### A masked pattern was here ####
+PREHOOK: Output: hdfs://### HDFS PATH ###
 POSTHOOK: query: SELECT COUNT(1) FROM T2
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@t2
-#### A masked pattern was here ####
+POSTHOOK: Output: hdfs://### HDFS PATH ###
 5000000
 PREHOOK: query: SELECT COUNT(1) FROM T3
 PREHOOK: type: QUERY
 PREHOOK: Input: default@t3
-#### A masked pattern was here ####
+PREHOOK: Output: hdfs://### HDFS PATH ###
 POSTHOOK: query: SELECT COUNT(1) FROM T3
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@t3
-#### A masked pattern was here ####
+POSTHOOK: Output: hdfs://### HDFS PATH ###
 1000

http://git-wip-us.apache.org/repos/asf/hive/blob/42527189/ql/src/test/results/clientpositive/spark/bucketmapjoin6.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/spark/bucketmapjoin6.q.out b/ql/src/test/results/clientpositive/spark/bucketmapjoin6.q.out
index c9b344e..87fb856 100644
--- a/ql/src/test/results/clientpositive/spark/bucketmapjoin6.q.out
+++ b/ql/src/test/results/clientpositive/spark/bucketmapjoin6.q.out
@@ -62,11 +62,11 @@ POSTHOOK: Lineage: tmp3.c SIMPLE [(tmp2)l.FieldSchema(name:b, type:string, comme
 PREHOOK: query: select * from tmp3
 PREHOOK: type: QUERY
 PREHOOK: Input: default@tmp3
-#### A masked pattern was here ####
+PREHOOK: Output: hdfs://### HDFS PATH ###
 POSTHOOK: query: select * from tmp3
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@tmp3
-#### A masked pattern was here ####
+POSTHOOK: Output: hdfs://### HDFS PATH ###
 0      val_0   val_0
 0      val_0   val_0
 0      val_0   val_0

http://git-wip-us.apache.org/repos/asf/hive/blob/42527189/ql/src/test/results/clientpositive/spark/bucketmapjoin7.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/spark/bucketmapjoin7.q.out b/ql/src/test/results/clientpositive/spark/bucketmapjoin7.q.out
index f67dec5..d7967d1 100644
--- a/ql/src/test/results/clientpositive/spark/bucketmapjoin7.q.out
+++ b/ql/src/test/results/clientpositive/spark/bucketmapjoin7.q.out
@@ -93,13 +93,14 @@ STAGE PLANS:
               Map Reduce Local Work
                 Bucket Mapjoin Context:
                     Alias Bucket File Name Mapping:
-#### A masked pattern was here ####
+                      b {hdfs://### HDFS PATH ###[hdfs://### HDFS PATH ###], hdfs://### HDFS PATH ###[hdfs://### HDFS PATH ###]}
                     Alias Bucket Output File Name Mapping:
-#### A masked pattern was here ####
+                      hdfs://### HDFS PATH ### 0
+                      hdfs://### HDFS PATH ### 1
             Path -> Alias:
-#### A masked pattern was here ####
+              hdfs://### HDFS PATH ### [b]
             Path -> Partition:
-#### A masked pattern was here ####
+              hdfs://### HDFS PATH ### 
                 Partition
                   base file name: hr=0
                   input format: org.apache.hadoop.mapred.TextInputFormat
@@ -115,6 +116,7 @@ STAGE PLANS:
                     columns.comments 
                     columns.types int:string
 #### A masked pattern was here ####
+                    location hdfs://### HDFS PATH ###
                     name default.srcbucket_mapjoin_part_2
                     numFiles 2
                     numRows 0
@@ -138,6 +140,7 @@ STAGE PLANS:
                       columns.comments 
                       columns.types int:string
 #### A masked pattern was here ####
+                      location hdfs://### HDFS PATH ###
                       name default.srcbucket_mapjoin_part_2
                       partition_columns ds/hr
                       partition_columns.types string:string
@@ -196,13 +199,14 @@ STAGE PLANS:
               Map Reduce Local Work
                 Bucket Mapjoin Context:
                     Alias Bucket File Name Mapping:
-#### A masked pattern was here ####
+                      b {hdfs://### HDFS PATH ###[hdfs://### HDFS PATH ###], hdfs://### HDFS PATH ###[hdfs://### HDFS PATH ###]}
                     Alias Bucket Output File Name Mapping:
-#### A masked pattern was here ####
+                      hdfs://### HDFS PATH ### 0
+                      hdfs://### HDFS PATH ### 1
             Path -> Alias:
-#### A masked pattern was here ####
+              hdfs://### HDFS PATH ### [a]
             Path -> Partition:
-#### A masked pattern was here ####
+              hdfs://### HDFS PATH ### 
                 Partition
                   base file name: hr=0
                   input format: org.apache.hadoop.mapred.TextInputFormat
@@ -218,6 +222,7 @@ STAGE PLANS:
                     columns.comments 
                     columns.types int:string
 #### A masked pattern was here ####
+                    location hdfs://### HDFS PATH ###
                     name default.srcbucket_mapjoin_part_1
                     numFiles 2
                     numRows 0
@@ -241,6 +246,7 @@ STAGE PLANS:
                       columns.comments 
                       columns.types int:string
 #### A masked pattern was here ####
+                      location hdfs://### HDFS PATH ###
                       name default.srcbucket_mapjoin_part_1
                       partition_columns ds/hr
                       partition_columns.types string:string
@@ -266,10 +272,10 @@ STAGE PLANS:
                   File Output Operator
                     compressed: false
                     GlobalTableId: 0
-#### A masked pattern was here ####
+                    directory: hdfs://### HDFS PATH ###
                     NumFilesPerFileSink: 1
                    Statistics: Num rows: 1 Data size: 403 Basic stats: COMPLETE Column stats: NONE
-#### A masked pattern was here ####
+                    Stats Publishing Key Prefix: hdfs://### HDFS PATH ###
                     table:
                        input format: org.apache.hadoop.mapred.SequenceFileInputFormat
                        output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
@@ -301,7 +307,7 @@ PREHOOK: Input: default@srcbucket_mapjoin_part_1
 PREHOOK: Input: default@srcbucket_mapjoin_part_1@ds=2008-04-08/hr=0
 PREHOOK: Input: default@srcbucket_mapjoin_part_2
 PREHOOK: Input: default@srcbucket_mapjoin_part_2@ds=2008-04-08/hr=0
-#### A masked pattern was here ####
+PREHOOK: Output: hdfs://### HDFS PATH ###
 POSTHOOK: query: SELECT /*+ MAPJOIN(b) */ a.key, b.value
 FROM srcbucket_mapjoin_part_1 a JOIN srcbucket_mapjoin_part_2 b
 ON a.key = b.key AND a.ds = '2008-04-08' AND b.ds = '2008-04-08'
@@ -311,5 +317,5 @@ POSTHOOK: Input: default@srcbucket_mapjoin_part_1
 POSTHOOK: Input: default@srcbucket_mapjoin_part_1@ds=2008-04-08/hr=0
 POSTHOOK: Input: default@srcbucket_mapjoin_part_2
 POSTHOOK: Input: default@srcbucket_mapjoin_part_2@ds=2008-04-08/hr=0
-#### A masked pattern was here ####
+POSTHOOK: Output: hdfs://### HDFS PATH ###
 0      val_0

http://git-wip-us.apache.org/repos/asf/hive/blob/42527189/ql/src/test/results/clientpositive/spark/bucketmapjoin7.q.out_spark
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/spark/bucketmapjoin7.q.out_spark b/ql/src/test/results/clientpositive/spark/bucketmapjoin7.q.out_spark
new file mode 100644
index 0000000..f67dec5
--- /dev/null
+++ b/ql/src/test/results/clientpositive/spark/bucketmapjoin7.q.out_spark
@@ -0,0 +1,315 @@
+PREHOOK: query: CREATE TABLE srcbucket_mapjoin_part_1 (key INT, value STRING) PARTITIONED BY (ds STRING, hr STRING) 
+CLUSTERED BY (key) INTO 2 BUCKETS STORED AS TEXTFILE
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+PREHOOK: Output: default@srcbucket_mapjoin_part_1
+POSTHOOK: query: CREATE TABLE srcbucket_mapjoin_part_1 (key INT, value STRING) PARTITIONED BY (ds STRING, hr STRING) 
+CLUSTERED BY (key) INTO 2 BUCKETS STORED AS TEXTFILE
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@srcbucket_mapjoin_part_1
+PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/bmj/000000_0' INTO TABLE srcbucket_mapjoin_part_1 PARTITION (ds='2008-04-08', hr='0')
+PREHOOK: type: LOAD
+#### A masked pattern was here ####
+PREHOOK: Output: default@srcbucket_mapjoin_part_1
+POSTHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/bmj/000000_0' INTO TABLE srcbucket_mapjoin_part_1 PARTITION (ds='2008-04-08', hr='0')
+POSTHOOK: type: LOAD
+#### A masked pattern was here ####
+POSTHOOK: Output: default@srcbucket_mapjoin_part_1
+POSTHOOK: Output: default@srcbucket_mapjoin_part_1@ds=2008-04-08/hr=0
+PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/bmj/000001_0' INTO TABLE srcbucket_mapjoin_part_1 PARTITION (ds='2008-04-08', hr='0')
+PREHOOK: type: LOAD
+#### A masked pattern was here ####
+PREHOOK: Output: default@srcbucket_mapjoin_part_1@ds=2008-04-08/hr=0
+POSTHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/bmj/000001_0' INTO TABLE srcbucket_mapjoin_part_1 PARTITION (ds='2008-04-08', hr='0')
+POSTHOOK: type: LOAD
+#### A masked pattern was here ####
+POSTHOOK: Output: default@srcbucket_mapjoin_part_1@ds=2008-04-08/hr=0
+PREHOOK: query: CREATE TABLE srcbucket_mapjoin_part_2 (key INT, value STRING) PARTITIONED BY (ds STRING, hr STRING) 
+CLUSTERED BY (key) INTO 2 BUCKETS STORED AS TEXTFILE
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+PREHOOK: Output: default@srcbucket_mapjoin_part_2
+POSTHOOK: query: CREATE TABLE srcbucket_mapjoin_part_2 (key INT, value STRING) PARTITIONED BY (ds STRING, hr STRING) 
+CLUSTERED BY (key) INTO 2 BUCKETS STORED AS TEXTFILE
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@srcbucket_mapjoin_part_2
+PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/bmj/000000_0' INTO TABLE srcbucket_mapjoin_part_2 PARTITION (ds='2008-04-08', hr='0')
+PREHOOK: type: LOAD
+#### A masked pattern was here ####
+PREHOOK: Output: default@srcbucket_mapjoin_part_2
+POSTHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/bmj/000000_0' INTO TABLE srcbucket_mapjoin_part_2 PARTITION (ds='2008-04-08', hr='0')
+POSTHOOK: type: LOAD
+#### A masked pattern was here ####
+POSTHOOK: Output: default@srcbucket_mapjoin_part_2
+POSTHOOK: Output: default@srcbucket_mapjoin_part_2@ds=2008-04-08/hr=0
+PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/bmj/000001_0' INTO TABLE srcbucket_mapjoin_part_2 PARTITION (ds='2008-04-08', hr='0')
+PREHOOK: type: LOAD
+#### A masked pattern was here ####
+PREHOOK: Output: default@srcbucket_mapjoin_part_2@ds=2008-04-08/hr=0
+POSTHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/bmj/000001_0' INTO TABLE srcbucket_mapjoin_part_2 PARTITION (ds='2008-04-08', hr='0')
+POSTHOOK: type: LOAD
+#### A masked pattern was here ####
+POSTHOOK: Output: default@srcbucket_mapjoin_part_2@ds=2008-04-08/hr=0
+PREHOOK: query: EXPLAIN EXTENDED
+SELECT /*+ MAPJOIN(b) */ a.key, b.value
+FROM srcbucket_mapjoin_part_1 a JOIN srcbucket_mapjoin_part_2 b
+ON a.key = b.key AND a.ds = '2008-04-08' AND b.ds = '2008-04-08'
+ORDER BY a.key, b.value LIMIT 1
+PREHOOK: type: QUERY
+POSTHOOK: query: EXPLAIN EXTENDED
+SELECT /*+ MAPJOIN(b) */ a.key, b.value
+FROM srcbucket_mapjoin_part_1 a JOIN srcbucket_mapjoin_part_2 b
+ON a.key = b.key AND a.ds = '2008-04-08' AND b.ds = '2008-04-08'
+ORDER BY a.key, b.value LIMIT 1
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+  Stage-2 is a root stage
+  Stage-1 depends on stages: Stage-2
+  Stage-0 depends on stages: Stage-1
+
+STAGE PLANS:
+  Stage: Stage-2
+    Spark
+#### A masked pattern was here ####
+      Vertices:
+        Map 3 
+            Map Operator Tree:
+                TableScan
+                  alias: b
+                  Statistics: Num rows: 69 Data size: 27500 Basic stats: COMPLETE Column stats: NONE
+                  GatherStats: false
+                  Filter Operator
+                    isSamplingPred: false
+                    predicate: key is not null (type: boolean)
+                    Statistics: Num rows: 69 Data size: 27500 Basic stats: COMPLETE Column stats: NONE
+                    Spark HashTable Sink Operator
+                      keys:
+                        0 key (type: int)
+                        1 key (type: int)
+                      Position of Big Table: 0
+            Local Work:
+              Map Reduce Local Work
+                Bucket Mapjoin Context:
+                    Alias Bucket File Name Mapping:
+#### A masked pattern was here ####
+                    Alias Bucket Output File Name Mapping:
+#### A masked pattern was here ####
+            Path -> Alias:
+#### A masked pattern was here ####
+            Path -> Partition:
+#### A masked pattern was here ####
+                Partition
+                  base file name: hr=0
+                  input format: org.apache.hadoop.mapred.TextInputFormat
+                  output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+                  partition values:
+                    ds 2008-04-08
+                    hr 0
+                  properties:
+                    bucket_count 2
+                    bucket_field_name key
+                    column.name.delimiter ,
+                    columns key,value
+                    columns.comments 
+                    columns.types int:string
+#### A masked pattern was here ####
+                    name default.srcbucket_mapjoin_part_2
+                    numFiles 2
+                    numRows 0
+                    partition_columns ds/hr
+                    partition_columns.types string:string
+                    rawDataSize 0
+                    serialization.ddl struct srcbucket_mapjoin_part_2 { i32 key, string value}
+                    serialization.format 1
+                    serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                    totalSize 2750
+#### A masked pattern was here ####
+                  serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                
+                    input format: org.apache.hadoop.mapred.TextInputFormat
+                    output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+                    properties:
+                      bucket_count 2
+                      bucket_field_name key
+                      column.name.delimiter ,
+                      columns key,value
+                      columns.comments 
+                      columns.types int:string
+#### A masked pattern was here ####
+                      name default.srcbucket_mapjoin_part_2
+                      partition_columns ds/hr
+                      partition_columns.types string:string
+                      serialization.ddl struct srcbucket_mapjoin_part_2 { i32 key, string value}
+                      serialization.format 1
+                      serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+#### A masked pattern was here ####
+                    serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                    name: default.srcbucket_mapjoin_part_2
+                  name: default.srcbucket_mapjoin_part_2
+            Truncated Path -> Alias:
+              /srcbucket_mapjoin_part_2/ds=2008-04-08/hr=0 [b]
+
+  Stage: Stage-1
+    Spark
+      Edges:
+        Reducer 2 <- Map 1 (SORT, 1)
+#### A masked pattern was here ####
+      Vertices:
+        Map 1 
+            Map Operator Tree:
+                TableScan
+                  alias: a
+                  Statistics: Num rows: 69 Data size: 27500 Basic stats: COMPLETE Column stats: NONE
+                  GatherStats: false
+                  Filter Operator
+                    isSamplingPred: false
+                    predicate: key is not null (type: boolean)
+                    Statistics: Num rows: 69 Data size: 27500 Basic stats: COMPLETE Column stats: NONE
+                    Map Join Operator
+                      condition map:
+                           Inner Join 0 to 1
+                      keys:
+                        0 key (type: int)
+                        1 key (type: int)
+                      outputColumnNames: _col0, _col8
+                      input vertices:
+                        1 Map 3
+                      Position of Big Table: 0
+                      Statistics: Num rows: 75 Data size: 30250 Basic stats: COMPLETE Column stats: NONE
+                      BucketMapJoin: true
+                      Select Operator
+                        expressions: _col0 (type: int), _col8 (type: string)
+                        outputColumnNames: _col0, _col1
+                        Statistics: Num rows: 75 Data size: 30250 Basic stats: COMPLETE Column stats: NONE
+                        Reduce Output Operator
+                          key expressions: _col0 (type: int), _col1 (type: string)
+                          null sort order: aa
+                          sort order: ++
+                          Statistics: Num rows: 75 Data size: 30250 Basic stats: COMPLETE Column stats: NONE
+                          tag: -1
+                          TopN: 1
+                          TopN Hash Memory Usage: 0.1
+                          auto parallelism: false
+            Local Work:
+              Map Reduce Local Work
+                Bucket Mapjoin Context:
+                    Alias Bucket File Name Mapping:
+#### A masked pattern was here ####
+                    Alias Bucket Output File Name Mapping:
+#### A masked pattern was here ####
+            Path -> Alias:
+#### A masked pattern was here ####
+            Path -> Partition:
+#### A masked pattern was here ####
+                Partition
+                  base file name: hr=0
+                  input format: org.apache.hadoop.mapred.TextInputFormat
+                  output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+                  partition values:
+                    ds 2008-04-08
+                    hr 0
+                  properties:
+                    bucket_count 2
+                    bucket_field_name key
+                    column.name.delimiter ,
+                    columns key,value
+                    columns.comments 
+                    columns.types int:string
+#### A masked pattern was here ####
+                    name default.srcbucket_mapjoin_part_1
+                    numFiles 2
+                    numRows 0
+                    partition_columns ds/hr
+                    partition_columns.types string:string
+                    rawDataSize 0
+                    serialization.ddl struct srcbucket_mapjoin_part_1 { i32 key, string value}
+                    serialization.format 1
+                    serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                    totalSize 2750
+#### A masked pattern was here ####
+                  serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                
+                    input format: org.apache.hadoop.mapred.TextInputFormat
+                    output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+                    properties:
+                      bucket_count 2
+                      bucket_field_name key
+                      column.name.delimiter ,
+                      columns key,value
+                      columns.comments 
+                      columns.types int:string
+#### A masked pattern was here ####
+                      name default.srcbucket_mapjoin_part_1
+                      partition_columns ds/hr
+                      partition_columns.types string:string
+                      serialization.ddl struct srcbucket_mapjoin_part_1 { i32 key, string value}
+                      serialization.format 1
+                      serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+#### A masked pattern was here ####
+                    serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                    name: default.srcbucket_mapjoin_part_1
+                  name: default.srcbucket_mapjoin_part_1
+            Truncated Path -> Alias:
+              /srcbucket_mapjoin_part_1/ds=2008-04-08/hr=0 [a]
+        Reducer 2 
+            Needs Tagging: false
+            Reduce Operator Tree:
+              Select Operator
+                expressions: KEY.reducesinkkey0 (type: int), KEY.reducesinkkey1 (type: string)
+                outputColumnNames: _col0, _col1
+                Statistics: Num rows: 75 Data size: 30250 Basic stats: COMPLETE Column stats: NONE
+                Limit
+                  Number of rows: 1
+                  Statistics: Num rows: 1 Data size: 403 Basic stats: COMPLETE Column stats: NONE
+                  File Output Operator
+                    compressed: false
+                    GlobalTableId: 0
+#### A masked pattern was here ####
+                    NumFilesPerFileSink: 1
+                    Statistics: Num rows: 1 Data size: 403 Basic stats: COMPLETE Column stats: NONE
+#### A masked pattern was here ####
+                    table:
+                        input format: org.apache.hadoop.mapred.SequenceFileInputFormat
+                        output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+                        properties:
+                          columns _col0,_col1
+                          columns.types int:string
+                          escape.delim \
+                          hive.serialization.extend.additional.nesting.levels true
+                          serialization.escape.crlf true
+                          serialization.format 1
+                          serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                        serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                    TotalFiles: 1
+                    GatherStats: false
+                    MultiFileSpray: false
+
+  Stage: Stage-0
+    Fetch Operator
+      limit: 1
+      Processor Tree:
+        ListSink
+
+PREHOOK: query: SELECT /*+ MAPJOIN(b) */ a.key, b.value
+FROM srcbucket_mapjoin_part_1 a JOIN srcbucket_mapjoin_part_2 b
+ON a.key = b.key AND a.ds = '2008-04-08' AND b.ds = '2008-04-08'
+ORDER BY a.key, b.value LIMIT 1
+PREHOOK: type: QUERY
+PREHOOK: Input: default@srcbucket_mapjoin_part_1
+PREHOOK: Input: default@srcbucket_mapjoin_part_1@ds=2008-04-08/hr=0
+PREHOOK: Input: default@srcbucket_mapjoin_part_2
+PREHOOK: Input: default@srcbucket_mapjoin_part_2@ds=2008-04-08/hr=0
+#### A masked pattern was here ####
+POSTHOOK: query: SELECT /*+ MAPJOIN(b) */ a.key, b.value
+FROM srcbucket_mapjoin_part_1 a JOIN srcbucket_mapjoin_part_2 b
+ON a.key = b.key AND a.ds = '2008-04-08' AND b.ds = '2008-04-08'
+ORDER BY a.key, b.value LIMIT 1
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@srcbucket_mapjoin_part_1
+POSTHOOK: Input: default@srcbucket_mapjoin_part_1@ds=2008-04-08/hr=0
+POSTHOOK: Input: default@srcbucket_mapjoin_part_2
+POSTHOOK: Input: default@srcbucket_mapjoin_part_2@ds=2008-04-08/hr=0
+#### A masked pattern was here ####
+0      val_0

http://git-wip-us.apache.org/repos/asf/hive/blob/42527189/ql/src/test/results/clientpositive/spark/constprog_semijoin.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/spark/constprog_semijoin.q.out b/ql/src/test/results/clientpositive/spark/constprog_semijoin.q.out
index 01bac11..ae9da65 100644
--- a/ql/src/test/results/clientpositive/spark/constprog_semijoin.q.out
+++ b/ql/src/test/results/clientpositive/spark/constprog_semijoin.q.out
@@ -140,12 +140,12 @@ PREHOOK: query: select table1.id, table1.val, table1.val1 from table1 left semi
 PREHOOK: type: QUERY
 PREHOOK: Input: default@table1
 PREHOOK: Input: default@table3
-#### A masked pattern was here ####
+PREHOOK: Output: hdfs://### HDFS PATH ###
 POSTHOOK: query: select table1.id, table1.val, table1.val1 from table1 left semi join table3 on table1.dimid = table3.id where table1.val = 't1val01'
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@table1
 POSTHOOK: Input: default@table3
-#### A masked pattern was here ####
+POSTHOOK: Output: hdfs://### HDFS PATH ###
 1      t1val01 val101
 3      t1val01 val104
 3      t1val01 val106
@@ -272,13 +272,13 @@ PREHOOK: type: QUERY
 PREHOOK: Input: default@table1
 PREHOOK: Input: default@table2
 PREHOOK: Input: default@table3
-#### A masked pattern was here ####
+PREHOOK: Output: hdfs://### HDFS PATH ###
 POSTHOOK: query: select table1.id, table1.val, table2.val2 from table1 inner join table2 on table1.val = 't1val01' and table1.id = table2.id left semi join table3 on table1.dimid = table3.id
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@table1
 POSTHOOK: Input: default@table2
 POSTHOOK: Input: default@table3
-#### A masked pattern was here ####
+POSTHOOK: Output: hdfs://### HDFS PATH ###
 1      t1val01 t2val201
 3      t1val01 t2val203
 3      t1val01 t2val203
@@ -404,13 +404,13 @@ PREHOOK: type: QUERY
 PREHOOK: Input: default@table1
 PREHOOK: Input: default@table2
 PREHOOK: Input: default@table3
-#### A masked pattern was here ####
+PREHOOK: Output: hdfs://### HDFS PATH ###
 POSTHOOK: query: select table1.id, table1.val, table2.val2 from table1 left semi join table3 on table1.dimid = table3.id inner join table2 on table1.val = 't1val01' and table1.id = table2.id
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@table1
 POSTHOOK: Input: default@table2
 POSTHOOK: Input: default@table3
-#### A masked pattern was here ####
+POSTHOOK: Output: hdfs://### HDFS PATH ###
 1      t1val01 t2val201
 3      t1val01 t2val203
 3      t1val01 t2val203
@@ -497,12 +497,12 @@ PREHOOK: query: select table1.id, table1.val, table1.val1 from table1 left semi
 PREHOOK: type: QUERY
 PREHOOK: Input: default@table1
 PREHOOK: Input: default@table3
-#### A masked pattern was here ####
+PREHOOK: Output: hdfs://### HDFS PATH ###
 POSTHOOK: query: select table1.id, table1.val, table1.val1 from table1 left semi join table3 on table1.dimid = table3.id and table3.id = 100 where table1.dimid <> 100
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@table1
 POSTHOOK: Input: default@table3
-#### A masked pattern was here ####
+POSTHOOK: Output: hdfs://### HDFS PATH ###
 PREHOOK: query: explain select table1.id, table1.val, table1.val1 from table1 left semi join table3 on table1.dimid = table3.id and table3.id = 100 where table1.dimid  IN (100,200)
 PREHOOK: type: QUERY
 POSTHOOK: query: explain select table1.id, table1.val, table1.val1 from table1 left semi join table3 on table1.dimid = table3.id and table3.id = 100 where table1.dimid  IN (100,200)
@@ -586,12 +586,12 @@ PREHOOK: query: select table1.id, table1.val, table1.val1 from table1 left semi
 PREHOOK: type: QUERY
 PREHOOK: Input: default@table1
 PREHOOK: Input: default@table3
-#### A masked pattern was here ####
+PREHOOK: Output: hdfs://### HDFS PATH ###
 POSTHOOK: query: select table1.id, table1.val, table1.val1 from table1 left semi join table3 on table1.dimid = table3.id and table3.id = 100 where table1.dimid  IN (100,200)
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@table1
 POSTHOOK: Input: default@table3
-#### A masked pattern was here ####
+POSTHOOK: Output: hdfs://### HDFS PATH ###
 1      t1val01 val101
 3      t1val01 val104
 PREHOOK: query: explain select table1.id, table1.val, table1.val1 from table1 left semi join table3 on table1.dimid = table3.id and table3.id = 100 where table1.dimid  = 200
@@ -677,12 +677,12 @@ PREHOOK: query: select table1.id, table1.val, table1.val1 from table1 left semi
 PREHOOK: type: QUERY
 PREHOOK: Input: default@table1
 PREHOOK: Input: default@table3
-#### A masked pattern was here ####
+PREHOOK: Output: hdfs://### HDFS PATH ###
 POSTHOOK: query: select table1.id, table1.val, table1.val1 from table1 left semi join table3 on table1.dimid = table3.id and table3.id = 100 where table1.dimid  = 200
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@table1
 POSTHOOK: Input: default@table3
-#### A masked pattern was here ####
+POSTHOOK: Output: hdfs://### HDFS PATH ###
 PREHOOK: query: explain select table1.id, table1.val, table1.val1 from table1 left semi join table3 on table1.dimid = table3.id and table3.id = 100 where table1.dimid  = 100
 PREHOOK: type: QUERY
 POSTHOOK: query: explain select table1.id, table1.val, table1.val1 from table1 left semi join table3 on table1.dimid = table3.id and table3.id = 100 where table1.dimid  = 100
@@ -766,12 +766,12 @@ PREHOOK: query: select table1.id, table1.val, table1.val1 from table1 left semi
 PREHOOK: type: QUERY
 PREHOOK: Input: default@table1
 PREHOOK: Input: default@table3
-#### A masked pattern was here ####
+PREHOOK: Output: hdfs://### HDFS PATH ###
 POSTHOOK: query: select table1.id, table1.val, table1.val1 from table1 left semi join table3 on table1.dimid = table3.id and table3.id = 100 where table1.dimid  = 100
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@table1
 POSTHOOK: Input: default@table3
-#### A masked pattern was here ####
+POSTHOOK: Output: hdfs://### HDFS PATH ###
 1      t1val01 val101
 3      t1val01 val104
 PREHOOK: query: explain select table1.id, table1.val, table1.val1 from table1 left semi join table3 on table1.dimid = table3.id and table3.id = 100
@@ -857,11 +857,11 @@ PREHOOK: query: select table1.id, table1.val, table1.val1 from table1 left semi
 PREHOOK: type: QUERY
 PREHOOK: Input: default@table1
 PREHOOK: Input: default@table3
-#### A masked pattern was here ####
+PREHOOK: Output: hdfs://### HDFS PATH ###
 POSTHOOK: query: select table1.id, table1.val, table1.val1 from table1 left semi join table3 on table1.dimid = table3.id and table3.id = 100
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@table1
 POSTHOOK: Input: default@table3
-#### A masked pattern was here ####
+POSTHOOK: Output: hdfs://### HDFS PATH ###
 1      t1val01 val101
 3      t1val01 val104

http://git-wip-us.apache.org/repos/asf/hive/blob/42527189/ql/src/test/results/clientpositive/spark/disable_merge_for_bucketing.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/spark/disable_merge_for_bucketing.q.out b/ql/src/test/results/clientpositive/spark/disable_merge_for_bucketing.q.out
index 86d6e1a..96a405a 100644
--- a/ql/src/test/results/clientpositive/spark/disable_merge_for_bucketing.q.out
+++ b/ql/src/test/results/clientpositive/spark/disable_merge_for_bucketing.q.out
@@ -45,9 +45,9 @@ STAGE PLANS:
                      value expressions: _col0 (type: string), _col1 (type: string)
                       auto parallelism: false
             Path -> Alias:
-#### A masked pattern was here ####
+              hdfs://### HDFS PATH ### [src]
             Path -> Partition:
-#### A masked pattern was here ####
+              hdfs://### HDFS PATH ### 
                 Partition
                   base file name: src
                   input format: org.apache.hadoop.mapred.TextInputFormat
@@ -60,6 +60,7 @@ STAGE PLANS:
                     columns.comments 'default','default'
                     columns.types string:string
 #### A masked pattern was here ####
+                    location hdfs://### HDFS PATH ###
                     name default.src
                     numFiles 1
                     numRows 500
@@ -81,6 +82,7 @@ STAGE PLANS:
                       columns.comments 'default','default'
                       columns.types string:string
 #### A masked pattern was here ####
+                      location hdfs://### HDFS PATH ###
                       name default.src
                       numFiles 1
                       numRows 500
@@ -105,10 +107,10 @@ STAGE PLANS:
                 File Output Operator
                   compressed: false
                   GlobalTableId: 1
-#### A masked pattern was here ####
+                  directory: hdfs://### HDFS PATH ###
                   NumFilesPerFileSink: 2
                  Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE
-#### A masked pattern was here ####
+                  Stats Publishing Key Prefix: hdfs://### HDFS PATH ###
                   table:
                       input format: org.apache.hadoop.mapred.TextInputFormat
                      output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
@@ -121,6 +123,7 @@ STAGE PLANS:
                         columns.comments 
                         columns.types int:string
 #### A masked pattern was here ####
+                        location hdfs://### HDFS PATH ###
                         name default.bucket2_1
                         numFiles 0
                         numRows 0
@@ -140,7 +143,7 @@ STAGE PLANS:
     Move Operator
       tables:
           replace: true
-#### A masked pattern was here ####
+          source: hdfs://### HDFS PATH ###
           table:
               input format: org.apache.hadoop.mapred.TextInputFormat
              output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
@@ -153,6 +156,7 @@ STAGE PLANS:
                 columns.comments 
                 columns.types int:string
 #### A masked pattern was here ####
+                location hdfs://### HDFS PATH ###
                 name default.bucket2_1
                 numFiles 0
                 numRows 0
@@ -168,7 +172,7 @@ STAGE PLANS:
   Stage: Stage-2
     Stats Work
       Basic Stats Work:
-#### A masked pattern was here ####
+          Stats Aggregation Key Prefix: hdfs://### HDFS PATH ###
 
 PREHOOK: query: insert overwrite table bucket2_1
 select * from src
@@ -239,11 +243,11 @@ STAGE PLANS:
 PREHOOK: query: select * from bucket2_1 tablesample (bucket 1 out of 2) s order by key
 PREHOOK: type: QUERY
 PREHOOK: Input: default@bucket2_1
-#### A masked pattern was here ####
+PREHOOK: Output: hdfs://### HDFS PATH ###
 POSTHOOK: query: select * from bucket2_1 tablesample (bucket 1 out of 2) s order by key
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@bucket2_1
-#### A masked pattern was here ####
+POSTHOOK: Output: hdfs://### HDFS PATH ###
 0      val_0
 0      val_0
 0      val_0

http://git-wip-us.apache.org/repos/asf/hive/blob/42527189/ql/src/test/results/clientpositive/spark/disable_merge_for_bucketing.q.out_spark
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/spark/disable_merge_for_bucketing.q.out_spark b/ql/src/test/results/clientpositive/spark/disable_merge_for_bucketing.q.out_spark
new file mode 100644
index 0000000..86d6e1a
--- /dev/null
+++ b/ql/src/test/results/clientpositive/spark/disable_merge_for_bucketing.q.out_spark
@@ -0,0 +1,493 @@
+PREHOOK: query: CREATE TABLE bucket2_1(key int, value string) CLUSTERED BY (key) INTO 2 BUCKETS
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+PREHOOK: Output: default@bucket2_1
+POSTHOOK: query: CREATE TABLE bucket2_1(key int, value string) CLUSTERED BY (key) INTO 2 BUCKETS
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@bucket2_1
+PREHOOK: query: explain extended
+insert overwrite table bucket2_1
+select * from src
+PREHOOK: type: QUERY
+POSTHOOK: query: explain extended
+insert overwrite table bucket2_1
+select * from src
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+  Stage-1 is a root stage
+  Stage-0 depends on stages: Stage-1
+  Stage-2 depends on stages: Stage-0
+
+STAGE PLANS:
+  Stage: Stage-1
+    Spark
+      Edges:
+        Reducer 2 <- Map 1 (PARTITION-LEVEL SORT, 1)
+#### A masked pattern was here ####
+      Vertices:
+        Map 1 
+            Map Operator Tree:
+                TableScan
+                  alias: src
+                  Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE
+                  GatherStats: false
+                  Select Operator
+                    expressions: key (type: string), value (type: string)
+                    outputColumnNames: _col0, _col1
+                    Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE
+                    Reduce Output Operator
+                      null sort order: 
+                      sort order: 
+                      Map-reduce partition columns: UDFToInteger(_col0) (type: int)
+                      Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE
+                      tag: -1
+                      value expressions: _col0 (type: string), _col1 (type: string)
+                      auto parallelism: false
+            Path -> Alias:
+#### A masked pattern was here ####
+            Path -> Partition:
+#### A masked pattern was here ####
+                Partition
+                  base file name: src
+                  input format: org.apache.hadoop.mapred.TextInputFormat
+                  output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+                  properties:
+                    COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
+                    bucket_count -1
+                    column.name.delimiter ,
+                    columns key,value
+                    columns.comments 'default','default'
+                    columns.types string:string
+#### A masked pattern was here ####
+                    name default.src
+                    numFiles 1
+                    numRows 500
+                    rawDataSize 5312
+                    serialization.ddl struct src { string key, string value}
+                    serialization.format 1
+                    serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                    totalSize 5812
+#### A masked pattern was here ####
+                  serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                
+                    input format: org.apache.hadoop.mapred.TextInputFormat
+                    output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+                    properties:
+                      COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
+                      bucket_count -1
+                      column.name.delimiter ,
+                      columns key,value
+                      columns.comments 'default','default'
+                      columns.types string:string
+#### A masked pattern was here ####
+                      name default.src
+                      numFiles 1
+                      numRows 500
+                      rawDataSize 5312
+                      serialization.ddl struct src { string key, string value}
+                      serialization.format 1
+                      serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                      totalSize 5812
+#### A masked pattern was here ####
+                    serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                    name: default.src
+                  name: default.src
+            Truncated Path -> Alias:
+              /src [src]
+        Reducer 2 
+            Needs Tagging: false
+            Reduce Operator Tree:
+              Select Operator
+                expressions: UDFToInteger(VALUE._col0) (type: int), VALUE._col1 (type: string)
+                outputColumnNames: _col0, _col1
+                Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE
+                File Output Operator
+                  compressed: false
+                  GlobalTableId: 1
+#### A masked pattern was here ####
+                  NumFilesPerFileSink: 2
+                  Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE
+#### A masked pattern was here ####
+                  table:
+                      input format: org.apache.hadoop.mapred.TextInputFormat
+                      output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+                      properties:
+                        COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
+                        bucket_count 2
+                        bucket_field_name key
+                        column.name.delimiter ,
+                        columns key,value
+                        columns.comments 
+                        columns.types int:string
+#### A masked pattern was here ####
+                        name default.bucket2_1
+                        numFiles 0
+                        numRows 0
+                        rawDataSize 0
+                        serialization.ddl struct bucket2_1 { i32 key, string value}
+                        serialization.format 1
+                        serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                        totalSize 0
+#### A masked pattern was here ####
+                      serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                      name: default.bucket2_1
+                  TotalFiles: 2
+                  GatherStats: true
+                  MultiFileSpray: true
+
+  Stage: Stage-0
+    Move Operator
+      tables:
+          replace: true
+#### A masked pattern was here ####
+          table:
+              input format: org.apache.hadoop.mapred.TextInputFormat
+              output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+              properties:
+                COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
+                bucket_count 2
+                bucket_field_name key
+                column.name.delimiter ,
+                columns key,value
+                columns.comments 
+                columns.types int:string
+#### A masked pattern was here ####
+                name default.bucket2_1
+                numFiles 0
+                numRows 0
+                rawDataSize 0
+                serialization.ddl struct bucket2_1 { i32 key, string value}
+                serialization.format 1
+                serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                totalSize 0
+#### A masked pattern was here ####
+              serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+              name: default.bucket2_1
+
+  Stage: Stage-2
+    Stats Work
+      Basic Stats Work:
+#### A masked pattern was here ####
+
+PREHOOK: query: insert overwrite table bucket2_1
+select * from src
+PREHOOK: type: QUERY
+PREHOOK: Input: default@src
+PREHOOK: Output: default@bucket2_1
+POSTHOOK: query: insert overwrite table bucket2_1
+select * from src
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@src
+POSTHOOK: Output: default@bucket2_1
+POSTHOOK: Lineage: bucket2_1.key EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: bucket2_1.value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
+PREHOOK: query: explain
+select * from bucket2_1 tablesample (bucket 1 out of 2) s order by key
+PREHOOK: type: QUERY
+POSTHOOK: query: explain
+select * from bucket2_1 tablesample (bucket 1 out of 2) s order by key
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+  Stage-1 is a root stage
+  Stage-0 depends on stages: Stage-1
+
+STAGE PLANS:
+  Stage: Stage-1
+    Spark
+      Edges:
+        Reducer 2 <- Map 1 (SORT, 1)
+#### A masked pattern was here ####
+      Vertices:
+        Map 1 
+            Map Operator Tree:
+                TableScan
+                  alias: s
+                  Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE
+                  Filter Operator
+                    predicate: (((hash(key) & 2147483647) % 2) = 0) (type: boolean)
+                    Statistics: Num rows: 250 Data size: 2656 Basic stats: COMPLETE Column stats: NONE
+                    Select Operator
+                      expressions: key (type: int), value (type: string)
+                      outputColumnNames: _col0, _col1
+                      Statistics: Num rows: 250 Data size: 2656 Basic stats: COMPLETE Column stats: NONE
+                      Reduce Output Operator
+                        key expressions: _col0 (type: int)
+                        sort order: +
+                        Statistics: Num rows: 250 Data size: 2656 Basic stats: COMPLETE Column stats: NONE
+                        value expressions: _col1 (type: string)
+        Reducer 2 
+            Reduce Operator Tree:
+              Select Operator
+                expressions: KEY.reducesinkkey0 (type: int), VALUE._col0 (type: string)
+                outputColumnNames: _col0, _col1
+                Statistics: Num rows: 250 Data size: 2656 Basic stats: COMPLETE Column stats: NONE
+                File Output Operator
+                  compressed: false
+                  Statistics: Num rows: 250 Data size: 2656 Basic stats: COMPLETE Column stats: NONE
+                  table:
+                      input format: org.apache.hadoop.mapred.SequenceFileInputFormat
+                      output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+                      serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+
+  Stage: Stage-0
+    Fetch Operator
+      limit: -1
+      Processor Tree:
+        ListSink
+
+PREHOOK: query: select * from bucket2_1 tablesample (bucket 1 out of 2) s order by key
+PREHOOK: type: QUERY
+PREHOOK: Input: default@bucket2_1
+#### A masked pattern was here ####
+POSTHOOK: query: select * from bucket2_1 tablesample (bucket 1 out of 2) s order by key
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@bucket2_1
+#### A masked pattern was here ####
+0      val_0
+0      val_0
+0      val_0
+2      val_2
+4      val_4
+8      val_8
+10     val_10
+12     val_12
+12     val_12
+18     val_18
+18     val_18
+20     val_20
+24     val_24
+24     val_24
+26     val_26
+26     val_26
+28     val_28
+30     val_30
+34     val_34
+42     val_42
+42     val_42
+44     val_44
+54     val_54
+58     val_58
+58     val_58
+64     val_64
+66     val_66
+70     val_70
+70     val_70
+70     val_70
+72     val_72
+72     val_72
+74     val_74
+76     val_76
+76     val_76
+78     val_78
+80     val_80
+82     val_82
+84     val_84
+84     val_84
+86     val_86
+90     val_90
+90     val_90
+90     val_90
+92     val_92
+96     val_96
+98     val_98
+98     val_98
+100    val_100
+100    val_100
+104    val_104
+104    val_104
+114    val_114
+116    val_116
+118    val_118
+118    val_118
+120    val_120
+120    val_120
+126    val_126
+128    val_128
+128    val_128
+128    val_128
+134    val_134
+134    val_134
+136    val_136
+138    val_138
+138    val_138
+138    val_138
+138    val_138
+146    val_146
+146    val_146
+150    val_150
+152    val_152
+152    val_152
+156    val_156
+158    val_158
+160    val_160
+162    val_162
+164    val_164
+164    val_164
+166    val_166
+168    val_168
+170    val_170
+172    val_172
+172    val_172
+174    val_174
+174    val_174
+176    val_176
+176    val_176
+178    val_178
+180    val_180
+186    val_186
+190    val_190
+192    val_192
+194    val_194
+196    val_196
+200    val_200
+200    val_200
+202    val_202
+208    val_208
+208    val_208
+208    val_208
+214    val_214
+216    val_216
+216    val_216
+218    val_218
+222    val_222
+224    val_224
+224    val_224
+226    val_226
+228    val_228
+230    val_230
+230    val_230
+230    val_230
+230    val_230
+230    val_230
+238    val_238
+238    val_238
+242    val_242
+242    val_242
+244    val_244
+248    val_248
+252    val_252
+256    val_256
+256    val_256
+258    val_258
+260    val_260
+262    val_262
+266    val_266
+272    val_272
+272    val_272
+274    val_274
+278    val_278
+278    val_278
+280    val_280
+280    val_280
+282    val_282
+282    val_282
+284    val_284
+286    val_286
+288    val_288
+288    val_288
+292    val_292
+296    val_296
+298    val_298
+298    val_298
+298    val_298
+302    val_302
+306    val_306
+308    val_308
+310    val_310
+316    val_316
+316    val_316
+316    val_316
+318    val_318
+318    val_318
+318    val_318
+322    val_322
+322    val_322
+332    val_332
+336    val_336
+338    val_338
+342    val_342
+342    val_342
+344    val_344
+344    val_344
+348    val_348
+348    val_348
+348    val_348
+348    val_348
+348    val_348
+356    val_356
+360    val_360
+362    val_362
+364    val_364
+366    val_366
+368    val_368
+374    val_374
+378    val_378
+382    val_382
+382    val_382
+384    val_384
+384    val_384
+384    val_384
+386    val_386
+392    val_392
+394    val_394
+396    val_396
+396    val_396
+396    val_396
+400    val_400
+402    val_402
+404    val_404
+404    val_404
+406    val_406
+406    val_406
+406    val_406
+406    val_406
+414    val_414
+414    val_414
+418    val_418
+424    val_424
+424    val_424
+430    val_430
+430    val_430
+430    val_430
+432    val_432
+436    val_436
+438    val_438
+438    val_438
+438    val_438
+444    val_444
+446    val_446
+448    val_448
+452    val_452
+454    val_454
+454    val_454
+454    val_454
+458    val_458
+458    val_458
+460    val_460
+462    val_462
+462    val_462
+466    val_466
+466    val_466
+466    val_466
+468    val_468
+468    val_468
+468    val_468
+468    val_468
+470    val_470
+472    val_472
+478    val_478
+478    val_478
+480    val_480
+480    val_480
+480    val_480
+482    val_482
+484    val_484
+490    val_490
+492    val_492
+492    val_492
+494    val_494
+496    val_496
+498    val_498
+498    val_498
+498    val_498

http://git-wip-us.apache.org/repos/asf/hive/blob/42527189/ql/src/test/results/clientpositive/spark/dynamic_rdd_cache.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/spark/dynamic_rdd_cache.q.out b/ql/src/test/results/clientpositive/spark/dynamic_rdd_cache.q.out
index 0379c9c..39a5d5d 100644
--- a/ql/src/test/results/clientpositive/spark/dynamic_rdd_cache.q.out
+++ b/ql/src/test/results/clientpositive/spark/dynamic_rdd_cache.q.out
@@ -259,19 +259,19 @@ STAGE PLANS:
 PREHOOK: query: SELECT dest1.* FROM dest1
 PREHOOK: type: QUERY
 PREHOOK: Input: default@dest1
-#### A masked pattern was here ####
+PREHOOK: Output: hdfs://### HDFS PATH ###
 POSTHOOK: query: SELECT dest1.* FROM dest1
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@dest1
-#### A masked pattern was here ####
+POSTHOOK: Output: hdfs://### HDFS PATH ###
 PREHOOK: query: SELECT dest2.* FROM dest2
 PREHOOK: type: QUERY
 PREHOOK: Input: default@dest2
-#### A masked pattern was here ####
+PREHOOK: Output: hdfs://### HDFS PATH ###
 POSTHOOK: query: SELECT dest2.* FROM dest2
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@dest2
-#### A masked pattern was here ####
+POSTHOOK: Output: hdfs://### HDFS PATH ###
 PREHOOK: query: DROP TABLE dest1
 PREHOOK: type: DROPTABLE
 PREHOOK: Input: default@dest1
@@ -461,11 +461,11 @@ STAGE PLANS:
 PREHOOK: query: SELECT * FROM tmptable x SORT BY x.key
 PREHOOK: type: QUERY
 PREHOOK: Input: default@tmptable
-#### A masked pattern was here ####
+PREHOOK: Output: hdfs://### HDFS PATH ###
 POSTHOOK: query: SELECT * FROM tmptable x SORT BY x.key
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@tmptable
-#### A masked pattern was here ####
+POSTHOOK: Output: hdfs://### HDFS PATH ###
 PREHOOK: query: DROP TABLE tmtable
 PREHOOK: type: DROPTABLE
 POSTHOOK: query: DROP TABLE tmtable

http://git-wip-us.apache.org/repos/asf/hive/blob/42527189/ql/src/test/results/clientpositive/spark/empty_dir_in_table.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/spark/empty_dir_in_table.q.out b/ql/src/test/results/clientpositive/spark/empty_dir_in_table.q.out
index e804de3..c91a3e2 100644
--- a/ql/src/test/results/clientpositive/spark/empty_dir_in_table.q.out
+++ b/ql/src/test/results/clientpositive/spark/empty_dir_in_table.q.out
@@ -1,21 +1,21 @@
-#### A masked pattern was here ####
+PREHOOK: query: create external table roottable (key string) row format delimited fields terminated by '\\t' stored as textfile location 'hdfs://### HDFS PATH ###'
 PREHOOK: type: CREATETABLE
-#### A masked pattern was here ####
+PREHOOK: Input: hdfs://### HDFS PATH ###
 PREHOOK: Output: database:default
 PREHOOK: Output: default@roottable
-#### A masked pattern was here ####
+POSTHOOK: query: create external table roottable (key string) row format delimited fields terminated by '\\t' stored as textfile location 'hdfs://### HDFS PATH ###'
 POSTHOOK: type: CREATETABLE
-#### A masked pattern was here ####
+POSTHOOK: Input: hdfs://### HDFS PATH ###
 POSTHOOK: Output: database:default
 POSTHOOK: Output: default@roottable
 PREHOOK: query: select count(*) from roottable
 PREHOOK: type: QUERY
 PREHOOK: Input: default@roottable
-#### A masked pattern was here ####
+PREHOOK: Output: hdfs://### HDFS PATH ###
 POSTHOOK: query: select count(*) from roottable
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@roottable
-#### A masked pattern was here ####
+POSTHOOK: Output: hdfs://### HDFS PATH ###
 0
 PREHOOK: query: insert into table roottable select key from src where (key < 20) order by key
 PREHOOK: type: QUERY
@@ -29,18 +29,18 @@ POSTHOOK: Lineage: roottable.key SIMPLE [(src)src.FieldSchema(name:key, type:str
 PREHOOK: query: select count(*) from roottable
 PREHOOK: type: QUERY
 PREHOOK: Input: default@roottable
-#### A masked pattern was here ####
+PREHOOK: Output: hdfs://### HDFS PATH ###
 POSTHOOK: query: select count(*) from roottable
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@roottable
-#### A masked pattern was here ####
+POSTHOOK: Output: hdfs://### HDFS PATH ###
 20
 PREHOOK: query: select count(*) from roottable
 PREHOOK: type: QUERY
 PREHOOK: Input: default@roottable
-#### A masked pattern was here ####
+PREHOOK: Output: hdfs://### HDFS PATH ###
 POSTHOOK: query: select count(*) from roottable
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@roottable
-#### A masked pattern was here ####
+POSTHOOK: Output: hdfs://### HDFS PATH ###
 20

http://git-wip-us.apache.org/repos/asf/hive/blob/42527189/ql/src/test/results/clientpositive/spark/external_table_with_space_in_location_path.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/spark/external_table_with_space_in_location_path.q.out b/ql/src/test/results/clientpositive/spark/external_table_with_space_in_location_path.q.out
index 831e339..e8fe19f 100644
--- a/ql/src/test/results/clientpositive/spark/external_table_with_space_in_location_path.q.out
+++ b/ql/src/test/results/clientpositive/spark/external_table_with_space_in_location_path.q.out
@@ -1,32 +1,32 @@
 #### A masked pattern was here ####
 PREHOOK: type: CREATETABLE
-#### A masked pattern was here ####
+PREHOOK: Input: hdfs://### HDFS PATH ###+with space
 PREHOOK: Output: database:default
 PREHOOK: Output: default@spacetest
 #### A masked pattern was here ####
 POSTHOOK: type: CREATETABLE
-#### A masked pattern was here ####
+POSTHOOK: Input: hdfs://### HDFS PATH ###+with space
 POSTHOOK: Output: database:default
 POSTHOOK: Output: default@spacetest
 PREHOOK: query: SELECT * FROM spacetest
 PREHOOK: type: QUERY
 PREHOOK: Input: default@spacetest
-#### A masked pattern was here ####
+PREHOOK: Output: hdfs://### HDFS PATH ###
 POSTHOOK: query: SELECT * FROM spacetest
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@spacetest
-#### A masked pattern was here ####
+POSTHOOK: Output: hdfs://### HDFS PATH ###
 12     jason
 13     steven
 15     joe
 PREHOOK: query: SELECT count(*) FROM spacetest
 PREHOOK: type: QUERY
 PREHOOK: Input: default@spacetest
-#### A masked pattern was here ####
+PREHOOK: Output: hdfs://### HDFS PATH ###
 POSTHOOK: query: SELECT count(*) FROM spacetest
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@spacetest
-#### A masked pattern was here ####
+POSTHOOK: Output: hdfs://### HDFS PATH ###
 3
 PREHOOK: query: DROP TABLE spacetest
 PREHOOK: type: DROPTABLE
@@ -46,23 +46,23 @@ POSTHOOK: Output: database:default
 POSTHOOK: Output: default@spacetestpartition
 #### A masked pattern was here ####
 PREHOOK: type: ALTERTABLE_ADDPARTS
-#### A masked pattern was here ####
+PREHOOK: Input: hdfs://### HDFS PATH ###+with space
 PREHOOK: Output: default@spacetestpartition
 #### A masked pattern was here ####
 POSTHOOK: type: ALTERTABLE_ADDPARTS
-#### A masked pattern was here ####
+POSTHOOK: Input: hdfs://### HDFS PATH ###+with space
 POSTHOOK: Output: default@spacetestpartition
 POSTHOOK: Output: default@spacetestpartition@day=10
 PREHOOK: query: SELECT * FROM spacetestpartition
 PREHOOK: type: QUERY
 PREHOOK: Input: default@spacetestpartition
 PREHOOK: Input: default@spacetestpartition@day=10
-#### A masked pattern was here ####
+PREHOOK: Output: hdfs://### HDFS PATH ###
 POSTHOOK: query: SELECT * FROM spacetestpartition
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@spacetestpartition
 POSTHOOK: Input: default@spacetestpartition@day=10
-#### A masked pattern was here ####
+POSTHOOK: Output: hdfs://### HDFS PATH ###
 12     jason   10
 13     steven  10
 15     joe     10
@@ -70,12 +70,12 @@ PREHOOK: query: SELECT count(*) FROM spacetestpartition
 PREHOOK: type: QUERY
 PREHOOK: Input: default@spacetestpartition
 PREHOOK: Input: default@spacetestpartition@day=10
-#### A masked pattern was here ####
+PREHOOK: Output: hdfs://### HDFS PATH ###
 POSTHOOK: query: SELECT count(*) FROM spacetestpartition
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@spacetestpartition
 POSTHOOK: Input: default@spacetestpartition@day=10
-#### A masked pattern was here ####
+POSTHOOK: Output: hdfs://### HDFS PATH ###
 3
 PREHOOK: query: DROP TABLE spacetestpartition
 PREHOOK: type: DROPTABLE

http://git-wip-us.apache.org/repos/asf/hive/blob/42527189/ql/src/test/results/clientpositive/spark/file_with_header_footer.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/spark/file_with_header_footer.q.out b/ql/src/test/results/clientpositive/spark/file_with_header_footer.q.out
index ca3dadb..e16df6b 100644
--- a/ql/src/test/results/clientpositive/spark/file_with_header_footer.q.out
+++ b/ql/src/test/results/clientpositive/spark/file_with_header_footer.q.out
@@ -1,21 +1,21 @@
 #### A masked pattern was here ####
 PREHOOK: type: CREATETABLE
-#### A masked pattern was here ####
+PREHOOK: Input: hdfs://### HDFS PATH ###
 PREHOOK: Output: database:default
 PREHOOK: Output: default@header_footer_table_1
 #### A masked pattern was here ####
 POSTHOOK: type: CREATETABLE
-#### A masked pattern was here ####
+POSTHOOK: Input: hdfs://### HDFS PATH ###
 POSTHOOK: Output: database:default
 POSTHOOK: Output: default@header_footer_table_1
 PREHOOK: query: SELECT * FROM header_footer_table_1
 PREHOOK: type: QUERY
 PREHOOK: Input: default@header_footer_table_1
-#### A masked pattern was here ####
+PREHOOK: Output: hdfs://### HDFS PATH ###
 POSTHOOK: query: SELECT * FROM header_footer_table_1
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@header_footer_table_1
-#### A masked pattern was here ####
+POSTHOOK: Output: hdfs://### HDFS PATH ###
 steven hive    1
 dave   oozie   2
 xifa   phd     3
@@ -30,11 +30,11 @@ david3      oozie   22
 PREHOOK: query: SELECT * FROM header_footer_table_1 WHERE id < 50
 PREHOOK: type: QUERY
 PREHOOK: Input: default@header_footer_table_1
-#### A masked pattern was here ####
+PREHOOK: Output: hdfs://### HDFS PATH ###
 POSTHOOK: query: SELECT * FROM header_footer_table_1 WHERE id < 50
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@header_footer_table_1
-#### A masked pattern was here ####
+POSTHOOK: Output: hdfs://### HDFS PATH ###
 steven hive    1
 dave   oozie   2
 xifa   phd     3
@@ -54,31 +54,31 @@ POSTHOOK: query: CREATE EXTERNAL TABLE header_footer_table_2 (name string, messa
 POSTHOOK: type: CREATETABLE
 POSTHOOK: Output: database:default
 POSTHOOK: Output: default@header_footer_table_2
-#### A masked pattern was here ####
+PREHOOK: query: ALTER TABLE header_footer_table_2 ADD PARTITION (year=2012, month=1, day=1) location 'hdfs://### HDFS PATH ###'
 PREHOOK: type: ALTERTABLE_ADDPARTS
-#### A masked pattern was here ####
+PREHOOK: Input: hdfs://### HDFS PATH ###
 PREHOOK: Output: default@header_footer_table_2
-#### A masked pattern was here ####
+POSTHOOK: query: ALTER TABLE header_footer_table_2 ADD PARTITION (year=2012, month=1, day=1) location 'hdfs://### HDFS PATH ###'
 POSTHOOK: type: ALTERTABLE_ADDPARTS
-#### A masked pattern was here ####
+POSTHOOK: Input: hdfs://### HDFS PATH ###
 POSTHOOK: Output: default@header_footer_table_2
 POSTHOOK: Output: default@header_footer_table_2@year=2012/month=1/day=1
-#### A masked pattern was here ####
+PREHOOK: query: ALTER TABLE header_footer_table_2 ADD PARTITION (year=2012, month=1, day=2) location 'hdfs://### HDFS PATH ###'
 PREHOOK: type: ALTERTABLE_ADDPARTS
-#### A masked pattern was here ####
+PREHOOK: Input: hdfs://### HDFS PATH ###
 PREHOOK: Output: default@header_footer_table_2
-#### A masked pattern was here ####
+POSTHOOK: query: ALTER TABLE header_footer_table_2 ADD PARTITION (year=2012, month=1, day=2) location 'hdfs://### HDFS PATH ###'
 POSTHOOK: type: ALTERTABLE_ADDPARTS
-#### A masked pattern was here ####
+POSTHOOK: Input: hdfs://### HDFS PATH ###
 POSTHOOK: Output: default@header_footer_table_2
 POSTHOOK: Output: default@header_footer_table_2@year=2012/month=1/day=2
-#### A masked pattern was here ####
+PREHOOK: query: ALTER TABLE header_footer_table_2 ADD PARTITION (year=2012, month=1, day=3) location 'hdfs://### HDFS PATH ###'
 PREHOOK: type: ALTERTABLE_ADDPARTS
-#### A masked pattern was here ####
+PREHOOK: Input: hdfs://### HDFS PATH ###
 PREHOOK: Output: default@header_footer_table_2
-#### A masked pattern was here ####
+POSTHOOK: query: ALTER TABLE header_footer_table_2 ADD PARTITION (year=2012, month=1, day=3) location 'hdfs://### HDFS PATH ###'
 POSTHOOK: type: ALTERTABLE_ADDPARTS
-#### A masked pattern was here ####
+POSTHOOK: Input: hdfs://### HDFS PATH ###
 POSTHOOK: Output: default@header_footer_table_2
 POSTHOOK: Output: default@header_footer_table_2@year=2012/month=1/day=3
 PREHOOK: query: SELECT * FROM header_footer_table_2
@@ -87,14 +87,14 @@ PREHOOK: Input: default@header_footer_table_2
 PREHOOK: Input: default@header_footer_table_2@year=2012/month=1/day=1
 PREHOOK: Input: default@header_footer_table_2@year=2012/month=1/day=2
 PREHOOK: Input: default@header_footer_table_2@year=2012/month=1/day=3
-#### A masked pattern was here ####
+PREHOOK: Output: hdfs://### HDFS PATH ###
 POSTHOOK: query: SELECT * FROM header_footer_table_2
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@header_footer_table_2
 POSTHOOK: Input: default@header_footer_table_2@year=2012/month=1/day=1
 POSTHOOK: Input: default@header_footer_table_2@year=2012/month=1/day=2
 POSTHOOK: Input: default@header_footer_table_2@year=2012/month=1/day=3
-#### A masked pattern was here ####
+POSTHOOK: Output: hdfs://### HDFS PATH ###
 steven hive    1       2012    1       1
 dave   oozie   2       2012    1       1
 xifa   phd     3       2012    1       1
@@ -112,14 +112,14 @@ PREHOOK: Input: default@header_footer_table_2
 PREHOOK: Input: default@header_footer_table_2@year=2012/month=1/day=1
 PREHOOK: Input: default@header_footer_table_2@year=2012/month=1/day=2
 PREHOOK: Input: default@header_footer_table_2@year=2012/month=1/day=3
-#### A masked pattern was here ####
+PREHOOK: Output: hdfs://### HDFS PATH ###
 POSTHOOK: query: SELECT * FROM header_footer_table_2 WHERE id < 50
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@header_footer_table_2
 POSTHOOK: Input: default@header_footer_table_2@year=2012/month=1/day=1
 POSTHOOK: Input: default@header_footer_table_2@year=2012/month=1/day=2
 POSTHOOK: Input: default@header_footer_table_2@year=2012/month=1/day=3
-#### A masked pattern was here ####
+POSTHOOK: Output: hdfs://### HDFS PATH ###
 steven hive    1       2012    1       1
 dave   oozie   2       2012    1       1
 xifa   phd     3       2012    1       1
@@ -133,30 +133,30 @@ shanyu2   senior  15      2012    1       2
 david3 oozie   22      2012    1       3
 #### A masked pattern was here ####
 PREHOOK: type: CREATETABLE
-#### A masked pattern was here ####
+PREHOOK: Input: hdfs://### HDFS PATH ###
 PREHOOK: Output: database:default
 PREHOOK: Output: default@emptytable
 #### A masked pattern was here ####
 POSTHOOK: type: CREATETABLE
-#### A masked pattern was here ####
+POSTHOOK: Input: hdfs://### HDFS PATH ###
 POSTHOOK: Output: database:default
 POSTHOOK: Output: default@emptytable
 PREHOOK: query: SELECT * FROM emptytable
 PREHOOK: type: QUERY
 PREHOOK: Input: default@emptytable
-#### A masked pattern was here ####
+PREHOOK: Output: hdfs://### HDFS PATH ###
 POSTHOOK: query: SELECT * FROM emptytable
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@emptytable
-#### A masked pattern was here ####
+POSTHOOK: Output: hdfs://### HDFS PATH ###
 PREHOOK: query: SELECT * FROM emptytable WHERE id < 50
 PREHOOK: type: QUERY
 PREHOOK: Input: default@emptytable
-#### A masked pattern was here ####
+PREHOOK: Output: hdfs://### HDFS PATH ###
 POSTHOOK: query: SELECT * FROM emptytable WHERE id < 50
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@emptytable
-#### A masked pattern was here ####
+POSTHOOK: Output: hdfs://### HDFS PATH ###
 PREHOOK: query: DROP TABLE header_footer_table_1
 PREHOOK: type: DROPTABLE
 PREHOOK: Input: default@header_footer_table_1

http://git-wip-us.apache.org/repos/asf/hive/blob/42527189/ql/src/test/results/clientpositive/spark/gen_udf_example_add10.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/spark/gen_udf_example_add10.q.out b/ql/src/test/results/clientpositive/spark/gen_udf_example_add10.q.out
index 53d982a..c277fdd 100644
--- a/ql/src/test/results/clientpositive/spark/gen_udf_example_add10.q.out
+++ b/ql/src/test/results/clientpositive/spark/gen_udf_example_add10.q.out
@@ -75,11 +75,11 @@ STAGE PLANS:
 PREHOOK: query: select example_add10(x) as a,example_add10(y) as b from t1 order by a desc,b limit 10
 PREHOOK: type: QUERY
 PREHOOK: Input: default@t1
-#### A masked pattern was here ####
+PREHOOK: Output: hdfs://### HDFS PATH ###
 POSTHOOK: query: select example_add10(x) as a,example_add10(y) as b from t1 order by a desc,b limit 10
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@t1
-#### A masked pattern was here ####
+POSTHOOK: Output: hdfs://### HDFS PATH ###
 18     28.0
 18     38.0
 17     27.0

http://git-wip-us.apache.org/repos/asf/hive/blob/42527189/ql/src/test/results/clientpositive/spark/import_exported_table.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/spark/import_exported_table.q.out b/ql/src/test/results/clientpositive/spark/import_exported_table.q.out
index 65d7480..68d3dd8 100644
--- a/ql/src/test/results/clientpositive/spark/import_exported_table.q.out
+++ b/ql/src/test/results/clientpositive/spark/import_exported_table.q.out
@@ -1,10 +1,10 @@
 #### A masked pattern was here ####
 PREHOOK: type: IMPORT
-#### A masked pattern was here ####
+PREHOOK: Input: hdfs://### HDFS PATH ###
 PREHOOK: Output: database:default
 #### A masked pattern was here ####
 POSTHOOK: type: IMPORT
-#### A masked pattern was here ####
+POSTHOOK: Input: hdfs://### HDFS PATH ###
 POSTHOOK: Output: database:default
 POSTHOOK: Output: default@j1_41
 PREHOOK: query: DESCRIBE j1_41
@@ -18,11 +18,11 @@ b                           int
 PREHOOK: query: SELECT * from j1_41
 PREHOOK: type: QUERY
 PREHOOK: Input: default@j1_41
-#### A masked pattern was here ####
+PREHOOK: Output: hdfs://### HDFS PATH ###
 POSTHOOK: query: SELECT * from j1_41
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@j1_41
-#### A masked pattern was here ####
+POSTHOOK: Output: hdfs://### HDFS PATH ###
 johndee        1
 burks  2
 #### A masked pattern was here ####
