http://git-wip-us.apache.org/repos/asf/hive/blob/3bbc24d2/ql/src/test/results/clientpositive/llap/cbo_rp_lineage2.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/llap/cbo_rp_lineage2.q.out 
b/ql/src/test/results/clientpositive/llap/cbo_rp_lineage2.q.out
index 44f9d68..ed63ddd 100644
--- a/ql/src/test/results/clientpositive/llap/cbo_rp_lineage2.q.out
+++ b/ql/src/test/results/clientpositive/llap/cbo_rp_lineage2.q.out
@@ -36,7 +36,7 @@ PREHOOK: query: insert into table dest1 select * from src2
 PREHOOK: type: QUERY
 PREHOOK: Input: default@src2
 PREHOOK: Output: default@dest1
-{"version":"1.0","engine":"tez","database":"default","hash":"ecc718a966d8887b18084a55dd96f0bc","queryText":"insert
 into table dest1 select * from 
src2","edges":[{"sources":[2],"targets":[0],"edgeType":"PROJECTION"},{"sources":[3],"targets":[1],"edgeType":"PROJECTION"}],"vertices":[{"id":0,"vertexType":"COLUMN","vertexId":"default.dest1.key"},{"id":1,"vertexType":"COLUMN","vertexId":"default.dest1.value"},{"id":2,"vertexType":"COLUMN","vertexId":"default.src2.key2"},{"id":3,"vertexType":"COLUMN","vertexId":"default.src2.value2"}]}
+{"version":"1.0","engine":"tez","database":"default","hash":"ecc718a966d8887b18084a55dd96f0bc","queryText":"insert
 into table dest1 select * from 
src2","edges":[{"sources":[2],"targets":[0],"edgeType":"PROJECTION"},{"sources":[3],"targets":[1],"edgeType":"PROJECTION"},{"sources":[2],"targets":[0],"expression":"compute_stats(default.src2.key2,
 
'hll')","edgeType":"PROJECTION"},{"sources":[3],"targets":[1],"expression":"compute_stats(default.src2.value2,
 
'hll')","edgeType":"PROJECTION"}],"vertices":[{"id":0,"vertexType":"COLUMN","vertexId":"default.dest1.key"},{"id":1,"vertexType":"COLUMN","vertexId":"default.dest1.value"},{"id":2,"vertexType":"COLUMN","vertexId":"default.src2.key2"},{"id":3,"vertexType":"COLUMN","vertexId":"default.src2.value2"}]}
 PREHOOK: query: select key k, dest1.value from dest1
 PREHOOK: type: QUERY
 PREHOOK: Input: default@dest1
@@ -467,20 +467,20 @@ PREHOOK: type: QUERY
 PREHOOK: Input: default@src1
 PREHOOK: Input: default@src2
 PREHOOK: Output: default@dest2
-{"version":"1.0","engine":"tez","database":"default","hash":"e494b771d94800dc3430bf5d0810cd9f","queryText":"insert
 overwrite table dest2 select * from src1 JOIN src2 ON src1.key = 
src2.key2","edges":[],"vertices":[]}
+{"version":"1.0","engine":"tez","database":"default","hash":"e494b771d94800dc3430bf5d0810cd9f","queryText":"insert
 overwrite table dest2 select * from src1 JOIN src2 ON src1.key = 
src2.key2","edges":[{"sources":[4],"targets":[0],"expression":"compute_stats(default.src1.key,
 
'hll')","edgeType":"PROJECTION"},{"sources":[5],"targets":[1],"expression":"compute_stats(default.src1.value,
 
'hll')","edgeType":"PROJECTION"},{"sources":[6],"targets":[2],"expression":"compute_stats(default.src2.key2,
 
'hll')","edgeType":"PROJECTION"},{"sources":[7],"targets":[3],"expression":"compute_stats(default.src2.value2,
 
'hll')","edgeType":"PROJECTION"},{"sources":[4],"targets":[0,1,2,3],"expression":"src1.key
 is not 
null","edgeType":"PREDICATE"},{"sources":[4,6],"targets":[0,1,2,3],"expression":"(src1.key
 = 
src2.key2)","edgeType":"PREDICATE"},{"sources":[6],"targets":[0,1,2,3],"expression":"src2.key2
 is not 
null","edgeType":"PREDICATE"}],"vertices":[{"id":0,"vertexType":"COLUMN","vertexId":"default.dest2.
 
key"},{"id":1,"vertexType":"COLUMN","vertexId":"default.dest2.value"},{"id":2,"vertexType":"COLUMN","vertexId":"default.dest2.key2"},{"id":3,"vertexType":"COLUMN","vertexId":"default.dest2.value2"},{"id":4,"vertexType":"COLUMN","vertexId":"default.src1.key"},{"id":5,"vertexType":"COLUMN","vertexId":"default.src1.value"},{"id":6,"vertexType":"COLUMN","vertexId":"default.src2.key2"},{"id":7,"vertexType":"COLUMN","vertexId":"default.src2.value2"}]}
 PREHOOK: query: insert into table dest2 select * from src1 JOIN src2 ON 
src1.key = src2.key2
 PREHOOK: type: QUERY
 PREHOOK: Input: default@src1
 PREHOOK: Input: default@src2
 PREHOOK: Output: default@dest2
-{"version":"1.0","engine":"tez","database":"default","hash":"efeaddd0d36105b1013b414627850dc2","queryText":"insert
 into table dest2 select * from src1 JOIN src2 ON src1.key = 
src2.key2","edges":[],"vertices":[]}
+{"version":"1.0","engine":"tez","database":"default","hash":"efeaddd0d36105b1013b414627850dc2","queryText":"insert
 into table dest2 select * from src1 JOIN src2 ON src1.key = 
src2.key2","edges":[{"sources":[4],"targets":[0],"expression":"compute_stats(default.src1.key,
 
'hll')","edgeType":"PROJECTION"},{"sources":[5],"targets":[1],"expression":"compute_stats(default.src1.value,
 
'hll')","edgeType":"PROJECTION"},{"sources":[6],"targets":[2],"expression":"compute_stats(default.src2.key2,
 
'hll')","edgeType":"PROJECTION"},{"sources":[7],"targets":[3],"expression":"compute_stats(default.src2.value2,
 
'hll')","edgeType":"PROJECTION"},{"sources":[4],"targets":[0,1,2,3],"expression":"src1.key
 is not 
null","edgeType":"PREDICATE"},{"sources":[4,6],"targets":[0,1,2,3],"expression":"(src1.key
 = 
src2.key2)","edgeType":"PREDICATE"},{"sources":[6],"targets":[0,1,2,3],"expression":"src2.key2
 is not 
null","edgeType":"PREDICATE"}],"vertices":[{"id":0,"vertexType":"COLUMN","vertexId":"default.dest2.key"}
 
,{"id":1,"vertexType":"COLUMN","vertexId":"default.dest2.value"},{"id":2,"vertexType":"COLUMN","vertexId":"default.dest2.key2"},{"id":3,"vertexType":"COLUMN","vertexId":"default.dest2.value2"},{"id":4,"vertexType":"COLUMN","vertexId":"default.src1.key"},{"id":5,"vertexType":"COLUMN","vertexId":"default.src1.value"},{"id":6,"vertexType":"COLUMN","vertexId":"default.src2.key2"},{"id":7,"vertexType":"COLUMN","vertexId":"default.src2.value2"}]}
 PREHOOK: query: insert into table dest2
   select * from src1 JOIN src2 ON length(src1.value) = length(src2.value2) + 1
 PREHOOK: type: QUERY
 PREHOOK: Input: default@src1
 PREHOOK: Input: default@src2
 PREHOOK: Output: default@dest2
-{"version":"1.0","engine":"tez","database":"default","hash":"e9450a56b3d103642e06bef0e4f0d482","queryText":"insert
 into table dest2\n  select * from src1 JOIN src2 ON length(src1.value) = 
length(src2.value2) + 1","edges":[],"vertices":[]}
+{"version":"1.0","engine":"tez","database":"default","hash":"e9450a56b3d103642e06bef0e4f0d482","queryText":"insert
 into table dest2\n  select * from src1 JOIN src2 ON length(src1.value) = 
length(src2.value2) + 
1","edges":[{"sources":[4],"targets":[0],"expression":"compute_stats(default.src1.key,
 
'hll')","edgeType":"PROJECTION"},{"sources":[5],"targets":[1],"expression":"compute_stats(default.src1.value,
 
'hll')","edgeType":"PROJECTION"},{"sources":[6],"targets":[2],"expression":"compute_stats(default.src2.key2,
 
'hll')","edgeType":"PROJECTION"},{"sources":[7],"targets":[3],"expression":"compute_stats(default.src2.value2,
 
'hll')","edgeType":"PROJECTION"},{"sources":[5],"targets":[0,1,2,3],"expression":"length(src1.value)
 is not 
null","edgeType":"PREDICATE"},{"sources":[5,7],"targets":[0,1,2,3],"expression":"(length(src1.value)
 = (length(src2.value2) + 
1))","edgeType":"PREDICATE"},{"sources":[7],"targets":[0,1,2,3],"expression":"length(src2.value2)
 is not null","edgeType":"PREDICATE"}],
 
"vertices":[{"id":0,"vertexType":"COLUMN","vertexId":"default.dest2.key"},{"id":1,"vertexType":"COLUMN","vertexId":"default.dest2.value"},{"id":2,"vertexType":"COLUMN","vertexId":"default.dest2.key2"},{"id":3,"vertexType":"COLUMN","vertexId":"default.dest2.value2"},{"id":4,"vertexType":"COLUMN","vertexId":"default.src1.key"},{"id":5,"vertexType":"COLUMN","vertexId":"default.src1.value"},{"id":6,"vertexType":"COLUMN","vertexId":"default.src2.key2"},{"id":7,"vertexType":"COLUMN","vertexId":"default.src2.value2"}]}
 PREHOOK: query: select * from src1 where length(key) > 2
 PREHOOK: type: QUERY
 PREHOOK: Input: default@src1
@@ -530,7 +530,7 @@ PREHOOK: type: QUERY
 PREHOOK: Input: default@src1
 PREHOOK: Input: default@src2
 PREHOOK: Output: default@dest2
-{"version":"1.0","engine":"tez","database":"default","hash":"76d84512204ddc576ad4d93f252e4358","queryText":"insert
 overwrite table dest2\n  select * from src1 JOIN src2 ON src1.key = src2.key2 
WHERE length(key) > 3","edges":[],"vertices":[]}
+{"version":"1.0","engine":"tez","database":"default","hash":"76d84512204ddc576ad4d93f252e4358","queryText":"insert
 overwrite table dest2\n  select * from src1 JOIN src2 ON src1.key = src2.key2 
WHERE length(key) > 
3","edges":[{"sources":[4],"targets":[0],"expression":"compute_stats(default.src1.key,
 
'hll')","edgeType":"PROJECTION"},{"sources":[5],"targets":[1],"expression":"compute_stats(default.src1.value,
 
'hll')","edgeType":"PROJECTION"},{"sources":[6],"targets":[2],"expression":"compute_stats(default.src2.key2,
 
'hll')","edgeType":"PROJECTION"},{"sources":[7],"targets":[3],"expression":"compute_stats(default.src2.value2,
 
'hll')","edgeType":"PROJECTION"},{"sources":[4],"targets":[0,1,2,3],"expression":"((length(src1.key)
 > 3) and src1.key is not 
null)","edgeType":"PREDICATE"},{"sources":[4,6],"targets":[0,1,2,3],"expression":"(src1.key
 = 
src2.key2)","edgeType":"PREDICATE"},{"sources":[6],"targets":[0,1,2,3],"expression":"((length(src2.key2)
 > 3) and src2.key2 is not null)","edgeType
 
":"PREDICATE"}],"vertices":[{"id":0,"vertexType":"COLUMN","vertexId":"default.dest2.key"},{"id":1,"vertexType":"COLUMN","vertexId":"default.dest2.value"},{"id":2,"vertexType":"COLUMN","vertexId":"default.dest2.key2"},{"id":3,"vertexType":"COLUMN","vertexId":"default.dest2.value2"},{"id":4,"vertexType":"COLUMN","vertexId":"default.src1.key"},{"id":5,"vertexType":"COLUMN","vertexId":"default.src1.value"},{"id":6,"vertexType":"COLUMN","vertexId":"default.src2.key2"},{"id":7,"vertexType":"COLUMN","vertexId":"default.src2.value2"}]}
 PREHOOK: query: drop table if exists dest_l1
 PREHOOK: type: DROPTABLE
 PREHOOK: query: CREATE TABLE dest_l1(key INT, value STRING) STORED AS TEXTFILE
@@ -552,7 +552,7 @@ PREHOOK: type: QUERY
 PREHOOK: Input: default@src
 PREHOOK: Input: default@src1
 PREHOOK: Output: default@dest_l1
-{"version":"1.0","engine":"tez","database":"default","hash":"60b589744e2527dd235a6c8168d6a653","queryText":"INSERT
 OVERWRITE TABLE dest_l1\nSELECT j.*\nFROM (SELECT t1.key, p1.value\n      FROM 
src1 t1\n      LEFT OUTER JOIN src p1\n      ON (t1.key = p1.key)\n      UNION 
ALL\n      SELECT t2.key, p2.value\n      FROM src1 t2\n      LEFT OUTER JOIN 
src p2\n      ON (t2.key = p2.key)) 
j","edges":[{"sources":[2],"targets":[0],"expression":"UDFToInteger(key)","edgeType":"PROJECTION"},{"sources":[3],"targets":[1],"expression":"value","edgeType":"PROJECTION"},{"sources":[4,2],"targets":[0,1],"expression":"(j-subquery1:_u1-subquery1:p1.key
 = 
j-subquery1:_u1-subquery1:t1.key)","edgeType":"PREDICATE"},{"sources":[4,2],"targets":[0,1],"expression":"(j-subquery2:_u1-subquery2:p2.key
 = 
j-subquery2:_u1-subquery2:t2.key)","edgeType":"PREDICATE"}],"vertices":[{"id":0,"vertexType":"COLUMN","vertexId":"default.dest_l1.key"},{"id":1,"vertexType":"COLUMN","vertexId":"default.dest_l1.value"},{"id":2,"
 
vertexType":"COLUMN","vertexId":"default.src1.key"},{"id":3,"vertexType":"COLUMN","vertexId":"default.src.value"},{"id":4,"vertexType":"COLUMN","vertexId":"default.src.key"}]}
+{"version":"1.0","engine":"tez","database":"default","hash":"60b589744e2527dd235a6c8168d6a653","queryText":"INSERT
 OVERWRITE TABLE dest_l1\nSELECT j.*\nFROM (SELECT t1.key, p1.value\n      FROM 
src1 t1\n      LEFT OUTER JOIN src p1\n      ON (t1.key = p1.key)\n      UNION 
ALL\n      SELECT t2.key, p2.value\n      FROM src1 t2\n      LEFT OUTER JOIN 
src p2\n      ON (t2.key = p2.key)) 
j","edges":[{"sources":[2],"targets":[0],"expression":"UDFToInteger(key)","edgeType":"PROJECTION"},{"sources":[3],"targets":[1],"expression":"value","edgeType":"PROJECTION"},{"sources":[4,2],"targets":[0,1],"expression":"(j-subquery1:_u1-subquery1:p1.key
 = 
j-subquery1:_u1-subquery1:t1.key)","edgeType":"PREDICATE"},{"sources":[4,2],"targets":[0,1],"expression":"(j-subquery2:_u1-subquery2:p2.key
 = 
j-subquery2:_u1-subquery2:t2.key)","edgeType":"PREDICATE"},{"sources":[2],"targets":[0],"expression":"compute_stats(UDFToInteger(key),
 'hll')","edgeType":"PROJECTION"},{"sources":[3],"targets":[1],"expression":"
 compute_stats(value, 
'hll')","edgeType":"PROJECTION"}],"vertices":[{"id":0,"vertexType":"COLUMN","vertexId":"default.dest_l1.key"},{"id":1,"vertexType":"COLUMN","vertexId":"default.dest_l1.value"},{"id":2,"vertexType":"COLUMN","vertexId":"default.src1.key"},{"id":3,"vertexType":"COLUMN","vertexId":"default.src.value"},{"id":4,"vertexType":"COLUMN","vertexId":"default.src.key"}]}
 PREHOOK: query: drop table if exists emp
 PREHOOK: type: DROPTABLE
 PREHOOK: query: drop table if exists dept
@@ -593,7 +593,7 @@ PREHOOK: Input: default@dept
 PREHOOK: Input: default@emp
 PREHOOK: Input: default@project
 PREHOOK: Output: default@tgt
-{"version":"1.0","engine":"tez","database":"default","hash":"f59797e0422d2e51515063374dfac361","queryText":"INSERT
 INTO TABLE tgt\nSELECT emd.dept_name, emd.name, emd.emp_id, emd.mgr_id, 
p.project_id, p.project_name\nFROM (\n  SELECT d.dept_name, em.name, em.emp_id, 
em.mgr_id, em.dept_id\n  FROM (\n    SELECT e.name, e.dept_id, e.emp_id emp_id, 
m.emp_id mgr_id\n    FROM emp e JOIN emp m ON e.emp_id = m.emp_id\n    ) em\n  
JOIN dept d ON d.dept_id = em.dept_id\n  ) emd JOIN project p ON emd.dept_id = 
p.project_id","edges":[{"sources":[6],"targets":[0],"edgeType":"PROJECTION"},{"sources":[7],"targets":[1],"edgeType":"PROJECTION"},{"sources":[8],"targets":[2,3],"edgeType":"PROJECTION"},{"sources":[9],"targets":[4],"edgeType":"PROJECTION"},{"sources":[10],"targets":[5],"edgeType":"PROJECTION"},{"sources":[8,11],"targets":[0,1,2,3,4,5],"expression":"(e.emp_id
 is not null and e.dept_id is not 
null)","edgeType":"PREDICATE"},{"sources":[8],"targets":[0,1,2,3,4,5],"expression":"(emd:em:e.emp
 _id = 
emd:em:m.emp_id)","edgeType":"PREDICATE"},{"sources":[8],"targets":[0,1,2,3,4,5],"expression":"m.emp_id
 is not 
null","edgeType":"PREDICATE"},{"sources":[11,12,9],"targets":[0,1,2,3,4,5],"expression":"(emd:em:e.dept_id
 = emd:d.dept_id AND emd:em:e.dept_id = 
p.project_id)","edgeType":"PREDICATE"},{"sources":[12],"targets":[0,1,2,3,4,5],"expression":"d.dept_id
 is not 
null","edgeType":"PREDICATE"},{"sources":[9],"targets":[0,1,2,3,4,5],"expression":"p.project_id
 is not 
null","edgeType":"PREDICATE"}],"vertices":[{"id":0,"vertexType":"COLUMN","vertexId":"default.tgt.dept_name"},{"id":1,"vertexType":"COLUMN","vertexId":"default.tgt.name"},{"id":2,"vertexType":"COLUMN","vertexId":"default.tgt.emp_id"},{"id":3,"vertexType":"COLUMN","vertexId":"default.tgt.mgr_id"},{"id":4,"vertexType":"COLUMN","vertexId":"default.tgt.proj_id"},{"id":5,"vertexType":"COLUMN","vertexId":"default.tgt.proj_name"},{"id":6,"vertexType":"COLUMN","vertexId":"default.dept.dept_name"},{"id":7,"vertexType":"COLUMN
 
","vertexId":"default.emp.name"},{"id":8,"vertexType":"COLUMN","vertexId":"default.emp.emp_id"},{"id":9,"vertexType":"COLUMN","vertexId":"default.project.project_id"},{"id":10,"vertexType":"COLUMN","vertexId":"default.project.project_name"},{"id":11,"vertexType":"COLUMN","vertexId":"default.emp.dept_id"},{"id":12,"vertexType":"COLUMN","vertexId":"default.dept.dept_id"}]}
+{"version":"1.0","engine":"tez","database":"default","hash":"f59797e0422d2e51515063374dfac361","queryText":"INSERT
 INTO TABLE tgt\nSELECT emd.dept_name, emd.name, emd.emp_id, emd.mgr_id, 
p.project_id, p.project_name\nFROM (\n  SELECT d.dept_name, em.name, em.emp_id, 
em.mgr_id, em.dept_id\n  FROM (\n    SELECT e.name, e.dept_id, e.emp_id emp_id, 
m.emp_id mgr_id\n    FROM emp e JOIN emp m ON e.emp_id = m.emp_id\n    ) em\n  
JOIN dept d ON d.dept_id = em.dept_id\n  ) emd JOIN project p ON emd.dept_id = 
p.project_id","edges":[{"sources":[6],"targets":[0],"edgeType":"PROJECTION"},{"sources":[7],"targets":[1],"edgeType":"PROJECTION"},{"sources":[8],"targets":[2,3],"edgeType":"PROJECTION"},{"sources":[9],"targets":[4],"edgeType":"PROJECTION"},{"sources":[10],"targets":[5],"edgeType":"PROJECTION"},{"sources":[8,11],"targets":[0,1,2,3,4,5],"expression":"(e.emp_id
 is not null and e.dept_id is not 
null)","edgeType":"PREDICATE"},{"sources":[8],"targets":[0,1,2,3,4,5],"expression":"(emd:em:e.emp
 _id = 
emd:em:m.emp_id)","edgeType":"PREDICATE"},{"sources":[8],"targets":[0,1,2,3,4,5],"expression":"m.emp_id
 is not 
null","edgeType":"PREDICATE"},{"sources":[11,12,9],"targets":[0,1,2,3,4,5],"expression":"(emd:em:e.dept_id
 = emd:d.dept_id AND emd:em:e.dept_id = 
p.project_id)","edgeType":"PREDICATE"},{"sources":[12],"targets":[0,1,2,3,4,5],"expression":"d.dept_id
 is not 
null","edgeType":"PREDICATE"},{"sources":[9],"targets":[0,1,2,3,4,5],"expression":"p.project_id
 is not 
null","edgeType":"PREDICATE"},{"sources":[6],"targets":[0],"expression":"compute_stats(default.dept.dept_name,
 
'hll')","edgeType":"PROJECTION"},{"sources":[7],"targets":[1],"expression":"compute_stats(default.emp.name,
 
'hll')","edgeType":"PROJECTION"},{"sources":[8],"targets":[2,3],"expression":"compute_stats(default.emp.emp_id,
 
'hll')","edgeType":"PROJECTION"},{"sources":[9],"targets":[4],"expression":"compute_stats(default.project.project_id,
 'hll')","edgeType":"PROJECTION"},{"sources":[10],"targets":[5],"expressi
 on":"compute_stats(default.project.project_name, 
'hll')","edgeType":"PROJECTION"}],"vertices":[{"id":0,"vertexType":"COLUMN","vertexId":"default.tgt.dept_name"},{"id":1,"vertexType":"COLUMN","vertexId":"default.tgt.name"},{"id":2,"vertexType":"COLUMN","vertexId":"default.tgt.emp_id"},{"id":3,"vertexType":"COLUMN","vertexId":"default.tgt.mgr_id"},{"id":4,"vertexType":"COLUMN","vertexId":"default.tgt.proj_id"},{"id":5,"vertexType":"COLUMN","vertexId":"default.tgt.proj_name"},{"id":6,"vertexType":"COLUMN","vertexId":"default.dept.dept_name"},{"id":7,"vertexType":"COLUMN","vertexId":"default.emp.name"},{"id":8,"vertexType":"COLUMN","vertexId":"default.emp.emp_id"},{"id":9,"vertexType":"COLUMN","vertexId":"default.project.project_id"},{"id":10,"vertexType":"COLUMN","vertexId":"default.project.project_name"},{"id":11,"vertexType":"COLUMN","vertexId":"default.emp.dept_id"},{"id":12,"vertexType":"COLUMN","vertexId":"default.dept.dept_id"}]}
 PREHOOK: query: drop table if exists dest_l2
 PREHOOK: type: DROPTABLE
 PREHOOK: query: create table dest_l2 (id int, c1 tinyint, c2 int, c3 bigint) 
stored as textfile
@@ -603,7 +603,7 @@ PREHOOK: Output: default@dest_l2
 PREHOOK: query: insert into dest_l2 values(0, 1, 100, 10000)
 PREHOOK: type: QUERY
 PREHOOK: Output: default@dest_l2
-{"version":"1.0","engine":"tez","database":"default","hash":"e001334e3f8384806b0f25a7c303045f","queryText":"insert
 into dest_l2 values(0, 1, 100, 
10000)","edges":[{"sources":[],"targets":[0],"expression":"UDFToInteger(tmp_values_col1)","edgeType":"PROJECTION"},{"sources":[],"targets":[1],"expression":"UDFToByte(tmp_values_col2)","edgeType":"PROJECTION"},{"sources":[],"targets":[2],"expression":"UDFToInteger(tmp_values_col3)","edgeType":"PROJECTION"},{"sources":[],"targets":[3],"expression":"UDFToLong(tmp_values_col4)","edgeType":"PROJECTION"}],"vertices":[{"id":0,"vertexType":"COLUMN","vertexId":"default.dest_l2.id"},{"id":1,"vertexType":"COLUMN","vertexId":"default.dest_l2.c1"},{"id":2,"vertexType":"COLUMN","vertexId":"default.dest_l2.c2"},{"id":3,"vertexType":"COLUMN","vertexId":"default.dest_l2.c3"}]}
+{"version":"1.0","engine":"tez","database":"default","hash":"e001334e3f8384806b0f25a7c303045f","queryText":"insert
 into dest_l2 values(0, 1, 100, 
10000)","edges":[{"sources":[],"targets":[0],"expression":"UDFToInteger(tmp_values_col1)","edgeType":"PROJECTION"},{"sources":[],"targets":[1],"expression":"UDFToByte(tmp_values_col2)","edgeType":"PROJECTION"},{"sources":[],"targets":[2],"expression":"UDFToInteger(tmp_values_col3)","edgeType":"PROJECTION"},{"sources":[],"targets":[3],"expression":"UDFToLong(tmp_values_col4)","edgeType":"PROJECTION"},{"sources":[],"targets":[0],"expression":"compute_stats(UDFToInteger(tmp_values_col1),
 
'hll')","edgeType":"PROJECTION"},{"sources":[],"targets":[1],"expression":"compute_stats(UDFToByte(tmp_values_col2),
 
'hll')","edgeType":"PROJECTION"},{"sources":[],"targets":[2],"expression":"compute_stats(UDFToInteger(tmp_values_col3),
 
'hll')","edgeType":"PROJECTION"},{"sources":[],"targets":[3],"expression":"compute_stats(UDFToLong(tmp_values_col4),
 'hll')"
 
,"edgeType":"PROJECTION"}],"vertices":[{"id":0,"vertexType":"COLUMN","vertexId":"default.dest_l2.id"},{"id":1,"vertexType":"COLUMN","vertexId":"default.dest_l2.c1"},{"id":2,"vertexType":"COLUMN","vertexId":"default.dest_l2.c2"},{"id":3,"vertexType":"COLUMN","vertexId":"default.dest_l2.c3"}]}
 PREHOOK: query: select * from (
   select c1 + c2 x from dest_l2
   union all
@@ -623,7 +623,7 @@ PREHOOK: Output: default@dest_l3
 PREHOOK: query: insert into dest_l3 values(0, "s1", "s2", 15)
 PREHOOK: type: QUERY
 PREHOOK: Output: default@dest_l3
-{"version":"1.0","engine":"tez","database":"default","hash":"09df51ba6ba2d07f2304523ee505f094","queryText":"insert
 into dest_l3 values(0, \"s1\", \"s2\", 
15)","edges":[{"sources":[],"targets":[0],"expression":"UDFToInteger(tmp_values_col1)","edgeType":"PROJECTION"},{"sources":[],"targets":[1,2],"edgeType":"PROJECTION"},{"sources":[],"targets":[3],"expression":"UDFToInteger(tmp_values_col4)","edgeType":"PROJECTION"}],"vertices":[{"id":0,"vertexType":"COLUMN","vertexId":"default.dest_l3.id"},{"id":1,"vertexType":"COLUMN","vertexId":"default.dest_l3.c1"},{"id":2,"vertexType":"COLUMN","vertexId":"default.dest_l3.c2"},{"id":3,"vertexType":"COLUMN","vertexId":"default.dest_l3.c3"}]}
+{"version":"1.0","engine":"tez","database":"default","hash":"09df51ba6ba2d07f2304523ee505f094","queryText":"insert
 into dest_l3 values(0, \"s1\", \"s2\", 
15)","edges":[{"sources":[],"targets":[0],"expression":"UDFToInteger(tmp_values_col1)","edgeType":"PROJECTION"},{"sources":[],"targets":[1,2],"edgeType":"PROJECTION"},{"sources":[],"targets":[3],"expression":"UDFToInteger(tmp_values_col4)","edgeType":"PROJECTION"},{"sources":[],"targets":[0],"expression":"compute_stats(UDFToInteger(tmp_values_col1),
 
'hll')","edgeType":"PROJECTION"},{"sources":[],"targets":[1],"expression":"compute_stats(default.values__tmp__table__2.tmp_values_col2,
 
'hll')","edgeType":"PROJECTION"},{"sources":[],"targets":[2],"expression":"compute_stats(default.values__tmp__table__2.tmp_values_col3,
 
'hll')","edgeType":"PROJECTION"},{"sources":[],"targets":[3],"expression":"compute_stats(UDFToInteger(tmp_values_col4),
 
'hll')","edgeType":"PROJECTION"}],"vertices":[{"id":0,"vertexType":"COLUMN","vertexId":"default.des
 
t_l3.id"},{"id":1,"vertexType":"COLUMN","vertexId":"default.dest_l3.c1"},{"id":2,"vertexType":"COLUMN","vertexId":"default.dest_l3.c2"},{"id":3,"vertexType":"COLUMN","vertexId":"default.dest_l3.c3"}]}
 PREHOOK: query: select sum(a.c1) over (partition by a.c1 order by a.id)
 from dest_l2 a
 where a.c2 != 10

http://git-wip-us.apache.org/repos/asf/hive/blob/3bbc24d2/ql/src/test/results/clientpositive/llap/column_access_stats.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/llap/column_access_stats.q.out 
b/ql/src/test/results/clientpositive/llap/column_access_stats.q.out
index 6f02442..7929645 100644
--- a/ql/src/test/results/clientpositive/llap/column_access_stats.q.out
+++ b/ql/src/test/results/clientpositive/llap/column_access_stats.q.out
@@ -406,19 +406,19 @@ STAGE PLANS:
             Map Operator Tree:
                 TableScan
                   alias: t2
-                  Statistics: Num rows: 5 Data size: 920 Basic stats: COMPLETE 
Column stats: NONE
+                  Statistics: Num rows: 5 Data size: 425 Basic stats: COMPLETE 
Column stats: COMPLETE
                   Filter Operator
                     predicate: key is not null (type: boolean)
-                    Statistics: Num rows: 5 Data size: 920 Basic stats: 
COMPLETE Column stats: NONE
+                    Statistics: Num rows: 5 Data size: 425 Basic stats: 
COMPLETE Column stats: COMPLETE
                     Select Operator
                       expressions: key (type: string)
                       outputColumnNames: _col0
-                      Statistics: Num rows: 5 Data size: 920 Basic stats: 
COMPLETE Column stats: NONE
+                      Statistics: Num rows: 5 Data size: 425 Basic stats: 
COMPLETE Column stats: COMPLETE
                       Reduce Output Operator
                         key expressions: _col0 (type: string)
                         sort order: +
                         Map-reduce partition columns: _col0 (type: string)
-                        Statistics: Num rows: 5 Data size: 920 Basic stats: 
COMPLETE Column stats: NONE
+                        Statistics: Num rows: 5 Data size: 425 Basic stats: 
COMPLETE Column stats: COMPLETE
             Execution mode: llap
             LLAP IO: no inputs
         Reducer 2 
@@ -431,10 +431,10 @@ STAGE PLANS:
                   0 _col0 (type: string)
                   1 _col0 (type: string)
                 outputColumnNames: _col0
-                Statistics: Num rows: 5 Data size: 1012 Basic stats: COMPLETE 
Column stats: NONE
+                Statistics: Num rows: 5 Data size: 467 Basic stats: COMPLETE 
Column stats: NONE
                 File Output Operator
                   compressed: false
-                  Statistics: Num rows: 5 Data size: 1012 Basic stats: 
COMPLETE Column stats: NONE
+                  Statistics: Num rows: 5 Data size: 467 Basic stats: COMPLETE 
Column stats: NONE
                   table:
                       input format: 
org.apache.hadoop.mapred.SequenceFileInputFormat
                       output format: 
org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
@@ -538,19 +538,19 @@ STAGE PLANS:
             Map Operator Tree:
                 TableScan
                   alias: t2
-                  Statistics: Num rows: 5 Data size: 1840 Basic stats: 
COMPLETE Column stats: NONE
+                  Statistics: Num rows: 5 Data size: 850 Basic stats: COMPLETE 
Column stats: COMPLETE
                   Filter Operator
                     predicate: ((UDFToDouble(val) = 3.0) and key is not null) 
(type: boolean)
-                    Statistics: Num rows: 2 Data size: 736 Basic stats: 
COMPLETE Column stats: NONE
+                    Statistics: Num rows: 2 Data size: 340 Basic stats: 
COMPLETE Column stats: COMPLETE
                     Select Operator
                       expressions: key (type: string), val (type: string)
                       outputColumnNames: _col0, _col1
-                      Statistics: Num rows: 2 Data size: 736 Basic stats: 
COMPLETE Column stats: NONE
+                      Statistics: Num rows: 2 Data size: 340 Basic stats: 
COMPLETE Column stats: COMPLETE
                       Reduce Output Operator
                         key expressions: _col0 (type: string)
                         sort order: +
                         Map-reduce partition columns: _col0 (type: string)
-                        Statistics: Num rows: 2 Data size: 736 Basic stats: 
COMPLETE Column stats: NONE
+                        Statistics: Num rows: 2 Data size: 340 Basic stats: 
COMPLETE Column stats: COMPLETE
                         value expressions: _col1 (type: string)
             Execution mode: llap
             LLAP IO: no inputs
@@ -564,10 +564,10 @@ STAGE PLANS:
                   0 _col0 (type: string)
                   1 _col0 (type: string)
                 outputColumnNames: _col0, _col1, _col2, _col3
-                Statistics: Num rows: 2 Data size: 809 Basic stats: COMPLETE 
Column stats: NONE
+                Statistics: Num rows: 2 Data size: 374 Basic stats: COMPLETE 
Column stats: NONE
                 File Output Operator
                   compressed: false
-                  Statistics: Num rows: 2 Data size: 809 Basic stats: COMPLETE 
Column stats: NONE
+                  Statistics: Num rows: 2 Data size: 374 Basic stats: COMPLETE 
Column stats: NONE
                   table:
                       input format: 
org.apache.hadoop.mapred.SequenceFileInputFormat
                       output format: 
org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
@@ -639,19 +639,19 @@ STAGE PLANS:
             Map Operator Tree:
                 TableScan
                   alias: t2
-                  Statistics: Num rows: 5 Data size: 1840 Basic stats: 
COMPLETE Column stats: NONE
+                  Statistics: Num rows: 5 Data size: 850 Basic stats: COMPLETE 
Column stats: COMPLETE
                   Filter Operator
                     predicate: ((UDFToDouble(key) = 6.0) and val is not null) 
(type: boolean)
-                    Statistics: Num rows: 2 Data size: 736 Basic stats: 
COMPLETE Column stats: NONE
+                    Statistics: Num rows: 2 Data size: 340 Basic stats: 
COMPLETE Column stats: COMPLETE
                     Select Operator
                       expressions: val (type: string)
                       outputColumnNames: _col0
-                      Statistics: Num rows: 2 Data size: 736 Basic stats: 
COMPLETE Column stats: NONE
+                      Statistics: Num rows: 2 Data size: 170 Basic stats: 
COMPLETE Column stats: COMPLETE
                       Reduce Output Operator
                         key expressions: _col0 (type: string)
                         sort order: +
                         Map-reduce partition columns: _col0 (type: string)
-                        Statistics: Num rows: 2 Data size: 736 Basic stats: 
COMPLETE Column stats: NONE
+                        Statistics: Num rows: 2 Data size: 170 Basic stats: 
COMPLETE Column stats: COMPLETE
             Execution mode: llap
             LLAP IO: no inputs
         Reducer 2 
@@ -664,10 +664,10 @@ STAGE PLANS:
                   0 _col0 (type: string)
                   1 _col0 (type: string)
                 outputColumnNames: _col0
-                Statistics: Num rows: 2 Data size: 809 Basic stats: COMPLETE 
Column stats: NONE
+                Statistics: Num rows: 2 Data size: 187 Basic stats: COMPLETE 
Column stats: NONE
                 File Output Operator
                   compressed: false
-                  Statistics: Num rows: 2 Data size: 809 Basic stats: COMPLETE 
Column stats: NONE
+                  Statistics: Num rows: 2 Data size: 187 Basic stats: COMPLETE 
Column stats: NONE
                   table:
                       input format: 
org.apache.hadoop.mapred.SequenceFileInputFormat
                       output format: 
org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
@@ -734,19 +734,19 @@ STAGE PLANS:
             Map Operator Tree:
                 TableScan
                   alias: t2
-                  Statistics: Num rows: 5 Data size: 920 Basic stats: COMPLETE 
Column stats: NONE
+                  Statistics: Num rows: 5 Data size: 425 Basic stats: COMPLETE 
Column stats: COMPLETE
                   Filter Operator
                     predicate: key is not null (type: boolean)
-                    Statistics: Num rows: 5 Data size: 920 Basic stats: 
COMPLETE Column stats: NONE
+                    Statistics: Num rows: 5 Data size: 425 Basic stats: 
COMPLETE Column stats: COMPLETE
                     Select Operator
                       expressions: key (type: string)
                       outputColumnNames: _col0
-                      Statistics: Num rows: 5 Data size: 920 Basic stats: 
COMPLETE Column stats: NONE
+                      Statistics: Num rows: 5 Data size: 425 Basic stats: 
COMPLETE Column stats: COMPLETE
                       Reduce Output Operator
                         key expressions: _col0 (type: string)
                         sort order: +
                         Map-reduce partition columns: _col0 (type: string)
-                        Statistics: Num rows: 5 Data size: 920 Basic stats: 
COMPLETE Column stats: NONE
+                        Statistics: Num rows: 5 Data size: 425 Basic stats: 
COMPLETE Column stats: COMPLETE
             Execution mode: llap
             LLAP IO: no inputs
         Map 4 
@@ -772,19 +772,19 @@ STAGE PLANS:
             Map Operator Tree:
                 TableScan
                   alias: t3
-                  Statistics: Num rows: 5 Data size: 1840 Basic stats: 
COMPLETE Column stats: NONE
+                  Statistics: Num rows: 5 Data size: 865 Basic stats: COMPLETE 
Column stats: COMPLETE
                   Filter Operator
                     predicate: key is not null (type: boolean)
-                    Statistics: Num rows: 5 Data size: 1840 Basic stats: 
COMPLETE Column stats: NONE
+                    Statistics: Num rows: 5 Data size: 865 Basic stats: 
COMPLETE Column stats: COMPLETE
                     Select Operator
                       expressions: key (type: string), val (type: string)
                       outputColumnNames: _col0, _col1
-                      Statistics: Num rows: 5 Data size: 1840 Basic stats: 
COMPLETE Column stats: NONE
+                      Statistics: Num rows: 5 Data size: 865 Basic stats: 
COMPLETE Column stats: COMPLETE
                       Reduce Output Operator
                         key expressions: _col0 (type: string)
                         sort order: +
                         Map-reduce partition columns: _col0 (type: string)
-                        Statistics: Num rows: 5 Data size: 1840 Basic stats: 
COMPLETE Column stats: NONE
+                        Statistics: Num rows: 5 Data size: 865 Basic stats: 
COMPLETE Column stats: COMPLETE
                         value expressions: _col1 (type: string)
             Execution mode: llap
             LLAP IO: no inputs
@@ -798,16 +798,16 @@ STAGE PLANS:
                   0 _col0 (type: string)
                   1 _col0 (type: string)
                 outputColumnNames: _col1
-                Statistics: Num rows: 5 Data size: 1012 Basic stats: COMPLETE 
Column stats: NONE
+                Statistics: Num rows: 5 Data size: 467 Basic stats: COMPLETE 
Column stats: NONE
                 Select Operator
                   expressions: _col1 (type: string)
                   outputColumnNames: _col0
-                  Statistics: Num rows: 5 Data size: 1012 Basic stats: 
COMPLETE Column stats: NONE
+                  Statistics: Num rows: 5 Data size: 467 Basic stats: COMPLETE 
Column stats: NONE
                   Reduce Output Operator
                     key expressions: _col0 (type: string)
                     sort order: +
                     Map-reduce partition columns: _col0 (type: string)
-                    Statistics: Num rows: 5 Data size: 1012 Basic stats: 
COMPLETE Column stats: NONE
+                    Statistics: Num rows: 5 Data size: 467 Basic stats: 
COMPLETE Column stats: NONE
         Reducer 3 
             Execution mode: llap
             Reduce Operator Tree:
@@ -818,10 +818,10 @@ STAGE PLANS:
                   0 _col0 (type: string)
                   1 _col0 (type: string)
                 outputColumnNames: _col0, _col1, _col2
-                Statistics: Num rows: 5 Data size: 1113 Basic stats: COMPLETE 
Column stats: NONE
+                Statistics: Num rows: 5 Data size: 513 Basic stats: COMPLETE 
Column stats: NONE
                 File Output Operator
                   compressed: false
-                  Statistics: Num rows: 5 Data size: 1113 Basic stats: 
COMPLETE Column stats: NONE
+                  Statistics: Num rows: 5 Data size: 513 Basic stats: COMPLETE 
Column stats: NONE
                   table:
                       input format: 
org.apache.hadoop.mapred.SequenceFileInputFormat
                       output format: 
org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat

http://git-wip-us.apache.org/repos/asf/hive/blob/3bbc24d2/ql/src/test/results/clientpositive/llap/column_table_stats_orc.q.out
----------------------------------------------------------------------
diff --git 
a/ql/src/test/results/clientpositive/llap/column_table_stats_orc.q.out 
b/ql/src/test/results/clientpositive/llap/column_table_stats_orc.q.out
index 6e71803..56b376e 100644
--- a/ql/src/test/results/clientpositive/llap/column_table_stats_orc.q.out
+++ b/ql/src/test/results/clientpositive/llap/column_table_stats_orc.q.out
@@ -35,7 +35,7 @@ Retention:            0
 #### A masked pattern was here ####
 Table Type:            MANAGED_TABLE            
 Table Parameters:               
-       COLUMN_STATS_ACCURATE   {\"BASIC_STATS\":\"true\"}
+       COLUMN_STATS_ACCURATE   
{\"BASIC_STATS\":\"true\",\"COLUMN_STATS\":{\"key\":\"true\",\"value\":\"true\"}}
        numFiles                1                   
        numRows                 1                   
        rawDataSize             170                 
@@ -72,22 +72,22 @@ STAGE PLANS:
             Map Operator Tree:
                 TableScan
                   alias: s
-                  Statistics: Num rows: 1 Data size: 368 Basic stats: COMPLETE 
Column stats: NONE
+                  Statistics: Num rows: 1 Data size: 170 Basic stats: COMPLETE 
Column stats: COMPLETE
                   Statistics Aggregation Key Prefix: default.s/
                   GatherStats: true
                   Select Operator
                     expressions: key (type: string), value (type: string)
                     outputColumnNames: key, value
-                    Statistics: Num rows: 1 Data size: 368 Basic stats: 
COMPLETE Column stats: NONE
+                    Statistics: Num rows: 1 Data size: 170 Basic stats: 
COMPLETE Column stats: COMPLETE
                     Group By Operator
                       aggregations: compute_stats(key, 'hll'), 
compute_stats(value, 'hll')
                       mode: hash
                       outputColumnNames: _col0, _col1
-                      Statistics: Num rows: 1 Data size: 1248 Basic stats: 
COMPLETE Column stats: NONE
+                      Statistics: Num rows: 1 Data size: 880 Basic stats: 
COMPLETE Column stats: COMPLETE
                       Reduce Output Operator
                         null sort order: 
                         sort order: 
-                        Statistics: Num rows: 1 Data size: 1248 Basic stats: 
COMPLETE Column stats: NONE
+                        Statistics: Num rows: 1 Data size: 880 Basic stats: 
COMPLETE Column stats: COMPLETE
                         tag: -1
                         value expressions: _col0 (type: 
struct<columntype:string,maxlength:bigint,sumlength:bigint,count:bigint,countnulls:bigint,bitvector:binary>),
 _col1 (type: 
struct<columntype:string,maxlength:bigint,sumlength:bigint,count:bigint,countnulls:bigint,bitvector:binary>)
                         auto parallelism: false
@@ -102,7 +102,7 @@ STAGE PLANS:
                   input format: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
                   output format: 
org.apache.hadoop.hive.ql.io.orc.OrcOutputFormat
                   properties:
-                    COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
+                    COLUMN_STATS_ACCURATE 
{"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
                     bucket_count -1
                     column.name.delimiter ,
                     columns key,value
@@ -123,7 +123,7 @@ STAGE PLANS:
                     input format: 
org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
                     output format: 
org.apache.hadoop.hive.ql.io.orc.OrcOutputFormat
                     properties:
-                      COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
+                      COLUMN_STATS_ACCURATE 
{"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
                       bucket_count -1
                       column.name.delimiter ,
                       columns key,value
@@ -152,13 +152,13 @@ STAGE PLANS:
                 aggregations: compute_stats(VALUE._col0), 
compute_stats(VALUE._col1)
                 mode: mergepartial
                 outputColumnNames: _col0, _col1
-                Statistics: Num rows: 1 Data size: 1248 Basic stats: COMPLETE 
Column stats: NONE
+                Statistics: Num rows: 1 Data size: 880 Basic stats: COMPLETE 
Column stats: COMPLETE
                 File Output Operator
                   compressed: false
                   GlobalTableId: 0
 #### A masked pattern was here ####
                   NumFilesPerFileSink: 1
-                  Statistics: Num rows: 1 Data size: 1248 Basic stats: 
COMPLETE Column stats: NONE
+                  Statistics: Num rows: 1 Data size: 880 Basic stats: COMPLETE 
Column stats: COMPLETE
 #### A masked pattern was here ####
                   table:
                       input format: 
org.apache.hadoop.mapred.SequenceFileInputFormat
@@ -321,13 +321,13 @@ STAGE PLANS:
             Map Operator Tree:
                 TableScan
                   alias: spart
-                  Statistics: Num rows: 2 Data size: 1812 Basic stats: 
COMPLETE Column stats: PARTIAL
+                  Statistics: Num rows: 2 Data size: 1076 Basic stats: 
COMPLETE Column stats: PARTIAL
                   Statistics Aggregation Key Prefix: default.spart/
                   GatherStats: true
                   Select Operator
                     expressions: key (type: string), value (type: string), ds 
(type: string), hr (type: string)
                     outputColumnNames: key, value, ds, hr
-                    Statistics: Num rows: 2 Data size: 1812 Basic stats: 
COMPLETE Column stats: PARTIAL
+                    Statistics: Num rows: 2 Data size: 1076 Basic stats: 
COMPLETE Column stats: PARTIAL
                     Group By Operator
                       aggregations: compute_stats(key, 'hll'), 
compute_stats(value, 'hll')
                       keys: ds (type: string), hr (type: string)
@@ -357,7 +357,7 @@ STAGE PLANS:
                     ds 2008-04-08
                     hr 11
                   properties:
-                    COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
+                    COLUMN_STATS_ACCURATE 
{"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
                     bucket_count -1
 #### A masked pattern was here ####
                     name default.spart
@@ -401,7 +401,7 @@ STAGE PLANS:
                     ds 2008-04-08
                     hr 12
                   properties:
-                    COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
+                    COLUMN_STATS_ACCURATE 
{"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
                     bucket_count -1
 #### A masked pattern was here ####
                     name default.spart
@@ -715,13 +715,13 @@ STAGE PLANS:
             Map Operator Tree:
                 TableScan
                   alias: spart
-                  Statistics: Num rows: 1 Data size: 722 Basic stats: COMPLETE 
Column stats: PARTIAL
+                  Statistics: Num rows: 1 Data size: 354 Basic stats: COMPLETE 
Column stats: PARTIAL
                   Statistics Aggregation Key Prefix: default.spart/
                   GatherStats: true
                   Select Operator
                     expressions: key (type: string), value (type: string), ds 
(type: string)
                     outputColumnNames: key, value, ds
-                    Statistics: Num rows: 1 Data size: 722 Basic stats: 
COMPLETE Column stats: PARTIAL
+                    Statistics: Num rows: 1 Data size: 354 Basic stats: 
COMPLETE Column stats: PARTIAL
                     Group By Operator
                       aggregations: compute_stats(key, 'hll'), 
compute_stats(value, 'hll')
                       keys: ds (type: string), '11' (type: string)
@@ -751,7 +751,7 @@ STAGE PLANS:
                     ds 2008-04-08
                     hr 11
                   properties:
-                    COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
+                    COLUMN_STATS_ACCURATE 
{"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
                     bucket_count -1
 #### A masked pattern was here ####
                     name default.spart
@@ -947,7 +947,7 @@ Database:                   default
 Table:                 spart                    
 #### A masked pattern was here ####
 Partition Parameters:           
-       COLUMN_STATS_ACCURATE   {\"BASIC_STATS\":\"true\"}
+       COLUMN_STATS_ACCURATE   
{\"BASIC_STATS\":\"true\",\"COLUMN_STATS\":{\"key\":\"true\",\"value\":\"true\"}}
        numFiles                1                   
        numRows                 1                   
        rawDataSize             170                 

http://git-wip-us.apache.org/repos/asf/hive/blob/3bbc24d2/ql/src/test/results/clientpositive/llap/columnstats_part_coltype.q.out
----------------------------------------------------------------------
diff --git 
a/ql/src/test/results/clientpositive/llap/columnstats_part_coltype.q.out 
b/ql/src/test/results/clientpositive/llap/columnstats_part_coltype.q.out
index 72f9aa2..fff076e 100644
--- a/ql/src/test/results/clientpositive/llap/columnstats_part_coltype.q.out
+++ b/ql/src/test/results/clientpositive/llap/columnstats_part_coltype.q.out
@@ -116,16 +116,36 @@ PREHOOK: Input: default@partcolstats
 POSTHOOK: query: describe formatted partcolstats partition (ds=date 
'2015-04-02', hr=2, part='partB') key
 POSTHOOK: type: DESCTABLE
 POSTHOOK: Input: default@partcolstats
-# col_name             data_type               comment                         
                                                         
-key                    int                     from deserializer               
                                                         
+col_name               key                                                     
                                         
+data_type              int                                                     
                                         
+min                    27                                                      
                                         
+max                    484                                                     
                                         
+num_nulls              0                                                       
                                         
+distinct_count         20                                                      
                                         
+avg_col_len                                                                    
                                         
+max_col_len                                                                    
                                         
+num_trues                                                                      
                                         
+num_falses                                                                     
                                         
+bitVector              HL                                                      
                                         
+comment                from deserializer                                       
                                         
 PREHOOK: query: describe formatted partcolstats partition (ds=date 
'2015-04-02', hr=2, part='partB') value
 PREHOOK: type: DESCTABLE
 PREHOOK: Input: default@partcolstats
 POSTHOOK: query: describe formatted partcolstats partition (ds=date 
'2015-04-02', hr=2, part='partB') value
 POSTHOOK: type: DESCTABLE
 POSTHOOK: Input: default@partcolstats
-# col_name             data_type               comment                         
                                                         
-value                  string                  from deserializer               
                                                         
+col_name               value                                                   
                                         
+data_type              string                                                  
                                         
+min                                                                            
                                         
+max                                                                            
                                         
+num_nulls              0                                                       
                                         
+distinct_count         20                                                      
                                         
+avg_col_len            6.8                                                     
                                         
+max_col_len            7                                                       
                                         
+num_trues                                                                      
                                         
+num_falses                                                                     
                                         
+bitVector              HL                                                      
                                         
+comment                from deserializer                                       
                                         
 PREHOOK: query: analyze table partcolstats partition (ds=date '2015-04-02', 
hr=2, part) compute statistics for columns
 PREHOOK: type: QUERY
 PREHOOK: Input: default@partcolstats
@@ -186,16 +206,36 @@ PREHOOK: Input: default@partcolstats
 POSTHOOK: query: describe formatted partcolstats partition (ds=date 
'2015-04-02', hr=3, part='partA') key
 POSTHOOK: type: DESCTABLE
 POSTHOOK: Input: default@partcolstats
-# col_name             data_type               comment                         
                                                         
-key                    int                     from deserializer               
                                                         
+col_name               key                                                     
                                         
+data_type              int                                                     
                                         
+min                    27                                                      
                                         
+max                    495                                                     
                                         
+num_nulls              0                                                       
                                         
+distinct_count         30                                                      
                                         
+avg_col_len                                                                    
                                         
+max_col_len                                                                    
                                         
+num_trues                                                                      
                                         
+num_falses                                                                     
                                         
+bitVector              HL                                                      
                                         
+comment                from deserializer                                       
                                         
 PREHOOK: query: describe formatted partcolstats partition (ds=date 
'2015-04-02', hr=3, part='partA') value
 PREHOOK: type: DESCTABLE
 PREHOOK: Input: default@partcolstats
 POSTHOOK: query: describe formatted partcolstats partition (ds=date 
'2015-04-02', hr=3, part='partA') value
 POSTHOOK: type: DESCTABLE
 POSTHOOK: Input: default@partcolstats
-# col_name             data_type               comment                         
                                                         
-value                  string                  from deserializer               
                                                         
+col_name               value                                                   
                                         
+data_type              string                                                  
                                         
+min                                                                            
                                         
+max                                                                            
                                         
+num_nulls              0                                                       
                                         
+distinct_count         30                                                      
                                         
+avg_col_len            6.833333333333333                                       
                                         
+max_col_len            7                                                       
                                         
+num_trues                                                                      
                                         
+num_falses                                                                     
                                         
+bitVector              HL                                                      
                                         
+comment                from deserializer                                       
                                         
 PREHOOK: query: analyze table partcolstats partition (ds=date '2015-04-02', 
hr, part) compute statistics for columns
 PREHOOK: type: QUERY
 PREHOOK: Input: default@partcolstats
@@ -260,32 +300,72 @@ PREHOOK: Input: default@partcolstats
 POSTHOOK: query: describe formatted partcolstats partition (ds=date 
'2015-04-03', hr=3, part='partA') key
 POSTHOOK: type: DESCTABLE
 POSTHOOK: Input: default@partcolstats
-# col_name             data_type               comment                         
                                                         
-key                    int                     from deserializer               
                                                         
+col_name               key                                                     
                                         
+data_type              int                                                     
                                         
+min                    15                                                      
                                         
+max                    495                                                     
                                         
+num_nulls              0                                                       
                                         
+distinct_count         40                                                      
                                         
+avg_col_len                                                                    
                                         
+max_col_len                                                                    
                                         
+num_trues                                                                      
                                         
+num_falses                                                                     
                                         
+bitVector              HL                                                      
                                         
+comment                from deserializer                                       
                                         
 PREHOOK: query: describe formatted partcolstats partition (ds=date 
'2015-04-03', hr=3, part='partA') value
 PREHOOK: type: DESCTABLE
 PREHOOK: Input: default@partcolstats
 POSTHOOK: query: describe formatted partcolstats partition (ds=date 
'2015-04-03', hr=3, part='partA') value
 POSTHOOK: type: DESCTABLE
 POSTHOOK: Input: default@partcolstats
-# col_name             data_type               comment                         
                                                         
-value                  string                  from deserializer               
                                                         
+col_name               value                                                   
                                         
+data_type              string                                                  
                                         
+min                                                                            
                                         
+max                                                                            
                                         
+num_nulls              0                                                       
                                         
+distinct_count         40                                                      
                                         
+avg_col_len            6.825                                                   
                                         
+max_col_len            7                                                       
                                         
+num_trues                                                                      
                                         
+num_falses                                                                     
                                         
+bitVector              HL                                                      
                                         
+comment                from deserializer                                       
                                         
 PREHOOK: query: describe formatted partcolstats partition (ds=date 
'2015-04-03', hr=3, part='partB') key
 PREHOOK: type: DESCTABLE
 PREHOOK: Input: default@partcolstats
 POSTHOOK: query: describe formatted partcolstats partition (ds=date 
'2015-04-03', hr=3, part='partB') key
 POSTHOOK: type: DESCTABLE
 POSTHOOK: Input: default@partcolstats
-# col_name             data_type               comment                         
                                                         
-key                    int                     from deserializer               
                                                         
+col_name               key                                                     
                                         
+data_type              int                                                     
                                         
+min                    15                                                      
                                         
+max                    495                                                     
                                         
+num_nulls              0                                                       
                                         
+distinct_count         58                                                      
                                         
+avg_col_len                                                                    
                                         
+max_col_len                                                                    
                                         
+num_trues                                                                      
                                         
+num_falses                                                                     
                                         
+bitVector              HL                                                      
                                         
+comment                from deserializer                                       
                                         
 PREHOOK: query: describe formatted partcolstats partition (ds=date 
'2015-04-03', hr=3, part='partB') value
 PREHOOK: type: DESCTABLE
 PREHOOK: Input: default@partcolstats
 POSTHOOK: query: describe formatted partcolstats partition (ds=date 
'2015-04-03', hr=3, part='partB') value
 POSTHOOK: type: DESCTABLE
 POSTHOOK: Input: default@partcolstats
-# col_name             data_type               comment                         
                                                         
-value                  string                  from deserializer               
                                                         
+col_name               value                                                   
                                         
+data_type              string                                                  
                                         
+min                                                                            
                                         
+max                                                                            
                                         
+num_nulls              0                                                       
                                         
+distinct_count         58                                                      
                                         
+avg_col_len            6.883333333333334                                       
                                         
+max_col_len            7                                                       
                                         
+num_trues                                                                      
                                         
+num_falses                                                                     
                                         
+bitVector              HL                                                      
                                         
+comment                from deserializer                                       
                                         
 PREHOOK: query: analyze table partcolstats partition (ds, hr, part) compute 
statistics for columns
 PREHOOK: type: QUERY
 PREHOOK: Input: default@partcolstats
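
A minimal sketch of the statements this golden file exercises, copied from the 
query text echoed above (the DDL for partcolstats itself is not shown in the 
diff and is assumed to exist with partition columns ds, hr, part):

    -- gather column statistics for a subset of partitions
    analyze table partcolstats partition (ds=date '2015-04-02', hr, part)
      compute statistics for columns;

    -- inspect the stored statistics for one column of one partition; the
    -- updated output lists one statistic per row (min, max, num_nulls,
    -- distinct_count, bitVector, comment)
    describe formatted partcolstats
      partition (ds=date '2015-04-03', hr=3, part='partA') key;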

http://git-wip-us.apache.org/repos/asf/hive/blob/3bbc24d2/ql/src/test/results/clientpositive/llap/constprog_semijoin.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/llap/constprog_semijoin.q.out 
b/ql/src/test/results/clientpositive/llap/constprog_semijoin.q.out
index 7a48615..3be74ec 100644
--- a/ql/src/test/results/clientpositive/llap/constprog_semijoin.q.out
+++ b/ql/src/test/results/clientpositive/llap/constprog_semijoin.q.out
@@ -62,30 +62,30 @@ Stage-0
     Stage-1
       Reducer 2 llap
       File Output Operator [FS_12]
-        Select Operator [SEL_11] (rows=5 width=413)
+        Select Operator [SEL_11] (rows=1 width=185)
           Output:["_col0","_col1","_col2"]
-          Merge Join Operator [MERGEJOIN_17] (rows=5 width=413)
+          Merge Join Operator [MERGEJOIN_17] (rows=1 width=94)
             Conds:RS_8._col3=RS_9._col0(Left Semi),Output:["_col0","_col2"]
           <-Map 1 [SIMPLE_EDGE] llap
             SHUFFLE [RS_8]
               PartitionCols:_col3
-              Select Operator [SEL_2] (rows=5 width=376)
+              Select Operator [SEL_2] (rows=2 width=189)
                 Output:["_col0","_col2","_col3"]
-                Filter Operator [FIL_15] (rows=5 width=376)
+                Filter Operator [FIL_15] (rows=2 width=189)
                   predicate:((val = 't1val01') and dimid is not null)
-                  TableScan [TS_0] (rows=10 width=376)
-                    
default@table1,table1,Tbl:COMPLETE,Col:NONE,Output:["id","val","val1","dimid"]
+                  TableScan [TS_0] (rows=10 width=189)
+                    
default@table1,table1,Tbl:COMPLETE,Col:COMPLETE,Output:["id","val","val1","dimid"]
           <-Map 3 [SIMPLE_EDGE] llap
             SHUFFLE [RS_9]
               PartitionCols:_col0
-              Group By Operator [GBY_7] (rows=5 width=4)
+              Group By Operator [GBY_7] (rows=2 width=4)
                 Output:["_col0"],keys:_col0
                 Select Operator [SEL_5] (rows=5 width=4)
                   Output:["_col0"]
                   Filter Operator [FIL_16] (rows=5 width=4)
                     predicate:id is not null
                     TableScan [TS_3] (rows=5 width=4)
-                      default@table3,table3,Tbl:COMPLETE,Col:NONE,Output:["id"]
+                      
default@table3,table3,Tbl:COMPLETE,Col:COMPLETE,Output:["id"]
 
 PREHOOK: query: select table1.id, table1.val, table1.val1 from table1 left 
semi join table3 on table1.dimid = table3.id where table1.val = 't1val01'
 PREHOOK: type: QUERY
@@ -116,44 +116,44 @@ Stage-0
     Stage-1
       Reducer 3 llap
       File Output Operator [FS_18]
-        Select Operator [SEL_17] (rows=5 width=232)
+        Select Operator [SEL_17] (rows=1 width=187)
           Output:["_col0","_col1","_col2"]
-          Merge Join Operator [MERGEJOIN_28] (rows=5 width=232)
+          Merge Join Operator [MERGEJOIN_28] (rows=1 width=96)
             Conds:RS_14._col2=RS_15._col0(Left Semi),Output:["_col0","_col4"]
           <-Map 5 [SIMPLE_EDGE] llap
             SHUFFLE [RS_15]
               PartitionCols:_col0
-              Group By Operator [GBY_13] (rows=5 width=4)
+              Group By Operator [GBY_13] (rows=2 width=4)
                 Output:["_col0"],keys:_col0
                 Select Operator [SEL_8] (rows=5 width=4)
                   Output:["_col0"]
                   Filter Operator [FIL_26] (rows=5 width=4)
                     predicate:id is not null
                     TableScan [TS_6] (rows=5 width=4)
-                      default@table3,table3,Tbl:COMPLETE,Col:NONE,Output:["id"]
+                      
default@table3,table3,Tbl:COMPLETE,Col:COMPLETE,Output:["id"]
           <-Reducer 2 [SIMPLE_EDGE] llap
             SHUFFLE [RS_14]
               PartitionCols:_col2
-              Merge Join Operator [MERGEJOIN_27] (rows=5 width=211)
+              Merge Join Operator [MERGEJOIN_27] (rows=2 width=100)
                 
Conds:RS_9._col0=RS_10._col0(Inner),Output:["_col0","_col2","_col4"]
               <-Map 1 [SIMPLE_EDGE] llap
                 SHUFFLE [RS_9]
                   PartitionCols:_col0
-                  Select Operator [SEL_2] (rows=5 width=192)
+                  Select Operator [SEL_2] (rows=2 width=99)
                     Output:["_col0","_col2"]
-                    Filter Operator [FIL_24] (rows=5 width=192)
+                    Filter Operator [FIL_24] (rows=2 width=99)
                       predicate:((val = 't1val01') and dimid is not null and 
id is not null)
-                      TableScan [TS_0] (rows=10 width=192)
-                        
default@table1,table1,Tbl:COMPLETE,Col:NONE,Output:["id","val","dimid"]
+                      TableScan [TS_0] (rows=10 width=99)
+                        
default@table1,table1,Tbl:COMPLETE,Col:COMPLETE,Output:["id","val","dimid"]
               <-Map 4 [SIMPLE_EDGE] llap
                 SHUFFLE [RS_10]
                   PartitionCols:_col0
-                  Select Operator [SEL_5] (rows=3 width=188)
+                  Select Operator [SEL_5] (rows=3 width=96)
                     Output:["_col0","_col1"]
-                    Filter Operator [FIL_25] (rows=3 width=188)
+                    Filter Operator [FIL_25] (rows=3 width=96)
                       predicate:id is not null
-                      TableScan [TS_3] (rows=3 width=188)
-                        
default@table2,table2,Tbl:COMPLETE,Col:NONE,Output:["id","val2"]
+                      TableScan [TS_3] (rows=3 width=96)
+                        
default@table2,table2,Tbl:COMPLETE,Col:COMPLETE,Output:["id","val2"]
 
 PREHOOK: query: select table1.id, table1.val, table2.val2 from table1 inner 
join table2 on table1.val = 't1val01' and table1.id = table2.id left semi join 
table3 on table1.dimid = table3.id
 PREHOOK: type: QUERY
@@ -186,44 +186,44 @@ Stage-0
     Stage-1
       Reducer 3 llap
       File Output Operator [FS_18]
-        Select Operator [SEL_17] (rows=5 width=232)
+        Select Operator [SEL_17] (rows=1 width=187)
           Output:["_col0","_col1","_col2"]
-          Merge Join Operator [MERGEJOIN_28] (rows=5 width=232)
+          Merge Join Operator [MERGEJOIN_28] (rows=1 width=96)
             Conds:RS_14._col0=RS_15._col0(Inner),Output:["_col0","_col4"]
           <-Map 5 [SIMPLE_EDGE] llap
             SHUFFLE [RS_15]
               PartitionCols:_col0
-              Select Operator [SEL_8] (rows=3 width=188)
+              Select Operator [SEL_8] (rows=3 width=96)
                 Output:["_col0","_col1"]
-                Filter Operator [FIL_26] (rows=3 width=188)
+                Filter Operator [FIL_26] (rows=3 width=96)
                   predicate:id is not null
-                  TableScan [TS_6] (rows=3 width=188)
-                    
default@table2,table2,Tbl:COMPLETE,Col:NONE,Output:["id","val2"]
+                  TableScan [TS_6] (rows=3 width=96)
+                    
default@table2,table2,Tbl:COMPLETE,Col:COMPLETE,Output:["id","val2"]
           <-Reducer 2 [SIMPLE_EDGE] llap
             SHUFFLE [RS_14]
               PartitionCols:_col0
-              Merge Join Operator [MERGEJOIN_27] (rows=5 width=211)
+              Merge Join Operator [MERGEJOIN_27] (rows=1 width=4)
                 Conds:RS_11._col2=RS_12._col0(Left Semi),Output:["_col0"]
               <-Map 1 [SIMPLE_EDGE] llap
                 SHUFFLE [RS_11]
                   PartitionCols:_col2
-                  Select Operator [SEL_2] (rows=5 width=192)
+                  Select Operator [SEL_2] (rows=2 width=99)
                     Output:["_col0","_col2"]
-                    Filter Operator [FIL_24] (rows=5 width=192)
+                    Filter Operator [FIL_24] (rows=2 width=99)
                       predicate:((val = 't1val01') and dimid is not null and 
id is not null)
-                      TableScan [TS_0] (rows=10 width=192)
-                        
default@table1,table1,Tbl:COMPLETE,Col:NONE,Output:["id","val","dimid"]
+                      TableScan [TS_0] (rows=10 width=99)
+                        
default@table1,table1,Tbl:COMPLETE,Col:COMPLETE,Output:["id","val","dimid"]
               <-Map 4 [SIMPLE_EDGE] llap
                 SHUFFLE [RS_12]
                   PartitionCols:_col0
-                  Group By Operator [GBY_10] (rows=5 width=4)
+                  Group By Operator [GBY_10] (rows=2 width=4)
                     Output:["_col0"],keys:_col0
                     Select Operator [SEL_5] (rows=5 width=4)
                       Output:["_col0"]
                       Filter Operator [FIL_25] (rows=5 width=4)
                         predicate:id is not null
                         TableScan [TS_3] (rows=5 width=4)
-                          
default@table3,table3,Tbl:COMPLETE,Col:NONE,Output:["id"]
+                          
default@table3,table3,Tbl:COMPLETE,Col:COMPLETE,Output:["id"]
 
 PREHOOK: query: select table1.id, table1.val, table2.val2 from table1 left 
semi join table3 on table1.dimid = table3.id inner join table2 on table1.val = 
't1val01' and table1.id = table2.id
 PREHOOK: type: QUERY
@@ -255,28 +255,28 @@ Stage-0
     Stage-1
       Reducer 2 llap
       File Output Operator [FS_12]
-        Merge Join Operator [MERGEJOIN_17] (rows=5 width=4)
+        Merge Join Operator [MERGEJOIN_17] (rows=1 width=185)
           Conds:RS_8.100, true=RS_9._col0, _col1(Left 
Semi),Output:["_col0","_col1","_col2"]
         <-Map 1 [SIMPLE_EDGE] llap
           SHUFFLE [RS_8]
             PartitionCols:100, true
-            Select Operator [SEL_2] (rows=1 width=372)
+            Select Operator [SEL_2] (rows=1 width=193)
               Output:["_col0","_col1","_col2"]
-              Filter Operator [FIL_15] (rows=1 width=372)
+              Filter Operator [FIL_15] (rows=1 width=185)
                 predicate:false
-                TableScan [TS_0] (rows=10 width=372)
-                  
default@table1,table1,Tbl:COMPLETE,Col:NONE,Output:["id","val","val1"]
+                TableScan [TS_0] (rows=10 width=185)
+                  
default@table1,table1,Tbl:COMPLETE,Col:COMPLETE,Output:["id","val","val1"]
         <-Map 3 [SIMPLE_EDGE] llap
           SHUFFLE [RS_9]
             PartitionCols:_col0, _col1
-            Group By Operator [GBY_7] (rows=5 width=4)
+            Group By Operator [GBY_7] (rows=1 width=8)
               Output:["_col0","_col1"],keys:_col0, _col1
-              Select Operator [SEL_5] (rows=5 width=4)
+              Select Operator [SEL_5] (rows=1 width=8)
                 Output:["_col0","_col1"]
-                Filter Operator [FIL_16] (rows=5 width=4)
+                Filter Operator [FIL_16] (rows=1 width=4)
                   predicate:(id = 100)
                   TableScan [TS_3] (rows=5 width=4)
-                    default@table3,table3,Tbl:COMPLETE,Col:NONE,Output:["id"]
+                    
default@table3,table3,Tbl:COMPLETE,Col:COMPLETE,Output:["id"]
 
 PREHOOK: query: select table1.id, table1.val, table1.val1 from table1 left 
semi join table3 on table1.dimid = table3.id and table3.id = 100 where 
table1.dimid <> 100
 PREHOOK: type: QUERY
@@ -303,28 +303,28 @@ Stage-0
     Stage-1
       Reducer 2 llap
       File Output Operator [FS_12]
-        Merge Join Operator [MERGEJOIN_17] (rows=5 width=413)
+        Merge Join Operator [MERGEJOIN_17] (rows=3 width=185)
           Conds:RS_8.100, true=RS_9._col0, _col1(Left 
Semi),Output:["_col0","_col1","_col2"]
         <-Map 1 [SIMPLE_EDGE] llap
           SHUFFLE [RS_8]
             PartitionCols:100, true
-            Select Operator [SEL_2] (rows=5 width=376)
+            Select Operator [SEL_2] (rows=3 width=193)
               Output:["_col0","_col1","_col2"]
-              Filter Operator [FIL_15] (rows=5 width=376)
+              Filter Operator [FIL_15] (rows=3 width=189)
                 predicate:(dimid = 100)
-                TableScan [TS_0] (rows=10 width=376)
-                  
default@table1,table1,Tbl:COMPLETE,Col:NONE,Output:["id","val","val1","dimid"]
+                TableScan [TS_0] (rows=10 width=189)
+                  
default@table1,table1,Tbl:COMPLETE,Col:COMPLETE,Output:["id","val","val1","dimid"]
         <-Map 3 [SIMPLE_EDGE] llap
           SHUFFLE [RS_9]
             PartitionCols:_col0, _col1
-            Group By Operator [GBY_7] (rows=5 width=4)
+            Group By Operator [GBY_7] (rows=1 width=8)
               Output:["_col0","_col1"],keys:_col0, _col1
-              Select Operator [SEL_5] (rows=5 width=4)
+              Select Operator [SEL_5] (rows=1 width=8)
                 Output:["_col0","_col1"]
-                Filter Operator [FIL_16] (rows=5 width=4)
+                Filter Operator [FIL_16] (rows=1 width=4)
                   predicate:(id = 100)
                   TableScan [TS_3] (rows=5 width=4)
-                    default@table3,table3,Tbl:COMPLETE,Col:NONE,Output:["id"]
+                    
default@table3,table3,Tbl:COMPLETE,Col:COMPLETE,Output:["id"]
 
 PREHOOK: query: select table1.id, table1.val, table1.val1 from table1 left 
semi join table3 on table1.dimid = table3.id and table3.id = 100 where 
table1.dimid  IN (100,200)
 PREHOOK: type: QUERY
@@ -353,28 +353,28 @@ Stage-0
     Stage-1
       Reducer 2 llap
       File Output Operator [FS_12]
-        Merge Join Operator [MERGEJOIN_17] (rows=5 width=4)
+        Merge Join Operator [MERGEJOIN_17] (rows=1 width=185)
           Conds:RS_8.100, true=RS_9._col0, _col1(Left 
Semi),Output:["_col0","_col1","_col2"]
         <-Map 1 [SIMPLE_EDGE] llap
           SHUFFLE [RS_8]
             PartitionCols:100, true
-            Select Operator [SEL_2] (rows=1 width=372)
+            Select Operator [SEL_2] (rows=1 width=193)
               Output:["_col0","_col1","_col2"]
-              Filter Operator [FIL_15] (rows=1 width=372)
+              Filter Operator [FIL_15] (rows=1 width=185)
                 predicate:false
-                TableScan [TS_0] (rows=10 width=372)
-                  
default@table1,table1,Tbl:COMPLETE,Col:NONE,Output:["id","val","val1"]
+                TableScan [TS_0] (rows=10 width=185)
+                  
default@table1,table1,Tbl:COMPLETE,Col:COMPLETE,Output:["id","val","val1"]
         <-Map 3 [SIMPLE_EDGE] llap
           SHUFFLE [RS_9]
             PartitionCols:_col0, _col1
-            Group By Operator [GBY_7] (rows=5 width=4)
+            Group By Operator [GBY_7] (rows=1 width=8)
               Output:["_col0","_col1"],keys:_col0, _col1
-              Select Operator [SEL_5] (rows=5 width=4)
+              Select Operator [SEL_5] (rows=1 width=8)
                 Output:["_col0","_col1"]
-                Filter Operator [FIL_16] (rows=5 width=4)
+                Filter Operator [FIL_16] (rows=1 width=4)
                   predicate:(id = 100)
                   TableScan [TS_3] (rows=5 width=4)
-                    default@table3,table3,Tbl:COMPLETE,Col:NONE,Output:["id"]
+                    
default@table3,table3,Tbl:COMPLETE,Col:COMPLETE,Output:["id"]
 
 PREHOOK: query: select table1.id, table1.val, table1.val1 from table1 left 
semi join table3 on table1.dimid = table3.id and table3.id = 100 where 
table1.dimid  = 200
 PREHOOK: type: QUERY
@@ -401,28 +401,28 @@ Stage-0
     Stage-1
       Reducer 2 llap
       File Output Operator [FS_12]
-        Merge Join Operator [MERGEJOIN_17] (rows=5 width=413)
+        Merge Join Operator [MERGEJOIN_17] (rows=3 width=185)
           Conds:RS_8.100, true=RS_9._col0, _col1(Left 
Semi),Output:["_col0","_col1","_col2"]
         <-Map 1 [SIMPLE_EDGE] llap
           SHUFFLE [RS_8]
             PartitionCols:100, true
-            Select Operator [SEL_2] (rows=5 width=376)
+            Select Operator [SEL_2] (rows=3 width=193)
               Output:["_col0","_col1","_col2"]
-              Filter Operator [FIL_15] (rows=5 width=376)
+              Filter Operator [FIL_15] (rows=3 width=189)
                 predicate:(dimid = 100)
-                TableScan [TS_0] (rows=10 width=376)
-                  
default@table1,table1,Tbl:COMPLETE,Col:NONE,Output:["id","val","val1","dimid"]
+                TableScan [TS_0] (rows=10 width=189)
+                  
default@table1,table1,Tbl:COMPLETE,Col:COMPLETE,Output:["id","val","val1","dimid"]
         <-Map 3 [SIMPLE_EDGE] llap
           SHUFFLE [RS_9]
             PartitionCols:_col0, _col1
-            Group By Operator [GBY_7] (rows=5 width=4)
+            Group By Operator [GBY_7] (rows=1 width=8)
               Output:["_col0","_col1"],keys:_col0, _col1
-              Select Operator [SEL_5] (rows=5 width=4)
+              Select Operator [SEL_5] (rows=1 width=8)
                 Output:["_col0","_col1"]
-                Filter Operator [FIL_16] (rows=5 width=4)
+                Filter Operator [FIL_16] (rows=1 width=4)
                   predicate:(id = 100)
                   TableScan [TS_3] (rows=5 width=4)
-                    default@table3,table3,Tbl:COMPLETE,Col:NONE,Output:["id"]
+                    
default@table3,table3,Tbl:COMPLETE,Col:COMPLETE,Output:["id"]
 
 PREHOOK: query: select table1.id, table1.val, table1.val1 from table1 left 
semi join table3 on table1.dimid = table3.id and table3.id = 100 where 
table1.dimid  = 100
 PREHOOK: type: QUERY
@@ -451,28 +451,28 @@ Stage-0
     Stage-1
       Reducer 2 llap
       File Output Operator [FS_12]
-        Merge Join Operator [MERGEJOIN_17] (rows=5 width=413)
+        Merge Join Operator [MERGEJOIN_17] (rows=3 width=185)
           Conds:RS_8.100, true=RS_9._col0, _col1(Left 
Semi),Output:["_col0","_col1","_col2"]
         <-Map 1 [SIMPLE_EDGE] llap
           SHUFFLE [RS_8]
             PartitionCols:100, true
-            Select Operator [SEL_2] (rows=5 width=376)
+            Select Operator [SEL_2] (rows=3 width=193)
               Output:["_col0","_col1","_col2"]
-              Filter Operator [FIL_15] (rows=5 width=376)
+              Filter Operator [FIL_15] (rows=3 width=189)
                 predicate:(dimid = 100)
-                TableScan [TS_0] (rows=10 width=376)
-                  
default@table1,table1,Tbl:COMPLETE,Col:NONE,Output:["id","val","val1","dimid"]
+                TableScan [TS_0] (rows=10 width=189)
+                  
default@table1,table1,Tbl:COMPLETE,Col:COMPLETE,Output:["id","val","val1","dimid"]
         <-Map 3 [SIMPLE_EDGE] llap
           SHUFFLE [RS_9]
             PartitionCols:_col0, _col1
-            Group By Operator [GBY_7] (rows=5 width=4)
+            Group By Operator [GBY_7] (rows=1 width=8)
               Output:["_col0","_col1"],keys:_col0, _col1
-              Select Operator [SEL_5] (rows=5 width=4)
+              Select Operator [SEL_5] (rows=1 width=8)
                 Output:["_col0","_col1"]
-                Filter Operator [FIL_16] (rows=5 width=4)
+                Filter Operator [FIL_16] (rows=1 width=4)
                   predicate:(id = 100)
                   TableScan [TS_3] (rows=5 width=4)
-                    default@table3,table3,Tbl:COMPLETE,Col:NONE,Output:["id"]
+                    
default@table3,table3,Tbl:COMPLETE,Col:COMPLETE,Output:["id"]
 
 PREHOOK: query: select table1.id, table1.val, table1.val1 from table1 left 
semi join table3 on table1.dimid = table3.id and table3.id = 100
 PREHOOK: type: QUERY
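
The constprog_semijoin plans above can be regenerated with EXPLAIN on the 
queries echoed in the PREHOOK lines; with complete column statistics the 
row-count and width estimates change as shown in the hunks. A minimal sketch 
using one of those queries verbatim:

    explain
    select table1.id, table1.val, table1.val1
    from table1
    left semi join table3 on table1.dimid = table3.id
    where table1.val = 't1val01';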

http://git-wip-us.apache.org/repos/asf/hive/blob/3bbc24d2/ql/src/test/results/clientpositive/llap/cross_prod_3.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/llap/cross_prod_3.q.out 
b/ql/src/test/results/clientpositive/llap/cross_prod_3.q.out
index 94fe942..76200c6 100644
--- a/ql/src/test/results/clientpositive/llap/cross_prod_3.q.out
+++ b/ql/src/test/results/clientpositive/llap/cross_prod_3.q.out
@@ -50,14 +50,14 @@ STAGE PLANS:
             Map Operator Tree:
                 TableScan
                   alias: a
-                  Statistics: Num rows: 10 Data size: 3680 Basic stats: 
COMPLETE Column stats: NONE
+                  Statistics: Num rows: 10 Data size: 1780 Basic stats: 
COMPLETE Column stats: COMPLETE
                   Filter Operator
                     predicate: key is not null (type: boolean)
-                    Statistics: Num rows: 10 Data size: 3680 Basic stats: 
COMPLETE Column stats: NONE
+                    Statistics: Num rows: 10 Data size: 1780 Basic stats: 
COMPLETE Column stats: COMPLETE
                     Select Operator
                       expressions: key (type: string), value (type: string)
                       outputColumnNames: _col0, _col1
-                      Statistics: Num rows: 10 Data size: 3680 Basic stats: 
COMPLETE Column stats: NONE
+                      Statistics: Num rows: 10 Data size: 1780 Basic stats: 
COMPLETE Column stats: COMPLETE
                       Map Join Operator
                         condition map:
                              Inner Join 0 to 1
@@ -70,14 +70,14 @@ STAGE PLANS:
                         input vertices:
                           1 Map 2
                           2 Map 3
-                        Statistics: Num rows: 22 Data size: 8096 Basic stats: 
COMPLETE Column stats: NONE
+                        Statistics: Num rows: 22 Data size: 3916 Basic stats: 
COMPLETE Column stats: NONE
                         Select Operator
                           expressions: _col2 (type: string), _col3 (type: 
string), _col0 (type: string), _col1 (type: string), _col4 (type: string), 
_col5 (type: string)
                           outputColumnNames: _col0, _col1, _col2, _col3, 
_col4, _col5
-                          Statistics: Num rows: 22 Data size: 8096 Basic 
stats: COMPLETE Column stats: NONE
+                          Statistics: Num rows: 22 Data size: 3916 Basic 
stats: COMPLETE Column stats: NONE
                           File Output Operator
                             compressed: false
-                            Statistics: Num rows: 22 Data size: 8096 Basic 
stats: COMPLETE Column stats: NONE
+                            Statistics: Num rows: 22 Data size: 3916 Basic 
stats: COMPLETE Column stats: NONE
                             table:
                                 input format: 
org.apache.hadoop.mapred.SequenceFileInputFormat
                                 output format: 
org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
@@ -108,19 +108,19 @@ STAGE PLANS:
             Map Operator Tree:
                 TableScan
                   alias: b
-                  Statistics: Num rows: 10 Data size: 3680 Basic stats: 
COMPLETE Column stats: NONE
+                  Statistics: Num rows: 10 Data size: 1780 Basic stats: 
COMPLETE Column stats: COMPLETE
                   Filter Operator
                     predicate: key is not null (type: boolean)
-                    Statistics: Num rows: 10 Data size: 3680 Basic stats: 
COMPLETE Column stats: NONE
+                    Statistics: Num rows: 10 Data size: 1780 Basic stats: 
COMPLETE Column stats: COMPLETE
                     Select Operator
                       expressions: key (type: string), value (type: string)
                       outputColumnNames: _col0, _col1
-                      Statistics: Num rows: 10 Data size: 3680 Basic stats: 
COMPLETE Column stats: NONE
+                      Statistics: Num rows: 10 Data size: 1780 Basic stats: 
COMPLETE Column stats: COMPLETE
                       Reduce Output Operator
                         key expressions: _col0 (type: string)
                         sort order: +
                         Map-reduce partition columns: _col0 (type: string)
-                        Statistics: Num rows: 10 Data size: 3680 Basic stats: 
COMPLETE Column stats: NONE
+                        Statistics: Num rows: 10 Data size: 1780 Basic stats: 
COMPLETE Column stats: COMPLETE
                         value expressions: _col1 (type: string)
             Execution mode: llap
             LLAP IO: no inputs

http://git-wip-us.apache.org/repos/asf/hive/blob/3bbc24d2/ql/src/test/results/clientpositive/llap/cte_5.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/llap/cte_5.q.out 
b/ql/src/test/results/clientpositive/llap/cte_5.q.out
index 5b3c8ca..e7c10aa 100644
--- a/ql/src/test/results/clientpositive/llap/cte_5.q.out
+++ b/ql/src/test/results/clientpositive/llap/cte_5.q.out
@@ -86,9 +86,9 @@ Stage-0
     Stage-1
       Reducer 2 llap
       File Output Operator [FS_10]
-        Select Operator [SEL_9] (rows=2 width=90)
+        Select Operator [SEL_9] (rows=2 width=4)
           Output:["_col0"]
-          Merge Join Operator [MERGEJOIN_13] (rows=2 width=90)
+          Merge Join Operator [MERGEJOIN_13] (rows=2 width=8)
             Conds:(Inner)
           <-Map 1 [XPROD_EDGE] llap
             XPROD_EDGE [RS_6]
@@ -96,7 +96,7 @@ Stage-0
                 Filter Operator [FIL_11] (rows=1 width=4)
                   predicate:(UDFToDouble(colnum) = 5.0)
                   TableScan [TS_0] (rows=1 width=4)
-                    mydb@q1,a,Tbl:COMPLETE,Col:NONE,Output:["colnum"]
+                    mydb@q1,a,Tbl:COMPLETE,Col:COMPLETE,Output:["colnum"]
           <-Map 3 [XPROD_EDGE] llap
             XPROD_EDGE [RS_7]
               Select Operator [SEL_5] (rows=2 width=85)

http://git-wip-us.apache.org/repos/asf/hive/blob/3bbc24d2/ql/src/test/results/clientpositive/llap/cte_mat_5.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/llap/cte_mat_5.q.out 
b/ql/src/test/results/clientpositive/llap/cte_mat_5.q.out
index e75c778..84b5ce3 100644
--- a/ql/src/test/results/clientpositive/llap/cte_mat_5.q.out
+++ b/ql/src/test/results/clientpositive/llap/cte_mat_5.q.out
@@ -95,7 +95,7 @@ Stage-3
               Filter Operator [FIL_18] (rows=1 width=4)
                 predicate:colnum is not null
                 TableScan [TS_5] (rows=1 width=4)
-                  mydb@q1,a,Tbl:COMPLETE,Col:NONE,Output:["colnum"]
+                  mydb@q1,a,Tbl:COMPLETE,Col:COMPLETE,Output:["colnum"]
         <-Map 4 [SIMPLE_EDGE] llap
           SHUFFLE [RS_12]
             PartitionCols:UDFToDouble(_col0)

http://git-wip-us.apache.org/repos/asf/hive/blob/3bbc24d2/ql/src/test/results/clientpositive/llap/deleteAnalyze.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/llap/deleteAnalyze.q.out 
b/ql/src/test/results/clientpositive/llap/deleteAnalyze.q.out
index d2552fe..253d5b7 100644
--- a/ql/src/test/results/clientpositive/llap/deleteAnalyze.q.out
+++ b/ql/src/test/results/clientpositive/llap/deleteAnalyze.q.out
@@ -47,7 +47,7 @@ Retention:            0
 #### A masked pattern was here ####
 Table Type:            MANAGED_TABLE            
 Table Parameters:               
-       COLUMN_STATS_ACCURATE   {\"BASIC_STATS\":\"true\"}
+       COLUMN_STATS_ACCURATE   
{\"BASIC_STATS\":\"true\",\"COLUMN_STATS\":{\"amount\":\"true\",\"id\":\"true\",\"item\":\"true\",\"sales_tax\":\"true\"}}
        numFiles                1                   
        numRows                 2                   
        rawDataSize             634                 
@@ -73,17 +73,17 @@ POSTHOOK: type: DESCTABLE
 POSTHOOK: Input: default@testdeci2
 col_name               amount                                                  
                                         
 data_type              decimal(10,3)                                           
                                         
-min                                                                            
                                         
-max                                                                            
                                         
-num_nulls                                                                      
                                         
-distinct_count                                                                 
                                         
+min                    12.123                                                  
                                         
+max                    123.123                                                 
                                         
+num_nulls              0                                                       
                                         
+distinct_count         2                                                       
                                         
 avg_col_len                                                                    
                                         
 max_col_len                                                                    
                                         
 num_trues                                                                      
                                         
 num_falses                                                                     
                                         
-bitVector                                                                      
                                         
+bitVector              HL                                                      
                                         
 comment                from deserializer                                       
                                         
-COLUMN_STATS_ACCURATE  {\"BASIC_STATS\":\"true\"}                              
                                                 
+COLUMN_STATS_ACCURATE  
{\"BASIC_STATS\":\"true\",\"COLUMN_STATS\":{\"amount\":\"true\",\"id\":\"true\",\"item\":\"true\",\"sales_tax\":\"true\"}}
                                                                               
 PREHOOK: query: analyze table testdeci2 compute statistics for columns
 PREHOOK: type: QUERY
 PREHOOK: Input: default@testdeci2
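
For deleteAnalyze.q.out the change is that analyzing the table now records 
per-column statistics (min, max, num_nulls, distinct_count and an HLL bit 
vector) under COLUMN_STATS_ACCURATE. A minimal sketch of the statements 
involved (the describe command is an assumption inferred from the column 
output shown above):

    analyze table testdeci2 compute statistics for columns;
    describe formatted testdeci2 amount;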

http://git-wip-us.apache.org/repos/asf/hive/blob/3bbc24d2/ql/src/test/results/clientpositive/llap/disable_merge_for_bucketing.q.out
----------------------------------------------------------------------
diff --git 
a/ql/src/test/results/clientpositive/llap/disable_merge_for_bucketing.q.out 
b/ql/src/test/results/clientpositive/llap/disable_merge_for_bucketing.q.out
index acd6076..e45d49e 100644
--- a/ql/src/test/results/clientpositive/llap/disable_merge_for_bucketing.q.out
+++ b/ql/src/test/results/clientpositive/llap/disable_merge_for_bucketing.q.out
@@ -140,6 +140,41 @@ STAGE PLANS:
                   TotalFiles: 2
                   GatherStats: true
                   MultiFileSpray: true
+                Select Operator
+                  expressions: _col0 (type: int), _col1 (type: string)
+                  outputColumnNames: key, value
+                  Statistics: Num rows: 500 Data size: 47500 Basic stats: 
COMPLETE Column stats: COMPLETE
+                  Group By Operator
+                    aggregations: compute_stats(key, 'hll'), 
compute_stats(value, 'hll')
+                    mode: complete
+                    outputColumnNames: _col0, _col1
+                    Statistics: Num rows: 1 Data size: 864 Basic stats: 
COMPLETE Column stats: COMPLETE
+                    Select Operator
+                      expressions: _col0 (type: 
struct<columntype:string,min:bigint,max:bigint,countnulls:bigint,numdistinctvalues:bigint,ndvbitvector:binary>),
 _col1 (type: 
struct<columntype:string,maxlength:bigint,avglength:double,countnulls:bigint,numdistinctvalues:bigint,ndvbitvector:binary>)
+                      outputColumnNames: _col0, _col1
+                      Statistics: Num rows: 1 Data size: 864 Basic stats: 
COMPLETE Column stats: COMPLETE
+                      File Output Operator
+                        compressed: false
+                        GlobalTableId: 0
+#### A masked pattern was here ####
+                        NumFilesPerFileSink: 1
+                        Statistics: Num rows: 1 Data size: 864 Basic stats: 
COMPLETE Column stats: COMPLETE
+#### A masked pattern was here ####
+                        table:
+                            input format: 
org.apache.hadoop.mapred.SequenceFileInputFormat
+                            output format: 
org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+                            properties:
+                              columns _col0,_col1
+                              columns.types 
struct<columntype:string,min:bigint,max:bigint,countnulls:bigint,numdistinctvalues:bigint,ndvbitvector:binary>:struct<columntype:string,maxlength:bigint,avglength:double,countnulls:bigint,numdistinctvalues:bigint,ndvbitvector:binary>
+                              escape.delim \
+                              
hive.serialization.extend.additional.nesting.levels true
+                              serialization.escape.crlf true
+                              serialization.format 1
+                              serialization.lib 
org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                            serde: 
org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                        TotalFiles: 1
+                        GatherStats: false
+                        MultiFileSpray: false
 
   Stage: Stage-2
     Dependency Collection
@@ -177,6 +212,11 @@ STAGE PLANS:
     Stats Work
       Basic Stats Work:
 #### A masked pattern was here ####
+      Column Stats Desc:
+          Columns: key, value
+          Column Types: int, string
+          Table: default.bucket2_1
+          Is Table Level Stats: true
 
 PREHOOK: query: insert overwrite table bucket2_1
 select * from src
@@ -212,18 +252,18 @@ STAGE PLANS:
             Map Operator Tree:
                 TableScan
                   alias: s
-                  Statistics: Num rows: 500 Data size: 89488 Basic stats: 
COMPLETE Column stats: NONE
+                  Statistics: Num rows: 500 Data size: 47500 Basic stats: 
COMPLETE Column stats: COMPLETE
                   Filter Operator
                     predicate: (((hash(key) & 2147483647) % 2) = 0) (type: 
boolean)
-                    Statistics: Num rows: 250 Data size: 44744 Basic stats: 
COMPLETE Column stats: NONE
+                    Statistics: Num rows: 250 Data size: 23750 Basic stats: 
COMPLETE Column stats: COMPLETE
                     Select Operator
                       expressions: key (type: int), value (type: string)
                       outputColumnNames: _col0, _col1
-                      Statistics: Num rows: 250 Data size: 44744 Basic stats: 
COMPLETE Column stats: NONE
+                      Statistics: Num rows: 250 Data size: 23750 Basic stats: 
COMPLETE Column stats: COMPLETE
                       Reduce Output Operator
                         key expressions: _col0 (type: int)
                         sort order: +
-                        Statistics: Num rows: 250 Data size: 44744 Basic 
stats: COMPLETE Column stats: NONE
+                        Statistics: Num rows: 250 Data size: 23750 Basic 
stats: COMPLETE Column stats: COMPLETE
                         value expressions: _col1 (type: string)
             Execution mode: llap
             LLAP IO: no inputs
@@ -233,10 +273,10 @@ STAGE PLANS:
               Select Operator
                 expressions: KEY.reducesinkkey0 (type: int), VALUE._col0 
(type: string)
                 outputColumnNames: _col0, _col1
-                Statistics: Num rows: 250 Data size: 44744 Basic stats: 
COMPLETE Column stats: NONE
+                Statistics: Num rows: 250 Data size: 23750 Basic stats: 
COMPLETE Column stats: COMPLETE
                 File Output Operator
                   compressed: false
-                  Statistics: Num rows: 250 Data size: 44744 Basic stats: 
COMPLETE Column stats: NONE
+                  Statistics: Num rows: 250 Data size: 23750 Basic stats: 
COMPLETE Column stats: COMPLETE
                   table:
                       input format: 
org.apache.hadoop.mapred.SequenceFileInputFormat
                       output format: 
org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
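
The operators added to the bucketing plan implement automatic column 
statistics gathering: the insert now also evaluates compute_stats(key, 'hll') 
and compute_stats(value, 'hll') and hands the result to the Stats Work stage. 
A minimal sketch of how this path is exercised (the configuration flag is an 
assumption, it is not part of the diff):

    -- assumed: automatic column statistics gathering is enabled
    set hive.stats.column.autogather=true;

    insert overwrite table bucket2_1
    select * from src;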
