HIVE-15591: Hive can not use , in quoted column name (Pengcheng Xiong, reviewed 
by Ashutosh Chauhan)


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/588c3911
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/588c3911
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/588c3911

Branch: refs/heads/master
Commit: 588c3911a325c4a746c71e0d0364ff7745aba8e4
Parents: 38c3f1a
Author: Pengcheng Xiong <pxi...@apache.org>
Authored: Mon Jan 23 10:37:54 2017 -0800
Committer: Pengcheng Xiong <pxi...@apache.org>
Committed: Mon Jan 23 10:37:54 2017 -0800

----------------------------------------------------------------------
 .../hadoop/hive/contrib/serde2/RegexSerDe.java  |   5 +-
 .../hive/contrib/serde2/TypedBytesSerDe.java    |   5 +-
 .../hive/hcatalog/data/HCatRecordSerDe.java     |   6 +-
 .../apache/hive/hcatalog/data/JsonSerDe.java    |   5 +-
 .../hive/hcatalog/mapreduce/InternalUtil.java   |   3 +
 .../apache/hadoop/hive/serde2/CustomSerDe1.java |   5 +-
 .../apache/hadoop/hive/serde2/CustomSerDe2.java |   5 +-
 .../apache/hadoop/hive/serde2/CustomSerDe3.java |   7 +-
 .../apache/hadoop/hive/serde2/CustomSerDe4.java |   5 +-
 .../apache/hadoop/hive/serde2/CustomSerDe5.java |   5 +-
 .../hadoop/hive/metastore/MetaStoreUtils.java   |  20 ++-
 .../hadoop/hive/ql/io/orc/OrcOutputFormat.java  |   7 +-
 .../apache/hadoop/hive/ql/io/orc/OrcSerde.java  |   6 +-
 .../io/parquet/MapredParquetOutputFormat.java   |   8 +-
 .../ql/io/parquet/serde/ParquetHiveSerDe.java   |   7 +-
 .../apache/hadoop/hive/ql/plan/PlanUtils.java   |   5 +
 .../clientpositive/comma_in_column_name.q       |  14 ++
 .../alter_partition_coltype.q.out               |   8 +
 .../analyze_table_null_partition.q.out          |   8 +
 .../clientpositive/autoColumnStats_8.q.out      |  15 ++
 .../auto_join_reordering_values.q.out           |  17 ++
 .../clientpositive/auto_sortmerge_join_1.q.out  |  30 ++++
 .../clientpositive/auto_sortmerge_join_11.q.out |  32 ++++
 .../clientpositive/auto_sortmerge_join_12.q.out |  14 ++
 .../clientpositive/auto_sortmerge_join_2.q.out  |  26 +++
 .../clientpositive/auto_sortmerge_join_3.q.out  |  24 +++
 .../clientpositive/auto_sortmerge_join_4.q.out  |  24 +++
 .../clientpositive/auto_sortmerge_join_5.q.out  |  14 ++
 .../clientpositive/auto_sortmerge_join_7.q.out  |  36 +++++
 .../clientpositive/binary_output_format.q.out   |  10 ++
 .../test/results/clientpositive/bucket1.q.out   |   4 +
 .../test/results/clientpositive/bucket2.q.out   |   4 +
 .../test/results/clientpositive/bucket3.q.out   |   4 +
 .../clientpositive/bucket_map_join_1.q.out      |   2 +
 .../clientpositive/bucket_map_join_2.q.out      |   2 +
 .../clientpositive/bucket_map_join_spark1.q.out |  16 ++
 .../clientpositive/bucket_map_join_spark2.q.out |  16 ++
 .../clientpositive/bucket_map_join_spark3.q.out |  16 ++
 .../clientpositive/bucket_map_join_spark4.q.out |  12 ++
 .../clientpositive/bucketcontext_1.q.out        |  10 ++
 .../clientpositive/bucketcontext_2.q.out        |  10 ++
 .../clientpositive/bucketcontext_3.q.out        |   8 +
 .../clientpositive/bucketcontext_4.q.out        |   8 +
 .../clientpositive/bucketcontext_5.q.out        |   4 +
 .../clientpositive/bucketcontext_6.q.out        |   8 +
 .../clientpositive/bucketcontext_7.q.out        |  12 ++
 .../clientpositive/bucketcontext_8.q.out        |  12 ++
 .../clientpositive/bucketmapjoin10.q.out        |   8 +
 .../clientpositive/bucketmapjoin11.q.out        |  16 ++
 .../clientpositive/bucketmapjoin12.q.out        |   8 +
 .../clientpositive/bucketmapjoin13.q.out        |  18 +++
 .../results/clientpositive/bucketmapjoin5.q.out |  24 +++
 .../results/clientpositive/bucketmapjoin8.q.out |   8 +
 .../results/clientpositive/bucketmapjoin9.q.out |   8 +
 .../clientpositive/bucketmapjoin_negative.q.out |  12 ++
 .../bucketmapjoin_negative2.q.out               |  14 ++
 .../bucketmapjoin_negative3.q.out               |  18 +++
 .../clientpositive/cbo_rp_outer_join_ppr.q.out  |  16 ++
 .../clientpositive/columnstats_partlvl.q.out    |   4 +
 .../clientpositive/columnstats_tbllvl.q.out     |   4 +
 .../clientpositive/comma_in_column_name.q.out   |  56 +++++++
 .../constantPropagateForSubQuery.q.out          |   4 +
 .../display_colstats_tbllvl.q.out               |   2 +
 .../results/clientpositive/druid_basic2.q.out   |   4 +
 .../dynamic_partition_skip_default.q.out        |   8 +
 .../encryption_join_unencrypted_tbl.q.out       |   4 +
 .../extrapolate_part_stats_date.q.out           |   8 +
 .../extrapolate_part_stats_full.q.out           |  24 +++
 .../extrapolate_part_stats_partial.q.out        |  76 +++++++++
 .../clientpositive/filter_join_breaktask.q.out  |   7 +
 .../clientpositive/fouter_join_ppr.q.out        |  32 ++++
 .../clientpositive/groupby_map_ppr.q.out        |   6 +
 .../groupby_map_ppr_multi_distinct.q.out        |   6 +
 .../results/clientpositive/groupby_ppr.q.out    |   6 +
 .../groupby_ppr_multi_distinct.q.out            |  12 ++
 .../clientpositive/groupby_sort_1_23.q.out      | 138 ++++++++++++++++
 .../results/clientpositive/groupby_sort_6.q.out |   8 +
 .../clientpositive/groupby_sort_skew_1_23.q.out | 159 +++++++++++++++++++
 .../test/results/clientpositive/input23.q.out   |   2 +
 .../test/results/clientpositive/input42.q.out   |  12 ++
 .../results/clientpositive/input_part1.q.out    |  10 ++
 .../results/clientpositive/input_part2.q.out    |  20 +++
 .../results/clientpositive/input_part7.q.out    |   4 +
 .../results/clientpositive/input_part9.q.out    |   4 +
 ql/src/test/results/clientpositive/join17.q.out |   4 +
 ql/src/test/results/clientpositive/join26.q.out |  10 ++
 ql/src/test/results/clientpositive/join32.q.out |   8 +
 ql/src/test/results/clientpositive/join33.q.out |   8 +
 ql/src/test/results/clientpositive/join34.q.out |   7 +
 ql/src/test/results/clientpositive/join35.q.out |  15 ++
 ql/src/test/results/clientpositive/join9.q.out  |   6 +
 .../clientpositive/join_filters_overlap.q.out   |  10 ++
 .../results/clientpositive/join_map_ppr.q.out   |  20 +++
 .../clientpositive/list_bucket_dml_1.q.out      |   8 +
 .../clientpositive/list_bucket_dml_11.q.out     |   6 +
 .../clientpositive/list_bucket_dml_12.q.out     |   8 +
 .../clientpositive/list_bucket_dml_13.q.out     |   6 +
 .../clientpositive/list_bucket_dml_14.q.out     |   6 +
 .../clientpositive/list_bucket_dml_2.q.out      |   8 +
 .../clientpositive/list_bucket_dml_3.q.out      |   8 +
 .../clientpositive/list_bucket_dml_4.q.out      |  18 +++
 .../clientpositive/list_bucket_dml_5.q.out      |  10 ++
 .../clientpositive/list_bucket_dml_6.q.out      |  20 +++
 .../clientpositive/list_bucket_dml_7.q.out      |  20 +++
 .../clientpositive/list_bucket_dml_8.q.out      |  10 ++
 .../clientpositive/list_bucket_dml_9.q.out      |  18 +++
 .../list_bucket_query_multiskew_1.q.out         |   8 +
 .../list_bucket_query_multiskew_2.q.out         |   6 +
 .../list_bucket_query_multiskew_3.q.out         |   6 +
 .../list_bucket_query_oneskew_1.q.out           |   6 +
 .../list_bucket_query_oneskew_2.q.out           |   8 +
 .../list_bucket_query_oneskew_3.q.out           |   2 +
 .../llap/acid_bucket_pruning.q.out              |   2 +
 .../llap/auto_sortmerge_join_1.q.out            |  18 +++
 .../llap/auto_sortmerge_join_11.q.out           |  28 ++++
 .../llap/auto_sortmerge_join_12.q.out           |  10 ++
 .../llap/auto_sortmerge_join_2.q.out            |  12 ++
 .../llap/auto_sortmerge_join_3.q.out            |  18 +++
 .../llap/auto_sortmerge_join_4.q.out            |  18 +++
 .../llap/auto_sortmerge_join_5.q.out            |  12 ++
 .../llap/auto_sortmerge_join_7.q.out            |  24 +++
 .../llap/auto_sortmerge_join_8.q.out            |  24 +++
 .../results/clientpositive/llap/bucket2.q.out   |   4 +
 .../results/clientpositive/llap/bucket3.q.out   |   4 +
 .../results/clientpositive/llap/bucket4.q.out   |   4 +
 .../results/clientpositive/llap/bucket5.q.out   |  12 ++
 .../clientpositive/llap/bucket_many.q.out       |   4 +
 .../clientpositive/llap/bucketmapjoin1.q.out    |  12 ++
 .../clientpositive/llap/bucketmapjoin2.q.out    |  20 +++
 .../clientpositive/llap/bucketmapjoin3.q.out    |  12 ++
 .../clientpositive/llap/bucketmapjoin4.q.out    |  12 ++
 .../clientpositive/llap/bucketmapjoin7.q.out    |   4 +
 .../llap/disable_merge_for_bucketing.q.out      |   4 +
 .../extrapolate_part_stats_partial_ndv.q.out    |  38 +++++
 .../llap/filter_join_breaktask.q.out            |   6 +
 .../clientpositive/llap/join32_lessSize.q.out   |  34 ++++
 .../llap/list_bucket_dml_10.q.out               |   4 +
 .../clientpositive/llap/llap_nullscan.q.out     |   2 +
 .../clientpositive/llap/mapjoin_mapjoin.q.out   |  12 ++
 .../clientpositive/llap/metadataonly1.q.out     |  40 +++++
 .../clientpositive/llap/optimize_nullscan.q.out |  58 +++++--
 .../clientpositive/llap/ppd_union_view.q.out    |   6 +
 .../llap/reduce_deduplicate.q.out               |   6 +
 .../results/clientpositive/llap/sample1.q.out   |   4 +
 .../results/clientpositive/llap/sample10.q.out  |   8 +
 .../clientpositive/llap/smb_mapjoin_15.q.out    |  16 ++
 .../results/clientpositive/llap/stats11.q.out   |  12 ++
 .../llap/tez_join_result_complex.q.out          |   8 +
 .../clientpositive/llap/unionDistinct_1.q.out   |  32 ++++
 .../clientpositive/llap/union_stats.q.out       |   4 +
 .../clientpositive/llap/vectorization_0.q.out   |   4 +
 .../clientpositive/llap/vectorized_ptf.q.out    |  58 +++++++
 .../results/clientpositive/load_dyn_part8.q.out |  12 ++
 .../clientpositive/louter_join_ppr.q.out        |  28 ++++
 .../clientpositive/mapjoin_mapjoin.q.out        |  12 ++
 ql/src/test/results/clientpositive/merge3.q.out |  26 +++
 .../offset_limit_global_optimizer.q.out         |  52 ++++++
 .../results/clientpositive/outer_join_ppr.q.out |  16 ++
 ql/src/test/results/clientpositive/pcr.q.out    | 120 ++++++++++++++
 ql/src/test/results/clientpositive/pcs.q.out    |  38 +++++
 .../results/clientpositive/pointlookup2.q.out   |  36 +++++
 .../results/clientpositive/pointlookup3.q.out   |  27 ++++
 .../results/clientpositive/pointlookup4.q.out   |   8 +
 .../clientpositive/ppd_join_filter.q.out        |  28 ++++
 ql/src/test/results/clientpositive/ppd_vc.q.out |  21 +++
 .../clientpositive/ppr_allchildsarenull.q.out   |  12 ++
 .../test/results/clientpositive/push_or.q.out   |   4 +
 .../clientpositive/rand_partitionpruner1.q.out  |   2 +
 .../clientpositive/rand_partitionpruner2.q.out  |  12 ++
 .../clientpositive/rand_partitionpruner3.q.out  |   4 +
 .../results/clientpositive/regexp_extract.q.out |   4 +
 .../clientpositive/router_join_ppr.q.out        |  28 ++++
 .../test/results/clientpositive/sample1.q.out   |  10 ++
 .../test/results/clientpositive/sample2.q.out   |  10 ++
 .../test/results/clientpositive/sample4.q.out   |  10 ++
 .../test/results/clientpositive/sample5.q.out   |  10 ++
 .../test/results/clientpositive/sample6.q.out   |  24 +++
 .../test/results/clientpositive/sample7.q.out   |  10 ++
 .../test/results/clientpositive/sample8.q.out   |   8 +
 .../test/results/clientpositive/sample9.q.out   |   2 +
 .../clientpositive/serde_user_properties.q.out  |   6 +
 .../results/clientpositive/smb_mapjoin_11.q.out |   8 +
 .../results/clientpositive/smb_mapjoin_12.q.out |   8 +
 .../results/clientpositive/smb_mapjoin_13.q.out |   4 +
 .../clientpositive/sort_merge_join_desc_5.q.out |   2 +
 .../clientpositive/sort_merge_join_desc_6.q.out |   4 +
 .../clientpositive/sort_merge_join_desc_7.q.out |   8 +
 .../spark/auto_join_reordering_values.q.out     |  10 ++
 .../spark/auto_sortmerge_join_1.q.out           |  14 ++
 .../spark/auto_sortmerge_join_12.q.out          |  10 ++
 .../spark/auto_sortmerge_join_3.q.out           |  10 ++
 .../spark/auto_sortmerge_join_4.q.out           |  10 ++
 .../spark/auto_sortmerge_join_5.q.out           |   8 +
 .../spark/auto_sortmerge_join_7.q.out           |  16 ++
 .../spark/auto_sortmerge_join_8.q.out           |  16 ++
 .../results/clientpositive/spark/bucket2.q.out  |   4 +
 .../results/clientpositive/spark/bucket3.q.out  |   4 +
 .../results/clientpositive/spark/bucket4.q.out  |   4 +
 .../spark/bucket_map_join_1.q.out               |   4 +
 .../spark/bucket_map_join_2.q.out               |   4 +
 .../spark/bucket_map_join_spark1.q.out          |  12 ++
 .../spark/bucket_map_join_spark2.q.out          |  12 ++
 .../spark/bucket_map_join_spark3.q.out          |  12 ++
 .../spark/bucket_map_join_spark4.q.out          |  12 ++
 .../clientpositive/spark/bucketmapjoin1.q.out   |  12 ++
 .../clientpositive/spark/bucketmapjoin10.q.out  |   8 +
 .../clientpositive/spark/bucketmapjoin11.q.out  |  16 ++
 .../clientpositive/spark/bucketmapjoin12.q.out  |   8 +
 .../clientpositive/spark/bucketmapjoin13.q.out  |  18 +++
 .../clientpositive/spark/bucketmapjoin2.q.out   |  20 +++
 .../clientpositive/spark/bucketmapjoin3.q.out   |  12 ++
 .../clientpositive/spark/bucketmapjoin4.q.out   |  12 ++
 .../clientpositive/spark/bucketmapjoin5.q.out   |  16 ++
 .../clientpositive/spark/bucketmapjoin7.q.out   |   4 +
 .../clientpositive/spark/bucketmapjoin8.q.out   |   8 +
 .../clientpositive/spark/bucketmapjoin9.q.out   |   8 +
 .../spark/bucketmapjoin_negative.q.out          |   6 +
 .../spark/bucketmapjoin_negative2.q.out         |   8 +
 .../spark/bucketmapjoin_negative3.q.out         |  36 +++++
 .../spark/disable_merge_for_bucketing.q.out     |   4 +
 .../spark/filter_join_breaktask.q.out           |   6 +
 .../clientpositive/spark/groupby_map_ppr.q.out  |   6 +
 .../spark/groupby_map_ppr_multi_distinct.q.out  |   6 +
 .../clientpositive/spark/groupby_ppr.q.out      |   6 +
 .../spark/groupby_ppr_multi_distinct.q.out      |  12 ++
 .../spark/groupby_sort_1_23.q.out               |  76 +++++++++
 .../spark/groupby_sort_skew_1_23.q.out          |  76 +++++++++
 .../clientpositive/spark/input_part2.q.out      |   8 +
 .../results/clientpositive/spark/join17.q.out   |   6 +
 .../results/clientpositive/spark/join26.q.out   |   8 +
 .../results/clientpositive/spark/join32.q.out   |   8 +
 .../clientpositive/spark/join32_lessSize.q.out  |  34 ++++
 .../results/clientpositive/spark/join33.q.out   |   8 +
 .../results/clientpositive/spark/join34.q.out   |   8 +
 .../results/clientpositive/spark/join35.q.out   |   8 +
 .../results/clientpositive/spark/join9.q.out    |   6 +
 .../spark/join_filters_overlap.q.out            |  34 ++++
 .../clientpositive/spark/join_map_ppr.q.out     |  16 ++
 .../spark/list_bucket_dml_2.q.out               |   8 +
 .../clientpositive/spark/load_dyn_part8.q.out   |  12 ++
 .../clientpositive/spark/louter_join_ppr.q.out  |  28 ++++
 .../clientpositive/spark/mapjoin_mapjoin.q.out  |  12 ++
 .../spark/optimize_nullscan.q.out               |  40 +++++
 .../clientpositive/spark/outer_join_ppr.q.out   |  16 ++
 .../test/results/clientpositive/spark/pcr.q.out |  92 +++++++++++
 .../clientpositive/spark/ppd_join_filter.q.out  |  16 ++
 .../clientpositive/spark/router_join_ppr.q.out  |  28 ++++
 .../results/clientpositive/spark/sample1.q.out  |   4 +
 .../results/clientpositive/spark/sample10.q.out |   8 +
 .../results/clientpositive/spark/sample2.q.out  |   4 +
 .../results/clientpositive/spark/sample4.q.out  |   4 +
 .../results/clientpositive/spark/sample5.q.out  |   4 +
 .../results/clientpositive/spark/sample6.q.out  |  18 +++
 .../results/clientpositive/spark/sample7.q.out  |   4 +
 .../results/clientpositive/spark/sample8.q.out  |  10 ++
 .../clientpositive/spark/smb_mapjoin_11.q.out   |   8 +
 .../clientpositive/spark/smb_mapjoin_12.q.out   |   8 +
 .../clientpositive/spark/smb_mapjoin_13.q.out   |   6 +
 .../clientpositive/spark/smb_mapjoin_15.q.out   |  10 ++
 .../results/clientpositive/spark/stats0.q.out   |   8 +
 .../results/clientpositive/spark/stats12.q.out  |   4 +
 .../results/clientpositive/spark/stats13.q.out  |   2 +
 .../results/clientpositive/spark/stats3.q.out   |   1 +
 .../clientpositive/spark/transform_ppr1.q.out   |   8 +
 .../clientpositive/spark/transform_ppr2.q.out   |   4 +
 .../results/clientpositive/spark/union22.q.out  |   9 ++
 .../results/clientpositive/spark/union24.q.out  |  24 +++
 .../clientpositive/spark/union_ppr.q.out        |   8 +
 .../clientpositive/spark/vectorization_0.q.out  |   4 +
 .../clientpositive/spark/vectorized_ptf.q.out   |  58 +++++++
 ql/src/test/results/clientpositive/stats0.q.out |  14 ++
 .../test/results/clientpositive/stats12.q.out   |   4 +
 .../test/results/clientpositive/stats13.q.out   |   2 +
 ql/src/test/results/clientpositive/stats3.q.out |   1 +
 .../temp_table_display_colstats_tbllvl.q.out    |   2 +
 .../results/clientpositive/transform_ppr1.q.out |   8 +
 .../results/clientpositive/transform_ppr2.q.out |   4 +
 .../truncate_column_list_bucket.q.out           |   4 +
 .../results/clientpositive/udf_explode.q.out    |   4 +
 .../results/clientpositive/udtf_explode.q.out   |  10 ++
 .../test/results/clientpositive/union22.q.out   |  19 +++
 .../test/results/clientpositive/union24.q.out   |  36 +++++
 .../test/results/clientpositive/union_ppr.q.out |   4 +
 .../results/clientpositive/union_stats.q.out    |   2 +
 serde/if/serde.thrift                           |   1 +
 .../src/gen/thrift/gen-cpp/serde_constants.cpp  |   2 +
 serde/src/gen/thrift/gen-cpp/serde_constants.h  |   1 +
 .../hadoop/hive/serde/serdeConstants.java       |   2 +
 .../org/apache/hadoop/hive/serde/Types.php      |   5 +
 .../org_apache_hadoop_hive_serde/constants.py   |   1 +
 serde/src/gen/thrift/gen-rb/serde_constants.rb  |   2 +
 .../serde2/MetadataTypedColumnsetSerDe.java     |   4 +-
 .../apache/hadoop/hive/serde2/RegexSerDe.java   |   5 +-
 .../apache/hadoop/hive/serde2/SerDeUtils.java   |   4 +-
 .../hadoop/hive/serde2/avro/AvroSerDe.java      |   7 +-
 .../hadoop/hive/serde2/avro/AvroSerdeUtils.java |   5 +-
 .../binarysortable/BinarySortableSerDe.java     |   5 +-
 .../hive/serde2/lazy/LazySerDeParameters.java   |   6 +-
 .../hive/serde2/lazybinary/LazyBinarySerDe.java |   5 +-
 .../serde2/thrift/ThriftJDBCBinarySerDe.java    |   5 +-
 300 files changed, 4070 insertions(+), 52 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/hive/blob/588c3911/contrib/src/java/org/apache/hadoop/hive/contrib/serde2/RegexSerDe.java
----------------------------------------------------------------------
diff --git 
a/contrib/src/java/org/apache/hadoop/hive/contrib/serde2/RegexSerDe.java 
b/contrib/src/java/org/apache/hadoop/hive/contrib/serde2/RegexSerDe.java
index 8defe34..f27b0c7 100644
--- a/contrib/src/java/org/apache/hadoop/hive/contrib/serde2/RegexSerDe.java
+++ b/contrib/src/java/org/apache/hadoop/hive/contrib/serde2/RegexSerDe.java
@@ -33,6 +33,7 @@ import org.apache.hadoop.hive.serde2.AbstractSerDe;
 import org.apache.hadoop.hive.serde2.SerDeException;
 import org.apache.hadoop.hive.serde2.SerDeSpec;
 import org.apache.hadoop.hive.serde2.SerDeStats;
+import org.apache.hadoop.hive.serde2.SerDeUtils;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorFactory;
 import org.apache.hadoop.hive.serde2.objectinspector.StructField;
@@ -113,7 +114,9 @@ public class RegexSerDe extends AbstractSerDe {
     } else {
       inputPattern = null;
     }
-    List<String> columnNames = Arrays.asList(columnNameProperty.split(","));
+    final String columnNameDelimiter = 
tbl.containsKey(serdeConstants.COLUMN_NAME_DELIMITER) ? tbl
+        .getProperty(serdeConstants.COLUMN_NAME_DELIMITER) : 
String.valueOf(SerDeUtils.COMMA);
+    List<String> columnNames = 
Arrays.asList(columnNameProperty.split(columnNameDelimiter));
     List<TypeInfo> columnTypes = TypeInfoUtils
         .getTypeInfosFromTypeString(columnTypeProperty);
     assert columnNames.size() == columnTypes.size();

http://git-wip-us.apache.org/repos/asf/hive/blob/588c3911/contrib/src/java/org/apache/hadoop/hive/contrib/serde2/TypedBytesSerDe.java
----------------------------------------------------------------------
diff --git 
a/contrib/src/java/org/apache/hadoop/hive/contrib/serde2/TypedBytesSerDe.java 
b/contrib/src/java/org/apache/hadoop/hive/contrib/serde2/TypedBytesSerDe.java
index 5a018ae..c294747 100644
--- 
a/contrib/src/java/org/apache/hadoop/hive/contrib/serde2/TypedBytesSerDe.java
+++ 
b/contrib/src/java/org/apache/hadoop/hive/contrib/serde2/TypedBytesSerDe.java
@@ -109,8 +109,9 @@ public class TypedBytesSerDe extends AbstractSerDe {
     // Read the configuration parameters
     String columnNameProperty = tbl.getProperty(serdeConstants.LIST_COLUMNS);
     String columnTypeProperty = 
tbl.getProperty(serdeConstants.LIST_COLUMN_TYPES);
-
-    columnNames = Arrays.asList(columnNameProperty.split(","));
+    final String columnNameDelimiter = 
tbl.containsKey(serdeConstants.COLUMN_NAME_DELIMITER) ? tbl
+        .getProperty(serdeConstants.COLUMN_NAME_DELIMITER) : 
String.valueOf(SerDeUtils.COMMA);
+    columnNames = Arrays.asList(columnNameProperty.split(columnNameDelimiter));
     columnTypes = null;
     if (columnTypeProperty.length() == 0) {
       columnTypes = new ArrayList<TypeInfo>();

http://git-wip-us.apache.org/repos/asf/hive/blob/588c3911/hcatalog/core/src/main/java/org/apache/hive/hcatalog/data/HCatRecordSerDe.java
----------------------------------------------------------------------
diff --git 
a/hcatalog/core/src/main/java/org/apache/hive/hcatalog/data/HCatRecordSerDe.java
 
b/hcatalog/core/src/main/java/org/apache/hive/hcatalog/data/HCatRecordSerDe.java
index 235d186..989d6c2 100644
--- 
a/hcatalog/core/src/main/java/org/apache/hive/hcatalog/data/HCatRecordSerDe.java
+++ 
b/hcatalog/core/src/main/java/org/apache/hive/hcatalog/data/HCatRecordSerDe.java
@@ -31,6 +31,7 @@ import org.apache.hadoop.hive.serde2.AbstractSerDe;
 import org.apache.hadoop.hive.serde2.SerDeException;
 import org.apache.hadoop.hive.serde2.SerDeSpec;
 import org.apache.hadoop.hive.serde2.SerDeStats;
+import org.apache.hadoop.hive.serde2.SerDeUtils;
 import org.apache.hadoop.hive.serde2.objectinspector.ListObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.MapObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
@@ -79,12 +80,13 @@ public class HCatRecordSerDe extends AbstractSerDe {
     // Get column names and types
     String columnNameProperty = tbl.getProperty(serdeConstants.LIST_COLUMNS);
     String columnTypeProperty = 
tbl.getProperty(serdeConstants.LIST_COLUMN_TYPES);
-
+    final String columnNameDelimiter = 
tbl.containsKey(serdeConstants.COLUMN_NAME_DELIMITER) ? tbl
+        .getProperty(serdeConstants.COLUMN_NAME_DELIMITER) : 
String.valueOf(SerDeUtils.COMMA);
     // all table column names
     if (columnNameProperty.length() == 0) {
       columnNames = new ArrayList<String>();
     } else {
-      columnNames = Arrays.asList(columnNameProperty.split(","));
+      columnNames = 
Arrays.asList(columnNameProperty.split(columnNameDelimiter));
     }
 
     // all column types

http://git-wip-us.apache.org/repos/asf/hive/blob/588c3911/hcatalog/core/src/main/java/org/apache/hive/hcatalog/data/JsonSerDe.java
----------------------------------------------------------------------
diff --git 
a/hcatalog/core/src/main/java/org/apache/hive/hcatalog/data/JsonSerDe.java 
b/hcatalog/core/src/main/java/org/apache/hive/hcatalog/data/JsonSerDe.java
index ef17079..831e857 100644
--- a/hcatalog/core/src/main/java/org/apache/hive/hcatalog/data/JsonSerDe.java
+++ b/hcatalog/core/src/main/java/org/apache/hive/hcatalog/data/JsonSerDe.java
@@ -114,12 +114,13 @@ public class JsonSerDe extends AbstractSerDe {
     // Get column names and types
     String columnNameProperty = tbl.getProperty(serdeConstants.LIST_COLUMNS);
     String columnTypeProperty = 
tbl.getProperty(serdeConstants.LIST_COLUMN_TYPES);
-
+    final String columnNameDelimiter = 
tbl.containsKey(serdeConstants.COLUMN_NAME_DELIMITER) ? tbl
+        .getProperty(serdeConstants.COLUMN_NAME_DELIMITER) : 
String.valueOf(SerDeUtils.COMMA);
     // all table column names
     if (columnNameProperty.length() == 0) {
       columnNames = new ArrayList<String>();
     } else {
-      columnNames = Arrays.asList(columnNameProperty.split(","));
+      columnNames = 
Arrays.asList(columnNameProperty.split(columnNameDelimiter));
     }
 
     // all column types

http://git-wip-us.apache.org/repos/asf/hive/blob/588c3911/hcatalog/core/src/main/java/org/apache/hive/hcatalog/mapreduce/InternalUtil.java
----------------------------------------------------------------------
diff --git 
a/hcatalog/core/src/main/java/org/apache/hive/hcatalog/mapreduce/InternalUtil.java
 
b/hcatalog/core/src/main/java/org/apache/hive/hcatalog/mapreduce/InternalUtil.java
index 1230795..8fd676f 100644
--- 
a/hcatalog/core/src/main/java/org/apache/hive/hcatalog/mapreduce/InternalUtil.java
+++ 
b/hcatalog/core/src/main/java/org/apache/hive/hcatalog/mapreduce/InternalUtil.java
@@ -25,6 +25,7 @@ import org.apache.hadoop.hive.metastore.api.FieldSchema;
 import org.apache.hadoop.hive.metastore.api.Partition;
 import org.apache.hadoop.hive.metastore.api.StorageDescriptor;
 import org.apache.hadoop.hive.ql.metadata.Table;
+import org.apache.hadoop.hive.serde.serdeConstants;
 import org.apache.hadoop.hive.serde2.Deserializer;
 import org.apache.hadoop.hive.serde2.AbstractSerDe;
 import org.apache.hadoop.hive.serde2.SerDeException;
@@ -162,6 +163,8 @@ class InternalUtil {
     List<FieldSchema> fields = HCatUtil.getFieldSchemaList(s.getFields());
     props.setProperty(org.apache.hadoop.hive.serde.serdeConstants.LIST_COLUMNS,
       MetaStoreUtils.getColumnNamesFromFieldSchema(fields));
+    props.setProperty(serdeConstants.COLUMN_NAME_DELIMITER,
+        MetaStoreUtils.getColumnNameDelimiter(fields));
     
props.setProperty(org.apache.hadoop.hive.serde.serdeConstants.LIST_COLUMN_TYPES,
       MetaStoreUtils.getColumnTypesFromFieldSchema(fields));
     props.setProperty("columns.comments",

http://git-wip-us.apache.org/repos/asf/hive/blob/588c3911/itests/custom-serde/src/main/java/org/apache/hadoop/hive/serde2/CustomSerDe1.java
----------------------------------------------------------------------
diff --git 
a/itests/custom-serde/src/main/java/org/apache/hadoop/hive/serde2/CustomSerDe1.java
 
b/itests/custom-serde/src/main/java/org/apache/hadoop/hive/serde2/CustomSerDe1.java
index c28f096..b122602 100644
--- 
a/itests/custom-serde/src/main/java/org/apache/hadoop/hive/serde2/CustomSerDe1.java
+++ 
b/itests/custom-serde/src/main/java/org/apache/hadoop/hive/serde2/CustomSerDe1.java
@@ -49,9 +49,10 @@ public class CustomSerDe1 extends AbstractSerDe {
     // Read the configuration parameters
     String columnNameProperty = tbl.getProperty(serdeConstants.LIST_COLUMNS);
     String columnTypeProperty = 
tbl.getProperty(serdeConstants.LIST_COLUMN_TYPES);
-
+    final String columnNameDelimiter = 
tbl.containsKey(serdeConstants.COLUMN_NAME_DELIMITER) ? tbl
+        .getProperty(serdeConstants.COLUMN_NAME_DELIMITER) : 
String.valueOf(SerDeUtils.COMMA);
     // The input column can either be a string or a list of integer values.
-    List<String> columnNames = Arrays.asList(columnNameProperty.split(","));
+    List<String> columnNames = 
Arrays.asList(columnNameProperty.split(columnNameDelimiter));
     List<TypeInfo> columnTypes = TypeInfoUtils
         .getTypeInfosFromTypeString(columnTypeProperty);
     assert columnNames.size() == columnTypes.size();

http://git-wip-us.apache.org/repos/asf/hive/blob/588c3911/itests/custom-serde/src/main/java/org/apache/hadoop/hive/serde2/CustomSerDe2.java
----------------------------------------------------------------------
diff --git 
a/itests/custom-serde/src/main/java/org/apache/hadoop/hive/serde2/CustomSerDe2.java
 
b/itests/custom-serde/src/main/java/org/apache/hadoop/hive/serde2/CustomSerDe2.java
index 05d0590..6944fdd 100644
--- 
a/itests/custom-serde/src/main/java/org/apache/hadoop/hive/serde2/CustomSerDe2.java
+++ 
b/itests/custom-serde/src/main/java/org/apache/hadoop/hive/serde2/CustomSerDe2.java
@@ -50,9 +50,10 @@ public class CustomSerDe2 extends AbstractSerDe {
     // Read the configuration parameters
     String columnNameProperty = tbl.getProperty(serdeConstants.LIST_COLUMNS);
     String columnTypeProperty = 
tbl.getProperty(serdeConstants.LIST_COLUMN_TYPES);
-
+    final String columnNameDelimiter = 
tbl.containsKey(serdeConstants.COLUMN_NAME_DELIMITER) ? tbl
+        .getProperty(serdeConstants.COLUMN_NAME_DELIMITER) : 
String.valueOf(SerDeUtils.COMMA);
     // The input column can either be a string or a list of integer values.
-    List<String> columnNames = Arrays.asList(columnNameProperty.split(","));
+    List<String> columnNames = 
Arrays.asList(columnNameProperty.split(columnNameDelimiter));
     List<TypeInfo> columnTypes = TypeInfoUtils
         .getTypeInfosFromTypeString(columnTypeProperty);
     assert columnNames.size() == columnTypes.size();

http://git-wip-us.apache.org/repos/asf/hive/blob/588c3911/itests/custom-serde/src/main/java/org/apache/hadoop/hive/serde2/CustomSerDe3.java
----------------------------------------------------------------------
diff --git 
a/itests/custom-serde/src/main/java/org/apache/hadoop/hive/serde2/CustomSerDe3.java
 
b/itests/custom-serde/src/main/java/org/apache/hadoop/hive/serde2/CustomSerDe3.java
index 311718e..64e821a 100644
--- 
a/itests/custom-serde/src/main/java/org/apache/hadoop/hive/serde2/CustomSerDe3.java
+++ 
b/itests/custom-serde/src/main/java/org/apache/hadoop/hive/serde2/CustomSerDe3.java
@@ -40,9 +40,10 @@ public class CustomSerDe3 extends CustomSerDe1 {
     // Read the configuration parameters
     String columnNameProperty = tbl.getProperty(serdeConstants.LIST_COLUMNS);
     String columnTypeProperty = 
tbl.getProperty(serdeConstants.LIST_COLUMN_TYPES);
-
-    // The input column can either be a string or a list of list of integer 
values.
-    List<String> columnNames = Arrays.asList(columnNameProperty.split(","));
+    final String columnNameDelimiter = 
tbl.containsKey(serdeConstants.COLUMN_NAME_DELIMITER) ? tbl
+        .getProperty(serdeConstants.COLUMN_NAME_DELIMITER) : 
String.valueOf(SerDeUtils.COMMA);
+    // The input column can either be a string or a list of integer values.
+    List<String> columnNames = 
Arrays.asList(columnNameProperty.split(columnNameDelimiter));
     List<TypeInfo> columnTypes = TypeInfoUtils
         .getTypeInfosFromTypeString(columnTypeProperty);
     assert columnNames.size() == columnTypes.size();

http://git-wip-us.apache.org/repos/asf/hive/blob/588c3911/itests/custom-serde/src/main/java/org/apache/hadoop/hive/serde2/CustomSerDe4.java
----------------------------------------------------------------------
diff --git 
a/itests/custom-serde/src/main/java/org/apache/hadoop/hive/serde2/CustomSerDe4.java
 
b/itests/custom-serde/src/main/java/org/apache/hadoop/hive/serde2/CustomSerDe4.java
index 3504f5b..1c16e60 100644
--- 
a/itests/custom-serde/src/main/java/org/apache/hadoop/hive/serde2/CustomSerDe4.java
+++ 
b/itests/custom-serde/src/main/java/org/apache/hadoop/hive/serde2/CustomSerDe4.java
@@ -41,9 +41,10 @@ public class CustomSerDe4 extends CustomSerDe2 {
       // Read the configuration parameters
       String columnNameProperty = tbl.getProperty(serdeConstants.LIST_COLUMNS);
       String columnTypeProperty = 
tbl.getProperty(serdeConstants.LIST_COLUMN_TYPES);
-
+      final String columnNameDelimiter = 
tbl.containsKey(serdeConstants.COLUMN_NAME_DELIMITER) ? tbl
+          .getProperty(serdeConstants.COLUMN_NAME_DELIMITER) : 
String.valueOf(SerDeUtils.COMMA);
       // The input column can either be a string or a list of integer values.
-      List<String> columnNames = Arrays.asList(columnNameProperty.split(","));
+      List<String> columnNames = 
Arrays.asList(columnNameProperty.split(columnNameDelimiter));
       List<TypeInfo> columnTypes = TypeInfoUtils
           .getTypeInfosFromTypeString(columnTypeProperty);
       assert columnNames.size() == columnTypes.size();

http://git-wip-us.apache.org/repos/asf/hive/blob/588c3911/itests/custom-serde/src/main/java/org/apache/hadoop/hive/serde2/CustomSerDe5.java
----------------------------------------------------------------------
diff --git 
a/itests/custom-serde/src/main/java/org/apache/hadoop/hive/serde2/CustomSerDe5.java
 
b/itests/custom-serde/src/main/java/org/apache/hadoop/hive/serde2/CustomSerDe5.java
index 670e5f2..f8f2a85 100644
--- 
a/itests/custom-serde/src/main/java/org/apache/hadoop/hive/serde2/CustomSerDe5.java
+++ 
b/itests/custom-serde/src/main/java/org/apache/hadoop/hive/serde2/CustomSerDe5.java
@@ -39,9 +39,10 @@ public class CustomSerDe5 extends CustomSerDe4 {
       // Read the configuration parameters
       String columnNameProperty = tbl.getProperty(serdeConstants.LIST_COLUMNS);
       String columnTypeProperty = 
tbl.getProperty(serdeConstants.LIST_COLUMN_TYPES);
-
+      final String columnNameDelimiter = 
tbl.containsKey(serdeConstants.COLUMN_NAME_DELIMITER) ? tbl
+          .getProperty(serdeConstants.COLUMN_NAME_DELIMITER) : 
String.valueOf(SerDeUtils.COMMA);
       // The input column can either be a string or a list of integer values.
-      List<String> columnNames = Arrays.asList(columnNameProperty.split(","));
+      List<String> columnNames = 
Arrays.asList(columnNameProperty.split(columnNameDelimiter));
       List<TypeInfo> columnTypes = TypeInfoUtils
           .getTypeInfosFromTypeString(columnTypeProperty);
       assert columnNames.size() == columnTypes.size();

http://git-wip-us.apache.org/repos/asf/hive/blob/588c3911/metastore/src/java/org/apache/hadoop/hive/metastore/MetaStoreUtils.java
----------------------------------------------------------------------
diff --git 
a/metastore/src/java/org/apache/hadoop/hive/metastore/MetaStoreUtils.java 
b/metastore/src/java/org/apache/hadoop/hive/metastore/MetaStoreUtils.java
index 4aea152..b21b9ed 100644
--- a/metastore/src/java/org/apache/hadoop/hive/metastore/MetaStoreUtils.java
+++ b/metastore/src/java/org/apache/hadoop/hive/metastore/MetaStoreUtils.java
@@ -1050,9 +1050,10 @@ public class MetaStoreUtils {
     StringBuilder colComment = new StringBuilder();
 
     boolean first = true;
+    String columnNameDelimiter = getColumnNameDelimiter(cols);
     for (FieldSchema col : cols) {
       if (!first) {
-        colNameBuf.append(",");
+        colNameBuf.append(columnNameDelimiter);
         colTypeBuf.append(":");
         colComment.append('\0');
       }
@@ -1064,6 +1065,7 @@ public class MetaStoreUtils {
     schema.setProperty(
         
org.apache.hadoop.hive.metastore.api.hive_metastoreConstants.META_TABLE_COLUMNS,
         colNameBuf.toString());
+    schema.setProperty(serdeConstants.COLUMN_NAME_DELIMITER, 
columnNameDelimiter);
     String colTypes = colTypeBuf.toString();
     schema.setProperty(
         
org.apache.hadoop.hive.metastore.api.hive_metastoreConstants.META_TABLE_COLUMN_TYPES,
@@ -1177,15 +1179,25 @@ public class MetaStoreUtils {
     return addCols(getSchemaWithoutCols(sd, tblsd, parameters, databaseName, 
tableName, partitionKeys), tblsd.getCols());
   }
 
+  public static String getColumnNameDelimiter(List<FieldSchema> fieldSchemas) {
+    // we first take a look if any fieldSchemas contain COMMA
+    for (int i = 0; i < fieldSchemas.size(); i++) {
+      if (fieldSchemas.get(i).getName().contains(",")) {
+        return String.valueOf(SerDeUtils.COLUMN_COMMENTS_DELIMITER);
+      }
+    }
+    return String.valueOf(SerDeUtils.COMMA);
+  }
+  
   /**
    * Convert FieldSchemas to columnNames.
    */
-  public static String getColumnNamesFromFieldSchema(
-      List<FieldSchema> fieldSchemas) {
+  public static String getColumnNamesFromFieldSchema(List<FieldSchema> 
fieldSchemas) {
+    String delimiter = getColumnNameDelimiter(fieldSchemas);
     StringBuilder sb = new StringBuilder();
     for (int i = 0; i < fieldSchemas.size(); i++) {
       if (i > 0) {
-        sb.append(",");
+        sb.append(delimiter);
       }
       sb.append(fieldSchemas.get(i).getName());
     }

http://git-wip-us.apache.org/repos/asf/hive/blob/588c3911/ql/src/java/org/apache/hadoop/hive/ql/io/orc/OrcOutputFormat.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/io/orc/OrcOutputFormat.java 
b/ql/src/java/org/apache/hadoop/hive/ql/io/orc/OrcOutputFormat.java
index b0f8c8b..a179300 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/io/orc/OrcOutputFormat.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/io/orc/OrcOutputFormat.java
@@ -36,7 +36,9 @@ import org.apache.hadoop.hive.ql.io.IOConstants;
 import org.apache.hadoop.hive.ql.io.RecordUpdater;
 import org.apache.hadoop.hive.ql.io.StatsProvidingRecordWriter;
 import org.apache.hadoop.hive.ql.io.orc.OrcSerde.OrcSerdeRow;
+import org.apache.hadoop.hive.serde.serdeConstants;
 import org.apache.hadoop.hive.serde2.SerDeStats;
+import org.apache.hadoop.hive.serde2.SerDeUtils;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorFactory;
 import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector;
@@ -141,11 +143,12 @@ public class OrcOutputFormat extends 
FileOutputFormat<NullWritable, OrcSerdeRow>
           !columnTypeProperty.isEmpty()) {
         List<String> columnNames;
         List<TypeInfo> columnTypes;
-
+        final String columnNameDelimiter = 
props.containsKey(serdeConstants.COLUMN_NAME_DELIMITER) ? props
+            .getProperty(serdeConstants.COLUMN_NAME_DELIMITER) : 
String.valueOf(SerDeUtils.COMMA);
         if (columnNameProperty.length() == 0) {
           columnNames = new ArrayList<String>();
         } else {
-          columnNames = Arrays.asList(columnNameProperty.split(","));
+          columnNames = 
Arrays.asList(columnNameProperty.split(columnNameDelimiter));
         }
 
         if (columnTypeProperty.length() == 0) {

http://git-wip-us.apache.org/repos/asf/hive/blob/588c3911/ql/src/java/org/apache/hadoop/hive/ql/io/orc/OrcSerde.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/io/orc/OrcSerde.java 
b/ql/src/java/org/apache/hadoop/hive/ql/io/orc/OrcSerde.java
index 3ec9105..6dae512 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/io/orc/OrcSerde.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/io/orc/OrcSerde.java
@@ -34,6 +34,7 @@ import org.apache.hadoop.hive.serde2.AbstractSerDe;
 import org.apache.hadoop.hive.serde2.SerDeException;
 import org.apache.hadoop.hive.serde2.SerDeSpec;
 import org.apache.hadoop.hive.serde2.SerDeStats;
+import org.apache.hadoop.hive.serde2.SerDeUtils;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
 import org.apache.hadoop.hive.serde2.typeinfo.StructTypeInfo;
 import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;
@@ -84,13 +85,14 @@ public class OrcSerde extends VectorizedSerde {
     String columnNameProperty = table.getProperty(serdeConstants.LIST_COLUMNS);
     // NOTE: if "columns.types" is missing, all columns will be of String type
     String columnTypeProperty = 
table.getProperty(serdeConstants.LIST_COLUMN_TYPES);
-
+    final String columnNameDelimiter = 
table.containsKey(serdeConstants.COLUMN_NAME_DELIMITER) ? table
+        .getProperty(serdeConstants.COLUMN_NAME_DELIMITER) : 
String.valueOf(SerDeUtils.COMMA);
     String compressType = OrcConf.COMPRESS.getString(table, conf);
 
     // Parse the configuration parameters
     ArrayList<String> columnNames = new ArrayList<String>();
     if (columnNameProperty != null && columnNameProperty.length() > 0) {
-      for (String name : columnNameProperty.split(",")) {
+      for (String name : columnNameProperty.split(columnNameDelimiter)) {
         columnNames.add(name);
       }
     }

http://git-wip-us.apache.org/repos/asf/hive/blob/588c3911/ql/src/java/org/apache/hadoop/hive/ql/io/parquet/MapredParquetOutputFormat.java
----------------------------------------------------------------------
diff --git 
a/ql/src/java/org/apache/hadoop/hive/ql/io/parquet/MapredParquetOutputFormat.java
 
b/ql/src/java/org/apache/hadoop/hive/ql/io/parquet/MapredParquetOutputFormat.java
index bfb48a9..379a913 100644
--- 
a/ql/src/java/org/apache/hadoop/hive/ql/io/parquet/MapredParquetOutputFormat.java
+++ 
b/ql/src/java/org/apache/hadoop/hive/ql/io/parquet/MapredParquetOutputFormat.java
@@ -28,6 +28,8 @@ import org.apache.hadoop.hive.ql.io.IOConstants;
 import org.apache.hadoop.hive.ql.io.parquet.convert.HiveSchemaConverter;
 import org.apache.hadoop.hive.ql.io.parquet.write.DataWritableWriteSupport;
 import org.apache.hadoop.hive.ql.io.parquet.write.ParquetRecordWriterWrapper;
+import org.apache.hadoop.hive.serde.serdeConstants;
+import org.apache.hadoop.hive.serde2.SerDeUtils;
 import org.apache.hadoop.hive.serde2.io.ParquetHiveRecord;
 import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;
 import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoUtils;
@@ -39,7 +41,6 @@ import org.apache.hadoop.mapred.JobConf;
 import org.apache.hadoop.mapred.RecordWriter;
 import org.apache.hadoop.mapreduce.OutputFormat;
 import org.apache.hadoop.util.Progressable;
-
 import org.apache.parquet.hadoop.ParquetOutputFormat;
 
 /**
@@ -97,11 +98,12 @@ public class MapredParquetOutputFormat extends 
FileOutputFormat<NullWritable, Pa
     final String columnTypeProperty = 
tableProperties.getProperty(IOConstants.COLUMNS_TYPES);
     List<String> columnNames;
     List<TypeInfo> columnTypes;
-
+    final String columnNameDelimiter = 
tableProperties.containsKey(serdeConstants.COLUMN_NAME_DELIMITER) ? 
tableProperties
+        .getProperty(serdeConstants.COLUMN_NAME_DELIMITER) : 
String.valueOf(SerDeUtils.COMMA);
     if (columnNameProperty.length() == 0) {
       columnNames = new ArrayList<String>();
     } else {
-      columnNames = Arrays.asList(columnNameProperty.split(","));
+      columnNames = 
Arrays.asList(columnNameProperty.split(columnNameDelimiter));
     }
 
     if (columnTypeProperty.length() == 0) {

http://git-wip-us.apache.org/repos/asf/hive/blob/588c3911/ql/src/java/org/apache/hadoop/hive/ql/io/parquet/serde/ParquetHiveSerDe.java
----------------------------------------------------------------------
diff --git 
a/ql/src/java/org/apache/hadoop/hive/ql/io/parquet/serde/ParquetHiveSerDe.java 
b/ql/src/java/org/apache/hadoop/hive/ql/io/parquet/serde/ParquetHiveSerDe.java
index 5870a50..6413c5a 100644
--- 
a/ql/src/java/org/apache/hadoop/hive/ql/io/parquet/serde/ParquetHiveSerDe.java
+++ 
b/ql/src/java/org/apache/hadoop/hive/ql/io/parquet/serde/ParquetHiveSerDe.java
@@ -21,6 +21,7 @@ import java.util.Map;
 import java.util.Properties;
 
 import com.google.common.base.Preconditions;
+
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hive.ql.optimizer.FieldNode;
 import org.apache.hadoop.hive.serde.serdeConstants;
@@ -29,6 +30,7 @@ import org.apache.hadoop.hive.serde2.ColumnProjectionUtils;
 import org.apache.hadoop.hive.serde2.SerDeException;
 import org.apache.hadoop.hive.serde2.SerDeSpec;
 import org.apache.hadoop.hive.serde2.SerDeStats;
+import org.apache.hadoop.hive.serde2.SerDeUtils;
 import org.apache.hadoop.hive.serde2.io.ParquetHiveRecord;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector.Category;
@@ -95,11 +97,12 @@ public class ParquetHiveSerDe extends AbstractSerDe {
     // Get column names and sort order
     final String columnNameProperty = 
tbl.getProperty(serdeConstants.LIST_COLUMNS);
     final String columnTypeProperty = 
tbl.getProperty(serdeConstants.LIST_COLUMN_TYPES);
-
+    final String columnNameDelimiter = 
tbl.containsKey(serdeConstants.COLUMN_NAME_DELIMITER) ? tbl
+        .getProperty(serdeConstants.COLUMN_NAME_DELIMITER) : 
String.valueOf(SerDeUtils.COMMA);
     if (columnNameProperty.length() == 0) {
       columnNames = new ArrayList<String>();
     } else {
-      columnNames = Arrays.asList(columnNameProperty.split(","));
+      columnNames = 
Arrays.asList(columnNameProperty.split(columnNameDelimiter));
     }
     if (columnTypeProperty.length() == 0) {
       columnTypes = new ArrayList<TypeInfo>();

http://git-wip-us.apache.org/repos/asf/hive/blob/588c3911/ql/src/java/org/apache/hadoop/hive/ql/plan/PlanUtils.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/plan/PlanUtils.java 
b/ql/src/java/org/apache/hadoop/hive/ql/plan/PlanUtils.java
index d3a1528..b2c5865 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/plan/PlanUtils.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/plan/PlanUtils.java
@@ -469,6 +469,7 @@ public final class PlanUtils {
         SequenceFileInputFormat.class, SequenceFileOutputFormat.class,
         Utilities.makeProperties(serdeConstants.LIST_COLUMNS, MetaStoreUtils
         .getColumnNamesFromFieldSchema(fieldSchemas),
+        serdeConstants.COLUMN_NAME_DELIMITER, 
MetaStoreUtils.getColumnNameDelimiter(fieldSchemas),
         serdeConstants.LIST_COLUMN_TYPES, MetaStoreUtils
         .getColumnTypesFromFieldSchema(fieldSchemas),
         serdeConstants.SERIALIZATION_SORT_ORDER, order,
@@ -496,6 +497,7 @@ public final class PlanUtils {
           SequenceFileInputFormat.class, SequenceFileOutputFormat.class,
           Utilities.makeProperties(serdeConstants.LIST_COLUMNS, MetaStoreUtils
               .getColumnNamesFromFieldSchema(fieldSchemas),
+              serdeConstants.COLUMN_NAME_DELIMITER, 
MetaStoreUtils.getColumnNameDelimiter(fieldSchemas),
               serdeConstants.LIST_COLUMN_TYPES, MetaStoreUtils
               .getColumnTypesFromFieldSchema(fieldSchemas),
               serdeConstants.SERIALIZATION_SORT_ORDER, order.toString(),
@@ -521,6 +523,7 @@ public final class PlanUtils {
           SequenceFileOutputFormat.class, Utilities.makeProperties(
               serdeConstants.LIST_COLUMNS, MetaStoreUtils
               .getColumnNamesFromFieldSchema(fieldSchemas),
+              serdeConstants.COLUMN_NAME_DELIMITER, 
MetaStoreUtils.getColumnNameDelimiter(fieldSchemas),
               serdeConstants.LIST_COLUMN_TYPES, MetaStoreUtils
               .getColumnTypesFromFieldSchema(fieldSchemas),
               serdeConstants.ESCAPE_CHAR, "\\",
@@ -536,6 +539,7 @@
        SequenceFileOutputFormat.class, Utilities.makeProperties(
        serdeConstants.LIST_COLUMNS, MetaStoreUtils
        .getColumnNamesFromFieldSchema(fieldSchemas),
+        serdeConstants.COLUMN_NAME_DELIMITER, 
MetaStoreUtils.getColumnNameDelimiter(fieldSchemas),
        serdeConstants.LIST_COLUMN_TYPES, MetaStoreUtils
        .getColumnTypesFromFieldSchema(fieldSchemas),
        serdeConstants.ESCAPE_CHAR, "\\",

http://git-wip-us.apache.org/repos/asf/hive/blob/588c3911/ql/src/test/queries/clientpositive/comma_in_column_name.q
----------------------------------------------------------------------
diff --git a/ql/src/test/queries/clientpositive/comma_in_column_name.q 
b/ql/src/test/queries/clientpositive/comma_in_column_name.q
new file mode 100644
index 0000000..cb8823e
--- /dev/null
+++ b/ql/src/test/queries/clientpositive/comma_in_column_name.q
@@ -0,0 +1,14 @@
+create table test (`x,y` int);
+
+insert into test values (1),(2);
+
+select `x,y` from test where `x,y` >=2 ;
+
+drop table test; 
+
+create table test (`x,y` int) stored as orc;
+
+insert into test values (1),(2);
+
+select `x,y` from test where `x,y` <2 ;
+

http://git-wip-us.apache.org/repos/asf/hive/blob/588c3911/ql/src/test/results/clientpositive/alter_partition_coltype.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/alter_partition_coltype.q.out 
b/ql/src/test/results/clientpositive/alter_partition_coltype.q.out
index 0ebbdc0..daa6255 100644
--- a/ql/src/test/results/clientpositive/alter_partition_coltype.q.out
+++ b/ql/src/test/results/clientpositive/alter_partition_coltype.q.out
@@ -234,6 +234,7 @@ STAGE PLANS:
             properties:
               COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
               bucket_count -1
+              column.name.delimiter ,
               columns key,value
               columns.comments 
               columns.types string:string
@@ -255,6 +256,7 @@ STAGE PLANS:
               output format: 
org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
               properties:
                 bucket_count -1
+                column.name.delimiter ,
                 columns key,value
                 columns.comments 
                 columns.types string:string
@@ -278,6 +280,7 @@ STAGE PLANS:
             properties:
               COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
               bucket_count -1
+              column.name.delimiter ,
               columns key,value
               columns.comments 
               columns.types string:string
@@ -299,6 +302,7 @@ STAGE PLANS:
               output format: 
org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
               properties:
                 bucket_count -1
+                column.name.delimiter ,
                 columns key,value
                 columns.comments 
                 columns.types string:string
@@ -467,6 +471,7 @@ STAGE PLANS:
             properties:
               COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
               bucket_count -1
+              column.name.delimiter ,
               columns intcol
               columns.comments 
               columns.types string
@@ -488,6 +493,7 @@ STAGE PLANS:
               output format: 
org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
               properties:
                 bucket_count -1
+                column.name.delimiter ,
                 columns intcol
                 columns.comments 
                 columns.types string
@@ -534,6 +540,7 @@ STAGE PLANS:
             properties:
               COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
               bucket_count -1
+              column.name.delimiter ,
               columns intcol
               columns.comments 
               columns.types string
@@ -555,6 +562,7 @@ STAGE PLANS:
               output format: 
org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
               properties:
                 bucket_count -1
+                column.name.delimiter ,
                 columns intcol
                 columns.comments 
                 columns.types string

http://git-wip-us.apache.org/repos/asf/hive/blob/588c3911/ql/src/test/results/clientpositive/analyze_table_null_partition.q.out
----------------------------------------------------------------------
diff --git 
a/ql/src/test/results/clientpositive/analyze_table_null_partition.q.out 
b/ql/src/test/results/clientpositive/analyze_table_null_partition.q.out
index c6691d6..2bfc04a 100644
--- a/ql/src/test/results/clientpositive/analyze_table_null_partition.q.out
+++ b/ql/src/test/results/clientpositive/analyze_table_null_partition.q.out
@@ -104,6 +104,7 @@ STAGE PLANS:
             properties:
               COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
               bucket_count -1
+              column.name.delimiter ,
               columns name
               columns.comments 
               columns.types string
@@ -125,6 +126,7 @@ STAGE PLANS:
               output format: 
org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
               properties:
                 bucket_count -1
+                column.name.delimiter ,
                 columns name
                 columns.comments 
                 columns.types string
@@ -147,6 +149,7 @@ STAGE PLANS:
             properties:
               COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
               bucket_count -1
+              column.name.delimiter ,
               columns name
               columns.comments 
               columns.types string
@@ -168,6 +171,7 @@ STAGE PLANS:
               output format: 
org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
               properties:
                 bucket_count -1
+                column.name.delimiter ,
                 columns name
                 columns.comments 
                 columns.types string
@@ -190,6 +194,7 @@ STAGE PLANS:
             properties:
               COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
               bucket_count -1
+              column.name.delimiter ,
               columns name
               columns.comments 
               columns.types string
@@ -211,6 +216,7 @@ STAGE PLANS:
               output format: 
org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
               properties:
                 bucket_count -1
+                column.name.delimiter ,
                 columns name
                 columns.comments 
                 columns.types string
@@ -233,6 +239,7 @@ STAGE PLANS:
             properties:
               COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
               bucket_count -1
+              column.name.delimiter ,
               columns name
               columns.comments 
               columns.types string
@@ -254,6 +261,7 @@ STAGE PLANS:
               output format: 
org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
               properties:
                 bucket_count -1
+                column.name.delimiter ,
                 columns name
                 columns.comments 
                 columns.types string

http://git-wip-us.apache.org/repos/asf/hive/blob/588c3911/ql/src/test/results/clientpositive/autoColumnStats_8.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/autoColumnStats_8.q.out 
b/ql/src/test/results/clientpositive/autoColumnStats_8.q.out
index 3989c4b..c6f6127 100644
--- a/ql/src/test/results/clientpositive/autoColumnStats_8.q.out
+++ b/ql/src/test/results/clientpositive/autoColumnStats_8.q.out
@@ -82,6 +82,7 @@ STAGE PLANS:
                       output format: 
org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
                       properties:
                         bucket_count -1
+                        column.name.delimiter ,
                         columns key,value
                         columns.comments 'default','default'
                         columns.types string:string
@@ -138,6 +139,7 @@ STAGE PLANS:
                       output format: 
org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
                       properties:
                         bucket_count -1
+                        column.name.delimiter ,
                         columns key,value
                         columns.comments 'default','default'
                         columns.types string:string
@@ -173,6 +175,7 @@ STAGE PLANS:
                           input format: 
org.apache.hadoop.mapred.SequenceFileInputFormat
                           output format: 
org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
                           properties:
+                            column.name.delimiter ,
                             columns _col0,_col1,_col2,_col3
                             columns.types 
string,string,struct<columntype:string,maxlength:bigint,sumlength:bigint,count:bigint,countnulls:bigint,bitvector:string,numbitvectors:int>,struct<columntype:string,maxlength:bigint,sumlength:bigint,count:bigint,countnulls:bigint,bitvector:string,numbitvectors:int>
                             escape.delim \
@@ -195,6 +198,7 @@ STAGE PLANS:
             properties:
               COLUMN_STATS_ACCURATE 
{"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
               bucket_count -1
+              column.name.delimiter ,
               columns key,value
               columns.comments 'default','default'
               columns.types string:string
@@ -216,6 +220,7 @@ STAGE PLANS:
               output format: 
org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
               properties:
                 bucket_count -1
+                column.name.delimiter ,
                 columns key,value
                 columns.comments 'default','default'
                 columns.types string:string
@@ -241,6 +246,7 @@ STAGE PLANS:
             properties:
               COLUMN_STATS_ACCURATE 
{"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
               bucket_count -1
+              column.name.delimiter ,
               columns key,value
               columns.comments 'default','default'
               columns.types string:string
@@ -262,6 +268,7 @@ STAGE PLANS:
               output format: 
org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
               properties:
                 bucket_count -1
+                column.name.delimiter ,
                 columns key,value
                 columns.comments 'default','default'
                 columns.types string:string
@@ -287,6 +294,7 @@ STAGE PLANS:
             properties:
               COLUMN_STATS_ACCURATE 
{"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
               bucket_count -1
+              column.name.delimiter ,
               columns key,value
               columns.comments 'default','default'
               columns.types string:string
@@ -308,6 +316,7 @@ STAGE PLANS:
               output format: 
org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
               properties:
                 bucket_count -1
+                column.name.delimiter ,
                 columns key,value
                 columns.comments 'default','default'
                 columns.types string:string
@@ -333,6 +342,7 @@ STAGE PLANS:
             properties:
               COLUMN_STATS_ACCURATE 
{"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
               bucket_count -1
+              column.name.delimiter ,
               columns key,value
               columns.comments 'default','default'
               columns.types string:string
@@ -354,6 +364,7 @@ STAGE PLANS:
               output format: 
org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
               properties:
                 bucket_count -1
+                column.name.delimiter ,
                 columns key,value
                 columns.comments 'default','default'
                 columns.types string:string
@@ -421,6 +432,7 @@ STAGE PLANS:
               output format: 
org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
               properties:
                 bucket_count -1
+                column.name.delimiter ,
                 columns key,value
                 columns.comments 'default','default'
                 columns.types string:string
@@ -468,6 +480,7 @@ STAGE PLANS:
               output format: 
org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
               properties:
                 bucket_count -1
+                column.name.delimiter ,
                 columns key,value
                 columns.comments 'default','default'
                 columns.types string:string
@@ -509,6 +522,7 @@ STAGE PLANS:
             input format: org.apache.hadoop.mapred.SequenceFileInputFormat
             output format: 
org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
             properties:
+              column.name.delimiter ,
               columns _col0,_col1,_col2,_col3
               columns.types 
string,string,struct<columntype:string,maxlength:bigint,sumlength:bigint,count:bigint,countnulls:bigint,bitvector:string,numbitvectors:int>,struct<columntype:string,maxlength:bigint,sumlength:bigint,count:bigint,countnulls:bigint,bitvector:string,numbitvectors:int>
               escape.delim \
@@ -518,6 +532,7 @@ STAGE PLANS:
               input format: org.apache.hadoop.mapred.SequenceFileInputFormat
               output format: 
org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
               properties:
+                column.name.delimiter ,
                 columns _col0,_col1,_col2,_col3
                 columns.types 
string,string,struct<columntype:string,maxlength:bigint,sumlength:bigint,count:bigint,countnulls:bigint,bitvector:string,numbitvectors:int>,struct<columntype:string,maxlength:bigint,sumlength:bigint,count:bigint,countnulls:bigint,bitvector:string,numbitvectors:int>
                 escape.delim \

http://git-wip-us.apache.org/repos/asf/hive/blob/588c3911/ql/src/test/results/clientpositive/auto_join_reordering_values.q.out
----------------------------------------------------------------------
diff --git 
a/ql/src/test/results/clientpositive/auto_join_reordering_values.q.out 
b/ql/src/test/results/clientpositive/auto_join_reordering_values.q.out
index 214c1df..156be41 100644
--- a/ql/src/test/results/clientpositive/auto_join_reordering_values.q.out
+++ b/ql/src/test/results/clientpositive/auto_join_reordering_values.q.out
@@ -143,6 +143,7 @@ STAGE PLANS:
             properties:
               COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
               bucket_count -1
+              column.name.delimiter ,
               columns dealid,date,time,cityid,userid
               columns.comments 
               columns.types int:string:string:int:int
@@ -163,6 +164,7 @@ STAGE PLANS:
               properties:
                 COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
                 bucket_count -1
+                column.name.delimiter ,
                 columns dealid,date,time,cityid,userid
                 columns.comments 
                 columns.types int:string:string:int:int
@@ -200,6 +202,7 @@ STAGE PLANS:
                 input format: org.apache.hadoop.mapred.SequenceFileInputFormat
                 output format: 
org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
                 properties:
+                  column.name.delimiter ,
                   columns _col0,_col2,_col3,_col4
                   columns.types int,int,int,string
                   escape.delim \
@@ -252,6 +255,7 @@ STAGE PLANS:
             input format: org.apache.hadoop.mapred.SequenceFileInputFormat
             output format: 
org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
             properties:
+              column.name.delimiter ,
               columns _col0,_col2,_col3,_col4
               columns.types int,int,int,string
               escape.delim \
@@ -261,6 +265,7 @@ STAGE PLANS:
               input format: org.apache.hadoop.mapred.SequenceFileInputFormat
               output format: 
org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
               properties:
+                column.name.delimiter ,
                 columns _col0,_col2,_col3,_col4
                 columns.types int,int,int,string
                 escape.delim \
@@ -274,6 +279,7 @@ STAGE PLANS:
             properties:
               COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
               bucket_count -1
+              column.name.delimiter ,
               columns dealid,date,time,cityid,userid
               columns.comments 
               columns.types int:string:string:int:int
@@ -294,6 +300,7 @@ STAGE PLANS:
               properties:
                 COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
                 bucket_count -1
+                column.name.delimiter ,
                 columns dealid,date,time,cityid,userid
                 columns.comments 
                 columns.types int:string:string:int:int
@@ -332,6 +339,7 @@ STAGE PLANS:
                 input format: org.apache.hadoop.mapred.SequenceFileInputFormat
                 output format: 
org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
                 properties:
+                  column.name.delimiter ,
                   columns _col2,_col3,_col4,_col5
                   columns.types int,int,string,int
                   escape.delim \
@@ -384,6 +392,7 @@ STAGE PLANS:
             input format: org.apache.hadoop.mapred.SequenceFileInputFormat
             output format: 
org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
             properties:
+              column.name.delimiter ,
               columns _col2,_col3,_col4,_col5
               columns.types int,int,string,int
               escape.delim \
@@ -393,6 +402,7 @@ STAGE PLANS:
               input format: org.apache.hadoop.mapred.SequenceFileInputFormat
               output format: 
org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
               properties:
+                column.name.delimiter ,
                 columns _col2,_col3,_col4,_col5
                 columns.types int,int,string,int
                 escape.delim \
@@ -406,6 +416,7 @@ STAGE PLANS:
             properties:
               COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
               bucket_count -1
+              column.name.delimiter ,
               columns dealid,date,time,cityid,userid
               columns.comments 
               columns.types int:string:string:int:int
@@ -426,6 +437,7 @@ STAGE PLANS:
               properties:
                 COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
                 bucket_count -1
+                column.name.delimiter ,
                 columns dealid,date,time,cityid,userid
                 columns.comments 
                 columns.types int:string:string:int:int
@@ -464,6 +476,7 @@ STAGE PLANS:
                 input format: org.apache.hadoop.mapred.SequenceFileInputFormat
                 output format: 
org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
                 properties:
+                  column.name.delimiter ,
                   columns _col3,_col4,_col5
                   columns.types int,string,int
                   escape.delim \
@@ -516,6 +529,7 @@ STAGE PLANS:
             input format: org.apache.hadoop.mapred.SequenceFileInputFormat
             output format: 
org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
             properties:
+              column.name.delimiter ,
               columns _col3,_col4,_col5
               columns.types int,string,int
               escape.delim \
@@ -525,6 +539,7 @@ STAGE PLANS:
               input format: org.apache.hadoop.mapred.SequenceFileInputFormat
               output format: 
org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
               properties:
+                column.name.delimiter ,
                 columns _col3,_col4,_col5
                 columns.types int,string,int
                 escape.delim \
@@ -538,6 +553,7 @@ STAGE PLANS:
             properties:
               COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
               bucket_count -1
+              column.name.delimiter ,
               columns userid
               columns.comments 
               columns.types int
@@ -558,6 +574,7 @@ STAGE PLANS:
               properties:
                 COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
                 bucket_count -1
+                column.name.delimiter ,
                 columns userid
                 columns.comments 
                 columns.types int

http://git-wip-us.apache.org/repos/asf/hive/blob/588c3911/ql/src/test/results/clientpositive/auto_sortmerge_join_1.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/auto_sortmerge_join_1.q.out 
b/ql/src/test/results/clientpositive/auto_sortmerge_join_1.q.out
index 6e4d112..d847937 100644
--- a/ql/src/test/results/clientpositive/auto_sortmerge_join_1.q.out
+++ b/ql/src/test/results/clientpositive/auto_sortmerge_join_1.q.out
@@ -154,6 +154,7 @@ STAGE PLANS:
             properties:
               bucket_count 4
               bucket_field_name key
+              column.name.delimiter ,
               columns key,value
               columns.comments 
               columns.types string:string
@@ -177,6 +178,7 @@ STAGE PLANS:
                 SORTBUCKETCOLSPREFIX TRUE
                 bucket_count 4
                 bucket_field_name key
+                column.name.delimiter ,
                 columns key,value
                 columns.comments 
                 columns.types string:string
@@ -201,6 +203,7 @@ STAGE PLANS:
             properties:
               bucket_count 4
               bucket_field_name key
+              column.name.delimiter ,
               columns key,value
               columns.comments 
               columns.types string:string
@@ -224,6 +227,7 @@ STAGE PLANS:
                 SORTBUCKETCOLSPREFIX TRUE
                 bucket_count 4
                 bucket_field_name key
+                column.name.delimiter ,
                 columns key,value
                 columns.comments 
                 columns.types string:string
@@ -347,6 +351,7 @@ STAGE PLANS:
             properties:
               bucket_count 4
               bucket_field_name key
+              column.name.delimiter ,
               columns key,value
               columns.comments 
               columns.types string:string
@@ -370,6 +375,7 @@ STAGE PLANS:
                 SORTBUCKETCOLSPREFIX TRUE
                 bucket_count 4
                 bucket_field_name key
+                column.name.delimiter ,
                 columns key,value
                 columns.comments 
                 columns.types string:string
@@ -394,6 +400,7 @@ STAGE PLANS:
             properties:
               bucket_count 4
               bucket_field_name key
+              column.name.delimiter ,
               columns key,value
               columns.comments 
               columns.types string:string
@@ -417,6 +424,7 @@ STAGE PLANS:
                 SORTBUCKETCOLSPREFIX TRUE
                 bucket_count 4
                 bucket_field_name key
+                column.name.delimiter ,
                 columns key,value
                 columns.comments 
                 columns.types string:string
@@ -517,6 +525,7 @@ STAGE PLANS:
                   properties:
                     bucket_count 2
                     bucket_field_name key
+                    column.name.delimiter ,
                     columns key,value
                     columns.comments 
                     columns.types string:string
@@ -540,6 +549,7 @@ STAGE PLANS:
                       SORTBUCKETCOLSPREFIX TRUE
                       bucket_count 2
                       bucket_field_name key
+                      column.name.delimiter ,
                       columns key,value
                       columns.comments 
                       columns.types string:string
@@ -621,6 +631,7 @@ STAGE PLANS:
             properties:
               bucket_count 4
               bucket_field_name key
+              column.name.delimiter ,
               columns key,value
               columns.comments 
               columns.types string:string
@@ -644,6 +655,7 @@ STAGE PLANS:
                 SORTBUCKETCOLSPREFIX TRUE
                 bucket_count 4
                 bucket_field_name key
+                column.name.delimiter ,
                 columns key,value
                 columns.comments 
                 columns.types string:string
@@ -668,6 +680,7 @@ STAGE PLANS:
             properties:
               bucket_count 4
               bucket_field_name key
+              column.name.delimiter ,
               columns key,value
               columns.comments 
               columns.types string:string
@@ -691,6 +704,7 @@ STAGE PLANS:
                 SORTBUCKETCOLSPREFIX TRUE
                 bucket_count 4
                 bucket_field_name key
+                column.name.delimiter ,
                 columns key,value
                 columns.comments 
                 columns.types string:string
@@ -714,6 +728,7 @@ STAGE PLANS:
             properties:
               bucket_count 2
               bucket_field_name key
+              column.name.delimiter ,
               columns key,value
               columns.comments 
               columns.types string:string
@@ -737,6 +752,7 @@ STAGE PLANS:
                 SORTBUCKETCOLSPREFIX TRUE
                 bucket_count 2
                 bucket_field_name key
+                column.name.delimiter ,
                 columns key,value
                 columns.comments 
                 columns.types string:string
@@ -798,6 +814,7 @@ STAGE PLANS:
                   properties:
                     bucket_count 4
                     bucket_field_name key
+                    column.name.delimiter ,
                     columns key,value
                     columns.comments 
                     columns.types string:string
@@ -821,6 +838,7 @@ STAGE PLANS:
                       SORTBUCKETCOLSPREFIX TRUE
                       bucket_count 4
                       bucket_field_name key
+                      column.name.delimiter ,
                       columns key,value
                       columns.comments 
                       columns.types string:string
@@ -844,6 +862,7 @@ STAGE PLANS:
                   properties:
                     bucket_count 4
                     bucket_field_name key
+                    column.name.delimiter ,
                     columns key,value
                     columns.comments 
                     columns.types string:string
@@ -867,6 +886,7 @@ STAGE PLANS:
                       SORTBUCKETCOLSPREFIX TRUE
                       bucket_count 4
                       bucket_field_name key
+                      column.name.delimiter ,
                       columns key,value
                       columns.comments 
                       columns.types string:string
@@ -948,6 +968,7 @@ STAGE PLANS:
             properties:
               bucket_count 4
               bucket_field_name key
+              column.name.delimiter ,
               columns key,value
               columns.comments 
               columns.types string:string
@@ -971,6 +992,7 @@ STAGE PLANS:
                 SORTBUCKETCOLSPREFIX TRUE
                 bucket_count 4
                 bucket_field_name key
+                column.name.delimiter ,
                 columns key,value
                 columns.comments 
                 columns.types string:string
@@ -995,6 +1017,7 @@ STAGE PLANS:
             properties:
               bucket_count 4
               bucket_field_name key
+              column.name.delimiter ,
               columns key,value
               columns.comments 
               columns.types string:string
@@ -1018,6 +1041,7 @@ STAGE PLANS:
                 SORTBUCKETCOLSPREFIX TRUE
                 bucket_count 4
                 bucket_field_name key
+                column.name.delimiter ,
                 columns key,value
                 columns.comments 
                 columns.types string:string
@@ -1041,6 +1065,7 @@ STAGE PLANS:
             properties:
               bucket_count 2
               bucket_field_name key
+              column.name.delimiter ,
               columns key,value
               columns.comments 
               columns.types string:string
@@ -1064,6 +1089,7 @@ STAGE PLANS:
                 SORTBUCKETCOLSPREFIX TRUE
                 bucket_count 2
                 bucket_field_name key
+                column.name.delimiter ,
                 columns key,value
                 columns.comments 
                 columns.types string:string
@@ -1154,6 +1180,7 @@ STAGE PLANS:
             properties:
               bucket_count 4
               bucket_field_name key
+              column.name.delimiter ,
               columns key,value
               columns.comments 
               columns.types string:string
@@ -1177,6 +1204,7 @@ STAGE PLANS:
                 SORTBUCKETCOLSPREFIX TRUE
                 bucket_count 4
                 bucket_field_name key
+                column.name.delimiter ,
                 columns key,value
                 columns.comments 
                 columns.types string:string
@@ -1201,6 +1229,7 @@ STAGE PLANS:
             properties:
               bucket_count 4
               bucket_field_name key
+              column.name.delimiter ,
               columns key,value
               columns.comments 
               columns.types string:string
@@ -1224,6 +1253,7 @@ STAGE PLANS:
                 SORTBUCKETCOLSPREFIX TRUE
                 bucket_count 4
                 bucket_field_name key
+                column.name.delimiter ,
                 columns key,value
                 columns.comments 
                 columns.types string:string

http://git-wip-us.apache.org/repos/asf/hive/blob/588c3911/ql/src/test/results/clientpositive/auto_sortmerge_join_11.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/auto_sortmerge_join_11.q.out 
b/ql/src/test/results/clientpositive/auto_sortmerge_join_11.q.out
index b216c60..ca2ed10 100644
--- a/ql/src/test/results/clientpositive/auto_sortmerge_join_11.q.out
+++ b/ql/src/test/results/clientpositive/auto_sortmerge_join_11.q.out
@@ -123,6 +123,7 @@ STAGE PLANS:
                   properties:
                     bucket_count 2
                     bucket_field_name key
+                    column.name.delimiter ,
                     columns key,value
                     columns.comments 
                     columns.types string:string
@@ -145,6 +146,7 @@ STAGE PLANS:
                     properties:
                       bucket_count 2
                       bucket_field_name key
+                      column.name.delimiter ,
                       columns key,value
                       columns.comments 
                       columns.types string:string
@@ -229,6 +231,7 @@ STAGE PLANS:
             properties:
               bucket_count 4
               bucket_field_name key
+              column.name.delimiter ,
               columns key,value
               columns.comments 
               columns.types string:string
@@ -251,6 +254,7 @@ STAGE PLANS:
               properties:
                 bucket_count 4
                 bucket_field_name key
+                column.name.delimiter ,
                 columns key,value
                 columns.comments 
                 columns.types string:string
@@ -275,6 +279,7 @@ STAGE PLANS:
             properties:
               bucket_count 4
               bucket_field_name key
+              column.name.delimiter ,
               columns key,value
               columns.comments 
               columns.types string:string
@@ -297,6 +302,7 @@ STAGE PLANS:
               properties:
                 bucket_count 4
                 bucket_field_name key
+                column.name.delimiter ,
                 columns key,value
                 columns.comments 
                 columns.types string:string
@@ -321,6 +327,7 @@ STAGE PLANS:
             properties:
               bucket_count 2
               bucket_field_name key
+              column.name.delimiter ,
               columns key,value
               columns.comments 
               columns.types string:string
@@ -343,6 +350,7 @@ STAGE PLANS:
               properties:
                 bucket_count 2
                 bucket_field_name key
+                column.name.delimiter ,
                 columns key,value
                 columns.comments 
                 columns.types string:string
@@ -439,6 +447,7 @@ STAGE PLANS:
                   properties:
                     bucket_count 2
                     bucket_field_name key
+                    column.name.delimiter ,
                     columns key,value
                     columns.comments 
                     columns.types string:string
@@ -461,6 +470,7 @@ STAGE PLANS:
                     properties:
                       bucket_count 2
                       bucket_field_name key
+                      column.name.delimiter ,
                       columns key,value
                       columns.comments 
                       columns.types string:string
@@ -545,6 +555,7 @@ STAGE PLANS:
             properties:
               bucket_count 4
               bucket_field_name key
+              column.name.delimiter ,
               columns key,value
               columns.comments 
               columns.types string:string
@@ -567,6 +578,7 @@ STAGE PLANS:
               properties:
                 bucket_count 4
                 bucket_field_name key
+                column.name.delimiter ,
                 columns key,value
                 columns.comments 
                 columns.types string:string
@@ -591,6 +603,7 @@ STAGE PLANS:
             properties:
               bucket_count 4
               bucket_field_name key
+              column.name.delimiter ,
               columns key,value
               columns.comments 
               columns.types string:string
@@ -613,6 +626,7 @@ STAGE PLANS:
               properties:
                 bucket_count 4
                 bucket_field_name key
+                column.name.delimiter ,
                 columns key,value
                 columns.comments 
                 columns.types string:string
@@ -637,6 +651,7 @@ STAGE PLANS:
             properties:
               bucket_count 2
               bucket_field_name key
+              column.name.delimiter ,
               columns key,value
               columns.comments 
               columns.types string:string
@@ -659,6 +674,7 @@ STAGE PLANS:
               properties:
                 bucket_count 2
                 bucket_field_name key
+                column.name.delimiter ,
                 columns key,value
                 columns.comments 
                 columns.types string:string
@@ -754,6 +770,7 @@ STAGE PLANS:
                   properties:
                     bucket_count 2
                     bucket_field_name key
+                    column.name.delimiter ,
                     columns key,value
                     columns.comments 
                     columns.types string:string
@@ -776,6 +793,7 @@ STAGE PLANS:
                     properties:
                       bucket_count 2
                       bucket_field_name key
+                      column.name.delimiter ,
                       columns key,value
                       columns.comments 
                       columns.types string:string
@@ -860,6 +878,7 @@ STAGE PLANS:
             properties:
               bucket_count 4
               bucket_field_name key
+              column.name.delimiter ,
               columns key,value
               columns.comments 
               columns.types string:string
@@ -882,6 +901,7 @@ STAGE PLANS:
               properties:
                 bucket_count 4
                 bucket_field_name key
+                column.name.delimiter ,
                 columns key,value
                 columns.comments 
                 columns.types string:string
@@ -906,6 +926,7 @@ STAGE PLANS:
             properties:
               bucket_count 4
               bucket_field_name key
+              column.name.delimiter ,
               columns key,value
               columns.comments 
               columns.types string:string
@@ -928,6 +949,7 @@ STAGE PLANS:
               properties:
                 bucket_count 4
                 bucket_field_name key
+                column.name.delimiter ,
                 columns key,value
                 columns.comments 
                 columns.types string:string
@@ -1023,6 +1045,7 @@ STAGE PLANS:
                   properties:
                     bucket_count 2
                     bucket_field_name key
+                    column.name.delimiter ,
                     columns key,value
                     columns.comments 
                     columns.types string:string
@@ -1045,6 +1068,7 @@ STAGE PLANS:
                     properties:
                       bucket_count 2
                       bucket_field_name key
+                      column.name.delimiter ,
                       columns key,value
                       columns.comments 
                       columns.types string:string
@@ -1071,6 +1095,7 @@ STAGE PLANS:
                   properties:
                     bucket_count 4
                     bucket_field_name key
+                    column.name.delimiter ,
                     columns key,value
                     columns.comments 
                     columns.types string:string
@@ -1093,6 +1118,7 @@ STAGE PLANS:
                     properties:
                       bucket_count 4
                       bucket_field_name key
+                      column.name.delimiter ,
                       columns key,value
                       columns.comments 
                       columns.types string:string
@@ -1115,6 +1141,7 @@ STAGE PLANS:
                   properties:
                     bucket_count 4
                     bucket_field_name key
+                    column.name.delimiter ,
                     columns key,value
                     columns.comments 
                     columns.types string:string
@@ -1137,6 +1164,7 @@ STAGE PLANS:
                     properties:
                       bucket_count 4
                       bucket_field_name key
+                      column.name.delimiter ,
                       columns key,value
                       columns.comments 
                       columns.types string:string
@@ -1240,6 +1268,7 @@ STAGE PLANS:
             properties:
               bucket_count 4
               bucket_field_name key
+              column.name.delimiter ,
               columns key,value
               columns.comments 
               columns.types string:string
@@ -1262,6 +1291,7 @@ STAGE PLANS:
               properties:
                 bucket_count 4
                 bucket_field_name key
+                column.name.delimiter ,
                 columns key,value
                 columns.comments 
                 columns.types string:string
@@ -1286,6 +1316,7 @@ STAGE PLANS:
             properties:
               bucket_count 4
               bucket_field_name key
+              column.name.delimiter ,
               columns key,value
               columns.comments 
               columns.types string:string
@@ -1308,6 +1339,7 @@ STAGE PLANS:
               properties:
                 bucket_count 4
                 bucket_field_name key
+                column.name.delimiter ,
                 columns key,value
                 columns.comments 
                 columns.types string:string

Reply via email to