kuczoram commented on code in PR #5192: URL: https://github.com/apache/hive/pull/5192#discussion_r1601229515
##########
ql/src/java/org/apache/hadoop/hive/ql/txn/compactor/CompactionQueryBuilder.java:
##########

@@ -419,265 +362,45 @@ private void buildWhereClauseForInsert(StringBuilder query) {
       }
     }
   }
-
-    if (CompactionType.MINOR.equals(compactionType) && !insertOnly && validWriteIdList != null) {
-      long[] invalidWriteIds = validWriteIdList.getInvalidWriteIds();
-      if (invalidWriteIds.length > 0) {
-        query.append(" where `originalTransaction` not in (").append(
-            StringUtils.join(ArrayUtils.toObject(invalidWriteIds), ","))
-            .append(")");
-      }
-    }
   }

-  private void getDdlForCreate(StringBuilder query) {
-    defineColumns(query);
-
-    // PARTITIONED BY. Used for parts of minor compaction.
-    if (isPartitioned) {
-      query.append(" PARTITIONED BY (`file_name` STRING) ");
-    }
-
-    // CLUSTERED BY. (bucketing)
-    int bucketingVersion = 0;
-    if (!insertOnly && CompactionType.MINOR.equals(compactionType)) {
-      bucketingVersion = getMinorCrudBucketing(query, bucketingVersion);
-    } else if (insertOnly) {
-      getMmBucketing(query);
-    }
-
-    // SKEWED BY
-    if (insertOnly) {
-      getSkewedByClause(query);
-    }
-
-    // STORED AS / ROW FORMAT SERDE + INPUTFORMAT + OUTPUTFORMAT
-    if (!insertOnly) {
-      query.append(" stored as orc");
-    } else {
-      copySerdeFromSourceTable(query);
+  protected void appendColumns(StringBuilder query, List<FieldSchema> cols, boolean alias) {
+    if (cols == null) {
+      throw new IllegalStateException("Query could not be created: Source columns are unknown");
     }
-
-    // LOCATION
-    if (location != null) {
-      query.append(" LOCATION '").append(HiveStringUtils.escapeHiveCommand(location)).append("'");
+    for (int i = 0; i < cols.size(); ++i) {
+      if (alias) {
+        query.append(i == 0 ? "'" : ", '").append(cols.get(i).getName()).append("', `").append(cols.get(i).getName())
+            .append("`");
+      } else {
+        query.append(i == 0 ? "`" : ", `").append(cols.get(i).getName()).append("`");
+      }
     }
-
-    // TBLPROPERTIES
-    addTblProperties(query, bucketingVersion);
   }

   /**
    * Define columns of the create query.
    */
-  private void defineColumns(StringBuilder query) {
-    if (sourceTab == null) {
-      return; // avoid NPEs, don't throw an exception but skip this part of the query
-    }
-    query.append("(");
-    if (!insertOnly) {
+  protected void defineColumns(StringBuilder query) {
+    if (sourceTab != null) {
+      query.append("(");
       query.append(
-          "`operation` int, `originalTransaction` bigint, `bucket` int, `rowId` bigint, "
-              + "`currentTransaction` bigint, `row` struct<");
+          "`operation` int, `originalTransaction` bigint, `bucket` int, `rowId` bigint, " + "`currentTransaction` bigint, `row` struct<");
+      List<String> columnDescs = getColumnDescs();
+      query.append(StringUtils.join(columnDescs, ','));
+      query.append(">) ");

Review Comment:
   Fixed it.
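Editor's note: as a minimal, self-contained sketch of the column-formatting behaviour that the new `appendColumns(query, cols, alias)` helper in this diff implements, the snippet below mirrors its loop using plain `String` column names in place of Hive's `FieldSchema` (an assumption made purely so it compiles and runs without Hive on the classpath); it is an illustration, not project code.

```java
import java.util.List;

public class AppendColumnsSketch {

  // Mirrors the new helper's formatting: with alias=true it emits quoted-name /
  // back-quoted-column pairs ('a', `a`, 'b', `b`); with alias=false it emits a
  // plain back-quoted column list (`a`, `b`).
  static void appendColumns(StringBuilder query, List<String> cols, boolean alias) {
    if (cols == null) {
      throw new IllegalStateException("Query could not be created: Source columns are unknown");
    }
    for (int i = 0; i < cols.size(); ++i) {
      if (alias) {
        query.append(i == 0 ? "'" : ", '").append(cols.get(i)).append("', `").append(cols.get(i)).append("`");
      } else {
        query.append(i == 0 ? "`" : ", `").append(cols.get(i)).append("`");
      }
    }
  }

  public static void main(String[] args) {
    StringBuilder withAlias = new StringBuilder();
    appendColumns(withAlias, List.of("a", "b"), true);
    System.out.println(withAlias); // 'a', `a`, 'b', `b`

    StringBuilder plain = new StringBuilder();
    appendColumns(plain, List.of("a", "b"), false);
    System.out.println(plain);     // `a`, `b`
  }
}
```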