deniskuzZ commented on code in PR #4372: URL: https://github.com/apache/hive/pull/4372#discussion_r1237282004
########## ql/src/java/org/apache/hadoop/hive/ql/ddl/table/metaref/AlterTableCreateMetaRefAnalyzer.java: ########## @@ -16,92 +16,159 @@ * limitations under the License. */ -package org.apache.hadoop.hive.ql.ddl.table.branch.create; +package org.apache.hadoop.hive.ql.ddl.table.metaref; import java.time.ZoneId; import java.util.Locale; import java.util.Map; import java.util.concurrent.TimeUnit; - import org.apache.hadoop.hive.common.TableName; import org.apache.hadoop.hive.common.type.TimestampTZ; import org.apache.hadoop.hive.common.type.TimestampTZUtil; import org.apache.hadoop.hive.conf.HiveConf; import org.apache.hadoop.hive.ql.QueryState; -import org.apache.hadoop.hive.ql.ddl.DDLSemanticAnalyzerFactory; import org.apache.hadoop.hive.ql.ddl.DDLUtils; import org.apache.hadoop.hive.ql.ddl.DDLWork; import org.apache.hadoop.hive.ql.ddl.table.AbstractAlterTableAnalyzer; +import org.apache.hadoop.hive.ql.ddl.table.AbstractAlterTableDesc; import org.apache.hadoop.hive.ql.ddl.table.AlterTableType; import org.apache.hadoop.hive.ql.exec.TaskFactory; import org.apache.hadoop.hive.ql.hooks.ReadEntity; import org.apache.hadoop.hive.ql.metadata.Table; import org.apache.hadoop.hive.ql.parse.ASTNode; -import org.apache.hadoop.hive.ql.parse.AlterTableBranchSpec; import org.apache.hadoop.hive.ql.parse.HiveParser; import org.apache.hadoop.hive.ql.parse.SemanticException; import org.apache.hadoop.hive.ql.session.SessionState; -import static org.apache.hadoop.hive.ql.parse.AlterTableBranchSpec.AlterBranchOperationType.CREATE_BRANCH; - [email protected](types = HiveParser.TOK_ALTERTABLE_CREATE_BRANCH) -public class AlterTableCreateBranchAnalyzer extends AbstractAlterTableAnalyzer { +public abstract class AlterTableCreateMetaRefAnalyzer extends AbstractAlterTableAnalyzer { + protected static AbstractAlterTableDesc alterTableDesc; + protected static AlterTableType alterTableType; + protected abstract AbstractAlterTableDesc getAlterTableDesc(AlterTableTypeReq alterTableTypeReq) + throws 
SemanticException; - public AlterTableCreateBranchAnalyzer(QueryState queryState) throws SemanticException { + public AlterTableCreateMetaRefAnalyzer(QueryState queryState) throws SemanticException { super(queryState); } @Override protected void analyzeCommand(TableName tableName, Map<String, String> partitionSpec, ASTNode command) throws SemanticException { Table table = getTable(tableName); - validateAlterTableType(table, AlterTableType.CREATE_BRANCH, false); DDLUtils.validateTableIsIceberg(table); inputs.add(new ReadEntity(table)); + validateAlterTableType(table, alterTableType, false); + AlterTableTypeReq alterTableTypeReq = new AlterTableTypeReq(); - String branchName = command.getChild(0).getText(); + String metaRefName = command.getChild(0).getText(); + alterTableTypeReq.setTableName(tableName); + alterTableTypeReq.setMetaRefName(metaRefName); Long snapshotId = null; Long asOfTime = null; Long maxRefAgeMs = null; Integer minSnapshotsToKeep = null; Long maxSnapshotAgeMs = null; + AlterTableType alterTableType = command.getType() + == HiveParser.TOK_ALTERTABLE_CREATE_BRANCH ? AlterTableType.CREATE_BRANCH : AlterTableType.CREATE_TAG; for (int i = 1; i < command.getChildCount(); i++) { ASTNode childNode = (ASTNode) command.getChild(i); switch (childNode.getToken().getType()) { case HiveParser.TOK_AS_OF_VERSION: snapshotId = Long.parseLong(childNode.getChild(0).getText()); + alterTableTypeReq.setSnapshotId(snapshotId); break; case HiveParser.TOK_AS_OF_TIME: ZoneId timeZone = SessionState.get() == null ? 
new HiveConf().getLocalTimeZone() : SessionState.get().getConf().getLocalTimeZone(); TimestampTZ ts = TimestampTZUtil.parse(stripQuotes(childNode.getChild(0).getText()), timeZone); asOfTime = ts.toEpochMilli(); + alterTableTypeReq.setAsOfTime(asOfTime); break; case HiveParser.TOK_RETAIN: String maxRefAge = childNode.getChild(0).getText(); String timeUnitOfBranchRetain = childNode.getChild(1).getText(); - maxRefAgeMs = TimeUnit.valueOf(timeUnitOfBranchRetain.toUpperCase(Locale.ENGLISH)) - .toMillis(Long.parseLong(maxRefAge)); + maxRefAgeMs = + TimeUnit.valueOf(timeUnitOfBranchRetain.toUpperCase(Locale.ENGLISH)).toMillis(Long.parseLong(maxRefAge)); + alterTableTypeReq.setMaxRefAgeMs(maxRefAgeMs); break; case HiveParser.TOK_WITH_SNAPSHOT_RETENTION: minSnapshotsToKeep = Integer.valueOf(childNode.getChild(0).getText()); + alterTableTypeReq.setMinSnapshotsToKeep(minSnapshotsToKeep); if (childNode.getChildren().size() > 1) { String maxSnapshotAge = childNode.getChild(1).getText(); String timeUnitOfSnapshotsRetention = childNode.getChild(2).getText(); maxSnapshotAgeMs = TimeUnit.valueOf(timeUnitOfSnapshotsRetention.toUpperCase(Locale.ENGLISH)) .toMillis(Long.parseLong(maxSnapshotAge)); + alterTableTypeReq.setMaxSnapshotAgeMs(maxSnapshotAgeMs); } break; default: - throw new SemanticException("Unrecognized token in ALTER CREATE BRANCH statement"); + throw new SemanticException("Unrecognized token in ALTER " + alterTableType.getName() + " statement"); } } + alterTableDesc = getAlterTableDesc(alterTableTypeReq); + rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(), alterTableDesc))); + } + + public class AlterTableTypeReq{ Review Comment: Could we mark this class as package-private? Also, I'm not sure the public setters are needed — the properties can be accessed directly. -- This is an automated message from the Apache Git Service. To respond to the message, please log on to GitHub and use the URL above to go to the specific comment. 
To unsubscribe, e-mail: [email protected] For queries about this service, please contact Infrastructure at: [email protected] --------------------------------------------------------------------- To unsubscribe, e-mail: [email protected] For additional commands, e-mail: [email protected]
