vnhive commented on a change in pull request #2037:
URL: https://github.com/apache/hive/pull/2037#discussion_r598073734
##########
File path:
ql/src/java/org/apache/hadoop/hive/ql/ddl/database/desc/DescDatabaseOperation.java
##########
@@ -52,14 +53,22 @@ public int execute() throws HiveException {
params = new TreeMap<>(database.getParameters());
}
- String location = database.getLocationUri();
- if (HiveConf.getBoolVar(context.getConf(),
HiveConf.ConfVars.HIVE_IN_TEST)) {
- location = "location/in/test";
- }
-
+ String location = "";
DescDatabaseFormatter formatter =
DescDatabaseFormatter.getFormatter(context.getConf());
- formatter.showDatabaseDescription(outStream, database.getName(),
database.getDescription(), location,
- database.getManagedLocationUri(), database.getOwnerName(),
database.getOwnerType(), params);
+ if (database.getType() == DatabaseType.NATIVE) {
+ location = database.getLocationUri();
+ if (HiveConf.getBoolVar(context.getConf(),
HiveConf.ConfVars.HIVE_IN_TEST)) {
+ location = "location/in/test";
+ }
+ // database.setRemote_dbname("");
+ // database.setConnector_name("");
+
+ formatter.showDatabaseDescription(outStream, database.getName(),
database.getDescription(), location,
+ database.getManagedLocationUri(), database.getOwnerName(),
database.getOwnerType(), params, "", "");
+ } else {
+ formatter.showDatabaseDescription(outStream, database.getName(),
database.getDescription(), "", "",
+ database.getOwnerName(), database.getOwnerType(), params,
database.getConnector_name(), database.getRemote_dbname());
+ }
} catch (Exception e) {
throw new HiveException(e, ErrorMsg.GENERIC_ERROR);
Review comment:
showDatabaseDescription throws a HiveException; why are we catching it as an
Exception and then wrapping it in a HiveException? Can we please consider
skipping this?
##########
File path:
ql/src/java/org/apache/hadoop/hive/ql/ddl/dataconnector/alter/properties/AlterDataConnectorSetPropertiesDesc.java
##########
@@ -0,0 +1,47 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.ql.ddl.dataconnector.alter.properties;
+
+import java.util.Map;
+
+import
org.apache.hadoop.hive.ql.ddl.dataconnector.alter.AbstractAlterDataConnectorDesc;
+import org.apache.hadoop.hive.ql.parse.ReplicationSpec;
+import org.apache.hadoop.hive.ql.plan.Explain;
+import org.apache.hadoop.hive.ql.plan.Explain.Level;
+
+/**
+ * DDL task description for ALTER CONNECTOR ... SET PROPERTIES ... commands.
+ */
+@Explain(displayName = "Set Connector Properties", explainLevels = {
Level.USER, Level.DEFAULT, Level.EXTENDED })
Review comment:
So the displayName "ALTER CONNECTOR ... SET PROPERTIES ... commands" would be
the more appropriate semantics for the command, wouldn't it? Does this need to
be changed?
##########
File path:
ql/src/java/org/apache/hadoop/hive/ql/ddl/dataconnector/alter/AbstractAlterDataConnectorOperation.java
##########
@@ -0,0 +1,59 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.ql.ddl.dataconnector.alter;
+
+import java.util.Map;
+
+import org.apache.hadoop.hive.metastore.api.DataConnector;
+import org.apache.hadoop.hive.ql.ErrorMsg;
+import org.apache.hadoop.hive.ql.ddl.DDLOperation;
+import org.apache.hadoop.hive.ql.ddl.DDLOperationContext;
+import org.apache.hadoop.hive.ql.metadata.HiveException;
+
+/**
+ * Operation process of altering a data connector.
+ */
+public abstract class AbstractAlterDataConnectorOperation<T extends
AbstractAlterDataConnectorDesc> extends DDLOperation<T> {
+ public AbstractAlterDataConnectorOperation(DDLOperationContext context, T
desc) {
+ super(context, desc);
+ }
+
+ @Override
+ public int execute() throws HiveException {
+ String dcName = desc.getConnectorName();
+ DataConnector connector = context.getDb().getDataConnector(dcName);
+ if (connector == null) {
+ throw new HiveException(ErrorMsg.DATACONNECTOR_ALREADY_EXISTS, dcName);
+ }
+
+ Map<String, String> params = connector.getParameters();
+ if ((null != desc.getReplicationSpec()) &&
+ !desc.getReplicationSpec().allowEventReplacementInto(params)) {
+ LOG.debug("DDLTask: Alter Connector {} is skipped as connector is newer
than update", dcName);
+ return 0; // no replacement, the existing connector state is newer than
our update.
+ }
+
+ doAlteration(connector, params);
+
+ context.getDb().alterDataConnector(connector.getName(), connector);
+ return 0;
Review comment:
We return 0 both in the case where the alteration is skipped and here
(when the alteration has been persisted successfully). Is this expected?
##########
File path: ql/src/java/org/apache/hadoop/hive/ql/metadata/Hive.java
##########
@@ -846,6 +847,110 @@ public void alterTable(String catName, String dbName,
String tblName, Table newT
}
}
+ /**
+ * Create a dataconnector
+ * @param connector
+ * @param ifNotExist if true, will ignore AlreadyExistsException exception
+ * @throws AlreadyExistsException
+ * @throws HiveException
+ */
+ public void createDataConnector(DataConnector connector, boolean ifNotExist)
+ throws AlreadyExistsException, HiveException {
+ try {
+ getMSC().createDataConnector(connector);
+ } catch (AlreadyExistsException e) {
+ if (!ifNotExist) {
+ throw e;
+ }
+ } catch (Exception e) {
+ throw new HiveException(e);
+ }
+ }
+
+ /**
+ * Create a DataConnector. Raise an error if a dataconnector with the same
name already exists.
+ * @param connector
+ * @throws AlreadyExistsException
+ * @throws HiveException
+ */
+ public void createDataConnector(DataConnector connector) throws
AlreadyExistsException, HiveException {
+ createDataConnector(connector, false);
+ }
+
+ /**
+ * Drop a dataconnector.
+ * @param name
+ * @throws NoSuchObjectException
+ * @throws HiveException
+ * @see
org.apache.hadoop.hive.metastore.HiveMetaStoreClient#dropDataConnector(java.lang.String,
boolean, boolean)
+ */
+ public void dropDataConnector(String name, boolean ifNotExists) throws
HiveException, NoSuchObjectException {
+ dropDataConnector(name, ifNotExists, true);
+ }
+
+ /**
+ * Drop a dataconnector
+ * @param name
+ * @param checkReferences drop only if there are no dbs referencing this
connector
+ * @throws HiveException
+ * @throws NoSuchObjectException
+ */
+ public void dropDataConnector(String name, boolean ifNotExists, boolean
checkReferences)
+ throws HiveException, NoSuchObjectException {
+ try {
+ getMSC().dropDataConnector(name, ifNotExists, checkReferences);
+ } catch (NoSuchObjectException e) {
+ if (!ifNotExists)
+ throw e;
+ } catch (Exception e) {
+ throw new HiveException(e);
+ }
+ }
+
+ /**
+ * Get the dataconnector by name.
+ * @param dcName the name of the dataconnector.
+ * @return a DataConnector object if this dataconnector exists, null
otherwise.
+ * @throws HiveException
+ */
+ public DataConnector getDataConnector(String dcName) throws HiveException {
+ try {
+ return getMSC().getDataConnector(dcName);
+ } catch (NoSuchObjectException e) {
+ return null;
Review comment:
If we let this method throw a NoSuchObjectException when the item is
not found, instead of returning null,
1. we can avoid the other places in the code where we check for the null
value and throw an exception
2. We can retain the entire stack trace obtained from the MetaStoreClient,
which in turn would retain the history of the exception from the HiveMetaStore.
That way, if the exception occurred due to the underlying database not being
available or because the authentication information was not valid, the relevant
information would be available in the exception trace.
##########
File path:
ql/src/java/org/apache/hadoop/hive/ql/ddl/dataconnector/alter/AbstractAlterDataConnectorOperation.java
##########
@@ -0,0 +1,59 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.ql.ddl.dataconnector.alter;
+
+import java.util.Map;
+
+import org.apache.hadoop.hive.metastore.api.DataConnector;
+import org.apache.hadoop.hive.ql.ErrorMsg;
+import org.apache.hadoop.hive.ql.ddl.DDLOperation;
+import org.apache.hadoop.hive.ql.ddl.DDLOperationContext;
+import org.apache.hadoop.hive.ql.metadata.HiveException;
+
+/**
+ * Operation process of altering a data connector.
+ */
+public abstract class AbstractAlterDataConnectorOperation<T extends
AbstractAlterDataConnectorDesc> extends DDLOperation<T> {
+ public AbstractAlterDataConnectorOperation(DDLOperationContext context, T
desc) {
+ super(context, desc);
+ }
+
+ @Override
+ public int execute() throws HiveException {
+ String dcName = desc.getConnectorName();
+ DataConnector connector = context.getDb().getDataConnector(dcName);
+ if (connector == null) {
+ throw new HiveException(ErrorMsg.DATACONNECTOR_ALREADY_EXISTS, dcName);
+ }
+
+ Map<String, String> params = connector.getParameters();
+ if ((null != desc.getReplicationSpec()) &&
+ !desc.getReplicationSpec().allowEventReplacementInto(params)) {
+ LOG.debug("DDLTask: Alter Connector {} is skipped as connector is newer
than update", dcName);
+ return 0; // no replacement, the existing connector state is newer than
our update.
Review comment:
The method returns 0 in both the success and the failure cases.
##########
File path:
ql/src/java/org/apache/hadoop/hive/ql/ddl/dataconnector/alter/properties/AlterDataConnectorSetPropertiesDesc.java
##########
@@ -0,0 +1,47 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.ql.ddl.dataconnector.alter.properties;
+
+import java.util.Map;
+
+import
org.apache.hadoop.hive.ql.ddl.dataconnector.alter.AbstractAlterDataConnectorDesc;
+import org.apache.hadoop.hive.ql.parse.ReplicationSpec;
+import org.apache.hadoop.hive.ql.plan.Explain;
+import org.apache.hadoop.hive.ql.plan.Explain.Level;
+
+/**
+ * DDL task description for ALTER CONNECTOR ... SET PROPERTIES ... commands.
+ */
+@Explain(displayName = "Set Connector Properties", explainLevels = {
Level.USER, Level.DEFAULT, Level.EXTENDED })
+public class AlterDataConnectorSetPropertiesDesc extends
AbstractAlterDataConnectorDesc {
+ private static final long serialVersionUID = 1L;
+
+ private final Map<String, String> dcProperties;
+
+ public AlterDataConnectorSetPropertiesDesc(String connectorName, Map<String,
String> dcProperties,
+ ReplicationSpec replicationSpec) {
Review comment:
Can you please consider writing a comment on how the replication
specification is linked to the data connector? So in the event of replication
to another hive server, the connector would encapsulate the information
required to connect to the hive server and the replication specification stores
the replication-specific properties, correct?
##########
File path:
ql/src/java/org/apache/hadoop/hive/ql/ddl/dataconnector/alter/AbstractAlterDataConnectorOperation.java
##########
@@ -0,0 +1,59 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.ql.ddl.dataconnector.alter;
+
+import java.util.Map;
+
+import org.apache.hadoop.hive.metastore.api.DataConnector;
+import org.apache.hadoop.hive.ql.ErrorMsg;
+import org.apache.hadoop.hive.ql.ddl.DDLOperation;
+import org.apache.hadoop.hive.ql.ddl.DDLOperationContext;
+import org.apache.hadoop.hive.ql.metadata.HiveException;
+
+/**
+ * Operation process of altering a data connector.
+ */
+public abstract class AbstractAlterDataConnectorOperation<T extends
AbstractAlterDataConnectorDesc> extends DDLOperation<T> {
+ public AbstractAlterDataConnectorOperation(DDLOperationContext context, T
desc) {
+ super(context, desc);
+ }
+
+ @Override
+ public int execute() throws HiveException {
+ String dcName = desc.getConnectorName();
+ DataConnector connector = context.getDb().getDataConnector(dcName);
+ if (connector == null) {
+ throw new HiveException(ErrorMsg.DATACONNECTOR_ALREADY_EXISTS, dcName);
Review comment:
If the connector object is null, would it not mean that the data
connector does not exist? Should we be using DATACONNECTOR_NOT_EXISTS (instead
of DATACONNECTOR_ALREADY_EXISTS) here?
I really wish we just threw the exception from the underlying methods. We
can wrap that exception in a DATACONNECTOR_NOT_EXISTS exception here.
We lose the context of what happened in the underlying metastore that led to
the data connector not being found. This could for example be due to,
1. The DC not existing
2. The HMS being down
3. The metastoreclient not being able to communicate with the metastore
server.
4. Log-in credentials mismatch for the underlying database.
5. etc.
##########
File path:
ql/src/java/org/apache/hadoop/hive/ql/ddl/dataconnector/alter/url/AlterDataConnectorSetUrlAnalyzer.java
##########
@@ -0,0 +1,45 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.ql.ddl.dataconnector.alter.url;
+
+import org.apache.hadoop.hive.ql.QueryState;
+import org.apache.hadoop.hive.ql.ddl.DDLSemanticAnalyzerFactory.DDLType;
+import
org.apache.hadoop.hive.ql.ddl.dataconnector.alter.AbstractAlterDataConnectorAnalyzer;
+import org.apache.hadoop.hive.ql.parse.ASTNode;
+import org.apache.hadoop.hive.ql.parse.HiveParser;
+import org.apache.hadoop.hive.ql.parse.SemanticException;
+
+/**
+ * Analyzer for connector set url commands.
+ */
+@DDLType(types = HiveParser.TOK_ALTERDATACONNECTOR_URL)
+public class AlterDataConnectorSetUrlAnalyzer extends
AbstractAlterDataConnectorAnalyzer {
+ public AlterDataConnectorSetUrlAnalyzer(QueryState queryState) throws
SemanticException {
+ super(queryState);
+ }
+
+ @Override
+ public void analyzeInternal(ASTNode root) throws SemanticException {
+ String connectorName = getUnescapedName((ASTNode) root.getChild(0));
Review comment:
1. Can connectorName be null ? Does this use case need to be addressed
in the code ?
2. Can newURL be null and does this need to be handled ?
##########
File path:
ql/src/java/org/apache/hadoop/hive/ql/ddl/dataconnector/alter/AbstractAlterDataConnectorOperation.java
##########
@@ -0,0 +1,59 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.ql.ddl.dataconnector.alter;
+
+import java.util.Map;
+
+import org.apache.hadoop.hive.metastore.api.DataConnector;
+import org.apache.hadoop.hive.ql.ErrorMsg;
+import org.apache.hadoop.hive.ql.ddl.DDLOperation;
+import org.apache.hadoop.hive.ql.ddl.DDLOperationContext;
+import org.apache.hadoop.hive.ql.metadata.HiveException;
+
+/**
+ * Operation process of altering a data connector.
+ */
+public abstract class AbstractAlterDataConnectorOperation<T extends
AbstractAlterDataConnectorDesc> extends DDLOperation<T> {
+ public AbstractAlterDataConnectorOperation(DDLOperationContext context, T
desc) {
+ super(context, desc);
+ }
+
+ @Override
+ public int execute() throws HiveException {
+ String dcName = desc.getConnectorName();
+ DataConnector connector = context.getDb().getDataConnector(dcName);
+ if (connector == null) {
+ throw new HiveException(ErrorMsg.DATACONNECTOR_ALREADY_EXISTS, dcName);
+ }
+
+ Map<String, String> params = connector.getParameters();
+ if ((null != desc.getReplicationSpec()) &&
Review comment:
Can you please consider sticking to `variable != null` for the sake of
uniformity throughout the patch?
##########
File path:
ql/src/java/org/apache/hadoop/hive/ql/ddl/dataconnector/alter/properties/AlterDataConnectorSetPropertiesOperation.java
##########
@@ -0,0 +1,49 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.ql.ddl.dataconnector.alter.properties;
+
+import java.util.Map;
+
+import org.apache.hadoop.hive.metastore.api.DataConnector;
+import org.apache.hadoop.hive.ql.ddl.DDLOperationContext;
+import
org.apache.hadoop.hive.ql.ddl.dataconnector.alter.AbstractAlterDataConnectorOperation;
+
+/**
+ * Operation process of altering a dataconnector's properties.
+ */
+public class AlterDataConnectorSetPropertiesOperation
+ extends
AbstractAlterDataConnectorOperation<AlterDataConnectorSetPropertiesDesc> {
+ public AlterDataConnectorSetPropertiesOperation(DDLOperationContext context,
AlterDataConnectorSetPropertiesDesc desc) {
+ super(context, desc);
+ }
+
+ @Override
+ protected void doAlteration(DataConnector connector, Map<String, String>
params) {
+ Map<String, String> newParams = desc.getConnectorProperties();
+
+ // if both old and new params are not null, merge them
+ if (params != null && newParams != null) {
+ params.putAll(newParams);
+ connector.setParameters(params);
+ } else {
+ // if one of them is null, replace the old params with the new one
+ connector.setParameters(newParams);
Review comment:
1. If newParams are null the connector parameters will become null. This
is an error right (can't have a connector with null parameters)?
2. If params are null, setParameters will throw a null pointer exception. In
this case we should create a new map and set the parameters, correct?
##########
File path:
ql/src/java/org/apache/hadoop/hive/ql/ddl/dataconnector/alter/properties/AlterDataConnectorSetPropertiesAnalyzer.java
##########
@@ -0,0 +1,58 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.ql.ddl.dataconnector.alter.properties;
+
+import java.util.Map;
+
+import org.apache.hadoop.hive.ql.QueryState;
+import org.apache.hadoop.hive.ql.ddl.DDLSemanticAnalyzerFactory.DDLType;
+import
org.apache.hadoop.hive.ql.ddl.dataconnector.alter.AbstractAlterDataConnectorAnalyzer;
+import org.apache.hadoop.hive.ql.parse.ASTNode;
+import org.apache.hadoop.hive.ql.parse.HiveParser;
+import org.apache.hadoop.hive.ql.parse.SemanticException;
+
+/**
+ * Analyzer for connector set properties commands.
+ */
+@DDLType(types = HiveParser.TOK_ALTERDATACONNECTOR_PROPERTIES)
+public class AlterDataConnectorSetPropertiesAnalyzer extends
AbstractAlterDataConnectorAnalyzer {
+ public AlterDataConnectorSetPropertiesAnalyzer(QueryState queryState) throws
SemanticException {
+ super(queryState);
+ }
+
+ @Override
+ public void analyzeInternal(ASTNode root) throws SemanticException {
+ String connectorName = unescapeIdentifier(root.getChild(0).getText());
+
+ Map<String, String> dbProps = null;
+ for (int i = 1; i < root.getChildCount(); i++) {
+ ASTNode childNode = (ASTNode) root.getChild(i);
+ switch (childNode.getToken().getType()) {
Review comment:
Other than TOK_DATACONNECTORPROPERTIES is any other token possible?
If the answer is no, I am going to nitpick by saying, an if - else
conditional construct is more appropriate here. In my humble opinion, a switch
case is more appropriate in the cases where multiple values are possible.
**if-else if-else** can be converted to **switch-case** especially when there
is a default case. But in this case there is only
TOK_ALTERDATACONNECTOR_PROPERTIES.
##########
File path:
ql/src/java/org/apache/hadoop/hive/ql/ddl/dataconnector/alter/owner/AlterDataConnectorSetOwnerOperation.java
##########
@@ -0,0 +1,41 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.ql.ddl.dataconnector.alter.owner;
+
+import java.util.Map;
+
+import org.apache.hadoop.hive.metastore.api.DataConnector;
+import org.apache.hadoop.hive.ql.ddl.DDLOperationContext;
+import
org.apache.hadoop.hive.ql.ddl.dataconnector.alter.AbstractAlterDataConnectorOperation;
+
+/**
+ * Operation process of altering a connector's owner.
+ */
+public class AlterDataConnectorSetOwnerOperation extends
+ AbstractAlterDataConnectorOperation<AlterDataConnectorSetOwnerDesc> {
+ public AlterDataConnectorSetOwnerOperation(DDLOperationContext context,
AlterDataConnectorSetOwnerDesc desc) {
+ super(context, desc);
+ }
+
+ @Override
+ protected void doAlteration(DataConnector connector, Map<String, String>
params) {
+ connector.setOwnerName(desc.getOwnerPrincipal().getName());
Review comment:
Looking at the method signature, I thought that the connector object
will be modified by the key-value pairs being set in params. But this is
different. It looks like the AlterDataConnectorSetOwnerDesc object is used to
modify the connector object. So what is the role of params here?
##########
File path:
ql/src/java/org/apache/hadoop/hive/ql/ddl/dataconnector/alter/url/AlterDataConnectorSetUrlOperation.java
##########
@@ -0,0 +1,65 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.ql.ddl.dataconnector.alter.url;
+
+import java.net.URI;
+import java.net.URISyntaxException;
+import java.util.Map;
+
+import org.apache.commons.lang3.StringUtils;
+import org.apache.hadoop.hive.metastore.api.DataConnector;
+import org.apache.hadoop.hive.ql.ErrorMsg;
+import org.apache.hadoop.hive.ql.ddl.DDLOperationContext;
+import
org.apache.hadoop.hive.ql.ddl.dataconnector.alter.AbstractAlterDataConnectorOperation;
+import org.apache.hadoop.hive.ql.metadata.HiveException;
+
+/**
+ * Operation process of altering a connector's URL.
+ */
+public class AlterDataConnectorSetUrlOperation extends
+ AbstractAlterDataConnectorOperation<AlterDataConnectorSetUrlDesc> {
+ public AlterDataConnectorSetUrlOperation(DDLOperationContext context,
AlterDataConnectorSetUrlDesc desc) {
+ super(context, desc);
+ }
+
+ @Override
+ protected void doAlteration(DataConnector connector, Map<String, String>
params) throws HiveException {
+ try {
+ String newUrl = desc.getURL();
+
+ if (newUrl.equalsIgnoreCase(connector.getUrl())) {
+ throw new HiveException("Old and New URL's for data connector cannot
be the same");
+ }
+
+ URI newURI = new URI(newUrl);
+ if (!newURI.isAbsolute() || StringUtils.isBlank(newURI.getScheme())) {
+ throw new HiveException(ErrorMsg.INVALID_PATH, newUrl); // TODO make a
new error message for URL
+ }
+
+ if (newUrl.equals(connector.getUrl())) {
Review comment:
Isn't this check redundant with the code right above?
if (newUrl.equalsIgnoreCase(connector.getUrl())) {
throw new HiveException("Old and New URL's for data connector cannot
be the same");
}
##########
File path:
ql/src/java/org/apache/hadoop/hive/ql/ddl/dataconnector/alter/url/AlterDataConnectorSetUrlOperation.java
##########
@@ -0,0 +1,65 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.ql.ddl.dataconnector.alter.url;
+
+import java.net.URI;
+import java.net.URISyntaxException;
+import java.util.Map;
+
+import org.apache.commons.lang3.StringUtils;
+import org.apache.hadoop.hive.metastore.api.DataConnector;
+import org.apache.hadoop.hive.ql.ErrorMsg;
+import org.apache.hadoop.hive.ql.ddl.DDLOperationContext;
+import
org.apache.hadoop.hive.ql.ddl.dataconnector.alter.AbstractAlterDataConnectorOperation;
+import org.apache.hadoop.hive.ql.metadata.HiveException;
+
+/**
+ * Operation process of altering a connector's URL.
+ */
+public class AlterDataConnectorSetUrlOperation extends
+ AbstractAlterDataConnectorOperation<AlterDataConnectorSetUrlDesc> {
+ public AlterDataConnectorSetUrlOperation(DDLOperationContext context,
AlterDataConnectorSetUrlDesc desc) {
+ super(context, desc);
+ }
+
+ @Override
+ protected void doAlteration(DataConnector connector, Map<String, String>
params) throws HiveException {
+ try {
+ String newUrl = desc.getURL();
Review comment:
Can newUrl be null or empty? Should we check this?
##########
File path:
ql/src/java/org/apache/hadoop/hive/ql/ddl/dataconnector/alter/url/AlterDataConnectorSetUrlDesc.java
##########
@@ -0,0 +1,43 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.ql.ddl.dataconnector.alter.url;
+
+import
org.apache.hadoop.hive.ql.ddl.dataconnector.alter.AbstractAlterDataConnectorDesc;
+import org.apache.hadoop.hive.ql.plan.Explain;
+import org.apache.hadoop.hive.ql.plan.Explain.Level;
+
+/**
+ * DDL task description for ALTER CONNECTOR ... SET URL ... commands.
+ */
+@Explain(displayName = "Set Connector URL", explainLevels = { Level.USER,
Level.DEFAULT, Level.EXTENDED })
Review comment:
Should the display name also mention ALTERing a CONNECTOR?
##########
File path:
ql/src/java/org/apache/hadoop/hive/ql/ddl/database/desc/DescDatabaseOperation.java
##########
@@ -52,14 +53,22 @@ public int execute() throws HiveException {
params = new TreeMap<>(database.getParameters());
}
- String location = database.getLocationUri();
- if (HiveConf.getBoolVar(context.getConf(),
HiveConf.ConfVars.HIVE_IN_TEST)) {
- location = "location/in/test";
- }
-
+ String location = "";
DescDatabaseFormatter formatter =
DescDatabaseFormatter.getFormatter(context.getConf());
- formatter.showDatabaseDescription(outStream, database.getName(),
database.getDescription(), location,
- database.getManagedLocationUri(), database.getOwnerName(),
database.getOwnerType(), params);
+ if (database.getType() == DatabaseType.NATIVE) {
+ location = database.getLocationUri();
+ if (HiveConf.getBoolVar(context.getConf(),
HiveConf.ConfVars.HIVE_IN_TEST)) {
+ location = "location/in/test";
+ }
+ // database.setRemote_dbname("");
+ // database.setConnector_name("");
+
+ formatter.showDatabaseDescription(outStream, database.getName(),
database.getDescription(), location,
+ database.getManagedLocationUri(), database.getOwnerName(),
database.getOwnerType(), params, "", "");
+ } else {
Review comment:
No strong opinion here, but
if (database.getType() == DatabaseType.NATIVE) {}
else if (database.getType() == DatabaseType.REMOTE) {}
else {throw unknown database type exception}
is also a nice pattern that could be considered.
A switch case such as
switch (database.getType()) {
case DatabaseType.NATIVE:
case DatabaseType.REMOTE:
default:
throw unknown database type exception.
}
is probably better. It would be even better to use the switch or if blocks
to set only those parameters specific to the particular option,
for example,
we know that between the two showDatabaseDescription calls, the parameters
varying are,
1. location
2. managed location
3. connector name
4. remote db name
we can set only these in the if or switch conditions and have a single call
for showDatabaseDescription. It will help to easily understand which parameters
are specific to which conditional branch.
##########
File path:
ql/src/java/org/apache/hadoop/hive/ql/ddl/dataconnector/alter/AbstractAlterDataConnectorOperation.java
##########
@@ -0,0 +1,59 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.ql.ddl.dataconnector.alter;
+
+import java.util.Map;
+
+import org.apache.hadoop.hive.metastore.api.DataConnector;
+import org.apache.hadoop.hive.ql.ErrorMsg;
+import org.apache.hadoop.hive.ql.ddl.DDLOperation;
+import org.apache.hadoop.hive.ql.ddl.DDLOperationContext;
+import org.apache.hadoop.hive.ql.metadata.HiveException;
+
+/**
+ * Operation process of altering a data connector.
+ */
+public abstract class AbstractAlterDataConnectorOperation<T extends
AbstractAlterDataConnectorDesc> extends DDLOperation<T> {
+ public AbstractAlterDataConnectorOperation(DDLOperationContext context, T
desc) {
+ super(context, desc);
+ }
+
+ @Override
+ public int execute() throws HiveException {
+ String dcName = desc.getConnectorName();
+ DataConnector connector = context.getDb().getDataConnector(dcName);
+ if (connector == null) {
+ throw new HiveException(ErrorMsg.DATACONNECTOR_ALREADY_EXISTS, dcName);
+ }
+
+ Map<String, String> params = connector.getParameters();
+ if ((null != desc.getReplicationSpec()) &&
+ !desc.getReplicationSpec().allowEventReplacementInto(params)) {
+ LOG.debug("DDLTask: Alter Connector {} is skipped as connector is newer
than update", dcName);
+ return 0; // no replacement, the existing connector state is newer than
our update.
+ }
+
+ doAlteration(connector, params);
+
+ context.getDb().alterDataConnector(connector.getName(), connector);
Review comment:
doAlteration alters the object, while alterDataConnector persists the
altered information into the metastore, correct? Can you please add a comment
above each of the calls to the same effect?
##########
File path:
ql/src/java/org/apache/hadoop/hive/ql/ddl/dataconnector/alter/url/AlterDataConnectorSetUrlOperation.java
##########
@@ -0,0 +1,65 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.ql.ddl.dataconnector.alter.url;
+
+import java.net.URI;
+import java.net.URISyntaxException;
+import java.util.Map;
+
+import org.apache.commons.lang3.StringUtils;
+import org.apache.hadoop.hive.metastore.api.DataConnector;
+import org.apache.hadoop.hive.ql.ErrorMsg;
+import org.apache.hadoop.hive.ql.ddl.DDLOperationContext;
+import
org.apache.hadoop.hive.ql.ddl.dataconnector.alter.AbstractAlterDataConnectorOperation;
+import org.apache.hadoop.hive.ql.metadata.HiveException;
+
+/**
+ * Operation process of altering a connector's URL.
+ */
+public class AlterDataConnectorSetUrlOperation extends
+ AbstractAlterDataConnectorOperation<AlterDataConnectorSetUrlDesc> {
+ public AlterDataConnectorSetUrlOperation(DDLOperationContext context,
AlterDataConnectorSetUrlDesc desc) {
+ super(context, desc);
+ }
+
+ @Override
+ protected void doAlteration(DataConnector connector, Map<String, String>
params) throws HiveException {
+ try {
+ String newUrl = desc.getURL();
+
+ if (newUrl.equalsIgnoreCase(connector.getUrl())) {
+ throw new HiveException("Old and New URL's for data connector cannot
be the same");
+ }
+
+ URI newURI = new URI(newUrl);
+ if (!newURI.isAbsolute() || StringUtils.isBlank(newURI.getScheme())) {
+ throw new HiveException(ErrorMsg.INVALID_PATH, newUrl); // TODO make a
new error message for URL
+ }
+
+ if (newUrl.equals(connector.getUrl())) {
+ LOG.info("Alter Connector skipped. No change in url.");
Review comment:
In the earlier check we are already throwing an exception if the URLs are equal (ignoring case), so this branch looks unreachable.
--
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.
For queries about this service, please contact Infrastructure at:
[email protected]
---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]