This is an automated email from the ASF dual-hosted git repository.
chengzhang pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/shardingsphere.git
The following commit(s) were added to refs/heads/master by this push:
new e05c1b11b77 Refactor pr#36492 code style (#36877)
e05c1b11b77 is described below
commit e05c1b11b7791682c928007942da6e5702e46e64
Author: Zhengqiang Duan <[email protected]>
AuthorDate: Tue Oct 14 13:47:55 2025 +0800
Refactor pr#36492 code style (#36877)
---
.../database/metadata/DialectDatabaseMetaData.java | 10 ++++
.../option/sqlbatch/DialectSQLBatchOption.java | 23 +++----
.../connector/core/type/DatabaseTypeFactory.java | 14 +++++
.../metadata/database/HiveDatabaseMetaData.java | 6 ++
.../e2e/env/container/util/SQLScriptUtils.java | 32 ++++++----
.../util/spi/SQLBatchExecutionStrategy.java | 51 ----------------
.../spi/impl/HiveSQLBatchExecutionStrategy.java | 48 ---------------
.../e2e/sql/env/DataSetEnvironmentManager.java | 70 +++++++++-------------
.../dialect/HiveDialectQueryBehaviorProvider.java | 8 +--
.../param/array/E2ETestParameterGenerator.java | 2 +-
.../test/e2e/sql/it/sql/dml/BaseDMLE2EIT.java | 22 ++-----
...l => batch_insert_into_user_table_for_hive.xml} | 20 ++++---
...ble_hive.xml => delete_user_table_for_hive.xml} | 17 +++---
...ble_hive.xml => update_user_table_for_hive.xml} | 16 ++---
.../test/resources/cases/dml/e2e-dml-delete.xml | 2 +-
.../test/resources/cases/dml/e2e-dml-insert.xml | 2 +-
.../test/resources/cases/dml/e2e-dml-update.xml | 2 +-
17 files changed, 126 insertions(+), 219 deletions(-)
diff --git a/database/connector/core/src/main/java/org/apache/shardingsphere/database/connector/core/metadata/database/metadata/DialectDatabaseMetaData.java b/database/connector/core/src/main/java/org/apache/shardingsphere/database/connector/core/metadata/database/metadata/DialectDatabaseMetaData.java
index 7e369b19aa1..b961819fcc2 100644
--- a/database/connector/core/src/main/java/org/apache/shardingsphere/database/connector/core/metadata/database/metadata/DialectDatabaseMetaData.java
+++ b/database/connector/core/src/main/java/org/apache/shardingsphere/database/connector/core/metadata/database/metadata/DialectDatabaseMetaData.java
@@ -31,6 +31,7 @@ import org.apache.shardingsphere.database.connector.core.metadata.database.metad
import org.apache.shardingsphere.database.connector.core.metadata.database.metadata.option.pagination.DialectPaginationOption;
import org.apache.shardingsphere.database.connector.core.metadata.database.metadata.option.schema.DefaultSchemaOption;
import org.apache.shardingsphere.database.connector.core.metadata.database.metadata.option.schema.DialectSchemaOption;
+import org.apache.shardingsphere.database.connector.core.metadata.database.metadata.option.sqlbatch.DialectSQLBatchOption;
import org.apache.shardingsphere.database.connector.core.metadata.database.metadata.option.table.DialectDriverQuerySystemCatalogOption;
import org.apache.shardingsphere.database.connector.core.metadata.database.metadata.option.transaction.DialectTransactionOption;
import org.apache.shardingsphere.database.connector.core.spi.DatabaseTypedSPI;
@@ -165,4 +166,13 @@ public interface DialectDatabaseMetaData extends DatabaseTypedSPI {
default Optional<DialectAlterTableOption> getAlterTableOption() {
return Optional.empty();
}
+
+ /**
+ * Get sql batch option.
+ *
+ * @return sql batch option
+ */
+ default DialectSQLBatchOption getSQLBatchOption() {
+ return new DialectSQLBatchOption(true);
+ }
}
diff --git a/test/e2e/sql/src/test/java/org/apache/shardingsphere/test/e2e/sql/framework/metadata/dialect/HiveDialectQueryBehaviorProvider.java b/database/connector/core/src/main/java/org/apache/shardingsphere/database/connector/core/metadata/database/metadata/option/sqlbatch/DialectSQLBatchOption.java
similarity index 60%
copy from test/e2e/sql/src/test/java/org/apache/shardingsphere/test/e2e/sql/framework/metadata/dialect/HiveDialectQueryBehaviorProvider.java
copy to database/connector/core/src/main/java/org/apache/shardingsphere/database/connector/core/metadata/database/metadata/option/sqlbatch/DialectSQLBatchOption.java
index 5bf1325ef6f..a3c90f7d267 100644
--- a/test/e2e/sql/src/test/java/org/apache/shardingsphere/test/e2e/sql/framework/metadata/dialect/HiveDialectQueryBehaviorProvider.java
+++ b/database/connector/core/src/main/java/org/apache/shardingsphere/database/connector/core/metadata/database/metadata/option/sqlbatch/DialectSQLBatchOption.java
@@ -15,24 +15,17 @@
* limitations under the License.
*/
-package org.apache.shardingsphere.test.e2e.sql.framework.metadata.dialect;
+package org.apache.shardingsphere.database.connector.core.metadata.database.metadata.option.sqlbatch;
-import org.apache.shardingsphere.test.e2e.sql.framework.metadata.DialectQueryBehaviorProvider;
-
-import java.util.Optional;
+import lombok.Getter;
+import lombok.RequiredArgsConstructor;
/**
- * Hive dialect query behavior provider.
+ * Dialect sql batch option.
*/
-public final class HiveDialectQueryBehaviorProvider implements DialectQueryBehaviorProvider {
-
- @Override
- public String getDatabaseType() {
- return "Hive";
- }
+@RequiredArgsConstructor
+@Getter
+public final class DialectSQLBatchOption {
- @Override
- public Optional<String> getFallbackOrderByWhenNoPrimaryKey() {
- return Optional.of("1 ASC");
- }
+ private final boolean supportSQLBatch;
}
diff --git a/database/connector/core/src/main/java/org/apache/shardingsphere/database/connector/core/type/DatabaseTypeFactory.java b/database/connector/core/src/main/java/org/apache/shardingsphere/database/connector/core/type/DatabaseTypeFactory.java
index e1b1f958dd7..e2d412610ee 100644
--- a/database/connector/core/src/main/java/org/apache/shardingsphere/database/connector/core/type/DatabaseTypeFactory.java
+++ b/database/connector/core/src/main/java/org/apache/shardingsphere/database/connector/core/type/DatabaseTypeFactory.java
@@ -22,7 +22,10 @@ import lombok.NoArgsConstructor;
import org.apache.shardingsphere.database.connector.core.exception.UnsupportedStorageTypeException;
import org.apache.shardingsphere.infra.exception.ShardingSpherePreconditions;
import org.apache.shardingsphere.infra.spi.ShardingSphereServiceLoader;
+import org.apache.shardingsphere.infra.spi.type.typed.TypedSPILoader;
+import java.sql.DatabaseMetaData;
+import java.sql.SQLException;
import java.util.Collection;
import java.util.stream.Collectors;
@@ -49,6 +52,17 @@ public final class DatabaseTypeFactory {
return databaseTypes.iterator().next();
}
+ /**
+ * Get database type.
+ *
+ * @param metaData database meta data
+ * @return database type
+ * @throws SQLException SQL exception
+ */
+ public static DatabaseType get(final DatabaseMetaData metaData) throws SQLException {
+ return metaData.getDatabaseProductName().contains("Hive") ? TypedSPILoader.getService(DatabaseType.class, "Hive") : get(metaData.getURL());
+ }
+
private static boolean matchURLs(final String url, final DatabaseType databaseType) {
return databaseType.getJdbcUrlPrefixes().stream().anyMatch(url::startsWith);
}
diff --git a/database/connector/dialect/hive/src/main/java/org/apache/shardingsphere/database/connector/hive/metadata/database/HiveDatabaseMetaData.java b/database/connector/dialect/hive/src/main/java/org/apache/shardingsphere/database/connector/hive/metadata/database/HiveDatabaseMetaData.java
index 785c791c5ca..a189e02d227 100644
--- a/database/connector/dialect/hive/src/main/java/org/apache/shardingsphere/database/connector/hive/metadata/database/HiveDatabaseMetaData.java
+++ b/database/connector/dialect/hive/src/main/java/org/apache/shardingsphere/database/connector/hive/metadata/database/HiveDatabaseMetaData.java
@@ -23,6 +23,7 @@ import org.apache.shardingsphere.database.connector.core.metadata.database.metad
import org.apache.shardingsphere.database.connector.core.metadata.database.metadata.option.IdentifierPatternType;
import org.apache.shardingsphere.database.connector.core.metadata.database.metadata.option.schema.DefaultSchemaOption;
import org.apache.shardingsphere.database.connector.core.metadata.database.metadata.option.schema.DialectSchemaOption;
+import org.apache.shardingsphere.database.connector.core.metadata.database.metadata.option.sqlbatch.DialectSQLBatchOption;
/**
* Database meta data of Hive.
@@ -49,6 +50,11 @@ public final class HiveDatabaseMetaData implements DialectDatabaseMetaData {
return new DefaultSchemaOption(false, "default");
}
+ @Override
+ public DialectSQLBatchOption getSQLBatchOption() {
+ return new DialectSQLBatchOption(false);
+ }
+
@Override
public String getDatabaseType() {
return "Hive";
diff --git a/test/e2e/env/src/test/java/org/apache/shardingsphere/test/e2e/env/container/util/SQLScriptUtils.java b/test/e2e/env/src/test/java/org/apache/shardingsphere/test/e2e/env/container/util/SQLScriptUtils.java
index 95f3f5fdc2e..20c4209922d 100644
--- a/test/e2e/env/src/test/java/org/apache/shardingsphere/test/e2e/env/container/util/SQLScriptUtils.java
+++ b/test/e2e/env/src/test/java/org/apache/shardingsphere/test/e2e/env/container/util/SQLScriptUtils.java
@@ -22,7 +22,10 @@ import lombok.NoArgsConstructor;
import lombok.SneakyThrows;
import org.apache.commons.lang3.StringUtils;
import org.apache.commons.lang3.Strings;
-import org.apache.shardingsphere.test.e2e.env.container.util.spi.SQLBatchExecutionStrategy;
+import org.apache.shardingsphere.database.connector.core.metadata.database.metadata.option.sqlbatch.DialectSQLBatchOption;
+import org.apache.shardingsphere.database.connector.core.type.DatabaseType;
+import org.apache.shardingsphere.database.connector.core.type.DatabaseTypeFactory;
+import org.apache.shardingsphere.database.connector.core.type.DatabaseTypeRegistry;
import org.h2.util.ScriptReader;
import javax.sql.DataSource;
@@ -39,7 +42,6 @@ import java.sql.SQLException;
import java.sql.Statement;
import java.util.Collection;
import java.util.LinkedList;
-import java.util.ServiceLoader;
/**
* SQL script utility class.
@@ -59,7 +61,7 @@ public final class SQLScriptUtils {
public static void execute(final DataSource dataSource, final String scriptFilePath) {
Collection<String> sqls = readSQLs(scriptFilePath);
try (Connection connection = dataSource.getConnection()) {
- executeBatch(connection, sqls);
+ execute(connection, sqls);
}
}
@@ -71,7 +73,21 @@ public final class SQLScriptUtils {
*/
@SneakyThrows({SQLException.class, IOException.class})
public static void execute(final Connection connection, final String scriptFilePath) {
- executeBatch(connection, readSQLs(scriptFilePath));
+ execute(connection, readSQLs(scriptFilePath));
+ }
+
+ private static void execute(final Connection connection, final Collection<String> sqls) throws SQLException {
+ DatabaseType databaseType = DatabaseTypeFactory.get(connection.getMetaData());
+ DialectSQLBatchOption sqlBatchOption = new DatabaseTypeRegistry(databaseType).getDialectDatabaseMetaData().getSQLBatchOption();
+ if (sqlBatchOption.isSupportSQLBatch()) {
+ executeBatch(connection, sqls);
+ return;
+ }
+ for (String each : sqls) {
+ try (Statement statement = connection.createStatement()) {
+ statement.execute(each);
+ }
+ }
}
private static Collection<String> readSQLs(final String scriptFilePath) throws IOException {
@@ -101,14 +117,6 @@ public final class SQLScriptUtils {
private static void executeBatch(final Connection connection, final Collection<String> sqls) throws SQLException {
int count = 0;
try (Statement statement = connection.createStatement()) {
- String driverName = connection.getMetaData().getDriverName();
- ServiceLoader<SQLBatchExecutionStrategy> loader = ServiceLoader.load(SQLBatchExecutionStrategy.class);
- for (SQLBatchExecutionStrategy strategy : loader) {
- if (strategy.supports(driverName)) {
- strategy.execute(connection, statement, sqls);
- return;
- }
- }
for (String each : sqls) {
statement.addBatch(each);
count++;
diff --git a/test/e2e/env/src/test/java/org/apache/shardingsphere/test/e2e/env/container/util/spi/SQLBatchExecutionStrategy.java b/test/e2e/env/src/test/java/org/apache/shardingsphere/test/e2e/env/container/util/spi/SQLBatchExecutionStrategy.java
deleted file mode 100644
index d1a0f7df2bb..00000000000
--- a/test/e2e/env/src/test/java/org/apache/shardingsphere/test/e2e/env/container/util/spi/SQLBatchExecutionStrategy.java
+++ /dev/null
@@ -1,51 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.shardingsphere.test.e2e.env.container.util.spi;
-
-import org.apache.shardingsphere.infra.spi.type.typed.TypedSPI;
-
-import java.sql.Connection;
-import java.sql.SQLException;
-import java.sql.Statement;
-import java.util.Collection;
-
-/**
- * SQL batch execution strategy.
- *
- * <p>Allow different drivers to decide whether to use addBatch/executeBatch or fallback to execute one by one.</p>
- */
-public interface SQLBatchExecutionStrategy extends TypedSPI {
-
- /**
- * Whether this strategy supports the given JDBC driver name.
- *
- * @param jdbcDriverName driver name from connection metadata
- * @return true if supported
- */
- boolean supports(String jdbcDriverName);
-
- /**
- * Execute SQLs using the strategy.
- *
- * @param connection connection
- * @param statement statement
- * @param sqls SQL collection
- * @throws SQLException SQL exception
- */
- void execute(Connection connection, Statement statement, Collection<String> sqls) throws SQLException;
-}
diff --git a/test/e2e/env/src/test/java/org/apache/shardingsphere/test/e2e/env/container/util/spi/impl/HiveSQLBatchExecutionStrategy.java b/test/e2e/env/src/test/java/org/apache/shardingsphere/test/e2e/env/container/util/spi/impl/HiveSQLBatchExecutionStrategy.java
deleted file mode 100644
index 63b0b906117..00000000000
--- a/test/e2e/env/src/test/java/org/apache/shardingsphere/test/e2e/env/container/util/spi/impl/HiveSQLBatchExecutionStrategy.java
+++ /dev/null
@@ -1,48 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.shardingsphere.test.e2e.env.container.util.spi.impl;
-
-import org.apache.shardingsphere.test.e2e.env.container.util.spi.SQLBatchExecutionStrategy;
-
-import java.sql.Connection;
-import java.sql.SQLException;
-import java.sql.Statement;
-import java.util.Collection;
-
-/**
- * Hive SQL batch execution strategy.
- */
-public final class HiveSQLBatchExecutionStrategy implements SQLBatchExecutionStrategy {
-
- @Override
- public String getType() {
- return "Hive";
- }
-
- @Override
- public boolean supports(final String jdbcDriverName) {
- return null != jdbcDriverName && jdbcDriverName.toLowerCase().contains("hive");
- }
-
- @Override
- public void execute(final Connection connection, final Statement statement, final Collection<String> sqls) throws SQLException {
- for (String each : sqls) {
- statement.execute(each);
- }
- }
-}
diff --git a/test/e2e/sql/src/test/java/org/apache/shardingsphere/test/e2e/sql/env/DataSetEnvironmentManager.java b/test/e2e/sql/src/test/java/org/apache/shardingsphere/test/e2e/sql/env/DataSetEnvironmentManager.java
index 373792cd8dd..65c284303eb 100644
--- a/test/e2e/sql/src/test/java/org/apache/shardingsphere/test/e2e/sql/env/DataSetEnvironmentManager.java
+++ b/test/e2e/sql/src/test/java/org/apache/shardingsphere/test/e2e/sql/env/DataSetEnvironmentManager.java
@@ -19,12 +19,12 @@ package org.apache.shardingsphere.test.e2e.sql.env;
import lombok.RequiredArgsConstructor;
import lombok.SneakyThrows;
+import org.apache.shardingsphere.database.connector.core.metadata.database.metadata.option.sqlbatch.DialectSQLBatchOption;
import org.apache.shardingsphere.database.connector.core.type.DatabaseType;
import org.apache.shardingsphere.database.connector.core.type.DatabaseTypeFactory;
import org.apache.shardingsphere.database.connector.core.type.DatabaseTypeRegistry;
import org.apache.shardingsphere.database.connector.opengauss.type.OpenGaussDatabaseType;
import org.apache.shardingsphere.database.connector.postgresql.type.PostgreSQLDatabaseType;
-import org.apache.shardingsphere.infra.spi.type.typed.TypedSPILoader;
import org.apache.shardingsphere.infra.datanode.DataNode;
import org.apache.shardingsphere.infra.executor.kernel.ExecutorEngine;
import org.apache.shardingsphere.infra.executor.kernel.thread.ExecutorServiceManager;
@@ -45,7 +45,6 @@ import java.sql.Connection;
import java.sql.Date;
import java.sql.PreparedStatement;
import java.sql.SQLException;
-import java.sql.SQLFeatureNotSupportedException;
import java.util.Collection;
import java.util.LinkedHashMap;
import java.util.LinkedList;
@@ -96,9 +95,10 @@ public final class DataSetEnvironmentManager {
sqlValueGroups.add(new SQLValueGroup(dataSetMetaData, row.splitValues(DATA_COLUMN_DELIMITER)));
}
String insertSQL;
+ DatabaseType databaseType;
try (Connection connection = dataSourceMap.get(dataNode.getDataSourceName()).getConnection()) {
- String insertTableName = dataNode.getTableName();
- insertSQL = generateInsertSQL(insertTableName, dataSetMetaData.getColumns(), databaseType);
+ databaseType = DatabaseTypeFactory.get(connection.getMetaData());
+ insertSQL = generateInsertSQL(dataNode.getTableName(), dataSetMetaData.getColumns(), databaseType);
}
fillDataTasks.add(new InsertTask(dataSourceMap.get(dataNode.getDataSourceName()), insertSQL, sqlValueGroups, databaseType));
}
@@ -202,40 +202,37 @@ public final class DataSetEnvironmentManager {
try (
Connection connection = dataSource.getConnection();
PreparedStatement preparedStatement = connection.prepareStatement(insertSQL)) {
- boolean supportsBatchUpdates;
- try {
- supportsBatchUpdates = connection.getMetaData().supportsBatchUpdates();
- } catch (final SQLFeatureNotSupportedException ignored) {
- supportsBatchUpdates = false;
- }
- if (supportsBatchUpdates) {
- for (SQLValueGroup each : sqlValueGroups) {
- setParameters(preparedStatement, each);
- preparedStatement.addBatch();
- }
- preparedStatement.executeBatch();
+ DialectSQLBatchOption sqlBatchOption = new DatabaseTypeRegistry(databaseType).getDialectDatabaseMetaData().getSQLBatchOption();
+ if (sqlBatchOption.isSupportSQLBatch()) {
+ executeBatch(preparedStatement);
} else {
- for (SQLValueGroup each : sqlValueGroups) {
- setParameters(preparedStatement, each);
- preparedStatement.executeUpdate();
- }
+ executeUpdate(preparedStatement);
}
}
return null;
}
+ private void executeBatch(final PreparedStatement preparedStatement) throws SQLException {
+ for (SQLValueGroup each : sqlValueGroups) {
+ setParameters(preparedStatement, each);
+ preparedStatement.addBatch();
+ }
+ preparedStatement.executeBatch();
+ }
+
+ private void executeUpdate(final PreparedStatement preparedStatement) throws SQLException {
+ for (SQLValueGroup each : sqlValueGroups) {
+ setParameters(preparedStatement, each);
+ preparedStatement.executeUpdate();
+ }
+ }
+
private void setParameters(final PreparedStatement preparedStatement, final SQLValueGroup sqlValueGroup) throws SQLException {
for (SQLValue each : sqlValueGroup.getValues()) {
- Object value = each.getValue();
- int index = each.getIndex();
- if ("Hive".equalsIgnoreCase(databaseType.getType())) {
- if (value instanceof Date) {
- preparedStatement.setDate(index, (java.sql.Date) value);
- } else {
- preparedStatement.setObject(index, value);
- }
+ if ("Hive".equalsIgnoreCase(databaseType.getType()) && each.getValue() instanceof Date) {
+ preparedStatement.setDate(each.getIndex(), (java.sql.Date) each.getValue());
} else {
- preparedStatement.setObject(index, value);
+ preparedStatement.setObject(each.getIndex(), each.getValue());
}
}
}
@@ -251,7 +248,7 @@ public final class DataSetEnvironmentManager {
@Override
public Void call() throws SQLException {
try (Connection connection = dataSource.getConnection()) {
- DatabaseType databaseType = getDatabaseType(connection);
+ DatabaseType databaseType = DatabaseTypeFactory.get(connection.getMetaData());
for (String each : tableNames) {
String quotedTableName = getQuotedTableName(each, databaseType);
try (PreparedStatement preparedStatement = connection.prepareStatement(String.format("TRUNCATE TABLE %s", quotedTableName))) {
@@ -262,19 +259,6 @@ public final class DataSetEnvironmentManager {
return null;
}
- private DatabaseType getDatabaseType(final Connection connection) throws SQLException {
- try {
- String url = connection.getMetaData().getURL();
- return DatabaseTypeFactory.get(url);
- } catch (final SQLFeatureNotSupportedException ex) {
- String driverName = connection.getMetaData().getDriverName();
- if (null != driverName && driverName.toLowerCase().contains("hive")) {
- return TypedSPILoader.getService(DatabaseType.class, "Hive");
- }
- throw ex;
- }
- }
-
private String getQuotedTableName(final String tableName, final DatabaseType databaseType) {
DatabaseTypeRegistry databaseTypeRegistry = new DatabaseTypeRegistry(databaseType);
return databaseTypeRegistry.getDialectDatabaseMetaData().getQuoteCharacter().wrap(databaseTypeRegistry.formatIdentifierPattern(tableName));
diff --git a/test/e2e/sql/src/test/java/org/apache/shardingsphere/test/e2e/sql/framework/metadata/dialect/HiveDialectQueryBehaviorProvider.java b/test/e2e/sql/src/test/java/org/apache/shardingsphere/test/e2e/sql/framework/metadata/dialect/HiveDialectQueryBehaviorProvider.java
index 5bf1325ef6f..2c15b6a070d 100644
--- a/test/e2e/sql/src/test/java/org/apache/shardingsphere/test/e2e/sql/framework/metadata/dialect/HiveDialectQueryBehaviorProvider.java
+++ b/test/e2e/sql/src/test/java/org/apache/shardingsphere/test/e2e/sql/framework/metadata/dialect/HiveDialectQueryBehaviorProvider.java
@@ -27,12 +27,12 @@ import java.util.Optional;
public final class HiveDialectQueryBehaviorProvider implements DialectQueryBehaviorProvider {
@Override
- public String getDatabaseType() {
- return "Hive";
+ public Optional<String> getFallbackOrderByWhenNoPrimaryKey() {
+ return Optional.of("1 ASC");
}
@Override
- public Optional<String> getFallbackOrderByWhenNoPrimaryKey() {
- return Optional.of("1 ASC");
+ public String getDatabaseType() {
+ return "Hive";
}
}
diff --git a/test/e2e/sql/src/test/java/org/apache/shardingsphere/test/e2e/sql/framework/param/array/E2ETestParameterGenerator.java b/test/e2e/sql/src/test/java/org/apache/shardingsphere/test/e2e/sql/framework/param/array/E2ETestParameterGenerator.java
index 31dfb352e87..0486659271a 100644
--- a/test/e2e/sql/src/test/java/org/apache/shardingsphere/test/e2e/sql/framework/param/array/E2ETestParameterGenerator.java
+++ b/test/e2e/sql/src/test/java/org/apache/shardingsphere/test/e2e/sql/framework/param/array/E2ETestParameterGenerator.java
@@ -178,7 +178,7 @@ public final class E2ETestParameterGenerator {
private Collection<E2ETestParameter> getCaseTestParameter(final SQLE2ETestCaseContext testCaseContext, final DatabaseType databaseType, final SQLCommandType sqlCommandType) {
Collection<E2ETestParameter> result = new LinkedList<>();
- if (testCaseContext.getTestCase().isSkipBatch() && "Hive".equalsIgnoreCase(databaseType.getType())) {
+ if (testCaseContext.getTestCase().isSkipBatch()) {
return Collections.emptyList();
}
for (String each : envAdapters) {
diff --git a/test/e2e/sql/src/test/java/org/apache/shardingsphere/test/e2e/sql/it/sql/dml/BaseDMLE2EIT.java b/test/e2e/sql/src/test/java/org/apache/shardingsphere/test/e2e/sql/it/sql/dml/BaseDMLE2EIT.java
index 3efe223651d..1f08b9b45bd 100644
--- a/test/e2e/sql/src/test/java/org/apache/shardingsphere/test/e2e/sql/it/sql/dml/BaseDMLE2EIT.java
+++ b/test/e2e/sql/src/test/java/org/apache/shardingsphere/test/e2e/sql/it/sql/dml/BaseDMLE2EIT.java
@@ -35,10 +35,10 @@ import org.apache.shardingsphere.test.e2e.sql.cases.dataset.DataSetLoader;
import org.apache.shardingsphere.test.e2e.sql.cases.dataset.metadata.DataSetColumn;
import org.apache.shardingsphere.test.e2e.sql.cases.dataset.metadata.DataSetMetaData;
import org.apache.shardingsphere.test.e2e.sql.cases.dataset.row.DataSetRow;
-import org.apache.shardingsphere.test.e2e.sql.framework.metadata.DialectQueryBehaviorProvider;
import org.apache.shardingsphere.test.e2e.sql.env.DataSetEnvironmentManager;
import org.apache.shardingsphere.test.e2e.sql.env.SQLE2EEnvironmentEngine;
import org.apache.shardingsphere.test.e2e.sql.framework.metadata.DialectDatabaseAssertionMetaDataFactory;
+import org.apache.shardingsphere.test.e2e.sql.framework.metadata.DialectQueryBehaviorProvider;
import org.apache.shardingsphere.test.e2e.sql.framework.param.model.AssertionTestParameter;
import org.apache.shardingsphere.test.e2e.sql.framework.param.model.CaseTestParameter;
import org.apache.shardingsphere.test.e2e.sql.framework.param.model.E2ETestParameter;
@@ -235,27 +235,17 @@ public abstract class BaseDMLE2EIT implements SQLE2EIT {
private String generateFetchActualDataSQL(final Map<String, DataSource> actualDataSourceMap, final DataNode dataNode, final DatabaseType databaseType) throws SQLException {
String tableName = dataNode.getTableName();
Optional<String> primaryKeyColumnName = DialectDatabaseAssertionMetaDataFactory.getPrimaryKeyColumnName(databaseType, actualDataSourceMap.get(dataNode.getDataSourceName()), tableName);
- if (primaryKeyColumnName.isPresent()) {
- return String.format("SELECT * FROM %s ORDER BY %s ASC", tableName, primaryKeyColumnName.get());
- }
- Optional<DialectQueryBehaviorProvider> behaviorProvider = DatabaseTypedSPILoader.findService(DialectQueryBehaviorProvider.class, databaseType);
- if (behaviorProvider.isPresent()) {
- Optional<String> fallbackOrderBy = behaviorProvider.get().getFallbackOrderByWhenNoPrimaryKey();
- if (fallbackOrderBy.isPresent()) {
- return String.format("SELECT * FROM %s ORDER BY %s", tableName, fallbackOrderBy.get());
- }
- }
- return String.format("SELECT * FROM %s", tableName);
+ return primaryKeyColumnName.map(optional -> String.format("SELECT * FROM %s ORDER BY %s ASC", tableName, optional))
+ .orElseGet(() -> DatabaseTypedSPILoader.findService(DialectQueryBehaviorProvider.class, databaseType)
+ .flatMap(DialectQueryBehaviorProvider::getFallbackOrderByWhenNoPrimaryKey).map(optional -> String.format("SELECT * FROM %s ORDER BY %s", tableName, optional))
+ .orElseGet(() -> String.format("SELECT * FROM %s", tableName)));
}
private void assertMetaData(final ResultSetMetaData actual, final Collection<DataSetColumn> expected) throws SQLException {
assertThat(actual.getColumnCount(), is(expected.size()));
int index = 1;
for (DataSetColumn each : expected) {
- String actualLabel = actual.getColumnLabel(index++);
- int lastDotIndex = actualLabel != null ? actualLabel.lastIndexOf('.') : -1;
- String normalizedLabel = lastDotIndex >= 0 ? actualLabel.substring(lastDotIndex + 1) : actualLabel;
- assertThat(normalizedLabel.toUpperCase(), is(each.getName().toUpperCase()));
+ assertThat(actual.getColumnLabel(index++).toUpperCase(), is(each.getName().toUpperCase()));
}
}
diff --git a/test/e2e/sql/src/test/resources/cases/dml/dataset/encrypt/update_user_table_hive.xml b/test/e2e/sql/src/test/resources/cases/dml/dataset/encrypt/batch_insert_into_user_table_for_hive.xml
similarity index 88%
copy from test/e2e/sql/src/test/resources/cases/dml/dataset/encrypt/update_user_table_hive.xml
copy to test/e2e/sql/src/test/resources/cases/dml/dataset/encrypt/batch_insert_into_user_table_for_hive.xml
index b96386db13d..53eee3f6774 100644
--- a/test/e2e/sql/src/test/resources/cases/dml/dataset/encrypt/update_user_table_hive.xml
+++ b/test/e2e/sql/src/test/resources/cases/dml/dataset/encrypt/batch_insert_into_user_table_for_hive.xml
@@ -15,16 +15,16 @@
~ limitations under the License.
-->
-<dataset update-count="1">
+<dataset update-count="2">
<metadata data-nodes="encrypt.t_user">
- <column name="user_id" type="numeric" />
- <column name="user_name_cipher" type="varchar" />
- <column name="user_name_like" type="varchar" />
- <column name="password_cipher" type="varchar" />
- <column name="email_cipher" type="varchar" />
- <column name="user_telephone_cipher" type="varchar" />
- <column name="user_telephone_like" type="varchar" />
- <column name="creation_date" type="datetime" />
+ <column name="t_user.user_id" type="numeric" />
+ <column name="t_user.user_name_cipher" type="varchar" />
+ <column name="t_user.user_name_like" type="varchar" />
+ <column name="t_user.password_cipher" type="varchar" />
+ <column name="t_user.email_cipher" type="varchar" />
+ <column name="t_user.user_telephone_cipher" type="varchar" />
+ <column name="t_user.user_telephone_like" type="varchar" />
+ <column name="t_user.creation_date" type="datetime" />
</metadata>
<row data-node="encrypt.t_user" values="10, sVq8Lmm+j6bZE5EKSilJEQ==, yi`mht`m, aQol0b6th65d0aXe+zFPsQ==, WM0fHOH91JNWnHTkiqBdyNmzk4uJ7CCz4mB1va9Ya1M=, kLjLJIMnfyHT2nA+viaoaQ==, 01454589811, 2017-08-08" />
<row data-node="encrypt.t_user" values="11, fQ7IzBxKVuNHtUF6h6WSBg==, mhth, wuhmEKgdgrWQYt+Ev0hgGA==, svATu3uWv9KfiloWJeWx3A==, 0kDFxndQdzauFwL/wyCsNQ==, 01454589810, 2017-08-08" />
@@ -56,4 +56,6 @@
<row data-node="encrypt.t_user" values="37, aXS0VfnqHIAnOAtDjsF/9Q==, 伈嶱啴, bO/8ha1eS/H8/3DugjdOAQ==, fwyOxfHtLxNuSCFmghYiY0qMsgbpjg5UIo3xmJOLGu0=, 60fpnMdKCWeyKzxkdthn2Q==, 09101401454, 2017-08-08" />
<row data-node="encrypt.t_user" values="38, 59/68izQEdnwNSueX1lPAA==, 伈乄, ilD/Tk7DUG4+EuznS1bNLg==, 2emhAeiXPr0kHbFrhYlM1dmzk4uJ7CCz4mB1va9Ya1M=, 60fpnMdKCWeyKzxkdthn2Q==, 09101401454, 2017-08-08" />
<row data-node="encrypt.t_user" values="39, fn9LnNltUAOWO0F0iy0+Jw==, 伈妅, qe/WdUiSPP1RAsSSuejGJw==, zx2omwIbXHpEJeh8ta7HqQq2ZLhWcqfQ8/EQnIqMx+g=, 60fpnMdKCWeyKzxkdthn2Q==, 09101401454, 2017-08-08" />
+ <row data-node="encrypt.t_user" values="40, Dit2zNmv7pj+Jf5ND1tGQQ==, upl`t, MyOShk4kjRnds7CZfU5NCw==, VvWKtMDr5+58OMAAu6BH5A==, 8Rj1Bg696vJMKwPNxzxGOg==, 01450145014, 2018-08-08" />
+ <row data-node="encrypt.t_user" values="41, YTfHI/5xnPNvpjCR20sPWw==, lhld, qbUuBn0oxdrV8sNNyoqDCg==, FRo1h44oclER1+0MeI1T0w==, cD36lxxtVApK4QLzec87zg==, 14541454545, 2019-08-08" />
</dataset>
diff --git a/test/e2e/sql/src/test/resources/cases/dml/dataset/encrypt/update_user_table_hive.xml b/test/e2e/sql/src/test/resources/cases/dml/dataset/encrypt/delete_user_table_for_hive.xml
similarity index 90%
copy from test/e2e/sql/src/test/resources/cases/dml/dataset/encrypt/update_user_table_hive.xml
copy to test/e2e/sql/src/test/resources/cases/dml/dataset/encrypt/delete_user_table_for_hive.xml
index b96386db13d..813825260bd 100644
--- a/test/e2e/sql/src/test/resources/cases/dml/dataset/encrypt/update_user_table_hive.xml
+++ b/test/e2e/sql/src/test/resources/cases/dml/dataset/encrypt/delete_user_table_for_hive.xml
@@ -17,16 +17,15 @@
<dataset update-count="1">
<metadata data-nodes="encrypt.t_user">
- <column name="user_id" type="numeric" />
- <column name="user_name_cipher" type="varchar" />
- <column name="user_name_like" type="varchar" />
- <column name="password_cipher" type="varchar" />
- <column name="email_cipher" type="varchar" />
- <column name="user_telephone_cipher" type="varchar" />
- <column name="user_telephone_like" type="varchar" />
- <column name="creation_date" type="datetime" />
+ <column name="t_user.user_id" type="numeric" />
+ <column name="t_user.user_name_cipher" type="varchar" />
+ <column name="t_user.user_name_like" type="varchar" />
+ <column name="t_user.password_cipher" type="varchar" />
+ <column name="t_user.email_cipher" type="varchar" />
+ <column name="t_user.user_telephone_cipher" type="varchar" />
+ <column name="t_user.user_telephone_like" type="varchar" />
+ <column name="t_user.creation_date" type="datetime" />
</metadata>
- <row data-node="encrypt.t_user" values="10, sVq8Lmm+j6bZE5EKSilJEQ==, yi`mht`m, aQol0b6th65d0aXe+zFPsQ==, WM0fHOH91JNWnHTkiqBdyNmzk4uJ7CCz4mB1va9Ya1M=, kLjLJIMnfyHT2nA+viaoaQ==, 01454589811, 2017-08-08" />
<row data-node="encrypt.t_user" values="11, fQ7IzBxKVuNHtUF6h6WSBg==, mhth, wuhmEKgdgrWQYt+Ev0hgGA==, svATu3uWv9KfiloWJeWx3A==, 0kDFxndQdzauFwL/wyCsNQ==, 01454589810, 2017-08-08" />
<row data-node="encrypt.t_user" values="12, AQRWSlufQPog/b64YRhu6Q==, x`mhxt, x7A+2jq9B6DSOSFtSOibdA==, nHJv9e6NiClIuGHOjHLvCAq2ZLhWcqfQ8/EQnIqMx+g=, a/SzSJLapt5iBXvF2c9ycw==, 01454589811, 2017-08-08" />
<row data-node="encrypt.t_user" values="13, 5NqS4YvpT+mHBFqZOZ3QDA==, yi`pmht, zi6b4xYRjjV+bBk2R4wB+w==, MLBZczLjriUXvg3aM5QPTxMJbLjNh8yeNrSNBek/VTw=, b6VVhG+F6ujG8IMUZJAIFg==, 01454589814, 2017-08-08" />
diff --git a/test/e2e/sql/src/test/resources/cases/dml/dataset/encrypt/update_user_table_hive.xml b/test/e2e/sql/src/test/resources/cases/dml/dataset/encrypt/update_user_table_for_hive.xml
similarity index 93%
rename from test/e2e/sql/src/test/resources/cases/dml/dataset/encrypt/update_user_table_hive.xml
rename to test/e2e/sql/src/test/resources/cases/dml/dataset/encrypt/update_user_table_for_hive.xml
index b96386db13d..965ab642cc9 100644
--- a/test/e2e/sql/src/test/resources/cases/dml/dataset/encrypt/update_user_table_hive.xml
+++ b/test/e2e/sql/src/test/resources/cases/dml/dataset/encrypt/update_user_table_for_hive.xml
@@ -17,14 +17,14 @@
<dataset update-count="1">
<metadata data-nodes="encrypt.t_user">
- <column name="user_id" type="numeric" />
- <column name="user_name_cipher" type="varchar" />
- <column name="user_name_like" type="varchar" />
- <column name="password_cipher" type="varchar" />
- <column name="email_cipher" type="varchar" />
- <column name="user_telephone_cipher" type="varchar" />
- <column name="user_telephone_like" type="varchar" />
- <column name="creation_date" type="datetime" />
+ <column name="t_user.user_id" type="numeric" />
+ <column name="t_user.user_name_cipher" type="varchar" />
+ <column name="t_user.user_name_like" type="varchar" />
+ <column name="t_user.password_cipher" type="varchar" />
+ <column name="t_user.email_cipher" type="varchar" />
+ <column name="t_user.user_telephone_cipher" type="varchar" />
+ <column name="t_user.user_telephone_like" type="varchar" />
+ <column name="t_user.creation_date" type="datetime" />
</metadata>
<row data-node="encrypt.t_user" values="10, sVq8Lmm+j6bZE5EKSilJEQ==, yi`mht`m, aQol0b6th65d0aXe+zFPsQ==, WM0fHOH91JNWnHTkiqBdyNmzk4uJ7CCz4mB1va9Ya1M=, kLjLJIMnfyHT2nA+viaoaQ==, 01454589811, 2017-08-08" />
<row data-node="encrypt.t_user" values="11, fQ7IzBxKVuNHtUF6h6WSBg==, mhth, wuhmEKgdgrWQYt+Ev0hgGA==, svATu3uWv9KfiloWJeWx3A==, 0kDFxndQdzauFwL/wyCsNQ==, 01454589810, 2017-08-08" />
diff --git a/test/e2e/sql/src/test/resources/cases/dml/e2e-dml-delete.xml b/test/e2e/sql/src/test/resources/cases/dml/e2e-dml-delete.xml
index 5b124bfebcd..88823781533 100644
--- a/test/e2e/sql/src/test/resources/cases/dml/e2e-dml-delete.xml
+++ b/test/e2e/sql/src/test/resources/cases/dml/e2e-dml-delete.xml
@@ -71,6 +71,6 @@
</test-case>
<test-case sql="DELETE FROM t_user WHERE user_id = ?" db-types="Hive" scenario-types="encrypt" skip-batch="true">
- <assertion parameters="10:int" expected-data-file="delete_user_table.xml" />
+ <assertion parameters="10:int" expected-data-file="delete_user_table_for_hive.xml" />
</test-case>
</e2e-test-cases>
diff --git a/test/e2e/sql/src/test/resources/cases/dml/e2e-dml-insert.xml b/test/e2e/sql/src/test/resources/cases/dml/e2e-dml-insert.xml
index bdff9d55e0b..cedb39c730c 100644
--- a/test/e2e/sql/src/test/resources/cases/dml/e2e-dml-insert.xml
+++ b/test/e2e/sql/src/test/resources/cases/dml/e2e-dml-insert.xml
@@ -256,6 +256,6 @@
</test-case>
<test-case sql="INSERT INTO t_user (user_id, user_name, password, email, telephone, creation_date) VALUES (?, ?, '123456', '[email protected]', '12341234123', '2018-08-08'), (?, ?, '23456', '[email protected]', '23452345456', '2019-08-08')" db-types="Hive" scenario-types="encrypt" skip-batch="true">
- <assertion parameters="40:int, tomas:String, 41:int, mike:String" expected-data-file="batch_insert_into_user_table.xml" />
+ <assertion parameters="40:int, tomas:String, 41:int, mike:String" expected-data-file="batch_insert_into_user_table_for_hive.xml" />
</test-case>
</e2e-test-cases>
diff --git a/test/e2e/sql/src/test/resources/cases/dml/e2e-dml-update.xml b/test/e2e/sql/src/test/resources/cases/dml/e2e-dml-update.xml
index cfd29b4d818..597ec3cf9e1 100644
--- a/test/e2e/sql/src/test/resources/cases/dml/e2e-dml-update.xml
+++ b/test/e2e/sql/src/test/resources/cases/dml/e2e-dml-update.xml
@@ -111,6 +111,6 @@
</test-case>
<test-case sql="UPDATE t_user SET password = ? WHERE user_id = ?" db-types="Hive" scenario-types="encrypt" skip-batch="true">
- <assertion parameters="222222:String, 11:int" expected-data-file="update_user_table_hive.xml" />
+ <assertion parameters="222222:String, 11:int" expected-data-file="update_user_table_for_hive.xml" />
</test-case>
</e2e-test-cases>
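
For readers tracing the behavior change: the commit replaces the test-only SQLBatchExecutionStrategy SPI with a DialectSQLBatchOption exposed by DialectDatabaseMetaData, and callers now branch on that option instead of sniffing the JDBC driver name. A minimal sketch of the resulting pattern, assuming the types and methods introduced in the diff above (the wrapper class name here is illustrative only, not part of the commit):

import java.sql.Connection;
import java.sql.SQLException;
import java.sql.Statement;
import java.util.Collection;

import org.apache.shardingsphere.database.connector.core.metadata.database.metadata.option.sqlbatch.DialectSQLBatchOption;
import org.apache.shardingsphere.database.connector.core.type.DatabaseType;
import org.apache.shardingsphere.database.connector.core.type.DatabaseTypeFactory;
import org.apache.shardingsphere.database.connector.core.type.DatabaseTypeRegistry;

public final class SQLBatchAwareExecutor {
    
    // Execute the given SQLs, batching only when the resolved dialect reports SQL batch support.
    public static void execute(final Connection connection, final Collection<String> sqls) throws SQLException {
        DatabaseType databaseType = DatabaseTypeFactory.get(connection.getMetaData());
        DialectSQLBatchOption sqlBatchOption = new DatabaseTypeRegistry(databaseType).getDialectDatabaseMetaData().getSQLBatchOption();
        if (sqlBatchOption.isSupportSQLBatch()) {
            try (Statement statement = connection.createStatement()) {
                for (String each : sqls) {
                    statement.addBatch(each);
                }
                statement.executeBatch();
            }
            return;
        }
        // Dialects such as Hive opt out of addBatch/executeBatch and run each statement individually.
        for (String each : sqls) {
            try (Statement statement = connection.createStatement()) {
                statement.execute(each);
            }
        }
    }
}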