This is an automated email from the ASF dual-hosted git repository.
yinweihong pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/shardingsphere.git
The following commit(s) were added to refs/heads/master by this push:
new 84323b43c76 Revise #17251
new 408c6e4ebfc Merge pull request #17816 from strongduanmu/dev
84323b43c76 is described below
commit 84323b43c76616f83f448b8f1603fbb26a36b9ed
Author: strongduanmu <[email protected]>
AuthorDate: Thu May 19 23:17:11 2022 +0800
Revise #17251
---
.../loader/dialect/OracleSchemaMetaDataLoader.java | 17 ++++++++---------
1 file changed, 8 insertions(+), 9 deletions(-)
diff --git a/shardingsphere-infra/shardingsphere-infra-common/src/main/java/org/apache/shardingsphere/infra/metadata/schema/loader/dialect/OracleSchemaMetaDataLoader.java b/shardingsphere-infra/shardingsphere-infra-common/src/main/java/org/apache/shardingsphere/infra/metadata/schema/loader/dialect/OracleSchemaMetaDataLoader.java
index e8fadb8c28a..d9897be6210 100644
--- a/shardingsphere-infra/shardingsphere-infra-common/src/main/java/org/apache/shardingsphere/infra/metadata/schema/loader/dialect/OracleSchemaMetaDataLoader.java
+++ b/shardingsphere-infra/shardingsphere-infra-common/src/main/java/org/apache/shardingsphere/infra/metadata/schema/loader/dialect/OracleSchemaMetaDataLoader.java
@@ -69,20 +69,19 @@ public final class OracleSchemaMetaDataLoader implements DialectSchemaMetaDataLoader
private static final int IDENTITY_COLUMN_START_MINOR_VERSION = 1;
- private static final int BATCH_SIZE = 1000;
+ private static final int MAX_EXPRESSION_SIZE = 1000;
@Override
public Collection<SchemaMetaData> load(final DataSource dataSource, final Collection<String> tables, final String defaultSchemaName) throws SQLException {
- Map<String, TableMetaData> tableMetaDataMap = new LinkedHashMap<>();
- Map<String, Collection<IndexMetaData>> indexMetaDataMap = new LinkedHashMap<>();
- Map<String, Collection<ColumnMetaData>> columnMetaDataMap = new HashMap<>(tables.size(), 1.0f);
- List<List<String>> splitTables = Lists.partition(new ArrayList(tables), BATCH_SIZE);
+ Map<String, Collection<ColumnMetaData>> columnMetaDataMap = new HashMap<>(tables.size(), 1);
+ Map<String, Collection<IndexMetaData>> indexMetaDataMap = new HashMap<>(tables.size(), 1);
try (Connection connection = dataSource.getConnection()) {
- for (List<String> each : splitTables) {
+ for (List<String> each : Lists.partition(new ArrayList<>(tables), MAX_EXPRESSION_SIZE)) {
columnMetaDataMap.putAll(loadColumnMetaDataMap(connection, each));
+ indexMetaDataMap.putAll(loadIndexMetaData(connection, each));
}
- indexMetaDataMap.putAll(columnMetaDataMap.isEmpty() ? Collections.emptyMap() : loadIndexMetaData(connection, columnMetaDataMap.keySet()));
}
+ Map<String, TableMetaData> tableMetaDataMap = new LinkedHashMap<>();
for (Entry<String, Collection<ColumnMetaData>> entry : columnMetaDataMap.entrySet()) {
tableMetaDataMap.put(entry.getKey(), new TableMetaData(entry.getKey(), entry.getValue(), indexMetaDataMap.getOrDefault(entry.getKey(), Collections.emptyList()), Collections.emptyList()));
}
@@ -90,7 +89,7 @@ public final class OracleSchemaMetaDataLoader implements DialectSchemaMetaDataLoader
}
private Map<String, Collection<ColumnMetaData>> loadColumnMetaDataMap(final Connection connection, final Collection<String> tables) throws SQLException {
- Map<String, Collection<ColumnMetaData>> result = new HashMap<>();
+ Map<String, Collection<ColumnMetaData>> result = new HashMap<>(tables.size(), 1);
try (PreparedStatement preparedStatement = connection.prepareStatement(getTableMetaDataSQL(tables, connection.getMetaData()))) {
Map<String, Integer> dataTypes = DataTypeLoader.load(connection.getMetaData());
appendNumberDataType(dataTypes);
@@ -155,7 +154,7 @@ public final class OracleSchemaMetaDataLoader implements DialectSchemaMetaDataLoader
}
private Map<String, Collection<IndexMetaData>> loadIndexMetaData(final Connection connection, final Collection<String> tableNames) throws SQLException {
- Map<String, Collection<IndexMetaData>> result = new HashMap<>();
+ Map<String, Collection<IndexMetaData>> result = new HashMap<>(tableNames.size(), 1);
try (PreparedStatement preparedStatement = connection.prepareStatement(getIndexMetaDataSQL(tableNames))) {
preparedStatement.setString(1, connection.getSchema());
try (ResultSet resultSet = preparedStatement.executeQuery()) {