This is an automated email from the ASF dual-hosted git repository.
zhangliang pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/shardingsphere.git
The following commit(s) were added to refs/heads/master by this push:
new 9c787559222 Minor changes for code format (#28267)
9c787559222 is described below
commit 9c78755922274aa5d59a117084a3e33e093f7450
Author: Liang Zhang <[email protected]>
AuthorDate: Sun Aug 27 13:18:14 2023 +0800
Minor changes for code format (#28267)
* Refactor usage of ContextManager
* Refactor usage of ContextManager
* Remove useless ContextManager.getDataSourceMap()
* Refactor ConfigurationContextManager
* Refactor ContextManager
* Minor changes for code format
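Most of the diff applies one naming cleanup: drop the *Optional-suffixed local and the extra variable pulled out of it, keep the Optional itself, and read it via get()/map() at the use sites. Below is a minimal sketch of that style for readers outside the codebase; Config and findConfig are hypothetical stand-ins, not ShardingSphere APIs.

    import java.util.Optional;

    public final class OptionalStyleSketch {
        
        static final class Config {
            
            String name() {
                return "demo";
            }
        }
        
        // Stand-in for a lookup that may or may not find a configuration.
        static Optional<Config> findConfig() {
            return Optional.of(new Config());
        }
        
        public static void main(final String[] args) {
            // Before: Optional<Config> configOptional = findConfig(); Config config = configOptional.get();
            // After: keep the Optional local and call get() where the value is used.
            Optional<Config> config = findConfig();
            if (!config.isPresent()) {
                return;
            }
            System.out.println(config.get().name());
        }
    }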
---
 .../pipeline/common/util/ShardingColumnsExtractor.java | 13 ++++++-------
 .../api/impl/ConsistencyCheckJobAPI.java               | 12 ++++++------
 .../single/route/engine/SingleStandardRouteEngine.java | 12 +++++-------
 .../migration/api/impl/MigrationJobAPITest.java        | 18 ++++++++----------
 4 files changed, 25 insertions(+), 30 deletions(-)
diff --git a/kernel/data-pipeline/core/src/main/java/org/apache/shardingsphere/data/pipeline/common/util/ShardingColumnsExtractor.java b/kernel/data-pipeline/core/src/main/java/org/apache/shardingsphere/data/pipeline/common/util/ShardingColumnsExtractor.java
index 78f0b3b8991..1fe9015f343 100644
--- a/kernel/data-pipeline/core/src/main/java/org/apache/shardingsphere/data/pipeline/common/util/ShardingColumnsExtractor.java
+++ b/kernel/data-pipeline/core/src/main/java/org/apache/shardingsphere/data/pipeline/common/util/ShardingColumnsExtractor.java
@@ -49,15 +49,14 @@ public final class ShardingColumnsExtractor {
      * @return sharding columns map
      */
     public Map<LogicTableName, Set<String>> getShardingColumnsMap(final Collection<YamlRuleConfiguration> yamlRuleConfigs, final Set<LogicTableName> logicTableNames) {
-        Optional<ShardingRuleConfiguration> shardingRuleConfigOptional = ShardingRuleConfigurationConverter.findAndConvertShardingRuleConfiguration(yamlRuleConfigs);
-        if (!shardingRuleConfigOptional.isPresent()) {
+        Optional<ShardingRuleConfiguration> shardingRuleConfig = ShardingRuleConfigurationConverter.findAndConvertShardingRuleConfiguration(yamlRuleConfigs);
+        if (!shardingRuleConfig.isPresent()) {
             return Collections.emptyMap();
         }
-        ShardingRuleConfiguration shardingRuleConfig = shardingRuleConfigOptional.get();
-        Set<String> defaultDatabaseShardingColumns = extractShardingColumns(shardingRuleConfig.getDefaultDatabaseShardingStrategy());
-        Set<String> defaultTableShardingColumns = extractShardingColumns(shardingRuleConfig.getDefaultTableShardingStrategy());
+        Set<String> defaultDatabaseShardingColumns = extractShardingColumns(shardingRuleConfig.get().getDefaultDatabaseShardingStrategy());
+        Set<String> defaultTableShardingColumns = extractShardingColumns(shardingRuleConfig.get().getDefaultTableShardingStrategy());
         Map<LogicTableName, Set<String>> result = new ConcurrentHashMap<>();
-        for (ShardingTableRuleConfiguration each : shardingRuleConfig.getTables()) {
+        for (ShardingTableRuleConfiguration each : shardingRuleConfig.get().getTables()) {
             LogicTableName logicTableName = new LogicTableName(each.getLogicTable());
             if (!logicTableNames.contains(logicTableName)) {
                 continue;
@@ -67,7 +66,7 @@ public final class ShardingColumnsExtractor {
             shardingColumns.addAll(null == each.getTableShardingStrategy() ? defaultTableShardingColumns : extractShardingColumns(each.getTableShardingStrategy()));
             result.put(logicTableName, shardingColumns);
         }
-        for (ShardingAutoTableRuleConfiguration each : shardingRuleConfig.getAutoTables()) {
+        for (ShardingAutoTableRuleConfiguration each : shardingRuleConfig.get().getAutoTables()) {
             LogicTableName logicTableName = new LogicTableName(each.getLogicTable());
             if (!logicTableNames.contains(logicTableName)) {
                 continue;
diff --git a/kernel/data-pipeline/scenario/consistencycheck/src/main/java/org/apache/shardingsphere/data/pipeline/scenario/consistencycheck/api/impl/ConsistencyCheckJobAPI.java b/kernel/data-pipeline/scenario/consistencycheck/src/main/java/org/apache/shardingsphere/data/pipeline/scenario/consistencycheck/api/impl/ConsistencyCheckJobAPI.java
index e5162fab517..d87119b58c3 100644
--- a/kernel/data-pipeline/scenario/consistencycheck/src/main/java/org/apache/shardingsphere/data/pipeline/scenario/consistencycheck/api/impl/ConsistencyCheckJobAPI.java
+++ b/kernel/data-pipeline/scenario/consistencycheck/src/main/java/org/apache/shardingsphere/data/pipeline/scenario/consistencycheck/api/impl/ConsistencyCheckJobAPI.java
@@ -257,12 +257,12 @@ public final class ConsistencyCheckJobAPI extends AbstractPipelineJobAPIImpl {
         Optional<String> latestCheckJobId = governanceRepositoryAPI.getLatestCheckJobId(parentJobId);
         ShardingSpherePreconditions.checkState(latestCheckJobId.isPresent(), () -> new ConsistencyCheckJobNotFoundException(parentJobId));
         String checkJobId = latestCheckJobId.get();
-        Optional<ConsistencyCheckJobItemProgress> progressOptional = getJobItemProgress(checkJobId, 0);
-        if (!progressOptional.isPresent()) {
+        Optional<ConsistencyCheckJobItemProgress> progress = getJobItemProgress(checkJobId, 0);
+        if (!progress.isPresent()) {
             return Collections.emptyList();
         }
         List<ConsistencyCheckJobItemInfo> result = new LinkedList<>();
-        ConsistencyCheckJobItemProgress jobItemProgress = progressOptional.get();
+        ConsistencyCheckJobItemProgress jobItemProgress = progress.get();
         if (!Strings.isNullOrEmpty(jobItemProgress.getIgnoredTableNames())) {
             Map<String, TableDataConsistencyCheckResult> checkJobResult = governanceRepositoryAPI.getCheckJobResult(parentJobId, latestCheckJobId.get());
             result.addAll(buildIgnoredTableInfo(jobItemProgress.getIgnoredTableNames().split(","), checkJobResult));
@@ -297,12 +297,12 @@ public final class ConsistencyCheckJobAPI extends AbstractPipelineJobAPIImpl {
         Optional<String> latestCheckJobId = governanceRepositoryAPI.getLatestCheckJobId(parentJobId);
         ShardingSpherePreconditions.checkState(latestCheckJobId.isPresent(), () -> new ConsistencyCheckJobNotFoundException(parentJobId));
         String checkJobId = latestCheckJobId.get();
-        Optional<ConsistencyCheckJobItemProgress> progressOptional = getJobItemProgress(checkJobId, 0);
+        Optional<ConsistencyCheckJobItemProgress> progress = getJobItemProgress(checkJobId, 0);
         ConsistencyCheckJobItemInfo result = new ConsistencyCheckJobItemInfo();
-        if (!progressOptional.isPresent()) {
+        if (!progress.isPresent()) {
             return result;
         }
-        ConsistencyCheckJobItemProgress jobItemProgress = progressOptional.get();
+        ConsistencyCheckJobItemProgress jobItemProgress = progress.get();
         if (null == jobItemProgress.getRecordsCount() || null == jobItemProgress.getCheckedRecordsCount()) {
             result.setFinishedPercentage(0);
             result.setCheckSuccess(null);
diff --git a/kernel/single/core/src/main/java/org/apache/shardingsphere/single/route/engine/SingleStandardRouteEngine.java b/kernel/single/core/src/main/java/org/apache/shardingsphere/single/route/engine/SingleStandardRouteEngine.java
index 1b40e868e9a..75740ecca32 100644
--- a/kernel/single/core/src/main/java/org/apache/shardingsphere/single/route/engine/SingleStandardRouteEngine.java
+++ b/kernel/single/core/src/main/java/org/apache/shardingsphere/single/route/engine/SingleStandardRouteEngine.java
@@ -85,12 +85,12 @@ public final class SingleStandardRouteEngine implements SingleRouteEngine {
     private void routeDDLStatement(final RouteContext routeContext, final SingleRule rule) {
         if (sqlStatement instanceof CreateTableStatement) {
             QualifiedTable table = singleTables.iterator().next();
-            Optional<DataNode> dataNodeOptional = rule.findTableDataNode(table.getSchemaName(), table.getTableName());
+            Optional<DataNode> dataNode = rule.findTableDataNode(table.getSchemaName(), table.getTableName());
             boolean containsIfNotExists = CreateTableStatementHandler.ifNotExists((CreateTableStatement) sqlStatement);
-            if (dataNodeOptional.isPresent() && containsIfNotExists) {
-                String dataSourceName = dataNodeOptional.map(DataNode::getDataSourceName).orElse(null);
+            if (dataNode.isPresent() && containsIfNotExists) {
+                String dataSourceName = dataNode.map(DataNode::getDataSourceName).orElse(null);
                 routeContext.getRouteUnits().add(new RouteUnit(new RouteMapper(dataSourceName, dataSourceName), Collections.singleton(new RouteMapper(table.getTableName(), table.getTableName()))));
-            } else if (dataNodeOptional.isPresent()) {
+            } else if (dataNode.isPresent()) {
                 throw new TableExistsException(table.getTableName());
             } else {
                 String dataSourceName = rule.assignNewDataSourceName();
@@ -105,9 +105,7 @@ public final class SingleStandardRouteEngine implements SingleRouteEngine {
         for (QualifiedTable each : logicTables) {
             String tableName = each.getTableName();
             Optional<DataNode> dataNode = singleRule.findTableDataNode(each.getSchemaName(), tableName);
-            if (!dataNode.isPresent()) {
-                throw new SingleTableNotFoundException(tableName);
-            }
+            ShardingSpherePreconditions.checkState(dataNode.isPresent(), () -> new SingleTableNotFoundException(tableName));
             String dataSource = dataNode.get().getDataSourceName();
             routeContext.putRouteUnit(new RouteMapper(dataSource, dataSource), Collections.singletonList(new RouteMapper(tableName, tableName)));
         }
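The SingleStandardRouteEngine hunk above also swaps an explicit if/throw for ShardingSpherePreconditions.checkState(condition, exceptionSupplier), which raises the supplied exception when the condition is false. The sketch below shows the same shape with a local stand-in helper so it compiles on its own; it is illustrative only, not the real ShardingSphere utility.

    import java.util.function.Supplier;

    public final class PreconditionSketch {
        
        // Local stand-in mirroring the shape of ShardingSpherePreconditions.checkState.
        static void checkState(final boolean state, final Supplier<RuntimeException> exceptionSupplier) {
            if (!state) {
                throw exceptionSupplier.get();
            }
        }
        
        public static void main(final String[] args) {
            String tableName = "t_order";
            boolean dataNodePresent = true;
            // Before: if (!dataNodePresent) { throw new IllegalStateException(...); }
            // After: one-line precondition with a lazily created exception.
            checkState(dataNodePresent, () -> new IllegalStateException("Single table not found: " + tableName));
            System.out.println("routed " + tableName);
        }
    }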
diff --git a/test/it/pipeline/src/test/java/org/apache/shardingsphere/test/it/data/pipeline/scenario/migration/api/impl/MigrationJobAPITest.java b/test/it/pipeline/src/test/java/org/apache/shardingsphere/test/it/data/pipeline/scenario/migration/api/impl/MigrationJobAPITest.java
index 39418560e4f..239ff019d3e 100644
--- a/test/it/pipeline/src/test/java/org/apache/shardingsphere/test/it/data/pipeline/scenario/migration/api/impl/MigrationJobAPITest.java
+++ b/test/it/pipeline/src/test/java/org/apache/shardingsphere/test/it/data/pipeline/scenario/migration/api/impl/MigrationJobAPITest.java
@@ -328,14 +328,13 @@ class MigrationJobAPITest {
 
     @Test
     void assertGetJobItemInfosAtBegin() {
-        Optional<String> optional = jobAPI.start(JobConfigurationBuilder.createJobConfiguration());
-        assertTrue(optional.isPresent());
-        String jobId = optional.get();
+        Optional<String> jobId = jobAPI.start(JobConfigurationBuilder.createJobConfiguration());
+        assertTrue(jobId.isPresent());
         YamlInventoryIncrementalJobItemProgress yamlJobItemProgress = new YamlInventoryIncrementalJobItemProgress();
         yamlJobItemProgress.setStatus(JobStatus.RUNNING.name());
         yamlJobItemProgress.setSourceDatabaseType("MySQL");
-        PipelineAPIFactory.getGovernanceRepositoryAPI(PipelineContextUtils.getContextKey()).persistJobItemProgress(jobId, 0, YamlEngine.marshal(yamlJobItemProgress));
-        List<InventoryIncrementalJobItemInfo> jobItemInfos = jobAPI.getJobItemInfos(jobId);
+        PipelineAPIFactory.getGovernanceRepositoryAPI(PipelineContextUtils.getContextKey()).persistJobItemProgress(jobId.get(), 0, YamlEngine.marshal(yamlJobItemProgress));
+        List<InventoryIncrementalJobItemInfo> jobItemInfos = jobAPI.getJobItemInfos(jobId.get());
         assertThat(jobItemInfos.size(), is(1));
         InventoryIncrementalJobItemInfo jobItemInfo = jobItemInfos.get(0);
         assertThat(jobItemInfo.getJobItemProgress().getStatus(), is(JobStatus.RUNNING));
@@ -344,16 +343,15 @@ class MigrationJobAPITest {
 
     @Test
     void assertGetJobItemInfosAtIncrementTask() {
-        Optional<String> optional = jobAPI.start(JobConfigurationBuilder.createJobConfiguration());
-        assertTrue(optional.isPresent());
+        Optional<String> jobId = jobAPI.start(JobConfigurationBuilder.createJobConfiguration());
+        assertTrue(jobId.isPresent());
         YamlInventoryIncrementalJobItemProgress yamlJobItemProgress = new YamlInventoryIncrementalJobItemProgress();
         yamlJobItemProgress.setSourceDatabaseType("MySQL");
         yamlJobItemProgress.setStatus(JobStatus.EXECUTE_INCREMENTAL_TASK.name());
         yamlJobItemProgress.setProcessedRecordsCount(100);
         yamlJobItemProgress.setInventoryRecordsCount(50);
-        String jobId = optional.get();
-        PipelineAPIFactory.getGovernanceRepositoryAPI(PipelineContextUtils.getContextKey()).persistJobItemProgress(jobId, 0, YamlEngine.marshal(yamlJobItemProgress));
-        List<InventoryIncrementalJobItemInfo> jobItemInfos = jobAPI.getJobItemInfos(jobId);
+        PipelineAPIFactory.getGovernanceRepositoryAPI(PipelineContextUtils.getContextKey()).persistJobItemProgress(jobId.get(), 0, YamlEngine.marshal(yamlJobItemProgress));
+        List<InventoryIncrementalJobItemInfo> jobItemInfos = jobAPI.getJobItemInfos(jobId.get());
         InventoryIncrementalJobItemInfo jobItemInfo = jobItemInfos.get(0);
         assertThat(jobItemInfo.getJobItemProgress().getStatus(), is(JobStatus.EXECUTE_INCREMENTAL_TASK));
         assertThat(jobItemInfo.getInventoryFinishedPercentage(), is(100));