This is an automated email from the ASF dual-hosted git repository.
panjuan pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/shardingsphere.git
The following commit(s) were added to refs/heads/master by this push:
new 861920fc29f Fix CI (#21158)
861920fc29f is described below
commit 861920fc29f664f6258d6a5ed3ffeec84d57204a
Author: Hongsheng Zhong <[email protected]>
AuthorDate: Fri Sep 23 21:53:15 2022 +0800
Fix CI (#21158)
---
.../migration/MigrationDataConsistencyChecker.java | 17 +++++++----------
1 file changed, 7 insertions(+), 10 deletions(-)
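For context, the substance of the diff below is that MigrationDataConsistencyChecker now implements PipelineDataConsistencyChecker and its constructor takes an InventoryIncrementalProcessContext instead of a JobRateLimitAlgorithm, reading the read rate limit from the context null-safely. A minimal sketch of that constructor pattern, using hypothetical placeholder types rather than the real ShardingSphere pipeline classes:

    // Sketch only: placeholder types illustrating the constructor refactor in this commit;
    // they are not the actual ShardingSphere pipeline classes.
    final class ReadRateLimitAlgorithm {
    }

    final class ProcessContext {

        ReadRateLimitAlgorithm getReadRateLimitAlgorithm() {
            return new ReadRateLimitAlgorithm();
        }
    }

    final class ConsistencyChecker {

        private final ReadRateLimitAlgorithm readRateLimitAlgorithm;

        // Instead of receiving the algorithm directly, the checker now receives the
        // whole process context (which may be null) and pulls the algorithm from it.
        ConsistencyChecker(final ProcessContext processContext) {
            this.readRateLimitAlgorithm = null != processContext ? processContext.getReadRateLimitAlgorithm() : null;
        }
    }
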
diff --git a/shardingsphere-kernel/shardingsphere-data-pipeline/shardingsphere-data-pipeline-core/src/main/java/org/apache/shardingsphere/data/pipeline/scenario/migration/MigrationDataConsistencyChecker.java b/shardingsphere-kernel/shardingsphere-data-pipeline/shardingsphere-data-pipeline-core/src/main/java/org/apache/shardingsphere/data/pipeline/scenario/migration/MigrationDataConsistencyChecker.java
index 6301ea80ba6..ea68ca1cd0b 100644
--- a/shardingsphere-kernel/shardingsphere-data-pipeline/shardingsphere-data-pipeline-core/src/main/java/org/apache/shardingsphere/data/pipeline/scenario/migration/MigrationDataConsistencyChecker.java
+++ b/shardingsphere-kernel/shardingsphere-data-pipeline/shardingsphere-data-pipeline-core/src/main/java/org/apache/shardingsphere/data/pipeline/scenario/migration/MigrationDataConsistencyChecker.java
@@ -22,6 +22,7 @@ import org.apache.shardingsphere.data.pipeline.api.check.consistency.DataConsist
 import org.apache.shardingsphere.data.pipeline.api.check.consistency.DataConsistencyCheckResult;
 import org.apache.shardingsphere.data.pipeline.api.check.consistency.DataConsistencyContentCheckResult;
 import org.apache.shardingsphere.data.pipeline.api.check.consistency.DataConsistencyCountCheckResult;
+import org.apache.shardingsphere.data.pipeline.api.check.consistency.PipelineDataConsistencyChecker;
 import org.apache.shardingsphere.data.pipeline.api.config.TableNameSchemaNameMapping;
 import org.apache.shardingsphere.data.pipeline.api.config.job.MigrationJobConfiguration;
 import org.apache.shardingsphere.data.pipeline.api.datasource.PipelineDataSourceWrapper;
@@ -30,10 +31,11 @@ import org.apache.shardingsphere.data.pipeline.api.datasource.config.PipelineDat
 import org.apache.shardingsphere.data.pipeline.api.job.JobOperationType;
 import org.apache.shardingsphere.data.pipeline.api.metadata.model.PipelineColumnMetaData;
 import org.apache.shardingsphere.data.pipeline.api.metadata.model.PipelineTableMetaData;
+import org.apache.shardingsphere.data.pipeline.core.context.InventoryIncrementalProcessContext;
 import org.apache.shardingsphere.data.pipeline.core.datasource.PipelineDataSourceFactory;
 import org.apache.shardingsphere.data.pipeline.core.exception.PipelineSQLException;
-import org.apache.shardingsphere.data.pipeline.core.exception.data.UnsupportedPipelineDatabaseTypeException;
 import org.apache.shardingsphere.data.pipeline.core.exception.data.PipelineTableDataConsistencyCheckLoadingFailedException;
+import org.apache.shardingsphere.data.pipeline.core.exception.data.UnsupportedPipelineDatabaseTypeException;
 import org.apache.shardingsphere.data.pipeline.core.metadata.loader.StandardPipelineTableMetaDataLoader;
 import org.apache.shardingsphere.data.pipeline.core.sqlbuilder.PipelineSQLBuilderFactory;
 import org.apache.shardingsphere.data.pipeline.spi.check.consistency.DataConsistencyCalculateAlgorithm;
@@ -69,7 +71,7 @@ import java.util.concurrent.TimeUnit;
  * Data consistency checker for migration job.
  */
 @Slf4j
-public final class MigrationDataConsistencyChecker {
+public final class MigrationDataConsistencyChecker implements PipelineDataConsistencyChecker {
 
     private final MigrationJobConfiguration jobConfig;
@@ -77,19 +79,14 @@ public final class MigrationDataConsistencyChecker {
     private final TableNameSchemaNameMapping tableNameSchemaNameMapping;
 
-    public MigrationDataConsistencyChecker(final MigrationJobConfiguration jobConfig, final JobRateLimitAlgorithm readRateLimitAlgorithm) {
+    public MigrationDataConsistencyChecker(final MigrationJobConfiguration jobConfig, final InventoryIncrementalProcessContext processContext) {
         this.jobConfig = jobConfig;
-        this.readRateLimitAlgorithm = readRateLimitAlgorithm;
+        this.readRateLimitAlgorithm = null != processContext ? processContext.getReadRateLimitAlgorithm() : null;
         tableNameSchemaNameMapping = new TableNameSchemaNameMapping(
                 TableNameSchemaNameMapping.convert(jobConfig.getSourceSchemaName(), new HashSet<>(Arrays.asList(jobConfig.getSourceTableName(), jobConfig.getTargetTableName()))));
     }
 
-    /**
-     * Check data consistency.
-     *
-     * @param calculator data consistency calculate algorithm
-     * @return checked result, key is logic table name, value is check result.
-     */
+    @Override
     public Map<String, DataConsistencyCheckResult> check(final DataConsistencyCalculateAlgorithm calculator) {
         Map<String, DataConsistencyCountCheckResult> countCheckResult = checkCount();
         Map<String, DataConsistencyContentCheckResult> contentCheckResult = countCheckResult.values().stream().allMatch(DataConsistencyCountCheckResult::isMatched)