This is an automated email from the ASF dual-hosted git repository.

zhangliang pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/shardingsphere.git


The following commit(s) were added to refs/heads/master by this push:
     new 25c3e73dabe Refactor MigrationJobAPI (#32723)
25c3e73dabe is described below

commit 25c3e73dabec535307acf765561b5919e8e1117c
Author: Liang Zhang <[email protected]>
AuthorDate: Thu Aug 29 23:39:32 2024 +0800

    Refactor MigrationJobAPI (#32723)
---
 .../migration/distsql/handler/update/MigrateTableExecutor.java |  2 +-
 .../data/pipeline/scenario/migration/api/MigrationJobAPI.java  |  4 ++--
 .../scenario/migration/api/impl/MigrationJobAPITest.java       | 10 +++++-----
 3 files changed, 8 insertions(+), 8 deletions(-)

diff --git a/kernel/data-pipeline/distsql/handler/src/main/java/org/apache/shardingsphere/data/pipeline/migration/distsql/handler/update/MigrateTableExecutor.java b/kernel/data-pipeline/distsql/handler/src/main/java/org/apache/shardingsphere/data/pipeline/migration/distsql/handler/update/MigrateTableExecutor.java
index e7ca624bf41..c85f81dfbdd 100644
--- a/kernel/data-pipeline/distsql/handler/src/main/java/org/apache/shardingsphere/data/pipeline/migration/distsql/handler/update/MigrateTableExecutor.java
+++ b/kernel/data-pipeline/distsql/handler/src/main/java/org/apache/shardingsphere/data/pipeline/migration/distsql/handler/update/MigrateTableExecutor.java
@@ -50,7 +50,7 @@ public final class MigrateTableExecutor implements DistSQLUpdateExecutor<Migrate
         ShardingSpherePreconditions.checkState(contextManager.getMetaDataContexts().getMetaData().containsDatabase(targetDatabaseName),
                 () -> new MissingRequiredTargetDatabaseException(sqlStatement.getTargetDatabaseName()));
         MigrationJobAPI jobAPI = (MigrationJobAPI) TypedSPILoader.getService(TransmissionJobAPI.class, "MIGRATION");
-        jobAPI.start(new PipelineContextKey(InstanceType.PROXY), new MigrateTableStatement(sqlStatement.getSourceTargetEntries(), targetDatabaseName));
+        jobAPI.schedule(new PipelineContextKey(InstanceType.PROXY), new MigrateTableStatement(sqlStatement.getSourceTargetEntries(), targetDatabaseName));
     }
     
     @Override
diff --git a/kernel/data-pipeline/scenario/migration/src/main/java/org/apache/shardingsphere/data/pipeline/scenario/migration/api/MigrationJobAPI.java b/kernel/data-pipeline/scenario/migration/src/main/java/org/apache/shardingsphere/data/pipeline/scenario/migration/api/MigrationJobAPI.java
index 580f74ad5f0..1b5daf02ed1 100644
--- a/kernel/data-pipeline/scenario/migration/src/main/java/org/apache/shardingsphere/data/pipeline/scenario/migration/api/MigrationJobAPI.java
+++ b/kernel/data-pipeline/scenario/migration/src/main/java/org/apache/shardingsphere/data/pipeline/scenario/migration/api/MigrationJobAPI.java
@@ -105,13 +105,13 @@ public final class MigrationJobAPI implements TransmissionJobAPI {
     }
     
     /**
-     * Start migration job.
+     * Schedule migration job.
      *
      * @param contextKey context key
      * @param param create migration job parameter
      * @return job id
      */
-    public String start(final PipelineContextKey contextKey, final MigrateTableStatement param) {
+    public String schedule(final PipelineContextKey contextKey, final MigrateTableStatement param) {
         MigrationJobConfiguration jobConfig = new YamlMigrationJobConfigurationSwapper().swapToObject(buildYamlJobConfiguration(contextKey, param));
         jobManager.start(jobConfig);
         return jobConfig.getJobId();
diff --git a/test/it/pipeline/src/test/java/org/apache/shardingsphere/test/it/data/pipeline/scenario/migration/api/impl/MigrationJobAPITest.java b/test/it/pipeline/src/test/java/org/apache/shardingsphere/test/it/data/pipeline/scenario/migration/api/impl/MigrationJobAPITest.java
index 01698c39046..aa1b74d796c 100644
--- a/test/it/pipeline/src/test/java/org/apache/shardingsphere/test/it/data/pipeline/scenario/migration/api/impl/MigrationJobAPITest.java
+++ b/test/it/pipeline/src/test/java/org/apache/shardingsphere/test/it/data/pipeline/scenario/migration/api/impl/MigrationJobAPITest.java
@@ -135,7 +135,7 @@ class MigrationJobAPITest {
     }
     
     @Test
-    void assertStartAndList() {
+    void assertScheduleAndList() {
         PipelineJobConfiguration jobConfig = JobConfigurationBuilder.createJobConfiguration();
         jobManager.start(jobConfig);
         JobConfigurationPOJO jobConfigPOJO = getJobConfigurationPOJO(jobConfig.getJobId());
@@ -148,7 +148,7 @@ class MigrationJobAPITest {
     }
     
     @Test
-    void assertStartOrStopById() {
+    void assertScheduleOrStopById() {
         PipelineJobConfiguration jobConfig = JobConfigurationBuilder.createJobConfiguration();
         jobManager.start(jobConfig);
         assertFalse(getJobConfigurationPOJO(jobConfig.getJobId()).isDisabled());
@@ -262,20 +262,20 @@ class MigrationJobAPITest {
     void assertCreateJobConfigFailedOnMoreThanOneSourceTable() {
         List<SourceTargetEntry> sourceTargetEntries = Stream.of("t_order_0", "t_order_1")
                 .map(each -> new SourceTargetEntry("logic_db", new DataNode("ds_0", each), "t_order")).collect(Collectors.toList());
-        assertThrows(PipelineInvalidParameterException.class, () -> jobAPI.start(PipelineContextUtils.getContextKey(), new MigrateTableStatement(sourceTargetEntries, "logic_db")));
+        assertThrows(PipelineInvalidParameterException.class, () -> jobAPI.schedule(PipelineContextUtils.getContextKey(), new MigrateTableStatement(sourceTargetEntries, "logic_db")));
     }
     
     @Test
     void assertCreateJobConfigFailedOnDataSourceNotExist() {
         List<SourceTargetEntry> sourceTargetEntries = Collections.singletonList(new SourceTargetEntry("logic_db", new DataNode("ds_not_exists", "t_order"), "t_order"));
-        assertThrows(PipelineInvalidParameterException.class, () -> jobAPI.start(PipelineContextUtils.getContextKey(), new MigrateTableStatement(sourceTargetEntries, "logic_db")));
+        assertThrows(PipelineInvalidParameterException.class, () -> jobAPI.schedule(PipelineContextUtils.getContextKey(), new MigrateTableStatement(sourceTargetEntries, "logic_db")));
     }
     
     @Test
     void assertCreateJobConfig() throws SQLException {
         initIntPrimaryEnvironment();
         SourceTargetEntry sourceTargetEntry = new SourceTargetEntry("logic_db", new DataNode("ds_0", "t_order"), "t_order");
-        String jobId = jobAPI.start(PipelineContextUtils.getContextKey(), new MigrateTableStatement(Collections.singletonList(sourceTargetEntry), "logic_db"));
+        String jobId = jobAPI.schedule(PipelineContextUtils.getContextKey(), new MigrateTableStatement(Collections.singletonList(sourceTargetEntry), "logic_db"));
         MigrationJobConfiguration actual = jobConfigManager.getJobConfiguration(jobId);
         assertThat(actual.getTargetDatabaseName(), is("logic_db"));
         List<JobDataNodeLine> dataNodeLines = actual.getJobShardingDataNodes();

Reply via email to