This is an automated email from the ASF dual-hosted git repository.

duanzhengqiang pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/shardingsphere.git


The following commit(s) were added to refs/heads/master by this push:
     new a20b8d246f3 Refactor SplitPipelineJobByUniqueKeyException (#21041)
a20b8d246f3 is described below

commit a20b8d246f3ff97ac62657abf89ce656298bf1cc
Author: Liang Zhang <zhangli...@apache.org>
AuthorDate: Sun Sep 18 06:13:02 2022 +0800

    Refactor SplitPipelineJobByUniqueKeyException (#21041)
    
    * Refactor SplitPipelineJobByUniqueKeyException
    
    * Refactor SplitPipelineJobByUniqueKeyException
---
 .../user-manual/error-code/sql-error-code.cn.md    | 17 ++++++------
 .../user-manual/error-code/sql-error-code.en.md    | 17 ++++++------
 ...ingTableRulesUsedAuditorQueryResultSetTest.java |  8 +++---
 .../job/PipelineImporterJobWriteException.java     |  2 +-
 .../job/PipelineJobPrepareFailedException.java     | 30 ----------------------
 ...repareJobWithCheckPrivilegeFailedException.java |  2 +-
 .../PrepareJobWithGetBinlogPositionException.java  |  2 +-
 .../PrepareJobWithInvalidConnectionException.java  |  2 +-
 ...areJobWithInvalidSourceDataSourceException.java |  2 +-
 ...PrepareJobWithTargetTableNotEmptyException.java |  2 +-
 .../PrepareJobWithoutEnoughPrivilegeException.java |  2 +-
 .../job/PrepareJobWithoutUserException.java        |  2 +-
 ....java => SplitPipelineJobByRangeException.java} |  8 +++---
 ...a => SplitPipelineJobByUniqueKeyException.java} | 10 ++++----
 .../core/prepare/InventoryTaskSplitter.java        |  8 +++---
 .../core/util/PipelineTableMetaDataUtil.java       | 10 ++++----
 .../datasource/MySQLDataSourcePreparerTest.java    |  4 +--
 .../SQLTranslatorRuleQueryResultSetTest.java       |  2 +-
 .../mode/metadata/MetaDataContextsFactoryTest.java |  6 ++---
 .../sql/parser/sql/common/util/SQLUtilTest.java    |  2 +-
 .../migration/general/MySQLMigrationGeneralIT.java |  2 +-
 .../primarykey/TextPrimaryKeyMigrationIT.java      |  2 +-
 ...ardingTableRulesUsedAuditorStatementAssert.java |  2 +-
 .../core/prepare/InventoryTaskSplitterTest.java    |  8 +++---
 24 files changed, 62 insertions(+), 90 deletions(-)

diff --git a/docs/document/content/user-manual/error-code/sql-error-code.cn.md b/docs/document/content/user-manual/error-code/sql-error-code.cn.md
index 800e627a87e..a07d99058ec 100644
--- a/docs/document/content/user-manual/error-code/sql-error-code.cn.md
+++ b/docs/document/content/user-manual/error-code/sql-error-code.cn.md
@@ -96,14 +96,15 @@ SQL 错误码以标准的 SQL State,Vendor Code 和详细错误信息提供,
 | HY000     | 18081       | Job has already started |
 | HY000     | 18082       | Sharding count of job \`%s\` is 0 |
 | HY000     | 18083       | Can not split range for table \`%s\`, reason: %s |
-| HY000     | 18084       | Target table \`%s\` is not empty |
-| 01007     | 18085       | Source data source lacks %s privilege(s) |
-| HY000     | 18086       | Source data source required \`%s = %s\`, now is \`%s\` |
-| HY000     | 18087       | User \`%s\` does exist |
-| 08000     | 18088       | Check privileges failed on source data source, reason is: %s |
-| 08000     | 18089       | Data sources can not connect, reason is: %s |
-| HY000     | 18090       | Importer job write data failed |
-| 08000     | 18091       | Get binlog position failed by job \`%s\`, reason is: %s |
+| HY000     | 18084       | Can not split by unique key \`%s\` for table \`%s\`, reason is: %s |
+| HY000     | 18085       | Target table \`%s\` is not empty |
+| 01007     | 18086       | Source data source lacks %s privilege(s) |
+| HY000     | 18087       | Source data source required \`%s = %s\`, now is \`%s\` |
+| HY000     | 18088       | User \`%s\` does exist |
+| 08000     | 18089       | Check privileges failed on source data source, reason is: %s |
+| 08000     | 18090       | Data sources can not connect, reason is: %s |
+| HY000     | 18091       | Importer job write data failed |
+| 08000     | 18092       | Get binlog position failed by job \`%s\`, reason is: %s |
 | HY000     | 18093       | Can not poll event because of binlog sync channel already closed |
 | HY000     | 18094       | Task \`%s\` execute failed |
 
diff --git a/docs/document/content/user-manual/error-code/sql-error-code.en.md b/docs/document/content/user-manual/error-code/sql-error-code.en.md
index 836ac81e0ab..8ee731dffd8 100644
--- a/docs/document/content/user-manual/error-code/sql-error-code.en.md
+++ b/docs/document/content/user-manual/error-code/sql-error-code.en.md
@@ -96,14 +96,15 @@ SQL error codes provide by standard `SQL State`, `Vendor Code` and `Reason`, whi
 | HY000     | 18081       | Job has already started |
 | HY000     | 18082       | Sharding count of job \`%s\` is 0 |
 | HY000     | 18083       | Can not split range for table \`%s\`, reason: %s |
-| HY000     | 18084       | Target table \`%s\` is not empty |
-| 01007     | 18085       | Source data source lacks %s privilege(s) |
-| HY000     | 18086       | Source data source required \`%s = %s\`, now is \`%s\` |
-| HY000     | 18087       | User \`%s\` does exist |
-| 08000     | 18088       | Check privileges failed on source data source, reason is: %s |
-| 08000     | 18089       | Data sources can not connect, reason is: %s |
-| HY000     | 18090       | Importer job write data failed |
-| 08000     | 18091       | Get binlog position failed by job \`%s\`, reason is: %s |
+| HY000     | 18084       | Can not split by unique key \`%s\` for table \`%s\`, reason is: %s |
+| HY000     | 18085       | Target table \`%s\` is not empty |
+| 01007     | 18086       | Source data source lacks %s privilege(s) |
+| HY000     | 18087       | Source data source required \`%s = %s\`, now is \`%s\` |
+| HY000     | 18088       | User \`%s\` does exist |
+| 08000     | 18089       | Check privileges failed on source data source, reason is: %s |
+| 08000     | 18090       | Data sources can not connect, reason is: %s |
+| HY000     | 18091       | Importer job write data failed |
+| 08000     | 18092       | Get binlog position failed by job \`%s\`, reason is: %s |
 | HY000     | 18093       | Can not poll event because of binlog sync channel already closed |
 | HY000     | 18094       | Task \`%s\` execute failed |
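
For reference (not part of the patch): the SQL State and Vendor Code columns above reach a client through the standard JDBC SQLException accessors. A minimal sketch, assuming a reachable ShardingSphere-Proxy and a MySQL driver on the classpath; the URL, credentials and DistSQL statement below are illustrative placeholders, not taken from this commit:

    import java.sql.Connection;
    import java.sql.DriverManager;
    import java.sql.SQLException;
    import java.sql.Statement;
    
    public final class ErrorCodeInspection {
        
        public static void main(final String[] args) {
            // Placeholder URL and credentials; point them at a real proxy instance.
            try (Connection connection = DriverManager.getConnection("jdbc:mysql://127.0.0.1:3307/sharding_db", "root", "root");
                 Statement statement = connection.createStatement()) {
                // Illustrative DistSQL; any statement that hits a pipeline error surfaces the same way.
                statement.execute("MIGRATE TABLE ds_0.t_order INTO t_order");
            } catch (final SQLException ex) {
                // For the new 18084 row above this prints SQLState HY000 and vendor code 18084.
                System.err.printf("SQLState=%s, vendorCode=%d, reason=%s%n", ex.getSQLState(), ex.getErrorCode(), ex.getMessage());
            }
        }
    }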
 
diff --git a/shardingsphere-features/shardingsphere-sharding/shardingsphere-sharding-distsql/shardingsphere-sharding-distsql-handler/src/test/java/org/apache/shardingsphere/sharding/distsql/query/ShowShardingTableRulesUsedAuditorQueryResultSetTest.java b/shardingsphere-features/shardingsphere-sharding/shardingsphere-sharding-distsql/shardingsphere-sharding-distsql-handler/src/test/java/org/apache/shardingsphere/sharding/distsql/query/ShowShardingTableRulesUsedAuditorQueryResultSetTest.java
index 9028e1df569..3d2261bb14f 100644
--- a/shardingsphere-features/shardingsphere-sharding/shardingsphere-sharding-distsql/shardingsphere-sharding-distsql-handler/src/test/java/org/apache/shardingsphere/sharding/distsql/query/ShowShardingTableRulesUsedAuditorQueryResultSetTest.java
+++ b/shardingsphere-features/shardingsphere-sharding/shardingsphere-sharding-distsql/shardingsphere-sharding-distsql-handler/src/test/java/org/apache/shardingsphere/sharding/distsql/query/ShowShardingTableRulesUsedAuditorQueryResultSetTest.java
@@ -32,13 +32,13 @@ import org.apache.shardingsphere.sharding.rule.ShardingRule;
 import org.junit.Test;
 
 import java.util.ArrayList;
-import java.util.Arrays;
+import java.util.Collections;
 import java.util.List;
 import java.util.Optional;
 import java.util.Properties;
 
 import static org.hamcrest.CoreMatchers.is;
-import static org.junit.Assert.assertThat;
+import static org.hamcrest.MatcherAssert.assertThat;
 import static org.mockito.Mockito.RETURNS_DEEP_STUBS;
 import static org.mockito.Mockito.mock;
 import static org.mockito.Mockito.when;
@@ -82,14 +82,14 @@ public final class ShowShardingTableRulesUsedAuditorQueryResultSetTest {
     private ShardingAutoTableRuleConfiguration createShardingAutoTableRuleConfiguration() {
         ShardingAutoTableRuleConfiguration result = new ShardingAutoTableRuleConfiguration("t_order_auto", "ds_0, ds_1");
         result.setShardingStrategy(new StandardShardingStrategyConfiguration("order_id", "auto_mod"));
-        result.setAuditStrategy(new ShardingAuditStrategyConfiguration(Arrays.asList("shardingKeyAudit"), true));
+        result.setAuditStrategy(new ShardingAuditStrategyConfiguration(Collections.singleton("shardingKeyAudit"), true));
         return result;
     }
     
     private ShardingTableRuleConfiguration createShardingTableRuleConfiguration() {
         ShardingTableRuleConfiguration result = new ShardingTableRuleConfiguration("t_order", "ds_${0..1}.t_order_${0..1}");
         result.setTableShardingStrategy(new StandardShardingStrategyConfiguration("order_id", "t_order_inline"));
-        result.setAuditStrategy(new ShardingAuditStrategyConfiguration(Arrays.asList("shardingKeyAudit"), true));
+        result.setAuditStrategy(new ShardingAuditStrategyConfiguration(Collections.singleton("shardingKeyAudit"), true));
         return result;
     }
     
diff --git a/shardingsphere-kernel/shardingsphere-data-pipeline/shardingsphere-data-pipeline-core/src/main/java/org/apache/shardingsphere/data/pipeline/core/exception/job/PipelineImporterJobWriteException.java b/shardingsphere-kernel/shardingsphere-data-pipeline/shardingsphere-data-pipeline-core/src/main/java/org/apache/shardingsphere/data/pipeline/core/exception/job/PipelineImporterJobWriteException.java
index 2476a1d0594..9cfb1903493 100644
--- a/shardingsphere-kernel/shardingsphere-data-pipeline/shardingsphere-data-pipeline-core/src/main/java/org/apache/shardingsphere/data/pipeline/core/exception/job/PipelineImporterJobWriteException.java
+++ b/shardingsphere-kernel/shardingsphere-data-pipeline/shardingsphere-data-pipeline-core/src/main/java/org/apache/shardingsphere/data/pipeline/core/exception/job/PipelineImporterJobWriteException.java
@@ -28,6 +28,6 @@ public final class PipelineImporterJobWriteException extends PipelineSQLExceptio
     private static final long serialVersionUID = -7924663094479253130L;
     
     public PipelineImporterJobWriteException() {
-        super(XOpenSQLState.GENERAL_ERROR, 90, "Importer job write data failed");
+        super(XOpenSQLState.GENERAL_ERROR, 91, "Importer job write data failed");
     }
 }
diff --git a/shardingsphere-kernel/shardingsphere-data-pipeline/shardingsphere-data-pipeline-core/src/main/java/org/apache/shardingsphere/data/pipeline/core/exception/job/PipelineJobPrepareFailedException.java b/shardingsphere-kernel/shardingsphere-data-pipeline/shardingsphere-data-pipeline-core/src/main/java/org/apache/shardingsphere/data/pipeline/core/exception/job/PipelineJobPrepareFailedException.java
deleted file mode 100644
index ceefc0c1a7f..00000000000
--- a/shardingsphere-kernel/shardingsphere-data-pipeline/shardingsphere-data-pipeline-core/src/main/java/org/apache/shardingsphere/data/pipeline/core/exception/job/PipelineJobPrepareFailedException.java
+++ /dev/null
@@ -1,30 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.shardingsphere.data.pipeline.core.exception.job;
-
-/**
- * Pipeline job prepare failed exception.
- */
-public final class PipelineJobPrepareFailedException extends RuntimeException {
-    
-    private static final long serialVersionUID = 1409505606319197767L;
-    
-    public PipelineJobPrepareFailedException(final String message, final Throwable cause) {
-        super(message, cause);
-    }
-}
diff --git a/shardingsphere-kernel/shardingsphere-data-pipeline/shardingsphere-data-pipeline-core/src/main/java/org/apache/shardingsphere/data/pipeline/core/exception/job/PrepareJobWithCheckPrivilegeFailedException.java b/shardingsphere-kernel/shardingsphere-data-pipeline/shardingsphere-data-pipeline-core/src/main/java/org/apache/shardingsphere/data/pipeline/core/exception/job/PrepareJobWithCheckPrivilegeFailedException.java
index 4685cedbf9e..be8f90618f2 100644
--- a/shardingsphere-kernel/shardingsphere-data-pipeline/shardingsphere-data-pipeline-core/src/main/java/org/apache/shardingsphere/data/pipeline/core/exception/job/PrepareJobWithCheckPrivilegeFailedException.java
+++ b/shardingsphere-kernel/shardingsphere-data-pipeline/shardingsphere-data-pipeline-core/src/main/java/org/apache/shardingsphere/data/pipeline/core/exception/job/PrepareJobWithCheckPrivilegeFailedException.java
@@ -30,6 +30,6 @@ public final class PrepareJobWithCheckPrivilegeFailedException extends PipelineS
     private static final long serialVersionUID = -8462039913248251254L;
     
     public PrepareJobWithCheckPrivilegeFailedException(final SQLException cause) {
-        super(XOpenSQLState.CONNECTION_EXCEPTION, 88, "Check privileges failed on source data source, reason is: %s", cause.getMessage());
+        super(XOpenSQLState.CONNECTION_EXCEPTION, 89, "Check privileges failed on source data source, reason is: %s", cause.getMessage());
     }
 }
diff --git a/shardingsphere-kernel/shardingsphere-data-pipeline/shardingsphere-data-pipeline-core/src/main/java/org/apache/shardingsphere/data/pipeline/core/exception/job/PrepareJobWithGetBinlogPositionException.java b/shardingsphere-kernel/shardingsphere-data-pipeline/shardingsphere-data-pipeline-core/src/main/java/org/apache/shardingsphere/data/pipeline/core/exception/job/PrepareJobWithGetBinlogPositionException.java
index 7eb4bcb73f7..d325e754bf7 100644
--- a/shardingsphere-kernel/shardingsphere-data-pipeline/shardingsphere-data-pipeline-core/src/main/java/org/apache/shardingsphere/data/pipeline/core/exception/job/PrepareJobWithGetBinlogPositionException.java
+++ b/shardingsphere-kernel/shardingsphere-data-pipeline/shardingsphere-data-pipeline-core/src/main/java/org/apache/shardingsphere/data/pipeline/core/exception/job/PrepareJobWithGetBinlogPositionException.java
@@ -30,6 +30,6 @@ public final class PrepareJobWithGetBinlogPositionException extends PipelineSQLE
     private static final long serialVersionUID = -3701189611685636704L;
     
     public PrepareJobWithGetBinlogPositionException(final String jobId, final SQLException cause) {
-        super(XOpenSQLState.CONNECTION_EXCEPTION, 90, "Get binlog position failed by job `%s`, reason is: %s", jobId, cause.getMessage());
+        super(XOpenSQLState.CONNECTION_EXCEPTION, 92, "Get binlog position failed by job `%s`, reason is: %s", jobId, cause.getMessage());
     }
 }
diff --git a/shardingsphere-kernel/shardingsphere-data-pipeline/shardingsphere-data-pipeline-core/src/main/java/org/apache/shardingsphere/data/pipeline/core/exception/job/PrepareJobWithInvalidConnectionException.java b/shardingsphere-kernel/shardingsphere-data-pipeline/shardingsphere-data-pipeline-core/src/main/java/org/apache/shardingsphere/data/pipeline/core/exception/job/PrepareJobWithInvalidConnectionException.java
index b3147798751..6c54d8d27f7 100644
--- a/shardingsphere-kernel/shardingsphere-data-pipeline/shardingsphere-data-pipeline-core/src/main/java/org/apache/shardingsphere/data/pipeline/core/exception/job/PrepareJobWithInvalidConnectionException.java
+++ b/shardingsphere-kernel/shardingsphere-data-pipeline/shardingsphere-data-pipeline-core/src/main/java/org/apache/shardingsphere/data/pipeline/core/exception/job/PrepareJobWithInvalidConnectionException.java
@@ -30,6 +30,6 @@ public final class PrepareJobWithInvalidConnectionException extends PipelineSQLE
     private static final long serialVersionUID = 208040912786493973L;
     
     public PrepareJobWithInvalidConnectionException(final SQLException cause) {
-        super(XOpenSQLState.CONNECTION_EXCEPTION, 89, "Data sources can not connect, reason is: %s", cause.getMessage());
+        super(XOpenSQLState.CONNECTION_EXCEPTION, 90, "Data sources can not connect, reason is: %s", cause.getMessage());
     }
 }
diff --git a/shardingsphere-kernel/shardingsphere-data-pipeline/shardingsphere-data-pipeline-core/src/main/java/org/apache/shardingsphere/data/pipeline/core/exception/job/PrepareJobWithInvalidSourceDataSourceException.java b/shardingsphere-kernel/shardingsphere-data-pipeline/shardingsphere-data-pipeline-core/src/main/java/org/apache/shardingsphere/data/pipeline/core/exception/job/PrepareJobWithInvalidSourceDataSourceException.java
index 00118c9b34d..3299d1c7a0b 100644
--- a/shardingsphere-kernel/shardingsphere-data-pipeline/shardingsphere-data-pipeline-core/src/main/java/org/apache/shardingsphere/data/pipeline/core/exception/job/PrepareJobWithInvalidSourceDataSourceException.java
+++ b/shardingsphere-kernel/shardingsphere-data-pipeline/shardingsphere-data-pipeline-core/src/main/java/org/apache/shardingsphere/data/pipeline/core/exception/job/PrepareJobWithInvalidSourceDataSourceException.java
@@ -28,6 +28,6 @@ public final class PrepareJobWithInvalidSourceDataSourceException extends Pipeli
     private static final long serialVersionUID = -7710035889344958565L;
     
     public PrepareJobWithInvalidSourceDataSourceException(final String dataSourceKey, final String toBeCheckedValue, final String actualValue) {
-        super(XOpenSQLState.GENERAL_ERROR, 86, "Source data source required `%s = %s`, now is `%s`", dataSourceKey, toBeCheckedValue, actualValue);
+        super(XOpenSQLState.GENERAL_ERROR, 87, "Source data source required `%s = %s`, now is `%s`", dataSourceKey, toBeCheckedValue, actualValue);
     }
 }
diff --git a/shardingsphere-kernel/shardingsphere-data-pipeline/shardingsphere-data-pipeline-core/src/main/java/org/apache/shardingsphere/data/pipeline/core/exception/job/PrepareJobWithTargetTableNotEmptyException.java b/shardingsphere-kernel/shardingsphere-data-pipeline/shardingsphere-data-pipeline-core/src/main/java/org/apache/shardingsphere/data/pipeline/core/exception/job/PrepareJobWithTargetTableNotEmptyException.java
index f25d4614adb..0dd75b8d965 100644
--- a/shardingsphere-kernel/shardingsphere-data-pipeline/shardingsphere-data-pipeline-core/src/main/java/org/apache/shardingsphere/data/pipeline/core/exception/job/PrepareJobWithTargetTableNotEmptyException.java
+++ b/shardingsphere-kernel/shardingsphere-data-pipeline/shardingsphere-data-pipeline-core/src/main/java/org/apache/shardingsphere/data/pipeline/core/exception/job/PrepareJobWithTargetTableNotEmptyException.java
@@ -28,6 +28,6 @@ public final class PrepareJobWithTargetTableNotEmptyException extends PipelineSQ
     private static final long serialVersionUID = -8462039913248251254L;
     
     public PrepareJobWithTargetTableNotEmptyException(final String tableName) {
-        super(XOpenSQLState.GENERAL_ERROR, 84, "Target table `%s` is not empty before migration", tableName);
+        super(XOpenSQLState.GENERAL_ERROR, 85, "Target table `%s` is not empty before migration", tableName);
     }
 }
diff --git a/shardingsphere-kernel/shardingsphere-data-pipeline/shardingsphere-data-pipeline-core/src/main/java/org/apache/shardingsphere/data/pipeline/core/exception/job/PrepareJobWithoutEnoughPrivilegeException.java b/shardingsphere-kernel/shardingsphere-data-pipeline/shardingsphere-data-pipeline-core/src/main/java/org/apache/shardingsphere/data/pipeline/core/exception/job/PrepareJobWithoutEnoughPrivilegeException.java
index 4acea96575d..45cfd619d84 100644
--- a/shardingsphere-kernel/shardingsphere-data-pipeline/shardingsphere-data-pipeline-core/src/main/java/org/apache/shardingsphere/data/pipeline/core/exception/job/PrepareJobWithoutEnoughPrivilegeException.java
+++ b/shardingsphere-kernel/shardingsphere-data-pipeline/shardingsphere-data-pipeline-core/src/main/java/org/apache/shardingsphere/data/pipeline/core/exception/job/PrepareJobWithoutEnoughPrivilegeException.java
@@ -30,6 +30,6 @@ public final class PrepareJobWithoutEnoughPrivilegeException extends PipelineSQL
     private static final long serialVersionUID = -8462039913248251254L;
     
     public PrepareJobWithoutEnoughPrivilegeException(final Collection<String> privileges) {
-        super(XOpenSQLState.PRIVILEGE_NOT_GRANTED, 85, "Source data source lacks %s privilege(s)", privileges);
+        super(XOpenSQLState.PRIVILEGE_NOT_GRANTED, 86, "Source data source lacks %s privilege(s)", privileges);
     }
 }
diff --git a/shardingsphere-kernel/shardingsphere-data-pipeline/shardingsphere-data-pipeline-core/src/main/java/org/apache/shardingsphere/data/pipeline/core/exception/job/PrepareJobWithoutUserException.java b/shardingsphere-kernel/shardingsphere-data-pipeline/shardingsphere-data-pipeline-core/src/main/java/org/apache/shardingsphere/data/pipeline/core/exception/job/PrepareJobWithoutUserException.java
index d80861dc9ae..b376abc5c6d 100644
--- a/shardingsphere-kernel/shardingsphere-data-pipeline/shardingsphere-data-pipeline-core/src/main/java/org/apache/shardingsphere/data/pipeline/core/exception/job/PrepareJobWithoutUserException.java
+++ b/shardingsphere-kernel/shardingsphere-data-pipeline/shardingsphere-data-pipeline-core/src/main/java/org/apache/shardingsphere/data/pipeline/core/exception/job/PrepareJobWithoutUserException.java
@@ -28,6 +28,6 @@ public final class PrepareJobWithoutUserException extends PipelineSQLException {
     private static final long serialVersionUID = 7250019436391155770L;
     
     public PrepareJobWithoutUserException(final String username) {
-        super(XOpenSQLState.PRIVILEGE_NOT_GRANTED, 87, "User `%s` does exist", username);
+        super(XOpenSQLState.PRIVILEGE_NOT_GRANTED, 88, "User `%s` does exist", username);
     }
 }
diff --git a/shardingsphere-kernel/shardingsphere-data-pipeline/shardingsphere-data-pipeline-core/src/main/java/org/apache/shardingsphere/data/pipeline/core/exception/job/SplitPipelineJobException.java b/shardingsphere-kernel/shardingsphere-data-pipeline/shardingsphere-data-pipeline-core/src/main/java/org/apache/shardingsphere/data/pipeline/core/exception/job/SplitPipelineJobByRangeException.java
similarity index 77%
rename from shardingsphere-kernel/shardingsphere-data-pipeline/shardingsphere-data-pipeline-core/src/main/java/org/apache/shardingsphere/data/pipeline/core/exception/job/SplitPipelineJobException.java
rename to shardingsphere-kernel/shardingsphere-data-pipeline/shardingsphere-data-pipeline-core/src/main/java/org/apache/shardingsphere/data/pipeline/core/exception/job/SplitPipelineJobByRangeException.java
index 7a82e6da397..96396bd81fc 100644
--- a/shardingsphere-kernel/shardingsphere-data-pipeline/shardingsphere-data-pipeline-core/src/main/java/org/apache/shardingsphere/data/pipeline/core/exception/job/SplitPipelineJobException.java
+++ b/shardingsphere-kernel/shardingsphere-data-pipeline/shardingsphere-data-pipeline-core/src/main/java/org/apache/shardingsphere/data/pipeline/core/exception/job/SplitPipelineJobByRangeException.java
@@ -21,13 +21,13 @@ import org.apache.shardingsphere.data.pipeline.core.exception.PipelineSQLExcepti
 import org.apache.shardingsphere.infra.util.exception.external.sql.sqlstate.XOpenSQLState;
 
 /**
- * Split pipeline job exception.
+ * Split pipeline job by range exception.
  */
-public final class SplitPipelineJobException extends PipelineSQLException {
+public final class SplitPipelineJobByRangeException extends PipelineSQLException {
     
     private static final long serialVersionUID = -8509592086832334026L;
     
-    public SplitPipelineJobException(final String tableName, final String reason) {
-        super(XOpenSQLState.GENERAL_ERROR, 83, "Can not split range for table `%s`, reason: %s", tableName, reason);
+    public SplitPipelineJobByRangeException(final String tableName, final String reason) {
+        super(XOpenSQLState.GENERAL_ERROR, 83, "Can not split by range for table `%s`, reason is: %s", tableName, reason);
     }
 }
diff --git a/shardingsphere-kernel/shardingsphere-data-pipeline/shardingsphere-data-pipeline-core/src/main/java/org/apache/shardingsphere/data/pipeline/core/exception/job/PrepareJobWithInvalidConnectionException.java b/shardingsphere-kernel/shardingsphere-data-pipeline/shardingsphere-data-pipeline-core/src/main/java/org/apache/shardingsphere/data/pipeline/core/exception/job/SplitPipelineJobByUniqueKeyException.java
similarity index 69%
copy from shardingsphere-kernel/shardingsphere-data-pipeline/shardingsphere-data-pipeline-core/src/main/java/org/apache/shardingsphere/data/pipeline/core/exception/job/PrepareJobWithInvalidConnectionException.java
copy to shardingsphere-kernel/shardingsphere-data-pipeline/shardingsphere-data-pipeline-core/src/main/java/org/apache/shardingsphere/data/pipeline/core/exception/job/SplitPipelineJobByUniqueKeyException.java
index b3147798751..dd424701843 100644
--- a/shardingsphere-kernel/shardingsphere-data-pipeline/shardingsphere-data-pipeline-core/src/main/java/org/apache/shardingsphere/data/pipeline/core/exception/job/PrepareJobWithInvalidConnectionException.java
+++ b/shardingsphere-kernel/shardingsphere-data-pipeline/shardingsphere-data-pipeline-core/src/main/java/org/apache/shardingsphere/data/pipeline/core/exception/job/SplitPipelineJobByUniqueKeyException.java
@@ -23,13 +23,13 @@ import org.apache.shardingsphere.infra.util.exception.external.sql.sqlstate.XOpe
 import java.sql.SQLException;
 
 /**
- * Prepare job with invalid connection exception.
+ * Split pipeline job by unique key exception.
  */
-public final class PrepareJobWithInvalidConnectionException extends PipelineSQLException {
+public final class SplitPipelineJobByUniqueKeyException extends PipelineSQLException {
     
-    private static final long serialVersionUID = 208040912786493973L;
+    private static final long serialVersionUID = -7804078676439253443L;
     
-    public PrepareJobWithInvalidConnectionException(final SQLException cause) {
-        super(XOpenSQLState.CONNECTION_EXCEPTION, 89, "Data sources can not connect, reason is: %s", cause.getMessage());
+    public SplitPipelineJobByUniqueKeyException(final String tableName, final String uniqueKey, final SQLException cause) {
+        super(XOpenSQLState.GENERAL_ERROR, 84, "Can not split by unique key `%s` for table `%s`, reason is: %s", uniqueKey, tableName, cause.getMessage());
     }
 }
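
Not part of the patch: a minimal, self-contained sketch of how the two split exceptions introduced above are intended to be raised, inferred from the constructor signatures and call sites in this commit. It assumes shardingsphere-data-pipeline-core from this branch on the classpath; the helper interface and method names are illustrative only.

    import java.sql.SQLException;
    
    import org.apache.shardingsphere.data.pipeline.core.exception.job.SplitPipelineJobByRangeException;
    import org.apache.shardingsphere.data.pipeline.core.exception.job.SplitPipelineJobByUniqueKeyException;
    
    public final class SplitExceptionUsageSketch {
        
        private SplitExceptionUsageSketch() {
        }
        
        // Mirrors InventoryTaskSplitter: an unsupported unique key type is reported as error 18083.
        public static void checkSplittableByRange(final String actualTableName, final boolean integerOrStringKey) {
            if (!integerOrStringKey) {
                throw new SplitPipelineJobByRangeException(actualTableName, "primary key is not integer or string type");
            }
        }
        
        // Mirrors InventoryTaskSplitter: a SQL failure while splitting by unique key is reported as error 18084,
        // replacing the removed PipelineJobPrepareFailedException.
        public static void splitByUniqueKey(final String actualTableName, final String uniqueKey, final UniqueKeyRangeQuery query) {
            try {
                query.loadMinAndMax();
            } catch (final SQLException ex) {
                throw new SplitPipelineJobByUniqueKeyException(actualTableName, uniqueKey, ex);
            }
        }
        
        // Hypothetical callback standing in for the real JDBC range query.
        public interface UniqueKeyRangeQuery {
            
            void loadMinAndMax() throws SQLException;
        }
    }
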
diff --git a/shardingsphere-kernel/shardingsphere-data-pipeline/shardingsphere-data-pipeline-core/src/main/java/org/apache/shardingsphere/data/pipeline/core/prepare/InventoryTaskSplitter.java b/shardingsphere-kernel/shardingsphere-data-pipeline/shardingsphere-data-pipeline-core/src/main/java/org/apache/shardingsphere/data/pipeline/core/prepare/InventoryTaskSplitter.java
index 23ce1729373..0d3a57f8ada 100644
--- a/shardingsphere-kernel/shardingsphere-data-pipeline/shardingsphere-data-pipeline-core/src/main/java/org/apache/shardingsphere/data/pipeline/core/prepare/InventoryTaskSplitter.java
+++ b/shardingsphere-kernel/shardingsphere-data-pipeline/shardingsphere-data-pipeline-core/src/main/java/org/apache/shardingsphere/data/pipeline/core/prepare/InventoryTaskSplitter.java
@@ -34,8 +34,8 @@ import org.apache.shardingsphere.data.pipeline.api.metadata.LogicTableName;
 import org.apache.shardingsphere.data.pipeline.api.metadata.loader.PipelineTableMetaDataLoader;
 import org.apache.shardingsphere.data.pipeline.core.context.InventoryIncrementalJobItemContext;
 import org.apache.shardingsphere.data.pipeline.core.context.InventoryIncrementalProcessContext;
-import org.apache.shardingsphere.data.pipeline.core.exception.job.PipelineJobPrepareFailedException;
-import org.apache.shardingsphere.data.pipeline.core.exception.job.SplitPipelineJobException;
+import org.apache.shardingsphere.data.pipeline.core.exception.job.SplitPipelineJobByUniqueKeyException;
+import org.apache.shardingsphere.data.pipeline.core.exception.job.SplitPipelineJobByRangeException;
 import org.apache.shardingsphere.data.pipeline.core.execute.ExecuteEngine;
 import org.apache.shardingsphere.data.pipeline.core.sqlbuilder.PipelineSQLBuilderFactory;
 import org.apache.shardingsphere.data.pipeline.core.task.InventoryTask;
@@ -150,7 +150,7 @@ public final class InventoryTaskSplitter {
         if (PipelineJdbcUtils.isStringColumn(uniqueKeyDataType)) {
             return getPositionByStringPrimaryKeyRange();
         }
-        throw new SplitPipelineJobException(dumperConfig.getActualTableName(), "primary key is not integer or string type");
+        throw new SplitPipelineJobByRangeException(dumperConfig.getActualTableName(), "primary key is not integer or string type");
     }
     
     private Collection<IngestPosition<?>> getPositionByIntegerPrimaryKeyRange(final InventoryIncrementalJobItemContext jobItemContext, final DataSource dataSource,
@@ -187,7 +187,7 @@ public final class InventoryTaskSplitter {
                 result.add(new IntegerPrimaryKeyPosition(0, 0));
             }
         } catch (final SQLException ex) {
-            throw new PipelineJobPrepareFailedException(String.format("Split task for table %s by primary key %s error", dumperConfig.getActualTableName(), dumperConfig.getUniqueKey()), ex);
+            throw new SplitPipelineJobByUniqueKeyException(dumperConfig.getActualTableName(), dumperConfig.getUniqueKey(), ex);
         }
         return result;
     }
diff --git a/shardingsphere-kernel/shardingsphere-data-pipeline/shardingsphere-data-pipeline-core/src/main/java/org/apache/shardingsphere/data/pipeline/core/util/PipelineTableMetaDataUtil.java b/shardingsphere-kernel/shardingsphere-data-pipeline/shardingsphere-data-pipeline-core/src/main/java/org/apache/shardingsphere/data/pipeline/core/util/PipelineTableMetaDataUtil.java
index 4b61486c9d3..abdb15dfff4 100644
--- a/shardingsphere-kernel/shardingsphere-data-pipeline/shardingsphere-data-pipeline-core/src/main/java/org/apache/shardingsphere/data/pipeline/core/util/PipelineTableMetaDataUtil.java
+++ b/shardingsphere-kernel/shardingsphere-data-pipeline/shardingsphere-data-pipeline-core/src/main/java/org/apache/shardingsphere/data/pipeline/core/util/PipelineTableMetaDataUtil.java
@@ -27,7 +27,7 @@ import org.apache.shardingsphere.data.pipeline.api.metadata.model.PipelineColumn
 import org.apache.shardingsphere.data.pipeline.api.metadata.model.PipelineIndexMetaData;
 import org.apache.shardingsphere.data.pipeline.api.metadata.model.PipelineTableMetaData;
 import org.apache.shardingsphere.data.pipeline.core.datasource.PipelineDataSourceFactory;
-import org.apache.shardingsphere.data.pipeline.core.exception.job.SplitPipelineJobException;
+import org.apache.shardingsphere.data.pipeline.core.exception.job.SplitPipelineJobByRangeException;
 import org.apache.shardingsphere.data.pipeline.core.metadata.loader.StandardPipelineTableMetaDataLoader;
 import org.apache.shardingsphere.infra.util.exception.ShardingSpherePreconditions;
 
@@ -107,20 +107,20 @@ public final class PipelineTableMetaDataUtil {
     }
     
     private static PipelineColumnMetaData mustGetAnAppropriateUniqueKeyColumn(final PipelineTableMetaData tableMetaData, final String tableName) {
-        ShardingSpherePreconditions.checkNotNull(tableMetaData, () -> new SplitPipelineJobException(tableName, "can not get table metadata"));
+        ShardingSpherePreconditions.checkNotNull(tableMetaData, () -> new SplitPipelineJobByRangeException(tableName, "can not get table metadata"));
         List<String> primaryKeys = tableMetaData.getPrimaryKeyColumns();
         if (1 == primaryKeys.size()) {
             return tableMetaData.getColumnMetaData(tableMetaData.getPrimaryKeyColumns().get(0));
         }
-        ShardingSpherePreconditions.checkState(primaryKeys.isEmpty(), () -> new SplitPipelineJobException(tableName, "primary key is union primary"));
+        ShardingSpherePreconditions.checkState(primaryKeys.isEmpty(), () -> new SplitPipelineJobByRangeException(tableName, "primary key is union primary"));
         Collection<PipelineIndexMetaData> uniqueIndexes = tableMetaData.getUniqueIndexes();
-        ShardingSpherePreconditions.checkState(!uniqueIndexes.isEmpty(), () -> new SplitPipelineJobException(tableName, "no primary key or unique index"));
+        ShardingSpherePreconditions.checkState(!uniqueIndexes.isEmpty(), () -> new SplitPipelineJobByRangeException(tableName, "no primary key or unique index"));
         if (1 == uniqueIndexes.size() && 1 == uniqueIndexes.iterator().next().getColumns().size()) {
             PipelineColumnMetaData column = uniqueIndexes.iterator().next().getColumns().get(0);
             if (!column.isNullable()) {
                 return column;
             }
         }
-        throw new SplitPipelineJobException(tableName, "table contains multiple unique index or unique index contains nullable/multiple column(s)");
+        throw new SplitPipelineJobByRangeException(tableName, "table contains multiple unique index or unique index contains nullable/multiple column(s)");
     }
 }
diff --git a/shardingsphere-kernel/shardingsphere-data-pipeline/shardingsphere-data-pipeline-dialect/shardingsphere-data-pipeline-mysql/src/test/java/org/apache/shardingsphere/data/pipeline/mysql/prepare/datasource/MySQLDataSourcePreparerTest.java b/shardingsphere-kernel/shardingsphere-data-pipeline/shardingsphere-data-pipeline-dialect/shardingsphere-data-pipeline-mysql/src/test/java/org/apache/shardingsphere/data/pipeline/mysql/prepare/datasource/MySQLDataSourcePreparerTest.java
index f40b22fa679..aa15b23e0c1 100644
--- a/shardingsphere-kernel/shardingsphere-data-pipeline/shardingsphere-data-pipeline-dialect/shardingsphere-data-pipeline-mysql/src/test/java/org/apache/shardingsphere/data/pipeline/mysql/prepare/datasource/MySQLDataSourcePreparerTest.java
+++ b/shardingsphere-kernel/shardingsphere-data-pipeline/shardingsphere-data-pipeline-dialect/shardingsphere-data-pipeline-mysql/src/test/java/org/apache/shardingsphere/data/pipeline/mysql/prepare/datasource/MySQLDataSourcePreparerTest.java
@@ -23,7 +23,7 @@ import org.apache.shardingsphere.data.pipeline.api.datasource.PipelineDataSource
 import org.apache.shardingsphere.data.pipeline.api.datasource.config.PipelineDataSourceConfiguration;
 import org.apache.shardingsphere.data.pipeline.api.datasource.config.PipelineDataSourceConfigurationFactory;
 import org.apache.shardingsphere.data.pipeline.api.datasource.config.impl.ShardingSpherePipelineDataSourceConfiguration;
-import org.apache.shardingsphere.data.pipeline.core.exception.job.PipelineJobPrepareFailedException;
+import org.apache.shardingsphere.data.pipeline.core.exception.job.SplitPipelineJobByUniqueKeyException;
 import org.apache.shardingsphere.data.pipeline.core.prepare.datasource.PrepareTargetTablesParameter;
 import org.junit.Before;
 import org.junit.Ignore;
@@ -98,7 +98,7 @@ public final class MySQLDataSourcePreparerTest {
         }
     }
     
-    @Test(expected = PipelineJobPrepareFailedException.class)
+    @Test(expected = SplitPipelineJobByUniqueKeyException.class)
     public void assertThrowPrepareFailedException() throws SQLException {
         try (MockedStatic<PipelineDataSourceConfigurationFactory> mockedStaticPipelineDataSourceConfigurationFactory = mockStatic(PipelineDataSourceConfigurationFactory.class)) {
             mockedStaticPipelineDataSourceConfigurationFactory.when(() -> PipelineDataSourceConfigurationFactory.newInstance(eq("ShardingSphereJDBC"), eq("source")))
diff --git a/shardingsphere-kernel/shardingsphere-sql-translator/shardingsphere-sql-translator-distsql/shardingsphere-sql-translator-distsql-handler/src/test/java/org/apache/shardingsphere/sqltranslator/distsql/handler/SQLTranslatorRuleQueryResultSetTest.java b/shardingsphere-kernel/shardingsphere-sql-translator/shardingsphere-sql-translator-distsql/shardingsphere-sql-translator-distsql-handler/src/test/java/org/apache/shardingsphere/sqltranslator/distsql/handler/SQLTranslatorRuleQueryResultSetTest.java
index f49986aef49..52b97c58a14 100644
--- a/shardingsphere-kernel/shardingsphere-sql-translator/shardingsphere-sql-translator-distsql/shardingsphere-sql-translator-distsql-handler/src/test/java/org/apache/shardingsphere/sqltranslator/distsql/handler/SQLTranslatorRuleQueryResultSetTest.java
+++ b/shardingsphere-kernel/shardingsphere-sql-translator/shardingsphere-sql-translator-distsql/shardingsphere-sql-translator-distsql-handler/src/test/java/org/apache/shardingsphere/sqltranslator/distsql/handler/SQLTranslatorRuleQueryResultSetTest.java
@@ -27,7 +27,7 @@ import java.util.Collection;
 import java.util.Optional;
 
 import static org.hamcrest.CoreMatchers.is;
-import static org.junit.Assert.assertThat;
+import static org.hamcrest.MatcherAssert.assertThat;
 import static org.junit.Assert.assertTrue;
 import static org.mockito.Mockito.mock;
 import static org.mockito.Mockito.when;
diff --git a/shardingsphere-mode/shardingsphere-mode-core/src/test/java/org/apache/shardingsphere/mode/metadata/MetaDataContextsFactoryTest.java b/shardingsphere-mode/shardingsphere-mode-core/src/test/java/org/apache/shardingsphere/mode/metadata/MetaDataContextsFactoryTest.java
index 5036d41ac94..37102252d93 100644
--- a/shardingsphere-mode/shardingsphere-mode-core/src/test/java/org/apache/shardingsphere/mode/metadata/MetaDataContextsFactoryTest.java
+++ b/shardingsphere-mode/shardingsphere-mode-core/src/test/java/org/apache/shardingsphere/mode/metadata/MetaDataContextsFactoryTest.java
@@ -54,7 +54,7 @@ import java.util.Properties;
 
 import static org.hamcrest.CoreMatchers.instanceOf;
 import static org.hamcrest.CoreMatchers.is;
-import static org.junit.Assert.assertThat;
+import static org.hamcrest.MatcherAssert.assertThat;
 import static org.junit.Assert.assertTrue;
 import static org.mockito.Mockito.RETURNS_DEEP_STUBS;
 import static org.mockito.Mockito.any;
@@ -131,7 +131,7 @@ public final class MetaDataContextsFactoryTest {
     public void assertCreateWithJDBCInstanceMetadata() throws SQLException {
         InstanceContext instanceContext = mock(InstanceContext.class, RETURNS_DEEP_STUBS);
         when(instanceContext.getInstance().getMetaData()).thenReturn(jdbcInstanceMetaData);
-        try (MetaDataContexts actual = MetaDataContextsFactory.create(metaDataPersistService, createContextManagerBuilderParameter(), instanceContext);) {
+        try (MetaDataContexts actual = MetaDataContextsFactory.create(metaDataPersistService, createContextManagerBuilderParameter(), instanceContext)) {
             assertThat(actual.getMetaData().getGlobalRuleMetaData().getRules().size(), is(1));
             assertThat(actual.getMetaData().getGlobalRuleMetaData().getRules().iterator().next(), instanceOf(MockedRule.class));
             assertTrue(actual.getMetaData().getDatabases().containsKey("foo_db"));
@@ -143,7 +143,7 @@ public final class MetaDataContextsFactoryTest {
     public void assertCreateWithProxyInstanceMetadata() throws SQLException {
         when(databaseMetaDataPersistService.loadAllDatabaseNames()).thenReturn(Collections.singletonList("foo_db"));
         when(metaDataPersistService.getDatabaseMetaDataService()).thenReturn(databaseMetaDataPersistService);
-        try (MetaDataContexts actual = MetaDataContextsFactory.create(metaDataPersistService, createContextManagerBuilderParameter(), mock(InstanceContext.class, RETURNS_DEEP_STUBS));) {
+        try (MetaDataContexts actual = MetaDataContextsFactory.create(metaDataPersistService, createContextManagerBuilderParameter(), mock(InstanceContext.class, RETURNS_DEEP_STUBS))) {
             assertThat(actual.getPersistService(), is(metaDataPersistService));
             assertThat(actual.getMetaData().getGlobalRuleMetaData().getRules().size(), is(1));
             assertThat(actual.getMetaData().getGlobalRuleMetaData().getRules().iterator().next(), instanceOf(MockedRule.class));
diff --git a/shardingsphere-sql-parser/shardingsphere-sql-parser-statement/src/test/java/org/apache/shardingsphere/sql/parser/sql/common/util/SQLUtilTest.java b/shardingsphere-sql-parser/shardingsphere-sql-parser-statement/src/test/java/org/apache/shardingsphere/sql/parser/sql/common/util/SQLUtilTest.java
index 9b9db90cab9..22a83e0e6e2 100644
--- a/shardingsphere-sql-parser/shardingsphere-sql-parser-statement/src/test/java/org/apache/shardingsphere/sql/parser/sql/common/util/SQLUtilTest.java
+++ b/shardingsphere-sql-parser/shardingsphere-sql-parser-statement/src/test/java/org/apache/shardingsphere/sql/parser/sql/common/util/SQLUtilTest.java
@@ -30,8 +30,8 @@ import java.math.BigDecimal;
 import java.math.BigInteger;
 
 import static org.hamcrest.CoreMatchers.is;
+import static org.hamcrest.MatcherAssert.assertThat;
 import static org.junit.Assert.assertNull;
-import static org.junit.Assert.assertThat;
 import static org.junit.Assert.assertTrue;
 import static org.junit.Assert.assertFalse;
 
diff --git a/shardingsphere-test/shardingsphere-integration-test/shardingsphere-integration-test-scaling/src/test/java/org/apache/shardingsphere/integration/data/pipeline/cases/migration/general/MySQLMigrationGeneralIT.java b/shardingsphere-test/shardingsphere-integration-test/shardingsphere-integration-test-scaling/src/test/java/org/apache/shardingsphere/integration/data/pipeline/cases/migration/general/MySQLMigrationGeneralIT.java
index 5f6c60c08f5..aa6c80cf62b 100644
--- a/shardingsphere-test/shardingsphere-integration-test/shardingsphere-integration-test-scaling/src/test/java/org/apache/shardingsphere/integration/data/pipeline/cases/migration/general/MySQLMigrationGeneralIT.java
+++ b/shardingsphere-test/shardingsphere-integration-test/shardingsphere-integration-test-scaling/src/test/java/org/apache/shardingsphere/integration/data/pipeline/cases/migration/general/MySQLMigrationGeneralIT.java
@@ -41,7 +41,7 @@ import java.util.List;
 import java.util.Map;
 
 import static org.hamcrest.CoreMatchers.is;
-import static org.junit.Assert.assertThat;
+import static org.hamcrest.MatcherAssert.assertThat;
 import static org.junit.Assert.assertTrue;
 
 /**
diff --git a/shardingsphere-test/shardingsphere-integration-test/shardingsphere-integration-test-scaling/src/test/java/org/apache/shardingsphere/integration/data/pipeline/cases/migration/primarykey/TextPrimaryKeyMigrationIT.java b/shardingsphere-test/shardingsphere-integration-test/shardingsphere-integration-test-scaling/src/test/java/org/apache/shardingsphere/integration/data/pipeline/cases/migration/primarykey/TextPrimaryKeyMigrationIT.java
index e5e3e76ce6d..d1ee2aad484 100644
--- a/shardingsphere-test/shardingsphere-integration-test/shardingsphere-integration-test-scaling/src/test/java/org/apache/shardingsphere/integration/data/pipeline/cases/migration/primarykey/TextPrimaryKeyMigrationIT.java
+++ b/shardingsphere-test/shardingsphere-integration-test/shardingsphere-integration-test-scaling/src/test/java/org/apache/shardingsphere/integration/data/pipeline/cases/migration/primarykey/TextPrimaryKeyMigrationIT.java
@@ -39,8 +39,8 @@ import java.util.LinkedList;
 import java.util.List;
 import java.util.concurrent.ThreadLocalRandom;
 
+import static org.hamcrest.MatcherAssert.assertThat;
 import static org.hamcrest.Matchers.is;
-import static org.junit.Assert.assertThat;
 
 @RunWith(Parameterized.class)
 @Slf4j
diff --git a/shardingsphere-test/shardingsphere-parser-test/src/main/java/org/apache/shardingsphere/test/sql/parser/parameterized/asserts/statement/distsql/rql/impl/rule/ShowShardingTableRulesUsedAuditorStatementAssert.java b/shardingsphere-test/shardingsphere-parser-test/src/main/java/org/apache/shardingsphere/test/sql/parser/parameterized/asserts/statement/distsql/rql/impl/rule/ShowShardingTableRulesUsedAuditorStatementAssert.java
index 4c9b90c4ba0..5acd8eebbd6 100644
--- a/shardingsphere-test/shardingsphere-parser-test/src/main/java/org/apache/shardingsphere/test/sql/parser/parameterized/asserts/statement/distsql/rql/impl/rule/ShowShardingTableRulesUsedAuditorStatementAssert.java
+++ b/shardingsphere-test/shardingsphere-parser-test/src/main/java/org/apache/shardingsphere/test/sql/parser/parameterized/asserts/statement/distsql/rql/impl/rule/ShowShardingTableRulesUsedAuditorStatementAssert.java
@@ -26,8 +26,8 @@ import org.apache.shardingsphere.test.sql.parser.parameterized.asserts.segment.d
 import org.apache.shardingsphere.test.sql.parser.parameterized.jaxb.cases.domain.statement.distsql.rql.ShowShardingTableRulesUsedAuditorStatementTestCase;
 
 import static org.hamcrest.CoreMatchers.is;
+import static org.hamcrest.MatcherAssert.assertThat;
 import static org.junit.Assert.assertFalse;
-import static org.junit.Assert.assertThat;
 import static org.junit.Assert.assertTrue;
 
 /**
diff --git a/shardingsphere-test/shardingsphere-pipeline-test/src/test/java/org/apache/shardingsphere/data/pipeline/core/prepare/InventoryTaskSplitterTest.java b/shardingsphere-test/shardingsphere-pipeline-test/src/test/java/org/apache/shardingsphere/data/pipeline/core/prepare/InventoryTaskSplitterTest.java
index 35efb8e40f9..9e0d6934cf7 100644
--- a/shardingsphere-test/shardingsphere-pipeline-test/src/test/java/org/apache/shardingsphere/data/pipeline/core/prepare/InventoryTaskSplitterTest.java
+++ b/shardingsphere-test/shardingsphere-pipeline-test/src/test/java/org/apache/shardingsphere/data/pipeline/core/prepare/InventoryTaskSplitterTest.java
@@ -24,7 +24,7 @@ import org.apache.shardingsphere.data.pipeline.api.datasource.PipelineDataSource
 import org.apache.shardingsphere.data.pipeline.api.datasource.PipelineDataSourceWrapper;
 import org.apache.shardingsphere.data.pipeline.api.ingest.position.IntegerPrimaryKeyPosition;
 import org.apache.shardingsphere.data.pipeline.api.metadata.model.PipelineColumnMetaData;
-import org.apache.shardingsphere.data.pipeline.core.exception.job.SplitPipelineJobException;
+import org.apache.shardingsphere.data.pipeline.core.exception.job.SplitPipelineJobByRangeException;
 import org.apache.shardingsphere.data.pipeline.core.task.InventoryTask;
 import org.apache.shardingsphere.data.pipeline.core.util.JobConfigurationBuilder;
 import org.apache.shardingsphere.data.pipeline.core.util.PipelineContextUtil;
@@ -45,8 +45,8 @@ import java.sql.Types;
 import java.util.List;
 
 import static org.hamcrest.CoreMatchers.is;
+import static org.hamcrest.MatcherAssert.assertThat;
 import static org.junit.Assert.assertNotNull;
-import static org.junit.Assert.assertThat;
 
 public final class InventoryTaskSplitterTest {
     
@@ -120,7 +120,7 @@ public final class InventoryTaskSplitterTest {
         assertThat(actual.size(), is(1));
     }
     
-    @Test(expected = SplitPipelineJobException.class)
+    @Test(expected = SplitPipelineJobByRangeException.class)
     public void assertSplitInventoryDataWithIllegalKeyDataType() throws SQLException, NoSuchFieldException, IllegalAccessException {
         initUnionPrimaryEnvironment(taskConfig.getDumperConfig());
         InventoryDumperConfiguration dumperConfig = ReflectionUtil.getFieldValue(inventoryTaskSplitter, "dumperConfig", InventoryDumperConfiguration.class);
@@ -130,7 +130,7 @@ public final class InventoryTaskSplitterTest {
         inventoryTaskSplitter.splitInventoryData(jobItemContext);
     }
     
-    @Test(expected = SplitPipelineJobException.class)
+    @Test(expected = SplitPipelineJobByRangeException.class)
     public void assertSplitInventoryDataWithoutPrimaryAndUniqueIndex() throws SQLException, NoSuchFieldException, IllegalAccessException {
         initNoPrimaryEnvironment(taskConfig.getDumperConfig());
         try (PipelineDataSourceWrapper dataSource = dataSourceManager.getDataSource(taskConfig.getDumperConfig().getDataSourceConfig())) {

