This is an automated email from the ASF dual-hosted git repository.

jin pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/incubator-hugegraph.git


The following commit(s) were added to refs/heads/master by this push:
     new 6c0d596ea refact: use standard UTF-8 charset & enhance CI configs (#2095)
6c0d596ea is described below

commit 6c0d596eac57d5bfc80cff86cb5565818d876817
Author: imbajin <[email protected]>
AuthorDate: Tue Mar 7 23:10:31 2023 +0800

    refact: use standard UTF-8 charset & enhance CI configs (#2095)
---
 .asf.yaml                                          |  1 +
 .github/workflows/check-dependencies.yml           | 27 +++++++++++---
 .github/workflows/ci.yml                           |  4 +--
 .github/workflows/codeql-analysis.yml              |  6 ++--
 .github/workflows/licence-checker.yml              |  4 +--
 .github/workflows/stale.yml                        |  2 +-
 .../backend/store/cassandra/CassandraShard.java    |  5 +--
 .../apache/hugegraph/analyzer/AnsjAnalyzer.java    |  2 +-
 .../hugegraph/backend/serializer/BytesBuffer.java  |  6 ++--
 .../traversal/optimize/HugeScriptTraversal.java    | 16 ++++-----
 .../apache/hugegraph/type/define/Cardinality.java  |  8 ++---
 .../java/org/apache/hugegraph/util/LZ4Util.java    | 22 ++++--------
 .../org/apache/hugegraph/util/StringEncoding.java  | 41 ++++++++--------------
 .../hugegraph/tinkerpop/ProcessBasicSuite.java     | 11 +++---
 .../hugegraph/tinkerpop/StructureBasicSuite.java   |  7 ++--
 15 files changed, 72 insertions(+), 90 deletions(-)

diff --git a/.asf.yaml b/.asf.yaml
index be8f953ae..9650e2973 100644
--- a/.asf.yaml
+++ b/.asf.yaml
@@ -40,6 +40,7 @@ github:
         contexts:
           - Analyze (java)
           - CodeQL
+          - check-license
           - build (memory, 8)
           - build (memory, 11)
       required_pull_request_reviews:
diff --git a/.github/workflows/check-dependencies.yml b/.github/workflows/check-dependencies.yml
index e7dbe9f1b..60185492b 100644
--- a/.github/workflows/check-dependencies.yml
+++ b/.github/workflows/check-dependencies.yml
@@ -1,4 +1,4 @@
-name: third-party dependencies check
+name: "3rd-party"
 
 on:
   push:
@@ -6,26 +6,43 @@ on:
       - master
       - /^release-.*$/
   pull_request:
+  
+permissions:
+  contents: read
 
 jobs:
-  build:
+  dependency-check:
     runs-on: ubuntu-latest
     env:
       SCRIPT_DEPENDENCY: hugegraph-dist/scripts/dependency
     steps:
       - name: Checkout source
         uses: actions/checkout@v3
-      - name: Set up JDK 8
+      - name: Set up JDK 11
         uses: actions/setup-java@v3
         with:
-          java-version: '8'
+          java-version: '11'
           distribution: 'adopt'
       - name: mvn install
         run: |
-          mvn install -DskipTests=true
+          mvn install -DskipTests=true -ntp
       - name: generate current dependencies
         run: |
          bash $SCRIPT_DEPENDENCY/regenerate_known_dependencies.sh current-dependencies.txt
       - name: check third dependencies
         run: |
           bash $SCRIPT_DEPENDENCY/check_dependencies.sh
+
+  dependency-review:
+    runs-on: ubuntu-latest
+    steps:
+      - name: 'Checkout Repository'
+        uses: actions/checkout@v3
+      - name: 'Dependency Review'
+        uses: actions/dependency-review-action@v3
+        # Refer: https://github.com/actions/dependency-review-action
+        with:
+          fail-on-severity: low
+          # Action will fail if dependencies don't match the list
+          #allow-licenses: Apache-2.0, MIT
+          #deny-licenses: GPL-3.0, AGPL-1.0, AGPL-3.0, LGPL-2.0, CC-BY-3.0
diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml
index 9c78051f7..414fd4a3f 100644
--- a/.github/workflows/ci.yml
+++ b/.github/workflows/ci.yml
@@ -1,4 +1,4 @@
-name: hugegraph-ci
+name: "hugegraph-ci"
 
 on:
   push:
@@ -7,8 +7,6 @@ on:
       - 'release-*'
       - 'test-*'
   pull_request:
-    branches:
-      - '**'
 
 jobs:
   build:
diff --git a/.github/workflows/codeql-analysis.yml b/.github/workflows/codeql-analysis.yml
index 75f0c78c4..a8e40be83 100644
--- a/.github/workflows/codeql-analysis.yml
+++ b/.github/workflows/codeql-analysis.yml
@@ -1,10 +1,8 @@
-# You may wish to alter this file to override the set of languages analyzed,
-# or to provide custom queries or build logic.
 name: "CodeQL"
 
 on:
   push:
-    branches: [ master, release-*, v*.* ]
+    branches: [ master, release-* ]
   pull_request:
     # The branches below must be a subset of the branches above
     # branches: [ master ] # enable in all PR
@@ -33,7 +31,7 @@ jobs:
         uses: actions/setup-java@v3
         with:
           distribution: 'zulu'
-          java-version: '8'
+          java-version: '11'
 
       # Initializes the CodeQL tools for scanning.
       - name: Initialize CodeQL
diff --git a/.github/workflows/licence-checker.yml b/.github/workflows/licence-checker.yml
index b28d1a81b..163e59e1d 100644
--- a/.github/workflows/licence-checker.yml
+++ b/.github/workflows/licence-checker.yml
@@ -1,10 +1,10 @@
-name: License checker
+name: "License checker"
 
 on:
   push:
     branches:
       - master
-      - /^v[0-9]\..*$/
+      - 'release-*'
   pull_request:
 
 jobs:
diff --git a/.github/workflows/stale.yml b/.github/workflows/stale.yml
index dc8a70408..6b8e11f4b 100644
--- a/.github/workflows/stale.yml
+++ b/.github/workflows/stale.yml
@@ -1,4 +1,4 @@
-name: Mark stale issues and pull requests
+name: "Mark stale issues and pull requests"
 
 on:
   schedule:
diff --git a/hugegraph-cassandra/src/main/java/org/apache/hugegraph/backend/store/cassandra/CassandraShard.java b/hugegraph-cassandra/src/main/java/org/apache/hugegraph/backend/store/cassandra/CassandraShard.java
index 14c9f44ac..c5734f62e 100644
--- a/hugegraph-cassandra/src/main/java/org/apache/hugegraph/backend/store/cassandra/CassandraShard.java
+++ b/hugegraph-cassandra/src/main/java/org/apache/hugegraph/backend/store/cassandra/CassandraShard.java
@@ -56,12 +56,13 @@ import com.google.common.collect.ImmutableMap;
  * CassandraShard is used for cassandra scanning operations.
  * Each shard represents a range of tokens for a node.
  * Reading data from a given shard does not cross multiple nodes.
+ * <p>
  * Refer to AbstractColumnFamilyInputFormat from:
  * <a href="https://github.com/2013Commons/hive-cassandra/">...</a>
  */
 public class CassandraShard {
 
-    /* The minimal shard size should >= 1M to prevent too many number of shards */
+    /** The minimal shard size should >= 1M to prevent too many number of shards */
     private static final int MIN_SHARD_SIZE = (int) Bytes.MB;
 
     private CassandraSessionPool.Session session;
@@ -228,7 +229,7 @@ public class CassandraShard {
                                               tokenRange.getEnd().toString());
         Row row = resultSet.one();
 
-        long meanPartitionSize = 0L;
+        long meanPartitionSize;
         long partitionsCount = 0L;
         long splitCount = 0L;
 
diff --git a/hugegraph-core/src/main/java/org/apache/hugegraph/analyzer/AnsjAnalyzer.java b/hugegraph-core/src/main/java/org/apache/hugegraph/analyzer/AnsjAnalyzer.java
index 9c8d6c01b..78890c830 100644
--- a/hugegraph-core/src/main/java/org/apache/hugegraph/analyzer/AnsjAnalyzer.java
+++ b/hugegraph-core/src/main/java/org/apache/hugegraph/analyzer/AnsjAnalyzer.java
@@ -56,7 +56,7 @@ public class AnsjAnalyzer implements Analyzer {
 
     @Override
     public Set<String> segment(String text) {
-        Result terms = null;
+        Result terms;
         switch (this.analysis) {
             case "BaseAnalysis":
                 terms = BaseAnalysis.parse(text);
diff --git a/hugegraph-core/src/main/java/org/apache/hugegraph/backend/serializer/BytesBuffer.java b/hugegraph-core/src/main/java/org/apache/hugegraph/backend/serializer/BytesBuffer.java
index 2b87e3c57..da66cc4f3 100644
--- a/hugegraph-core/src/main/java/org/apache/hugegraph/backend/serializer/BytesBuffer.java
+++ b/hugegraph-core/src/main/java/org/apache/hugegraph/backend/serializer/BytesBuffer.java
@@ -165,11 +165,11 @@ public final class BytesBuffer extends OutputStream {
         E.checkState(this.resize, "Can't resize for wrapped buffer");
 
         // Extra capacity as buffer
-        int newcapacity = size + this.buffer.limit() + DEFAULT_CAPACITY;
-        E.checkArgument(newcapacity <= MAX_BUFFER_CAPACITY,
+        int newCapacity = size + this.buffer.limit() + DEFAULT_CAPACITY;
+        E.checkArgument(newCapacity <= MAX_BUFFER_CAPACITY,
                         "Capacity exceeds max buffer capacity: %s",
                         MAX_BUFFER_CAPACITY);
-        ByteBuffer newBuffer = ByteBuffer.allocate(newcapacity);
+        ByteBuffer newBuffer = ByteBuffer.allocate(newCapacity);
         ((Buffer) this.buffer).flip();
         newBuffer.put(this.buffer);
         this.buffer = newBuffer;
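
For reference, a minimal standalone sketch of the grow-by-copy pattern used above: allocate a
larger java.nio.ByteBuffer, flip the old one into read mode, and copy it over. The class name,
the MAX_CAPACITY constant, and the grow() helper are illustrative only, not HugeGraph code.

    import java.nio.Buffer;
    import java.nio.ByteBuffer;

    public class GrowBufferSketch {

        // Illustrative cap, standing in for BytesBuffer.MAX_BUFFER_CAPACITY
        private static final int MAX_CAPACITY = 128 * 1024 * 1024;

        /** Return a new, larger buffer containing everything written so far. */
        static ByteBuffer grow(ByteBuffer old, int extraSize) {
            int newCapacity = old.limit() + extraSize;
            if (newCapacity > MAX_CAPACITY) {
                throw new IllegalArgumentException("Capacity exceeds max: " + MAX_CAPACITY);
            }
            ByteBuffer bigger = ByteBuffer.allocate(newCapacity);
            ((Buffer) old).flip();   // switch the old buffer to read mode (Java 8/9 safe cast)
            bigger.put(old);         // copy the written bytes into the new buffer
            return bigger;
        }

        public static void main(String[] args) {
            ByteBuffer buf = ByteBuffer.allocate(8);
            buf.put("12345678".getBytes(java.nio.charset.StandardCharsets.UTF_8));
            buf = grow(buf, 8);      // room for 8 more bytes
            System.out.println("capacity=" + buf.capacity() + ", position=" + buf.position());
        }
    }
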
diff --git a/hugegraph-core/src/main/java/org/apache/hugegraph/traversal/optimize/HugeScriptTraversal.java b/hugegraph-core/src/main/java/org/apache/hugegraph/traversal/optimize/HugeScriptTraversal.java
index a6582850d..66d40e9b2 100644
--- a/hugegraph-core/src/main/java/org/apache/hugegraph/traversal/optimize/HugeScriptTraversal.java
+++ b/hugegraph-core/src/main/java/org/apache/hugegraph/traversal/optimize/HugeScriptTraversal.java
@@ -53,10 +53,8 @@ public final class HugeScriptTraversal<S, E> extends DefaultTraversal<S, E> {
 
     private Object result;
 
-    public HugeScriptTraversal(TraversalSource traversalSource,
-                               String language, String script,
-                               Map<String, Object> bindings,
-                               Map<String, String> aliases) {
+    public HugeScriptTraversal(TraversalSource traversalSource, String language, String script,
+                               Map<String, Object> bindings, Map<String, String> aliases) {
         this.graph = traversalSource.getGraph();
         this.language = language;
         this.script = script;
@@ -75,8 +73,7 @@ public final class HugeScriptTraversal<S, E> extends DefaultTraversal<S, E> {
 
     @Override
     public void applyStrategies() throws IllegalStateException {
-        ScriptEngine engine =
-                     SingleGremlinScriptEngineManager.get(this.language);
+        ScriptEngine engine = SingleGremlinScriptEngineManager.get(this.language);
 
         Bindings bindings = engine.createBindings();
         bindings.putAll(this.bindings);
@@ -94,9 +91,8 @@ public final class HugeScriptTraversal<S, E> extends DefaultTraversal<S, E> {
         for (Map.Entry<String, String> entry : this.aliases.entrySet()) {
             Object value = bindings.get(entry.getValue());
             if (value == null) {
-                throw new IllegalArgumentException(String.format(
-                          "Invalid aliase '%s':'%s'",
-                          entry.getKey(), entry.getValue()));
+                throw new IllegalArgumentException(String.format("Invalid alias '%s':'%s'",
+                                                                 entry.getKey(), entry.getValue()));
             }
             bindings.put(entry.getKey(), value);
         }
@@ -105,7 +101,7 @@ public final class HugeScriptTraversal<S, E> extends DefaultTraversal<S, E> {
             Object result = engine.eval(this.script, bindings);
 
             if (result instanceof Admin) {
-                @SuppressWarnings({ "unchecked", "resource" })
+                @SuppressWarnings({ "unchecked"})
                 Admin<S, E> traversal = (Admin<S, E>) result;
                 traversal.getSideEffects().mergeInto(this.sideEffects);
                 traversal.getSteps().forEach(this::addStep);
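
As a rough sketch of the bind-then-alias pattern in applyStrategies() above, the snippet below
uses the plain JSR-223 API instead of TinkerPop's SingleGremlinScriptEngineManager, and assumes a
"nashorn" JavaScript engine is available (bundled, though deprecated, on the JDK 11 the CI now
targets); the binding names and alias map are made up for illustration.

    import java.util.Map;

    import javax.script.Bindings;
    import javax.script.ScriptEngine;
    import javax.script.ScriptEngineManager;
    import javax.script.ScriptException;

    public class AliasBindingSketch {
        public static void main(String[] args) throws ScriptException {
            ScriptEngine engine = new ScriptEngineManager().getEngineByName("nashorn");
            if (engine == null) {
                System.out.println("No 'nashorn' engine on this JDK, skipping");
                return;
            }
            Bindings bindings = engine.createBindings();
            bindings.put("graph", "my-graph-instance");   // original binding

            // Alias resolution: expose an existing binding under a second name,
            // failing fast when the referenced binding is missing
            Map<String, String> aliases = Map.of("g", "graph");
            for (Map.Entry<String, String> entry : aliases.entrySet()) {
                Object value = bindings.get(entry.getValue());
                if (value == null) {
                    throw new IllegalArgumentException(String.format("Invalid alias '%s':'%s'",
                                                                     entry.getKey(), entry.getValue()));
                }
                bindings.put(entry.getKey(), value);
            }

            Object result = engine.eval("g", bindings);    // the script sees the aliased value
            System.out.println(result);
        }
    }
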
diff --git a/hugegraph-core/src/main/java/org/apache/hugegraph/type/define/Cardinality.java b/hugegraph-core/src/main/java/org/apache/hugegraph/type/define/Cardinality.java
index 96c8095cd..e9d6b59bf 100644
--- a/hugegraph-core/src/main/java/org/apache/hugegraph/type/define/Cardinality.java
+++ b/hugegraph-core/src/main/java/org/apache/hugegraph/type/define/Cardinality.java
@@ -39,8 +39,8 @@ public enum Cardinality implements SerialEnum {
      */
     SET(3, "set");
 
-    private byte code = 0;
-    private String name = null;
+    private final byte code;
+    private final String name;
 
     static {
         SerialEnum.register(Cardinality.class);
@@ -78,8 +78,8 @@ public enum Cardinality implements SerialEnum {
             case set:
                 return SET;
             default:
-                throw new AssertionError(String.format(
-                          "Unrecognized cardinality: '%s'", cardinality));
+                throw new AssertionError(String.format("Unrecognized cardinality: '%s'",
+                                                       cardinality));
         }
     }
 }
diff --git a/hugegraph-core/src/main/java/org/apache/hugegraph/util/LZ4Util.java b/hugegraph-core/src/main/java/org/apache/hugegraph/util/LZ4Util.java
index d4c68e4c5..94ea6f910 100644
--- a/hugegraph-core/src/main/java/org/apache/hugegraph/util/LZ4Util.java
+++ b/hugegraph-core/src/main/java/org/apache/hugegraph/util/LZ4Util.java
@@ -37,25 +37,20 @@ public class LZ4Util {
         return compress(bytes, blockSize, DEFAULT_BUFFER_RATIO);
     }
 
-    public static BytesBuffer compress(byte[] bytes, int blockSize,
-                                       float bufferRatio) {
+    public static BytesBuffer compress(byte[] bytes, int blockSize, float bufferRatio) {
         float ratio = bufferRatio <= 0.0F ? DEFAULT_BUFFER_RATIO : bufferRatio;
         LZ4Factory factory = LZ4Factory.fastestInstance();
         LZ4Compressor compressor = factory.fastCompressor();
         int initBufferSize = Math.round(bytes.length / ratio);
         BytesBuffer buf = new BytesBuffer(initBufferSize);
-        LZ4BlockOutputStream lz4Output = new LZ4BlockOutputStream(
-                                         buf, blockSize, compressor);
+        LZ4BlockOutputStream lz4Output = new LZ4BlockOutputStream(buf, blockSize, compressor);
         try {
             lz4Output.write(bytes);
             lz4Output.close();
         } catch (IOException e) {
             throw new BackendException("Failed to compress", e);
         }
-        /*
-         * If need to perform reading outside the method,
-         * remember to call forReadWritten()
-         */
+        // If we need to perform reading outside the method, remember to call forReadWritten()
         return buf;
     }
 
@@ -63,8 +58,7 @@ public class LZ4Util {
         return decompress(bytes, blockSize, DEFAULT_BUFFER_RATIO);
     }
 
-    public static BytesBuffer decompress(byte[] bytes, int blockSize,
-                                         float bufferRatio) {
+    public static BytesBuffer decompress(byte[] bytes, int blockSize, float bufferRatio) {
         float ratio = bufferRatio <= 0.0F ? DEFAULT_BUFFER_RATIO : bufferRatio;
         LZ4Factory factory = LZ4Factory.fastestInstance();
         LZ4FastDecompressor decompressor = factory.fastDecompressor();
@@ -72,8 +66,7 @@ public class LZ4Util {
         int initBufferSize = Math.min(Math.round(bytes.length * ratio),
                                       BytesBuffer.MAX_BUFFER_CAPACITY);
         BytesBuffer buf = new BytesBuffer(initBufferSize);
-        LZ4BlockInputStream lzInput = new LZ4BlockInputStream(bais,
-                                                              decompressor);
+        LZ4BlockInputStream lzInput = new LZ4BlockInputStream(bais, decompressor);
         int count;
         byte[] buffer = new byte[blockSize];
         try {
@@ -84,10 +77,7 @@ public class LZ4Util {
         } catch (IOException e) {
             throw new BackendException("Failed to decompress", e);
         }
-        /*
-         * If need to perform reading outside the method,
-         * remember to call forReadWritten()
-         */
+        // If we need to perform reading outside the method, remember to call forReadWritten()
         return buf;
     }
 }
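
The helpers above are thin wrappers around the lz4-java block streams; assuming that same
net.jpountz dependency, a self-contained compress/decompress round trip over plain Java streams
looks roughly like this (block size and payload are arbitrary):

    import java.io.ByteArrayInputStream;
    import java.io.ByteArrayOutputStream;
    import java.io.IOException;
    import java.nio.charset.StandardCharsets;

    import net.jpountz.lz4.LZ4BlockInputStream;
    import net.jpountz.lz4.LZ4BlockOutputStream;
    import net.jpountz.lz4.LZ4Factory;

    public class Lz4RoundTripSketch {
        public static void main(String[] args) throws IOException {
            byte[] raw = "some text to compress".getBytes(StandardCharsets.UTF_8);
            int blockSize = 4 * 1024;
            LZ4Factory factory = LZ4Factory.fastestInstance();

            // Compress: push the payload through an LZ4 block output stream
            ByteArrayOutputStream compressed = new ByteArrayOutputStream();
            try (LZ4BlockOutputStream out =
                         new LZ4BlockOutputStream(compressed, blockSize, factory.fastCompressor())) {
                out.write(raw);
            }

            // Decompress: read it back block by block
            ByteArrayOutputStream restored = new ByteArrayOutputStream();
            try (LZ4BlockInputStream in = new LZ4BlockInputStream(
                    new ByteArrayInputStream(compressed.toByteArray()), factory.fastDecompressor())) {
                byte[] buffer = new byte[blockSize];
                int count;
                while ((count = in.read(buffer)) > 0) {
                    restored.write(buffer, 0, count);
                }
            }
            System.out.println(new String(restored.toByteArray(), StandardCharsets.UTF_8));
        }
    }
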
diff --git a/hugegraph-core/src/main/java/org/apache/hugegraph/util/StringEncoding.java b/hugegraph-core/src/main/java/org/apache/hugegraph/util/StringEncoding.java
index 6d92c4bf9..16bf3207a 100644
--- a/hugegraph-core/src/main/java/org/apache/hugegraph/util/StringEncoding.java
+++ b/hugegraph-core/src/main/java/org/apache/hugegraph/util/StringEncoding.java
@@ -14,16 +14,17 @@
 
 package org.apache.hugegraph.util;
 
-import java.io.UnsupportedEncodingException;
+import java.io.IOException;
+import java.nio.charset.StandardCharsets;
 import java.security.MessageDigest;
 import java.security.NoSuchAlgorithmException;
 import java.util.Base64;
 import java.util.UUID;
 
-import org.mindrot.jbcrypt.BCrypt;
-
 import org.apache.hugegraph.HugeException;
 import org.apache.hugegraph.backend.serializer.BytesBuffer;
+import org.mindrot.jbcrypt.BCrypt;
+
 import com.google.common.base.CharMatcher;
 
 /**
@@ -49,7 +50,7 @@ public final class StringEncoding {
     private static final Base64.Encoder BASE64_ENCODER = Base64.getEncoder();
     private static final Base64.Decoder BASE64_DECODER = Base64.getDecoder();
 
-    // Similar to {@link StringSerializer}
+    /** Similar to {@link StringSerializer} */
     public static int writeAsciiString(byte[] array, int offset, String value) {
         E.checkArgument(CharMatcher.ascii().matchesAllOf(value),
                         "'%s' must be ASCII string", value);
@@ -65,7 +66,8 @@ public final class StringEncoding {
             assert c <= 127;
             byte b = (byte) c;
             if (++i == len) {
-                b |= 0x80; // End marker
+                // End marker
+                b |= 0x80;
             }
             array[offset++] = b;
         } while (i < len);
@@ -75,7 +77,7 @@ public final class StringEncoding {
 
     public static String readAsciiString(byte[] array, int offset) {
         StringBuilder sb = new StringBuilder();
-        int c = 0;
+        int c;
         do {
             c = 0xFF & array[offset++];
             if (c != 0x80) {
@@ -92,33 +94,21 @@ public final class StringEncoding {
     }
 
     public static byte[] encode(String value) {
-        try {
-            return value.getBytes("UTF-8");
-        } catch (UnsupportedEncodingException e) {
-            throw new HugeException("Failed to encode string", e);
-        }
+        return value.getBytes(StandardCharsets.UTF_8);
     }
 
     public static String decode(byte[] bytes) {
         if (bytes.length == 0) {
             return STRING_EMPTY;
         }
-        try {
-            return new String(bytes, "UTF-8");
-        } catch (UnsupportedEncodingException e) {
-            throw new HugeException("Failed to decode string", e);
-        }
+        return new String(bytes, StandardCharsets.UTF_8);
     }
 
     public static String decode(byte[] bytes, int offset, int length) {
         if (length == 0) {
             return STRING_EMPTY;
         }
-        try {
-            return new String(bytes, offset, length, "UTF-8");
-        } catch (UnsupportedEncodingException e) {
-            throw new HugeException("Failed to decode string", e);
-        }
+        return new String(bytes, offset, length, StandardCharsets.UTF_8);
     }
 
     public static String encodeBase64(byte[] bytes) {
@@ -137,8 +127,7 @@ public final class StringEncoding {
     }
 
     public static byte[] compress(String value, float bufferRatio) {
-        BytesBuffer buf = LZ4Util.compress(encode(value), BLOCK_SIZE,
-                                           bufferRatio);
+        BytesBuffer buf = LZ4Util.compress(encode(value), BLOCK_SIZE, bufferRatio);
         return buf.bytes();
     }
 
@@ -155,8 +144,7 @@ public final class StringEncoding {
         return BCrypt.hashpw(password, BCrypt.gensalt(4));
     }
 
-    public static boolean checkPassword(String candidatePassword,
-                                        String dbPassword) {
+    public static boolean checkPassword(String candidatePassword, String dbPassword) {
         return BCrypt.checkpw(candidatePassword, dbPassword);
     }
 
@@ -177,8 +165,7 @@ public final class StringEncoding {
                 return UUID.fromString(value);
             }
             // UUID represented by hex string
-            E.checkArgument(value.length() == 32,
-                            "Invalid UUID string: %s", value);
+            E.checkArgument(value.length() == 32, "Invalid UUID string: %s", value);
             String high = value.substring(0, 16);
             String low = value.substring(16);
             return new UUID(Long.parseUnsignedLong(high, 16),
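
The charset change above swaps the checked-exception handling around getBytes("UTF-8") for
java.nio.charset.StandardCharsets, which can never throw UnsupportedEncodingException; a small
stand-alone sketch of the same idea, together with the hex-to-UUID split shown in the last hunk
(class name and sample values are illustrative only):

    import java.nio.charset.StandardCharsets;
    import java.util.UUID;

    public class Utf8AndUuidSketch {
        public static void main(String[] args) {
            // StandardCharsets.UTF_8 needs no try/catch, unlike the "UTF-8" string lookup
            byte[] bytes = "HugeGraph".getBytes(StandardCharsets.UTF_8);
            String text = new String(bytes, StandardCharsets.UTF_8);
            System.out.println(text);

            // A 32-char hex string split into two unsigned 64-bit halves forms a UUID
            String hex = "0123456789abcdef0123456789abcdef";
            UUID uuid = new UUID(Long.parseUnsignedLong(hex.substring(0, 16), 16),
                                 Long.parseUnsignedLong(hex.substring(16), 16));
            System.out.println(uuid);
        }
    }
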
diff --git a/hugegraph-test/src/main/java/org/apache/hugegraph/tinkerpop/ProcessBasicSuite.java b/hugegraph-test/src/main/java/org/apache/hugegraph/tinkerpop/ProcessBasicSuite.java
index 16b202409..3b088bc36 100644
--- a/hugegraph-test/src/main/java/org/apache/hugegraph/tinkerpop/ProcessBasicSuite.java
+++ b/hugegraph-test/src/main/java/org/apache/hugegraph/tinkerpop/ProcessBasicSuite.java
@@ -201,7 +201,7 @@ public class ProcessBasicSuite extends AbstractGremlinSuite {
      * A list of the minimum set of base tests that
      * Gremlin flavors should implement to be compliant with Gremlin.
      */
-    private static final Class<?>[] TESTS_TO_ENFORCE = new Class<?>[] {
+    private static final Class<?>[] TESTS_TO_ENFORCE = new Class<?>[]{
         // branch
         BranchTest.class,
         ChooseTest.class,
@@ -267,17 +267,14 @@ public class ProcessBasicSuite extends AbstractGremlinSuite {
     };
 
     public ProcessBasicSuite(final Class<?> klass,
-                             final RunnerBuilder builder)
-                             throws InitializationError {
+                             final RunnerBuilder builder) throws InitializationError {
         super(klass, builder, ALL_TESTS, TESTS_TO_ENFORCE, true,
               TraversalEngine.Type.STANDARD);
         RegisterUtil.registerBackends();
     }
 
-    public ProcessBasicSuite(final Class<?> klass,
-                             final RunnerBuilder builder,
-                             final Class<?>[] testsToExecute)
-                             throws InitializationError {
+    public ProcessBasicSuite(final Class<?> klass, final RunnerBuilder builder,
+                             final Class<?>[] testsToExecute) throws InitializationError {
         super(klass, builder, testsToExecute, TESTS_TO_ENFORCE, true,
               TraversalEngine.Type.STANDARD);
         RegisterUtil.registerBackends();
diff --git a/hugegraph-test/src/main/java/org/apache/hugegraph/tinkerpop/StructureBasicSuite.java b/hugegraph-test/src/main/java/org/apache/hugegraph/tinkerpop/StructureBasicSuite.java
index a01cb6c07..4b2f0065e 100644
--- a/hugegraph-test/src/main/java/org/apache/hugegraph/tinkerpop/StructureBasicSuite.java
+++ b/hugegraph-test/src/main/java/org/apache/hugegraph/tinkerpop/StructureBasicSuite.java
@@ -17,7 +17,7 @@
 
 package org.apache.hugegraph.tinkerpop;
 
-import org.apache.commons.configuration.ConfigurationException;
+import org.apache.hugegraph.dist.RegisterUtil;
 import org.apache.tinkerpop.gremlin.AbstractGremlinSuite;
 import org.apache.tinkerpop.gremlin.GraphManager;
 import org.apache.tinkerpop.gremlin.GraphProvider;
@@ -54,8 +54,6 @@ import org.junit.runners.model.InitializationError;
 import org.junit.runners.model.RunnerBuilder;
 import org.junit.runners.model.Statement;
 
-import org.apache.hugegraph.dist.RegisterUtil;
-
 /**
  * Standard structure test suite for tinkerpop graph
  *
@@ -100,8 +98,7 @@ public class StructureBasicSuite extends AbstractGremlinSuite {
 
     public StructureBasicSuite(final Class<?> klass,
                                final RunnerBuilder builder)
-                               throws InitializationError,
-                                      ConfigurationException {
+                               throws InitializationError {
         super(klass, builder, ALL_TESTS, null, true,
               TraversalEngine.Type.STANDARD);
 
