This is an automated email from the ASF dual-hosted git repository.

vgalaxies pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/incubator-hugegraph.git


The following commit(s) were added to refs/heads/master by this push:
     new 1a82d8311 feat(clustertest): add basic MiniCluster module for distributed system (#2615)
1a82d8311 is described below

commit 1a82d83117f556e5d3544560598e1770c85050f2
Author: HaoJin Yang <[email protected]>
AuthorDate: Tue Oct 15 11:05:43 2024 +0800

    feat(clustertest): add basic MiniCluster module for distributed system (#2615)
    
    Co-authored-by: imbajin <[email protected]>
    Co-authored-by: V_Galaxy <[email protected]>
---
 .github/workflows/cluster-test-ci.yml              |  52 ++++++
 .../hugegraph-clustertest-dist/pom.xml             |  78 ++++++++
 .../src/assembly/descriptor/assembly.xml           |  50 +++++
 .../static/conf/hugegraph.properties.template      | 126 +++++++++++++
 .../static/conf/pd-application.yml.template        |  80 ++++++++
 .../static/conf/rest-server.properties.template    |  71 +++++++
 .../static/conf/store-application.yml.template     |  64 +++++++
 .../hugegraph-clustertest-minicluster/pom.xml      |  66 +++++++
 .../apache/hugegraph/ct/base/ClusterConstant.java  | 135 ++++++++++++++
 .../java/org/apache/hugegraph/ct/base/EnvType.java |  29 +++
 .../java/org/apache/hugegraph/ct/base/EnvUtil.java |  67 +++++++
 .../org/apache/hugegraph/ct/base/HGTestLogger.java |  32 ++++
 .../apache/hugegraph/ct/config/AbstractConfig.java |  82 +++++++++
 .../apache/hugegraph/ct/config/ClusterConfig.java  | 130 +++++++++++++
 .../apache/hugegraph/ct/config/GraphConfig.java    |  38 ++++
 .../org/apache/hugegraph/ct/config/PDConfig.java   |  70 +++++++
 .../apache/hugegraph/ct/config/ServerConfig.java   |  54 ++++++
 .../apache/hugegraph/ct/config/StoreConfig.java    |  57 ++++++
 .../org/apache/hugegraph/ct/env/AbstractEnv.java   | 182 ++++++++++++++++++
 .../java/org/apache/hugegraph/ct/env/BaseEnv.java  |  49 +++++
 .../org/apache/hugegraph/ct/env/EnvFactory.java    |  46 +++++
 .../org/apache/hugegraph/ct/env/MultiNodeEnv.java  |  36 ++++
 .../org/apache/hugegraph/ct/env/SimpleEnv.java     |  30 +++
 .../hugegraph/ct/node/AbstractNodeWrapper.java     | 191 +++++++++++++++++++
 .../apache/hugegraph/ct/node/BaseNodeWrapper.java  |  39 ++++
 .../apache/hugegraph/ct/node/PDNodeWrapper.java    |  93 ++++++++++
 .../hugegraph/ct/node/ServerNodeWrapper.java       | 105 +++++++++++
 .../apache/hugegraph/ct/node/StoreNodeWrapper.java |  94 ++++++++++
 .../hugegraph-clustertest-test/pom.xml             |  98 ++++++++++
 .../MultiClusterTest/BaseMultiClusterTest.java     |  64 +++++++
 .../MultiClusterTest/MultiClusterDeployTest.java   | 203 +++++++++++++++++++++
 .../MultiClusterTest/MultiClusterFileTest.java     |  47 +++++
 .../MultiClusterTest/MultiClusterSuiteTest.java    |  33 ++++
 .../SimpleClusterTest/BaseSimpleTest.java          |  69 +++++++
 .../SimpleClusterTest/SimpleClusterDeployTest.java | 200 ++++++++++++++++++++
 .../SimpleClusterTest/SimpleClusterFileTest.java   |  48 +++++
 .../SimpleClusterTest/SimpleClusterSuiteTest.java  |  33 ++++
 hugegraph-cluster-test/pom.xml                     | 139 ++++++++++++++
 .../scripts/dependency/known-dependencies.txt      |  33 ++--
 pom.xml                                            |   2 +
 40 files changed, 3101 insertions(+), 14 deletions(-)

diff --git a/.github/workflows/cluster-test-ci.yml b/.github/workflows/cluster-test-ci.yml
new file mode 100644
index 000000000..7abebc722
--- /dev/null
+++ b/.github/workflows/cluster-test-ci.yml
@@ -0,0 +1,52 @@
+name: "Cluster Test CI"
+
+on:
+  push:
+    branches:
+      - master
+      - 'release-*'
+      - 'test-*'
+  pull_request:
+
+jobs:
+  cluster-test:
+    runs-on: ubuntu-latest
+    env:
+      USE_STAGE: 'true' # Whether to include the stage repository.
+
+    steps:
+      - name: Install JDK 11
+        uses: actions/setup-java@v3
+        with:
+          java-version: '11'
+          distribution: 'zulu'
+
+      - name: Cache Maven packages
+        uses: actions/cache@v3
+        with:
+          path: ~/.m2
+          key: ${{ runner.os }}-m2-${{ hashFiles('**/pom.xml') }}
+          restore-keys: ${{ runner.os }}-m2
+
+      - name: Checkout
+        uses: actions/checkout@v4
+        with:
+          fetch-depth: 5
+
+      - name: use staged maven repo settings
+        if: ${{ env.USE_STAGE == 'true' }}
+        run: |
+          cp $HOME/.m2/settings.xml /tmp/settings.xml
+          mv -vf .github/configs/settings.xml $HOME/.m2/settings.xml
+
+      - name: Package
+        run: |
+          mvn clean package -U -Dmaven.javadoc.skip=true -Dmaven.test.skip=true -ntp
+
+      - name: Run simple cluster test
+        run: |
+          mvn test -pl hugegraph-cluster-test/hugegraph-clustertest-test -am -P simple-cluster-test -DskipCommonsTests=true
+
+      - name: Run multi cluster test
+        run: |
+          mvn test -pl hugegraph-cluster-test/hugegraph-clustertest-test -am -P multi-cluster-test -DskipCommonsTests=true
diff --git a/hugegraph-cluster-test/hugegraph-clustertest-dist/pom.xml b/hugegraph-cluster-test/hugegraph-clustertest-dist/pom.xml
new file mode 100644
index 000000000..20e3efc59
--- /dev/null
+++ b/hugegraph-cluster-test/hugegraph-clustertest-dist/pom.xml
@@ -0,0 +1,78 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!--
+  ~ Licensed to the Apache Software Foundation (ASF) under one or more
+  ~ contributor license agreements.  See the NOTICE file distributed with
+  ~ this work for additional information regarding copyright ownership.
+  ~ The ASF licenses this file to You under the Apache License, Version 2.0
+  ~ (the "License"); you may not use this file except in compliance with
+  ~ the License.  You may obtain a copy of the License at
+  ~
+  ~     http://www.apache.org/licenses/LICENSE-2.0
+  ~
+  ~ Unless required by applicable law or agreed to in writing, software
+  ~ distributed under the License is distributed on an "AS IS" BASIS,
+  ~ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+  ~ See the License for the specific language governing permissions and
+  ~ limitations under the License.
+  -->
+
+<project xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+         xmlns="http://maven.apache.org/POM/4.0.0"
+         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
+    <modelVersion>4.0.0</modelVersion>
+    <parent>
+        <groupId>org.apache.hugegraph</groupId>
+        <artifactId>hugegraph-cluster-test</artifactId>
+        <version>${revision}</version>
+        <relativePath>../pom.xml</relativePath>
+    </parent>
+
+    <artifactId>hugegraph-clustertest-dist</artifactId>
+
+    <properties>
+        <dist.dir>${project.parent.basedir}</dist.dir>
+        <shell-executable>bash</shell-executable>
+        <assembly.dir>${project.basedir}/src/assembly</assembly.dir>
+        <assembly.descriptor.dir>${assembly.dir}/descriptor</assembly.descriptor.dir>
+        <assembly.static.dir>${assembly.dir}/static</assembly.static.dir>
+        <executable.jar.name>hg-ct</executable.jar.name>
+    </properties>
+
+    <build>
+        <plugins>
+            <plugin>
+                <artifactId>maven-assembly-plugin</artifactId>
+                <version>2.4</version>
+                <executions>
+                    <execution>
+                        <id>assembly-hugegraph-ct</id>
+                        <phase>package</phase>
+                        <goals>
+                            <goal>single</goal>
+                        </goals>
+                        <configuration>
+                            <attach>false</attach>
+                            <appendAssemblyId>false</appendAssemblyId>
+                            <outputDirectory>${dist.dir}</outputDirectory>
+                            <descriptors>
+                                <descriptor>
+                                    ${assembly.descriptor.dir}/assembly.xml
+                                </descriptor>
+                            </descriptors>
+                            <finalName>${final.name}</finalName>
+                        </configuration>
+                    </execution>
+                </executions>
+            </plugin>
+        </plugins>
+    </build>
+
+    <dependencies>
+        <dependency>
+            <groupId>org.apache.hugegraph</groupId>
+            <artifactId>hugegraph-clustertest-minicluster</artifactId>
+            <version>${revision}</version>
+        </dependency>
+    </dependencies>
+
+</project>
diff --git a/hugegraph-cluster-test/hugegraph-clustertest-dist/src/assembly/descriptor/assembly.xml b/hugegraph-cluster-test/hugegraph-clustertest-dist/src/assembly/descriptor/assembly.xml
new file mode 100644
index 000000000..3db49f426
--- /dev/null
+++ b/hugegraph-cluster-test/hugegraph-clustertest-dist/src/assembly/descriptor/assembly.xml
@@ -0,0 +1,50 @@
+<!--
+  ~ Licensed to the Apache Software Foundation (ASF) under one or more
+  ~ contributor license agreements.  See the NOTICE file distributed with
+  ~ this work for additional information regarding copyright ownership.
+  ~ The ASF licenses this file to You under the Apache License, Version 2.0
+  ~ (the "License"); you may not use this file except in compliance with
+  ~ the License.  You may obtain a copy of the License at
+  ~
+  ~     http://www.apache.org/licenses/LICENSE-2.0
+  ~
+  ~ Unless required by applicable law or agreed to in writing, software
+  ~ distributed under the License is distributed on an "AS IS" BASIS,
+  ~ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+  ~ See the License for the specific language governing permissions and
+  ~ limitations under the License.
+  -->
+
+<assembly>
+    <id>distribution</id>
+    <includeBaseDirectory>false</includeBaseDirectory>
+
+    <formats>
+        <format>dir</format>
+    </formats>
+
+    <fileSets>
+        <fileSet>
+            <directory>${assembly.static.dir}</directory>
+            <outputDirectory>/</outputDirectory>
+            <includes>
+                <include>**/*</include>
+            </includes>
+        </fileSet>
+
+    </fileSets>
+
+    <dependencySets>
+        <!-- code jars -->
+        <dependencySet>
+            <outputDirectory>/lib</outputDirectory>
+            <unpack>false</unpack>
+            <scope>runtime</scope>
+            <useProjectArtifact>false</useProjectArtifact>
+            <includes>
+                <include>org.apache.hugegraph:${executable.jar.name}:jar:*</include>
+            </includes>
+        </dependencySet>
+    </dependencySets>
+
+</assembly>
diff --git a/hugegraph-cluster-test/hugegraph-clustertest-dist/src/assembly/static/conf/hugegraph.properties.template b/hugegraph-cluster-test/hugegraph-clustertest-dist/src/assembly/static/conf/hugegraph.properties.template
new file mode 100644
index 000000000..8eaf0adff
--- /dev/null
+++ b/hugegraph-cluster-test/hugegraph-clustertest-dist/src/assembly/static/conf/hugegraph.properties.template
@@ -0,0 +1,126 @@
+#
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+# gremlin entrance to create graph
+# auth config: org.apache.hugegraph.auth.HugeFactoryAuthProxy
+gremlin.graph=org.apache.hugegraph.HugeFactory
+
+# cache config
+#schema.cache_capacity=100000
+# vertex-cache default is 1000w, 10min expired
+vertex.cache_type=l2
+#vertex.cache_capacity=10000000
+#vertex.cache_expire=600
+# edge-cache default is 100w, 10min expired
+edge.cache_type=l2
+#edge.cache_capacity=1000000
+#edge.cache_expire=600
+
+
+# schema illegal name template
+#schema.illegal_name_regex=\s+|~.*
+
+#vertex.default_label=vertex
+
+backend=hstore
+serializer=binary
+
+store=hugegraph
+
+# pd config
+pd.peers=$PD_PEERS_LIST$
+
+# task config
+task.scheduler_type=local
+task.schedule_period=10
+task.retry=0
+task.wait_timeout=10
+
+# raft config
+raft.mode=false
+raft.path=./raft-log
+raft.safe_read=true
+raft.use_replicator_pipeline=true
+raft.election_timeout=10000
+raft.snapshot_interval=3600
+raft.backend_threads=48
+raft.read_index_threads=8
+raft.snapshot_threads=4
+raft.snapshot_parallel_compress=false
+raft.snapshot_compress_threads=4
+raft.snapshot_decompress_threads=4
+raft.read_strategy=ReadOnlyLeaseBased
+raft.queue_size=16384
+raft.queue_publish_timeout=60
+raft.apply_batch=1
+raft.rpc_threads=80
+raft.rpc_connect_timeout=5000
+raft.rpc_timeout=60
+raft.install_snapshot_rpc_timeout=36000
+
+# search config
+search.text_analyzer=jieba
+search.text_analyzer_mode=INDEX
+
+# rocksdb backend config
+#rocksdb.data_path=/path/to/disk
+#rocksdb.wal_path=/path/to/disk
+
+
+# cassandra backend config
+cassandra.host=localhost
+cassandra.port=9042
+cassandra.username=
+cassandra.password=
+#cassandra.connect_timeout=5
+#cassandra.read_timeout=20
+#cassandra.keyspace.strategy=SimpleStrategy
+#cassandra.keyspace.replication=3
+
+# hbase backend config
+#hbase.hosts=localhost
+#hbase.port=2181
+#hbase.znode_parent=/hbase
+#hbase.threads_max=64
+# IMPORTANT: recommend to modify the HBase partition number
+#            by the actual/env data amount & RS amount before init store
+#            It will influence the load speed a lot
+#hbase.enable_partition=true
+#hbase.vertex_partitions=10
+#hbase.edge_partitions=30
+
+# mysql backend config
+#jdbc.driver=com.mysql.jdbc.Driver
+#jdbc.url=jdbc:mysql://127.0.0.1:3306
+#jdbc.username=root
+#jdbc.password=
+#jdbc.reconnect_max_times=3
+#jdbc.reconnect_interval=3
+#jdbc.ssl_mode=false
+
+# postgresql & cockroachdb backend config
+#jdbc.driver=org.postgresql.Driver
+#jdbc.url=jdbc:postgresql://localhost:5432/
+#jdbc.username=postgres
+#jdbc.password=
+#jdbc.postgresql.connect_database=template1
+
+# palo backend config
+#palo.host=127.0.0.1
+#palo.poll_interval=10
+#palo.temp_dir=./palo-data
+#palo.file_limit_size=32
diff --git a/hugegraph-cluster-test/hugegraph-clustertest-dist/src/assembly/static/conf/pd-application.yml.template b/hugegraph-cluster-test/hugegraph-clustertest-dist/src/assembly/static/conf/pd-application.yml.template
new file mode 100644
index 000000000..87229aabc
--- /dev/null
+++ b/hugegraph-cluster-test/hugegraph-clustertest-dist/src/assembly/static/conf/pd-application.yml.template
@@ -0,0 +1,80 @@
+#
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+spring:
+  application:
+    name: hugegraph-pd
+
+management:
+  metrics:
+    export:
+      prometheus:
+        enabled: true
+  endpoints:
+    web:
+      exposure:
+        include: "*"
+
+logging:
+  config: 'file:./conf/log4j2.xml'
+license:
+  verify-path: ./conf/verify-license.json
+  license-path: ./conf/hugegraph.license
+grpc:
+  port: $GRPC_PORT$
+  # The service address of grpc needs to be changed to the actual local IPv4 address when deploying.
+  host: 127.0.0.1
+
+server:
+  # REST service port number
+  port : $REST_PORT$
+
+pd:
+  # Storage path
+  data-path: ./pd_data
+  # The check cycle of automatic expansion regularly checks the number of partitions in each store and automatically balances the number of partitions
+  patrol-interval: 1800
+  # The minimum number of surviving store nodes, less than which the entire cluster is unavailable
+  initial-store-count: $STORE_COUNT$
+  # The initial store list, grpc IP: grpc port, the store in the list is automatically activated
+  initial-store-list: $STORE_GRPC_LIST$
+
+
+raft:
+  # The address of the local raft service
+  address: $RAFT_ADDRESS$
+  # The service address of the PD cluster
+  peers-list: $RAFT_PEERS_LIST$
+
+store:
+  # The time when the store went offline. After that time, the store is considered permanently unavailable, and the replica is allocated to another machine, in seconds
+  max-down-time: 172800
+  # Specifies whether to enable store monitoring data storage
+  monitor_data_enabled: true
+  # The interval between monitoring data, minute, hour, second
+  # default: 1 min * 1 day = 1440
+  monitor_data_interval: 1 minute
+  # Retention time of monitoring data is 1 day; day, month, year
+  monitor_data_retention: 1 day
+  initial-store-count: 1
+
+partition:
+  # Default number of replicas per partition
+  default-shard-count: 1
+  # The default maximum number of replicas per machine
+  # the initial number of partitions= store-max-shard-count * store-number / default-shard-count
+  store-max-shard-count: 12
diff --git a/hugegraph-cluster-test/hugegraph-clustertest-dist/src/assembly/static/conf/rest-server.properties.template b/hugegraph-cluster-test/hugegraph-clustertest-dist/src/assembly/static/conf/rest-server.properties.template
new file mode 100644
index 000000000..8f4e9bf61
--- /dev/null
+++ b/hugegraph-cluster-test/hugegraph-clustertest-dist/src/assembly/static/conf/rest-server.properties.template
@@ -0,0 +1,71 @@
+#
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+# bind url
+# could use '0.0.0.0' or specified (real)IP to expose external network access
+restserver.url=http://$REST_SERVER_ADDRESS$
+# gremlin server url, need to be consistent with host and port in gremlin-server.yaml
+#gremlinserver.url=http://$REST_SERVER_ADDRESS$
+
+graphs=./conf/graphs
+
+# The maximum thread ratio for batch writing, only take effect if the batch.max_write_threads is 0
+batch.max_write_ratio=80
+batch.max_write_threads=0
+
+# configuration of arthas
+arthas.telnet_port=8562
+arthas.http_port=8561
+arthas.ip=127.0.0.1
+arthas.disabled_commands=jad
+
+# authentication configs
+# choose 'org.apache.hugegraph.auth.StandardAuthenticator' or
+# 'org.apache.hugegraph.auth.ConfigAuthenticator'
+#auth.authenticator=
+
+# for StandardAuthenticator mode
+#auth.graph_store=hugegraph
+# auth client config
+#auth.remote_url=127.0.0.1:8899,127.0.0.1:8898,127.0.0.1:8897
+
+# for ConfigAuthenticator mode
+#auth.admin_token=
+#auth.user_tokens=[]
+
+# rpc server configs for multi graph-servers or raft-servers
+rpc.server_host=127.0.0.1
+rpc.server_port=$RPC_PORT$
+#rpc.server_timeout=30
+
+# rpc client configs (like enable to keep cache consistency)
+#rpc.remote_url=127.0.0.1:8091,127.0.0.1:8092,127.0.0.1:8093
+#rpc.client_connect_timeout=20
+#rpc.client_reconnect_period=10
+#rpc.client_read_timeout=40
+#rpc.client_retries=3
+#rpc.client_load_balancer=consistentHash
+
+# raft group initial peers
+#raft.group_peers=127.0.0.1:8091,127.0.0.1:8092,127.0.0.1:8093
+
+# lightweight load balancing (beta)
+server.id=$SERVER_ID$
+server.role=$ROLE$
+
+# slow query log
+log.slow_query_threshold=1000
diff --git a/hugegraph-cluster-test/hugegraph-clustertest-dist/src/assembly/static/conf/store-application.yml.template b/hugegraph-cluster-test/hugegraph-clustertest-dist/src/assembly/static/conf/store-application.yml.template
new file mode 100644
index 000000000..93ceb7638
--- /dev/null
+++ b/hugegraph-cluster-test/hugegraph-clustertest-dist/src/assembly/static/conf/store-application.yml.template
@@ -0,0 +1,64 @@
+#
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+pdserver:
+  # PD service address, multiple PD addresses separated by commas
+  address: $PD_SERVER_ADDRESS$
+
+management:
+  metrics:
+    export:
+      prometheus:
+        enabled: true
+  endpoints:
+    web:
+      exposure:
+        include: "*"
+
+grpc:
+  # grpc service address
+  host: 127.0.0.1
+  port: $GRPC_PORT$
+  netty-server:
+    max-inbound-message-size: 1000MB
+raft:
+  # raft cache queue size
+  disruptorBufferSize: 1024
+  address: $RAFT_ADDRESS$
+  max-log-file-size: 600000000000
+  # Snapshot generation interval, in seconds
+  snapshotInterval: 1800
+server:
+  # rest service address
+  port: $REST_PORT$
+
+app:
+  # Storage path, support multiple paths, separated by commas
+  data-path: ./storage
+  #raft-path: ./storage
+
+spring:
+  application:
+    name: store-node-grpc-server
+  profiles:
+    active: default
+    include: pd
+
+logging:
+  config: 'file:./conf/log4j2.xml'
+  level:
+    root: info
diff --git a/hugegraph-cluster-test/hugegraph-clustertest-minicluster/pom.xml b/hugegraph-cluster-test/hugegraph-clustertest-minicluster/pom.xml
new file mode 100644
index 000000000..8feb6181f
--- /dev/null
+++ b/hugegraph-cluster-test/hugegraph-clustertest-minicluster/pom.xml
@@ -0,0 +1,66 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!--
+  ~ Licensed to the Apache Software Foundation (ASF) under one or more
+  ~ contributor license agreements.  See the NOTICE file distributed with
+  ~ this work for additional information regarding copyright ownership.
+  ~ The ASF licenses this file to You under the Apache License, Version 2.0
+  ~ (the "License"); you may not use this file except in compliance with
+  ~ the License.  You may obtain a copy of the License at
+  ~
+  ~     http://www.apache.org/licenses/LICENSE-2.0
+  ~
+  ~ Unless required by applicable law or agreed to in writing, software
+  ~ distributed under the License is distributed on an "AS IS" BASIS,
+  ~ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+  ~ See the License for the specific language governing permissions and
+  ~ limitations under the License.
+  -->
+
+<project xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+         xmlns="http://maven.apache.org/POM/4.0.0"
+         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
+    <modelVersion>4.0.0</modelVersion>
+    <artifactId>hugegraph-clustertest-minicluster</artifactId>
+
+    <parent>
+        <groupId>org.apache.hugegraph</groupId>
+        <artifactId>hugegraph-cluster-test</artifactId>
+        <version>${revision}</version>
+    </parent>
+
+
+    <properties>
+        <maven.compiler.source>11</maven.compiler.source>
+        <maven.compiler.target>11</maven.compiler.target>
+        <project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
+        <log4j2.version>2.17.0</log4j2.version>
+    </properties>
+
+    <dependencies>
+        <dependency>
+            <groupId>org.apache.commons</groupId>
+            <artifactId>commons-lang3</artifactId>
+            <version>3.13.0</version>
+            <scope>compile</scope>
+        </dependency>
+        <dependency>
+            <groupId>commons-io</groupId>
+            <artifactId>commons-io</artifactId>
+            <version>2.12.0</version>
+            <scope>compile</scope>
+        </dependency>
+        <dependency>
+            <groupId>org.slf4j</groupId>
+            <artifactId>slf4j-api</artifactId>
+            <version>2.0.9</version>
+            <scope>compile</scope>
+        </dependency>
+        <dependency>
+            <groupId>org.projectlombok</groupId>
+            <artifactId>lombok</artifactId>
+            <version>1.18.24</version>
+            <scope>compile</scope>
+        </dependency>
+    </dependencies>
+
+</project>
diff --git a/hugegraph-cluster-test/hugegraph-clustertest-minicluster/src/main/java/org/apache/hugegraph/ct/base/ClusterConstant.java b/hugegraph-cluster-test/hugegraph-clustertest-minicluster/src/main/java/org/apache/hugegraph/ct/base/ClusterConstant.java
new file mode 100644
index 000000000..9120c0cf9
--- /dev/null
+++ b/hugegraph-cluster-test/hugegraph-clustertest-minicluster/src/main/java/org/apache/hugegraph/ct/base/ClusterConstant.java
@@ -0,0 +1,135 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hugegraph.ct.base;
+
+import java.io.File;
+import java.nio.file.Path;
+import java.nio.file.Paths;
+import java.util.Objects;
+
+import org.apache.commons.lang3.SystemUtils;
+
+public class ClusterConstant {
+
+    public static final String LOG = "logs";
+    public static final String PROJECT_DIR = getProjectDir();
+    public static final String LIB_DIR = "lib";
+    public static final String EXT_DIR = "ext";
+    public static final String PLUGINS_DIR = "plugins";
+    public static final String BIN_DIR = "bin";
+    public static final String CONF_DIR = "conf";
+    public static final String PD_PACKAGE_PREFIX = "apache-hugegraph-pd-incubating";
+    public static final String PD_JAR_PREFIX = "hg-pd-service";
+    public static final String STORE_PACKAGE_PREFIX = "apache-hugegraph-store-incubating";
+    public static final String STORE_JAR_PREFIX = "hg-store-node";
+    public static final String SERVER_PACKAGE_PREFIX = "apache-hugegraph-server-incubating";
+    public static final String CT_PACKAGE_PREFIX = "apache-hugegraph-ct-incubating";
+    public static final String APPLICATION_FILE = "application.yml";
+    public static final String SERVER_PROPERTIES = "rest-server.properties";
+    public static final String HUGEGRAPH_PROPERTIES = "graphs/hugegraph.properties";
+    public static final String LOG4J_FILE = "log4j2.xml";
+    public static final String PD_TEMPLATE_FILE = "pd-application.yml.template";
+    public static final String STORE_TEMPLATE_FILE = "store-application.yml.template";
+    public static final String SERVER_TEMPLATE_FILE = "rest-server.properties.template";
+    public static final String GRAPH_TEMPLATE_FILE = "hugegraph.properties.template";
+    public static final String GREMLIN_DRIVER_SETTING_FILE = "gremlin-driver-settings.yaml";
+    public static final String GREMLIN_SERVER_FILE = "gremlin-server.yaml";
+    public static final String REMOTE_SETTING_FILE = "remote.yaml";
+    public static final String REMOTE_OBJECTS_SETTING_FILE = "remote-objects.yaml";
+    public static final String EMPTY_SAMPLE_GROOVY_FILE = "scripts/empty-sample.groovy";
+    public static final String EXAMPLE_GROOVY_FILE = "scripts/example.groovy";
+    public static final String LOCALHOST = "127.0.0.1";
+
+    public static final String JAVA_CMD =
+            System.getProperty("java.home") + File.separator + BIN_DIR + File.separator +
+            (SystemUtils.IS_OS_WINDOWS ? "java.exe" : "java");
+    public static final String PD_DIST_PATH =
+            PROJECT_DIR + File.separator + "hugegraph-pd" + File.separator;
+    public static final String PD_LIB_PATH =
+            getFileInDir(PD_DIST_PATH, PD_PACKAGE_PREFIX) + File.separator + LIB_DIR +
+            File.separator;
+    public static final String PD_TEMPLATE_PATH =
+            getFileInDir(PD_DIST_PATH, PD_PACKAGE_PREFIX) + File.separator + CONF_DIR +
+            File.separator;
+    public static final String STORE_DIST_PATH =
+            PROJECT_DIR + File.separator + "hugegraph-store" + File.separator;
+    public static final String STORE_LIB_PATH =
+            getFileInDir(STORE_DIST_PATH, STORE_PACKAGE_PREFIX) + File.separator + LIB_DIR +
+            File.separator;
+    public static final String STORE_TEMPLATE_PATH =
+            getFileInDir(STORE_DIST_PATH, STORE_PACKAGE_PREFIX) + File.separator + CONF_DIR +
+            File.separator;
+    public static final String SERVER_DIST_PATH =
+            PROJECT_DIR + File.separator + "hugegraph-server" + File.separator;
+    public static final String SERVER_LIB_PATH =
+            getFileInDir(SERVER_DIST_PATH, SERVER_PACKAGE_PREFIX) +
+            File.separator;
+    public static final String SERVER_PACKAGE_PATH =
+            getFileInDir(SERVER_DIST_PATH, SERVER_PACKAGE_PREFIX) +
+            File.separator;
+    public static final String SERVER_TEMPLATE_PATH =
+            SERVER_PACKAGE_PATH + CONF_DIR + File.separator;
+    public static final String CT_DIST_PATH =
+            PROJECT_DIR + File.separator + "hugegraph-cluster-test" + File.separator;
+    public static final String CT_PACKAGE_PATH =
+            getFileInDir(CT_DIST_PATH, CT_PACKAGE_PREFIX) + File.separator;
+    public static final String CONFIG_FILE_PATH = CT_PACKAGE_PATH + CONF_DIR + File.separator;
+
+    private ClusterConstant() {
+        throw new IllegalStateException("Utility class");
+    }
+
+    public static String getFileInDir(String path, String fileName) {
+        File dir = new File(path);
+        if (dir.exists() && dir.isDirectory()) {
+            for (File file : Objects.requireNonNull(dir.listFiles())) {
+                if (file.getName().startsWith(fileName) && !file.getName().endsWith(".gz")) {
+                    return path + file.getName();
+                }
+            }
+        }
+        return "";
+    }
+
+    public static boolean isJava11OrHigher() {
+        String version = System.getProperty("java.version");
+        if (version.startsWith("1.")) {
+            version = version.substring(2, 3);
+        } else {
+            int dot = version.indexOf(".");
+            if (dot != -1) {
+                version = version.substring(0, dot);
+            }
+        }
+        int versionNumber = Integer.parseInt(version);
+        return versionNumber >= 11;
+    }
+
+    public static String getProjectDir() {
+        String userDir = System.getProperty("user.dir"); // get current dir
+        Path path = Paths.get(userDir);
+
+        if (userDir.endsWith("hugegraph-cluster-test")) {
+            return path.getParent().toString();
+        } else if (userDir.endsWith("hugegraph-clustertest-test")) {
+            return path.getParent().getParent().toString();
+        }
+
+        return userDir; // Return current dir if not matched
+    }
+}
diff --git a/hugegraph-cluster-test/hugegraph-clustertest-minicluster/src/main/java/org/apache/hugegraph/ct/base/EnvType.java b/hugegraph-cluster-test/hugegraph-clustertest-minicluster/src/main/java/org/apache/hugegraph/ct/base/EnvType.java
new file mode 100644
index 000000000..56449a42b
--- /dev/null
+++ b/hugegraph-cluster-test/hugegraph-clustertest-minicluster/src/main/java/org/apache/hugegraph/ct/base/EnvType.java
@@ -0,0 +1,29 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hugegraph.ct.base;
+
+public enum EnvType {
+
+    SingleNode,
+    MultiNode;
+
+    public static EnvType getSystemEnvType() {
+        String envType = System.getProperty("test_env", SingleNode.toString());
+        return EnvType.valueOf(envType);
+    }
+}
diff --git a/hugegraph-cluster-test/hugegraph-clustertest-minicluster/src/main/java/org/apache/hugegraph/ct/base/EnvUtil.java b/hugegraph-cluster-test/hugegraph-clustertest-minicluster/src/main/java/org/apache/hugegraph/ct/base/EnvUtil.java
new file mode 100644
index 000000000..4d4bab383
--- /dev/null
+++ b/hugegraph-cluster-test/hugegraph-clustertest-minicluster/src/main/java/org/apache/hugegraph/ct/base/EnvUtil.java
@@ -0,0 +1,67 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hugegraph.ct.base;
+
+import java.io.IOException;
+import java.net.ServerSocket;
+import java.nio.file.Files;
+import java.nio.file.Path;
+import java.nio.file.StandardCopyOption;
+import java.util.HashSet;
+import java.util.Set;
+
+import org.slf4j.Logger;
+
+public class EnvUtil {
+
+    private static final Logger LOG = HGTestLogger.UTIL_LOG;
+    private static final Set<Integer> ports = new HashSet<>();
+
+    public static int getAvailablePort() {
+        try {
+            int port = -1;
+            while (port < 0 || ports.contains(port)) {
+                ServerSocket socket = new ServerSocket(0);
+                port = socket.getLocalPort();
+                socket.close();
+            }
+            ports.add(port);
+            return port;
+        } catch (IOException e) {
+            LOG.error("Failed to get available ports", e);
+            return -1;
+        }
+    }
+
+    public static void copyFileToDestination(Path source, Path destination) {
+        try {
+            ensureParentDirectoryExists(destination);
+            Files.copy(source, destination, StandardCopyOption.REPLACE_EXISTING);
+        } catch (IOException ioException) {
+            LOG.error("Failed to copy files to destination dir", ioException);
+            throw new RuntimeException(ioException);
+        }
+    }
+
+    private static void ensureParentDirectoryExists(Path destination) throws IOException {
+        Path parentDir = destination.getParent();
+        if (parentDir != null && Files.notExists(parentDir)) {
+            Files.createDirectories(parentDir);
+        }
+    }
+}
diff --git a/hugegraph-cluster-test/hugegraph-clustertest-minicluster/src/main/java/org/apache/hugegraph/ct/base/HGTestLogger.java b/hugegraph-cluster-test/hugegraph-clustertest-minicluster/src/main/java/org/apache/hugegraph/ct/base/HGTestLogger.java
new file mode 100644
index 000000000..ceef1e40b
--- /dev/null
+++ b/hugegraph-cluster-test/hugegraph-clustertest-minicluster/src/main/java/org/apache/hugegraph/ct/base/HGTestLogger.java
@@ -0,0 +1,32 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hugegraph.ct.base;
+
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import lombok.extern.slf4j.Slf4j;
+
+@Slf4j
+public class HGTestLogger {
+
+    public static Logger UTIL_LOG = LoggerFactory.getLogger(HGTestLogger.class);
+    public static Logger ENV_LOG = LoggerFactory.getLogger(HGTestLogger.class);
+    public static Logger CONFIG_LOG = LoggerFactory.getLogger(HGTestLogger.class);
+    public static Logger NODE_LOG = LoggerFactory.getLogger(HGTestLogger.class);
+}
diff --git a/hugegraph-cluster-test/hugegraph-clustertest-minicluster/src/main/java/org/apache/hugegraph/ct/config/AbstractConfig.java b/hugegraph-cluster-test/hugegraph-clustertest-minicluster/src/main/java/org/apache/hugegraph/ct/config/AbstractConfig.java
new file mode 100644
index 000000000..36a7240d2
--- /dev/null
+++ b/hugegraph-cluster-test/hugegraph-clustertest-minicluster/src/main/java/org/apache/hugegraph/ct/config/AbstractConfig.java
@@ -0,0 +1,82 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hugegraph.ct.config;
+
+import java.io.File;
+import java.io.FileWriter;
+import java.io.IOException;
+import java.nio.file.Files;
+import java.nio.file.Path;
+import java.nio.file.Paths;
+import java.util.HashMap;
+import java.util.Map;
+
+import org.apache.hugegraph.ct.base.HGTestLogger;
+import org.slf4j.Logger;
+
+public abstract class AbstractConfig {
+
+    protected static final Logger LOG = HGTestLogger.CONFIG_LOG;
+    protected String config;
+    protected Map<String, String> properties = new HashMap<>();
+    protected String fileName;
+
+    protected void readTemplate(Path filePath) {
+        try {
+            this.config = new String(Files.readAllBytes(filePath));
+        } catch (IOException e) {
+            LOG.error("failed to get file", e);
+        }
+    }
+
+    protected void updateConfigs() {
+        for (Map.Entry<String, String> entry : properties.entrySet()) {
+            String placeholder = "$" + entry.getKey() + "$";
+            this.config = this.config.replace(placeholder, entry.getValue());
+        }
+    }
+
+    public void writeConfig(String filePath) {
+        updateConfigs();
+        Path destPath = Paths.get(filePath + File.separator + this.fileName);
+        try {
+            if (Files.notExists(destPath.getParent())) {
+                Files.createDirectories(destPath.getParent());
+            }
+        } catch (IOException e) {
+            LOG.error("Failed to create dir", e);
+        }
+        try (FileWriter writer = new FileWriter(String.valueOf(destPath))) {
+            writer.write(this.config);
+        } catch (IOException e) {
+            LOG.error("Failed to write in file", e);
+        }
+    }
+
+    public String getProperty(String propertyName) {
+        return properties.get(propertyName);
+    }
+
+    protected void setProperty(String propertyName, String value) {
+        if (properties.containsKey(propertyName)) {
+            properties.replace(propertyName, value);
+        } else {
+            properties.put(propertyName, value);
+        }
+    }
+}
diff --git a/hugegraph-cluster-test/hugegraph-clustertest-minicluster/src/main/java/org/apache/hugegraph/ct/config/ClusterConfig.java b/hugegraph-cluster-test/hugegraph-clustertest-minicluster/src/main/java/org/apache/hugegraph/ct/config/ClusterConfig.java
new file mode 100644
index 000000000..c71e4b07e
--- /dev/null
+++ b/hugegraph-cluster-test/hugegraph-clustertest-minicluster/src/main/java/org/apache/hugegraph/ct/config/ClusterConfig.java
@@ -0,0 +1,130 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hugegraph.ct.config;
+
+import java.util.ArrayList;
+import java.util.List;
+
+import org.apache.hugegraph.ct.base.HGTestLogger;
+import org.slf4j.Logger;
+
+public class ClusterConfig {
+
+    protected static final Logger LOG = HGTestLogger.CONFIG_LOG;
+    protected List<PDConfig> pdConfigs;
+    protected List<StoreConfig> storeConfigs;
+    protected List<ServerConfig> serverConfigs;
+    protected List<GraphConfig> graphConfigs;
+
+    protected List<String> pdGrpcList, pdRaftList, storeGrpcList;
+
+    public ClusterConfig(int pdCnt, int storeCnt, int serverCnt) {
+        pdConfigs = new ArrayList<>();
+        storeConfigs = new ArrayList<>();
+        serverConfigs = new ArrayList<>();
+        graphConfigs = new ArrayList<>();
+        pdGrpcList = new ArrayList<>();
+        pdRaftList = new ArrayList<>();
+        storeGrpcList = new ArrayList<>();
+
+        for (int i = 0; i < pdCnt; i++) {
+            PDConfig pdConfig = new PDConfig();
+            pdConfig.setStoreCount(storeCnt);
+            pdConfigs.add(pdConfig);
+            pdGrpcList.add(pdConfig.getGrpcAddress());
+            pdRaftList.add(pdConfig.getRaftAddress());
+        }
+
+        for (int i = 0; i < storeCnt; i++) {
+            StoreConfig storeConfig = new StoreConfig();
+            storeConfig.setPDServerList(pdGrpcList);
+            storeConfigs.add(storeConfig);
+            storeGrpcList.add(storeConfig.getGrpcAddress());
+        }
+
+        for (int i = 0; i < serverCnt; i++) {
+            ServerConfig serverConfig = new ServerConfig();
+            serverConfigs.add(serverConfig);
+            GraphConfig graphConfig = new GraphConfig();
+            graphConfig.setPDPeersList(pdGrpcList);
+            graphConfigs.add(graphConfig);
+        }
+
+        for (int i = 0; i < pdCnt; i++) {
+            PDConfig pdConfig = pdConfigs.get(i);
+            pdConfig.setRaftPeerList(pdRaftList);
+            pdConfig.setStoreGrpcList(storeGrpcList);
+        }
+    }
+
+    public PDConfig getPDConfig(int i) {
+        return pdConfigs.get(i);
+    }
+
+    public StoreConfig getStoreConfig(int i) {
+        return storeConfigs.get(i);
+    }
+
+    public ServerConfig getServerConfig(int i) {
+        return serverConfigs.get(i);
+    }
+
+    public GraphConfig getGraphConfig(int i) {
+        return graphConfigs.get(i);
+    }
+
+    public List<String> getPDRestAddrs() {
+        List<String> addrs = new ArrayList<>();
+        for (PDConfig pdConfig : pdConfigs) {
+            addrs.add(pdConfig.getRaftAddress());
+        }
+        return addrs;
+    }
+
+    public List<String> getPDGrpcAddrs() {
+        List<String> addrs = new ArrayList<>();
+        for (PDConfig pdConfig : pdConfigs) {
+            addrs.add(pdConfig.getGrpcAddress());
+        }
+        return addrs;
+    }
+
+    public List<String> getStoreRestAddrs() {
+        List<String> addrs = new ArrayList<>();
+        for (StoreConfig storeConfig : storeConfigs) {
+            addrs.add("127.0.0.1" + ":" + storeConfig.getRestPort());
+        }
+        return addrs;
+    }
+
+    public List<String> getStoreGrpcAddrs() {
+        List<String> addrs = new ArrayList<>();
+        for (StoreConfig storeConfig : storeConfigs) {
+            addrs.add("127.0.0.1" + ":" + storeConfig.getGrpcPort());
+        }
+        return addrs;
+    }
+
+    public List<String> getServerRestAddrs() {
+        List<String> addrs = new ArrayList<>();
+        for (ServerConfig serverConfig : serverConfigs) {
+            addrs.add("127.0.0.1" + ":" + serverConfig.getRestPort());
+        }
+        return addrs;
+    }
+}
diff --git a/hugegraph-cluster-test/hugegraph-clustertest-minicluster/src/main/java/org/apache/hugegraph/ct/config/GraphConfig.java b/hugegraph-cluster-test/hugegraph-clustertest-minicluster/src/main/java/org/apache/hugegraph/ct/config/GraphConfig.java
new file mode 100644
index 000000000..a6b425d51
--- /dev/null
+++ b/hugegraph-cluster-test/hugegraph-clustertest-minicluster/src/main/java/org/apache/hugegraph/ct/config/GraphConfig.java
@@ -0,0 +1,38 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hugegraph.ct.config;
+
+import static org.apache.hugegraph.ct.base.ClusterConstant.CONFIG_FILE_PATH;
+import static org.apache.hugegraph.ct.base.ClusterConstant.GRAPH_TEMPLATE_FILE;
+import static org.apache.hugegraph.ct.base.ClusterConstant.HUGEGRAPH_PROPERTIES;
+
+import java.nio.file.Paths;
+import java.util.List;
+
+public class GraphConfig extends AbstractConfig {
+
+    public GraphConfig() {
+        readTemplate(Paths.get(CONFIG_FILE_PATH + GRAPH_TEMPLATE_FILE));
+        this.fileName = HUGEGRAPH_PROPERTIES;
+    }
+
+    public void setPDPeersList(List<String> pdPeersList) {
+        String pdPeers = String.join(",", pdPeersList);
+        setProperty("PD_PEERS_LIST", pdPeers);
+    }
+}
diff --git a/hugegraph-cluster-test/hugegraph-clustertest-minicluster/src/main/java/org/apache/hugegraph/ct/config/PDConfig.java b/hugegraph-cluster-test/hugegraph-clustertest-minicluster/src/main/java/org/apache/hugegraph/ct/config/PDConfig.java
new file mode 100644
index 000000000..d53e45d57
--- /dev/null
+++ b/hugegraph-cluster-test/hugegraph-clustertest-minicluster/src/main/java/org/apache/hugegraph/ct/config/PDConfig.java
@@ -0,0 +1,70 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hugegraph.ct.config;
+
+import static org.apache.hugegraph.ct.base.ClusterConstant.APPLICATION_FILE;
+import static org.apache.hugegraph.ct.base.ClusterConstant.CONFIG_FILE_PATH;
+import static org.apache.hugegraph.ct.base.ClusterConstant.LOCALHOST;
+import static org.apache.hugegraph.ct.base.ClusterConstant.PD_TEMPLATE_FILE;
+import static org.apache.hugegraph.ct.base.EnvUtil.getAvailablePort;
+
+import java.nio.file.Paths;
+import java.util.List;
+
+import lombok.Getter;
+
+@Getter
+public class PDConfig extends AbstractConfig {
+
+    private final int raftPort;
+    private final int grpcPort;
+    private final int restPort;
+
+    public PDConfig() {
+        readTemplate(Paths.get(CONFIG_FILE_PATH + PD_TEMPLATE_FILE));
+        this.fileName = APPLICATION_FILE;
+        this.raftPort = getAvailablePort();
+        this.grpcPort = getAvailablePort();
+        this.restPort = getAvailablePort();
+        properties.put("GRPC_PORT", String.valueOf(this.grpcPort));
+        properties.put("REST_PORT", String.valueOf(this.restPort));
+        properties.put("RAFT_ADDRESS", LOCALHOST + ":" + this.raftPort);
+    }
+
+    public void setRaftPeerList(List<String> raftPeerList) {
+        String raftPeers = String.join(",", raftPeerList);
+        setProperty("RAFT_PEERS_LIST", raftPeers);
+    }
+
+    public void setStoreCount(int storeCount) {
+        setProperty("STORE_COUNT", String.valueOf(storeCount));
+    }
+
+    public void setStoreGrpcList(List<String> storeGrpcList) {
+        String storeGrpcLists = String.join(",", storeGrpcList);
+        setProperty("STORE_GRPC_LIST", storeGrpcLists);
+    }
+
+    public String getRaftAddress() {
+        return LOCALHOST + ":" + this.raftPort;
+    }
+
+    public String getGrpcAddress() {
+        return LOCALHOST + ":" + this.grpcPort;
+    }
+}
diff --git a/hugegraph-cluster-test/hugegraph-clustertest-minicluster/src/main/java/org/apache/hugegraph/ct/config/ServerConfig.java b/hugegraph-cluster-test/hugegraph-clustertest-minicluster/src/main/java/org/apache/hugegraph/ct/config/ServerConfig.java
new file mode 100644
index 000000000..569a11ddd
--- /dev/null
+++ b/hugegraph-cluster-test/hugegraph-clustertest-minicluster/src/main/java/org/apache/hugegraph/ct/config/ServerConfig.java
@@ -0,0 +1,54 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hugegraph.ct.config;
+
+import static org.apache.hugegraph.ct.base.ClusterConstant.CONFIG_FILE_PATH;
+import static org.apache.hugegraph.ct.base.ClusterConstant.LOCALHOST;
+import static org.apache.hugegraph.ct.base.ClusterConstant.SERVER_PROPERTIES;
+import static org.apache.hugegraph.ct.base.ClusterConstant.SERVER_TEMPLATE_FILE;
+import static org.apache.hugegraph.ct.base.EnvUtil.getAvailablePort;
+
+import java.nio.file.Paths;
+
+import lombok.Getter;
+
+@Getter
+public class ServerConfig extends AbstractConfig {
+
+    private final int rpcPort;
+    private final int restPort;
+
+    public ServerConfig() {
+        readTemplate(Paths.get(CONFIG_FILE_PATH + SERVER_TEMPLATE_FILE));
+        this.fileName = SERVER_PROPERTIES;
+        this.rpcPort = getAvailablePort();
+        this.restPort = getAvailablePort();
+        properties.put("REST_SERVER_ADDRESS", LOCALHOST + ":" + this.restPort);
+        properties.put("RPC_PORT", String.valueOf(this.rpcPort));
+    }
+
+    public void setServerID(String serverID) {
+        setProperty("SERVER_ID", serverID);
+    }
+
+    public void setRole(String role) {
+        setProperty("ROLE", role);
+    }
+}
+
+
diff --git a/hugegraph-cluster-test/hugegraph-clustertest-minicluster/src/main/java/org/apache/hugegraph/ct/config/StoreConfig.java b/hugegraph-cluster-test/hugegraph-clustertest-minicluster/src/main/java/org/apache/hugegraph/ct/config/StoreConfig.java
new file mode 100644
index 000000000..50495f18a
--- /dev/null
+++ b/hugegraph-cluster-test/hugegraph-clustertest-minicluster/src/main/java/org/apache/hugegraph/ct/config/StoreConfig.java
@@ -0,0 +1,57 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hugegraph.ct.config;
+
+import static org.apache.hugegraph.ct.base.ClusterConstant.APPLICATION_FILE;
+import static org.apache.hugegraph.ct.base.ClusterConstant.CONFIG_FILE_PATH;
+import static org.apache.hugegraph.ct.base.ClusterConstant.LOCALHOST;
+import static org.apache.hugegraph.ct.base.ClusterConstant.STORE_TEMPLATE_FILE;
+import static org.apache.hugegraph.ct.base.EnvUtil.getAvailablePort;
+
+import java.nio.file.Paths;
+import java.util.List;
+
+import lombok.Getter;
+
+@Getter
+public class StoreConfig extends AbstractConfig {
+
+    private final int raftPort;
+    private final int grpcPort;
+    private final int restPort;
+
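+    // Reads the store application template and fills the grpc/rest/raft placeholders with free local ports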
+    public StoreConfig() {
+        readTemplate(Paths.get(CONFIG_FILE_PATH + STORE_TEMPLATE_FILE));
+        this.fileName = APPLICATION_FILE;
+        this.raftPort = getAvailablePort();
+        this.grpcPort = getAvailablePort();
+        this.restPort = getAvailablePort();
+        properties.put("GRPC_PORT", String.valueOf(this.grpcPort));
+        properties.put("REST_PORT", String.valueOf(this.restPort));
+        properties.put("RAFT_ADDRESS", LOCALHOST + ":" + this.raftPort);
+    }
+
+    public void setPDServerList(List<String> pdServerList) {
+        String pdServers = String.join(",", pdServerList);
+        setProperty("PD_SERVER_ADDRESS", pdServers);
+    }
+
+    public String getGrpcAddress() {
+        return LOCALHOST + ":" + this.grpcPort;
+    }
+}
diff --git a/hugegraph-cluster-test/hugegraph-clustertest-minicluster/src/main/java/org/apache/hugegraph/ct/env/AbstractEnv.java b/hugegraph-cluster-test/hugegraph-clustertest-minicluster/src/main/java/org/apache/hugegraph/ct/env/AbstractEnv.java
new file mode 100644
index 000000000..0c2486092
--- /dev/null
+++ b/hugegraph-cluster-test/hugegraph-clustertest-minicluster/src/main/java/org/apache/hugegraph/ct/env/AbstractEnv.java
@@ -0,0 +1,182 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hugegraph.ct.env;
+
+import static org.apache.hugegraph.ct.base.ClusterConstant.CONF_DIR;
+
+import java.util.ArrayList;
+import java.util.List;
+
+import org.apache.hugegraph.ct.base.HGTestLogger;
+import org.apache.hugegraph.ct.config.ClusterConfig;
+import org.apache.hugegraph.ct.config.GraphConfig;
+import org.apache.hugegraph.ct.config.PDConfig;
+import org.apache.hugegraph.ct.config.ServerConfig;
+import org.apache.hugegraph.ct.config.StoreConfig;
+import org.apache.hugegraph.ct.node.PDNodeWrapper;
+import org.apache.hugegraph.ct.node.ServerNodeWrapper;
+import org.apache.hugegraph.ct.node.StoreNodeWrapper;
+import org.slf4j.Logger;
+
+import lombok.Setter;
+import lombok.extern.slf4j.Slf4j;
+
+@Slf4j
+public abstract class AbstractEnv implements BaseEnv {
+
+    private static final Logger LOG = HGTestLogger.ENV_LOG;
+
+    protected ClusterConfig clusterConfig;
+    protected List<PDNodeWrapper> pdNodeWrappers;
+    protected List<ServerNodeWrapper> serverNodeWrappers;
+    protected List<StoreNodeWrapper> storeNodeWrappers;
+    @Setter
+    protected int cluster_id = 0;
+
+    protected AbstractEnv() {
+        this.pdNodeWrappers = new ArrayList<>();
+        this.serverNodeWrappers = new ArrayList<>();
+        this.storeNodeWrappers = new ArrayList<>();
+    }
+
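+    // Creates PD/Store/Server node wrappers and writes each node's config files into its own conf dir;
+    // the first server node is assigned the master role, the others are workers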
+    protected void init(int pdCnt, int storeCnt, int serverCnt) {
+        this.clusterConfig = new ClusterConfig(pdCnt, storeCnt, serverCnt);
+        for (int i = 0; i < pdCnt; i++) {
+            PDNodeWrapper pdNodeWrapper = new PDNodeWrapper(cluster_id, i);
+            PDConfig pdConfig = clusterConfig.getPDConfig(i);
+            pdNodeWrappers.add(pdNodeWrapper);
+            pdConfig.writeConfig(pdNodeWrapper.getNodePath() + CONF_DIR);
+        }
+
+        for (int i = 0; i < storeCnt; i++) {
+            StoreNodeWrapper storeNodeWrapper = new StoreNodeWrapper(cluster_id, i);
+            StoreConfig storeConfig = clusterConfig.getStoreConfig(i);
+            storeNodeWrappers.add(storeNodeWrapper);
+            storeConfig.writeConfig(storeNodeWrapper.getNodePath() + CONF_DIR);
+        }
+
+        for (int i = 0; i < serverCnt; i++) {
+            ServerNodeWrapper serverNodeWrapper = new ServerNodeWrapper(cluster_id, i);
+            serverNodeWrappers.add(serverNodeWrapper);
+            ServerConfig serverConfig = clusterConfig.getServerConfig(i);
+            serverConfig.setServerID(serverNodeWrapper.getID());
+            GraphConfig graphConfig = clusterConfig.getGraphConfig(i);
+            if (i == 0) {
+                serverConfig.setRole("master");
+            } else {
+                serverConfig.setRole("worker");
+            }
+            serverConfig.writeConfig(serverNodeWrapper.getNodePath() + CONF_DIR);
+            graphConfig.writeConfig(serverNodeWrapper.getNodePath() + CONF_DIR);
+        }
+    }
+
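+    // Starts nodes in order: PD -> Store -> Server, polling each node's log every second until its start marker appears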
+    public void startCluster() {
+        for (PDNodeWrapper pdNodeWrapper : pdNodeWrappers) {
+            pdNodeWrapper.start();
+            while (!pdNodeWrapper.isStarted()) {
+                try {
+                    Thread.sleep(1000);
+                } catch (InterruptedException e) {
+                    throw new RuntimeException(e);
+                }
+            }
+        }
+        for (StoreNodeWrapper storeNodeWrapper : storeNodeWrappers) {
+            storeNodeWrapper.start();
+            while (!storeNodeWrapper.isStarted()) {
+                try {
+                    Thread.sleep(1000);
+                } catch (InterruptedException e) {
+                    throw new RuntimeException(e);
+                }
+            }
+        }
+        for (ServerNodeWrapper serverNodeWrapper : serverNodeWrappers) {
+            serverNodeWrapper.start();
+            while (!serverNodeWrapper.isStarted()) {
+                try {
+                    Thread.sleep(1000);
+                } catch (InterruptedException e) {
+                    throw new RuntimeException(e);
+                }
+            }
+        }
+    }
+
+    public void stopCluster() {
+        for (ServerNodeWrapper serverNodeWrapper : serverNodeWrappers) {
+            serverNodeWrapper.stop();
+        }
+        for (StoreNodeWrapper storeNodeWrapper : storeNodeWrappers) {
+            storeNodeWrapper.stop();
+        }
+        for (PDNodeWrapper pdNodeWrapper : pdNodeWrappers) {
+            pdNodeWrapper.stop();
+        }
+    }
+
+    public ClusterConfig getConf() {
+        return this.clusterConfig;
+    }
+
+    public List<String> getPDRestAddrs() {
+        return clusterConfig.getPDRestAddrs();
+    }
+
+    public List<String> getPDGrpcAddrs() {
+        return clusterConfig.getPDGrpcAddrs();
+    }
+
+    public List<String> getStoreRestAddrs() {
+        return clusterConfig.getStoreRestAddrs();
+    }
+
+    public List<String> getStoreGrpcAddrs() {
+        return clusterConfig.getStoreGrpcAddrs();
+    }
+
+    public List<String> getServerRestAddrs() {
+        return clusterConfig.getServerRestAddrs();
+    }
+
+    public List<String> getPDNodeDir() {
+        List<String> nodeDirs = new ArrayList<>();
+        for (PDNodeWrapper pdNodeWrapper : pdNodeWrappers) {
+            nodeDirs.add(pdNodeWrapper.getNodePath());
+        }
+        return nodeDirs;
+    }
+
+    public List<String> getStoreNodeDir() {
+        List<String> nodeDirs = new ArrayList<>();
+        for (StoreNodeWrapper storeNodeWrapper : storeNodeWrappers) {
+            nodeDirs.add(storeNodeWrapper.getNodePath());
+        }
+        return nodeDirs;
+    }
+
+    public List<String> getServerNodeDir() {
+        List<String> nodeDirs = new ArrayList<>();
+        for (ServerNodeWrapper serverNodeWrapper : serverNodeWrappers) {
+            nodeDirs.add(serverNodeWrapper.getNodePath());
+        }
+        return nodeDirs;
+    }
+
+}
diff --git a/hugegraph-cluster-test/hugegraph-clustertest-minicluster/src/main/java/org/apache/hugegraph/ct/env/BaseEnv.java b/hugegraph-cluster-test/hugegraph-clustertest-minicluster/src/main/java/org/apache/hugegraph/ct/env/BaseEnv.java
new file mode 100644
index 000000000..f6c4ba5fb
--- /dev/null
+++ b/hugegraph-cluster-test/hugegraph-clustertest-minicluster/src/main/java/org/apache/hugegraph/ct/env/BaseEnv.java
@@ -0,0 +1,49 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hugegraph.ct.env;
+
+import java.util.List;
+
+import org.apache.hugegraph.ct.config.ClusterConfig;
+
+public interface BaseEnv {
+
+    /* start all nodes (PD, Store, Server) of the cluster */
+    void startCluster();
+
+    /* stop all nodes and clean up the cluster env and configs */
+    void stopCluster();
+
+    ClusterConfig getConf();
+
+    void init();
+
+    List<String> getPDRestAddrs();
+
+    List<String> getPDGrpcAddrs();
+
+    List<String> getStoreRestAddrs();
+
+    List<String> getServerRestAddrs();
+
+    List<String> getPDNodeDir();
+
+    List<String> getStoreNodeDir();
+
+    List<String> getServerNodeDir();
+}
diff --git a/hugegraph-cluster-test/hugegraph-clustertest-minicluster/src/main/java/org/apache/hugegraph/ct/env/EnvFactory.java b/hugegraph-cluster-test/hugegraph-clustertest-minicluster/src/main/java/org/apache/hugegraph/ct/env/EnvFactory.java
new file mode 100644
index 000000000..a716697c5
--- /dev/null
+++ b/hugegraph-cluster-test/hugegraph-clustertest-minicluster/src/main/java/org/apache/hugegraph/ct/env/EnvFactory.java
@@ -0,0 +1,46 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hugegraph.ct.env;
+
+import org.apache.hugegraph.ct.base.EnvType;
+import org.apache.hugegraph.ct.base.HGTestLogger;
+import org.slf4j.Logger;
+
+public class EnvFactory {
+
+    private static final Logger LOG = HGTestLogger.ENV_LOG;
+    private static BaseEnv env;
+
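+    // Lazily creates a singleton env; the concrete type is chosen via EnvType.getSystemEnvType()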
+    public static BaseEnv getEnv() {
+        if (env == null) {
+            EnvType envType = EnvType.getSystemEnvType();
+            switch (envType) {
+                case SingleNode:
+                    env = new SimpleEnv();
+                    break;
+                case MultiNode:
+                    env = new MultiNodeEnv();
+                    break;
+                default:
+                    LOG.error("No such env type: {}", envType);
+            }
+        }
+        return env;
+    }
+
+}
diff --git a/hugegraph-cluster-test/hugegraph-clustertest-minicluster/src/main/java/org/apache/hugegraph/ct/env/MultiNodeEnv.java b/hugegraph-cluster-test/hugegraph-clustertest-minicluster/src/main/java/org/apache/hugegraph/ct/env/MultiNodeEnv.java
new file mode 100644
index 000000000..83a540f26
--- /dev/null
+++ b/hugegraph-cluster-test/hugegraph-clustertest-minicluster/src/main/java/org/apache/hugegraph/ct/env/MultiNodeEnv.java
@@ -0,0 +1,36 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hugegraph.ct.env;
+
+public class MultiNodeEnv extends AbstractEnv {
+
+    public MultiNodeEnv() {
+        super();
+        this.init();
+    }
+
+    public MultiNodeEnv(int pdNum, int storeNum, int serverNum) {
+        super();
+        super.init(pdNum, storeNum, serverNum);
+    }
+
+    @Override
+    public void init() {
+        super.init(3, 3, 3);
+    }
+}
diff --git a/hugegraph-cluster-test/hugegraph-clustertest-minicluster/src/main/java/org/apache/hugegraph/ct/env/SimpleEnv.java b/hugegraph-cluster-test/hugegraph-clustertest-minicluster/src/main/java/org/apache/hugegraph/ct/env/SimpleEnv.java
new file mode 100644
index 000000000..595ed0fbe
--- /dev/null
+++ b/hugegraph-cluster-test/hugegraph-clustertest-minicluster/src/main/java/org/apache/hugegraph/ct/env/SimpleEnv.java
@@ -0,0 +1,30 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hugegraph.ct.env;
+
+public class SimpleEnv extends AbstractEnv {
+
+    public SimpleEnv() {
+        super();
+        init();
+    }
+
+    public void init() {
+        super.init(1, 1, 1);
+    }
+}
diff --git a/hugegraph-cluster-test/hugegraph-clustertest-minicluster/src/main/java/org/apache/hugegraph/ct/node/AbstractNodeWrapper.java b/hugegraph-cluster-test/hugegraph-clustertest-minicluster/src/main/java/org/apache/hugegraph/ct/node/AbstractNodeWrapper.java
new file mode 100644
index 000000000..8236bb139
--- /dev/null
+++ b/hugegraph-cluster-test/hugegraph-clustertest-minicluster/src/main/java/org/apache/hugegraph/ct/node/AbstractNodeWrapper.java
@@ -0,0 +1,191 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hugegraph.ct.node;
+
+import static org.apache.hugegraph.ct.base.ClusterConstant.CT_PACKAGE_PATH;
+
+import java.io.File;
+import java.io.FileNotFoundException;
+import java.io.FileReader;
+import java.io.IOException;
+import java.nio.charset.StandardCharsets;
+import java.nio.file.Files;
+import java.nio.file.NoSuchFileException;
+import java.nio.file.Path;
+import java.nio.file.Paths;
+import java.util.ArrayList;
+import java.util.List;
+import java.util.Scanner;
+import java.util.concurrent.TimeUnit;
+import java.util.stream.Stream;
+
+import org.apache.commons.io.FileUtils;
+import org.apache.commons.io.file.PathUtils;
+import org.apache.hugegraph.ct.base.ClusterConstant;
+import org.apache.hugegraph.ct.base.EnvUtil;
+import org.apache.hugegraph.ct.base.HGTestLogger;
+import org.slf4j.Logger;
+
+import lombok.Getter;
+
+public abstract class AbstractNodeWrapper implements BaseNodeWrapper {
+
+    protected final Logger LOG = HGTestLogger.NODE_LOG;
+
+    protected int clusterIndex;
+    @Getter
+    protected String workPath;
+    @Getter
+    protected String configPath;
+    protected Process instance;
+    protected int index;
+    protected List<String> fileNames;
+    protected String startLine;
+
+    public AbstractNodeWrapper() {
+        this.clusterIndex = 1;
+        fileNames = new ArrayList<>();
+        this.configPath = getNodePath();
+    }
+
+    public AbstractNodeWrapper(int clusterIndex, int index) {
+        this.clusterIndex = clusterIndex;
+        this.index = index;
+        fileNames = new ArrayList<>();
+        this.configPath = getNodePath();
+    }
+
+    /**
+     * The node dir must be created before the config files are changed
+     */
+    public void createNodeDir(Path sourcePath, String destDir) {
+        try {
+            try {
+                if (!new File(destDir).exists()) {
+                    FileUtils.createParentDirectories(new File(destDir));
+                }
+            } catch (NoSuchFileException fileException) {
+                // Ignored
+            }
+            // To avoid following symbolic links
+            try (Stream<Path> stream = Files.walk(sourcePath)) {
+                stream.forEach(source -> {
+                    Path relativePath = sourcePath.relativize(source);
+                    Path destination = Paths.get(destDir).resolve(relativePath);
+                    if (fileNames.contains(relativePath.toString())) {
+                        EnvUtil.copyFileToDestination(source, destination);
+                    }
+                });
+            }
+        } catch (IOException ioException) {
+            LOG.error("Got error copying files to node destination dir", 
ioException);
+            throw new AssertionError();
+        }
+    }
+
+    public void createLogDir() {
+        String logPath = getLogPath();
+        try {
+            FileUtils.createParentDirectories(new File(logPath));
+        } catch (IOException e) {
+            LOG.error("Create log dir failed", e);
+            throw new AssertionError();
+        }
+    }
+
+    public void deleteDir() {
+        try {
+            PathUtils.deleteDirectory(Paths.get(getNodePath()));
+        } catch (IOException ex) {
+            try {
+                TimeUnit.SECONDS.sleep(1);
+            } catch (InterruptedException e) {
+                Thread.currentThread().interrupt();
+                LOG.error("Fail to delete node file", e);
+                throw new AssertionError("Delete node dir failed. " + e);
+            }
+        }
+    }
+
+    /**
+     * @return (user.dir).id
+     */
+    @Override
+    public String getNodePath() {
+        return CT_PACKAGE_PATH + getID() + File.separator;
+    }
+
+    @Override
+    public String getLogPath() {
+        return getNodePath() + ClusterConstant.LOG + File.separator + getID() + "-start.log";
+    }
+
+    @Override
+    public void updateWorkPath(String workPath) {
+        this.workPath = workPath;
+    }
+
+    @Override
+    public void updateConfigPath(String configPath) {
+        this.configPath = configPath;
+    }
+
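+    // A node counts as started once its start log contains the expected startLine marker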
+    @Override
+    public boolean isStarted() {
+        try (Scanner sc = new Scanner(new FileReader(getLogPath()))) {
+            while (sc.hasNextLine()) {
+                String line = sc.nextLine();
+                if (line.contains(startLine)) return true;
+            }
+        } catch (FileNotFoundException ignored) {
+        }
+        return false;
+    }
+
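+    // Destroys the process, waits up to 20s (then forcibly up to 10s) and removes the node dir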
+    public void stop() {
+        if (this.instance == null) {
+            return;
+        }
+        this.instance.destroy();
+        try {
+            if (!this.instance.waitFor(20, TimeUnit.SECONDS)) {
+                this.instance.destroyForcibly().waitFor(10, TimeUnit.SECONDS);
+            }
+        } catch (InterruptedException e) {
+            Thread.currentThread().interrupt();
+            LOG.error("Waiting node to shutdown error.", e);
+        }
+        deleteDir();
+    }
+
+    public boolean isAlive() {
+        return this.instance.isAlive();
+    }
+
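+    // Echoes the full command into the node's start log, redirects stdout/stderr to the same file
+    // and uses the node's config path as the working directory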
+    protected ProcessBuilder runCmd(List<String> startCmd, File stdoutFile) throws IOException {
+        FileUtils.write(stdoutFile,
+                        String.join(" ", startCmd) + System.lineSeparator() + System.lineSeparator(),
+                        StandardCharsets.UTF_8, true);
+        ProcessBuilder processBuilder = new ProcessBuilder(startCmd)
+                .redirectOutput(ProcessBuilder.Redirect.appendTo(stdoutFile))
+                .redirectError(ProcessBuilder.Redirect.appendTo(stdoutFile));
+        processBuilder.directory(new File(configPath));
+        return processBuilder;
+    }
+
+}
diff --git a/hugegraph-cluster-test/hugegraph-clustertest-minicluster/src/main/java/org/apache/hugegraph/ct/node/BaseNodeWrapper.java b/hugegraph-cluster-test/hugegraph-clustertest-minicluster/src/main/java/org/apache/hugegraph/ct/node/BaseNodeWrapper.java
new file mode 100644
index 000000000..f428b227c
--- /dev/null
+++ b/hugegraph-cluster-test/hugegraph-clustertest-minicluster/src/main/java/org/apache/hugegraph/ct/node/BaseNodeWrapper.java
@@ -0,0 +1,39 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hugegraph.ct.node;
+
+public interface BaseNodeWrapper {
+
+    void start();
+
+    void stop();
+
+    boolean isAlive();
+
+    String getID();
+
+    String getNodePath();
+
+    String getLogPath();
+
+    void updateWorkPath(String workPath);
+
+    void updateConfigPath(String configPath);
+
+    boolean isStarted();
+}
diff --git a/hugegraph-cluster-test/hugegraph-clustertest-minicluster/src/main/java/org/apache/hugegraph/ct/node/PDNodeWrapper.java b/hugegraph-cluster-test/hugegraph-clustertest-minicluster/src/main/java/org/apache/hugegraph/ct/node/PDNodeWrapper.java
new file mode 100644
index 000000000..a89c614c4
--- /dev/null
+++ b/hugegraph-cluster-test/hugegraph-clustertest-minicluster/src/main/java/org/apache/hugegraph/ct/node/PDNodeWrapper.java
@@ -0,0 +1,93 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hugegraph.ct.node;
+
+import static org.apache.hugegraph.ct.base.ClusterConstant.CONF_DIR;
+import static org.apache.hugegraph.ct.base.ClusterConstant.JAVA_CMD;
+import static org.apache.hugegraph.ct.base.ClusterConstant.LOG4J_FILE;
+import static org.apache.hugegraph.ct.base.ClusterConstant.PD_JAR_PREFIX;
+import static org.apache.hugegraph.ct.base.ClusterConstant.PD_LIB_PATH;
+import static org.apache.hugegraph.ct.base.ClusterConstant.PD_TEMPLATE_PATH;
+import static org.apache.hugegraph.ct.base.ClusterConstant.getFileInDir;
+import static org.apache.hugegraph.ct.base.ClusterConstant.isJava11OrHigher;
+
+import java.io.File;
+import java.io.IOException;
+import java.nio.file.Paths;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.List;
+
+public class PDNodeWrapper extends AbstractNodeWrapper {
+
+    public PDNodeWrapper() {
+        super();
+        fileNames = new ArrayList<>(Arrays.asList(LOG4J_FILE));
+        this.workPath = PD_LIB_PATH;
+        this.startLine = "Hugegraph-pd started.";
+        createNodeDir(Paths.get(PD_TEMPLATE_PATH), getNodePath() + CONF_DIR + File.separator);
+        createLogDir();
+    }
+
+    public PDNodeWrapper(int clusterIndex, int index) {
+        super(clusterIndex, index);
+        this.fileNames = new ArrayList<>(Arrays.asList(LOG4J_FILE));
+        this.workPath = PD_LIB_PATH;
+        this.startLine = "Hugegraph-pd started.";
+        createNodeDir(Paths.get(PD_TEMPLATE_PATH), getNodePath() + CONF_DIR + File.separator);
+        createLogDir();
+    }
+
+    /*
+     * workPath is the path of the JAR package; configPath is the path of the config files
+     */
+    @Override
+    public void start() {
+        try {
+            File stdoutFile = new File(getLogPath());
+            List<String> startCmd = new ArrayList<>();
+            startCmd.add(JAVA_CMD);
+            if (!isJava11OrHigher()) {
+                LOG.error("Please make sure that the JDK is installed and the 
version >= 11");
+                return;
+            }
+
+            String pdNodeJarPath = getFileInDir(workPath, PD_JAR_PREFIX);
+            startCmd.addAll(Arrays.asList(
+                    "-Dname=HugeGraphPD" + this.index,
+                    "-Xms512m",
+                    "-Xmx4g",
+                    "-XX:+HeapDumpOnOutOfMemoryError",
+                    "-XX:HeapDumpPath=" + configPath + "logs",
+                    "-Dlog4j.configurationFile=" + configPath + File.separator 
+
+                    CONF_DIR + File.separator + "log4j2.xml",
+                    "-Dspring.config.location=" + configPath + CONF_DIR + 
File.separator +
+                    "application.yml",
+                    "-jar", pdNodeJarPath));
+            ProcessBuilder processBuilder = runCmd(startCmd, stdoutFile);
+            this.instance = processBuilder.start();
+        } catch (IOException ex) {
+            throw new AssertionError("Start node failed. " + ex);
+        }
+    }
+
+    @Override
+    public String getID() {
+        return "PD" + this.index;
+    }
+}
diff --git a/hugegraph-cluster-test/hugegraph-clustertest-minicluster/src/main/java/org/apache/hugegraph/ct/node/ServerNodeWrapper.java b/hugegraph-cluster-test/hugegraph-clustertest-minicluster/src/main/java/org/apache/hugegraph/ct/node/ServerNodeWrapper.java
new file mode 100644
index 000000000..e39bc3955
--- /dev/null
+++ b/hugegraph-cluster-test/hugegraph-clustertest-minicluster/src/main/java/org/apache/hugegraph/ct/node/ServerNodeWrapper.java
@@ -0,0 +1,105 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hugegraph.ct.node;
+
+import static org.apache.hugegraph.ct.base.ClusterConstant.CONF_DIR;
+import static org.apache.hugegraph.ct.base.ClusterConstant.EMPTY_SAMPLE_GROOVY_FILE;
+import static org.apache.hugegraph.ct.base.ClusterConstant.EXAMPLE_GROOVY_FILE;
+import static org.apache.hugegraph.ct.base.ClusterConstant.EXT_DIR;
+import static org.apache.hugegraph.ct.base.ClusterConstant.GREMLIN_DRIVER_SETTING_FILE;
+import static org.apache.hugegraph.ct.base.ClusterConstant.GREMLIN_SERVER_FILE;
+import static org.apache.hugegraph.ct.base.ClusterConstant.JAVA_CMD;
+import static org.apache.hugegraph.ct.base.ClusterConstant.LIB_DIR;
+import static org.apache.hugegraph.ct.base.ClusterConstant.LOG4J_FILE;
+import static org.apache.hugegraph.ct.base.ClusterConstant.PLUGINS_DIR;
+import static org.apache.hugegraph.ct.base.ClusterConstant.REMOTE_OBJECTS_SETTING_FILE;
+import static org.apache.hugegraph.ct.base.ClusterConstant.REMOTE_SETTING_FILE;
+import static org.apache.hugegraph.ct.base.ClusterConstant.SERVER_LIB_PATH;
+import static org.apache.hugegraph.ct.base.ClusterConstant.SERVER_PACKAGE_PATH;
+import static org.apache.hugegraph.ct.base.ClusterConstant.SERVER_TEMPLATE_PATH;
+import static org.apache.hugegraph.ct.base.ClusterConstant.isJava11OrHigher;
+
+import java.io.File;
+import java.io.IOException;
+import java.nio.file.Paths;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.List;
+
+public class ServerNodeWrapper extends AbstractNodeWrapper {
+
+    public ServerNodeWrapper(int clusterIndex, int index) {
+        super(clusterIndex, index);
+        this.fileNames = new ArrayList<>(
+                List.of(LOG4J_FILE, GREMLIN_SERVER_FILE, GREMLIN_DRIVER_SETTING_FILE,
+                        REMOTE_SETTING_FILE, REMOTE_OBJECTS_SETTING_FILE));
+        this.workPath = SERVER_LIB_PATH;
+        createNodeDir(Paths.get(SERVER_TEMPLATE_PATH), getNodePath() + CONF_DIR + File.separator);
+        this.fileNames = new ArrayList<>(List.of(EMPTY_SAMPLE_GROOVY_FILE, EXAMPLE_GROOVY_FILE));
+        this.startLine = "INFO: [HttpServer] Started.";
+        createNodeDir(Paths.get(SERVER_PACKAGE_PATH), getNodePath());
+        createLogDir();
+    }
+
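+    // Adds every *.jar file in the given directory to the classpath list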
+    private static void addJarsToClasspath(File directory, List<String> classpath) {
+        if (directory.exists() && directory.isDirectory()) {
+            File[] files = directory.listFiles((dir, name) -> name.endsWith(".jar"));
+            if (files != null) {
+                for (File file : files) {
+                    classpath.add(file.getAbsolutePath());
+                }
+            }
+        }
+    }
+
+    @Override
+    public void start() {
+        try {
+            File stdoutFile = new File(getLogPath());
+            List<String> startCmd = new ArrayList<>();
+            startCmd.add(JAVA_CMD);
+            if (!isJava11OrHigher()) {
+                LOG.error("Please make sure that the JDK is installed and the 
version >= 11");
+                return;
+            }
+
+            List<String> classpath = new ArrayList<>();
+            addJarsToClasspath(new File(workPath + LIB_DIR), classpath);
+            addJarsToClasspath(new File(workPath + EXT_DIR), classpath);
+            addJarsToClasspath(new File(workPath + PLUGINS_DIR), classpath);
+            String storeClassPath = String.join(":", classpath);
+
+            startCmd.addAll(Arrays.asList(
+                    "-Dname=HugeGraphServer" + this.index,
+                    "--add-exports=java.base/jdk.internal.reflect=ALL-UNNAMED",
+                    "-cp", storeClassPath,
+                    "org.apache.hugegraph.dist.HugeGraphServer",
+                    "./conf/gremlin-server.yaml",
+                    "./conf/rest-server.properties"));
+            ProcessBuilder processBuilder = runCmd(startCmd, stdoutFile);
+            this.instance = processBuilder.start();
+        } catch (IOException ex) {
+            throw new AssertionError("Started server node failed. " + ex);
+        }
+    }
+
+    @Override
+    public String getID() {
+        return "Server" + this.index;
+    }
+}
diff --git a/hugegraph-cluster-test/hugegraph-clustertest-minicluster/src/main/java/org/apache/hugegraph/ct/node/StoreNodeWrapper.java b/hugegraph-cluster-test/hugegraph-clustertest-minicluster/src/main/java/org/apache/hugegraph/ct/node/StoreNodeWrapper.java
new file mode 100644
index 000000000..1cb0f67ea
--- /dev/null
+++ b/hugegraph-cluster-test/hugegraph-clustertest-minicluster/src/main/java/org/apache/hugegraph/ct/node/StoreNodeWrapper.java
@@ -0,0 +1,94 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hugegraph.ct.node;
+
+import static org.apache.hugegraph.ct.base.ClusterConstant.CONF_DIR;
+import static org.apache.hugegraph.ct.base.ClusterConstant.JAVA_CMD;
+import static org.apache.hugegraph.ct.base.ClusterConstant.LOG4J_FILE;
+import static org.apache.hugegraph.ct.base.ClusterConstant.STORE_JAR_PREFIX;
+import static org.apache.hugegraph.ct.base.ClusterConstant.STORE_LIB_PATH;
+import static org.apache.hugegraph.ct.base.ClusterConstant.STORE_TEMPLATE_PATH;
+import static org.apache.hugegraph.ct.base.ClusterConstant.getFileInDir;
+import static org.apache.hugegraph.ct.base.ClusterConstant.isJava11OrHigher;
+
+import java.io.File;
+import java.io.IOException;
+import java.nio.file.Paths;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.List;
+
+public class StoreNodeWrapper extends AbstractNodeWrapper {
+
+    public StoreNodeWrapper() {
+        super();
+        this.fileNames = new ArrayList<>(List.of(LOG4J_FILE));
+        this.workPath = STORE_LIB_PATH;
+        this.startLine = "o.a.h.s.n.StoreNodeApplication - Starting 
StoreNodeApplication";
+        createNodeDir(Paths.get(STORE_TEMPLATE_PATH), getNodePath() + CONF_DIR 
+ File.separator);
+        createLogDir();
+    }
+
+    public StoreNodeWrapper(int clusterId, int index) {
+        super(clusterId, index);
+        this.fileNames = new ArrayList<>(List.of(LOG4J_FILE));
+        this.workPath = STORE_LIB_PATH;
+        this.startLine = "o.a.h.s.n.StoreNodeApplication - Starting 
StoreNodeApplication";
+        createNodeDir(Paths.get(STORE_TEMPLATE_PATH), getNodePath() + CONF_DIR 
+ File.separator);
+        createLogDir();
+    }
+
+    @Override
+    public void start() {
+        try {
+            File stdoutFile = new File(getLogPath());
+            List<String> startCmd = new ArrayList<>();
+            startCmd.add(JAVA_CMD);
+            if (!isJava11OrHigher()) {
+                LOG.error("Please make sure that the JDK is installed and the 
version >= 11");
+                return;
+            }
+
+            String storeNodeJarPath = getFileInDir(workPath, STORE_JAR_PREFIX);
+            startCmd.addAll(Arrays.asList(
+                    "-Dname=HugeGraphStore" + this.index,
+                    "-Dlog4j.configurationFile=" + configPath + CONF_DIR
+                    + File.separator + "log4j2.xml",
+                    "-Dfastjson.parser.safeMode=true",
+                    "-Xms512m",
+                    "-Xmx2048m",
+                    "-XX:MetaspaceSize=256M",
+                    "-XX:+UseG1GC",
+                    "-XX:+ParallelRefProcEnabled",
+                    "-XX:+HeapDumpOnOutOfMemoryError",
+                    "-XX:HeapDumpPath=" + configPath + "logs",
+                    "-Dspring.config.location=" + configPath + CONF_DIR
+                    + File.separator + "application.yml",
+                    "-jar", storeNodeJarPath));
+            ProcessBuilder processBuilder = runCmd(startCmd, stdoutFile);
+            this.instance = processBuilder.start();
+        } catch (IOException ex) {
+            throw new AssertionError("Start node failed. " + ex);
+        }
+    }
+
+    @Override
+    public String getID() {
+        return "Store" + this.index;
+    }
+}
diff --git a/hugegraph-cluster-test/hugegraph-clustertest-test/pom.xml b/hugegraph-cluster-test/hugegraph-clustertest-test/pom.xml
new file mode 100644
index 000000000..c88840454
--- /dev/null
+++ b/hugegraph-cluster-test/hugegraph-clustertest-test/pom.xml
@@ -0,0 +1,98 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!--
+  ~ Licensed to the Apache Software Foundation (ASF) under one or more
+  ~ contributor license agreements.  See the NOTICE file distributed with
+  ~ this work for additional information regarding copyright ownership.
+  ~ The ASF licenses this file to You under the Apache License, Version 2.0
+  ~ (the "License"); you may not use this file except in compliance with
+  ~ the License.  You may obtain a copy of the License at
+  ~
+  ~     http://www.apache.org/licenses/LICENSE-2.0
+  ~
+  ~ Unless required by applicable law or agreed to in writing, software
+  ~ distributed under the License is distributed on an "AS IS" BASIS,
+  ~ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+  ~ See the License for the specific language governing permissions and
+  ~ limitations under the License.
+  -->
+
+<project xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+         xmlns="http://maven.apache.org/POM/4.0.0"
+         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
+    <modelVersion>4.0.0</modelVersion>
+    <parent>
+        <groupId>org.apache.hugegraph</groupId>
+        <artifactId>hugegraph-cluster-test</artifactId>
+        <version>${revision}</version>
+    </parent>
+
+    <artifactId>hugegraph-clustertest-test</artifactId>
+
+    <properties>
+        <maven.compiler.source>11</maven.compiler.source>
+        <maven.compiler.target>11</maven.compiler.target>
+        <project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
+    </properties>
+    <dependencies>
+        <dependency>
+            <groupId>org.apache.hugegraph</groupId>
+            <artifactId>hugegraph-clustertest-minicluster</artifactId>
+            <version>${revision}</version>
+            <scope>compile</scope>
+        </dependency>
+        <!-- TODO: avoid depending on toolchain -->
+        <dependency>
+            <groupId>org.apache.hugegraph</groupId>
+            <artifactId>hugegraph-client</artifactId>
+            <version>${toolchain.vision}</version>
+        </dependency>
+        <dependency>
+            <groupId>org.apache.hugegraph</groupId>
+            <artifactId>hg-pd-client</artifactId>
+            <version>${revision}</version>
+        </dependency>
+        <dependency>
+            <groupId>junit</groupId>
+            <artifactId>junit</artifactId>
+            <version>4.13.2</version>
+            <scope>compile</scope>
+        </dependency>
+    </dependencies>
+
+    <build>
+        <plugins>
+            <plugin>
+                <groupId>org.apache.maven.plugins</groupId>
+                <artifactId>maven-surefire-plugin</artifactId>
+                <version>2.20</version>
+                <executions>
+                    <execution>
+                        <id>simple-cluster-test</id>
+                        <configuration>
+                            <testSourceDirectory>${basedir}/src/main/java/
+                            </testSourceDirectory>
+                            <testClassesDirectory>${basedir}/target/classes/
+                            </testClassesDirectory>
+                            <includes>
+                                <include>**/SimpleClusterSuiteTest.java</include>
+                            </includes>
+                        </configuration>
+                    </execution>
+                    <execution>
+                        <id>multi-cluster-test</id>
+                        <configuration>
+                            <testSourceDirectory>${basedir}/src/main/java/
+                            </testSourceDirectory>
+                            <testClassesDirectory>${basedir}/target/classes/
+                            </testClassesDirectory>
+                            <includes>
+                                <include>**/MultiClusterSuiteTest.java</include>
+                            </includes>
+                        </configuration>
+                    </execution>
+                </executions>
+            </plugin>
+        </plugins>
+    </build>
+
+</project>
diff --git a/hugegraph-cluster-test/hugegraph-clustertest-test/src/main/java/org/apache/hugegraph/MultiClusterTest/BaseMultiClusterTest.java b/hugegraph-cluster-test/hugegraph-clustertest-test/src/main/java/org/apache/hugegraph/MultiClusterTest/BaseMultiClusterTest.java
new file mode 100644
index 000000000..59394101c
--- /dev/null
+++ b/hugegraph-cluster-test/hugegraph-clustertest-test/src/main/java/org/apache/hugegraph/MultiClusterTest/BaseMultiClusterTest.java
@@ -0,0 +1,64 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hugegraph.MultiClusterTest;
+
+import java.io.BufferedReader;
+import java.io.IOException;
+import java.io.InputStreamReader;
+
+import org.apache.hugegraph.ct.env.BaseEnv;
+import org.apache.hugegraph.ct.env.MultiNodeEnv;
+import org.junit.AfterClass;
+import org.junit.BeforeClass;
+
+/**
+ * The MultiNode test creates a cluster env with 3 PD nodes + 3 Store nodes + 3 Server nodes.
+ * A different number of nodes can be used via env = new MultiNodeEnv(pdNum, storeNum, serverNum).
+ * All nodes listen on randomly generated ports, and the node deployments are stored
+ * under /apache-hugegraph-ct-incubating-1.5.0, so each node can be accessed via its REST API.
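+ * <p>
+ * A minimal usage sketch (the calls below are declared in BaseEnv):
+ * <pre>
+ *     BaseEnv env = new MultiNodeEnv();   // or new MultiNodeEnv(pdNum, storeNum, serverNum)
+ *     env.startCluster();
+ *     List&lt;String&gt; serverAddrs = env.getServerRestAddrs();
+ *     env.stopCluster();
+ * </pre>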
+ */
+public class BaseMultiClusterTest {
+
+    protected static BaseEnv env;
+    protected static Process p;
+
+    @BeforeClass
+    public static void initEnv() {
+        env = new MultiNodeEnv();
+        env.startCluster();
+    }
+
+    @AfterClass
+    public static void clearEnv() {
+        env.stopCluster();
+    }
+
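+    // Runs the given command, captures its stdout and returns it as a single string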
+    protected String execCmd(String[] cmds) throws IOException {
+        ProcessBuilder process = new ProcessBuilder(cmds);
+        p = process.start();
+        BufferedReader reader = new BufferedReader(new InputStreamReader(p.getInputStream()));
+        StringBuilder builder = new StringBuilder();
+        String line;
+        while ((line = reader.readLine()) != null) {
+            builder.append(line);
+            builder.append(System.lineSeparator());
+        }
+        p.destroy();
+        return builder.toString();
+    }
+}
diff --git a/hugegraph-cluster-test/hugegraph-clustertest-test/src/main/java/org/apache/hugegraph/MultiClusterTest/MultiClusterDeployTest.java b/hugegraph-cluster-test/hugegraph-clustertest-test/src/main/java/org/apache/hugegraph/MultiClusterTest/MultiClusterDeployTest.java
new file mode 100644
index 000000000..0318df1ad
--- /dev/null
+++ b/hugegraph-cluster-test/hugegraph-clustertest-test/src/main/java/org/apache/hugegraph/MultiClusterTest/MultiClusterDeployTest.java
@@ -0,0 +1,203 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hugegraph.MultiClusterTest;
+
+import java.io.IOException;
+import java.util.Iterator;
+import java.util.List;
+
+import org.apache.hugegraph.driver.GraphManager;
+import org.apache.hugegraph.driver.GremlinManager;
+import org.apache.hugegraph.driver.HugeClient;
+import org.apache.hugegraph.driver.SchemaManager;
+import org.apache.hugegraph.pd.client.PDClient;
+import org.apache.hugegraph.pd.client.PDConfig;
+import org.apache.hugegraph.pd.common.PDException;
+import org.apache.hugegraph.structure.constant.T;
+import org.apache.hugegraph.structure.graph.Edge;
+import org.apache.hugegraph.structure.graph.Path;
+import org.apache.hugegraph.structure.graph.Vertex;
+import org.apache.hugegraph.structure.gremlin.Result;
+import org.apache.hugegraph.structure.gremlin.ResultSet;
+import org.junit.Assert;
+import org.junit.Test;
+
+public class MultiClusterDeployTest extends BaseMultiClusterTest {
+
+    @Test
+    public void testPDNodesDeployment() {
+        try {
+            List<String> addrs = env.getPDGrpcAddrs();
+            for (String addr : addrs) {
+                PDConfig pdConfig = PDConfig.of(addr);
+                PDClient pdClient = PDClient.create(pdConfig);
+                pdClient.dbCompaction();
+            }
+            assert true;
+        } catch (PDException e) {
+            assert false;
+        }
+    }
+
+    @Test
+    public void testStoreNodesDeployment() throws IOException {
+        List<String> addrs = env.getStoreRestAddrs();
+        for (String addr : addrs) {
+            String[] cmds = {"curl", addr};
+            // TODO: why not use the sb param?
+            StringBuilder sb = new StringBuilder();
+            for (String cmd : cmds) {
+                sb.append(cmd).append(" ");
+            }
+            String responseMsg = execCmd(cmds);
+            Assert.assertTrue(responseMsg.startsWith("{"));
+        }
+    }
+
+    @Test
+    public void testServerNodesDeployment() {
+        List<String> addrs = env.getServerRestAddrs();
+        for (String addr : addrs) {
+            HugeClient hugeClient = HugeClient.builder("http://"; + addr, 
"hugegraph")
+                                              .build();
+            SchemaManager schema = hugeClient.schema();
+
+            schema.propertyKey("name").asText().ifNotExist().create();
+            schema.propertyKey("age").asInt().ifNotExist().create();
+            schema.propertyKey("city").asText().ifNotExist().create();
+            schema.propertyKey("weight").asDouble().ifNotExist().create();
+            schema.propertyKey("lang").asText().ifNotExist().create();
+            schema.propertyKey("date").asDate().ifNotExist().create();
+            schema.propertyKey("price").asInt().ifNotExist().create();
+
+            schema.vertexLabel("person")
+                  .properties("name", "age", "city")
+                  .primaryKeys("name")
+                  .ifNotExist()
+                  .create();
+
+            schema.vertexLabel("software")
+                  .properties("name", "lang", "price")
+                  .primaryKeys("name")
+                  .ifNotExist()
+                  .create();
+
+            schema.indexLabel("personByCity")
+                  .onV("person")
+                  .by("city")
+                  .secondary()
+                  .ifNotExist()
+                  .create();
+
+            schema.indexLabel("personByAgeAndCity")
+                  .onV("person")
+                  .by("age", "city")
+                  .secondary()
+                  .ifNotExist()
+                  .create();
+
+            schema.indexLabel("softwareByPrice")
+                  .onV("software")
+                  .by("price")
+                  .range()
+                  .ifNotExist()
+                  .create();
+
+            schema.edgeLabel("knows")
+                  .sourceLabel("person")
+                  .targetLabel("person")
+                  .properties("date", "weight")
+                  .ifNotExist()
+                  .create();
+
+            schema.edgeLabel("created")
+                  .sourceLabel("person").targetLabel("software")
+                  .properties("date", "weight")
+                  .ifNotExist()
+                  .create();
+
+            schema.indexLabel("createdByDate")
+                  .onE("created")
+                  .by("date")
+                  .secondary()
+                  .ifNotExist()
+                  .create();
+
+            schema.indexLabel("createdByWeight")
+                  .onE("created")
+                  .by("weight")
+                  .range()
+                  .ifNotExist()
+                  .create();
+
+            schema.indexLabel("knowsByWeight")
+                  .onE("knows")
+                  .by("weight")
+                  .range()
+                  .ifNotExist()
+                  .create();
+
+            GraphManager graph = hugeClient.graph();
+            Vertex marko = graph.addVertex(T.LABEL, "person", "name", "marko",
+                                           "age", 29, "city", "Beijing");
+            Vertex vadas = graph.addVertex(T.LABEL, "person", "name", "vadas",
+                                           "age", 27, "city", "Hongkong");
+            Vertex lop = graph.addVertex(T.LABEL, "software", "name", "lop",
+                                         "lang", "java", "price", 328);
+            Vertex josh = graph.addVertex(T.LABEL, "person", "name", "josh",
+                                          "age", 32, "city", "Beijing");
+            Vertex ripple = graph.addVertex(T.LABEL, "software", "name", 
"ripple",
+                                            "lang", "java", "price", 199);
+            Vertex peter = graph.addVertex(T.LABEL, "person", "name", "peter",
+                                           "age", 35, "city", "Shanghai");
+
+            marko.addEdge("knows", vadas, "date", "2016-01-10", "weight", 0.5);
+            marko.addEdge("knows", josh, "date", "2013-02-20", "weight", 1.0);
+            marko.addEdge("created", lop, "date", "2017-12-10", "weight", 0.4);
+            josh.addEdge("created", lop, "date", "2009-11-11", "weight", 0.4);
+            josh.addEdge("created", ripple, "date", "2017-12-10", "weight", 
1.0);
+            peter.addEdge("created", lop, "date", "2017-03-24", "weight", 0.2);
+
+            GremlinManager gremlin = hugeClient.gremlin();
+            System.out.println("==== Path ====");
+            ResultSet resultSet = gremlin.gremlin("g.V().outE().path()").execute();
+            Iterator<Result> results = resultSet.iterator();
+            results.forEachRemaining(result -> {
+                System.out.println(result.getObject().getClass());
+                Object object = result.getObject();
+                if (object instanceof Vertex) {
+                    System.out.println(((Vertex) object).id());
+                } else if (object instanceof Edge) {
+                    System.out.println(((Edge) object).id());
+                } else if (object instanceof Path) {
+                    List<Object> elements = ((Path) object).objects();
+                    elements.forEach(element -> {
+                        System.out.println(element.getClass());
+                        System.out.println(element);
+                    });
+                } else {
+                    System.out.println(object);
+                }
+            });
+
+            hugeClient.close();
+            assert true;
+            break;
+        }
+    }
+}
diff --git a/hugegraph-cluster-test/hugegraph-clustertest-test/src/main/java/org/apache/hugegraph/MultiClusterTest/MultiClusterFileTest.java b/hugegraph-cluster-test/hugegraph-clustertest-test/src/main/java/org/apache/hugegraph/MultiClusterTest/MultiClusterFileTest.java
new file mode 100644
index 000000000..d74155ad1
--- /dev/null
+++ b/hugegraph-cluster-test/hugegraph-clustertest-test/src/main/java/org/apache/hugegraph/MultiClusterTest/MultiClusterFileTest.java
@@ -0,0 +1,47 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hugegraph.MultiClusterTest;
+
+import java.io.File;
+
+import org.junit.Assert;
+import org.junit.Test;
+
+public class MultiClusterFileTest extends BaseMultiClusterTest {
+
+    @Test
+    public void checkPDNodeDir() {
+        for (String nodeDir : env.getPDNodeDir()) {
+            Assert.assertTrue(new File(nodeDir).isDirectory());
+        }
+    }
+
+    @Test
+    public void checkStoreNodeDir() {
+        for (String nodeDir : env.getStoreNodeDir()) {
+            Assert.assertTrue(new File(nodeDir).isDirectory());
+        }
+    }
+
+    @Test
+    public void checkServerNodeDir() {
+        for (String nodeDir : env.getServerNodeDir()) {
+            Assert.assertTrue(new File(nodeDir).isDirectory());
+        }
+    }
+}
diff --git 
a/hugegraph-cluster-test/hugegraph-clustertest-test/src/main/java/org/apache/hugegraph/MultiClusterTest/MultiClusterSuiteTest.java
 
b/hugegraph-cluster-test/hugegraph-clustertest-test/src/main/java/org/apache/hugegraph/MultiClusterTest/MultiClusterSuiteTest.java
new file mode 100644
index 000000000..6e55cdd20
--- /dev/null
+++ b/hugegraph-cluster-test/hugegraph-clustertest-test/src/main/java/org/apache/hugegraph/MultiClusterTest/MultiClusterSuiteTest.java
@@ -0,0 +1,33 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hugegraph.MultiClusterTest;
+
+import org.junit.runner.RunWith;
+import org.junit.runners.Suite;
+
+import lombok.extern.slf4j.Slf4j;
+
+@RunWith(Suite.class)
[email protected]({
+        MultiClusterDeployTest.class,
+        MultiClusterFileTest.class,
+})
+@Slf4j
+public class MultiClusterSuiteTest {
+
+}
diff --git a/hugegraph-cluster-test/hugegraph-clustertest-test/src/main/java/org/apache/hugegraph/SimpleClusterTest/BaseSimpleTest.java b/hugegraph-cluster-test/hugegraph-clustertest-test/src/main/java/org/apache/hugegraph/SimpleClusterTest/BaseSimpleTest.java
new file mode 100644
index 000000000..61954de81
--- /dev/null
+++ b/hugegraph-cluster-test/hugegraph-clustertest-test/src/main/java/org/apache/hugegraph/SimpleClusterTest/BaseSimpleTest.java
@@ -0,0 +1,69 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hugegraph.SimpleClusterTest;
+
+import java.io.BufferedReader;
+import java.io.IOException;
+import java.io.InputStreamReader;
+
+import org.apache.hugegraph.ct.env.BaseEnv;
+import org.apache.hugegraph.ct.env.SimpleEnv;
+import org.apache.hugegraph.driver.HugeClient;
+import org.apache.hugegraph.pd.client.PDClient;
+import org.junit.AfterClass;
+import org.junit.BeforeClass;
+
+/**
+ * Simple Test generates the cluster env with 1 PD node + 1 Store node + 1 Server node.
+ * All nodes are deployed on randomly generated ports; the node applications are stored
+ * in /apache-hugegraph-ct-incubating-1.5.0, and each node can be accessed via its REST API.
+ */
+public class BaseSimpleTest {
+
+    protected static BaseEnv env;
+    protected static Process p;
+    protected static PDClient pdClient;
+    protected static HugeClient hugeClient;
+
+    @BeforeClass
+    public static void initEnv() {
+        env = new SimpleEnv();
+        env.startCluster();
+    }
+
+    @AfterClass
+    public static void clearEnv() throws InterruptedException {
+        env.stopCluster();
+        Thread.sleep(2000);
+    }
+
+    protected String execCmd(String[] cmds) throws IOException {
+        ProcessBuilder process = new ProcessBuilder(cmds);
+        p = process.start();
+        BufferedReader reader = new BufferedReader(new InputStreamReader(p.getInputStream()));
+        StringBuilder builder = new StringBuilder();
+        String line;
+        while ((line = reader.readLine()) != null) {
+            builder.append(line);
+            builder.append(System.lineSeparator());
+        }
+        p.destroy();
+        return builder.toString();
+    }
+
+}
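
For orientation, here is a minimal sketch of how a suite-specific test could build on the base class above, using only the helpers visible in this patch (env.getServerRestAddrs() and the curl-based execCmd()); the class name ApiSmokeTest and the /apis path are illustrative assumptions, not part of the commit:

    package org.apache.hugegraph.SimpleClusterTest;

    import java.io.IOException;

    import org.junit.Assert;
    import org.junit.Test;

    // Illustrative sketch only; relies on BaseSimpleTest starting the
    // 1 PD + 1 Store + 1 Server cluster in its @BeforeClass hook.
    public class ApiSmokeTest extends BaseSimpleTest {

        @Test
        public void serverRestEndpointsRespond() throws IOException {
            for (String addr : env.getServerRestAddrs()) {
                // Reuse the curl-based helper from the base class; "/apis" is an
                // assumed REST root path and may need adjusting per deployment.
                String response = execCmd(new String[]{"curl", "http://" + addr + "/apis"});
                Assert.assertFalse(response.isEmpty());
            }
        }
    }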
diff --git a/hugegraph-cluster-test/hugegraph-clustertest-test/src/main/java/org/apache/hugegraph/SimpleClusterTest/SimpleClusterDeployTest.java b/hugegraph-cluster-test/hugegraph-clustertest-test/src/main/java/org/apache/hugegraph/SimpleClusterTest/SimpleClusterDeployTest.java
new file mode 100644
index 000000000..61a73ff0f
--- /dev/null
+++ b/hugegraph-cluster-test/hugegraph-clustertest-test/src/main/java/org/apache/hugegraph/SimpleClusterTest/SimpleClusterDeployTest.java
@@ -0,0 +1,200 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hugegraph.SimpleClusterTest;
+
+import java.io.IOException;
+import java.util.Iterator;
+import java.util.List;
+
+import org.apache.hugegraph.driver.GraphManager;
+import org.apache.hugegraph.driver.GremlinManager;
+import org.apache.hugegraph.driver.HugeClient;
+import org.apache.hugegraph.driver.SchemaManager;
+import org.apache.hugegraph.pd.client.PDClient;
+import org.apache.hugegraph.pd.client.PDConfig;
+import org.apache.hugegraph.pd.common.PDException;
+import org.apache.hugegraph.structure.constant.T;
+import org.apache.hugegraph.structure.graph.Edge;
+import org.apache.hugegraph.structure.graph.Path;
+import org.apache.hugegraph.structure.graph.Vertex;
+import org.apache.hugegraph.structure.gremlin.Result;
+import org.apache.hugegraph.structure.gremlin.ResultSet;
+import org.junit.Assert;
+import org.junit.Test;
+
+public class SimpleClusterDeployTest extends BaseSimpleTest {
+
+    @Test
+    public void testPDNodesDeployment() {
+        try {
+            List<String> addrs = env.getPDGrpcAddrs();
+            for (String addr : addrs) {
+                PDConfig pdConfig = PDConfig.of(addr);
+                pdClient = PDClient.create(pdConfig);
+                pdClient.dbCompaction();
+            }
+            assert true;
+        } catch (PDException pdException) {
+            assert false;
+        }
+    }
+
+    @Test
+    public void testStoreNodesDeployment() throws IOException {
+        List<String> addrs = env.getStoreRestAddrs();
+        for (String addr : addrs) {
+            String[] cmds = {"curl", addr};
+            // TODO: what's the purpose of this?
+            StringBuilder sb = new StringBuilder();
+            for (String cmd : cmds) {
+                sb.append(cmd).append(" ");
+            }
+            String responseMsg = execCmd(cmds);
+            Assert.assertTrue(responseMsg.startsWith("{"));
+        }
+    }
+
+    @Test
+    public void testServerNodesDeployment() {
+        List<String> addrs = env.getServerRestAddrs();
+        for (String addr : addrs) {
+            hugeClient = HugeClient.builder("http://" + addr, "hugegraph").build();
+            SchemaManager schema = hugeClient.schema();
+
+            schema.propertyKey("name").asText().ifNotExist().create();
+            schema.propertyKey("age").asInt().ifNotExist().create();
+            schema.propertyKey("city").asText().ifNotExist().create();
+            schema.propertyKey("weight").asDouble().ifNotExist().create();
+            schema.propertyKey("lang").asText().ifNotExist().create();
+            schema.propertyKey("date").asDate().ifNotExist().create();
+            schema.propertyKey("price").asInt().ifNotExist().create();
+
+            schema.vertexLabel("person")
+                  .properties("name", "age", "city")
+                  .primaryKeys("name")
+                  .ifNotExist()
+                  .create();
+
+            schema.vertexLabel("software")
+                  .properties("name", "lang", "price")
+                  .primaryKeys("name")
+                  .ifNotExist()
+                  .create();
+
+            schema.indexLabel("personByCity")
+                  .onV("person")
+                  .by("city")
+                  .secondary()
+                  .ifNotExist()
+                  .create();
+
+            schema.indexLabel("personByAgeAndCity")
+                  .onV("person")
+                  .by("age", "city")
+                  .secondary()
+                  .ifNotExist()
+                  .create();
+
+            schema.indexLabel("softwareByPrice")
+                  .onV("software")
+                  .by("price")
+                  .range()
+                  .ifNotExist()
+                  .create();
+
+            schema.edgeLabel("knows")
+                  .sourceLabel("person")
+                  .targetLabel("person")
+                  .properties("date", "weight")
+                  .ifNotExist()
+                  .create();
+
+            schema.edgeLabel("created")
+                  .sourceLabel("person").targetLabel("software")
+                  .properties("date", "weight")
+                  .ifNotExist()
+                  .create();
+
+            schema.indexLabel("createdByDate")
+                  .onE("created")
+                  .by("date")
+                  .secondary()
+                  .ifNotExist()
+                  .create();
+
+            schema.indexLabel("createdByWeight")
+                  .onE("created")
+                  .by("weight")
+                  .range()
+                  .ifNotExist()
+                  .create();
+
+            schema.indexLabel("knowsByWeight")
+                  .onE("knows")
+                  .by("weight")
+                  .range()
+                  .ifNotExist()
+                  .create();
+
+            GraphManager graph = hugeClient.graph();
+            Vertex marko = graph.addVertex(T.LABEL, "person", "name", "marko",
+                                           "age", 29, "city", "Beijing");
+            Vertex vadas = graph.addVertex(T.LABEL, "person", "name", "vadas",
+                                           "age", 27, "city", "Hongkong");
+            Vertex lop = graph.addVertex(T.LABEL, "software", "name", "lop",
+                                         "lang", "java", "price", 328);
+            Vertex josh = graph.addVertex(T.LABEL, "person", "name", "josh",
+                                          "age", 32, "city", "Beijing");
+            Vertex ripple = graph.addVertex(T.LABEL, "software", "name", "ripple",
+                                            "lang", "java", "price", 199);
+                                            "lang", "java", "price", 199);
+            Vertex peter = graph.addVertex(T.LABEL, "person", "name", "peter",
+                                           "age", 35, "city", "Shanghai");
+
+            marko.addEdge("knows", vadas, "date", "2016-01-10", "weight", 0.5);
+            marko.addEdge("knows", josh, "date", "2013-02-20", "weight", 1.0);
+            marko.addEdge("created", lop, "date", "2017-12-10", "weight", 0.4);
+            josh.addEdge("created", lop, "date", "2009-11-11", "weight", 0.4);
+            josh.addEdge("created", ripple, "date", "2017-12-10", "weight", 1.0);
+            peter.addEdge("created", lop, "date", "2017-03-24", "weight", 0.2);
+
+            GremlinManager gremlin = hugeClient.gremlin();
+            System.out.println("==== Path ====");
+            ResultSet resultSet = gremlin.gremlin("g.V().outE().path()").execute();
+            Iterator<Result> results = resultSet.iterator();
+            results.forEachRemaining(result -> {
+                System.out.println(result.getObject().getClass());
+                Object object = result.getObject();
+                if (object instanceof Vertex) {
+                    System.out.println(((Vertex) object).id());
+                } else if (object instanceof Edge) {
+                    System.out.println(((Edge) object).id());
+                } else if (object instanceof Path) {
+                    List<Object> elements = ((Path) object).objects();
+                    elements.forEach(element -> {
+                        System.out.println(element.getClass());
+                        System.out.println(element);
+                    });
+                } else {
+                    System.out.println(object);
+                }
+            });
+
+            hugeClient.close();
+        }
+    }
+}
diff --git a/hugegraph-cluster-test/hugegraph-clustertest-test/src/main/java/org/apache/hugegraph/SimpleClusterTest/SimpleClusterFileTest.java b/hugegraph-cluster-test/hugegraph-clustertest-test/src/main/java/org/apache/hugegraph/SimpleClusterTest/SimpleClusterFileTest.java
new file mode 100644
index 000000000..1cae2bcdb
--- /dev/null
+++ b/hugegraph-cluster-test/hugegraph-clustertest-test/src/main/java/org/apache/hugegraph/SimpleClusterTest/SimpleClusterFileTest.java
@@ -0,0 +1,48 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hugegraph.SimpleClusterTest;
+
+import java.io.File;
+
+import org.junit.Assert;
+import org.junit.Test;
+
+public class SimpleClusterFileTest extends BaseSimpleTest {
+
+    @Test
+    public void checkPDNodeDir() {
+        for (String nodeDir : env.getPDNodeDir()) {
+            Assert.assertTrue(new File(nodeDir).isDirectory());
+        }
+    }
+
+    @Test
+    public void checkStoreNodeDir() {
+        for (String nodeDir : env.getStoreNodeDir()) {
+            Assert.assertTrue(new File(nodeDir).isDirectory());
+        }
+    }
+
+    @Test
+    public void checkServerNodeDir() {
+        for (String nodeDir : env.getServerNodeDir()) {
+            Assert.assertTrue(new File(nodeDir).isDirectory());
+        }
+    }
+
+}
diff --git a/hugegraph-cluster-test/hugegraph-clustertest-test/src/main/java/org/apache/hugegraph/SimpleClusterTest/SimpleClusterSuiteTest.java b/hugegraph-cluster-test/hugegraph-clustertest-test/src/main/java/org/apache/hugegraph/SimpleClusterTest/SimpleClusterSuiteTest.java
new file mode 100644
index 000000000..7f24d8b46
--- /dev/null
+++ b/hugegraph-cluster-test/hugegraph-clustertest-test/src/main/java/org/apache/hugegraph/SimpleClusterTest/SimpleClusterSuiteTest.java
@@ -0,0 +1,33 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hugegraph.SimpleClusterTest;
+
+import org.junit.runner.RunWith;
+import org.junit.runners.Suite;
+
+import lombok.extern.slf4j.Slf4j;
+
+@RunWith(Suite.class)
[email protected]({
+        SimpleClusterDeployTest.class,
+        SimpleClusterFileTest.class,
+})
+@Slf4j
+public class SimpleClusterSuiteTest {
+
+}
diff --git a/hugegraph-cluster-test/pom.xml b/hugegraph-cluster-test/pom.xml
new file mode 100644
index 000000000..fcc409d6a
--- /dev/null
+++ b/hugegraph-cluster-test/pom.xml
@@ -0,0 +1,139 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!--
+  ~ Licensed to the Apache Software Foundation (ASF) under one or more
+  ~ contributor license agreements.  See the NOTICE file distributed with
+  ~ this work for additional information regarding copyright ownership.
+  ~ The ASF licenses this file to You under the Apache License, Version 2.0
+  ~ (the "License"); you may not use this file except in compliance with
+  ~ the License.  You may obtain a copy of the License at
+  ~
+  ~     http://www.apache.org/licenses/LICENSE-2.0
+  ~
+  ~ Unless required by applicable law or agreed to in writing, software
+  ~ distributed under the License is distributed on an "AS IS" BASIS,
+  ~ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+  ~ See the License for the specific language governing permissions and
+  ~ limitations under the License.
+  -->
+
+<project xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance";
+         xmlns="http://maven.apache.org/POM/4.0.0";
+         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
+    <modelVersion>4.0.0</modelVersion>
+
+    <artifactId>hugegraph-cluster-test</artifactId>
+    <version>${revision}</version>
+    <packaging>pom</packaging>
+
+    <parent>
+        <groupId>org.apache.hugegraph</groupId>
+        <artifactId>hugegraph</artifactId>
+        <version>${revision}</version>
+        <relativePath>../pom.xml</relativePath>
+    </parent>
+
+    <modules>
+        <module>hugegraph-clustertest-minicluster</module>
+        <module>hugegraph-clustertest-dist</module>
+        <module>hugegraph-clustertest-test</module>
+    </modules>
+
+    <properties>
+        <maven.compiler.source>11</maven.compiler.source>
+        <maven.compiler.target>11</maven.compiler.target>
+        <project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
+        <final.name>apache-${release.name}-ct-incubating-${project.version}</final.name>
+    </properties>
+
+    <dependencies>
+        <dependency>
+            <groupId>com.google.code.findbugs</groupId>
+            <artifactId>jsr305</artifactId>
+            <version>3.0.2</version>
+        </dependency>
+        <dependency>
+            <groupId>junit</groupId>
+            <artifactId>junit</artifactId>
+            <!-- TODO: we should refactor/use junit 5+ later -->
+            <version>4.13.2</version>
+        </dependency>
+    </dependencies>
+
+    <build>
+        <plugins>
+            <plugin>
+                <groupId>org.apache.maven.plugins</groupId>
+                <artifactId>maven-clean-plugin</artifactId>
+                <configuration>
+                    <filesets>
+                        <fileset>
+                            <directory>${project.basedir}/</directory>
+                            <includes>
+                                <include>*.tar</include>
+                                <include>*.tar.gz</include>
+                                <include>.flattened-pom.xml</include>
+                                <include>${final.name}/**</include>
+                            </includes>
+                            <followSymlinks>false</followSymlinks>
+                        </fileset>
+                        <fileset>
+                            <directory>${final.name}</directory>
+                        </fileset>
+                    </filesets>
+                </configuration>
+            </plugin>
+        </plugins>
+    </build>
+
+    <profiles>
+        <profile>
+            <id>simple-cluster-test</id>
+            <activation>
+                <activeByDefault>true</activeByDefault>
+            </activation>
+            <build>
+                <plugins>
+                    <plugin>
+                        <groupId>org.apache.maven.plugins</groupId>
+                        <artifactId>maven-surefire-plugin</artifactId>
+                        <version>2.20</version>
+                        <executions>
+                            <execution>
+                                <id>simple-cluster-test</id>
+                                <goals>
+                                    <goal>test</goal>
+                                </goals>
+                                <phase>test</phase>
+                            </execution>
+                        </executions>
+                    </plugin>
+                </plugins>
+            </build>
+        </profile>
+        <profile>
+            <id>multi-cluster-test</id>
+            <activation>
+                <activeByDefault>true</activeByDefault>
+            </activation>
+            <build>
+                <plugins>
+                    <plugin>
+                        <groupId>org.apache.maven.plugins</groupId>
+                        <artifactId>maven-surefire-plugin</artifactId>
+                        <version>2.20</version>
+                        <executions>
+                            <execution>
+                                <id>multi-cluster-test</id>
+                                <goals>
+                                    <goal>test</goal>
+                                </goals>
+                                <phase>test</phase>
+                            </execution>
+                        </executions>
+                    </plugin>
+                </plugins>
+            </build>
+        </profile>
+    </profiles>
+
+</project>
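
A note on the two profiles above: both simple-cluster-test and multi-cluster-test are marked activeByDefault, so a plain mvn test in this module binds both surefire executions; either suite can also be requested explicitly through Maven's standard profile switch, for example mvn test -P multi-cluster-test (that switch is stock Maven behavior, not something this patch adds).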
diff --git a/install-dist/scripts/dependency/known-dependencies.txt b/install-dist/scripts/dependency/known-dependencies.txt
index 5b8ab3f5a..02b5dda11 100644
--- a/install-dist/scripts/dependency/known-dependencies.txt
+++ b/install-dist/scripts/dependency/known-dependencies.txt
@@ -1,7 +1,3 @@
-HdrHistogram-2.1.12.jar
-HdrHistogram-2.1.9.jar
-LatencyUtils-2.0.3.jar
-ST4-4.0.8.jar
 accessors-smart-1.2.jar
 airline-0.8.jar
 android-json-0.0.20131108.vaadin1.jar
@@ -61,11 +57,13 @@ commons-collections4-4.4.jar
 commons-compress-1.21.jar
 commons-configuration-1.10.jar
 commons-configuration2-2.8.0.jar
+commons-io-2.12.0.jar
 commons-io-2.7.jar
 commons-io-2.8.0.jar
 commons-lang-2.6.jar
 commons-lang3-3.11.jar
 commons-lang3-3.12.0.jar
+commons-lang3-3.13.0.jar
 commons-logging-1.1.1.jar
 commons-logging-1.2.jar
 commons-math3-3.2.jar
@@ -144,6 +142,8 @@ hamcrest-2.2.jar
 hamcrest-core-1.3.jar
 hanlp-portable-1.8.3.jar
 hbase-shaded-endpoint-2.0.6.jar
+HdrHistogram-2.1.12.jar
+HdrHistogram-2.1.9.jar
 hessian-3.3.6.jar
 hessian-3.3.7.jar
 hg-pd-client-1.5.0.jar
@@ -214,12 +214,12 @@ javassist-3.21.0-GA.jar
 javassist-3.24.0-GA.jar
 javassist-3.28.0-GA.jar
 javatuples-1.2.jar
-javax-websocket-client-impl-9.4.46.v20220331.jar
-javax-websocket-server-impl-9.4.46.v20220331.jar
 javax.activation-api-1.2.0.jar
 javax.annotation-api-1.3.2.jar
 javax.inject-1.jar
 javax.json-1.0.jar
+javax-websocket-client-impl-9.4.46.v20220331.jar
+javax-websocket-server-impl-9.4.46.v20220331.jar
 jaxb-api-2.3.1.jar
 jaxb-core-3.0.2.jar
 jaxb-impl-3.0.2.jar
@@ -262,8 +262,8 @@ jetty-util-9.4.46.v20220331.jar
 jetty-util-ajax-9.4.46.v20220331.jar
 jetty-webapp-9.4.46.v20220331.jar
 jetty-xml-9.4.46.v20220331.jar
-jffi-1.2.16-native.jar
 jffi-1.2.16.jar
+jffi-1.2.16-native.jar
 jflex-1.8.2.jar
 jieba-analysis-1.0.2.jar
 jjwt-api-0.11.5.jar
@@ -280,10 +280,10 @@ jraft-core-1.3.11.jar
 jraft-core-1.3.13.jar
 jraft-core-1.3.9.jar
 json-20210307.jar
+jsonassert-1.5.0.jar
 json-path-2.5.0.jar
 json-simple-1.1.jar
 json-smart-2.3.jar
-jsonassert-1.5.0.jar
 jsr305-3.0.1.jar
 jsr305-3.0.2.jar
 jul-to-slf4j-1.7.36.jar
@@ -314,6 +314,7 @@ kotlin-stdlib-1.6.20.jar
 kotlin-stdlib-common-1.5.31.jar
 kotlin-stdlib-jdk7-1.6.10.jar
 kotlin-stdlib-jdk8-1.6.10.jar
+LatencyUtils-2.0.3.jar
 listenablefuture-9999.0-empty-to-avoid-conflict-with-guava.jar
 log4j-api-2.15.0.jar
 log4j-api-2.17.0.jar
@@ -331,6 +332,7 @@ log4j-slf4j-impl-2.17.0.jar
 log4j-slf4j-impl-2.17.1.jar
 log4j-slf4j-impl-2.18.0.jar
 logging-interceptor-4.10.0.jar
+lombok-1.18.24.jar
 lookout-api-1.4.1.jar
 lucene-analyzers-common-8.11.2.jar
 lucene-analyzers-smartcn-8.11.2.jar
@@ -338,6 +340,7 @@ lucene-core-8.11.2.jar
 lucene-queries-4.7.2.jar
 lucene-queryparser-4.7.2.jar
 lucene-sandbox-4.7.2.jar
+lz4-java-1.4.0.jar
 lz4-java-1.8.0.jar
 metrics-annotation-4.2.4.jar
 metrics-core-3.0.2.jar
@@ -361,10 +364,10 @@ netty-buffer-4.1.52.Final.jar
 netty-buffer-4.1.72.Final.jar
 netty-codec-4.1.52.Final.jar
 netty-codec-4.1.72.Final.jar
-netty-codec-http-4.1.52.Final.jar
-netty-codec-http-4.1.72.Final.jar
 netty-codec-http2-4.1.52.Final.jar
 netty-codec-http2-4.1.72.Final.jar
+netty-codec-http-4.1.52.Final.jar
+netty-codec-http-4.1.72.Final.jar
 netty-codec-socks-4.1.52.Final.jar
 netty-codec-socks-4.1.72.Final.jar
 netty-common-4.1.52.Final.jar
@@ -412,20 +415,20 @@ powermock-module-junit4-2.0.0-RC.3.jar
 powermock-module-junit4-common-2.0.0-RC.3.jar
 powermock-module-junit4-rule-2.0.0-RC.3.jar
 powermock-reflect-2.0.0-RC.3.jar
-proto-google-common-protos-1.17.0.jar
-proto-google-common-protos-2.0.1.jar
 protobuf-java-3.11.0.jar
 protobuf-java-3.17.2.jar
 protobuf-java-3.21.7.jar
 protobuf-java-3.5.1.jar
 protobuf-java-util-3.17.2.jar
+proto-google-common-protos-1.17.0.jar
+proto-google-common-protos-2.0.1.jar
 protostuff-api-1.6.0.jar
 protostuff-collectionschema-1.6.0.jar
 protostuff-core-1.6.0.jar
 protostuff-runtime-1.6.0.jar
 psjava-0.1.19.jar
-reporter-config-base-3.0.3.jar
 reporter-config3-3.0.3.jar
+reporter-config-base-3.0.3.jar
 rewriting-9.0-9.0.20190305.jar
 rocksdbjni-6.29.5.jar
 rocksdbjni-7.2.2.jar
@@ -442,9 +445,9 @@ sjk-cli-0.22.jar
 sjk-core-0.14.jar
 sjk-core-0.22.jar
 sjk-hflame-0.22.jar
-sjk-jfr-standalone-0.7.jar
 sjk-jfr5-0.5.jar
 sjk-jfr6-0.7.jar
+sjk-jfr-standalone-0.7.jar
 sjk-json-0.14.jar
 sjk-json-0.22.jar
 sjk-nps-0.9.jar
@@ -453,6 +456,7 @@ sjk-stacktrace-0.22.jar
 slf4j-api-1.7.21.jar
 slf4j-api-1.7.25.jar
 slf4j-api-1.7.32.jar
+slf4j-api-2.0.9.jar
 snakeyaml-1.18.jar
 snakeyaml-1.26.jar
 snakeyaml-1.27.jar
@@ -487,6 +491,7 @@ spring-expression-5.3.20.jar
 spring-jcl-5.3.20.jar
 spring-web-5.3.20.jar
 spring-webmvc-5.3.20.jar
+ST4-4.0.8.jar
 stream-2.5.2.jar
 swagger-annotations-1.5.18.jar
 swagger-annotations-jakarta-2.2.18.jar
diff --git a/pom.xml b/pom.xml
index c98827745..1fa07660e 100644
--- a/pom.xml
+++ b/pom.xml
@@ -94,6 +94,7 @@
         <maven.compiler.target>11</maven.compiler.target>
         <project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
         <shell-executable>bash</shell-executable>
+        <toolchain.vision>1.5.0</toolchain.vision>
     </properties>
 
     <modules>
@@ -102,6 +103,7 @@
         <module>hugegraph-store</module>
         <module>hugegraph-commons</module>
         <module>install-dist</module>
+        <module>hugegraph-cluster-test</module>
     </modules>
 
     <dependencyManagement>

