This is an automated email from the ASF dual-hosted git repository.
lzljs3620320 pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/paimon.git
The following commit(s) were added to refs/heads/master by this push:
new 53dcd9efee [ci] Add no-transfer-progress maven option to reduce verbose download log (#7070)
53dcd9efee is described below
commit 53dcd9efee68f0d92c71894912887d0c6a8cd309
Author: Zouxxyy <[email protected]>
AuthorDate: Sat Jan 17 20:37:42 2026 +0800
[ci] Add no-transfer-progress maven option to reduce verbose download log (#7070)
---
.github/workflows/check-licensing.yml | 4 ++--
.github/workflows/e2e-tests-flink-1.x.yml | 6 +++---
.github/workflows/e2e-tests-flink-2.x-jdk11.yml | 6 +++---
.github/workflows/paimon-python-checks.yml | 4 ++--
.github/workflows/publish-faiss_snapshot.yml | 6 +++---
.github/workflows/publish_snapshot-jdk17.yml | 4 ++--
.github/workflows/publish_snapshot.yml | 4 ++--
.github/workflows/utitcase-flink-1.x-common.yml | 4 ++--
.github/workflows/utitcase-flink-1.x-others.yml | 4 ++--
.github/workflows/utitcase-flink-2.x-jdk11.yml | 4 ++--
.github/workflows/utitcase-jdk11.yml | 4 ++--
.github/workflows/utitcase-spark-3.x.yml | 4 ++--
.github/workflows/utitcase-spark-4.x.yml | 4 ++--
.github/workflows/utitcase.yml | 4 ++--
tools/ci/sonar_check.sh | 2 +-
tools/releasing/deploy_staging_jars.sh | 2 +-
tools/releasing/deploy_staging_jars_for_jdk11.sh | 4 ++--
tools/releasing/deploy_staging_jars_for_jdk17.sh | 4 ++--
tools/releasing/update_branch_version.sh | 2 +-
19 files changed, 38 insertions(+), 38 deletions(-)
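The -ntp flag is the short form of Maven's --no-transfer-progress option (available since Maven 3.6.1); it suppresses the per-artifact download and upload progress lines while still printing warnings and errors. A minimal sketch of a CI build step using it, mirroring the changes below (the step name here is illustrative, not taken from a specific workflow):

    - name: Build
      run: mvn -T 2C -B -ntp clean install -DskipTests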
diff --git a/.github/workflows/check-licensing.yml b/.github/workflows/check-licensing.yml
index 59b26cc24b..0b60667968 100644
--- a/.github/workflows/check-licensing.yml
+++ b/.github/workflows/check-licensing.yml
@@ -51,13 +51,13 @@ jobs:
run: |
set -o pipefail
- mvn clean deploy ${{ env.MVN_COMMON_OPTIONS }} -DskipTests \
+ mvn clean deploy ${{ env.MVN_COMMON_OPTIONS }} -ntp -DskipTests \
-DaltDeploymentRepository=validation_repository::default::file:${{ env.MVN_VALIDATION_DIR }} \
| tee ${{ env.MVN_BUILD_OUTPUT_FILE }}
- name: Check licensing
run: |
- mvn ${{ env.MVN_COMMON_OPTIONS }} exec:java@check-licensing -N \
+ mvn ${{ env.MVN_COMMON_OPTIONS }} -ntp exec:java@check-licensing -N \
-Dexec.args="${{ env.MVN_BUILD_OUTPUT_FILE }} $(pwd) ${{ env.MVN_VALIDATION_DIR }}" \
-Dlog4j.configurationFile=file://$(pwd)/tools/ci/log4j.properties
env:
diff --git a/.github/workflows/e2e-tests-flink-1.x.yml b/.github/workflows/e2e-tests-flink-1.x.yml
index 63e6c368cc..476b17ec3e 100644
--- a/.github/workflows/e2e-tests-flink-1.x.yml
+++ b/.github/workflows/e2e-tests-flink-1.x.yml
@@ -56,7 +56,7 @@ jobs:
distribution: 'temurin'
- name: Build Flink
- run: mvn -T 2C -B clean install -DskipTests -Pflink1,spark3 -pl paimon-e2e-tests -am -Pflink-${{ matrix.flink_version }}
+ run: mvn -T 2C -B -ntp clean install -DskipTests -Pflink1,spark3 -pl paimon-e2e-tests -am -Pflink-${{ matrix.flink_version }}
- name: Test Flink
run: |
@@ -66,9 +66,9 @@ jobs:
echo "JVM timezone is set to $jvm_timezone"
profile="flink-${{ matrix.flink_version }}"
if [ "${{ matrix.flink_version }}" = "${{ matrix.flink_version[-1]
}}" ]; then
- mvn -T 1C -B test -Pflink1,spark3 -pl paimon-e2e-tests
-Duser.timezone=$jvm_timezone
+ mvn -T 1C -B -ntp test -Pflink1,spark3 -pl paimon-e2e-tests
-Duser.timezone=$jvm_timezone
else
- mvn -T 1C -B test -Pflink1,spark3 -pl paimon-e2e-tests -Duser.timezone=$jvm_timezone -P${profile}
+ mvn -T 1C -B -ntp test -Pflink1,spark3 -pl paimon-e2e-tests -Duser.timezone=$jvm_timezone -P${profile}
fi
env:
MAVEN_OPTS: -Xmx4096m
diff --git a/.github/workflows/e2e-tests-flink-2.x-jdk11.yml b/.github/workflows/e2e-tests-flink-2.x-jdk11.yml
index 0e1fdda96b..941fcc0e86 100644
--- a/.github/workflows/e2e-tests-flink-2.x-jdk11.yml
+++ b/.github/workflows/e2e-tests-flink-2.x-jdk11.yml
@@ -53,7 +53,7 @@ jobs:
- name: Build Flink
run: |
- mvn -T 2C -B clean install -DskipTests -Pflink2,spark3 -pl paimon-e2e-tests -am -Pflink-${{ matrix.flink_version }},java11
+ mvn -T 2C -B -ntp clean install -DskipTests -Pflink2,spark3 -pl paimon-e2e-tests -am -Pflink-${{ matrix.flink_version }},java11
- name: Test Flink
run: |
@@ -63,9 +63,9 @@ jobs:
echo "JVM timezone is set to $jvm_timezone"
profile="flink-${{ matrix.flink_version }}"
if [ "${{ matrix.flink_version }}" = "${{ matrix.flink_version[-1]
}}" ]; then
- mvn -T 1C -B test -Pflink2,spark3 -pl paimon-e2e-tests
-Duser.timezone=$jvm_timezone -Pjava11
+ mvn -T 1C -B -ntp test -Pflink2,spark3 -pl paimon-e2e-tests
-Duser.timezone=$jvm_timezone -Pjava11
else
- mvn -T 1C -B test -Pflink2,spark3 -pl paimon-e2e-tests -Duser.timezone=$jvm_timezone -P${profile},java11
+ mvn -T 1C -B -ntp test -Pflink2,spark3 -pl paimon-e2e-tests -Duser.timezone=$jvm_timezone -P${profile},java11
fi
env:
MAVEN_OPTS: -Xmx4096m
\ No newline at end of file
diff --git a/.github/workflows/paimon-python-checks.yml b/.github/workflows/paimon-python-checks.yml
index e08997f515..63fed5e58a 100755
--- a/.github/workflows/paimon-python-checks.yml
+++ b/.github/workflows/paimon-python-checks.yml
@@ -120,12 +120,12 @@ jobs:
- name: Build Java
run: |
echo "Start compiling modules"
- mvn -T 2C -B clean install -DskipTests
+ mvn -T 2C -B -ntp clean install -DskipTests
- name: Build Java Paimon Faiss
run: |
echo "Start compiling modules"
- mvn -T 2C -B clean install -DskipTests -Ppaimon-faiss
+ mvn -T 2C -B -ntp clean install -DskipTests -Ppaimon-faiss
- name: Install Python dependencies
shell: bash
diff --git a/.github/workflows/publish-faiss_snapshot.yml b/.github/workflows/publish-faiss_snapshot.yml
index 3162f737cd..dad7a2fa0e 100644
--- a/.github/workflows/publish-faiss_snapshot.yml
+++ b/.github/workflows/publish-faiss_snapshot.yml
@@ -162,11 +162,11 @@ jobs:
- name: Build and package paimon-faiss-jni
run: |
- mvn -B clean install -pl paimon-faiss/paimon-faiss-jni -am -DskipTests -Ppaimon-faiss -Drat.skip
+ mvn -B -ntp clean install -pl paimon-faiss/paimon-faiss-jni -am -DskipTests -Ppaimon-faiss -Drat.skip
- name: Build and package paimon-faiss-index
run: |
- mvn -B clean install -pl paimon-faiss/paimon-faiss-index -am -DskipTests -Ppaimon-faiss -Drat.skip
+ mvn -B -ntp clean install -pl paimon-faiss/paimon-faiss-index -am -DskipTests -Ppaimon-faiss -Drat.skip
- name: Publish snapshot
env:
@@ -180,7 +180,7 @@ jobs:
echo "<password>$ASF_PASSWORD</password>" >> $tmp_settings
echo "</server></servers></settings>" >> $tmp_settings
- mvn --settings $tmp_settings deploy -pl paimon-faiss/paimon-faiss-jni,paimon-faiss/paimon-faiss-index -Dgpg.skip -Drat.skip -DskipTests -Ppaimon-faiss
+ mvn --settings $tmp_settings -ntp deploy -pl paimon-faiss/paimon-faiss-jni,paimon-faiss/paimon-faiss-index -Dgpg.skip -Drat.skip -DskipTests -Ppaimon-faiss
rm $tmp_settings
diff --git a/.github/workflows/publish_snapshot-jdk17.yml b/.github/workflows/publish_snapshot-jdk17.yml
index 7aaecdf101..eb22a4c0f4 100644
--- a/.github/workflows/publish_snapshot-jdk17.yml
+++ b/.github/workflows/publish_snapshot-jdk17.yml
@@ -63,8 +63,8 @@ jobs:
echo "<password>$ASF_PASSWORD</password>" >> $tmp_settings
echo "</server></servers></settings>" >> $tmp_settings
- mvn --settings $tmp_settings clean install -Dgpg.skip -Drat.skip -DskipTests -Papache-release,spark4,flink1,paimon-lucene -pl org.apache.paimon:paimon-spark-4.0_2.13 -am
+ mvn --settings $tmp_settings -ntp clean install -Dgpg.skip -Drat.skip -DskipTests -Papache-release,spark4,flink1,paimon-lucene -pl org.apache.paimon:paimon-spark-4.0_2.13 -am
# skip deploy paimon-spark-common_2.13 since they are already deployed in publish-snapshot.yml
- mvn --settings $tmp_settings clean deploy -Dgpg.skip -Drat.skip -DskipTests -Papache-release,spark4,flink1,paimon-lucene -pl org.apache.paimon:paimon-spark4-common_2.13,org.apache.paimon:paimon-spark-ut_2.13,org.apache.paimon:paimon-spark-4.0_2.13
+ mvn --settings $tmp_settings -ntp clean deploy -Dgpg.skip -Drat.skip -DskipTests -Papache-release,spark4,flink1,paimon-lucene -pl org.apache.paimon:paimon-spark4-common_2.13,org.apache.paimon:paimon-spark-ut_2.13,org.apache.paimon:paimon-spark-4.0_2.13
rm $tmp_settings
diff --git a/.github/workflows/publish_snapshot.yml b/.github/workflows/publish_snapshot.yml
index 95b5559400..df3f7e80f9 100644
--- a/.github/workflows/publish_snapshot.yml
+++ b/.github/workflows/publish_snapshot.yml
@@ -65,8 +65,8 @@ jobs:
echo "<password>$ASF_PASSWORD</password>" >> $tmp_settings
echo "</server></servers></settings>" >> $tmp_settings
- mvn --settings $tmp_settings clean deploy -Dgpg.skip -Drat.skip -DskipTests -Papache-release,spark3,flink1 -pl '!paimon-faiss/paimon-faiss-jni,!paimon-faiss/paimon-faiss-index'
+ mvn --settings $tmp_settings -ntp clean deploy -Dgpg.skip -Drat.skip -DskipTests -Papache-release,spark3,flink1 -pl '!paimon-faiss/paimon-faiss-jni,!paimon-faiss/paimon-faiss-index'
# deploy for scala 2.13
- mvn --settings $tmp_settings clean deploy -Dgpg.skip -Drat.skip -DskipTests -Papache-release,spark3,scala-2.13,flink1 -pl org.apache.paimon:paimon-spark-common_2.13,org.apache.paimon:paimon-spark3-common_2.13,org.apache.paimon:paimon-spark-3.2_2.13,org.apache.paimon:paimon-spark-3.3_2.13,org.apache.paimon:paimon-spark-3.4_2.13,org.apache.paimon:paimon-spark-3.5_2.13
+ mvn --settings $tmp_settings -ntp clean deploy -Dgpg.skip -Drat.skip -DskipTests -Papache-release,spark3,scala-2.13,flink1 -pl org.apache.paimon:paimon-spark-common_2.13,org.apache.paimon:paimon-spark3-common_2.13,org.apache.paimon:paimon-spark-3.2_2.13,org.apache.paimon:paimon-spark-3.3_2.13,org.apache.paimon:paimon-spark-3.4_2.13,org.apache.paimon:paimon-spark-3.5_2.13
rm $tmp_settings
diff --git a/.github/workflows/utitcase-flink-1.x-common.yml b/.github/workflows/utitcase-flink-1.x-common.yml
index c41cdbfd70..03a6d1e815 100644
--- a/.github/workflows/utitcase-flink-1.x-common.yml
+++ b/.github/workflows/utitcase-flink-1.x-common.yml
@@ -54,7 +54,7 @@ jobs:
run: |
COMPILE_MODULE="org.apache.paimon:paimon-flink-common"
echo "Start compiling modules: $COMPILE_MODULE"
- mvn -T 2C -B clean install -DskipTests -Pflink1,spark3 -pl "${COMPILE_MODULE}" -am
+ mvn -T 2C -B -ntp clean install -DskipTests -Pflink1,spark3 -pl "${COMPILE_MODULE}" -am
- name: Test Flink
run: |
@@ -63,7 +63,7 @@ jobs:
echo "JVM timezone is set to $jvm_timezone"
TEST_MODULE="org.apache.paimon:paimon-flink-common"
echo "Start testing module: $TEST_MODULE"
- mvn -T 2C -B test verify -Pflink1,spark3 -pl "${TEST_MODULE}" -Duser.timezone=$jvm_timezone
+ mvn -T 2C -B -ntp test verify -Pflink1,spark3 -pl "${TEST_MODULE}" -Duser.timezone=$jvm_timezone
echo "All modules tested"
env:
MAVEN_OPTS: -Xmx4096m -XX:+UseG1GC -XX:CICompilerCount=2
\ No newline at end of file
diff --git a/.github/workflows/utitcase-flink-1.x-others.yml b/.github/workflows/utitcase-flink-1.x-others.yml
index e5a4036a73..3c21037c68 100644
--- a/.github/workflows/utitcase-flink-1.x-others.yml
+++ b/.github/workflows/utitcase-flink-1.x-others.yml
@@ -52,7 +52,7 @@ jobs:
- name: Build Flink
run: |
- mvn -T 2C -B clean install -DskipTests -Pflink1,spark3
+ mvn -T 2C -B -ntp clean install -DskipTests -Pflink1,spark3
- name: Test Flink
run: |
@@ -64,6 +64,6 @@ jobs:
test_modules+="org.apache.paimon:paimon-flink-${suffix},"
done
test_modules="${test_modules%,}"
- mvn -T 2C -B test verify -Pflink1,spark3 -pl "${test_modules}" -Duser.timezone=$jvm_timezone
+ mvn -T 2C -B -ntp test verify -Pflink1,spark3 -pl "${test_modules}" -Duser.timezone=$jvm_timezone
env:
MAVEN_OPTS: -Xmx4096m -XX:+UseG1GC -XX:CICompilerCount=2
\ No newline at end of file
diff --git a/.github/workflows/utitcase-flink-2.x-jdk11.yml b/.github/workflows/utitcase-flink-2.x-jdk11.yml
index 47e1321834..4be7d4ae62 100644
--- a/.github/workflows/utitcase-flink-2.x-jdk11.yml
+++ b/.github/workflows/utitcase-flink-2.x-jdk11.yml
@@ -48,7 +48,7 @@ jobs:
- name: Build Flink
run: |
- mvn -T 2C -B clean install -DskipTests -Pflink2,spark3
+ mvn -T 2C -B -ntp clean install -DskipTests -Pflink2,spark3
- name: Test Flink
run: |
@@ -61,6 +61,6 @@ jobs:
test_modules+="org.apache.paimon:paimon-flink-${suffix},"
done
test_modules="${test_modules%,}"
- mvn -T 2C -B test verify -Pflink2,spark3 -pl "${test_modules}" -Duser.timezone=$jvm_timezone
+ mvn -T 2C -B -ntp test verify -Pflink2,spark3 -pl "${test_modules}" -Duser.timezone=$jvm_timezone
env:
MAVEN_OPTS: -Xmx4096m
\ No newline at end of file
diff --git a/.github/workflows/utitcase-jdk11.yml b/.github/workflows/utitcase-jdk11.yml
index 7abffd5958..a5db2019a3 100644
--- a/.github/workflows/utitcase-jdk11.yml
+++ b/.github/workflows/utitcase-jdk11.yml
@@ -44,7 +44,7 @@ jobs:
java-version: ${{ env.JDK_VERSION }}
distribution: 'temurin'
- name: Build
- run: mvn -T 1C -B clean install -DskipTests -Pflink1,spark3,paimon-lucene
+ run: mvn -T 1C -B -ntp clean install -DskipTests -Pflink1,spark3,paimon-lucene
- name: Test
run: |
# run tests with random timezone to find out timezone related bugs
@@ -56,6 +56,6 @@ jobs:
test_modules+="!org.apache.paimon:paimon-spark-${suffix}_2.12,"
done
test_modules="${test_modules%,}"
- mvn -T 1C -B clean install -pl "${test_modules}" -Pflink1,spark3,paimon-lucene -Pskip-paimon-flink-tests -Duser.timezone=$jvm_timezone
+ mvn -T 1C -B -ntp clean install -pl "${test_modules}" -Pflink1,spark3,paimon-lucene -Pskip-paimon-flink-tests -Duser.timezone=$jvm_timezone
env:
MAVEN_OPTS: -Xmx4096m
\ No newline at end of file
diff --git a/.github/workflows/utitcase-spark-3.x.yml b/.github/workflows/utitcase-spark-3.x.yml
index 840b22548d..3fff587799 100644
--- a/.github/workflows/utitcase-spark-3.x.yml
+++ b/.github/workflows/utitcase-spark-3.x.yml
@@ -54,7 +54,7 @@ jobs:
distribution: 'temurin'
- name: Build Spark 3 with ${{ matrix.scala_version }}
- run: mvn -T 2C -B clean install -DskipTests -Pspark3,flink1,scala-${{ matrix.scala_version }}
+ run: mvn -T 2C -B -ntp clean install -DskipTests -Pspark3,flink1,scala-${{ matrix.scala_version }}
- name: Test Spark 3 with ${{ matrix.scala_version }}
timeout-minutes: 60
@@ -68,6 +68,6 @@ jobs:
test_modules+="org.apache.paimon:paimon-spark-${suffix}_${{
matrix.scala_version }},"
done
test_modules="${test_modules%,}"
- mvn -T 2C -B verify -pl "${test_modules}" -Duser.timezone=$jvm_timezone -Pspark3,flink1,scala-${{ matrix.scala_version }}
+ mvn -T 2C -B -ntp verify -pl "${test_modules}" -Duser.timezone=$jvm_timezone -Pspark3,flink1,scala-${{ matrix.scala_version }}
env:
MAVEN_OPTS: -Xmx4096m
\ No newline at end of file
diff --git a/.github/workflows/utitcase-spark-4.x.yml b/.github/workflows/utitcase-spark-4.x.yml
index 39161425af..fc031342c4 100644
--- a/.github/workflows/utitcase-spark-4.x.yml
+++ b/.github/workflows/utitcase-spark-4.x.yml
@@ -51,7 +51,7 @@ jobs:
distribution: 'temurin'
- name: Build Spark
- run: mvn -T 2C -B clean install -DskipTests -Pspark4,flink1,paimon-lucene
+ run: mvn -T 2C -B -ntp clean install -DskipTests -Pspark4,flink1,paimon-lucene
- name: Test Spark
timeout-minutes: 60
@@ -65,6 +65,6 @@ jobs:
test_modules+="org.apache.paimon:paimon-spark-${suffix}_2.13,"
done
test_modules="${test_modules%,}"
- mvn -T 2C -B verify -pl "${test_modules}" -Duser.timezone=$jvm_timezone -Pspark4,flink1,paimon-lucene
+ mvn -T 2C -B -ntp verify -pl "${test_modules}" -Duser.timezone=$jvm_timezone -Pspark4,flink1,paimon-lucene
env:
MAVEN_OPTS: -Xmx4096m
\ No newline at end of file
diff --git a/.github/workflows/utitcase.yml b/.github/workflows/utitcase.yml
index 98a970fa7e..52423a026e 100644
--- a/.github/workflows/utitcase.yml
+++ b/.github/workflows/utitcase.yml
@@ -73,7 +73,7 @@ jobs:
- name: Build Others
run: |
echo "Start compiling modules"
- mvn -T 2C -B clean install -DskipTests -Pflink1,spark3,paimon-faiss
+ mvn -T 2C -B -ntp clean install -DskipTests -Pflink1,spark3,paimon-faiss
- name: Test Others
timeout-minutes: 60
@@ -87,6 +87,6 @@ jobs:
TEST_MODULES+="!org.apache.paimon:paimon-spark-${suffix}_2.12,"
done
TEST_MODULES="${TEST_MODULES%,}"
- mvn -T 2C -B clean install -pl "${TEST_MODULES}" -Pskip-paimon-flink-tests -Duser.timezone=$jvm_timezone
+ mvn -T 2C -B -ntp clean install -pl "${TEST_MODULES}" -Pskip-paimon-flink-tests -Duser.timezone=$jvm_timezone
env:
MAVEN_OPTS: -Xmx4096m -XX:+UseG1GC -XX:CICompilerCount=2
\ No newline at end of file
diff --git a/tools/ci/sonar_check.sh b/tools/ci/sonar_check.sh
index a3b774ef54..d17c937dff 100644
--- a/tools/ci/sonar_check.sh
+++ b/tools/ci/sonar_check.sh
@@ -18,7 +18,7 @@ if [ ! "$SONAR_TOKEN" ]; then
echo "SONAR_TOKEN environment is null, skip check"
exit 0
fi
-mvn --batch-mode verify sonar:sonar \
+mvn --batch-mode -ntp verify sonar:sonar \
-Dmaven.test.skip=true -Dsonar.host.url=https://sonarcloud.io \
-Dsonar.organization=apache \
-Dsonar.projectKey=apache-paimon \
diff --git a/tools/releasing/deploy_staging_jars.sh b/tools/releasing/deploy_staging_jars.sh
index cddff9ad91..bcfde30624 100755
--- a/tools/releasing/deploy_staging_jars.sh
+++ b/tools/releasing/deploy_staging_jars.sh
@@ -42,6 +42,6 @@ fi
cd ${PROJECT_ROOT}
echo "Deploying to repository.apache.org"
-${MVN} clean deploy -Papache-release,docs-and-source,spark3,flink1 -DskipTests -DretryFailedDeploymentCount=10 $CUSTOM_OPTIONS
+${MVN} clean deploy -ntp -Papache-release,docs-and-source,spark3,flink1 -DskipTests -DretryFailedDeploymentCount=10 $CUSTOM_OPTIONS
cd ${CURR_DIR}
diff --git a/tools/releasing/deploy_staging_jars_for_jdk11.sh b/tools/releasing/deploy_staging_jars_for_jdk11.sh
index d5d5e8b1b5..7c28eb7562 100755
--- a/tools/releasing/deploy_staging_jars_for_jdk11.sh
+++ b/tools/releasing/deploy_staging_jars_for_jdk11.sh
@@ -43,11 +43,11 @@ fi
cd ${PROJECT_ROOT}
echo "Building flink2 and iceberg modules"
-${MVN} clean install -Pdocs-and-source,flink2 -DskipTests \
+${MVN} clean install -ntp -Pdocs-and-source,flink2 -DskipTests \
-pl org.apache.paimon:paimon-flink-2.0,org.apache.paimon:paimon-flink-2.1,org.apache.paimon:paimon-flink-2.2,org.apache.paimon:paimon-iceberg -am $CUSTOM_OPTIONS
echo "Deploying flink2 and iceberg modules to repository.apache.org"
-${MVN} deploy -Papache-release,docs-and-source,flink2 -DskipTests -DretryFailedDeploymentCount=10 \
+${MVN} deploy -ntp -Papache-release,docs-and-source,flink2 -DskipTests -DretryFailedDeploymentCount=10 \
-pl org.apache.paimon:paimon-flink-2.0,org.apache.paimon:paimon-flink-2.1,org.apache.paimon:paimon-flink-2.2,org.apache.paimon:paimon-flink2-common,org.apache.paimon:paimon-iceberg $CUSTOM_OPTIONS
cd ${CURR_DIR}
diff --git a/tools/releasing/deploy_staging_jars_for_jdk17.sh b/tools/releasing/deploy_staging_jars_for_jdk17.sh
index d01f84d76e..c4b2e761ec 100755
--- a/tools/releasing/deploy_staging_jars_for_jdk17.sh
+++ b/tools/releasing/deploy_staging_jars_for_jdk17.sh
@@ -43,10 +43,10 @@ fi
cd ${PROJECT_ROOT}
echo "Building spark4 module"
-${MVN} clean install -Pdocs-and-source,spark4 -DskipTests -pl paimon-spark/paimon-spark-4.0 -am $CUSTOM_OPTIONS
+${MVN} clean install -ntp -Pdocs-and-source,spark4 -DskipTests -pl paimon-spark/paimon-spark-4.0 -am $CUSTOM_OPTIONS
echo "Deploying spark4 module to repository.apache.org"
-${MVN} deploy -Papache-release,docs-and-source,spark4 -DskipTests -DretryFailedDeploymentCount=10 \
+${MVN} deploy -ntp -Papache-release,docs-and-source,spark4 -DskipTests -DretryFailedDeploymentCount=10 \
-pl org.apache.paimon:paimon-spark-common_2.13,org.apache.paimon:paimon-spark4-common,org.apache.paimon:paimon-spark-4.0 $CUSTOM_OPTIONS
cd ${CURR_DIR}
diff --git a/tools/releasing/update_branch_version.sh b/tools/releasing/update_branch_version.sh
index ac42cc771e..d0c72cef6b 100755
--- a/tools/releasing/update_branch_version.sh
+++ b/tools/releasing/update_branch_version.sh
@@ -51,7 +51,7 @@ fi
cd ${PROJECT_ROOT}
# change version in all pom files
-mvn versions:set -DgenerateBackupPoms=false -DnewVersion=${NEW_VERSION}
+mvn versions:set -ntp -DgenerateBackupPoms=false -DnewVersion=${NEW_VERSION}
git commit -am "[release] Update version to ${NEW_VERSION}"
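The same flag applies to local builds on any Maven 3.6.1+ installation; a quick sketch for reproducing the quieter output from the repository root (assumes a local checkout and that skipping tests is acceptable):

    mvn -v                                  # confirm Maven >= 3.6.1
    mvn -B -ntp clean install -DskipTests   # same quiet build as CI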