This is an automated email from the ASF dual-hosted git repository.

dianfu pushed a commit to branch release-1.12
in repository https://gitbox.apache.org/repos/asf/flink.git


The following commit(s) were added to refs/heads/release-1.12 by this push:
     new a5ec7a7  [FLINK-23984][python][tests] Temporarily disable the PyFlink 
end-to-end tests because Python 3.7 was removed from Debian
a5ec7a7 is described below

commit a5ec7a7222273368824ad6fa06e9dc51ec3af7c4
Author: Dian Fu <dia...@apache.org>
AuthorDate: Thu Aug 26 14:00:50 2021 +0800

    [FLINK-23984][python][tests] Temporarily disable the PyFlink end-to-end tests 
because Python 3.7 was removed from Debian
---
 flink-end-to-end-tests/run-nightly-tests.sh | 14 +++++++-------
 1 file changed, 7 insertions(+), 7 deletions(-)

diff --git a/flink-end-to-end-tests/run-nightly-tests.sh 
b/flink-end-to-end-tests/run-nightly-tests.sh
index 4a00628..1123c5f 100755
--- a/flink-end-to-end-tests/run-nightly-tests.sh
+++ b/flink-end-to-end-tests/run-nightly-tests.sh
@@ -140,7 +140,7 @@ if [[ ${PROFILE} != *"jdk11"* ]]; then
 
        if [[ `uname -i` != 'aarch64' ]]; then
                # Skip PyFlink e2e test, because MiniConda and Pyarrow which 
Pyflink depends doesn't support aarch64 currently.
-               run_test "Run kubernetes pyflink application test" 
"$END_TO_END_DIR/test-scripts/test_kubernetes_pyflink_application.sh"
+               # run_test "Run kubernetes pyflink application test" 
"$END_TO_END_DIR/test-scripts/test_kubernetes_pyflink_application.sh"
                
                # Hadoop YARN deosn't support aarch64 at this moment. See: 
https://issues.apache.org/jira/browse/HADOOP-16723
                run_test "Running Kerberized YARN per-job on Docker test 
(default input)" "$END_TO_END_DIR/test-scripts/test_yarn_job_kerberos_docker.sh"
@@ -224,13 +224,13 @@ run_test "Dependency shading of table modules test" 
"$END_TO_END_DIR/test-script
 
 run_test "Shaded Hadoop S3A with credentials provider end-to-end test" 
"$END_TO_END_DIR/test-scripts/test_batch_wordcount.sh hadoop_with_provider"
 
-if [[ `uname -i` != 'aarch64' ]]; then
-    run_test "PyFlink end-to-end test" 
"$END_TO_END_DIR/test-scripts/test_pyflink.sh" "skip_check_exceptions"
-fi
+#if [[ `uname -i` != 'aarch64' ]]; then
+#    run_test "PyFlink end-to-end test" 
"$END_TO_END_DIR/test-scripts/test_pyflink.sh" "skip_check_exceptions"
+#fi
 # These tests are known to fail on JDK11. See FLINK-13719
-if [[ ${PROFILE} != *"jdk11"* ]] && [[ `uname -i` != 'aarch64' ]]; then
-    run_test "PyFlink YARN per-job on Docker test" 
"$END_TO_END_DIR/test-scripts/test_pyflink_yarn.sh" "skip_check_exceptions"
-fi
+#if [[ ${PROFILE} != *"jdk11"* ]] && [[ `uname -i` != 'aarch64' ]]; then
+#    run_test "PyFlink YARN per-job on Docker test" 
"$END_TO_END_DIR/test-scripts/test_pyflink_yarn.sh" "skip_check_exceptions"
+#fi
 
 
################################################################################
 # Sticky Scheduling

Reply via email to