This is an automated email from the ASF dual-hosted git repository.

rmetzger pushed a commit to branch release-1.13
in repository https://gitbox.apache.org/repos/asf/flink.git


The following commit(s) were added to refs/heads/release-1.13 by this push:
     new ed02b5d  [FLINK-22856][Azure] Upgrade to ubuntu-20.04
ed02b5d is described below

commit ed02b5deca5881ac5572228ddd1cd3e85eadfd32
Author: Robert Metzger <rmetz...@apache.org>
AuthorDate: Wed Jun 2 15:10:22 2021 +0200

    [FLINK-22856][Azure] Upgrade to ubuntu-20.04
---
 azure-pipelines.yml                                |  6 ++---
 flink-end-to-end-tests/run-nightly-tests.sh        |  2 +-
 .../test-scripts/common_docker.sh                  |  8 +++---
 .../test-scripts/common_kubernetes.sh              |  7 +++---
 tools/azure-pipelines/build-apache-repo.yml        | 20 +++++++--------
 tools/azure-pipelines/build-nightly-dist.yml       |  4 +--
 tools/azure-pipelines/build-python-wheels.yml      |  2 +-
 tools/azure-pipelines/jobs-template.yml            | 29 ++++++++++++----------
 8 files changed, 41 insertions(+), 37 deletions(-)

diff --git a/azure-pipelines.yml b/azure-pipelines.yml
index cbe3b11..23d0f49 100644
--- a/azure-pipelines.yml
+++ b/azure-pipelines.yml
@@ -71,16 +71,16 @@ stages:
         parameters: # see template file for a definition of the parameters.
           stage_name: ci_build
           test_pool_definition:
-            vmImage: 'ubuntu-16.04'
+            vmImage: 'ubuntu-20.04'
           e2e_pool_definition:
-            vmImage: 'ubuntu-16.04'
+            vmImage: 'ubuntu-20.04'
          environment: PROFILE="-Dhadoop.version=2.8.3 -Dinclude_hadoop_aws -Dscala-2.11"
           run_end_to_end: false
           container: flink-build-container
           jdk: jdk8
       - job: docs_404_check # run on a MSFT provided machine
         pool:
-          vmImage: 'ubuntu-16.04'
+          vmImage: 'ubuntu-20.04'
         steps:
           - script: ./tools/ci/docs.sh
   # CI / Special stage for release, e.g. building PyFlink wheel packages, etc:
diff --git a/flink-end-to-end-tests/run-nightly-tests.sh b/flink-end-to-end-tests/run-nightly-tests.sh
index ef8b0a8..03f0b33 100755
--- a/flink-end-to-end-tests/run-nightly-tests.sh
+++ b/flink-end-to-end-tests/run-nightly-tests.sh
@@ -37,7 +37,7 @@ if [ -z "$FLINK_LOG_DIR" ] ; then
 fi
 
 # On Azure CI, use artifacts dir
-if [ -z "$DEBUG_FILES_OUTPUT_DIR"] ; then
+if [ -z "$DEBUG_FILES_OUTPUT_DIR" ] ; then
     export DEBUG_FILES_OUTPUT_DIR="$FLINK_LOG_DIR"
 fi
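
The fix above is about shell word-splitting: "]" has to reach "[" as its own argument. With the space missing, a non-empty $DEBUG_FILES_OUTPUT_DIR expands to value], "[" never sees its closing bracket, and the test errors out instead of evaluating. A minimal sketch of the failure mode:

    $ VAR=x; if [ -z "$VAR"] ; then echo empty; fi    # broken: "]" fuses with the operand
    bash: [: missing `]'
    $ VAR=x; if [ -z "$VAR" ] ; then echo empty; fi   # fixed: "]" is a separate word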
 
diff --git a/flink-end-to-end-tests/test-scripts/common_docker.sh b/flink-end-to-end-tests/test-scripts/common_docker.sh
index af37db8..3127662 100644
--- a/flink-end-to-end-tests/test-scripts/common_docker.sh
+++ b/flink-end-to-end-tests/test-scripts/common_docker.sh
@@ -58,15 +58,15 @@ function build_image() {
 }
 
 function start_file_server() {
-    command -v python >/dev/null 2>&1
+    command -v python3 >/dev/null 2>&1
     if [[ $? -eq 0 ]]; then
-      python ${TEST_INFRA_DIR}/python2_fileserver.py &
+      python3 ${TEST_INFRA_DIR}/python3_fileserver.py &
       return
     fi
 
-    command -v python3 >/dev/null 2>&1
+    command -v python >/dev/null 2>&1
     if [[ $? -eq 0 ]]; then
-      python3 ${TEST_INFRA_DIR}/python3_fileserver.py &
+      python ${TEST_INFRA_DIR}/python2_fileserver.py &
       return
     fi
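
Swapping the probe order makes python3 the preferred interpreter: ubuntu-20.04 images ship python3 but no "python" binary by default, and hosts that still expose a legacy Python 2 "python" no longer get routed to the Python 2 file server. Condensed to an equivalent if/elif, the function's logic is:

    # prefer python3; fall back to the Python 2 file server only if python3 is absent
    if command -v python3 >/dev/null 2>&1; then
        python3 "${TEST_INFRA_DIR}/python3_fileserver.py" &
    elif command -v python >/dev/null 2>&1; then
        python "${TEST_INFRA_DIR}/python2_fileserver.py" &
    fi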
 
diff --git a/flink-end-to-end-tests/test-scripts/common_kubernetes.sh b/flink-end-to-end-tests/test-scripts/common_kubernetes.sh
index 7a393d9..e4fa575 100755
--- a/flink-end-to-end-tests/test-scripts/common_kubernetes.sh
+++ b/flink-end-to-end-tests/test-scripts/common_kubernetes.sh
@@ -50,6 +50,8 @@ function setup_kubernetes_for_linux {
     fi
     # conntrack is required for minikube 1.9 and later
     sudo apt-get install conntrack
+    # required to resolve HOST_JUJU_LOCK_PERMISSION error of "minikube start --vm-driver=none"
+    sudo sysctl fs.protected_regular=0
 }
 
 function check_kubernetes_status {
@@ -76,7 +78,7 @@ function start_kubernetes_if_not_running {
         # here.
         # Similarly, the kubelets are marking themself as "low disk space",
         # causing Flink to avoid this node (again, failing the test)
-        sudo CHANGE_MINIKUBE_NONE_USER=true minikube start --vm-driver=none \
+        CHANGE_MINIKUBE_NONE_USER=true sudo -E minikube start --vm-driver=none \
             --extra-config=kubelet.image-gc-high-threshold=99 \
             --extra-config=kubelet.image-gc-low-threshold=98 \
             --extra-config=kubelet.minimum-container-ttl-duration=120m \
@@ -108,7 +110,6 @@ function start_kubernetes {
             exit 1
         fi
     fi
-    eval $(minikube docker-env)
 }
 
 function stop_kubernetes {
@@ -118,7 +119,7 @@ function stop_kubernetes {
         kill $minikube_mount_pid 2> /dev/null
     else
         echo "Stopping minikube ..."
-        stop_command="sudo minikube stop"
+        stop_command="minikube stop"
        if ! retry_times ${MINIKUBE_START_RETRIES} ${MINIKUBE_START_BACKOFF} "${stop_command}"; then
             echo "Could not stop minikube. Aborting..."
             exit 1
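
Three of these changes track how minikube's none driver behaves on the newer host. Ubuntu 20.04's kernel enables fs.protected_regular, which keeps root from opening the lock files minikube leaves under /tmp and surfaces as the HOST_JUJU_LOCK_PERMISSION error named in the comment; the sysctl relaxes that restriction. Moving the assignment in front of "sudo -E" hands the caller's whole environment through sudo instead of setting a single variable for the child process (this assumes the sudoers policy permits --preserve-env, as it does on the passwordless-sudo CI agents):

    $ CHANGE_MINIKUBE_NONE_USER=true sudo -E env | grep CHANGE_MINIKUBE
    CHANGE_MINIKUBE_NONE_USER=true

And because the none driver uses the host's own Docker daemon, "eval $(minikube docker-env)" is redundant and is dropped; "minikube stop" likewise runs without sudo, presumably because CHANGE_MINIKUBE_NONE_USER leaves the config owned by the unprivileged user.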
diff --git a/tools/azure-pipelines/build-apache-repo.yml b/tools/azure-pipelines/build-apache-repo.yml
index 1c4797a..bb1a84d 100644
--- a/tools/azure-pipelines/build-apache-repo.yml
+++ b/tools/azure-pipelines/build-apache-repo.yml
@@ -66,14 +66,14 @@ stages:
           test_pool_definition:
             name: Default
           e2e_pool_definition:
-            vmImage: 'ubuntu-16.04'
+            vmImage: 'ubuntu-20.04'
          environment: PROFILE="-Dhadoop.version=2.8.3 -Dinclude_hadoop_aws -Dscala-2.11"
           run_end_to_end: false
           container: flink-build-container
           jdk: jdk8
       - job: docs_404_check # run on a MSFT provided machine
         pool:
-          vmImage: 'ubuntu-16.04'
+          vmImage: 'ubuntu-20.04'
         steps:
          # Skip docs check if this is a pull request that doesn't contain a documentation change
           - bash: |
@@ -109,9 +109,9 @@ stages:
         parameters:
           stage_name: cron_azure
           test_pool_definition:
-            vmImage: 'ubuntu-16.04'
+            vmImage: 'ubuntu-20.04'
           e2e_pool_definition:
-            vmImage: 'ubuntu-16.04'
+            vmImage: 'ubuntu-20.04'
          environment: PROFILE="-Dhadoop.version=2.8.3 -Dinclude_hadoop_aws -Dscala-2.11"
           run_end_to_end: true
           container: flink-build-container
@@ -122,7 +122,7 @@ stages:
           test_pool_definition:
             name: Default
           e2e_pool_definition:
-            vmImage: 'ubuntu-16.04'
+            vmImage: 'ubuntu-20.04'
           environment: PROFILE="-Dhadoop.version=2.4.1 -Pskip-hive-tests"
           run_end_to_end: true
           container: flink-build-container
@@ -133,7 +133,7 @@ stages:
           test_pool_definition:
             name: Default
           e2e_pool_definition:
-            vmImage: 'ubuntu-16.04'
+            vmImage: 'ubuntu-20.04'
           environment: PROFILE="-Dinclude_hadoop_aws -Dhadoop.version=3.1.3 
-Phadoop3-tests"
           run_end_to_end: true
           container: flink-build-container
@@ -144,7 +144,7 @@ stages:
           test_pool_definition:
             name: Default
           e2e_pool_definition:
-            vmImage: 'ubuntu-16.04'
+            vmImage: 'ubuntu-20.04'
          environment: PROFILE="-Dhadoop.version=2.8.3 -Dinclude_hadoop_aws -Dscala-2.12 -Phive-1.2.1"
           run_end_to_end: true
           container: flink-build-container
@@ -155,7 +155,7 @@ stages:
           test_pool_definition:
             name: Default
           e2e_pool_definition:
-            vmImage: 'ubuntu-16.04'
+            vmImage: 'ubuntu-20.04'
          environment: PROFILE="-Dhadoop.version=2.8.3 -Dinclude_hadoop_aws -Dscala-2.11 -Djdk11"
           run_end_to_end: true
           container: flink-build-container
@@ -166,14 +166,14 @@ stages:
           test_pool_definition:
             name: Default
           e2e_pool_definition:
-            vmImage: 'ubuntu-16.04'
+            vmImage: 'ubuntu-20.04'
          environment: PROFILE="-Dhadoop.version=2.8.3 -Dinclude_hadoop_aws -Dscala-2.11 -Penable-adaptive-scheduler"
           run_end_to_end: true
           container: flink-build-container
           jdk: jdk8
       - job: docs_404_check # run on a MSFT provided machine
         pool:
-          vmImage: 'ubuntu-16.04'
+          vmImage: 'ubuntu-20.04'
         steps:
           - task: UseRubyVersion@0
             inputs:
diff --git a/tools/azure-pipelines/build-nightly-dist.yml b/tools/azure-pipelines/build-nightly-dist.yml
index 867605c..5e170bc 100644
--- a/tools/azure-pipelines/build-nightly-dist.yml
+++ b/tools/azure-pipelines/build-nightly-dist.yml
@@ -16,7 +16,7 @@
 jobs:
   - job: ${{parameters.stage_name}}_binary
     pool:
-      vmImage: 'ubuntu-16.04'
+      vmImage: 'ubuntu-20.04'
     container: flink-build-container
     workspace:
       clean: all
@@ -63,7 +63,7 @@ jobs:
       #    artifact: nightly-release
   - job: ${{parameters.stage_name}}_maven
     pool:
-      vmImage: 'ubuntu-16.04'
+      vmImage: 'ubuntu-20.04'
     container: flink-build-container
     timeoutInMinutes: 100 # 40 minutes per scala version + 20 buffer
     workspace:
diff --git a/tools/azure-pipelines/build-python-wheels.yml b/tools/azure-pipelines/build-python-wheels.yml
index 02d6e3b..f25756b 100644
--- a/tools/azure-pipelines/build-python-wheels.yml
+++ b/tools/azure-pipelines/build-python-wheels.yml
@@ -18,7 +18,7 @@ jobs:
     strategy:
       matrix:
         linux:
-          vm-label: 'ubuntu-16.04'
+          vm-label: 'ubuntu-20.04'
         mac:
           vm-label: 'macOS-10.15'
     pool:
diff --git a/tools/azure-pipelines/jobs-template.yml b/tools/azure-pipelines/jobs-template.yml
index d67a34b..360370c 100644
--- a/tools/azure-pipelines/jobs-template.yml
+++ b/tools/azure-pipelines/jobs-template.yml
@@ -60,10 +60,10 @@ jobs:
     condition: not(eq('${{parameters.test_pool_definition.name}}', 'Default'))
     displayName: Cache Maven local repo
   - script: |
-      echo "##vso[task.setvariable variable=JAVA_HOME]$JAVA_HOME_11_X64"
-      echo "##vso[task.setvariable variable=PATH]$JAVA_HOME_11_X64/bin:$PATH"
-    displayName: "Set to jdk11"
-    condition: eq('${{parameters.jdk}}', 'jdk11')
+      echo "##vso[task.setvariable variable=JAVA_HOME]$JAVA_HOME_8_X64"
+      echo "##vso[task.setvariable variable=PATH]$JAVA_HOME_8_X64/bin:$PATH"
+    displayName: "Set to jdk8"
+    condition: eq('${{parameters.jdk}}', 'jdk8')
   # Compile
   - script: |
       ${{parameters.environment}} ./tools/ci/compile.sh || exit $?
@@ -133,10 +133,10 @@ jobs:
     displayName: Cache Maven local repo
 
   - script: |
-      echo "##vso[task.setvariable variable=JAVA_HOME]$JAVA_HOME_11_X64"
-      echo "##vso[task.setvariable variable=PATH]$JAVA_HOME_11_X64/bin:$PATH"
-    displayName: "Set to jdk11"
-    condition: eq('${{parameters.jdk}}', 'jdk11')
+      echo "##vso[task.setvariable variable=JAVA_HOME]$JAVA_HOME_8_X64"
+      echo "##vso[task.setvariable variable=PATH]$JAVA_HOME_8_X64/bin:$PATH"
+    displayName: "Set to jdk8"
+    condition: eq('${{parameters.jdk}}', 'jdk8')
 
   - script: sudo sysctl -w kernel.core_pattern=core.%p
     displayName: Set coredump pattern
@@ -210,10 +210,10 @@ jobs:
       continueOnError: true
       condition: not(eq(variables['SKIP'], '1'))
     - script: |
-        echo "##vso[task.setvariable variable=JAVA_HOME]$JAVA_HOME_11_X64"
-        echo "##vso[task.setvariable variable=PATH]$JAVA_HOME_11_X64/bin:$PATH"
-      displayName: "Set to jdk11"
-      condition: eq('${{parameters.jdk}}', 'jdk11')
+        echo "##vso[task.setvariable variable=JAVA_HOME]$JAVA_HOME_8_X64"
+        echo "##vso[task.setvariable variable=PATH]$JAVA_HOME_8_X64/bin:$PATH"
+      displayName: "Set to jdk8"
+      condition: eq('${{parameters.jdk}}', 'jdk8')
     - script: |
         echo "Setting up Maven"
         source ./tools/ci/maven-utils.sh
@@ -223,7 +223,10 @@ jobs:
         ./tools/azure-pipelines/free_disk_space.sh
 
         echo "Installing required software"
-        sudo apt-get install -y bc
+        sudo apt-get install -y bc libapr1
+        # install libssl1.0.0 for netty tcnative
+        wget http://security.ubuntu.com/ubuntu/pool/main/o/openssl1.0/libssl1.0.0_1.0.2n-1ubuntu5.6_amd64.deb
+        sudo apt install ./libssl1.0.0_1.0.2n-1ubuntu5.6_amd64.deb
       displayName: Prepare E2E run
       condition: not(eq(variables['SKIP'], '1'))
     - script: ${{parameters.environment}} ./tools/ci/compile.sh
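
The remaining jobs-template.yml changes follow from what ubuntu-20.04 ships rather than from Flink itself. The hosted 20.04 agents default to a JDK 11 toolchain, so the JDK selection step inverts: instead of opting jdk11 builds into $JAVA_HOME_11_X64, the template now opts jdk8 builds into $JAVA_HOME_8_X64. The mechanism is the task.setvariable logging command, which the agent scans for on a step's stdout and applies to every subsequent step of the job:

    # any step that prints these lines changes JAVA_HOME and PATH
    # for all later steps in the same job
    echo "##vso[task.setvariable variable=JAVA_HOME]$JAVA_HOME_8_X64"
    echo "##vso[task.setvariable variable=PATH]$JAVA_HOME_8_X64/bin:$PATH"

Similarly, 20.04 ships only libssl1.1, while the netty-tcnative native library used by the E2E tests still links against libssl1.0.0, so the preparation step sideloads the last 1.0.x .deb from the Ubuntu archive alongside libapr1, the Apache Portable Runtime it also depends on.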
