This is an automated email from the ASF dual-hosted git repository.

chesnay pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/flink.git


The following commit(s) were added to refs/heads/master by this push:
     new 79870d10794 [FLINK-28165][yarn] Inline 'include_hadoop_aws' profile
79870d10794 is described below

commit 79870d10794bd3be384debcdecd2bf080c274ee8
Author: Chesnay Schepler <ches...@apache.org>
AuthorDate: Fri Nov 11 13:19:49 2022 +0100

    [FLINK-28165][yarn] Inline 'include_hadoop_aws' profile
---
 azure-pipelines.yml                                |   4 +-
 flink-yarn/pom.xml                                 | 116 +++++++++------------
 .../flink/yarn/YarnFileStageTestS3ITCase.java      |  26 -----
 tools/azure-pipelines/build-apache-repo.yml        |  12 +--
 4 files changed, 58 insertions(+), 100 deletions(-)

diff --git a/azure-pipelines.yml b/azure-pipelines.yml
index 557f9db7145..a62e5f8d07e 100644
--- a/azure-pipelines.yml
+++ b/azure-pipelines.yml
@@ -77,7 +77,7 @@ stages:
             vmImage: 'ubuntu-20.04'
           e2e_pool_definition:
             vmImage: 'ubuntu-20.04'
-          environment: PROFILE="-Dflink.hadoop.version=2.8.5 -Dinclude_hadoop_aws -Dscala-2.12"
+          environment: PROFILE="-Dflink.hadoop.version=2.8.5 -Dscala-2.12"
           run_end_to_end: false
           container: flink-build-container
           jdk: 8
@@ -97,5 +97,5 @@ stages:
       - template: tools/azure-pipelines/build-python-wheels.yml
         parameters:
           stage_name: cron_python_wheels
-          environment: PROFILE="-Dflink.hadoop.version=2.8.5 -Dinclude_hadoop_aws -Dscala-2.12"
+          environment: PROFILE="-Dflink.hadoop.version=2.8.5 -Dscala-2.12"
           container: flink-build-container
diff --git a/flink-yarn/pom.xml b/flink-yarn/pom.xml
index 333aa243beb..1583a851ece 100644
--- a/flink-yarn/pom.xml
+++ b/flink-yarn/pom.xml
@@ -129,73 +129,57 @@ under the License.
                                </exclusion>
                        </exclusions>
                </dependency>
-       </dependencies>
 
-       <profiles>
-               <profile>
-                       <!-- Hadoop >= 2.6 moved the S3 file systems from hadoop-common into hadoop-aws artifact
-                               (see https://issues.apache.org/jira/browse/HADOOP-11074)
-                               We can add the (test) dependency per default once 2.6 is the minimum required version.
-                       -->
-                       <id>include_hadoop_aws</id>
-                       <activation>
-                               <property>
-                                       <name>include_hadoop_aws</name>
-                               </property>
-                       </activation>
-                       <dependencies>
-                               <!-- for the S3 tests of 
YarnFileStageTestS3ITCase -->
-                               <dependency>
-                                       <groupId>org.apache.hadoop</groupId>
-                                       <artifactId>hadoop-aws</artifactId>
-                                       <version>${flink.hadoop.version}</version>
-                                       <scope>test</scope>
-                                       <exclusions>
-                                               <exclusion>
-                                                       <groupId>log4j</groupId>
-                                                       <artifactId>log4j</artifactId>
-                                               </exclusion>
-                                               <exclusion>
-                                                       <groupId>org.slf4j</groupId>
-                                                       <artifactId>slf4j-log4j12</artifactId>
-                                               </exclusion>
-                                               <exclusion>
-                                                       <groupId>org.apache.avro</groupId>
-                                                       <artifactId>avro</artifactId>
-                                               </exclusion>
-                                               <!-- The aws-java-sdk-core requires jackson 2.6, but
-                                                       hadoop pulls in 2.3 -->
-                                               <exclusion>
-                                                       <groupId>com.fasterxml.jackson.core</groupId>
-                                                       <artifactId>jackson-annotations</artifactId>
-                                               </exclusion>
-                                               <exclusion>
-                                                       <groupId>com.fasterxml.jackson.core</groupId>
-                                                       <artifactId>jackson-core</artifactId>
-                                               </exclusion>
-                                               <exclusion>
-                                                       <groupId>com.fasterxml.jackson.core</groupId>
-                                                       <artifactId>jackson-databind</artifactId>
-                                               </exclusion>
-                                       </exclusions>
-                               </dependency>
-                               <!-- override Hadoop's default dependency on too low SDK versions that do not work
-                                       with our httpcomponents version when initialising the s3a file system -->
-                               <dependency>
-                                       <groupId>com.amazonaws</groupId>
-                                       <artifactId>aws-java-sdk-s3</artifactId>
-                                       <version>${aws.sdk.version}</version>
-                                       <scope>test</scope>
-                               </dependency>
-                               <dependency>
-                                       <groupId>com.amazonaws</groupId>
-                                       <artifactId>aws-java-sdk-sts</artifactId>
-                                       <version>${aws.sdk.version}</version>
-                                       <scope>test</scope>
-                               </dependency>
-                       </dependencies>
-               </profile>
-       </profiles>
+               <!-- for the S3 tests of YarnFileStageTestS3ITCase -->
+               <dependency>
+                       <groupId>org.apache.hadoop</groupId>
+                       <artifactId>hadoop-aws</artifactId>
+                       <version>${flink.hadoop.version}</version>
+                       <scope>test</scope>
+                       <exclusions>
+                               <exclusion>
+                                       <groupId>log4j</groupId>
+                                       <artifactId>log4j</artifactId>
+                               </exclusion>
+                               <exclusion>
+                                       <groupId>org.slf4j</groupId>
+                                       <artifactId>slf4j-log4j12</artifactId>
+                               </exclusion>
+                               <exclusion>
+                                       <groupId>org.apache.avro</groupId>
+                                       <artifactId>avro</artifactId>
+                               </exclusion>
+                               <!-- The aws-java-sdk-core requires jackson 2.6, but
+                                       hadoop pulls in 2.3 -->
+                               <exclusion>
+                                       <groupId>com.fasterxml.jackson.core</groupId>
+                                       <artifactId>jackson-annotations</artifactId>
+                               </exclusion>
+                               <exclusion>
+                                       <groupId>com.fasterxml.jackson.core</groupId>
+                                       <artifactId>jackson-core</artifactId>
+                               </exclusion>
+                               <exclusion>
+                                       <groupId>com.fasterxml.jackson.core</groupId>
+                                       <artifactId>jackson-databind</artifactId>
+                               </exclusion>
+                       </exclusions>
+               </dependency>
+               <!-- override Hadoop's default dependency on too low SDK versions that do not work
+                       with our httpcomponents version when initialising the s3a file system -->
+               <dependency>
+                       <groupId>com.amazonaws</groupId>
+                       <artifactId>aws-java-sdk-s3</artifactId>
+                       <version>${aws.sdk.version}</version>
+                       <scope>test</scope>
+               </dependency>
+               <dependency>
+                       <groupId>com.amazonaws</groupId>
+                       <artifactId>aws-java-sdk-sts</artifactId>
+                       <version>${aws.sdk.version}</version>
+                       <scope>test</scope>
+               </dependency>
+       </dependencies>
 
        <build>
                <plugins>
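
The jackson exclusions above exist because aws-java-sdk-core needs Jackson 2.6+ while Hadoop 2.x resolves 2.3 by default. As a rough illustration (not part of this commit), one way to confirm which jackson-core version actually ends up on the resolved test classpath is a throwaway check like the following; the class name is hypothetical:

    import com.fasterxml.jackson.core.json.PackageVersion;

    // Throwaway check, not part of the commit: run it against the flink-yarn
    // test classpath to see which jackson-core version Maven resolved after
    // the exclusions above took effect.
    public class JacksonVersionCheck {
        public static void main(String[] args) {
            // PackageVersion.VERSION reports the version baked into the
            // jackson-core jar that was actually loaded.
            System.out.println("jackson-core: " + PackageVersion.VERSION);
        }
    }
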
diff --git a/flink-yarn/src/test/java/org/apache/flink/yarn/YarnFileStageTestS3ITCase.java b/flink-yarn/src/test/java/org/apache/flink/yarn/YarnFileStageTestS3ITCase.java
index de576418d15..d323e35aeb4 100644
--- a/flink-yarn/src/test/java/org/apache/flink/yarn/YarnFileStageTestS3ITCase.java
+++ b/flink-yarn/src/test/java/org/apache/flink/yarn/YarnFileStageTestS3ITCase.java
@@ -34,8 +34,6 @@ import org.junit.jupiter.api.BeforeAll;
 import org.junit.jupiter.api.TestTemplate;
 import org.junit.jupiter.api.extension.ExtendWith;
 import org.junit.jupiter.api.io.TempDir;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
 
 import java.io.File;
 import java.io.FileOutputStream;
@@ -46,7 +44,6 @@ import java.util.HashMap;
 import java.util.Map;
 import java.util.UUID;
 
-import static org.assertj.core.api.Assertions.assertThat;
 import static org.assertj.core.api.Assumptions.assumeThat;
 import static org.assertj.core.api.Assumptions.assumeThatThrownBy;
 
@@ -59,23 +56,13 @@ import static org.assertj.core.api.Assumptions.assumeThatThrownBy;
 @ExtendWith(RetryExtension.class)
 class YarnFileStageTestS3ITCase {
 
-    private static final Logger log = LoggerFactory.getLogger(YarnFileStageTestS3ITCase.class);
-
     private static final String TEST_DATA_DIR = "tests-" + UUID.randomUUID();
 
-    /** Number of tests executed. */
-    private static int numRecursiveUploadTests = 0;
-
-    /** Will be updated by {@link #checkCredentialsAndSetup(File)} if the test is not skipped. */
-    private static boolean skipTest = true;
-
     @BeforeAll
     static void checkCredentialsAndSetup(@TempDir File tempFolder) throws IOException {
         // check whether credentials exist
         S3TestCredentials.assumeCredentialsAvailable();
 
-        skipTest = false;
-
         setupCustomHadoopConfig(tempFolder);
     }
 
@@ -84,18 +71,6 @@ class YarnFileStageTestS3ITCase {
         FileSystem.initialize(new Configuration());
     }
 
-    @AfterAll
-    static void checkAtLeastOneTestRun() {
-        if (!skipTest) {
-            assertThat(numRecursiveUploadTests)
-                    .as(
-                            "No S3 filesystem upload test executed. Please 
activate the "
-                                    + "'include_hadoop_aws' build profile or 
set '-Dinclude_hadoop_aws' during build "
-                                    + "(Hadoop >= 2.6 moved S3 filesystems out 
of hadoop-common).")
-                    .isGreaterThan(0);
-        }
-    }
-
     /**
      * Create a Hadoop config file containing S3 access credentials.
      *
@@ -148,7 +123,6 @@ class YarnFileStageTestS3ITCase {
      */
     private void testRecursiveUploadForYarn(String scheme, String pathSuffix, File tempFolder)
             throws Exception {
-        ++numRecursiveUploadTests;
 
         final Path basePath =
                 new Path(S3TestCredentials.getTestBucketUriWithScheme(scheme) + TEST_DATA_DIR);
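
The removed bookkeeping (skipTest, numRecursiveUploadTests, and the @AfterAll guard) is obsolete: it existed to remind developers to activate the 'include_hadoop_aws' profile, and now that hadoop-aws is always a test dependency, S3TestCredentials.assumeCredentialsAvailable() alone decides whether the tests run. A minimal sketch of that assumption mechanism follows (the test class and environment variable name are illustrative, not taken from this commit):

    import static org.assertj.core.api.Assumptions.assumeThat;

    import org.junit.jupiter.api.Test;

    // Sketch only: a failed AssertJ assumption throws an exception that
    // JUnit 5 reports as "aborted" (skipped) rather than "failed", so no
    // manual skip flag has to be tracked across test methods.
    class AssumptionSketchTest {

        @Test
        void runsOnlyWithCredentials() {
            // Illustrative variable name; the real check lives in
            // S3TestCredentials.assumeCredentialsAvailable().
            assumeThat(System.getenv("IT_CASE_S3_BUCKET")).isNotNull();
            // ... body executes only when the assumption held ...
        }
    }
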
diff --git a/tools/azure-pipelines/build-apache-repo.yml b/tools/azure-pipelines/build-apache-repo.yml
index 5499a2d07b1..f84237a9bc0 100644
--- a/tools/azure-pipelines/build-apache-repo.yml
+++ b/tools/azure-pipelines/build-apache-repo.yml
@@ -70,7 +70,7 @@ stages:
             name: Default
           e2e_pool_definition:
             vmImage: 'ubuntu-20.04'
-          environment: PROFILE="-Dflink.hadoop.version=2.8.5 -Dinclude_hadoop_aws -Dscala-2.12"
+          environment: PROFILE="-Dflink.hadoop.version=2.8.5 -Dscala-2.12"
           run_end_to_end: false
           container: flink-build-container
           jdk: 8
@@ -114,7 +114,7 @@ stages:
             vmImage: 'ubuntu-20.04'
           e2e_pool_definition:
             vmImage: 'ubuntu-20.04'
-          environment: PROFILE="-Dflink.hadoop.version=2.8.5 -Dinclude_hadoop_aws -Dscala-2.12"
+          environment: PROFILE="-Dflink.hadoop.version=2.8.5 -Dscala-2.12"
           run_end_to_end: true
           container: flink-build-container
           jdk: 8
@@ -125,7 +125,7 @@ stages:
             name: Default
           e2e_pool_definition:
             vmImage: 'ubuntu-20.04'
-          environment: PROFILE="-Dinclude_hadoop_aws 
-Dflink.hadoop.version=3.1.3 -Phadoop3-tests,hive3"
+          environment: PROFILE="-Dflink.hadoop.version=3.1.3 
-Phadoop3-tests,hive3"
           run_end_to_end: true
           container: flink-build-container
           jdk: 8
@@ -136,7 +136,7 @@ stages:
             name: Default
           e2e_pool_definition:
             vmImage: 'ubuntu-20.04'
-          environment: PROFILE="-Dflink.hadoop.version=2.8.5 
-Dinclude_hadoop_aws -Dscala-2.12 -Djdk11 -Pjava11-target"
+          environment: PROFILE="-Dflink.hadoop.version=2.8.5 -Dscala-2.12 
-Djdk11 -Pjava11-target"
           run_end_to_end: true
           container: flink-build-container
           jdk: 11
@@ -147,7 +147,7 @@ stages:
             name: Default
           e2e_pool_definition:
             vmImage: 'ubuntu-20.04'
-          environment: PROFILE="-Dflink.hadoop.version=2.8.5 
-Dinclude_hadoop_aws -Dscala-2.12 -Penable-adaptive-scheduler"
+          environment: PROFILE="-Dflink.hadoop.version=2.8.5 -Dscala-2.12 
-Penable-adaptive-scheduler"
           run_end_to_end: true
           container: flink-build-container
           jdk: 8
@@ -162,5 +162,5 @@ stages:
       - template: build-python-wheels.yml
         parameters:
           stage_name: cron_python_wheels
-          environment: PROFILE="-Dflink.hadoop.version=2.8.5 -Dinclude_hadoop_aws -Dscala-2.12"
+          environment: PROFILE="-Dflink.hadoop.version=2.8.5 -Dscala-2.12"
           container: flink-build-container
