This is an automated email from the ASF dual-hosted git repository.

sunchao pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/spark.git


The following commit(s) were added to refs/heads/master by this push:
     new 816ebaca4a8 [SPARK-42452][BUILD] Remove `hadoop-2` profile from Apache 
Spark 3.5.0
816ebaca4a8 is described below

commit 816ebaca4a81e0a4369b1ffff43a76e23f5e4271
Author: yangjie01 <yangji...@baidu.com>
AuthorDate: Tue Apr 18 08:58:05 2023 -0700

    [SPARK-42452][BUILD] Remove `hadoop-2` profile from Apache Spark 3.5.0
    
    ### What changes were proposed in this pull request?
    This pr aims to remove `hadoop-2` profile from Apache Spark 3.5.0.
    
    ### Why are the changes needed?
    Spark 3.4.0 no longer releases Hadoop2 binary distribution (SPARK-42447) and 
Hadoop 2 GitHub Action job already removed after SPARK-42447, we can remove 
`hadoop-2` profile from Apache Spark 3.5.0.
    
    ### Does this PR introduce _any_ user-facing change?
    No
    
    ### How was this patch tested?
    Pass GitHub Actions
    
    Closes #40788 from LuciferYang/SPARK-42452.
    
    Authored-by: yangjie01 <yangji...@baidu.com>
    Signed-off-by: Chao Sun <sunc...@apple.com>
---
 assembly/README                                    |   2 +-
 dev/deps/spark-deps-hadoop-2-hive-2.3              | 273 ---------------------
 dev/run-tests-jenkins.py                           |   3 -
 dev/run-tests.py                                   |   1 -
 dev/test-dependencies.sh                           |   3 -
 docs/building-spark.md                             |   4 -
 hadoop-cloud/pom.xml                               |   7 -
 pom.xml                                            |  19 --
 python/pyspark/install.py                          |   2 +-
 python/pyspark/tests/test_install_spark.py         |  26 --
 .../kubernetes/integration-tests/README.md         |   6 +-
 .../kubernetes/integration-tests/pom.xml           |  11 -
 resource-managers/yarn/pom.xml                     |  36 ---
 sql/core/pom.xml                                   |  18 --
 14 files changed, 3 insertions(+), 408 deletions(-)

diff --git a/assembly/README b/assembly/README
index 1fd6d885834..a380d8cb330 100644
--- a/assembly/README
+++ b/assembly/README
@@ -9,4 +9,4 @@ This module is off by default. To activate it specify the 
profile in the command
 
 If you need to build an assembly for a different version of Hadoop the
 hadoop-version system property needs to be set as in this example:
-  -Dhadoop.version=2.7.4
+  -Dhadoop.version=3.3.5
diff --git a/dev/deps/spark-deps-hadoop-2-hive-2.3 
b/dev/deps/spark-deps-hadoop-2-hive-2.3
deleted file mode 100644
index 5fa2ddfd367..00000000000
--- a/dev/deps/spark-deps-hadoop-2-hive-2.3
+++ /dev/null
@@ -1,273 +0,0 @@
-HikariCP/2.5.1//HikariCP-2.5.1.jar
-JLargeArrays/1.5//JLargeArrays-1.5.jar
-JTransforms/3.1//JTransforms-3.1.jar
-RoaringBitmap/0.9.39//RoaringBitmap-0.9.39.jar
-ST4/4.0.4//ST4-4.0.4.jar
-activation/1.1.1//activation-1.1.1.jar
-aircompressor/0.21//aircompressor-0.21.jar
-algebra_2.12/2.0.1//algebra_2.12-2.0.1.jar
-annotations/17.0.0//annotations-17.0.0.jar
-antlr-runtime/3.5.2//antlr-runtime-3.5.2.jar
-antlr4-runtime/4.9.3//antlr4-runtime-4.9.3.jar
-aopalliance-repackaged/2.6.1//aopalliance-repackaged-2.6.1.jar
-aopalliance/1.0//aopalliance-1.0.jar
-apacheds-i18n/2.0.0-M15//apacheds-i18n-2.0.0-M15.jar
-apacheds-kerberos-codec/2.0.0-M15//apacheds-kerberos-codec-2.0.0-M15.jar
-api-asn1-api/1.0.0-M20//api-asn1-api-1.0.0-M20.jar
-api-util/1.0.0-M20//api-util-1.0.0-M20.jar
-arpack/3.0.3//arpack-3.0.3.jar
-arpack_combined_all/0.1//arpack_combined_all-0.1.jar
-arrow-format/11.0.0//arrow-format-11.0.0.jar
-arrow-memory-core/11.0.0//arrow-memory-core-11.0.0.jar
-arrow-memory-netty/11.0.0//arrow-memory-netty-11.0.0.jar
-arrow-vector/11.0.0//arrow-vector-11.0.0.jar
-audience-annotations/0.5.0//audience-annotations-0.5.0.jar
-avro-ipc/1.11.1//avro-ipc-1.11.1.jar
-avro-mapred/1.11.1//avro-mapred-1.11.1.jar
-avro/1.11.1//avro-1.11.1.jar
-azure-storage/2.0.0//azure-storage-2.0.0.jar
-blas/3.0.3//blas-3.0.3.jar
-bonecp/0.8.0.RELEASE//bonecp-0.8.0.RELEASE.jar
-breeze-macros_2.12/2.1.0//breeze-macros_2.12-2.1.0.jar
-breeze_2.12/2.1.0//breeze_2.12-2.1.0.jar
-cats-kernel_2.12/2.1.1//cats-kernel_2.12-2.1.1.jar
-chill-java/0.10.0//chill-java-0.10.0.jar
-chill_2.12/0.10.0//chill_2.12-0.10.0.jar
-commons-beanutils/1.9.4//commons-beanutils-1.9.4.jar
-commons-cli/1.5.0//commons-cli-1.5.0.jar
-commons-codec/1.15//commons-codec-1.15.jar
-commons-collections/3.2.2//commons-collections-3.2.2.jar
-commons-collections4/4.4//commons-collections4-4.4.jar
-commons-compiler/3.1.9//commons-compiler-3.1.9.jar
-commons-compress/1.22//commons-compress-1.22.jar
-commons-configuration/1.6//commons-configuration-1.6.jar
-commons-crypto/1.1.0//commons-crypto-1.1.0.jar
-commons-dbcp/1.4//commons-dbcp-1.4.jar
-commons-digester/1.8//commons-digester-1.8.jar
-commons-httpclient/3.1//commons-httpclient-3.1.jar
-commons-io/2.4//commons-io-2.4.jar
-commons-lang/2.6//commons-lang-2.6.jar
-commons-lang3/3.12.0//commons-lang3-3.12.0.jar
-commons-logging/1.1.3//commons-logging-1.1.3.jar
-commons-math3/3.6.1//commons-math3-3.6.1.jar
-commons-net/3.1//commons-net-3.1.jar
-commons-pool/1.5.4//commons-pool-1.5.4.jar
-commons-text/1.10.0//commons-text-1.10.0.jar
-compress-lzf/1.1.2//compress-lzf-1.1.2.jar
-curator-client/2.7.1//curator-client-2.7.1.jar
-curator-framework/2.7.1//curator-framework-2.7.1.jar
-curator-recipes/2.7.1//curator-recipes-2.7.1.jar
-datanucleus-api-jdo/4.2.4//datanucleus-api-jdo-4.2.4.jar
-datanucleus-core/4.1.17//datanucleus-core-4.1.17.jar
-datanucleus-rdbms/4.1.19//datanucleus-rdbms-4.1.19.jar
-derby/10.14.2.0//derby-10.14.2.0.jar
-dropwizard-metrics-hadoop-metrics2-reporter/0.1.2//dropwizard-metrics-hadoop-metrics2-reporter-0.1.2.jar
-flatbuffers-java/1.12.0//flatbuffers-java-1.12.0.jar
-gcs-connector/hadoop2-2.2.11/shaded/gcs-connector-hadoop2-2.2.11-shaded.jar
-gmetric4j/1.0.10//gmetric4j-1.0.10.jar
-gson/2.2.4//gson-2.2.4.jar
-guava/14.0.1//guava-14.0.1.jar
-guice-servlet/3.0//guice-servlet-3.0.jar
-guice/3.0//guice-3.0.jar
-hadoop-annotations/2.7.4//hadoop-annotations-2.7.4.jar
-hadoop-auth/2.7.4//hadoop-auth-2.7.4.jar
-hadoop-aws/2.7.4//hadoop-aws-2.7.4.jar
-hadoop-azure/2.7.4//hadoop-azure-2.7.4.jar
-hadoop-client/2.7.4//hadoop-client-2.7.4.jar
-hadoop-common/2.7.4//hadoop-common-2.7.4.jar
-hadoop-hdfs/2.7.4//hadoop-hdfs-2.7.4.jar
-hadoop-mapreduce-client-app/2.7.4//hadoop-mapreduce-client-app-2.7.4.jar
-hadoop-mapreduce-client-common/2.7.4//hadoop-mapreduce-client-common-2.7.4.jar
-hadoop-mapreduce-client-core/2.7.4//hadoop-mapreduce-client-core-2.7.4.jar
-hadoop-mapreduce-client-jobclient/2.7.4//hadoop-mapreduce-client-jobclient-2.7.4.jar
-hadoop-mapreduce-client-shuffle/2.7.4//hadoop-mapreduce-client-shuffle-2.7.4.jar
-hadoop-openstack/2.7.4//hadoop-openstack-2.7.4.jar
-hadoop-yarn-api/2.7.4//hadoop-yarn-api-2.7.4.jar
-hadoop-yarn-client/2.7.4//hadoop-yarn-client-2.7.4.jar
-hadoop-yarn-common/2.7.4//hadoop-yarn-common-2.7.4.jar
-hadoop-yarn-server-common/2.7.4//hadoop-yarn-server-common-2.7.4.jar
-hadoop-yarn-server-web-proxy/2.7.4//hadoop-yarn-server-web-proxy-2.7.4.jar
-hive-beeline/2.3.9//hive-beeline-2.3.9.jar
-hive-cli/2.3.9//hive-cli-2.3.9.jar
-hive-common/2.3.9//hive-common-2.3.9.jar
-hive-exec/2.3.9/core/hive-exec-2.3.9-core.jar
-hive-jdbc/2.3.9//hive-jdbc-2.3.9.jar
-hive-llap-common/2.3.9//hive-llap-common-2.3.9.jar
-hive-metastore/2.3.9//hive-metastore-2.3.9.jar
-hive-serde/2.3.9//hive-serde-2.3.9.jar
-hive-service-rpc/3.1.3//hive-service-rpc-3.1.3.jar
-hive-shims-0.23/2.3.9//hive-shims-0.23-2.3.9.jar
-hive-shims-common/2.3.9//hive-shims-common-2.3.9.jar
-hive-shims-scheduler/2.3.9//hive-shims-scheduler-2.3.9.jar
-hive-shims/2.3.9//hive-shims-2.3.9.jar
-hive-storage-api/2.8.1//hive-storage-api-2.8.1.jar
-hk2-api/2.6.1//hk2-api-2.6.1.jar
-hk2-locator/2.6.1//hk2-locator-2.6.1.jar
-hk2-utils/2.6.1//hk2-utils-2.6.1.jar
-htrace-core/3.1.0-incubating//htrace-core-3.1.0-incubating.jar
-httpclient/4.5.14//httpclient-4.5.14.jar
-httpcore/4.4.16//httpcore-4.4.16.jar
-istack-commons-runtime/3.0.8//istack-commons-runtime-3.0.8.jar
-ivy/2.5.1//ivy-2.5.1.jar
-jackson-annotations/2.14.2//jackson-annotations-2.14.2.jar
-jackson-core-asl/1.9.13//jackson-core-asl-1.9.13.jar
-jackson-core/2.14.2//jackson-core-2.14.2.jar
-jackson-databind/2.14.2//jackson-databind-2.14.2.jar
-jackson-dataformat-cbor/2.14.2//jackson-dataformat-cbor-2.14.2.jar
-jackson-dataformat-yaml/2.14.2//jackson-dataformat-yaml-2.14.2.jar
-jackson-datatype-jsr310/2.14.2//jackson-datatype-jsr310-2.14.2.jar
-jackson-jaxrs/1.9.13//jackson-jaxrs-1.9.13.jar
-jackson-mapper-asl/1.9.13//jackson-mapper-asl-1.9.13.jar
-jackson-module-scala_2.12/2.14.2//jackson-module-scala_2.12-2.14.2.jar
-jackson-xc/1.9.13//jackson-xc-1.9.13.jar
-jakarta.annotation-api/1.3.5//jakarta.annotation-api-1.3.5.jar
-jakarta.inject/2.6.1//jakarta.inject-2.6.1.jar
-jakarta.servlet-api/4.0.3//jakarta.servlet-api-4.0.3.jar
-jakarta.validation-api/2.0.2//jakarta.validation-api-2.0.2.jar
-jakarta.ws.rs-api/2.1.6//jakarta.ws.rs-api-2.1.6.jar
-jakarta.xml.bind-api/2.3.2//jakarta.xml.bind-api-2.3.2.jar
-janino/3.1.9//janino-3.1.9.jar
-javassist/3.25.0-GA//javassist-3.25.0-GA.jar
-javax.inject/1//javax.inject-1.jar
-javax.jdo/3.2.0-m3//javax.jdo-3.2.0-m3.jar
-javolution/5.5.1//javolution-5.5.1.jar
-jaxb-api/2.2.11//jaxb-api-2.2.11.jar
-jaxb-runtime/2.3.2//jaxb-runtime-2.3.2.jar
-jcl-over-slf4j/2.0.7//jcl-over-slf4j-2.0.7.jar
-jdo-api/3.0.1//jdo-api-3.0.1.jar
-jersey-client/2.36//jersey-client-2.36.jar
-jersey-common/2.36//jersey-common-2.36.jar
-jersey-container-servlet-core/2.36//jersey-container-servlet-core-2.36.jar
-jersey-container-servlet/2.36//jersey-container-servlet-2.36.jar
-jersey-hk2/2.36//jersey-hk2-2.36.jar
-jersey-server/2.36//jersey-server-2.36.jar
-jetty-sslengine/6.1.26//jetty-sslengine-6.1.26.jar
-jetty-util/6.1.26//jetty-util-6.1.26.jar
-jetty-util/9.4.51.v20230217//jetty-util-9.4.51.v20230217.jar
-jetty/6.1.26//jetty-6.1.26.jar
-jline/2.14.6//jline-2.14.6.jar
-joda-time/2.12.2//joda-time-2.12.2.jar
-jodd-core/3.5.2//jodd-core-3.5.2.jar
-jpam/1.1//jpam-1.1.jar
-json/1.8//json-1.8.jar
-json4s-ast_2.12/3.7.0-M11//json4s-ast_2.12-3.7.0-M11.jar
-json4s-core_2.12/3.7.0-M11//json4s-core_2.12-3.7.0-M11.jar
-json4s-jackson_2.12/3.7.0-M11//json4s-jackson_2.12-3.7.0-M11.jar
-json4s-scalap_2.12/3.7.0-M11//json4s-scalap_2.12-3.7.0-M11.jar
-jsp-api/2.1//jsp-api-2.1.jar
-jsr305/3.0.0//jsr305-3.0.0.jar
-jta/1.1//jta-1.1.jar
-jul-to-slf4j/2.0.7//jul-to-slf4j-2.0.7.jar
-kryo-shaded/4.0.2//kryo-shaded-4.0.2.jar
-kubernetes-client-api/6.5.1//kubernetes-client-api-6.5.1.jar
-kubernetes-client/6.5.1//kubernetes-client-6.5.1.jar
-kubernetes-httpclient-okhttp/6.5.1//kubernetes-httpclient-okhttp-6.5.1.jar
-kubernetes-model-admissionregistration/6.5.1//kubernetes-model-admissionregistration-6.5.1.jar
-kubernetes-model-apiextensions/6.5.1//kubernetes-model-apiextensions-6.5.1.jar
-kubernetes-model-apps/6.5.1//kubernetes-model-apps-6.5.1.jar
-kubernetes-model-autoscaling/6.5.1//kubernetes-model-autoscaling-6.5.1.jar
-kubernetes-model-batch/6.5.1//kubernetes-model-batch-6.5.1.jar
-kubernetes-model-certificates/6.5.1//kubernetes-model-certificates-6.5.1.jar
-kubernetes-model-common/6.5.1//kubernetes-model-common-6.5.1.jar
-kubernetes-model-coordination/6.5.1//kubernetes-model-coordination-6.5.1.jar
-kubernetes-model-core/6.5.1//kubernetes-model-core-6.5.1.jar
-kubernetes-model-discovery/6.5.1//kubernetes-model-discovery-6.5.1.jar
-kubernetes-model-events/6.5.1//kubernetes-model-events-6.5.1.jar
-kubernetes-model-extensions/6.5.1//kubernetes-model-extensions-6.5.1.jar
-kubernetes-model-flowcontrol/6.5.1//kubernetes-model-flowcontrol-6.5.1.jar
-kubernetes-model-gatewayapi/6.5.1//kubernetes-model-gatewayapi-6.5.1.jar
-kubernetes-model-metrics/6.5.1//kubernetes-model-metrics-6.5.1.jar
-kubernetes-model-networking/6.5.1//kubernetes-model-networking-6.5.1.jar
-kubernetes-model-node/6.5.1//kubernetes-model-node-6.5.1.jar
-kubernetes-model-policy/6.5.1//kubernetes-model-policy-6.5.1.jar
-kubernetes-model-rbac/6.5.1//kubernetes-model-rbac-6.5.1.jar
-kubernetes-model-resource/6.5.1//kubernetes-model-resource-6.5.1.jar
-kubernetes-model-scheduling/6.5.1//kubernetes-model-scheduling-6.5.1.jar
-kubernetes-model-storageclass/6.5.1//kubernetes-model-storageclass-6.5.1.jar
-lapack/3.0.3//lapack-3.0.3.jar
-leveldbjni-all/1.8//leveldbjni-all-1.8.jar
-libfb303/0.9.3//libfb303-0.9.3.jar
-libthrift/0.12.0//libthrift-0.12.0.jar
-log4j-1.2-api/2.20.0//log4j-1.2-api-2.20.0.jar
-log4j-api/2.20.0//log4j-api-2.20.0.jar
-log4j-core/2.20.0//log4j-core-2.20.0.jar
-log4j-slf4j2-impl/2.20.0//log4j-slf4j2-impl-2.20.0.jar
-logging-interceptor/3.12.12//logging-interceptor-3.12.12.jar
-lz4-java/1.8.0//lz4-java-1.8.0.jar
-mesos/1.4.3/shaded-protobuf/mesos-1.4.3-shaded-protobuf.jar
-metrics-core/4.2.17//metrics-core-4.2.17.jar
-metrics-graphite/4.2.17//metrics-graphite-4.2.17.jar
-metrics-jmx/4.2.17//metrics-jmx-4.2.17.jar
-metrics-json/4.2.17//metrics-json-4.2.17.jar
-metrics-jvm/4.2.17//metrics-jvm-4.2.17.jar
-minlog/1.3.0//minlog-1.3.0.jar
-netty-all/4.1.89.Final//netty-all-4.1.89.Final.jar
-netty-buffer/4.1.89.Final//netty-buffer-4.1.89.Final.jar
-netty-codec-http/4.1.89.Final//netty-codec-http-4.1.89.Final.jar
-netty-codec-http2/4.1.89.Final//netty-codec-http2-4.1.89.Final.jar
-netty-codec-socks/4.1.89.Final//netty-codec-socks-4.1.89.Final.jar
-netty-codec/4.1.89.Final//netty-codec-4.1.89.Final.jar
-netty-common/4.1.89.Final//netty-common-4.1.89.Final.jar
-netty-handler-proxy/4.1.89.Final//netty-handler-proxy-4.1.89.Final.jar
-netty-handler/4.1.89.Final//netty-handler-4.1.89.Final.jar
-netty-resolver/4.1.89.Final//netty-resolver-4.1.89.Final.jar
-netty-transport-classes-epoll/4.1.89.Final//netty-transport-classes-epoll-4.1.89.Final.jar
-netty-transport-classes-kqueue/4.1.89.Final//netty-transport-classes-kqueue-4.1.89.Final.jar
-netty-transport-native-epoll/4.1.89.Final/linux-aarch_64/netty-transport-native-epoll-4.1.89.Final-linux-aarch_64.jar
-netty-transport-native-epoll/4.1.89.Final/linux-x86_64/netty-transport-native-epoll-4.1.89.Final-linux-x86_64.jar
-netty-transport-native-kqueue/4.1.89.Final/osx-aarch_64/netty-transport-native-kqueue-4.1.89.Final-osx-aarch_64.jar
-netty-transport-native-kqueue/4.1.89.Final/osx-x86_64/netty-transport-native-kqueue-4.1.89.Final-osx-x86_64.jar
-netty-transport-native-unix-common/4.1.89.Final//netty-transport-native-unix-common-4.1.89.Final.jar
-netty-transport/4.1.89.Final//netty-transport-4.1.89.Final.jar
-objenesis/3.2//objenesis-3.2.jar
-okhttp/3.12.12//okhttp-3.12.12.jar
-okio/1.15.0//okio-1.15.0.jar
-opencsv/2.3//opencsv-2.3.jar
-orc-core/1.8.3/shaded-protobuf/orc-core-1.8.3-shaded-protobuf.jar
-orc-mapreduce/1.8.3/shaded-protobuf/orc-mapreduce-1.8.3-shaded-protobuf.jar
-orc-shims/1.8.3//orc-shims-1.8.3.jar
-oro/2.0.8//oro-2.0.8.jar
-osgi-resource-locator/1.0.3//osgi-resource-locator-1.0.3.jar
-paranamer/2.8//paranamer-2.8.jar
-parquet-column/1.13.0//parquet-column-1.13.0.jar
-parquet-common/1.13.0//parquet-common-1.13.0.jar
-parquet-encoding/1.13.0//parquet-encoding-1.13.0.jar
-parquet-format-structures/1.13.0//parquet-format-structures-1.13.0.jar
-parquet-hadoop/1.13.0//parquet-hadoop-1.13.0.jar
-parquet-jackson/1.13.0//parquet-jackson-1.13.0.jar
-pickle/1.3//pickle-1.3.jar
-protobuf-java/2.5.0//protobuf-java-2.5.0.jar
-py4j/0.10.9.7//py4j-0.10.9.7.jar
-remotetea-oncrpc/1.1.2//remotetea-oncrpc-1.1.2.jar
-rocksdbjni/8.0.0//rocksdbjni-8.0.0.jar
-scala-collection-compat_2.12/2.7.0//scala-collection-compat_2.12-2.7.0.jar
-scala-compiler/2.12.17//scala-compiler-2.12.17.jar
-scala-library/2.12.17//scala-library-2.12.17.jar
-scala-parser-combinators_2.12/2.2.0//scala-parser-combinators_2.12-2.2.0.jar
-scala-reflect/2.12.17//scala-reflect-2.12.17.jar
-scala-xml_2.12/2.1.0//scala-xml_2.12-2.1.0.jar
-shims/0.9.39//shims-0.9.39.jar
-slf4j-api/2.0.7//slf4j-api-2.0.7.jar
-snakeyaml-engine/2.6//snakeyaml-engine-2.6.jar
-snakeyaml/1.33//snakeyaml-1.33.jar
-snappy-java/1.1.9.1//snappy-java-1.1.9.1.jar
-spire-macros_2.12/0.17.0//spire-macros_2.12-0.17.0.jar
-spire-platform_2.12/0.17.0//spire-platform_2.12-0.17.0.jar
-spire-util_2.12/0.17.0//spire-util_2.12-0.17.0.jar
-spire_2.12/0.17.0//spire_2.12-0.17.0.jar
-stax-api/1.0.1//stax-api-1.0.1.jar
-stream/2.9.6//stream-2.9.6.jar
-super-csv/2.2.0//super-csv-2.2.0.jar
-threeten-extra/1.7.1//threeten-extra-1.7.1.jar
-tink/1.7.0//tink-1.7.0.jar
-transaction-api/1.1//transaction-api-1.1.jar
-univocity-parsers/2.9.1//univocity-parsers-2.9.1.jar
-xbean-asm9-shaded/4.22//xbean-asm9-shaded-4.22.jar
-xercesImpl/2.12.2//xercesImpl-2.12.2.jar
-xml-apis/1.4.01//xml-apis-1.4.01.jar
-xmlenc/0.52//xmlenc-0.52.jar
-xz/1.9//xz-1.9.jar
-zjsonpatch/0.3.0//zjsonpatch-0.3.0.jar
-zookeeper-jute/3.6.3//zookeeper-jute-3.6.3.jar
-zookeeper/3.6.3//zookeeper-3.6.3.jar
-zstd-jni/1.5.5-1//zstd-jni-1.5.5-1.jar
diff --git a/dev/run-tests-jenkins.py b/dev/run-tests-jenkins.py
index 548bc2ee32c..aa82b28e382 100755
--- a/dev/run-tests-jenkins.py
+++ b/dev/run-tests-jenkins.py
@@ -178,9 +178,6 @@ def main():
     # Switch to a Maven-based build if the PR title contains "test-maven":
     if "test-maven" in ghprb_pull_title:
         os.environ["SPARK_JENKINS_BUILD_TOOL"] = "maven"
-    # Switch the Hadoop profile based on the PR title:
-    if "test-hadoop2" in ghprb_pull_title:
-        os.environ["SPARK_JENKINS_BUILD_PROFILE"] = "hadoop2"
     if "test-hadoop3" in ghprb_pull_title:
         os.environ["SPARK_JENKINS_BUILD_PROFILE"] = "hadoop3"
     # Switch the Scala profile based on the PR title:
diff --git a/dev/run-tests.py b/dev/run-tests.py
index 92768c96905..c0c281b549e 100755
--- a/dev/run-tests.py
+++ b/dev/run-tests.py
@@ -217,7 +217,6 @@ def get_hadoop_profiles(hadoop_version):
     """
 
     sbt_maven_hadoop_profiles = {
-        "hadoop2": ["-Phadoop-2"],
         "hadoop3": ["-Phadoop-3"],
     }
 
diff --git a/dev/test-dependencies.sh b/dev/test-dependencies.sh
index 69575834ee1..d7967ac3afa 100755
--- a/dev/test-dependencies.sh
+++ b/dev/test-dependencies.sh
@@ -34,7 +34,6 @@ HADOOP_MODULE_PROFILES="-Phive-thriftserver -Pmesos 
-Pkubernetes -Pyarn -Phive \
     -Pspark-ganglia-lgpl -Pkinesis-asl -Phadoop-cloud"
 MVN="build/mvn"
 HADOOP_HIVE_PROFILES=(
-    hadoop-2-hive-2.3
     hadoop-3-hive-2.3
 )
 
@@ -85,8 +84,6 @@ $MVN -q versions:set -DnewVersion=$TEMP_VERSION 
-DgenerateBackupPoms=false > /de
 for HADOOP_HIVE_PROFILE in "${HADOOP_HIVE_PROFILES[@]}"; do
   if [[ $HADOOP_HIVE_PROFILE == **hadoop-3-hive-2.3** ]]; then
     HADOOP_PROFILE=hadoop-3
-  else
-    HADOOP_PROFILE=hadoop-2
   fi
   echo "Performing Maven install for $HADOOP_HIVE_PROFILE"
   $MVN $HADOOP_MODULE_PROFILES -P$HADOOP_PROFILE jar:jar jar:test-jar 
install:install clean -q
diff --git a/docs/building-spark.md b/docs/building-spark.md
index de92f7cf594..ba8dddbf6b1 100644
--- a/docs/building-spark.md
+++ b/docs/building-spark.md
@@ -79,10 +79,6 @@ Example:
 
     ./build/mvn -Pyarn -Dhadoop.version=3.3.0 -DskipTests clean package
 
-If you want to build with Hadoop 2.x, enable `hadoop-2` profile:
-
-    ./build/mvn -Phadoop-2 -Pyarn -Dhadoop.version=2.8.5 -DskipTests clean 
package
-
 ## Building With Hive and JDBC Support
 
 To enable Hive integration for Spark SQL along with its JDBC server and CLI,
diff --git a/hadoop-cloud/pom.xml b/hadoop-cloud/pom.xml
index 5a4969cb6b2..fece6974dbc 100644
--- a/hadoop-cloud/pom.xml
+++ b/hadoop-cloud/pom.xml
@@ -208,13 +208,6 @@
   </dependencies>
 
   <profiles>
-    <!--
-     hadoop-3 profile is activated by default so hadoop-2 profile
-     also needs to be declared here for building with -Phadoop-2.
-    -->
-    <profile>
-      <id>hadoop-2</id>
-    </profile>
     <!--
      Hadoop 3 simplifies the classpath, and adds a new committer base class 
which
      enables store-specific committers.
diff --git a/pom.xml b/pom.xml
index 9811742b866..9a4a7f0d3d6 100644
--- a/pom.xml
+++ b/pom.xml
@@ -3504,25 +3504,6 @@
     
http://hadoop.apache.org/docs/ra.b.c/hadoop-project-dist/hadoop-common/dependency-analysis.html
     -->
 
-    <profile>
-      <id>hadoop-2</id>
-      <properties>
-        <!-- make sure to update IsolatedClientLoader whenever this version is 
changed -->
-        <hadoop.version>2.7.4</hadoop.version>
-        <curator.version>2.7.1</curator.version>
-        <commons-io.version>2.4</commons-io.version>
-        <!--
-          the declaration site above of these variables explains why we need 
to re-assign them here
-        -->
-        <hadoop-client-api.artifact>hadoop-client</hadoop-client-api.artifact>
-        
<hadoop-client-runtime.artifact>hadoop-yarn-api</hadoop-client-runtime.artifact>
-        
<hadoop-client-minicluster.artifact>hadoop-client</hadoop-client-minicluster.artifact>
-        <gcs-connector.version>hadoop2-2.2.11</gcs-connector.version>
-        <!-- SPARK-36547: Please don't upgrade the version below, otherwise 
there will be an error on building Hadoop 2.7 package -->
-        <scala-maven-plugin.version>4.3.0</scala-maven-plugin.version>
-      </properties>
-    </profile>
-
     <profile>
       <id>hadoop-3</id>
       <!-- Default hadoop profile. Uses global properties. -->
diff --git a/python/pyspark/install.py b/python/pyspark/install.py
index f6230cd5ad8..d71177b5a8f 100644
--- a/python/pyspark/install.py
+++ b/python/pyspark/install.py
@@ -26,7 +26,7 @@ from shutil import rmtree
 
 DEFAULT_HADOOP = "hadoop3"
 DEFAULT_HIVE = "hive2.3"
-SUPPORTED_HADOOP_VERSIONS = ["hadoop2", "hadoop3", "without-hadoop"]
+SUPPORTED_HADOOP_VERSIONS = ["hadoop3", "without-hadoop"]
 SUPPORTED_HIVE_VERSIONS = ["hive2.3"]
 UNSUPPORTED_COMBINATIONS = []  # type: ignore
 
diff --git a/python/pyspark/tests/test_install_spark.py 
b/python/pyspark/tests/test_install_spark.py
index 6f39a09ae18..e980a17673f 100644
--- a/python/pyspark/tests/test_install_spark.py
+++ b/python/pyspark/tests/test_install_spark.py
@@ -51,18 +51,6 @@ class SparkInstallationTestCase(unittest.TestCase):
             "spark-3.0.0-bin-hadoop3.2", checked_package_name("spark-3.0.0", 
"hadoop3.2", "hive2.3")
         )
 
-        spark_version, hadoop_version, hive_version = 
checked_versions("3.2.0", "2", "2.3")
-        self.assertEqual(
-            "spark-3.2.0-bin-hadoop2.7",
-            checked_package_name(spark_version, hadoop_version, hive_version),
-        )
-
-        spark_version, hadoop_version, hive_version = 
checked_versions("3.3.0", "2", "2.3")
-        self.assertEqual(
-            "spark-3.3.0-bin-hadoop2",
-            checked_package_name(spark_version, hadoop_version, hive_version),
-        )
-
         spark_version, hadoop_version, hive_version = 
checked_versions("3.2.0", "3", "2.3")
         self.assertEqual(
             "spark-3.2.0-bin-hadoop3.2",
@@ -79,15 +67,6 @@ class SparkInstallationTestCase(unittest.TestCase):
         test_version = "3.0.1"  # Just pick one version to test.
 
         # Positive test cases
-        self.assertEqual(
-            ("spark-3.0.0", "hadoop2.7", "hive2.3"),
-            checked_versions("spark-3.0.0", "hadoop2", "hive2.3"),
-        )
-
-        self.assertEqual(
-            ("spark-3.0.0", "hadoop2.7", "hive2.3"), checked_versions("3.0.0", 
"2", "2.3")
-        )
-
         self.assertEqual(
             ("spark-2.4.1", "without-hadoop", "hive2.3"),
             checked_versions("2.4.1", "without", "2.3"),
@@ -103,11 +82,6 @@ class SparkInstallationTestCase(unittest.TestCase):
             checked_versions("spark-3.3.0", "hadoop3", "hive2.3"),
         )
 
-        self.assertEqual(
-            ("spark-3.3.0", "hadoop2", "hive2.3"),
-            checked_versions("spark-3.3.0", "hadoop2", "hive2.3"),
-        )
-
         # Negative test cases
         for (hadoop_version, hive_version) in UNSUPPORTED_COMBINATIONS:
             with self.assertRaisesRegex(RuntimeError, "Hive.*should.*Hadoop"):
diff --git a/resource-managers/kubernetes/integration-tests/README.md 
b/resource-managers/kubernetes/integration-tests/README.md
index b85e57db768..2944c189ed4 100644
--- a/resource-managers/kubernetes/integration-tests/README.md
+++ b/resource-managers/kubernetes/integration-tests/README.md
@@ -23,10 +23,6 @@ and the custom Dockerfile need to include a Java 
installation by itself.
 
     ./dev/dev-run-integration-tests.sh --docker-file 
../docker/src/main/dockerfiles/spark/Dockerfile
 
-To run tests with Hadoop 2.x instead of Hadoop 3.x, use `--hadoop-profile`.
-
-    ./dev/dev-run-integration-tests.sh --hadoop-profile hadoop-2
-
 The minimum tested version of Minikube is 1.28.0. The kube-dns addon must be 
enabled. Minikube should
 run with a minimum of 4 CPUs and 6G of memory:
 
@@ -133,7 +129,7 @@ properties to Maven.  For example:
 
     mvn integration-test -am -pl :spark-kubernetes-integration-tests_2.12 \
                             -Pkubernetes -Pkubernetes-integration-tests \
-                            -Phadoop-2 -Dhadoop.version=2.7.4 \
+                            -Phadoop-3 -Dhadoop.version=3.3.5 \
                             
-Dspark.kubernetes.test.sparkTgz=spark-3.0.0-SNAPSHOT-bin-example.tgz \
                             -Dspark.kubernetes.test.imageTag=sometag \
                             
-Dspark.kubernetes.test.imageRepo=docker.io/somerepo \
diff --git a/resource-managers/kubernetes/integration-tests/pom.xml 
b/resource-managers/kubernetes/integration-tests/pom.xml
index 1ffd5adba84..02894f82eec 100644
--- a/resource-managers/kubernetes/integration-tests/pom.xml
+++ b/resource-managers/kubernetes/integration-tests/pom.xml
@@ -203,17 +203,6 @@
   </build>
 
   <profiles>
-    <profile>
-      <id>hadoop-2</id>
-      <dependencies>
-        <dependency>
-          <groupId>com.amazonaws</groupId>
-          <artifactId>aws-java-sdk</artifactId>
-          <version>1.7.4</version>
-          <scope>test</scope>
-        </dependency>
-      </dependencies>
-    </profile>
     <profile>
       <id>hadoop-3</id>
       <activation>
diff --git a/resource-managers/yarn/pom.xml b/resource-managers/yarn/pom.xml
index 19cfdb8dcda..729b81e0dc7 100644
--- a/resource-managers/yarn/pom.xml
+++ b/resource-managers/yarn/pom.xml
@@ -33,42 +33,6 @@
   </properties>
 
   <profiles>
-    <profile>
-      <id>hadoop-2</id>
-      <dependencies>
-        <dependency>
-          <groupId>org.apache.hadoop</groupId>
-          <artifactId>hadoop-yarn-api</artifactId>
-        </dependency>
-        <dependency>
-          <groupId>org.apache.hadoop</groupId>
-          <artifactId>hadoop-yarn-common</artifactId>
-        </dependency>
-        <dependency>
-          <groupId>org.apache.hadoop</groupId>
-          <artifactId>hadoop-yarn-server-web-proxy</artifactId>
-        </dependency>
-        <dependency>
-          <groupId>org.apache.hadoop</groupId>
-          <artifactId>hadoop-yarn-client</artifactId>
-        </dependency>
-        <dependency>
-          <groupId>org.apache.hadoop</groupId>
-          <artifactId>hadoop-yarn-server-tests</artifactId>
-          <classifier>tests</classifier>
-          <scope>test</scope>
-        </dependency>
-        <!--
-          Hack to exclude 
org.apache.hadoop:hadoop-yarn-server-resourcemanager:jar:tests.
-          See the parent pom.xml for more details.
-        -->
-        <dependency>
-          <groupId>org.apache.hadoop</groupId>
-          <artifactId>hadoop-yarn-server-resourcemanager</artifactId>
-          <scope>test</scope>
-        </dependency>
-      </dependencies>
-    </profile>
     <profile>
       <id>hadoop-3</id>
       <activation>
diff --git a/sql/core/pom.xml b/sql/core/pom.xml
index b4485be689b..87ab2306660 100644
--- a/sql/core/pom.xml
+++ b/sql/core/pom.xml
@@ -284,22 +284,4 @@
       </plugin>
     </plugins>
   </build>
-  
-  <profiles>
-    <profile>
-      <id>hadoop-2</id>
-      <dependencies>
-        <dependency>
-          <groupId>org.codehaus.jackson</groupId>
-          <artifactId>jackson-core-asl</artifactId>
-          <scope>test</scope>
-        </dependency>
-        <dependency>
-          <groupId>org.codehaus.jackson</groupId>
-          <artifactId>jackson-mapper-asl</artifactId>
-          <scope>test</scope>
-        </dependency>
-      </dependencies>
-    </profile>
-  </profiles>
 </project>


---------------------------------------------------------------------
To unsubscribe, e-mail: commits-unsubscr...@spark.apache.org
For additional commands, e-mail: commits-h...@spark.apache.org

Reply via email to