This is an automated email from the ASF dual-hosted git repository.

lzljs3620320 pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/paimon.git


The following commit(s) were added to refs/heads/master by this push:
     new f955994650 [test] Remove the S3 dependency from paimon-hive and paimon-spark (#7279)
f955994650 is described below

commit f95599465055561e314a423cf31f76f2ad321948
Author: Zouxxyy <[email protected]>
AuthorDate: Fri Feb 13 23:42:13 2026 +0800

    [test] Remove the S3 dependency from paimon-hive and paimon-spark (#7279)
---
 .../paimon/hive/FlinkGenericCatalogITCase.java     |   4 -
 .../apache/paimon/hive/HiveCatalogITCaseBase.java  |  27 +----
 .../org/apache/paimon/hive/HiveLocationTest.java   |  34 +-----
 .../hive/runner/PaimonEmbeddedHiveRunner.java      |  40 -------
 paimon-hive/pom.xml                                |  30 -----
 .../org/apache/paimon/spark/SparkS3ITCase.java     | 122 ---------------------
 paimon-spark/pom.xml                               |  39 -------
 7 files changed, 8 insertions(+), 288 deletions(-)

diff --git a/paimon-hive/paimon-hive-connector-common/src/test/java/org/apache/paimon/hive/FlinkGenericCatalogITCase.java b/paimon-hive/paimon-hive-connector-common/src/test/java/org/apache/paimon/hive/FlinkGenericCatalogITCase.java
index 30d4953224..0ac539224c 100644
--- a/paimon-hive/paimon-hive-connector-common/src/test/java/org/apache/paimon/hive/FlinkGenericCatalogITCase.java
+++ b/paimon-hive/paimon-hive-connector-common/src/test/java/org/apache/paimon/hive/FlinkGenericCatalogITCase.java
@@ -20,9 +20,7 @@ package org.apache.paimon.hive;
 
 import org.apache.paimon.flink.FlinkGenericCatalog;
 import org.apache.paimon.flink.FlinkGenericCatalogFactory;
-import org.apache.paimon.hive.annotation.Minio;
 import org.apache.paimon.hive.runner.PaimonEmbeddedHiveRunner;
-import org.apache.paimon.s3.MinioTestContainer;
 
 import org.apache.paimon.shade.guava30.com.google.common.collect.ImmutableList;
 
@@ -62,8 +60,6 @@ public class FlinkGenericCatalogITCase extends AbstractTestBaseJUnit4 {
     @HiveSQL(files = {})
     protected static HiveShell hiveShell;
 
-    @Minio private static MinioTestContainer minioTestContainer;
-
     private static HiveCatalog createHiveCatalog(HiveConf hiveConf) {
         return new HiveCatalog(
                 "testcatalog", null, hiveConf, HiveShimLoader.getHiveVersion(), true);
diff --git a/paimon-hive/paimon-hive-connector-common/src/test/java/org/apache/paimon/hive/HiveCatalogITCaseBase.java b/paimon-hive/paimon-hive-connector-common/src/test/java/org/apache/paimon/hive/HiveCatalogITCaseBase.java
index cc21d12ca5..5b5ee26bfd 100644
--- a/paimon-hive/paimon-hive-connector-common/src/test/java/org/apache/paimon/hive/HiveCatalogITCaseBase.java
+++ b/paimon-hive/paimon-hive-connector-common/src/test/java/org/apache/paimon/hive/HiveCatalogITCaseBase.java
@@ -25,12 +25,10 @@ import org.apache.paimon.catalog.Identifier;
 import org.apache.paimon.flink.FlinkCatalog;
 import org.apache.paimon.fs.FileIO;
 import org.apache.paimon.fs.Path;
-import org.apache.paimon.hive.annotation.Minio;
 import org.apache.paimon.hive.runner.PaimonEmbeddedHiveRunner;
 import org.apache.paimon.operation.Lock;
 import org.apache.paimon.options.Options;
 import org.apache.paimon.privilege.NoPrivilegeException;
-import org.apache.paimon.s3.MinioTestContainer;
 import org.apache.paimon.table.CatalogEnvironment;
 import org.apache.paimon.table.FileStoreTable;
 import org.apache.paimon.table.Table;
@@ -72,7 +70,6 @@ import java.util.HashMap;
 import java.util.List;
 import java.util.Map;
 import java.util.Objects;
-import java.util.UUID;
 import java.util.concurrent.Callable;
 import java.util.concurrent.ExecutionException;
 import java.util.concurrent.atomic.AtomicInteger;
@@ -98,15 +95,9 @@ public abstract class HiveCatalogITCaseBase {
     @HiveSQL(files = {})
     protected static HiveShell hiveShell;
 
-    @Minio private static MinioTestContainer minioTestContainer;
-
     private void before(boolean locationInProperties) throws Exception {
         this.locationInProperties = locationInProperties;
-        if (locationInProperties) {
-            path = minioTestContainer.getS3UriForDefaultBucket() + "/" + UUID.randomUUID();
-        } else {
-            path = folder.newFolder().toURI().toString();
-        }
+        path = folder.newFolder().toURI().toString();
         registerHiveCatalog("my_hive", new HashMap<>());
 
         tEnv.executeSql("USE CATALOG my_hive").await();
@@ -130,9 +121,6 @@ public abstract class HiveCatalogITCaseBase {
         catalogProperties.put("lock.enabled", "true");
         catalogProperties.put("location-in-properties", String.valueOf(locationInProperties));
         catalogProperties.put("warehouse", path);
-        if (locationInProperties) {
-            catalogProperties.putAll(minioTestContainer.getS3ConfigOptions());
-        }
 
         Options catalogOptions = new Options(catalogProperties);
         CatalogContext catalogContext = CatalogContext.create(catalogOptions);
@@ -193,23 +181,20 @@ public abstract class HiveCatalogITCaseBase {
                     };
 
     @Test
-    public void testDbLocation() {
-        String dbLocation = minioTestContainer.getS3UriForDefaultBucket() + "/" + UUID.randomUUID();
+    public void testDbLocation() throws Exception {
+        String dbLocation = folder.newFolder().toURI().toString();
         Catalog catalog =
                 ((FlinkCatalog) tEnv.getCatalog(tEnv.getCurrentCatalog()).get()).catalog();
         Map<String, String> properties = new HashMap<>();
         properties.put("location", dbLocation);
 
-        assertThatThrownBy(() -> catalog.createDatabase("location_test_db", false, properties))
-                .hasMessageContaining(
-                        "Could not find a file io implementation for scheme 's3' in the classpath.");
+        catalog.createDatabase("location_test_db", false, properties);
     }
 
     @Test
     @LocationInProperties
-    public void testDbLocationWithMetastoreLocationInProperties()
-            throws Catalog.DatabaseAlreadyExistException, Catalog.DatabaseNotExistException {
-        String dbLocation = minioTestContainer.getS3UriForDefaultBucket() + "/" + UUID.randomUUID();
+    public void testDbLocationWithMetastoreLocationInProperties() throws Exception {
+        String dbLocation = folder.newFolder().toURI().toString();
         Catalog catalog =
                 ((FlinkCatalog) tEnv.getCatalog(tEnv.getCurrentCatalog()).get()).catalog();
         Map<String, String> properties = new HashMap<>();
diff --git a/paimon-hive/paimon-hive-connector-common/src/test/java/org/apache/paimon/hive/HiveLocationTest.java b/paimon-hive/paimon-hive-connector-common/src/test/java/org/apache/paimon/hive/HiveLocationTest.java
index 7bb80c1936..385bfc9ede 100644
--- a/paimon-hive/paimon-hive-connector-common/src/test/java/org/apache/paimon/hive/HiveLocationTest.java
+++ b/paimon-hive/paimon-hive-connector-common/src/test/java/org/apache/paimon/hive/HiveLocationTest.java
@@ -23,11 +23,9 @@ import org.apache.paimon.catalog.CatalogContext;
 import org.apache.paimon.catalog.Identifier;
 import org.apache.paimon.fs.FileIO;
 import org.apache.paimon.fs.Path;
-import org.apache.paimon.hive.annotation.Minio;
 import org.apache.paimon.hive.runner.PaimonEmbeddedHiveRunner;
 import org.apache.paimon.options.CatalogOptions;
 import org.apache.paimon.options.Options;
-import org.apache.paimon.s3.MinioTestContainer;
 import org.apache.paimon.schema.Schema;
 import org.apache.paimon.schema.SchemaManager;
 import org.apache.paimon.schema.TableSchema;
@@ -55,13 +53,11 @@ import java.util.Collections;
 import java.util.HashMap;
 import java.util.HashSet;
 import java.util.List;
-import java.util.Map;
 import java.util.Optional;
 import java.util.Set;
 import java.util.UUID;
 
 import static org.assertj.core.api.Assertions.assertThat;
-import static org.assertj.core.api.Assertions.assertThatThrownBy;
 
 /** Test for specify location. */
 @RunWith(PaimonEmbeddedHiveRunner.class)
@@ -69,8 +65,6 @@ public class HiveLocationTest {
     @HiveSQL(files = {})
     private static HiveShell hiveShell;
 
-    @Minio private static MinioTestContainer minioTestContainer;
-
     public static final String HIVE_CONF = "/hive-conf";
 
     private HiveCatalog catalog;
@@ -96,11 +90,6 @@ public class HiveLocationTest {
         options.set(HiveCatalogOptions.HIVE_CONF_DIR, hiveShell.getBaseDir() + HIVE_CONF);
         options.set(HiveCatalogOptions.LOCATION_IN_PROPERTIES, true);
 
-        for (Map.Entry<String, String> stringStringEntry :
-                minioTestContainer.getS3ConfigOptions().entrySet()) {
-            options.set(stringStringEntry.getKey(), stringStringEntry.getValue());
-        }
-
         // create CatalogContext using the options
         catalogContext = CatalogContext.create(options);
 
@@ -112,14 +101,6 @@ public class HiveLocationTest {
         catalog = (HiveCatalog) hiveCatalogFactory.create(catalogContext);
 
         hmsClient = catalog.getHmsClient();
-
-        String setTemplate = "SET paimon.%s=%s";
-        minioTestContainer
-                .getS3ConfigOptions()
-                .forEach(
-                        (k, v) -> {
-                            hiveShell.execute(String.format(setTemplate, k, v));
-                        });
     }
 
     private static FileIO getFileIO(CatalogContext catalogContext, Path warehouse) {
@@ -204,24 +185,13 @@ public class HiveLocationTest {
     @Test
     public void testExternTableLocation() throws Exception {
 
-        String path = minioTestContainer.getS3UriForDefaultBucket() + "/" + UUID.randomUUID();
+        String path = hiveShell.getBaseDir().toAbsolutePath().toString() + "/" + UUID.randomUUID();
 
         Options conf = new Options();
         conf.set(CatalogOptions.WAREHOUSE, path);
 
-        for (Map.Entry<String, String> stringStringEntry :
-                minioTestContainer.getS3ConfigOptions().entrySet()) {
-            conf.set(stringStringEntry.getKey(), stringStringEntry.getValue());
-        }
-
         RowType rowType = RowType.of(new DataType[] {DataTypes.INT()}, new String[] {"aaa"});
-        // create table with location field
-        assertThatThrownBy(
-                        () ->
-                                createTableWithStorageLocation(
-                                        path, rowType, "test_extern_table", conf, true))
-                .isInstanceOf(IllegalArgumentException.class)
-                .hasMessageContaining("No FileSystem for scheme: s3");
+        createTableWithStorageLocation(path, rowType, "test_extern_table", conf, true);
 
         // create table with location in table properties
         Set<String> tableForTest = Sets.newHashSet("test_extern_table1", "hive_inner_table1");
diff --git a/paimon-hive/paimon-hive-connector-common/src/test/java/org/apache/paimon/hive/runner/PaimonEmbeddedHiveRunner.java b/paimon-hive/paimon-hive-connector-common/src/test/java/org/apache/paimon/hive/runner/PaimonEmbeddedHiveRunner.java
index 2a7ed597fd..f8e2a043a3 100644
--- a/paimon-hive/paimon-hive-connector-common/src/test/java/org/apache/paimon/hive/runner/PaimonEmbeddedHiveRunner.java
+++ b/paimon-hive/paimon-hive-connector-common/src/test/java/org/apache/paimon/hive/runner/PaimonEmbeddedHiveRunner.java
@@ -18,8 +18,6 @@
 
 package org.apache.paimon.hive.runner;
 
-import org.apache.paimon.hive.annotation.Minio;
-import org.apache.paimon.s3.MinioTestContainer;
 import org.apache.paimon.utils.Preconditions;
 
 import org.apache.paimon.shade.guava30.com.google.common.io.Resources;
@@ -110,24 +108,6 @@ public class PaimonEmbeddedHiveRunner extends BlockJUnit4ClassRunner {
                     }
                 };
 
-        ExternalResource minio =
-                new ExternalResource() {
-                    public MinioTestContainer minioTestContainer;
-
-                    @Override
-                    protected void before() {
-                        minioTestContainer = setMinioTestContainer(getTestClass().getJavaClass());
-                    }
-
-                    @Override
-                    protected void after() {
-                        if (minioTestContainer != null && minioTestContainer.isRunning()) {
-                            minioTestContainer.close();
-                        }
-                    }
-                };
-
-        rules.add(minio);
         rules.add(hiveShell);
         return rules;
     }
@@ -369,26 +349,6 @@ public class PaimonEmbeddedHiveRunner extends BlockJUnit4ClassRunner {
         }
     }
 
-    private MinioTestContainer setMinioTestContainer(final Class testClass) {
-        Set<Field> allFields = ReflectionUtils.getAllFields(testClass, withAnnotation(Minio.class));
-
-        Preconditions.checkState(
-                allFields.size() <= 1,
-                "At most one field of type MinioTestContainer should to be annotated with @MinIO");
-        MinioTestContainer minioTestContainer = null;
-        if (!allFields.isEmpty()) {
-            minioTestContainer = new MinioTestContainer();
-            minioTestContainer.start();
-
-            Field field = allFields.iterator().next();
-            Preconditions.checkState(
-                    ReflectionUtils.isOfType(field, MinioTestContainer.class),
-                    "Field annotated with @MinIO should be of type MinioTestContainer");
-            ReflectionUtils.setStaticField(testClass, field.getName(), minioTestContainer);
-        }
-        return minioTestContainer;
-    }
-
     /**
      * Used as a handle for the HiveShell field in the test case so that we may set it once the
      * HiveShell has been instantiated.
diff --git a/paimon-hive/pom.xml b/paimon-hive/pom.xml
index 02ca83304a..cad1bf70dd 100644
--- a/paimon-hive/pom.xml
+++ b/paimon-hive/pom.xml
@@ -49,7 +49,6 @@ under the License.
         <tez.version>0.10.0</tez.version>
         <hiverunner.version>5.5.0</hiverunner.version>
         <reflections.version>0.9.8</reflections.version>
-        <aws.version>1.12.319</aws.version>
         <iceberg.flink.version>1.19</iceberg.flink.version>
         <iceberg.flink.dropwizard.version>1.19.0</iceberg.flink.dropwizard.version>
     </properties>
@@ -96,35 +95,6 @@ under the License.
             </exclusions>
         </dependency>
 
-        <dependency>
-            <groupId>org.apache.paimon</groupId>
-            <artifactId>paimon-s3</artifactId>
-            <version>${project.version}</version>
-            <type>test-jar</type>
-            <scope>test</scope>
-        </dependency>
-
-        <dependency>
-            <groupId>org.apache.paimon</groupId>
-            <artifactId>paimon-s3</artifactId>
-            <version>${project.version}</version>
-            <scope>test</scope>
-        </dependency>
-
-        <dependency>
-            <groupId>com.amazonaws</groupId>
-            <artifactId>aws-java-sdk-core</artifactId>
-            <version>${aws.version}</version>
-            <scope>test</scope>
-        </dependency>
-
-        <dependency>
-            <groupId>com.amazonaws</groupId>
-            <artifactId>aws-java-sdk-s3</artifactId>
-            <version>${aws.version}</version>
-            <scope>test</scope>
-        </dependency>
-
         <dependency>
             <groupId>org.apache.paimon</groupId>
             <artifactId>paimon-test-utils</artifactId>
diff --git a/paimon-spark/paimon-spark-ut/src/test/java/org/apache/paimon/spark/SparkS3ITCase.java b/paimon-spark/paimon-spark-ut/src/test/java/org/apache/paimon/spark/SparkS3ITCase.java
deleted file mode 100644
index 1f34ed0d5c..0000000000
--- a/paimon-spark/paimon-spark-ut/src/test/java/org/apache/paimon/spark/SparkS3ITCase.java
+++ /dev/null
@@ -1,122 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.paimon.spark;
-
-import org.apache.paimon.catalog.CatalogContext;
-import org.apache.paimon.fs.FileIO;
-import org.apache.paimon.fs.FileIOTest;
-import org.apache.paimon.fs.Path;
-import org.apache.paimon.options.Options;
-import org.apache.paimon.s3.MinioTestContainer;
-import org.apache.paimon.testutils.junit.parameterized.ParameterizedTestExtension;
-import org.apache.paimon.testutils.junit.parameterized.Parameters;
-
-import org.apache.spark.sql.Row;
-import org.apache.spark.sql.SparkSession;
-import org.junit.jupiter.api.AfterAll;
-import org.junit.jupiter.api.AfterEach;
-import org.junit.jupiter.api.BeforeAll;
-import org.junit.jupiter.api.TestTemplate;
-import org.junit.jupiter.api.extension.ExtendWith;
-import org.junit.jupiter.api.extension.RegisterExtension;
-
-import java.io.IOException;
-import java.util.Arrays;
-import java.util.Collection;
-import java.util.List;
-import java.util.UUID;
-
-import static org.assertj.core.api.Assertions.assertThat;
-
-/** ITCase for using S3 in Spark. */
-@ExtendWith(ParameterizedTestExtension.class)
-public class SparkS3ITCase {
-
-    @RegisterExtension
-    public static final MinioTestContainer MINIO_CONTAINER = new MinioTestContainer();
-
-    private static Path warehousePath;
-
-    private static SparkSession spark = null;
-
-    @BeforeAll
-    public static void startMetastoreAndSpark() {
-        String path = MINIO_CONTAINER.getS3UriForDefaultBucket() + "/" + UUID.randomUUID();
-        warehousePath = new Path(path);
-        spark =
-                SparkSession.builder()
-                        .master("local[2]")
-                        .config("spark.sql.catalog.paimon", SparkCatalog.class.getName())
-                        .config("spark.sql.catalog.paimon.warehouse", warehousePath.toString())
-                        .config(
-                                "spark.sql.extensions",
-                                "org.apache.paimon.spark.extensions.PaimonSparkSessionExtensions")
-                        .getOrCreate();
-        MINIO_CONTAINER
-                .getS3ConfigOptions()
-                .forEach((k, v) -> spark.conf().set("spark.sql.catalog.paimon." + k, v));
-        spark.sql("CREATE DATABASE paimon.db");
-        spark.sql("USE paimon.db");
-    }
-
-    @AfterAll
-    public static void stopMetastoreAndSpark() {
-        if (spark != null) {
-            spark.stop();
-            spark = null;
-        }
-    }
-
-    @Parameters(name = "{0}")
-    public static Collection<String> parameters() {
-        return Arrays.asList("avro", "orc", "parquet");
-    }
-
-    private final String format;
-
-    public SparkS3ITCase(String format) {
-        this.format = format;
-    }
-
-    @AfterEach
-    public void afterEach() {
-        spark.sql("DROP TABLE IF EXISTS T");
-    }
-
-    @TestTemplate
-    public void testWriteRead() {
-        spark.sql(
-                String.format(
-                        "CREATE TABLE T (a INT, b INT, c STRING) TBLPROPERTIES"
-                                + " ('primary-key'='a', 'bucket'='4', 'file.format'='%s')",
-                        format));
-        spark.sql("INSERT INTO T VALUES (1, 2, '3')").collectAsList();
-        List<Row> rows = spark.sql("SELECT * FROM T").collectAsList();
-        assertThat(rows.toString()).isEqualTo("[[1,2,3]]");
-    }
-
-    @TestTemplate
-    public void testS3AtomicWriteMultipleThreads() throws InterruptedException, IOException {
-        Path file = new Path(warehousePath, UUID.randomUUID().toString());
-        Options options = new Options();
-        MINIO_CONTAINER.getS3ConfigOptions().forEach(options::setString);
-        FileIO fileIO = FileIO.get(file, CatalogContext.create(options));
-        FileIOTest.testOverwriteFileUtf8(file, fileIO);
-    }
-}
diff --git a/paimon-spark/pom.xml b/paimon-spark/pom.xml
index 08854232d9..65d6425853 100644
--- a/paimon-spark/pom.xml
+++ b/paimon-spark/pom.xml
@@ -33,10 +33,6 @@ under the License.
 
     <packaging>pom</packaging>
 
-    <properties>
-        <aws.version>1.12.319</aws.version>
-    </properties>
-
     <modules>
         <module>paimon-spark-common</module>
         <module>paimon-spark-ut</module>
@@ -289,41 +285,6 @@ under the License.
             <scope>test</scope>
         </dependency>
 
-        <dependency>
-            <groupId>org.apache.paimon</groupId>
-            <artifactId>paimon-s3</artifactId>
-            <version>${project.version}</version>
-            <scope>test</scope>
-        </dependency>
-
-        <dependency>
-            <groupId>org.apache.paimon</groupId>
-            <artifactId>paimon-s3</artifactId>
-            <version>${project.version}</version>
-            <classifier>tests</classifier>
-            <scope>test</scope>
-        </dependency>
-
-        <dependency>
-            <groupId>com.amazonaws</groupId>
-            <artifactId>aws-java-sdk-core</artifactId>
-            <version>${aws.version}</version>
-            <scope>test</scope>
-            <exclusions>
-                <exclusion>
-                    <groupId>com.fasterxml.jackson.core</groupId>
-                    <artifactId>*</artifactId>
-                </exclusion>
-            </exclusions>
-        </dependency>
-
-        <dependency>
-            <groupId>com.amazonaws</groupId>
-            <artifactId>aws-java-sdk-s3</artifactId>
-            <version>${aws.version}</version>
-            <scope>test</scope>
-        </dependency>
-
         <dependency>
             <groupId>com.google.protobuf</groupId>
             <artifactId>protobuf-java</artifactId>
