This is an automated email from the ASF dual-hosted git repository.

jark pushed a commit to branch release-0.8
in repository https://gitbox.apache.org/repos/asf/fluss.git

commit 8e3e758be0314143c4a9523aed71b48383023e6f
Author: yuxia Luo <[email protected]>
AuthorDate: Thu Oct 30 22:59:57 2025 +0800

    [hotfix] Set paimon warehouse in FlinkCatalogITCase to make it consistent (#1902)
    
    (cherry picked from commit fcd2378014377341baa0755e59d0a45591cc7bcc)
---
 .../apache/fluss/flink/catalog/FlinkCatalogITCase.java    | 15 +++++++++++++++
 1 file changed, 15 insertions(+)

diff --git a/fluss-flink/fluss-flink-common/src/test/java/org/apache/fluss/flink/catalog/FlinkCatalogITCase.java b/fluss-flink/fluss-flink-common/src/test/java/org/apache/fluss/flink/catalog/FlinkCatalogITCase.java
index 062c6eea7..88e16e996 100644
--- a/fluss-flink/fluss-flink-common/src/test/java/org/apache/fluss/flink/catalog/FlinkCatalogITCase.java
+++ b/fluss-flink/fluss-flink-common/src/test/java/org/apache/fluss/flink/catalog/FlinkCatalogITCase.java
@@ -20,6 +20,7 @@ package org.apache.fluss.flink.catalog;
 import org.apache.fluss.cluster.ServerNode;
 import org.apache.fluss.config.ConfigOptions;
 import org.apache.fluss.config.Configuration;
+import org.apache.fluss.exception.FlussRuntimeException;
 import org.apache.fluss.exception.InvalidAlterTableException;
 import org.apache.fluss.exception.InvalidConfigException;
 import org.apache.fluss.exception.InvalidTableException;
@@ -47,6 +48,7 @@ import org.junit.jupiter.api.BeforeEach;
 import org.junit.jupiter.api.Test;
 import org.junit.jupiter.api.extension.RegisterExtension;
 
+import java.nio.file.Files;
 import java.time.Duration;
 import java.time.LocalDate;
 import java.time.LocalDateTime;
@@ -76,11 +78,23 @@ abstract class FlinkCatalogITCase {
                     .setClusterConf(initClusterConf())
                     .build();
 
+    protected static String paimonWarehousePath;
+
     static Configuration initClusterConf() {
         Configuration clusterConf = new Configuration();
         // use a small check interval to cleanup partitions quickly
         clusterConf.set(ConfigOptions.AUTO_PARTITION_CHECK_INTERVAL, Duration.ofSeconds(3));
         clusterConf.set(ConfigOptions.DATALAKE_FORMAT, DataLakeFormat.PAIMON);
+        try {
+            paimonWarehousePath =
+                    Files.createTempDirectory("fluss-catalog-itcase")
+                            .resolve("warehouse")
+                            .toString();
+        } catch (Exception e) {
+            // preserve the original cause so the failure is debuggable
+            throw new FlussRuntimeException("Failed to create warehouse path", e);
+        }
+        clusterConf.setString("datalake.paimon.warehouse", paimonWarehousePath);
+
         return clusterConf;
     }
 
@@ -208,6 +222,7 @@ abstract class FlinkCatalogITCase {
         expectedOptions.put("bucket.num", "5");
         expectedOptions.put("table.datalake.enabled", "true");
         expectedOptions.put("table.datalake.format", "paimon");
+        expectedOptions.put("table.datalake.paimon.warehouse", paimonWarehousePath);
         assertOptionsEqual(table.getOptions(), expectedOptions);
 
         // alter table

Reply via email to