This is an automated email from the ASF dual-hosted git repository.
dimas pushed a commit to branch main
in repository https://gitbox.apache.org/repos/asf/polaris.git
The following commit(s) were added to refs/heads/main by this push:
new 8c4874b05 fix type cast warning in PolarisCatalogUtils (#3178)
8c4874b05 is described below
commit 8c4874b053792344f7268b9063eb24f90c716984
Author: Dmitri Bourlatchkov <[email protected]>
AuthorDate: Tue Dec 2 14:31:15 2025 -0500
fix type cast warning in PolarisCatalogUtils (#3178)
```
plugins/spark/v3.5/spark/src/main/java/org/apache/polaris/spark/utils/PolarisCatalogUtils.java:131: warning: [unchecked] unchecked cast
scala.collection.immutable.Map$.MODULE$.apply(
^
required: Map<String,String>
found: Map
```
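For reference, a minimal standalone sketch of the builder-based conversion the patch below switches to. The class and method names here (ScalaMapSketch, toScalaImmutableMap) are illustrative and not part of the patch; only the Scala calls (Map$.MODULE$.newBuilder(), $plus$eq, result()) mirror what the diff does:
```
import java.util.Map;
import scala.Tuple2;
import scala.collection.immutable.Map$;
import scala.collection.mutable.Builder;

// Illustrative sketch only; the names below are not part of the patch.
public final class ScalaMapSketch {

  /**
   * Converts a java.util.Map into a scala.collection.immutable.Map through the
   * typed Builder returned by Map$.MODULE$.newBuilder(), so no raw Map$.apply()
   * call and no unchecked cast are needed.
   */
  static scala.collection.immutable.Map<String, String> toScalaImmutableMap(
      Map<String, String> javaMap) {
    Builder<Tuple2<String, String>, scala.collection.immutable.Map<String, String>> builder =
        Map$.MODULE$.newBuilder();
    // $plus$eq is the JVM-level name of the Scala Builder's += method.
    javaMap.forEach((k, v) -> builder.$plus$eq(Tuple2.apply(k, v)));
    return builder.result();
  }
}
```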
---
.../org/apache/polaris/spark/utils/PolarisCatalogUtils.java | 11 +++++++----
1 file changed, 7 insertions(+), 4 deletions(-)
diff --git a/plugins/spark/v3.5/spark/src/main/java/org/apache/polaris/spark/utils/PolarisCatalogUtils.java b/plugins/spark/v3.5/spark/src/main/java/org/apache/polaris/spark/utils/PolarisCatalogUtils.java
index 5493f0dc3..8d78807e0 100644
--- a/plugins/spark/v3.5/spark/src/main/java/org/apache/polaris/spark/utils/PolarisCatalogUtils.java
+++ b/plugins/spark/v3.5/spark/src/main/java/org/apache/polaris/spark/utils/PolarisCatalogUtils.java
@@ -40,6 +40,9 @@ import org.apache.spark.sql.execution.datasources.DataSource;
import org.apache.spark.sql.execution.datasources.v2.DataSourceV2Utils;
import org.apache.spark.sql.util.CaseInsensitiveStringMap;
import scala.Option;
+import scala.Tuple2;
+import scala.collection.immutable.Map$;
+import scala.collection.mutable.Builder;
public class PolarisCatalogUtils {
@@ -125,10 +128,10 @@ public class PolarisCatalogUtils {
new TableIdentifier(
identifier.name(), Option.apply(namespacePath),
Option.apply(catalogName));
- scala.collection.immutable.Map<String, String> scalaOptions =
- (scala.collection.immutable.Map<String, String>)
- scala.collection.immutable.Map$.MODULE$.apply(
- scala.collection.JavaConverters.mapAsScalaMap(tableProperties).toSeq());
+ Builder<Tuple2<String, String>, scala.collection.immutable.Map<String, String>> mb =
+ Map$.MODULE$.newBuilder();
+ tableProperties.forEach((k, v) -> mb.$plus$eq(Tuple2.apply(k, v)));
+ scala.collection.immutable.Map<String, String> scalaOptions = mb.result();
org.apache.spark.sql.catalyst.catalog.CatalogStorageFormat storage =
DataSource.buildStorageFormatFromOptions(scalaOptions);
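As a rough usage sketch (the tableProperties contents and the ScalaMapSketch helper above are hypothetical; only DataSource.buildStorageFormatFromOptions and CatalogStorageFormat come from the patched code), the rebuilt immutable map feeds directly into Spark's storage-format builder:
```
// Hypothetical caller: build the Scala options map without an unchecked cast,
// then construct the storage format the same way the patched code does.
java.util.Map<String, String> tableProperties = java.util.Map.of("provider", "delta");
scala.collection.immutable.Map<String, String> scalaOptions =
    ScalaMapSketch.toScalaImmutableMap(tableProperties);
org.apache.spark.sql.catalyst.catalog.CatalogStorageFormat storage =
    org.apache.spark.sql.execution.datasources.DataSource.buildStorageFormatFromOptions(scalaOptions);
```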