This is an automated email from the ASF dual-hosted git repository.

blue pushed a commit to branch 0.9.x
in repository https://gitbox.apache.org/repos/asf/iceberg.git

commit 194407ae1ed2053a4187f40e6bce9d83f1b885e0
Author: Ryan Blue <[email protected]>
AuthorDate: Tue Jul 14 09:37:54 2020 -0700

    Spark: Fix USING clause in SparkCatalog (#1194)
---
 .../org/apache/iceberg/spark/SparkCatalog.java     | 24 ++++++++++++++++------
 1 file changed, 18 insertions(+), 6 deletions(-)

diff --git a/spark3/src/main/java/org/apache/iceberg/spark/SparkCatalog.java b/spark3/src/main/java/org/apache/iceberg/spark/SparkCatalog.java
index 86b8bdb..c115b84 100644
--- a/spark3/src/main/java/org/apache/iceberg/spark/SparkCatalog.java
+++ b/spark3/src/main/java/org/apache/iceberg/spark/SparkCatalog.java
@@ -149,8 +149,12 @@ public class SparkCatalog implements StagingTableCatalog, org.apache.spark.sql.c
                                  Map<String, String> properties) throws TableAlreadyExistsException {
     Schema icebergSchema = SparkSchemaUtil.convert(schema);
     try {
-      return new StagedSparkTable(icebergCatalog.newCreateTableTransaction(buildIdentifier(ident), icebergSchema,
-          Spark3Util.toPartitionSpec(icebergSchema, transforms), properties.get("location"), properties));
+      return new StagedSparkTable(icebergCatalog.newCreateTableTransaction(
+          buildIdentifier(ident),
+          icebergSchema,
+          Spark3Util.toPartitionSpec(icebergSchema, transforms),
+          properties.get("location"),
+          Spark3Util.rebuildCreateProperties(properties)));
     } catch (AlreadyExistsException e) {
       throw new TableAlreadyExistsException(ident);
     }
@@ -161,8 +165,12 @@ public class SparkCatalog implements StagingTableCatalog, org.apache.spark.sql.c
                                   Map<String, String> properties) throws NoSuchTableException {
     Schema icebergSchema = SparkSchemaUtil.convert(schema);
     try {
-      return new StagedSparkTable(icebergCatalog.newReplaceTableTransaction(buildIdentifier(ident), icebergSchema,
-          Spark3Util.toPartitionSpec(icebergSchema, transforms), properties.get("location"), properties,
+      return new StagedSparkTable(icebergCatalog.newReplaceTableTransaction(
+          buildIdentifier(ident),
+          icebergSchema,
+          Spark3Util.toPartitionSpec(icebergSchema, transforms),
+          properties.get("location"),
+          Spark3Util.rebuildCreateProperties(properties),
           false /* do not create */));
     } catch (org.apache.iceberg.exceptions.NoSuchTableException e) {
       throw new NoSuchTableException(ident);
@@ -173,8 +181,12 @@ public class SparkCatalog implements StagingTableCatalog, org.apache.spark.sql.c
   public StagedTable stageCreateOrReplace(Identifier ident, StructType schema, Transform[] transforms,
                                           Map<String, String> properties) {
     Schema icebergSchema = SparkSchemaUtil.convert(schema);
-    return new StagedSparkTable(icebergCatalog.newReplaceTableTransaction(buildIdentifier(ident), icebergSchema,
-        Spark3Util.toPartitionSpec(icebergSchema, transforms), properties.get("location"), properties,
+    return new StagedSparkTable(icebergCatalog.newReplaceTableTransaction(
+        buildIdentifier(ident),
+        icebergSchema,
+        Spark3Util.toPartitionSpec(icebergSchema, transforms),
+        properties.get("location"),
+        Spark3Util.rebuildCreateProperties(properties),
         true /* create or replace */));
   }
 

Reply via email to