gh-yzou commented on code in PR #1303:
URL: https://github.com/apache/polaris/pull/1303#discussion_r2039961586


##########
plugins/spark/v3.5/src/main/java/org/apache/polaris/spark/SparkCatalog.java:
##########
@@ -42,42 +48,118 @@
 import org.apache.spark.sql.connector.expressions.Transform;
 import org.apache.spark.sql.types.StructType;
 import org.apache.spark.sql.util.CaseInsensitiveStringMap;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
+/**
+ * SparkCatalog implementation that can interact with both the Iceberg SparkCatalog and the Polaris
+ * SparkCatalog. All namespace and view related operations continue to go through the Iceberg
+ * SparkCatalog. For table operations, depending on the table format, the operation may involve
+ * both the Iceberg and the Polaris SparkCatalog.
+ */
 public class SparkCatalog
     implements StagingTableCatalog,
         TableCatalog,
         SupportsNamespaces,
         ViewCatalog,
         SupportsReplaceView {
+  private static final Logger LOG = LoggerFactory.getLogger(SparkCatalog.class);
 
-  private static final Set<String> DEFAULT_NS_KEYS = ImmutableSet.of(TableCatalog.PROP_OWNER);
-  private String catalogName = null;
-  private org.apache.iceberg.spark.SparkCatalog icebergsSparkCatalog = null;
-
-  // TODO: Add Polaris Specific REST Catalog
+  @VisibleForTesting protected String catalogName = null;
+  @VisibleForTesting protected org.apache.iceberg.spark.SparkCatalog icebergsSparkCatalog = null;
+  @VisibleForTesting protected PolarisSparkCatalog polarisSparkCatalog = null;
+  @VisibleForTesting protected DeltaHelper deltaHelper = null;
 
   @Override
   public String name() {
     return catalogName;
   }
 
+  /**
+   * Check whether an invalid catalog configuration is provided, and return an options map with the
+   * catalog type configured correctly. This function mainly validates two things: 1) no custom
+   * catalog implementation is provided; 2) no non-REST catalog type is configured.
+   */
+  private CaseInsensitiveStringMap validateAndResolveCatalogOptions(
+      CaseInsensitiveStringMap options) {
+    String catalogType =
+        PropertyUtil.propertyAsString(
+            options, CatalogUtil.ICEBERG_CATALOG_TYPE, CatalogUtil.ICEBERG_CATALOG_TYPE_REST);
+    if (catalogType != null && !catalogType.equals(CatalogUtil.ICEBERG_CATALOG_TYPE_REST)) {
+      throw new IllegalStateException(
+          "Only rest catalog type is allowed, but got catalog type: "
+              + catalogType
+              + ". Either configure the type to rest or remove the config");
+    }

Review Comment:
   Updated. Since I provide a default value when getting the property, I actually don't need to check for null, and I will update the code to always set the catalog type to rest after all checks pass, to be safe.



##########
plugins/spark/v3.5/src/main/java/org/apache/polaris/spark/SparkCatalog.java:
##########
@@ -42,42 +48,118 @@
 import org.apache.spark.sql.connector.expressions.Transform;
 import org.apache.spark.sql.types.StructType;
 import org.apache.spark.sql.util.CaseInsensitiveStringMap;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
+/**
+ * SparkCatalog implementation that can interact with both the Iceberg SparkCatalog and the Polaris
+ * SparkCatalog. All namespace and view related operations continue to go through the Iceberg
+ * SparkCatalog. For table operations, depending on the table format, the operation may involve
+ * both the Iceberg and the Polaris SparkCatalog.
+ */
 public class SparkCatalog
     implements StagingTableCatalog,
         TableCatalog,
         SupportsNamespaces,
         ViewCatalog,
         SupportsReplaceView {
+  private static final Logger LOG = LoggerFactory.getLogger(SparkCatalog.class);
 
-  private static final Set<String> DEFAULT_NS_KEYS = ImmutableSet.of(TableCatalog.PROP_OWNER);
-  private String catalogName = null;
-  private org.apache.iceberg.spark.SparkCatalog icebergsSparkCatalog = null;
-
-  // TODO: Add Polaris Specific REST Catalog
+  @VisibleForTesting protected String catalogName = null;
+  @VisibleForTesting protected org.apache.iceberg.spark.SparkCatalog icebergsSparkCatalog = null;
+  @VisibleForTesting protected PolarisSparkCatalog polarisSparkCatalog = null;
+  @VisibleForTesting protected DeltaHelper deltaHelper = null;
 
   @Override
   public String name() {
     return catalogName;
   }
 
+  /**
+   * Check whether an invalid catalog configuration is provided, and return an options map with the
+   * catalog type configured correctly. This function mainly validates two things: 1) no custom
+   * catalog implementation is provided; 2) no non-REST catalog type is configured.
+   */
+  private CaseInsensitiveStringMap validateAndResolveCatalogOptions(
+      CaseInsensitiveStringMap options) {
+    String catalogType =
+        PropertyUtil.propertyAsString(
+            options, CatalogUtil.ICEBERG_CATALOG_TYPE, CatalogUtil.ICEBERG_CATALOG_TYPE_REST);
+    if (catalogType != null && !catalogType.equals(CatalogUtil.ICEBERG_CATALOG_TYPE_REST)) {
+      throw new IllegalStateException(
+          "Only rest catalog type is allowed, but got catalog type: "
+              + catalogType
+              + ". Either configure the type to rest or remove the config");
+    }
+
+    String catalogImpl = options.get(CatalogProperties.CATALOG_IMPL);
+    if (catalogImpl != null) {
+      throw new IllegalStateException(
+          "Customized catalog implementation is not supported and not needed, 
please remove the configuration!");
+    }

Review Comment:
   updated
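
   For reference, here is a hypothetical Spark session configuration that would satisfy both checks in this hunk; the catalog name "polaris", the app name, and the URI are placeholders rather than values taken from the PR, and the sketch assumes org.apache.spark.sql.SparkSession:

    SparkSession spark =
        SparkSession.builder()
            .appName("polaris-spark-example")
            .config("spark.sql.catalog.polaris", "org.apache.polaris.spark.SparkCatalog")
            // "type" must resolve to rest (it can also be omitted, since rest is the default).
            .config("spark.sql.catalog.polaris.type", "rest")
            .config("spark.sql.catalog.polaris.uri", "http://localhost:8181/api/catalog")
            // Setting "spark.sql.catalog.polaris.catalog-impl" would hit the IllegalStateException above.
            .getOrCreate();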



-- 
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.

To unsubscribe, e-mail: [email protected]

For queries about this service, please contact Infrastructure at:
[email protected]
