Copilot commented on code in PR #2637:
URL: https://github.com/apache/sedona/pull/2637#discussion_r2792226615


##########
spark/common/src/main/scala/org/apache/spark/sql/sedona_sql/expressions/LibPostalDataLoader.scala:
##########
@@ -0,0 +1,141 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.spark.sql.sedona_sql.expressions
+
+import org.apache.spark.SparkFiles
+import org.slf4j.LoggerFactory
+
+import java.io.File
+import java.net.URI
+
+/**
+ * Resolves libpostal data directory paths. When the configured data directory points to a remote
+ * filesystem (HDFS, S3, GCS, ABFS, etc.), the data is expected to have been distributed to
+ * executors via `SparkContext.addFile()` and is resolved through `SparkFiles.get()`.
+ */
+object LibPostalDataLoader {
+
+  private val logger = LoggerFactory.getLogger(getClass)
+
+  /**
+   * Resolve the data directory to a local filesystem path. If the configured path already points
+   * to the local filesystem, it is returned as-is. If it points to a remote filesystem, the data
+   * is looked up via Spark's `SparkFiles` mechanism (the user must have called
+   * `sc.addFile(remotePath, recursive = true)` before running queries).
+   *
+   * @param configuredDir
+   *   the data directory path from Sedona configuration (may be local or remote)
+   * @return
+   *   a local filesystem path suitable for jpostal
+   */
+  def resolveDataDir(configuredDir: String): String = {
+    if (isRemotePath(configuredDir)) {
+      resolveFromSparkFiles(configuredDir)
+    } else {
+      normalizeLocalPath(configuredDir)
+    }
+  }

Review Comment:
   `resolveFromSparkFiles` always returns a path with a trailing separator 
(`ensureTrailingSlash`), but local paths (including normalized `file:` URIs) 
may or may not have one. This inconsistency can lead to subtle downstream bugs 
if any code concatenates paths (even outside this diff). Consider normalizing 
both branches to the same convention (either always ensure a trailing 
separator, or never).
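   One way to do that, sketched with a hypothetical `stripTrailingSeparator` helper (not part of this diff), is to normalize the result of both branches in one place:
   ```scala
   // Hypothetical sketch: pick one convention (here: no trailing separator) and
   // apply it to the result of both branches before returning it.
   // java.io.File is already imported in this file.
   private def stripTrailingSeparator(path: String): String =
     if (path.length > 1 && (path.endsWith("/") || path.endsWith(File.separator)))
       path.dropRight(1)
     else path
   
   def resolveDataDir(configuredDir: String): String = {
     val resolved =
       if (isRemotePath(configuredDir)) resolveFromSparkFiles(configuredDir)
       else normalizeLocalPath(configuredDir)
     stripTrailingSeparator(resolved)
   }
   ```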



##########
spark/common/src/test/scala/org/apache/sedona/sql/LibPostalDataLoaderTest.scala:
##########
@@ -0,0 +1,167 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.sedona.sql
+
+import io.minio.{MakeBucketArgs, MinioClient, PutObjectArgs}
+import org.apache.spark.sql.sedona_sql.expressions.LibPostalDataLoader
+import org.scalatest.matchers.should.Matchers
+import org.testcontainers.containers.MinIOContainer
+
+import java.io.{ByteArrayInputStream, File}
+import java.nio.file.Files
+
+class LibPostalDataLoaderTest extends TestBaseScala with Matchers {
+
+  describe("LibPostalDataLoader") {
+
+    describe("isRemotePath") {
+      it("should return false for local paths") {
+        LibPostalDataLoader.isRemotePath("/tmp/libpostal/") shouldBe false
+      }
+
+      it("should return false for relative paths") {
+        LibPostalDataLoader.isRemotePath("data/libpostal/") shouldBe false
+      }
+
+      it("should return false for file:// URIs") {
+        LibPostalDataLoader.isRemotePath("file:///tmp/libpostal/") shouldBe 
false
+      }
+
+      it("should return true for hdfs:// URIs") {
+        LibPostalDataLoader.isRemotePath("hdfs:///data/libpostal/") shouldBe 
true
+      }
+
+      it("should return true for hdfs:// URIs with host") {
+        LibPostalDataLoader.isRemotePath("hdfs://namenode:9000/data/libpostal/") shouldBe true
+      }
+
+      it("should return true for s3a:// URIs") {
+        LibPostalDataLoader.isRemotePath("s3a://my-bucket/libpostal/") 
shouldBe true
+      }
+
+      it("should return true for gs:// URIs") {
+        LibPostalDataLoader.isRemotePath("gs://my-bucket/libpostal/") shouldBe 
true
+      }
+
+      it("should return true for abfs:// URIs") {
+        LibPostalDataLoader.isRemotePath(
+          "abfs://[email protected]/libpostal/") shouldBe 
true
+      }
+
+      it("should return true for wasb:// URIs") {
+        LibPostalDataLoader.isRemotePath(
+          "wasb://[email protected]/libpostal/") 
shouldBe true
+      }
+
+      it("should return false for empty string") {
+        LibPostalDataLoader.isRemotePath("") shouldBe false
+      }
+
+      it("should return false for Windows-like paths") {
+        // Single-letter scheme like C: should not be treated as remote
+        LibPostalDataLoader.isRemotePath("C:\\libpostal\\data\\") shouldBe 
false
+      }
+    }
+
+    describe("resolveDataDir") {
+      it("should return local path unchanged") {
+        val tempDir = Files.createTempDirectory("sedona-libpostal-test").toFile
+        try {
+          val result = LibPostalDataLoader.resolveDataDir(tempDir.getAbsolutePath)
+          result shouldBe tempDir.getAbsolutePath
+        } finally {
+          tempDir.delete()
+        }
+      }
+
+      it("should normalize file: URI to plain local path") {
+        val tempDir = Files.createTempDirectory("sedona-libpostal-test").toFile
+        try {
+          val fileUri = tempDir.toURI.toString
+          val result = LibPostalDataLoader.resolveDataDir(fileUri)
+          result should not startWith "file:"
+          result shouldBe tempDir.getAbsolutePath
+        } finally {
+          tempDir.delete()
+        }
+      }
+
+      it("should throw IllegalStateException when remote data not found in 
SparkFiles") {
+        val remoteUri = "hdfs:///data/nonexistent-libpostal-data/"
+
+        val exception = intercept[IllegalStateException] {
+          LibPostalDataLoader.resolveDataDir(remoteUri)
+        }
+        exception.getMessage should include("not found via SparkFiles")
+        exception.getMessage should include("sc.addFile")
+        exception.getMessage should include("recursive = true")
+      }
+
+      it("should resolve data uploaded to S3 via sc.addFile end-to-end") {
+        val container = new MinIOContainer("minio/minio:latest")
+        container.start()
+        try {

Review Comment:
   `container.start()` is executed before entering the `try/finally`, so if 
`start()` throws, `container.stop()` won’t run and resources may leak. Wrap the 
`start()` inside the `try` (or use a resource-management helper) so `stop()` is 
guaranteed to execute after a partial/failed start.
   ```suggestion
           try {
             container.start()
   ```
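   For reference, a minimal sketch of the guarded pattern (the image tag is only an example; see the separate comment below about pinning it):
   ```scala
   // Construct outside the try, start inside it, so stop() in the finally block
   // still runs if start() throws partway through container startup.
   val container = new MinIOContainer("minio/minio:latest")
   try {
     container.start()
     // ... exercise LibPostalDataLoader against the running container ...
   } finally {
     container.stop() // safe even if start() failed before the container came up
   }
   ```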



##########
spark/common/src/main/scala/org/apache/spark/sql/sedona_sql/expressions/LibPostalDataLoader.scala:
##########
@@ -0,0 +1,141 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.spark.sql.sedona_sql.expressions
+
+import org.apache.spark.SparkFiles
+import org.slf4j.LoggerFactory
+
+import java.io.File
+import java.net.URI
+
+/**
+ * Resolves libpostal data directory paths. When the configured data directory points to a remote
+ * filesystem (HDFS, S3, GCS, ABFS, etc.), the data is expected to have been distributed to
+ * executors via `SparkContext.addFile()` and is resolved through `SparkFiles.get()`.
+ */
+object LibPostalDataLoader {
+
+  private val logger = LoggerFactory.getLogger(getClass)
+
+  /**
+   * Resolve the data directory to a local filesystem path. If the configured path already points
+   * to the local filesystem, it is returned as-is. If it points to a remote filesystem, the data
+   * is looked up via Spark's `SparkFiles` mechanism (the user must have called
+   * `sc.addFile(remotePath, recursive = true)` before running queries).
+   *
+   * @param configuredDir
+   *   the data directory path from Sedona configuration (may be local or remote)
+   * @return
+   *   a local filesystem path suitable for jpostal
+   */
+  def resolveDataDir(configuredDir: String): String = {
+    if (isRemotePath(configuredDir)) {
+      resolveFromSparkFiles(configuredDir)
+    } else {
+      normalizeLocalPath(configuredDir)
+    }
+  }
+
+  /**
+   * Normalize a local path. Converts `file:` URIs (e.g. `file:///tmp/libpostal`) to plain
+   * filesystem paths (`/tmp/libpostal`) so that jpostal receives a path it can use directly.
+   * Non-URI paths are returned unchanged.
+   */
+  private[expressions] def normalizeLocalPath(path: String): String = {
+    try {
+      val uri = new URI(path)
+      if (uri.getScheme != null && uri.getScheme.equalsIgnoreCase("file")) {
+        new File(uri).getAbsolutePath
+      } else {
+        path
+      }
+    } catch {
+      case _: Exception => path
+    }
+  }
+
+  /**
+   * Determine whether a path string refers to a remote (non-local) filesystem.
+   */
+  def isRemotePath(path: String): Boolean = {
+    try {
+      val uri = new URI(path)
+      val scheme = uri.getScheme
+      scheme != null && !scheme.equalsIgnoreCase("file") && scheme.length > 1
+    } catch {
+      case _: Exception => false
+    }
+  }
+
+  /**
+   * Resolve a remote data directory via Spark's file distribution mechanism. Extracts the
+   * basename (last path component) from the remote URI and looks it up through `SparkFiles.get`.
+   * The user must have previously called `sc.addFile(remotePath, recursive = true)`.
+   *
+   * @throws IllegalStateException
+   *   if the data directory was not found via SparkFiles
+   */
+  private def resolveFromSparkFiles(remotePath: String): String = {
+    val basename = extractBasename(remotePath)
+
+    try {
+      val localPath = SparkFiles.get(basename)
+      val localFile = new File(localPath)
+
+      if (localFile.exists() && localFile.isDirectory) {
+        logger.info(
+          "Resolved libpostal data from SparkFiles: {} -> {}",
+          remotePath: Any,
+          localPath: Any)
+        ensureTrailingSlash(localPath)

Review Comment:
   `resolveFromSparkFiles` always returns a path with a trailing separator 
(`ensureTrailingSlash`), but local paths (including normalized `file:` URIs) 
may or may not have one. This inconsistency can lead to subtle downstream bugs 
if any code concatenates paths (even outside this diff). Consider normalizing 
both branches to the same convention (either always ensure a trailing 
separator, or never).
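   If the trailing-separator convention is preferred instead, the same idea works in the other direction; a sketch that reuses the existing `ensureTrailingSlash` helper on both branches:
   ```scala
   // Hypothetical alternative: apply ensureTrailingSlash to the result of both
   // branches (assumes it is a no-op when a separator is already present).
   def resolveDataDir(configuredDir: String): String = {
     val resolved =
       if (isRemotePath(configuredDir)) resolveFromSparkFiles(configuredDir)
       else normalizeLocalPath(configuredDir)
     ensureTrailingSlash(resolved)
   }
   ```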



##########
spark/common/src/test/scala/org/apache/sedona/sql/LibPostalDataLoaderTest.scala:
##########
@@ -0,0 +1,167 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.sedona.sql
+
+import io.minio.{MakeBucketArgs, MinioClient, PutObjectArgs}
+import org.apache.spark.sql.sedona_sql.expressions.LibPostalDataLoader
+import org.scalatest.matchers.should.Matchers
+import org.testcontainers.containers.MinIOContainer
+
+import java.io.{ByteArrayInputStream, File}
+import java.nio.file.Files
+
+class LibPostalDataLoaderTest extends TestBaseScala with Matchers {
+
+  describe("LibPostalDataLoader") {
+
+    describe("isRemotePath") {
+      it("should return false for local paths") {
+        LibPostalDataLoader.isRemotePath("/tmp/libpostal/") shouldBe false
+      }
+
+      it("should return false for relative paths") {
+        LibPostalDataLoader.isRemotePath("data/libpostal/") shouldBe false
+      }
+
+      it("should return false for file:// URIs") {
+        LibPostalDataLoader.isRemotePath("file:///tmp/libpostal/") shouldBe 
false
+      }
+
+      it("should return true for hdfs:// URIs") {
+        LibPostalDataLoader.isRemotePath("hdfs:///data/libpostal/") shouldBe 
true
+      }
+
+      it("should return true for hdfs:// URIs with host") {
+        LibPostalDataLoader.isRemotePath("hdfs://namenode:9000/data/libpostal/") shouldBe true
+      }
+
+      it("should return true for s3a:// URIs") {
+        LibPostalDataLoader.isRemotePath("s3a://my-bucket/libpostal/") 
shouldBe true
+      }
+
+      it("should return true for gs:// URIs") {
+        LibPostalDataLoader.isRemotePath("gs://my-bucket/libpostal/") shouldBe 
true
+      }
+
+      it("should return true for abfs:// URIs") {
+        LibPostalDataLoader.isRemotePath(
+          "abfs://[email protected]/libpostal/") shouldBe 
true
+      }
+
+      it("should return true for wasb:// URIs") {
+        LibPostalDataLoader.isRemotePath(
+          "wasb://[email protected]/libpostal/") 
shouldBe true
+      }
+
+      it("should return false for empty string") {
+        LibPostalDataLoader.isRemotePath("") shouldBe false
+      }
+
+      it("should return false for Windows-like paths") {
+        // Single-letter scheme like C: should not be treated as remote
+        LibPostalDataLoader.isRemotePath("C:\\libpostal\\data\\") shouldBe 
false
+      }
+    }
+
+    describe("resolveDataDir") {
+      it("should return local path unchanged") {
+        val tempDir = Files.createTempDirectory("sedona-libpostal-test").toFile
+        try {
+          val result = LibPostalDataLoader.resolveDataDir(tempDir.getAbsolutePath)
+          result shouldBe tempDir.getAbsolutePath
+        } finally {
+          tempDir.delete()
+        }
+      }
+
+      it("should normalize file: URI to plain local path") {
+        val tempDir = Files.createTempDirectory("sedona-libpostal-test").toFile
+        try {
+          val fileUri = tempDir.toURI.toString
+          val result = LibPostalDataLoader.resolveDataDir(fileUri)
+          result should not startWith "file:"
+          result shouldBe tempDir.getAbsolutePath
+        } finally {
+          tempDir.delete()
+        }
+      }
+
+      it("should throw IllegalStateException when remote data not found in 
SparkFiles") {
+        val remoteUri = "hdfs:///data/nonexistent-libpostal-data/"
+
+        val exception = intercept[IllegalStateException] {
+          LibPostalDataLoader.resolveDataDir(remoteUri)
+        }
+        exception.getMessage should include("not found via SparkFiles")
+        exception.getMessage should include("sc.addFile")
+        exception.getMessage should include("recursive = true")
+      }
+
+      it("should resolve data uploaded to S3 via sc.addFile end-to-end") {
+        val container = new MinIOContainer("minio/minio:latest")

Review Comment:
   Using the `latest` tag makes the test non-reproducible and can introduce flaky CI failures when the upstream image changes. Pin the MinIO image to a specific version/tag, ideally one known to work with your Testcontainers and MinIO client setup.
   ```suggestion
           val container = new MinIOContainer("minio/minio:RELEASE.2024-01-18T22-51-28Z")
   ```
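   If you prefer the pin expressed through Testcontainers' image API rather than a raw string, a sketch using `DockerImageName` (the tag below is only an example):
   ```scala
   import org.testcontainers.containers.MinIOContainer
   import org.testcontainers.utility.DockerImageName
   
   // Pin a specific MinIO release; parse() validates the image reference up front.
   val minioImage = DockerImageName.parse("minio/minio:RELEASE.2024-01-18T22-51-28Z")
   val container = new MinIOContainer(minioImage)
   ```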



-- 
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.

To unsubscribe, e-mail: [email protected]

For queries about this service, please contact Infrastructure at:
[email protected]
