Repository: spark Updated Branches: refs/heads/branch-2.0 1f3b5a5ac -> ab9a304a7
[SPARK-15929] Fix portability of DataFrameSuite path globbing tests The DataFrameSuite regression tests for SPARK-13774 fail in my environment because they attempt to glob over all of `/mnt` and some of the subdirectories have restrictive permissions which cause the test to fail. This patch rewrites those tests to remove all environment-specific assumptions; the tests now create their own unique temporary paths for use in the tests. Author: Josh Rosen <joshro...@databricks.com> Closes #13649 from JoshRosen/SPARK-15929. (cherry picked from commit a6babca1bf76e70488ce6005ec3b8b53afc7edfd) Signed-off-by: Cheng Lian <l...@databricks.com> Project: http://git-wip-us.apache.org/repos/asf/spark/repo Commit: http://git-wip-us.apache.org/repos/asf/spark/commit/ab9a304a Tree: http://git-wip-us.apache.org/repos/asf/spark/tree/ab9a304a Diff: http://git-wip-us.apache.org/repos/asf/spark/diff/ab9a304a Branch: refs/heads/branch-2.0 Commit: ab9a304a7c690c748d3d99f1d933116e25ea0a73 Parents: 1f3b5a5 Author: Josh Rosen <joshro...@databricks.com> Authored: Mon Jun 13 17:06:22 2016 -0700 Committer: Cheng Lian <l...@databricks.com> Committed: Mon Jun 13 17:06:36 2016 -0700 ---------------------------------------------------------------------- .../org/apache/spark/sql/DataFrameSuite.scala | 45 ++++++++++++++++---- 1 file changed, 36 insertions(+), 9 deletions(-) ---------------------------------------------------------------------- http://git-wip-us.apache.org/repos/asf/spark/blob/ab9a304a/sql/core/src/test/scala/org/apache/spark/sql/DataFrameSuite.scala ---------------------------------------------------------------------- diff --git a/sql/core/src/test/scala/org/apache/spark/sql/DataFrameSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/DataFrameSuite.scala index 6bb0ce9..c8a0f71 100644 --- a/sql/core/src/test/scala/org/apache/spark/sql/DataFrameSuite.scala +++ b/sql/core/src/test/scala/org/apache/spark/sql/DataFrameSuite.scala @@ -19,6 +19,7 @@ package org.apache.spark.sql import 
java.io.File import java.nio.charset.StandardCharsets +import java.util.UUID import scala.language.postfixOps import scala.util.Random @@ -35,6 +36,7 @@ import org.apache.spark.sql.internal.SQLConf import org.apache.spark.sql.test.{ExamplePoint, ExamplePointUDT, SharedSQLContext} import org.apache.spark.sql.test.SQLTestData.TestData2 import org.apache.spark.sql.types._ +import org.apache.spark.util.Utils class DataFrameSuite extends QueryTest with SharedSQLContext { import testImplicits._ @@ -1495,18 +1497,43 @@ class DataFrameSuite extends QueryTest with SharedSQLContext { } test("SPARK-13774: Check error message for non existent path without globbed paths") { - val e = intercept[AnalysisException] (spark.read.format("csv"). - load("/xyz/file2", "/xyz/file21", "/abc/files555", "a")).getMessage() - assert(e.startsWith("Path does not exist")) + val uuid = UUID.randomUUID().toString + val baseDir = Utils.createTempDir() + try { + val e = intercept[AnalysisException] { + spark.read.format("csv").load( + new File(baseDir, "file").getAbsolutePath, + new File(baseDir, "file2").getAbsolutePath, + new File(uuid, "file3").getAbsolutePath, + uuid).rdd + } + assert(e.getMessage.startsWith("Path does not exist")) + } finally { + Utils.deleteRecursively(baseDir) + } + } test("SPARK-13774: Check error message for not existent globbed paths") { - val e = intercept[AnalysisException] (spark.read.format("text"). - load( "/xyz/*")).getMessage() - assert(e.startsWith("Path does not exist")) + // Non-existent initial path component: + val nonExistentBasePath = "/" + UUID.randomUUID().toString + assert(!new File(nonExistentBasePath).exists()) + val e = intercept[AnalysisException] { + spark.read.format("text").load(s"$nonExistentBasePath/*") + } + assert(e.getMessage.startsWith("Path does not exist")) - val e1 = intercept[AnalysisException] (spark.read.json("/mnt/*/*-xyz.json").rdd). 
- getMessage() - assert(e1.startsWith("Path does not exist")) + // Existent initial path component, but no matching files: + val baseDir = Utils.createTempDir() + val childDir = Utils.createTempDir(baseDir.getAbsolutePath) + assert(childDir.exists()) + try { + val e1 = intercept[AnalysisException] { + spark.read.json(s"${baseDir.getAbsolutePath}/*/*-xyz.json").rdd + } + assert(e1.getMessage.startsWith("Path does not exist")) + } finally { + Utils.deleteRecursively(baseDir) + } } } --------------------------------------------------------------------- To unsubscribe, e-mail: commits-unsubscr...@spark.apache.org For additional commands, e-mail: commits-h...@spark.apache.org