This is an automated email from the ASF dual-hosted git repository.

srowen pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/spark.git


The following commit(s) were added to refs/heads/master by this push:
     new 017919b  [SPARK-27383][SQL][TEST] Avoid using hard-coded jar names in Hive tests
017919b is described below

commit 017919b636fd3ce43ccf5ec57f1c1842aa2130db
Author: Yuming Wang <yumw...@ebay.com>
AuthorDate: Sat Apr 6 18:06:52 2019 -0500

    [SPARK-27383][SQL][TEST] Avoid using hard-coded jar names in Hive tests
    
    ## What changes were proposed in this pull request?
    
    This PR avoids using hard-coded jar names (`hive-contrib-0.13.1.jar` and
    `hive-hcatalog-core-0.13.1.jar`) in Hive tests, so the names only need to be
    updated in one place when upgrading the built-in Hive to 2.3.4.
    
    ## How was this patch tested?
    
    Existing tests.
    
    Closes #24294 from wangyum/SPARK-27383.
    
    Authored-by: Yuming Wang <yumw...@ebay.com>
    Signed-off-by: Sean Owen <sean.o...@databricks.com>
---
 .../main/scala/org/apache/spark/sql/hive/test/TestHive.scala | 11 +++++++++++
 .../org/apache/spark/sql/hive/HiveSparkSubmitSuite.scala     | 12 ++++++------
 .../org/apache/spark/sql/hive/execution/HiveQuerySuite.scala |  6 +++---
 .../org/apache/spark/sql/hive/execution/SQLQuerySuite.scala  |  2 +-
 4 files changed, 21 insertions(+), 10 deletions(-)

diff --git a/sql/hive/src/main/scala/org/apache/spark/sql/hive/test/TestHive.scala b/sql/hive/src/main/scala/org/apache/spark/sql/hive/test/TestHive.scala
index c45f3e7..1515807 100644
--- a/sql/hive/src/main/scala/org/apache/spark/sql/hive/test/TestHive.scala
+++ b/sql/hive/src/main/scala/org/apache/spark/sql/hive/test/TestHive.scala
@@ -116,6 +116,9 @@ class TestHiveContext(
     @transient override val sparkSession: TestHiveSparkSession)
   extends SQLContext(sparkSession) {
 
+  val HIVE_CONTRIB_JAR: String = "hive-contrib-0.13.1.jar"
+  val HIVE_HCATALOG_CORE_JAR: String = "hive-hcatalog-core-0.13.1.jar"
+
   /**
   * If loadTestTables is false, no test tables are loaded. Note that this flag can only be true
   * when running in the JVM, i.e. it needs to be false when calling from Python.
@@ -142,6 +145,14 @@ class TestHiveContext(
     sparkSession.getHiveFile(path)
   }
 
+  def getHiveContribJar(): File = {
+    sparkSession.getHiveFile(HIVE_CONTRIB_JAR)
+  }
+
+  def getHiveHcatalogCoreJar(): File = {
+    sparkSession.getHiveFile(HIVE_HCATALOG_CORE_JAR)
+  }
+
   def loadTestTable(name: String): Unit = {
     sparkSession.loadTestTable(name)
   }
diff --git a/sql/hive/src/test/scala/org/apache/spark/sql/hive/HiveSparkSubmitSuite.scala b/sql/hive/src/test/scala/org/apache/spark/sql/hive/HiveSparkSubmitSuite.scala
index 8a36148..3b64a6b 100644
--- a/sql/hive/src/test/scala/org/apache/spark/sql/hive/HiveSparkSubmitSuite.scala
+++ b/sql/hive/src/test/scala/org/apache/spark/sql/hive/HiveSparkSubmitSuite.scala
@@ -108,8 +108,8 @@ class HiveSparkSubmitSuite
     val unusedJar = TestUtils.createJarWithClasses(Seq.empty)
     val jar1 = TestUtils.createJarWithClasses(Seq("SparkSubmitClassA"))
     val jar2 = TestUtils.createJarWithClasses(Seq("SparkSubmitClassB"))
-    val jar3 = TestHive.getHiveFile("hive-contrib-0.13.1.jar").getCanonicalPath
-    val jar4 = TestHive.getHiveFile("hive-hcatalog-core-0.13.1.jar").getCanonicalPath
+    val jar3 = TestHive.getHiveContribJar().getCanonicalPath
+    val jar4 = TestHive.getHiveHcatalogCoreJar().getCanonicalPath
     val jarsString = Seq(jar1, jar2, jar3, jar4).map(j => j.toString).mkString(",")
     val args = Seq(
       "--class", SparkSubmitClassLoaderTest.getClass.getName.stripSuffix("$"),
@@ -315,7 +315,7 @@ class HiveSparkSubmitSuite
       "--master", "local-cluster[2,1,1024]",
       "--conf", "spark.ui.enabled=false",
       "--conf", "spark.master.rest.enabled=false",
-      "--jars", TestHive.getHiveFile("hive-contrib-0.13.1.jar").getCanonicalPath,
+      "--jars", TestHive.getHiveContribJar().getCanonicalPath,
       unusedJar.toString)
     runSparkSubmit(argsForCreateTable)
 
@@ -457,7 +457,7 @@ object TemporaryHiveUDFTest extends Logging {
 
     // Load a Hive UDF from the jar.
     logInfo("Registering a temporary Hive UDF provided in a jar.")
-    val jar = hiveContext.getHiveFile("hive-contrib-0.13.1.jar").getCanonicalPath
+    val jar = hiveContext.getHiveContribJar().getCanonicalPath
     hiveContext.sql(
       s"""
          |CREATE TEMPORARY FUNCTION example_max
@@ -495,7 +495,7 @@ object PermanentHiveUDFTest1 extends Logging {
 
     // Load a Hive UDF from the jar.
     logInfo("Registering a permanent Hive UDF provided in a jar.")
-    val jar = hiveContext.getHiveFile("hive-contrib-0.13.1.jar").getCanonicalPath
+    val jar = hiveContext.getHiveContribJar().getCanonicalPath
     hiveContext.sql(
       s"""
          |CREATE FUNCTION example_max
@@ -532,7 +532,7 @@ object PermanentHiveUDFTest2 extends Logging {
     val hiveContext = new TestHiveContext(sc)
     // Load a Hive UDF from the jar.
     logInfo("Write the metadata of a permanent Hive UDF into metastore.")
-    val jar = hiveContext.getHiveFile("hive-contrib-0.13.1.jar").getCanonicalPath
+    val jar = hiveContext.getHiveContribJar().getCanonicalPath
     val function = CatalogFunction(
       FunctionIdentifier("example_max"),
       "org.apache.hadoop.hive.contrib.udaf.example.UDAFExampleMax",
diff --git a/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/HiveQuerySuite.scala b/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/HiveQuerySuite.scala
index a5cff35..c35ff80 100644
--- a/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/HiveQuerySuite.scala
+++ b/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/HiveQuerySuite.scala
@@ -816,7 +816,7 @@ class HiveQuerySuite extends HiveComparisonTest with SQLTestUtils with BeforeAnd
 
   test("ADD JAR command 2") {
     // this is a test case from mapjoin_addjar.q
-    val testJar = TestHive.getHiveFile("hive-hcatalog-core-0.13.1.jar").toURI
+    val testJar = TestHive.getHiveHcatalogCoreJar().toURI
     val testData = TestHive.getHiveFile("data/files/sample.json").toURI
     sql(s"ADD JAR $testJar")
     sql(
@@ -826,9 +826,9 @@ class HiveQuerySuite extends HiveComparisonTest with SQLTestUtils with BeforeAnd
     sql("select * from src join t1 on src.key = t1.a")
     sql("DROP TABLE t1")
     assert(sql("list jars").
-      filter(_.getString(0).contains("hive-hcatalog-core-0.13.1.jar")).count() > 0)
+      filter(_.getString(0).contains(TestHive.HIVE_HCATALOG_CORE_JAR)).count() > 0)
     assert(sql("list jar").
-      filter(_.getString(0).contains("hive-hcatalog-core-0.13.1.jar")).count() > 0)
+      filter(_.getString(0).contains(TestHive.HIVE_HCATALOG_CORE_JAR)).count() > 0)
     val testJar2 = TestHive.getHiveFile("TestUDTF.jar").getCanonicalPath
     sql(s"ADD JAR $testJar2")
     assert(sql(s"list jar $testJar").count() == 1)
diff --git a/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/SQLQuerySuite.scala b/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/SQLQuerySuite.scala
index aad34a3..29de55f 100644
--- a/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/SQLQuerySuite.scala
+++ b/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/SQLQuerySuite.scala
@@ -1105,7 +1105,7 @@ class SQLQuerySuite extends QueryTest with SQLTestUtils with TestHiveSingleton {
       override def run() {
        // To make sure this test works, this jar should not be loaded in another place.
         sql(
-          s"ADD JAR ${hiveContext.getHiveFile("hive-contrib-0.13.1.jar").getCanonicalPath()}")
+          s"ADD JAR ${hiveContext.getHiveContribJar().getCanonicalPath}")
         try {
           sql(
             """


---------------------------------------------------------------------
To unsubscribe, e-mail: commits-unsubscr...@spark.apache.org
For additional commands, e-mail: commits-h...@spark.apache.org
