Repository: spark
Updated Branches:
  refs/heads/master e988adb58 -> f9b397f54


[SPARK-8567] [SQL] Add logs to record the progress of HiveSparkSubmitSuite.

Author: Yin Huai <yh...@databricks.com>

Closes #7009 from yhuai/SPARK-8567 and squashes the following commits:

62fb1f9 [Yin Huai] Add sc.stop().
b22cf7d [Yin Huai] Add logs.
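
For readers skimming the change: the commit applies a simple pattern to the spark-submit test applications in HiveSparkSubmitSuite, emitting a logInfo marker before each stage of the test so the logs show how far a run progressed, and calling sc.stop() before main returns. The sketch below is a minimal, hypothetical illustration of that pattern, not code from the suite; the object name ProgressLoggedTestApp and its single stage are placeholders.

    import org.apache.spark.{Logging, SparkConf, SparkContext}

    // Hypothetical spark-submit test app illustrating the pattern from this commit:
    // log a progress marker before each stage and stop the SparkContext at the end.
    object ProgressLoggedTestApp extends Logging {
      def main(args: Array[String]): Unit = {
        val sc = new SparkContext(new SparkConf().setAppName("ProgressLoggedTestApp"))
        try {
          logInfo("Running a simple job.")
          val sum = sc.parallelize(1 to 100).map(_ * 2).reduce(_ + _)
          logInfo(s"Job finished, sum = $sum.")
        } finally {
          // Unlike the suite itself, this sketch stops the context in a finally block
          // so resources are released even if a stage above throws.
          sc.stop()
        }
      }
    }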


Project: http://git-wip-us.apache.org/repos/asf/spark/repo
Commit: http://git-wip-us.apache.org/repos/asf/spark/commit/f9b397f5
Tree: http://git-wip-us.apache.org/repos/asf/spark/tree/f9b397f5
Diff: http://git-wip-us.apache.org/repos/asf/spark/diff/f9b397f5

Branch: refs/heads/master
Commit: f9b397f54d1c491680d70aba210bb8211fd249c1
Parents: e988adb
Author: Yin Huai <yh...@databricks.com>
Authored: Thu Jun 25 06:52:03 2015 -0700
Committer: Yin Huai <yh...@databricks.com>
Committed: Thu Jun 25 06:52:03 2015 -0700

----------------------------------------------------------------------
 .../org/apache/spark/sql/hive/HiveSparkSubmitSuite.scala    | 9 +++++++++
 1 file changed, 9 insertions(+)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/spark/blob/f9b397f5/sql/hive/src/test/scala/org/apache/spark/sql/hive/HiveSparkSubmitSuite.scala
----------------------------------------------------------------------
diff --git a/sql/hive/src/test/scala/org/apache/spark/sql/hive/HiveSparkSubmitSuite.scala b/sql/hive/src/test/scala/org/apache/spark/sql/hive/HiveSparkSubmitSuite.scala
index b875e52..a38ed23 100644
--- a/sql/hive/src/test/scala/org/apache/spark/sql/hive/HiveSparkSubmitSuite.scala
+++ b/sql/hive/src/test/scala/org/apache/spark/sql/hive/HiveSparkSubmitSuite.scala
@@ -115,6 +115,7 @@ object SparkSubmitClassLoaderTest extends Logging {
     val sc = new SparkContext(conf)
     val hiveContext = new TestHiveContext(sc)
     val df = hiveContext.createDataFrame((1 to 100).map(i => (i, i))).toDF("i", "j")
+    logInfo("Testing load classes at the driver side.")
     // First, we load classes at driver side.
     try {
       Class.forName(args(0), true, Thread.currentThread().getContextClassLoader)
@@ -124,6 +125,7 @@ object SparkSubmitClassLoaderTest extends Logging {
         throw new Exception("Could not load user class from jar:\n", t)
     }
     // Second, we load classes at the executor side.
+    logInfo("Testing load classes at the executor side.")
     val result = df.mapPartitions { x =>
       var exception: String = null
       try {
@@ -141,6 +143,7 @@ object SparkSubmitClassLoaderTest extends Logging {
     }
 
     // Load a Hive UDF from the jar.
+    logInfo("Registering temporary Hive UDF provided in a jar.")
     hiveContext.sql(
       """
         |CREATE TEMPORARY FUNCTION example_max
@@ -150,18 +153,23 @@ object SparkSubmitClassLoaderTest extends Logging {
       hiveContext.createDataFrame((1 to 10).map(i => (i, s"str$i"))).toDF("key", "val")
     source.registerTempTable("sourceTable")
     // Load a Hive SerDe from the jar.
+    logInfo("Creating a Hive table with a SerDe provided in a jar.")
     hiveContext.sql(
       """
         |CREATE TABLE t1(key int, val string)
         |ROW FORMAT SERDE 'org.apache.hive.hcatalog.data.JsonSerDe'
       """.stripMargin)
     // Actually use the loaded UDF and SerDe.
+    logInfo("Writing data into the table.")
     hiveContext.sql(
       "INSERT INTO TABLE t1 SELECT example_max(key) as key, val FROM 
sourceTable GROUP BY val")
+    logInfo("Running a simple query on the table.")
     val count = hiveContext.table("t1").orderBy("key", "val").count()
     if (count != 10) {
       throw new Exception(s"table t1 should have 10 rows instead of $count 
rows")
     }
+    logInfo("Test finishes.")
+    sc.stop()
   }
 }
 
@@ -199,5 +207,6 @@ object SparkSQLConfTest extends Logging {
     val hiveContext = new TestHiveContext(sc)
     // Run a simple command to make sure all lazy vals in hiveContext get instantiated.
     hiveContext.tables().collect()
+    sc.stop()
   }
 }

