Repository: phoenix
Updated Branches:
  refs/heads/master 8c90a0bf4 -> ad52201e0


PHOENIX-4159 phoenix-spark tests are failing

Fix usage of the underlying JUnit 'TemporaryFolder' in the phoenix-spark
tests. Parallel execution needs to stay disabled until JUnit 4.13 is
released (https://github.com/junit-team/junit4/issues/1223).
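
As context for the change below, a minimal sketch (not part of the commit) of
the JUnit TemporaryFolder lifecycle that the linked issue concerns. It assumes
only JUnit 4.x on the classpath; the object name and the "data" folder are
illustrative, not Phoenix code. Before 4.13, concurrently generated folder
names can collide, which is why parallel suite execution is switched off in
the pom below and why the IT teardown now deletes the shared folder explicitly.

    import org.junit.rules.TemporaryFolder

    // Illustrative only: shows the standard create/newFolder/delete calls.
    object TempFolderSketch {
      def main(args: Array[String]): Unit = {
        val folder = new TemporaryFolder()
        folder.create()                     // allocates a scratch dir; name generation can race pre-4.13
        val dir = folder.newFolder("data")  // per-test scratch space
        println(dir.getAbsolutePath)
        folder.delete()                     // explicit cleanup, mirroring the new doTeardown
      }
    }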


Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo
Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/ad52201e
Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/ad52201e
Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/ad52201e

Branch: refs/heads/master
Commit: ad52201e07670d342ef33c5e8bd2ee595fe559cc
Parents: 8c90a0b
Author: Josh Mahonin <jmaho...@gmail.com>
Authored: Wed Sep 6 13:21:57 2017 -0400
Committer: Josh Mahonin <jmaho...@gmail.com>
Committed: Wed Sep 6 13:21:57 2017 -0400

----------------------------------------------------------------------
 phoenix-spark/pom.xml                              |  5 ++++-
 .../phoenix/spark/AbstractPhoenixSparkIT.scala     | 17 ++++++++++-------
 2 files changed, 14 insertions(+), 8 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/phoenix/blob/ad52201e/phoenix-spark/pom.xml
----------------------------------------------------------------------
diff --git a/phoenix-spark/pom.xml b/phoenix-spark/pom.xml
index 1f2741e..dfc7b87 100644
--- a/phoenix-spark/pom.xml
+++ b/phoenix-spark/pom.xml
@@ -549,7 +549,10 @@
               <goal>test</goal>
             </goals>
             <configuration>
-              <parallel>true</parallel>
+              <!-- Need this false until we can switch to JUnit 4.13 due to
+              https://github.com/junit-team/junit4/issues/1223
+              -->
+              <parallel>false</parallel>
               <tagsToExclude>Integration-Test</tagsToExclude>
               <argLine>-Xmx1536m -XX:MaxPermSize=512m -XX:ReservedCodeCacheSize=512m</argLine>
             </configuration>

http://git-wip-us.apache.org/repos/asf/phoenix/blob/ad52201e/phoenix-spark/src/it/scala/org/apache/phoenix/spark/AbstractPhoenixSparkIT.scala
----------------------------------------------------------------------
diff --git a/phoenix-spark/src/it/scala/org/apache/phoenix/spark/AbstractPhoenixSparkIT.scala b/phoenix-spark/src/it/scala/org/apache/phoenix/spark/AbstractPhoenixSparkIT.scala
index ecaedc7..ca3470f 100644
--- a/phoenix-spark/src/it/scala/org/apache/phoenix/spark/AbstractPhoenixSparkIT.scala
+++ b/phoenix-spark/src/it/scala/org/apache/phoenix/spark/AbstractPhoenixSparkIT.scala
@@ -20,7 +20,7 @@ import org.apache.phoenix.end2end.BaseHBaseManagedTimeIT
 import org.apache.phoenix.query.BaseTest
 import org.apache.phoenix.util.PhoenixRuntime
 import org.apache.spark.{SparkConf, SparkContext}
-import org.scalatest.{BeforeAndAfterAll, FunSuite, Matchers}
+import org.scalatest.{BeforeAndAfter, BeforeAndAfterAll, FunSuite, Matchers}
 
 
 // Helper object to access the protected abstract static methods hidden in BaseHBaseManagedTimeIT
@@ -33,7 +33,10 @@ object PhoenixSparkITHelper extends BaseHBaseManagedTimeIT {
     BaseHBaseManagedTimeIT.doSetup()
   }
 
-  def doTeardown = BaseHBaseManagedTimeIT.doTeardown()
+  def doTeardown = {
+    BaseHBaseManagedTimeIT.doTeardown()
+    BaseTest.tmpFolder.delete()
+  }
 
   def getUrl = BaseTest.getUrl
 }
@@ -41,7 +44,7 @@ object PhoenixSparkITHelper extends BaseHBaseManagedTimeIT {
 /**
   * Base class for PhoenixSparkIT
   */
-class AbstractPhoenixSparkIT extends FunSuite with Matchers with BeforeAndAfterAll {
+class AbstractPhoenixSparkIT extends FunSuite with Matchers with BeforeAndAfter with BeforeAndAfterAll {
 
   // A global tenantId we can use across tests
   final val TenantId = "theTenant"
@@ -62,9 +65,8 @@ class AbstractPhoenixSparkIT extends FunSuite with Matchers with BeforeAndAfterA
   // Optional argument tenantId used for running tenant-specific SQL
   def setupTables(sqlSource: String, tenantId: Option[String]): Unit = {
     val props = new Properties
-    val id = tenantId match {
-      case Some(tid) => props.setProperty(PhoenixRuntime.TENANT_ID_ATTRIB, tid)
-      case _ =>
+    if(tenantId.isDefined) {
+      props.setProperty(PhoenixRuntime.TENANT_ID_ATTRIB, tenantId.get)
     }
 
     conn = DriverManager.getConnection(PhoenixSparkITHelper.getUrl, props)
@@ -88,7 +90,7 @@ class AbstractPhoenixSparkIT extends FunSuite with Matchers with BeforeAndAfterA
     PhoenixSparkITHelper.doSetup
 
     // We pass in null for TenantId here since these tables will be globally visible
-    setupTables("globalSetup.sql", null)
+    setupTables("globalSetup.sql", None)
     // We pass in a TenantId to allow the DDL to create tenant-specific tables/views
     setupTables("tenantSetup.sql", Some(TenantId))
 
@@ -103,6 +105,7 @@ class AbstractPhoenixSparkIT extends FunSuite with Matchers with BeforeAndAfterA
   override def afterAll() {
     conn.close()
     sc.stop()
+    PhoenixSparkITHelper.cleanUpAfterTest()
     PhoenixSparkITHelper.doTeardown
   }
 }
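
An aside on the tenantId handling in setupTables above (a sketch, not part of
the commit): the new isDefined guard could equivalently use Option.foreach,
which applies the side effect only when a tenant id is present. The object and
helper names below are hypothetical.

    import java.util.Properties
    import org.apache.phoenix.util.PhoenixRuntime

    object TenantPropsSketch {
      // Hypothetical helper: builds connection properties, setting the tenant
      // attribute only when a tenantId is supplied.
      def tenantProps(tenantId: Option[String]): Properties = {
        val props = new Properties
        tenantId.foreach(tid => props.setProperty(PhoenixRuntime.TENANT_ID_ATTRIB, tid))
        props
      }
    }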
