phoenix git commit: PHOENIX-4159 phoenix-spark tests are failing
Repository: phoenix Updated Branches: refs/heads/4.x-HBase-0.98 94ef3eb3b -> 20aaf633c PHOENIX-4159 phoenix-spark tests are failing Fix usage of underlying JUnit 'TemporaryFolder' in phoenix-spark tests. Need to disable parallel execution until JUnit 4.13 is released (https://github.com/junit-team/junit4/issues/1223) Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/20aaf633 Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/20aaf633 Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/20aaf633 Branch: refs/heads/4.x-HBase-0.98 Commit: 20aaf633c00baced6ebe12cf1ece33e69db4a423 Parents: 94ef3eb Author: Josh Mahonin Authored: Wed Sep 6 13:21:57 2017 -0400 Committer: Josh Mahonin Committed: Wed Sep 6 13:25:05 2017 -0400 -- phoenix-spark/pom.xml | 5 - .../phoenix/spark/AbstractPhoenixSparkIT.scala | 17 ++--- 2 files changed, 14 insertions(+), 8 deletions(-) -- http://git-wip-us.apache.org/repos/asf/phoenix/blob/20aaf633/phoenix-spark/pom.xml -- diff --git a/phoenix-spark/pom.xml b/phoenix-spark/pom.xml index 2202c5d..d67c8b0 100644 --- a/phoenix-spark/pom.xml +++ b/phoenix-spark/pom.xml @@ -542,7 +542,10 @@ test - true + + false Integration-Test -Xmx1536m -XX:MaxPermSize=512m -XX:ReservedCodeCacheSize=512m http://git-wip-us.apache.org/repos/asf/phoenix/blob/20aaf633/phoenix-spark/src/it/scala/org/apache/phoenix/spark/AbstractPhoenixSparkIT.scala -- diff --git a/phoenix-spark/src/it/scala/org/apache/phoenix/spark/AbstractPhoenixSparkIT.scala b/phoenix-spark/src/it/scala/org/apache/phoenix/spark/AbstractPhoenixSparkIT.scala index ecaedc7..ca3470f 100644 --- a/phoenix-spark/src/it/scala/org/apache/phoenix/spark/AbstractPhoenixSparkIT.scala +++ b/phoenix-spark/src/it/scala/org/apache/phoenix/spark/AbstractPhoenixSparkIT.scala @@ -20,7 +20,7 @@ import org.apache.phoenix.end2end.BaseHBaseManagedTimeIT import org.apache.phoenix.query.BaseTest import org.apache.phoenix.util.PhoenixRuntime import 
org.apache.spark.{SparkConf, SparkContext} -import org.scalatest.{BeforeAndAfterAll, FunSuite, Matchers} +import org.scalatest.{BeforeAndAfter, BeforeAndAfterAll, FunSuite, Matchers} // Helper object to access the protected abstract static methods hidden in BaseHBaseManagedTimeIT @@ -33,7 +33,10 @@ object PhoenixSparkITHelper extends BaseHBaseManagedTimeIT { BaseHBaseManagedTimeIT.doSetup() } - def doTeardown = BaseHBaseManagedTimeIT.doTeardown() + def doTeardown = { +BaseHBaseManagedTimeIT.doTeardown() +BaseTest.tmpFolder.delete() + } def getUrl = BaseTest.getUrl } @@ -41,7 +44,7 @@ object PhoenixSparkITHelper extends BaseHBaseManagedTimeIT { /** * Base class for PhoenixSparkIT */ -class AbstractPhoenixSparkIT extends FunSuite with Matchers with BeforeAndAfterAll { +class AbstractPhoenixSparkIT extends FunSuite with Matchers with BeforeAndAfter with BeforeAndAfterAll { // A global tenantId we can use across tests final val TenantId = "theTenant" @@ -62,9 +65,8 @@ class AbstractPhoenixSparkIT extends FunSuite with Matchers with BeforeAndAfterA // Optional argument tenantId used for running tenant-specific SQL def setupTables(sqlSource: String, tenantId: Option[String]): Unit = { val props = new Properties -val id = tenantId match { - case Some(tid) => props.setProperty(PhoenixRuntime.TENANT_ID_ATTRIB, tid) - case _ => +if(tenantId.isDefined) { + props.setProperty(PhoenixRuntime.TENANT_ID_ATTRIB, tenantId.get) } conn = DriverManager.getConnection(PhoenixSparkITHelper.getUrl, props) @@ -88,7 +90,7 @@ class AbstractPhoenixSparkIT extends FunSuite with Matchers with BeforeAndAfterA PhoenixSparkITHelper.doSetup // We pass in null for TenantId here since these tables will be globally visible -setupTables("globalSetup.sql", null) +setupTables("globalSetup.sql", None) // We pass in a TenantId to allow the DDL to create tenant-specific tables/views setupTables("tenantSetup.sql", Some(TenantId)) @@ -103,6 +105,7 @@ class AbstractPhoenixSparkIT extends FunSuite with Matchers 
with BeforeAndAfterA override def afterAll() { conn.close() sc.stop() +PhoenixSparkITHelper.cleanUpAfterTest() PhoenixSparkITHelper.doTeardown } }
phoenix git commit: PHOENIX-4159 phoenix-spark tests are failing
Repository: phoenix Updated Branches: refs/heads/4.x-HBase-1.1 9e5cb5d2e -> 9296254e2 PHOENIX-4159 phoenix-spark tests are failing Fix usage of underlying JUnit 'TemporaryFolder' in phoenix-spark tests. Need to disable parallel execution until JUnit 4.13 is released (https://github.com/junit-team/junit4/issues/1223) Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/9296254e Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/9296254e Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/9296254e Branch: refs/heads/4.x-HBase-1.1 Commit: 9296254e2f7b08be2a5314952fad9acf717bdcf9 Parents: 9e5cb5d Author: Josh Mahonin Authored: Wed Sep 6 13:21:57 2017 -0400 Committer: Josh Mahonin Committed: Wed Sep 6 13:24:42 2017 -0400 -- phoenix-spark/pom.xml | 5 - .../phoenix/spark/AbstractPhoenixSparkIT.scala | 17 ++--- 2 files changed, 14 insertions(+), 8 deletions(-) -- http://git-wip-us.apache.org/repos/asf/phoenix/blob/9296254e/phoenix-spark/pom.xml -- diff --git a/phoenix-spark/pom.xml b/phoenix-spark/pom.xml index 89c5ba5..7e804a8 100644 --- a/phoenix-spark/pom.xml +++ b/phoenix-spark/pom.xml @@ -549,7 +549,10 @@ test - true + + false Integration-Test -Xmx1536m -XX:MaxPermSize=512m -XX:ReservedCodeCacheSize=512m http://git-wip-us.apache.org/repos/asf/phoenix/blob/9296254e/phoenix-spark/src/it/scala/org/apache/phoenix/spark/AbstractPhoenixSparkIT.scala -- diff --git a/phoenix-spark/src/it/scala/org/apache/phoenix/spark/AbstractPhoenixSparkIT.scala b/phoenix-spark/src/it/scala/org/apache/phoenix/spark/AbstractPhoenixSparkIT.scala index ecaedc7..ca3470f 100644 --- a/phoenix-spark/src/it/scala/org/apache/phoenix/spark/AbstractPhoenixSparkIT.scala +++ b/phoenix-spark/src/it/scala/org/apache/phoenix/spark/AbstractPhoenixSparkIT.scala @@ -20,7 +20,7 @@ import org.apache.phoenix.end2end.BaseHBaseManagedTimeIT import org.apache.phoenix.query.BaseTest import org.apache.phoenix.util.PhoenixRuntime import 
org.apache.spark.{SparkConf, SparkContext} -import org.scalatest.{BeforeAndAfterAll, FunSuite, Matchers} +import org.scalatest.{BeforeAndAfter, BeforeAndAfterAll, FunSuite, Matchers} // Helper object to access the protected abstract static methods hidden in BaseHBaseManagedTimeIT @@ -33,7 +33,10 @@ object PhoenixSparkITHelper extends BaseHBaseManagedTimeIT { BaseHBaseManagedTimeIT.doSetup() } - def doTeardown = BaseHBaseManagedTimeIT.doTeardown() + def doTeardown = { +BaseHBaseManagedTimeIT.doTeardown() +BaseTest.tmpFolder.delete() + } def getUrl = BaseTest.getUrl } @@ -41,7 +44,7 @@ object PhoenixSparkITHelper extends BaseHBaseManagedTimeIT { /** * Base class for PhoenixSparkIT */ -class AbstractPhoenixSparkIT extends FunSuite with Matchers with BeforeAndAfterAll { +class AbstractPhoenixSparkIT extends FunSuite with Matchers with BeforeAndAfter with BeforeAndAfterAll { // A global tenantId we can use across tests final val TenantId = "theTenant" @@ -62,9 +65,8 @@ class AbstractPhoenixSparkIT extends FunSuite with Matchers with BeforeAndAfterA // Optional argument tenantId used for running tenant-specific SQL def setupTables(sqlSource: String, tenantId: Option[String]): Unit = { val props = new Properties -val id = tenantId match { - case Some(tid) => props.setProperty(PhoenixRuntime.TENANT_ID_ATTRIB, tid) - case _ => +if(tenantId.isDefined) { + props.setProperty(PhoenixRuntime.TENANT_ID_ATTRIB, tenantId.get) } conn = DriverManager.getConnection(PhoenixSparkITHelper.getUrl, props) @@ -88,7 +90,7 @@ class AbstractPhoenixSparkIT extends FunSuite with Matchers with BeforeAndAfterA PhoenixSparkITHelper.doSetup // We pass in null for TenantId here since these tables will be globally visible -setupTables("globalSetup.sql", null) +setupTables("globalSetup.sql", None) // We pass in a TenantId to allow the DDL to create tenant-specific tables/views setupTables("tenantSetup.sql", Some(TenantId)) @@ -103,6 +105,7 @@ class AbstractPhoenixSparkIT extends FunSuite with Matchers 
with BeforeAndAfterA override def afterAll() { conn.close() sc.stop() +PhoenixSparkITHelper.cleanUpAfterTest() PhoenixSparkITHelper.doTeardown } }
phoenix git commit: PHOENIX-4159 phoenix-spark tests are failing
Repository: phoenix Updated Branches: refs/heads/4.x-HBase-1.2 0718e01e1 -> 37150ecfd PHOENIX-4159 phoenix-spark tests are failing Fix usage of underlying JUnit 'TemporaryFolder' in phoenix-spark tests. Need to disable parallel execution until JUnit 4.13 is released (https://github.com/junit-team/junit4/issues/1223) Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/37150ecf Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/37150ecf Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/37150ecf Branch: refs/heads/4.x-HBase-1.2 Commit: 37150ecfd50ae82c74a66f2d49b2bca0bb1d3e95 Parents: 0718e01 Author: Josh Mahonin Authored: Wed Sep 6 13:21:57 2017 -0400 Committer: Josh Mahonin Committed: Wed Sep 6 13:24:25 2017 -0400 -- phoenix-spark/pom.xml | 5 - .../phoenix/spark/AbstractPhoenixSparkIT.scala | 17 ++--- 2 files changed, 14 insertions(+), 8 deletions(-) -- http://git-wip-us.apache.org/repos/asf/phoenix/blob/37150ecf/phoenix-spark/pom.xml -- diff --git a/phoenix-spark/pom.xml b/phoenix-spark/pom.xml index 7518ebc..f298f34 100644 --- a/phoenix-spark/pom.xml +++ b/phoenix-spark/pom.xml @@ -549,7 +549,10 @@ test - true + + false Integration-Test -Xmx1536m -XX:MaxPermSize=512m -XX:ReservedCodeCacheSize=512m http://git-wip-us.apache.org/repos/asf/phoenix/blob/37150ecf/phoenix-spark/src/it/scala/org/apache/phoenix/spark/AbstractPhoenixSparkIT.scala -- diff --git a/phoenix-spark/src/it/scala/org/apache/phoenix/spark/AbstractPhoenixSparkIT.scala b/phoenix-spark/src/it/scala/org/apache/phoenix/spark/AbstractPhoenixSparkIT.scala index ecaedc7..ca3470f 100644 --- a/phoenix-spark/src/it/scala/org/apache/phoenix/spark/AbstractPhoenixSparkIT.scala +++ b/phoenix-spark/src/it/scala/org/apache/phoenix/spark/AbstractPhoenixSparkIT.scala @@ -20,7 +20,7 @@ import org.apache.phoenix.end2end.BaseHBaseManagedTimeIT import org.apache.phoenix.query.BaseTest import org.apache.phoenix.util.PhoenixRuntime import 
org.apache.spark.{SparkConf, SparkContext} -import org.scalatest.{BeforeAndAfterAll, FunSuite, Matchers} +import org.scalatest.{BeforeAndAfter, BeforeAndAfterAll, FunSuite, Matchers} // Helper object to access the protected abstract static methods hidden in BaseHBaseManagedTimeIT @@ -33,7 +33,10 @@ object PhoenixSparkITHelper extends BaseHBaseManagedTimeIT { BaseHBaseManagedTimeIT.doSetup() } - def doTeardown = BaseHBaseManagedTimeIT.doTeardown() + def doTeardown = { +BaseHBaseManagedTimeIT.doTeardown() +BaseTest.tmpFolder.delete() + } def getUrl = BaseTest.getUrl } @@ -41,7 +44,7 @@ object PhoenixSparkITHelper extends BaseHBaseManagedTimeIT { /** * Base class for PhoenixSparkIT */ -class AbstractPhoenixSparkIT extends FunSuite with Matchers with BeforeAndAfterAll { +class AbstractPhoenixSparkIT extends FunSuite with Matchers with BeforeAndAfter with BeforeAndAfterAll { // A global tenantId we can use across tests final val TenantId = "theTenant" @@ -62,9 +65,8 @@ class AbstractPhoenixSparkIT extends FunSuite with Matchers with BeforeAndAfterA // Optional argument tenantId used for running tenant-specific SQL def setupTables(sqlSource: String, tenantId: Option[String]): Unit = { val props = new Properties -val id = tenantId match { - case Some(tid) => props.setProperty(PhoenixRuntime.TENANT_ID_ATTRIB, tid) - case _ => +if(tenantId.isDefined) { + props.setProperty(PhoenixRuntime.TENANT_ID_ATTRIB, tenantId.get) } conn = DriverManager.getConnection(PhoenixSparkITHelper.getUrl, props) @@ -88,7 +90,7 @@ class AbstractPhoenixSparkIT extends FunSuite with Matchers with BeforeAndAfterA PhoenixSparkITHelper.doSetup // We pass in null for TenantId here since these tables will be globally visible -setupTables("globalSetup.sql", null) +setupTables("globalSetup.sql", None) // We pass in a TenantId to allow the DDL to create tenant-specific tables/views setupTables("tenantSetup.sql", Some(TenantId)) @@ -103,6 +105,7 @@ class AbstractPhoenixSparkIT extends FunSuite with Matchers 
with BeforeAndAfterA override def afterAll() { conn.close() sc.stop() +PhoenixSparkITHelper.cleanUpAfterTest() PhoenixSparkITHelper.doTeardown } }
phoenix git commit: PHOENIX-4159 phoenix-spark tests are failing
Repository: phoenix Updated Branches: refs/heads/master 8c90a0bf4 -> ad52201e0 PHOENIX-4159 phoenix-spark tests are failing Fix usage of underlying JUnit 'TemporaryFolder' in phoenix-spark tests. Need to disable parallel execution until JUnit 4.13 is released (https://github.com/junit-team/junit4/issues/1223) Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/ad52201e Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/ad52201e Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/ad52201e Branch: refs/heads/master Commit: ad52201e07670d342ef33c5e8bd2ee595fe559cc Parents: 8c90a0b Author: Josh Mahonin Authored: Wed Sep 6 13:21:57 2017 -0400 Committer: Josh Mahonin Committed: Wed Sep 6 13:21:57 2017 -0400 -- phoenix-spark/pom.xml | 5 - .../phoenix/spark/AbstractPhoenixSparkIT.scala | 17 ++--- 2 files changed, 14 insertions(+), 8 deletions(-) -- http://git-wip-us.apache.org/repos/asf/phoenix/blob/ad52201e/phoenix-spark/pom.xml -- diff --git a/phoenix-spark/pom.xml b/phoenix-spark/pom.xml index 1f2741e..dfc7b87 100644 --- a/phoenix-spark/pom.xml +++ b/phoenix-spark/pom.xml @@ -549,7 +549,10 @@ test - true + + false Integration-Test -Xmx1536m -XX:MaxPermSize=512m -XX:ReservedCodeCacheSize=512m http://git-wip-us.apache.org/repos/asf/phoenix/blob/ad52201e/phoenix-spark/src/it/scala/org/apache/phoenix/spark/AbstractPhoenixSparkIT.scala -- diff --git a/phoenix-spark/src/it/scala/org/apache/phoenix/spark/AbstractPhoenixSparkIT.scala b/phoenix-spark/src/it/scala/org/apache/phoenix/spark/AbstractPhoenixSparkIT.scala index ecaedc7..ca3470f 100644 --- a/phoenix-spark/src/it/scala/org/apache/phoenix/spark/AbstractPhoenixSparkIT.scala +++ b/phoenix-spark/src/it/scala/org/apache/phoenix/spark/AbstractPhoenixSparkIT.scala @@ -20,7 +20,7 @@ import org.apache.phoenix.end2end.BaseHBaseManagedTimeIT import org.apache.phoenix.query.BaseTest import org.apache.phoenix.util.PhoenixRuntime import 
org.apache.spark.{SparkConf, SparkContext} -import org.scalatest.{BeforeAndAfterAll, FunSuite, Matchers} +import org.scalatest.{BeforeAndAfter, BeforeAndAfterAll, FunSuite, Matchers} // Helper object to access the protected abstract static methods hidden in BaseHBaseManagedTimeIT @@ -33,7 +33,10 @@ object PhoenixSparkITHelper extends BaseHBaseManagedTimeIT { BaseHBaseManagedTimeIT.doSetup() } - def doTeardown = BaseHBaseManagedTimeIT.doTeardown() + def doTeardown = { +BaseHBaseManagedTimeIT.doTeardown() +BaseTest.tmpFolder.delete() + } def getUrl = BaseTest.getUrl } @@ -41,7 +44,7 @@ object PhoenixSparkITHelper extends BaseHBaseManagedTimeIT { /** * Base class for PhoenixSparkIT */ -class AbstractPhoenixSparkIT extends FunSuite with Matchers with BeforeAndAfterAll { +class AbstractPhoenixSparkIT extends FunSuite with Matchers with BeforeAndAfter with BeforeAndAfterAll { // A global tenantId we can use across tests final val TenantId = "theTenant" @@ -62,9 +65,8 @@ class AbstractPhoenixSparkIT extends FunSuite with Matchers with BeforeAndAfterA // Optional argument tenantId used for running tenant-specific SQL def setupTables(sqlSource: String, tenantId: Option[String]): Unit = { val props = new Properties -val id = tenantId match { - case Some(tid) => props.setProperty(PhoenixRuntime.TENANT_ID_ATTRIB, tid) - case _ => +if(tenantId.isDefined) { + props.setProperty(PhoenixRuntime.TENANT_ID_ATTRIB, tenantId.get) } conn = DriverManager.getConnection(PhoenixSparkITHelper.getUrl, props) @@ -88,7 +90,7 @@ class AbstractPhoenixSparkIT extends FunSuite with Matchers with BeforeAndAfterA PhoenixSparkITHelper.doSetup // We pass in null for TenantId here since these tables will be globally visible -setupTables("globalSetup.sql", null) +setupTables("globalSetup.sql", None) // We pass in a TenantId to allow the DDL to create tenant-specific tables/views setupTables("tenantSetup.sql", Some(TenantId)) @@ -103,6 +105,7 @@ class AbstractPhoenixSparkIT extends FunSuite with Matchers 
with BeforeAndAfterA override def afterAll() { conn.close() sc.stop() +PhoenixSparkITHelper.cleanUpAfterTest() PhoenixSparkITHelper.doTeardown } }