This is an automated email from the ASF dual-hosted git repository.

garyli pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/hudi.git
commit 452f5e2d661ee667cdd1348c0f580fcc650318b7
Author: Gary Li <yanjia.gary...@gmail.com>
AuthorDate: Fri Mar 26 20:58:29 2021 -0700

    [HOTFIX] close spark session in functional test suite and disable spark3 test for spark2 (#2727)
---
 .../org/apache/hudi/testutils/FunctionalTestHarness.java | 16 ++++++++++++++++
 hudi-spark-datasource/hudi-spark2/pom.xml                |  2 +-
 hudi-spark-datasource/hudi-spark3/pom.xml                |  7 +++++++
 .../hudi/utilities/testutils/UtilitiesTestBase.java      |  9 +++++++++
 pom.xml                                                  |  2 ++
 5 files changed, 35 insertions(+), 1 deletion(-)

diff --git a/hudi-client/hudi-spark-client/src/test/java/org/apache/hudi/testutils/FunctionalTestHarness.java b/hudi-client/hudi-spark-client/src/test/java/org/apache/hudi/testutils/FunctionalTestHarness.java
index fc02e6d..e391abf 100644
--- a/hudi-client/hudi-spark-client/src/test/java/org/apache/hudi/testutils/FunctionalTestHarness.java
+++ b/hudi-client/hudi-spark-client/src/test/java/org/apache/hudi/testutils/FunctionalTestHarness.java
@@ -152,6 +152,8 @@ public class FunctionalTestHarness implements SparkProvider, DFSProvider, Hoodie
       hdfsTestService.stop();
       hdfsTestService = null;
+      jsc.close();
+      jsc = null;
       spark.stop();
       spark = null;
     }));
@@ -166,5 +168,19 @@
     for (FileStatus f : fileStatuses) {
       fs.delete(f.getPath(), true);
     }
+    if (hdfsTestService != null) {
+      hdfsTestService.stop();
+      hdfsTestService = null;
+    }
+    if (spark != null) {
+      spark.stop();
+      spark = null;
+    }
+    if (jsc != null) {
+      jsc.close();
+      jsc = null;
+    }
+    sqlContext = null;
+    context = null;
   }
 }
diff --git a/hudi-spark-datasource/hudi-spark2/pom.xml b/hudi-spark-datasource/hudi-spark2/pom.xml
index 9a232d1..c27bb40 100644
--- a/hudi-spark-datasource/hudi-spark2/pom.xml
+++ b/hudi-spark-datasource/hudi-spark2/pom.xml
@@ -151,7 +151,7 @@
     <dependency>
       <groupId>org.scala-lang</groupId>
       <artifactId>scala-library</artifactId>
-      <version>${scala11.version}</version>
+      <version>${scala.version}</version>
     </dependency>

     <!-- Hoodie -->
diff --git a/hudi-spark-datasource/hudi-spark3/pom.xml b/hudi-spark-datasource/hudi-spark3/pom.xml
index d47e90e..f3c25a8 100644
--- a/hudi-spark-datasource/hudi-spark3/pom.xml
+++ b/hudi-spark-datasource/hudi-spark3/pom.xml
@@ -125,6 +125,13 @@
         </configuration>
       </plugin>
       <plugin>
+        <groupId>org.apache.maven.plugins</groupId>
+        <artifactId>maven-surefire-plugin</artifactId>
+        <configuration>
+          <skipTests>${skip.hudi-spark3.unit.tests}</skipTests>
+        </configuration>
+      </plugin>
+      <plugin>
         <groupId>org.apache.rat</groupId>
         <artifactId>apache-rat-plugin</artifactId>
       </plugin>
diff --git a/hudi-utilities/src/test/java/org/apache/hudi/utilities/testutils/UtilitiesTestBase.java b/hudi-utilities/src/test/java/org/apache/hudi/utilities/testutils/UtilitiesTestBase.java
index 6efd468..0adef52 100644
--- a/hudi-utilities/src/test/java/org/apache/hudi/utilities/testutils/UtilitiesTestBase.java
+++ b/hudi-utilities/src/test/java/org/apache/hudi/utilities/testutils/UtilitiesTestBase.java
@@ -124,15 +124,19 @@ public class UtilitiesTestBase {
   public static void cleanupClass() {
     if (hdfsTestService != null) {
       hdfsTestService.stop();
+      hdfsTestService = null;
     }
     if (hiveServer != null) {
       hiveServer.stop();
+      hiveServer = null;
     }
     if (hiveTestService != null) {
       hiveTestService.stop();
+      hiveTestService = null;
    }
     if (zookeeperTestService != null) {
       zookeeperTestService.stop();
+      zookeeperTestService = null;
     }
   }
@@ -150,6 +154,11 @@ public class UtilitiesTestBase {
     TestDataSource.resetDataGen();
     if (jsc != null) {
       jsc.stop();
+      jsc = null;
+    }
+    if (sparkSession != null) {
+      sparkSession.close();
+      sparkSession = null;
     }
     if (context != null) {
       context = null;
diff --git a/pom.xml b/pom.xml
index 4c950fe..61e3ac8 100644
--- a/pom.xml
+++ b/pom.xml
@@ -133,6 +133,7 @@
     <skipFTs>${skipTests}</skipFTs>
     <skipITs>${skipTests}</skipITs>
     <skip.hudi-spark2.unit.tests>${skipTests}</skip.hudi-spark2.unit.tests>
+    <skip.hudi-spark3.unit.tests>${skipTests}</skip.hudi-spark3.unit.tests>
     <project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
     <main.basedir>${project.basedir}</main.basedir>
     <spark.bundle.hive.scope>provided</spark.bundle.hive.scope>
@@ -1424,6 +1425,7 @@
       <properties>
        <scala.version>${scala12.version}</scala.version>
        <scala.binary.version>2.12</scala.binary.version>
+       <skip.hudi-spark3.unit.tests>true</skip.hudi-spark3.unit.tests>
       </properties>
       <activation>
         <property>