This is an automated email from the ASF dual-hosted git repository.
nihaljain pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/hbase-connectors.git
The following commit(s) were added to refs/heads/master by this push:
new ac5d120 HBASE-28382 Support building hbase-connectors with JDK17 (#132)
ac5d120 is described below
commit ac5d120d45c2cc0db9be4bd009dd68934f782915
Author: Nihal Jain <[email protected]>
AuthorDate: Fri May 16 16:43:53 2025 +0530
HBASE-28382 Support building hbase-connectors with JDK17 (#132)
- Remove the EOL'ed gmaven-plugin, which is needlessly complex and does not work with JDK17
- Refactor UT and IT flags to ease profile-based runs
- Copy JDK17 flags from HBase and Spark
- Add profiles for JDK11 and JDK17 builds
Signed-off-by: Istvan Toth <[email protected]>
Reviewed-by: Nick Dimiduk <[email protected]>
---
pom.xml | 73 ++++++++++++++++++++++++++++++++++++++++++++
spark/hbase-spark-it/pom.xml | 6 +---
spark/hbase-spark/pom.xml | 25 +--------------
spark/pom.xml | 1 +
4 files changed, 76 insertions(+), 29 deletions(-)
diff --git a/pom.xml b/pom.xml
index 90608e9..8335e7e 100644
--- a/pom.xml
+++ b/pom.xml
@@ -125,6 +125,7 @@
<maven.build.timestamp.format>yyyy-MM-dd'T'HH:mm</maven.build.timestamp.format>
<buildDate>${maven.build.timestamp}</buildDate>
<compileSource>1.8</compileSource>
+ <releaseTarget>8</releaseTarget>
<java.min.version>${compileSource}</java.min.version>
<!-- override on command line to have generated LICENSE files include
diagnostic info for verifying notice requirements -->
@@ -173,6 +174,44 @@
<!-- Default to redirecting test logs
This only applies to the java tests, it's not obvious how get scalatest to do this. -->
<test.output.tofile>true</test.output.tofile>
+ <jacocoArgLine/>
+ <argLine/>
+ <spark-ut.args>-Xmx1536m -XX:ReservedCodeCacheSize=512m</spark-ut.args>
+ <spark-ut.argLine>${argLine} ${spark-ut.args}</spark-ut.argLine>
+ <!-- To Run IT Tests with a particular Xmx Value use -Dfailsafe.Xmx=XXXg -->
+ <failsafe.Xmx>4g</failsafe.Xmx>
+ <spark-it.args>-enableassertions -Xmx${failsafe.Xmx}
+ -Djava.security.egd=file:/dev/./urandom -XX:+CMSClassUnloadingEnabled
+ -verbose:gc -XX:+PrintCommandLineFlags -XX:+PrintFlagsFinal -XX:+IgnoreUnrecognizedVMOptions</spark-it.args>
+ <spark-it.argLine>${argLine} ${spark-it.args}</spark-it.argLine>
+ <hbase-surefire.argLine>-Djava.security.manager=allow</hbase-surefire.argLine>
+ <!-- Keep in sync with jvm flags in bin/hbase in main repo; Copied from there! -->
+ <hbase-surefire.jdk11.flags>-Dorg.apache.hbase.thirdparty.io.netty.tryReflectionSetAccessible=true
+ --add-modules jdk.unsupported
+ --add-opens java.base/java.io=ALL-UNNAMED
+ --add-opens java.base/java.nio=ALL-UNNAMED
+ --add-opens java.base/sun.nio.ch=ALL-UNNAMED
+ --add-opens java.base/java.lang=ALL-UNNAMED
+ --add-opens java.base/jdk.internal.ref=ALL-UNNAMED
+ --add-opens java.base/java.lang.reflect=ALL-UNNAMED
+ --add-opens java.base/java.util=ALL-UNNAMED
+ --add-opens java.base/java.util.concurrent=ALL-UNNAMED
+ --add-exports java.base/jdk.internal.misc=ALL-UNNAMED
+ --add-exports java.security.jgss/sun.security.krb5=ALL-UNNAMED
+ --add-exports java.base/sun.net.dns=ALL-UNNAMED
+ --add-exports java.base/sun.net.util=ALL-UNNAMED</hbase-surefire.jdk11.flags>
+ <hbase-surefire.jdk17.flags/>
+ <!-- Keep in sync with JavaModuleOptions.java from spark codebase;
+ Copied from there and next removed all duplicate hbase jvm flags and retained remaining ones -->
+ <spark-surefire.jdk11.flags>--add-opens=java.base/java.lang.invoke=ALL-UNNAMED
+ --add-opens=java.base/java.net=ALL-UNNAMED
+ --add-opens=java.base/java.util.concurrent.atomic=ALL-UNNAMED
+ --add-opens=java.base/sun.nio.cs=ALL-UNNAMED
+ --add-opens=java.base/sun.security.action=ALL-UNNAMED
+ --add-opens=java.base/sun.util.calendar=ALL-UNNAMED
+ -Djdk.reflect.useDirectMethodHandle=false
+ -Dio.netty.tryReflectionSetAccessible=true</spark-surefire.jdk11.flags>
+ <spark-surefire.jdk17.flags/>
</properties>
<dependencyManagement>
<dependencies>
@@ -935,6 +974,40 @@
</plugins>
</build>
</profile>
+ <profile>
+ <id>build-with-jdk8</id>
+ <activation>
+ <jdk>1.8</jdk>
+ </activation>
+ <properties>
+ <maven.compiler.source>${compileSource}</maven.compiler.source>
+ <maven.compiler.target>${compileSource}</maven.compiler.target>
+ </properties>
+ </profile>
+ <profile>
+ <id>build-with-jdk11</id>
+ <activation>
+ <jdk>[11,)</jdk>
+ </activation>
+ <properties>
+ <maven.compiler.release>${releaseTarget}</maven.compiler.release>
+ <argLine>${hbase-surefire.jdk11.flags} ${spark-surefire.jdk11.flags}
+ ${hbase-surefire.argLine}
+ ${jacocoArgLine}</argLine>
+ </properties>
+ </profile>
+ <profile>
+ <id>build-with-jdk17</id>
+ <activation>
+ <jdk>[17,)</jdk>
+ </activation>
+ <properties>
+ <argLine>${hbase-surefire.jdk11.flags} ${spark-surefire.jdk11.flags}
+ ${hbase-surefire.jdk17.flags} ${spark-surefire.jdk17.flags}
+ ${hbase-surefire.argLine}
+ ${jacocoArgLine}</argLine>
+ </properties>
+ </profile>
<profile>
<!-- Use Mac x64 version of protoc for Apple Silicon (aarch64) Macs -->
<id>osx-aarch64</id>
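
For background on the --add-opens flags added above: since JDK 16 the module
system denies reflective access to java.base internals by default, which is
what breaks HBase and Spark test code on newer JDKs. Below is a minimal
standalone sketch of the failure these flags avoid; it is illustrative only,
not part of this patch, and the class name is made up.

import java.lang.reflect.Field;
import java.lang.reflect.InaccessibleObjectException;

// Illustrative sketch, not part of this commit: shows the reflective-access
// failure that --add-opens java.base/java.lang=ALL-UNNAMED works around.
public class AddOpensDemo {
    public static void main(String[] args) throws Exception {
        Field value = String.class.getDeclaredField("value");
        try {
            // Denied on JDK 17 unless the JVM was started with
            // --add-opens java.base/java.lang=ALL-UNNAMED
            value.setAccessible(true);
            System.out.println("java.lang is opened to the unnamed module");
        } catch (InaccessibleObjectException e) {
            System.out.println("Blocked by the module system: " + e.getMessage());
        }
    }
}

Running "java AddOpensDemo" plain on JDK 17 prints the exception branch;
running it with the --add-opens flag from hbase-surefire.jdk11.flags passes.
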
diff --git a/spark/hbase-spark-it/pom.xml b/spark/hbase-spark-it/pom.xml
index d8d548d..31adbc7 100644
--- a/spark/hbase-spark-it/pom.xml
+++ b/spark/hbase-spark-it/pom.xml
@@ -34,8 +34,6 @@
<!-- Test inclusion patterns used by failsafe configuration -->
<unittest.include>**/Test*.java</unittest.include>
<integrationtest.include>**/IntegrationTest*.java</integrationtest.include>
- <!-- To Run Tests with a particular Xmx Value use -Dfailsafe.Xmx=XXXg -->
- <failsafe.Xmx>4g</failsafe.Xmx>
<!-- To run a single integration test, use -Dit.test=IntegrationTestXXX -->
</properties>
@@ -278,9 +276,7 @@
<!-- TODO: failsafe does timeout, but verify does not fail the build because of the timeout.
I believe it is a failsafe bug, we may consider using surefire -->
<forkedProcessTimeoutInSeconds>1800</forkedProcessTimeoutInSeconds>
- <argLine>-enableassertions -Xmx${failsafe.Xmx}
- -Djava.security.egd=file:/dev/./urandom -XX:+CMSClassUnloadingEnabled
- -verbose:gc -XX:+PrintCommandLineFlags -XX:+PrintFlagsFinal</argLine>
+ <argLine>${spark-it.argLine}</argLine>
</configuration>
</plugin>
<plugin>
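
Note that ${spark-it.argLine} expands ${argLine}, so the JDK profiles' flags,
including the -Djava.security.manager=allow from hbase-surefire.argLine, now
reach the failsafe JVM as well. For background on that property (JEP 411):
on JDK 17, installing a security manager at runtime is disallowed unless the
JVM was started with -Djava.security.manager=allow. A standalone sketch
(illustrative only, not part of this patch):

// Illustrative sketch, not part of this commit. SecurityManager is
// deprecated for removal on JDK 17; this only demonstrates the startup flag.
public class SecurityManagerAllowDemo {
    public static void main(String[] args) {
        try {
            // Throws UnsupportedOperationException on JDK 17 unless the JVM
            // was started with -Djava.security.manager=allow (JEP 411).
            System.setSecurityManager(new SecurityManager());
            System.out.println("Security manager installed");
        } catch (UnsupportedOperationException e) {
            System.out.println("Start with -Djava.security.manager=allow: " + e.getMessage());
        }
    }
}
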
diff --git a/spark/hbase-spark/pom.xml b/spark/hbase-spark/pom.xml
index 3b3670b..58fb5b0 100644
--- a/spark/hbase-spark/pom.xml
+++ b/spark/hbase-spark/pom.xml
@@ -438,27 +438,6 @@
</activation>
<build>
<plugins>
- <!--This is ugly but saves having to mess with profiles.
- This plugin uses groovy (yuck) just to set a single
- variable, target.jvm, dependent on what the value of
- scala version is.-->
- <plugin>
- <groupId>org.codehaus.gmaven</groupId>
- <artifactId>gmaven-plugin</artifactId>
- <version>1.5</version>
- <executions>
- <execution>
- <goals>
- <goal>execute</goal>
- </goals>
- <phase>validate</phase>
- <configuration>
- <source><![CDATA[pom.properties['target.jvm'] =
- pom.properties['scala.version'].startsWith('2.10')? '': '-target:jvm-1.8';]]></source>
- </configuration>
- </execution>
- </executions>
- </plugin>
<plugin>
<groupId>net.alchim31.maven</groupId>
<artifactId>scala-maven-plugin</artifactId>
@@ -468,8 +447,6 @@
<scalaVersion>${scala.version}</scalaVersion>
<args>
<arg>-feature</arg>
- <!--The target.jvm variable gets set above by the groovy
- snippet in the gmaven-plugin.-->
<arg>${target.jvm}</arg>
</args>
<source>${compileSource}</source>
@@ -511,7 +488,7 @@
</goals>
<phase>test</phase>
<configuration>
- <argLine>-Xmx1536m -XX:ReservedCodeCacheSize=512m</argLine>
+ <argLine>${spark-ut.argLine}</argLine>
<parallel>false</parallel>
</configuration>
</execution>
diff --git a/spark/pom.xml b/spark/pom.xml
index b770e6b..573b904 100644
--- a/spark/pom.xml
+++ b/spark/pom.xml
@@ -49,6 +49,7 @@
Please take caution when this version is modified -->
<scala.version>2.12.15</scala.version>
<scala.binary.version>2.12</scala.binary.version>
+ <target.jvm>-target:jvm-1.8</target.jvm>
</properties>
<dependencyManagement>