[ https://issues.apache.org/jira/browse/SPARK-28685?page=com.atlassian.jira.plugin.system.issuetabpanels:all-tabpanel ]

Dongjoon Hyun updated SPARK-28685:
----------------------------------
    Summary: Test HMS 2.0.0+ in VersionsSuite/HiveClientSuites on JDK 11  (was: VersionsSuite and HiveClientSuites support test on JDK 11)

> Test HMS 2.0.0+ in VersionsSuite/HiveClientSuites on JDK 11
> -----------------------------------------------------------
>
>                 Key: SPARK-28685
>                 URL: https://issues.apache.org/jira/browse/SPARK-28685
>             Project: Spark
>          Issue Type: Sub-task
>          Components: SQL, Tests
>    Affects Versions: 3.0.0
>            Reporter: Yuming Wang
>            Priority: Major
>
> {code:sh}
> export JAVA_HOME="/usr/lib/jdk-11.0.3"
> build/sbt "hive/test-only *.HiveClientSuites" -Phive -Phadoop-3.2
> {code}
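> For reference, the same command is expected to pass when pointed at a JDK 8 install (the JAVA_HOME below is illustrative, not from this report):
> {code:sh}
> # Baseline run on JDK 8 to confirm the abort below is JDK 11 specific.
> # Adjust the path to a local JDK 8 installation.
> export JAVA_HOME="/usr/lib/jdk1.8.0"
> build/sbt "hive/test-only *.HiveClientSuites" -Phive -Phadoop-3.2
> {code}
> Under JDK 11, the suite aborts while initializing the metastore client: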
> {noformat}
> [info] org.apache.spark.sql.hive.client.HiveClientSuites *** ABORTED *** (1 minute, 23 seconds)
> [info]   java.lang.reflect.InvocationTargetException:
> [info]   at java.base/jdk.internal.reflect.NativeConstructorAccessorImpl.newInstance0(Native Method)
> [info]   at java.base/jdk.internal.reflect.NativeConstructorAccessorImpl.newInstance(NativeConstructorAccessorImpl.java:62)
> [info]   at java.base/jdk.internal.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:45)
> [info]   at java.base/java.lang.reflect.Constructor.newInstance(Constructor.java:490)
> [info]   at org.apache.spark.sql.hive.client.IsolatedClientLoader.createClient(IsolatedClientLoader.scala:296)
> [info]   at org.apache.spark.sql.hive.client.HiveClientBuilder$.buildClient(HiveClientBuilder.scala:58)
> [info]   at org.apache.spark.sql.hive.client.HiveVersionSuite.buildClient(HiveVersionSuite.scala:50)
> [info]   at org.apache.spark.sql.hive.client.HiveClientSuite.init(HiveClientSuite.scala:54)
> [info]   at org.apache.spark.sql.hive.client.HiveClientSuite.beforeAll(HiveClientSuite.scala:100)
> [info]   at org.scalatest.BeforeAndAfterAll.liftedTree1$1(BeforeAndAfterAll.scala:212)
> [info]   at org.scalatest.BeforeAndAfterAll.run(BeforeAndAfterAll.scala:210)
> [info]   at org.scalatest.BeforeAndAfterAll.run$(BeforeAndAfterAll.scala:208)
> [info]   at org.apache.spark.SparkFunSuite.run(SparkFunSuite.scala:56)
> [info]   at org.scalatest.Suite.callExecuteOnSuite$1(Suite.scala:1210)
> [info]   at org.scalatest.Suite.$anonfun$runNestedSuites$1(Suite.scala:1257)
> [info]   at scala.collection.IndexedSeqOptimized.foreach(IndexedSeqOptimized.scala:36)
> [info]   at scala.collection.IndexedSeqOptimized.foreach$(IndexedSeqOptimized.scala:33)
> [info]   at scala.collection.mutable.ArrayOps$ofRef.foreach(ArrayOps.scala:198)
> [info]   at org.scalatest.Suite.runNestedSuites(Suite.scala:1255)
> [info]   at org.scalatest.Suite.runNestedSuites$(Suite.scala:1189)
> [info]   at org.apache.spark.sql.hive.client.HiveClientSuites.runNestedSuites(HiveClientSuites.scala:24)
> [info]   at org.scalatest.Suite.run(Suite.scala:1144)
> [info]   at org.scalatest.Suite.run$(Suite.scala:1129)
> [info]   at org.apache.spark.sql.hive.client.HiveClientSuites.run(HiveClientSuites.scala:24)
> [info]   at org.scalatest.tools.Framework.org$scalatest$tools$Framework$$runSuite(Framework.scala:314)
> [info]   at org.scalatest.tools.Framework$ScalaTestTask.execute(Framework.scala:507)
> [info]   at sbt.ForkMain$Run$2.call(ForkMain.java:296)
> [info]   at sbt.ForkMain$Run$2.call(ForkMain.java:286)
> [info]   at java.base/java.util.concurrent.FutureTask.run(FutureTask.java:264)
> [info]   at java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1128)
> [info]   at java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:628)
> [info]   at java.base/java.lang.Thread.run(Thread.java:834)
> [info]   Cause: java.lang.RuntimeException: java.lang.RuntimeException: Unable to instantiate org.apache.hadoop.hive.metastore.HiveMetaStoreClient
> [info]   at org.apache.hadoop.hive.ql.session.SessionState.start(SessionState.java:346)
> [info]   at org.apache.spark.sql.hive.client.HiveClientImpl.newState(HiveClientImpl.scala:193)
> [info]   at org.apache.spark.sql.hive.client.HiveClientImpl.<init>(HiveClientImpl.scala:125)
> [info]   at java.base/jdk.internal.reflect.NativeConstructorAccessorImpl.newInstance0(Native Method)
> [info]   at java.base/jdk.internal.reflect.NativeConstructorAccessorImpl.newInstance(NativeConstructorAccessorImpl.java:62)
> [info]   at java.base/jdk.internal.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:45)
> [info]   at java.base/java.lang.reflect.Constructor.newInstance(Constructor.java:490)
> [info]   at org.apache.spark.sql.hive.client.IsolatedClientLoader.createClient(IsolatedClientLoader.scala:296)
> [info]   at org.apache.spark.sql.hive.client.HiveClientBuilder$.buildClient(HiveClientBuilder.scala:58)
> [info]   at org.apache.spark.sql.hive.client.HiveVersionSuite.buildClient(HiveVersionSuite.scala:50)
> [info]   at org.apache.spark.sql.hive.client.HiveClientSuite.init(HiveClientSuite.scala:54)
> [info]   at org.apache.spark.sql.hive.client.HiveClientSuite.beforeAll(HiveClientSuite.scala:100)
> [info]   at org.scalatest.BeforeAndAfterAll.liftedTree1$1(BeforeAndAfterAll.scala:212)
> [info]   at org.scalatest.BeforeAndAfterAll.run(BeforeAndAfterAll.scala:210)
> [info]   at org.scalatest.BeforeAndAfterAll.run$(BeforeAndAfterAll.scala:208)
> [info]   at org.apache.spark.SparkFunSuite.run(SparkFunSuite.scala:56)
> [info]   at org.scalatest.Suite.callExecuteOnSuite$1(Suite.scala:1210)
> [info]   at org.scalatest.Suite.$anonfun$runNestedSuites$1(Suite.scala:1257)
> [info]   at scala.collection.IndexedSeqOptimized.foreach(IndexedSeqOptimized.scala:36)
> [info]   at scala.collection.IndexedSeqOptimized.foreach$(IndexedSeqOptimized.scala:33)
> [info]   at scala.collection.mutable.ArrayOps$ofRef.foreach(ArrayOps.scala:198)
> [info]   at org.scalatest.Suite.runNestedSuites(Suite.scala:1255)
> [info]   at org.scalatest.Suite.runNestedSuites$(Suite.scala:1189)
> [info]   at org.apache.spark.sql.hive.client.HiveClientSuites.runNestedSuites(HiveClientSuites.scala:24)
> [info]   at org.scalatest.Suite.run(Suite.scala:1144)
> [info]   at org.scalatest.Suite.run$(Suite.scala:1129)
> [info]   at org.apache.spark.sql.hive.client.HiveClientSuites.run(HiveClientSuites.scala:24)
> [info]   at org.scalatest.tools.Framework.org$scalatest$tools$Framework$$runSuite(Framework.scala:314)
> [info]   at org.scalatest.tools.Framework$ScalaTestTask.execute(Framework.scala:507)
> [info]   at sbt.ForkMain$Run$2.call(ForkMain.java:296)
> [info]   at sbt.ForkMain$Run$2.call(ForkMain.java:286)
> [info]   at java.base/java.util.concurrent.FutureTask.run(FutureTask.java:264)
> [info]   at java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1128)
> [info]   at java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:628)
> [info]   at java.base/java.lang.Thread.run(Thread.java:834)
> [info]   Cause: java.lang.RuntimeException: Unable to instantiate org.apache.hadoop.hive.metastore.HiveMetaStoreClient
> [info]   at org.apache.hadoop.hive.metastore.MetaStoreUtils.newInstance(MetaStoreUtils.java:1412)
> [info]   at org.apache.hadoop.hive.metastore.RetryingMetaStoreClient.<init>(RetryingMetaStoreClient.java:62)
> [info]   at org.apache.hadoop.hive.metastore.RetryingMetaStoreClient.getProxy(RetryingMetaStoreClient.java:72)
> [info]   at org.apache.hadoop.hive.ql.metadata.Hive.createMetaStoreClient(Hive.java:2453)
> [info]   at org.apache.hadoop.hive.ql.metadata.Hive.getMSC(Hive.java:2465)
> [info]   at org.apache.hadoop.hive.ql.session.SessionState.start(SessionState.java:340)
> [info]   at org.apache.spark.sql.hive.client.HiveClientImpl.newState(HiveClientImpl.scala:193)
> [info]   at org.apache.spark.sql.hive.client.HiveClientImpl.<init>(HiveClientImpl.scala:125)
> [info]   at java.base/jdk.internal.reflect.NativeConstructorAccessorImpl.newInstance0(Native Method)
> [info]   at java.base/jdk.internal.reflect.NativeConstructorAccessorImpl.newInstance(NativeConstructorAccessorImpl.java:62)
> [info]   at java.base/jdk.internal.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:45)
> [info]   at java.base/java.lang.reflect.Constructor.newInstance(Constructor.java:490)
> [info]   at org.apache.spark.sql.hive.client.IsolatedClientLoader.createClient(IsolatedClientLoader.scala:296)
> [info]   at org.apache.spark.sql.hive.client.HiveClientBuilder$.buildClient(HiveClientBuilder.scala:58)
> [info]   at org.apache.spark.sql.hive.client.HiveVersionSuite.buildClient(HiveVersionSuite.scala:50)
> [info]   at org.apache.spark.sql.hive.client.HiveClientSuite.init(HiveClientSuite.scala:54)
> [info]   at org.apache.spark.sql.hive.client.HiveClientSuite.beforeAll(HiveClientSuite.scala:100)
> [info]   at org.scalatest.BeforeAndAfterAll.liftedTree1$1(BeforeAndAfterAll.scala:212)
> [info]   at org.scalatest.BeforeAndAfterAll.run(BeforeAndAfterAll.scala:210)
> [info]   at org.scalatest.BeforeAndAfterAll.run$(BeforeAndAfterAll.scala:208)
> [info]   at org.apache.spark.SparkFunSuite.run(SparkFunSuite.scala:56)
> [info]   at org.scalatest.Suite.callExecuteOnSuite$1(Suite.scala:1210)
> [info]   at org.scalatest.Suite.$anonfun$runNestedSuites$1(Suite.scala:1257)
> [info]   at scala.collection.IndexedSeqOptimized.foreach(IndexedSeqOptimized.scala:36)
> [info]   at scala.collection.IndexedSeqOptimized.foreach$(IndexedSeqOptimized.scala:33)
> [info]   at scala.collection.mutable.ArrayOps$ofRef.foreach(ArrayOps.scala:198)
> [info]   at org.scalatest.Suite.runNestedSuites(Suite.scala:1255)
> [info]   at org.scalatest.Suite.runNestedSuites$(Suite.scala:1189)
> [info]   at org.apache.spark.sql.hive.client.HiveClientSuites.runNestedSuites(HiveClientSuites.scala:24)
> [info]   at org.scalatest.Suite.run(Suite.scala:1144)
> [info]   at org.scalatest.Suite.run$(Suite.scala:1129)
> [info]   at org.apache.spark.sql.hive.client.HiveClientSuites.run(HiveClientSuites.scala:24)
> [info]   at org.scalatest.tools.Framework.org$scalatest$tools$Framework$$runSuite(Framework.scala:314)
> [info]   at org.scalatest.tools.Framework$ScalaTestTask.execute(Framework.scala:507)
> [info]   at sbt.ForkMain$Run$2.call(ForkMain.java:296)
> [info]   at sbt.ForkMain$Run$2.call(ForkMain.java:286)
> [info]   at java.base/java.util.concurrent.FutureTask.run(FutureTask.java:264)
> [info]   at java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1128)
> [info]   at java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:628)
> [info]   at java.base/java.lang.Thread.run(Thread.java:834)
> [info]   Cause: java.lang.reflect.InvocationTargetException:
> [info]   at java.base/jdk.internal.reflect.NativeConstructorAccessorImpl.newInstance0(Native Method)
> [info]   at java.base/jdk.internal.reflect.NativeConstructorAccessorImpl.newInstance(NativeConstructorAccessorImpl.java:62)
> [info]   at java.base/jdk.internal.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:45)
> [info]   at java.base/java.lang.reflect.Constructor.newInstance(Constructor.java:490)
> [info]   at org.apache.hadoop.hive.metastore.MetaStoreUtils.newInstance(MetaStoreUtils.java:1410)
> [info]   at org.apache.hadoop.hive.metastore.RetryingMetaStoreClient.<init>(RetryingMetaStoreClient.java:62)
> [info]   at org.apache.hadoop.hive.metastore.RetryingMetaStoreClient.getProxy(RetryingMetaStoreClient.java:72)
> [info]   at org.apache.hadoop.hive.ql.metadata.Hive.createMetaStoreClient(Hive.java:2453)
> [info]   at org.apache.hadoop.hive.ql.metadata.Hive.getMSC(Hive.java:2465)
> [info]   at org.apache.hadoop.hive.ql.session.SessionState.start(SessionState.java:340)
> [info]   at org.apache.spark.sql.hive.client.HiveClientImpl.newState(HiveClientImpl.scala:193)
> [info]   at org.apache.spark.sql.hive.client.HiveClientImpl.<init>(HiveClientImpl.scala:125)
> [info]   at java.base/jdk.internal.reflect.NativeConstructorAccessorImpl.newInstance0(Native Method)
> [info]   at java.base/jdk.internal.reflect.NativeConstructorAccessorImpl.newInstance(NativeConstructorAccessorImpl.java:62)
> [info]   at java.base/jdk.internal.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:45)
> [info]   at java.base/java.lang.reflect.Constructor.newInstance(Constructor.java:490)
> [info]   at org.apache.spark.sql.hive.client.IsolatedClientLoader.createClient(IsolatedClientLoader.scala:296)
> [info]   at org.apache.spark.sql.hive.client.HiveClientBuilder$.buildClient(HiveClientBuilder.scala:58)
> [info]   at org.apache.spark.sql.hive.client.HiveVersionSuite.buildClient(HiveVersionSuite.scala:50)
> [info]   at org.apache.spark.sql.hive.client.HiveClientSuite.init(HiveClientSuite.scala:54)
> [info]   at org.apache.spark.sql.hive.client.HiveClientSuite.beforeAll(HiveClientSuite.scala:100)
> [info]   at org.scalatest.BeforeAndAfterAll.liftedTree1$1(BeforeAndAfterAll.scala:212)
> [info]   at org.scalatest.BeforeAndAfterAll.run(BeforeAndAfterAll.scala:210)
> [info]   at org.scalatest.BeforeAndAfterAll.run$(BeforeAndAfterAll.scala:208)
> [info]   at org.apache.spark.SparkFunSuite.run(SparkFunSuite.scala:56)
> [info]   at org.scalatest.Suite.callExecuteOnSuite$1(Suite.scala:1210)
> [info]   at org.scalatest.Suite.$anonfun$runNestedSuites$1(Suite.scala:1257)
> [info]   at scala.collection.IndexedSeqOptimized.foreach(IndexedSeqOptimized.scala:36)
> [info]   at scala.collection.IndexedSeqOptimized.foreach$(IndexedSeqOptimized.scala:33)
> [info]   at scala.collection.mutable.ArrayOps$ofRef.foreach(ArrayOps.scala:198)
> [info]   at org.scalatest.Suite.runNestedSuites(Suite.scala:1255)
> [info]   at org.scalatest.Suite.runNestedSuites$(Suite.scala:1189)
> [info]   at org.apache.spark.sql.hive.client.HiveClientSuites.runNestedSuites(HiveClientSuites.scala:24)
> [info]   at org.scalatest.Suite.run(Suite.scala:1144)
> [info]   at org.scalatest.Suite.run$(Suite.scala:1129)
> [info]   at org.apache.spark.sql.hive.client.HiveClientSuites.run(HiveClientSuites.scala:24)
> [info]   at org.scalatest.tools.Framework.org$scalatest$tools$Framework$$runSuite(Framework.scala:314)
> [info]   at org.scalatest.tools.Framework$ScalaTestTask.execute(Framework.scala:507)
> [info]   at sbt.ForkMain$Run$2.call(ForkMain.java:296)
> [info]   at sbt.ForkMain$Run$2.call(ForkMain.java:286)
> [info]   at java.base/java.util.concurrent.FutureTask.run(FutureTask.java:264)
> [info]   at java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1128)
> [info]   at java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:628)
> [info]   at java.base/java.lang.Thread.run(Thread.java:834)
> [info]   Cause: javax.jdo.JDOFatalInternalException: The java type java.lang.Long (jdbc-type="", sql-type="") cant be mapped for this datastore. No mapping is available.
> [info]   at org.datanucleus.api.jdo.NucleusJDOHelper.getJDOExceptionForNucleusException(NucleusJDOHelper.java:591)
> [info]   at org.datanucleus.api.jdo.JDOPersistenceManager.jdoMakePersistent(JDOPersistenceManager.java:732)
> [info]   at org.datanucleus.api.jdo.JDOPersistenceManager.makePersistent(JDOPersistenceManager.java:752)
> [info]   at org.apache.hadoop.hive.metastore.ObjectStore.setMetaStoreSchemaVersion(ObjectStore.java:6389)
> [info]   at org.apache.hadoop.hive.metastore.ObjectStore.checkSchema(ObjectStore.java:6299)
> [info]   at org.apache.hadoop.hive.metastore.ObjectStore.verifySchema(ObjectStore.java:6277)
> [info]   at java.base/jdk.internal.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
> [info]   at java.base/jdk.internal.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
> [info]   at java.base/jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
> [info]   at java.base/java.lang.reflect.Method.invoke(Method.java:566)
> [info]   at org.apache.hadoop.hive.metastore.RawStoreProxy.invoke(RawStoreProxy.java:108)
> [info]   at com.sun.proxy.$Proxy13.verifySchema(Unknown Source)
> [info]   at org.apache.hadoop.hive.metastore.HiveMetaStore$HMSHandler.getMS(HiveMetaStore.java:476)
> [info]   at org.apache.hadoop.hive.metastore.HiveMetaStore$HMSHandler.createDefaultDB(HiveMetaStore.java:523)
> [info]   at org.apache.hadoop.hive.metastore.HiveMetaStore$HMSHandler.init(HiveMetaStore.java:397)
> [info]   at org.apache.hadoop.hive.metastore.HiveMetaStore$HMSHandler.<init>(HiveMetaStore.java:356)
> [info]   at org.apache.hadoop.hive.metastore.RetryingHMSHandler.<init>(RetryingHMSHandler.java:54)
> [info]   at org.apache.hadoop.hive.metastore.RetryingHMSHandler.getProxy(RetryingHMSHandler.java:59)
> [info]   at org.apache.hadoop.hive.metastore.HiveMetaStore.newHMSHandler(HiveMetaStore.java:4944)
> [info]   at org.apache.hadoop.hive.metastore.HiveMetaStoreClient.<init>(HiveMetaStoreClient.java:171)
> [info]   at java.base/jdk.internal.reflect.NativeConstructorAccessorImpl.newInstance0(Native Method)
> [info]   at java.base/jdk.internal.reflect.NativeConstructorAccessorImpl.newInstance(NativeConstructorAccessorImpl.java:62)
> [info]   at java.base/jdk.internal.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:45)
> [info]   at java.base/java.lang.reflect.Constructor.newInstance(Constructor.java:490)
> [info]   at org.apache.hadoop.hive.metastore.MetaStoreUtils.newInstance(MetaStoreUtils.java:1410)
> [info]   at org.apache.hadoop.hive.metastore.RetryingMetaStoreClient.<init>(RetryingMetaStoreClient.java:62)
> [info]   at org.apache.hadoop.hive.metastore.RetryingMetaStoreClient.getProxy(RetryingMetaStoreClient.java:72)
> [info]   at org.apache.hadoop.hive.ql.metadata.Hive.createMetaStoreClient(Hive.java:2453)
> [info]   at org.apache.hadoop.hive.ql.metadata.Hive.getMSC(Hive.java:2465)
> [info]   at org.apache.hadoop.hive.ql.session.SessionState.start(SessionState.java:340)
> [info]   at org.apache.spark.sql.hive.client.HiveClientImpl.newState(HiveClientImpl.scala:193)
> [info]   at org.apache.spark.sql.hive.client.HiveClientImpl.<init>(HiveClientImpl.scala:125)
> [info]   at java.base/jdk.internal.reflect.NativeConstructorAccessorImpl.newInstance0(Native Method)
> [info]   at java.base/jdk.internal.reflect.NativeConstructorAccessorImpl.newInstance(NativeConstructorAccessorImpl.java:62)
> [info]   at java.base/jdk.internal.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:45)
> [info]   at java.base/java.lang.reflect.Constructor.newInstance(Constructor.java:490)
> [info]   at org.apache.spark.sql.hive.client.IsolatedClientLoader.createClient(IsolatedClientLoader.scala:296)
> [info]   at org.apache.spark.sql.hive.client.HiveClientBuilder$.buildClient(HiveClientBuilder.scala:58)
> [info]   at org.apache.spark.sql.hive.client.HiveVersionSuite.buildClient(HiveVersionSuite.scala:50)
> [info]   at org.apache.spark.sql.hive.client.HiveClientSuite.init(HiveClientSuite.scala:54)
> [info]   at org.apache.spark.sql.hive.client.HiveClientSuite.beforeAll(HiveClientSuite.scala:100)
> [info]   at org.scalatest.BeforeAndAfterAll.liftedTree1$1(BeforeAndAfterAll.scala:212)
> [info]   at org.scalatest.BeforeAndAfterAll.run(BeforeAndAfterAll.scala:210)
> [info]   at org.scalatest.BeforeAndAfterAll.run$(BeforeAndAfterAll.scala:208)
> [info]   at org.apache.spark.SparkFunSuite.run(SparkFunSuite.scala:56)
> [info]   at org.scalatest.Suite.callExecuteOnSuite$1(Suite.scala:1210)
> [info]   at org.scalatest.Suite.$anonfun$runNestedSuites$1(Suite.scala:1257)
> [info]   at scala.collection.IndexedSeqOptimized.foreach(IndexedSeqOptimized.scala:36)
> [info]   at scala.collection.IndexedSeqOptimized.foreach$(IndexedSeqOptimized.scala:33)
> [info]   at scala.collection.mutable.ArrayOps$ofRef.foreach(ArrayOps.scala:198)
> [info]   at org.scalatest.Suite.runNestedSuites(Suite.scala:1255)
> [info]   at org.scalatest.Suite.runNestedSuites$(Suite.scala:1189)
> [info]   at org.apache.spark.sql.hive.client.HiveClientSuites.runNestedSuites(HiveClientSuites.scala:24)
> [info]   at org.scalatest.Suite.run(Suite.scala:1144)
> [info]   at org.scalatest.Suite.run$(Suite.scala:1129)
> [info]   at org.apache.spark.sql.hive.client.HiveClientSuites.run(HiveClientSuites.scala:24)
> [info]   at org.scalatest.tools.Framework.org$scalatest$tools$Framework$$runSuite(Framework.scala:314)
> [info]   at org.scalatest.tools.Framework$ScalaTestTask.execute(Framework.scala:507)
> [info]   at sbt.ForkMain$Run$2.call(ForkMain.java:296)
> [info]   at sbt.ForkMain$Run$2.call(ForkMain.java:286)
> [info]   at java.base/java.util.concurrent.FutureTask.run(FutureTask.java:264)
> [info]   at java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1128)
> [info]   at java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:628)
> [info]   at java.base/java.lang.Thread.run(Thread.java:834)
> [info]   Cause: org.datanucleus.exceptions.NucleusException: The java type java.lang.Long (jdbc-type="", sql-type="") cant be mapped for this datastore. No mapping is available.
> [info]   at org.datanucleus.store.rdbms.mapping.RDBMSMappingManager.getDatastoreMappingClass(RDBMSMappingManager.java:1215)
> [info]   at org.datanucleus.store.rdbms.mapping.RDBMSMappingManager.createDatastoreMapping(RDBMSMappingManager.java:1378)
> [info]   at org.datanucleus.store.rdbms.table.AbstractClassTable.addDatastoreId(AbstractClassTable.java:392)
> [info]   at org.datanucleus.store.rdbms.table.ClassTable.initializePK(ClassTable.java:1087)
> [info]   at org.datanucleus.store.rdbms.table.ClassTable.preInitialize(ClassTable.java:247)
> [info]   at org.datanucleus.store.rdbms.RDBMSStoreManager$ClassAdder.addClassTable(RDBMSStoreManager.java:3118)
> [info]   at org.datanucleus.store.rdbms.RDBMSStoreManager$ClassAdder.addClassTables(RDBMSStoreManager.java:2909)
> [info]   at org.datanucleus.store.rdbms.RDBMSStoreManager$ClassAdder.addClassTablesAndValidate(RDBMSStoreManager.java:3182)
> [info]   at org.datanucleus.store.rdbms.RDBMSStoreManager$ClassAdder.run(RDBMSStoreManager.java:2841)
> [info]   at org.datanucleus.store.rdbms.AbstractSchemaTransaction.execute(AbstractSchemaTransaction.java:122)
> [info]   at org.datanucleus.store.rdbms.RDBMSStoreManager.addClasses(RDBMSStoreManager.java:1605)
> [info]   at org.datanucleus.store.AbstractStoreManager.addClass(AbstractStoreManager.java:954)
> [info]   at org.datanucleus.store.rdbms.RDBMSStoreManager.getDatastoreClass(RDBMSStoreManager.java:679)
> [info]   at org.datanucleus.store.rdbms.RDBMSStoreManager.getPropertiesForGenerator(RDBMSStoreManager.java:2045)
> [info]   at org.datanucleus.store.AbstractStoreManager.getStrategyValue(AbstractStoreManager.java:1365)
> [info]   at org.datanucleus.ExecutionContextImpl.newObjectId(ExecutionContextImpl.java:3827)
> [info]   at org.datanucleus.state.JDOStateManager.setIdentity(JDOStateManager.java:2571)
> [info]   at org.datanucleus.state.JDOStateManager.initialiseForPersistentNew(JDOStateManager.java:513)
> [info]   at org.datanucleus.state.ObjectProviderFactoryImpl.newForPersistentNew(ObjectProviderFactoryImpl.java:232)
> [info]   at org.datanucleus.ExecutionContextImpl.newObjectProviderForPersistentNew(ExecutionContextImpl.java:1414)
> [info]   at org.datanucleus.ExecutionContextImpl.persistObjectInternal(ExecutionContextImpl.java:2218)
> [info]   at org.datanucleus.ExecutionContextImpl.persistObjectWork(ExecutionContextImpl.java:2065)
> [info]   at org.datanucleus.ExecutionContextImpl.persistObject(ExecutionContextImpl.java:1913)
> [info]   at org.datanucleus.ExecutionContextThreadedImpl.persistObject(ExecutionContextThreadedImpl.java:217)
> [info]   at org.datanucleus.api.jdo.JDOPersistenceManager.jdoMakePersistent(JDOPersistenceManager.java:727)
> [info]   at org.datanucleus.api.jdo.JDOPersistenceManager.makePersistent(JDOPersistenceManager.java:752)
> [info]   at org.apache.hadoop.hive.metastore.ObjectStore.setMetaStoreSchemaVersion(ObjectStore.java:6389)
> [info]   at org.apache.hadoop.hive.metastore.ObjectStore.checkSchema(ObjectStore.java:6299)
> [info]   at org.apache.hadoop.hive.metastore.ObjectStore.verifySchema(ObjectStore.java:6277)
> [info]   at java.base/jdk.internal.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
> [info]   at java.base/jdk.internal.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
> [info]   at java.base/jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
> [info]   at java.base/java.lang.reflect.Method.invoke(Method.java:566)
> [info]   at org.apache.hadoop.hive.metastore.RawStoreProxy.invoke(RawStoreProxy.java:108)
> [info]   at com.sun.proxy.$Proxy13.verifySchema(Unknown Source)
> [info]   at org.apache.hadoop.hive.metastore.HiveMetaStore$HMSHandler.getMS(HiveMetaStore.java:476)
> [info]   at org.apache.hadoop.hive.metastore.HiveMetaStore$HMSHandler.createDefaultDB(HiveMetaStore.java:523)
> [info]   at org.apache.hadoop.hive.metastore.HiveMetaStore$HMSHandler.init(HiveMetaStore.java:397)
> [info]   at org.apache.hadoop.hive.metastore.HiveMetaStore$HMSHandler.<init>(HiveMetaStore.java:356)
> [info]   at org.apache.hadoop.hive.metastore.RetryingHMSHandler.<init>(RetryingHMSHandler.java:54)
> [info]   at org.apache.hadoop.hive.metastore.RetryingHMSHandler.getProxy(RetryingHMSHandler.java:59)
> [info]   at org.apache.hadoop.hive.metastore.HiveMetaStore.newHMSHandler(HiveMetaStore.java:4944)
> [info]   at org.apache.hadoop.hive.metastore.HiveMetaStoreClient.<init>(HiveMetaStoreClient.java:171)
> [info]   at java.base/jdk.internal.reflect.NativeConstructorAccessorImpl.newInstance0(Native Method)
> [info]   at java.base/jdk.internal.reflect.NativeConstructorAccessorImpl.newInstance(NativeConstructorAccessorImpl.java:62)
> [info]   at java.base/jdk.internal.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:45)
> [info]   at java.base/java.lang.reflect.Constructor.newInstance(Constructor.java:490)
> [info]   at org.apache.hadoop.hive.metastore.MetaStoreUtils.newInstance(MetaStoreUtils.java:1410)
> [info]   at org.apache.hadoop.hive.metastore.RetryingMetaStoreClient.<init>(RetryingMetaStoreClient.java:62)
> [info]   at org.apache.hadoop.hive.metastore.RetryingMetaStoreClient.getProxy(RetryingMetaStoreClient.java:72)
> [info]   at org.apache.hadoop.hive.ql.metadata.Hive.createMetaStoreClient(Hive.java:2453)
> [info]   at org.apache.hadoop.hive.ql.metadata.Hive.getMSC(Hive.java:2465)
> [info]   at org.apache.hadoop.hive.ql.session.SessionState.start(SessionState.java:340)
> [info]   at org.apache.spark.sql.hive.client.HiveClientImpl.newState(HiveClientImpl.scala:193)
> [info]   at org.apache.spark.sql.hive.client.HiveClientImpl.<init>(HiveClientImpl.scala:125)
> [info]   at java.base/jdk.internal.reflect.NativeConstructorAccessorImpl.newInstance0(Native Method)
> [info]   at java.base/jdk.internal.reflect.NativeConstructorAccessorImpl.newInstance(NativeConstructorAccessorImpl.java:62)
> [info]   at java.base/jdk.internal.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:45)
> [info]   at java.base/java.lang.reflect.Constructor.newInstance(Constructor.java:490)
> [info]   at org.apache.spark.sql.hive.client.IsolatedClientLoader.createClient(IsolatedClientLoader.scala:296)
> [info]   at org.apache.spark.sql.hive.client.HiveClientBuilder$.buildClient(HiveClientBuilder.scala:58)
> [info]   at org.apache.spark.sql.hive.client.HiveVersionSuite.buildClient(HiveVersionSuite.scala:50)
> [info]   at org.apache.spark.sql.hive.client.HiveClientSuite.init(HiveClientSuite.scala:54)
> [info]   at org.apache.spark.sql.hive.client.HiveClientSuite.beforeAll(HiveClientSuite.scala:100)
> [info]   at org.scalatest.BeforeAndAfterAll.liftedTree1$1(BeforeAndAfterAll.scala:212)
> [info]   at org.scalatest.BeforeAndAfterAll.run(BeforeAndAfterAll.scala:210)
> [info]   at org.scalatest.BeforeAndAfterAll.run$(BeforeAndAfterAll.scala:208)
> [info]   at org.apache.spark.SparkFunSuite.run(SparkFunSuite.scala:56)
> [info]   at org.scalatest.Suite.callExecuteOnSuite$1(Suite.scala:1210)
> [info]   at org.scalatest.Suite.$anonfun$runNestedSuites$1(Suite.scala:1257)
> [info]   at scala.collection.IndexedSeqOptimized.foreach(IndexedSeqOptimized.scala:36)
> [info]   at scala.collection.IndexedSeqOptimized.foreach$(IndexedSeqOptimized.scala:33)
> [info]   at scala.collection.mutable.ArrayOps$ofRef.foreach(ArrayOps.scala:198)
> [info]   at org.scalatest.Suite.runNestedSuites(Suite.scala:1255)
> [info]   at org.scalatest.Suite.runNestedSuites$(Suite.scala:1189)
> [info]   at org.apache.spark.sql.hive.client.HiveClientSuites.runNestedSuites(HiveClientSuites.scala:24)
> [info]   at org.scalatest.Suite.run(Suite.scala:1144)
> [info]   at org.scalatest.Suite.run$(Suite.scala:1129)
> [info]   at org.apache.spark.sql.hive.client.HiveClientSuites.run(HiveClientSuites.scala:24)
> [info]   at org.scalatest.tools.Framework.org$scalatest$tools$Framework$$runSuite(Framework.scala:314)
> [info]   at org.scalatest.tools.Framework$ScalaTestTask.execute(Framework.scala:507)
> [info]   at sbt.ForkMain$Run$2.call(ForkMain.java:296)
> [info]   at sbt.ForkMain$Run$2.call(ForkMain.java:286)
> [info]   at java.base/java.util.concurrent.FutureTask.run(FutureTask.java:264)
> [info]   at java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1128)
> [info]   at java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:628)
> [info]   at java.base/java.lang.Thread.run(Thread.java:834)
> Exception in thread "Thread-69" java.io.InvalidClassException: org.datanucleus.exceptions.NucleusException; local class incompatible: stream classdesc serialVersionUID = 3837245687978546013, local class serialVersionUID = 6533560396693164660
>       at java.base/java.io.ObjectStreamClass.initNonProxy(ObjectStreamClass.java:689)
>       at java.base/java.io.ObjectInputStream.readNonProxyDesc(ObjectInputStream.java:1903)
>       at java.base/java.io.ObjectInputStream.readClassDesc(ObjectInputStream.java:1772)
>       at java.base/java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:2060)
>       at java.base/java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1594)
>       at java.base/java.io.ObjectInputStream.readArray(ObjectInputStream.java:1993)
>       at java.base/java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1588)
>       at java.base/java.io.ObjectInputStream.defaultReadFields(ObjectInputStream.java:2355)
>       at java.base/java.io.ObjectInputStream.readSerialData(ObjectInputStream.java:2249)
>       at java.base/java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:2087)
>       at java.base/java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1594)
>       at java.base/java.io.ObjectInputStream.defaultReadFields(ObjectInputStream.java:2355)
>       at java.base/java.io.ObjectInputStream.readSerialData(ObjectInputStream.java:2249)
>       at java.base/java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:2087)
>       at java.base/java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1594)
>       at java.base/java.io.ObjectInputStream.defaultReadFields(ObjectInputStream.java:2355)
>       at java.base/java.io.ObjectInputStream.defaultReadObject(ObjectInputStream.java:566)
>       at java.base/java.lang.Throwable.readObject(Throwable.java:896)
>       at java.base/jdk.internal.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
>       at java.base/jdk.internal.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
>       at java.base/jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
>       at java.base/java.lang.reflect.Method.invoke(Method.java:566)
>       at java.base/java.io.ObjectStreamClass.invokeReadObject(ObjectStreamClass.java:1160)
>       at java.base/java.io.ObjectInputStream.readSerialData(ObjectInputStream.java:2216)
>       at java.base/java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:2087)
>       at java.base/java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1594)
>       at java.base/java.io.ObjectInputStream.defaultReadFields(ObjectInputStream.java:2355)
>       at java.base/java.io.ObjectInputStream.defaultReadObject(ObjectInputStream.java:566)
>       at java.base/java.lang.Throwable.readObject(Throwable.java:896)
>       at java.base/jdk.internal.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
>       at java.base/jdk.internal.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
>       at java.base/jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
>       at java.base/java.lang.reflect.Method.invoke(Method.java:566)
>       at java.base/java.io.ObjectStreamClass.invokeReadObject(ObjectStreamClass.java:1160)
>       at java.base/java.io.ObjectInputStream.readSerialData(ObjectInputStream.java:2216)
>       at java.base/java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:2087)
>       at java.base/java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1594)
>       at java.base/java.io.ObjectInputStream.defaultReadFields(ObjectInputStream.java:2355)
>       at java.base/java.io.ObjectInputStream.readSerialData(ObjectInputStream.java:2249)
>       at java.base/java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:2087)
>       at java.base/java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1594)
>       at java.base/java.io.ObjectInputStream.defaultReadFields(ObjectInputStream.java:2355)
>       at java.base/java.io.ObjectInputStream.readSerialData(ObjectInputStream.java:2249)
>       at java.base/java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:2087)
>       at java.base/java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1594)
>       at java.base/java.io.ObjectInputStream.defaultReadFields(ObjectInputStream.java:2355)
>       at java.base/java.io.ObjectInputStream.readSerialData(ObjectInputStream.java:2249)
>       at java.base/java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:2087)
>       at java.base/java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1594)
>       at java.base/java.io.ObjectInputStream.readObject(ObjectInputStream.java:430)
>       at org.scalatest.tools.Framework$ScalaTestRunner$Skeleton$1$React.react(Framework.scala:818)
>       at org.scalatest.tools.Framework$ScalaTestRunner$Skeleton$1.run(Framework.scala:807)
>       at java.base/java.lang.Thread.run(Thread.java:834)
> {noformat}
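> The bottom-most cause points at DataNucleus rather than Hive itself: RDBMSMappingManager finds no datastore mapping for java.lang.Long. This is consistent with the old DataNucleus bundled by pre-2.0 metastore clients not recognizing the JDK 9+ version scheme when registering type mappings (an inference from the trace, not stated elsewhere in this report); HMS 2.0.0+ ships a newer DataNucleus, hence the retitle to test only those versions on JDK 11. The shape change in java.version between JDK 8 and JDK 11 can be seen directly (paths below are illustrative):
> {code:sh}
> # java.version follows the "1.x" scheme on JDK 8 and the JEP 223 scheme on 9+.
> # -XshowSettings prints the runtime properties to stderr, hence the 2>&1.
> /usr/lib/jdk1.8.0/bin/java -XshowSettings:properties -version 2>&1 | grep 'java.version ='
> #     java.version = 1.8.0_xxx
> /usr/lib/jdk-11.0.3/bin/java -XshowSettings:properties -version 2>&1 | grep 'java.version ='
> #     java.version = 11.0.3
> {code}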


