[ https://issues.apache.org/jira/browse/SPARK-22918?page=com.atlassian.jira.plugin.system.issuetabpanels:all-tabpanel ]

Damian Momot updated SPARK-22918:
---------------------------------
    Description: 
After upgrading from 2.2.0 to 2.2.1, the sbt test command started to fail with the following exception:

{noformat}
java.security.AccessControlException: access denied org.apache.derby.security.SystemPermission( "engine", "usederbyinternals" )
        at java.security.AccessControlContext.checkPermission(AccessControlContext.java:472)
        at java.security.AccessController.checkPermission(AccessController.java:884)
        at org.apache.derby.iapi.security.SecurityUtil.checkDerbyInternalsPrivilege(Unknown Source)
        at org.apache.derby.iapi.services.monitor.Monitor.startMonitor(Unknown Source)
        at org.apache.derby.iapi.jdbc.JDBCBoot$1.run(Unknown Source)
        at java.security.AccessController.doPrivileged(Native Method)
        at org.apache.derby.iapi.jdbc.JDBCBoot.boot(Unknown Source)
        at org.apache.derby.iapi.jdbc.JDBCBoot.boot(Unknown Source)
        at org.apache.derby.jdbc.EmbeddedDriver.boot(Unknown Source)
        at org.apache.derby.jdbc.EmbeddedDriver.<clinit>(Unknown Source)
        at sun.reflect.NativeConstructorAccessorImpl.newInstance0(Native Method)
        at sun.reflect.NativeConstructorAccessorImpl.newInstance(NativeConstructorAccessorImpl.java:62)
        at sun.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:45)
        at java.lang.reflect.Constructor.newInstance(Constructor.java:423)
        at java.lang.Class.newInstance(Class.java:442)
        at org.datanucleus.store.rdbms.connectionpool.AbstractConnectionPoolFactory.loadDriver(AbstractConnectionPoolFactory.java:47)
        at org.datanucleus.store.rdbms.connectionpool.BoneCPConnectionPoolFactory.createConnectionPool(BoneCPConnectionPoolFactory.java:54)
        at org.datanucleus.store.rdbms.ConnectionFactoryImpl.generateDataSources(ConnectionFactoryImpl.java:238)
        at org.datanucleus.store.rdbms.ConnectionFactoryImpl.initialiseDataSources(ConnectionFactoryImpl.java:131)
        at org.datanucleus.store.rdbms.ConnectionFactoryImpl.<init>(ConnectionFactoryImpl.java:85)
        at sun.reflect.NativeConstructorAccessorImpl.newInstance0(Native Method)
        at sun.reflect.NativeConstructorAccessorImpl.newInstance(NativeConstructorAccessorImpl.java:62)
        at sun.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:45)
        at java.lang.reflect.Constructor.newInstance(Constructor.java:423)
        at org.datanucleus.plugin.NonManagedPluginRegistry.createExecutableExtension(NonManagedPluginRegistry.java:631)
        at org.datanucleus.plugin.PluginManager.createExecutableExtension(PluginManager.java:325)
        at org.datanucleus.store.AbstractStoreManager.registerConnectionFactory(AbstractStoreManager.java:282)
        at org.datanucleus.store.AbstractStoreManager.<init>(AbstractStoreManager.java:240)
        at org.datanucleus.store.rdbms.RDBMSStoreManager.<init>(RDBMSStoreManager.java:286)
        at sun.reflect.NativeConstructorAccessorImpl.newInstance0(Native Method)
        at sun.reflect.NativeConstructorAccessorImpl.newInstance(NativeConstructorAccessorImpl.java:62)
        at sun.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:45)
        at java.lang.reflect.Constructor.newInstance(Constructor.java:423)
        at org.datanucleus.plugin.NonManagedPluginRegistry.createExecutableExtension(NonManagedPluginRegistry.java:631)
        at org.datanucleus.plugin.PluginManager.createExecutableExtension(PluginManager.java:301)
        at org.datanucleus.NucleusContext.createStoreManagerForProperties(NucleusContext.java:1187)
        at org.datanucleus.NucleusContext.initialise(NucleusContext.java:356)
        at org.datanucleus.api.jdo.JDOPersistenceManagerFactory.freezeConfiguration(JDOPersistenceManagerFactory.java:775)
        at org.datanucleus.api.jdo.JDOPersistenceManagerFactory.createPersistenceManagerFactory(JDOPersistenceManagerFactory.java:333)
        at org.datanucleus.api.jdo.JDOPersistenceManagerFactory.getPersistenceManagerFactory(JDOPersistenceManagerFactory.java:202)
        at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
        at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
        at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
        at java.lang.reflect.Method.invoke(Method.java:498)
        at javax.jdo.JDOHelper$16.run(JDOHelper.java:1965)
        at java.security.AccessController.doPrivileged(Native Method)
        at javax.jdo.JDOHelper.invoke(JDOHelper.java:1960)
        at javax.jdo.JDOHelper.invokeGetPersistenceManagerFactoryOnImplementation(JDOHelper.java:1166)
        at javax.jdo.JDOHelper.getPersistenceManagerFactory(JDOHelper.java:808)
        at javax.jdo.JDOHelper.getPersistenceManagerFactory(JDOHelper.java:701)
        at org.apache.hadoop.hive.metastore.ObjectStore.getPMF(ObjectStore.java:365)
        at org.apache.hadoop.hive.metastore.ObjectStore.getPersistenceManager(ObjectStore.java:394)
        at org.apache.hadoop.hive.metastore.ObjectStore.initialize(ObjectStore.java:291)
        at org.apache.hadoop.hive.metastore.ObjectStore.setConf(ObjectStore.java:258)
        at org.apache.hadoop.util.ReflectionUtils.setConf(ReflectionUtils.java:73)
        at org.apache.hadoop.util.ReflectionUtils.newInstance(ReflectionUtils.java:133)
        at org.apache.hadoop.hive.metastore.RawStoreProxy.<init>(RawStoreProxy.java:57)
        at org.apache.hadoop.hive.metastore.RawStoreProxy.getProxy(RawStoreProxy.java:66)
        at org.apache.hadoop.hive.metastore.HiveMetaStore$HMSHandler.newRawStore(HiveMetaStore.java:593)
        at org.apache.hadoop.hive.metastore.HiveMetaStore$HMSHandler.getMS(HiveMetaStore.java:571)
        at org.apache.hadoop.hive.metastore.HiveMetaStore$HMSHandler.createDefaultDB(HiveMetaStore.java:620)
        at org.apache.hadoop.hive.metastore.HiveMetaStore$HMSHandler.init(HiveMetaStore.java:461)
        at org.apache.hadoop.hive.metastore.RetryingHMSHandler.<init>(RetryingHMSHandler.java:66)
        at org.apache.hadoop.hive.metastore.RetryingHMSHandler.getProxy(RetryingHMSHandler.java:72)
        at org.apache.hadoop.hive.metastore.HiveMetaStore.newRetryingHMSHandler(HiveMetaStore.java:5762)
        at org.apache.hadoop.hive.metastore.HiveMetaStoreClient.<init>(HiveMetaStoreClient.java:199)
        at org.apache.hadoop.hive.ql.metadata.SessionHiveMetaStoreClient.<init>(SessionHiveMetaStoreClient.java:74)
        at sun.reflect.NativeConstructorAccessorImpl.newInstance0(Native Method)
        at sun.reflect.NativeConstructorAccessorImpl.newInstance(NativeConstructorAccessorImpl.java:62)
        at sun.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:45)
        at java.lang.reflect.Constructor.newInstance(Constructor.java:423)
        at org.apache.hadoop.hive.metastore.MetaStoreUtils.newInstance(MetaStoreUtils.java:1521)
        at org.apache.hadoop.hive.metastore.RetryingMetaStoreClient.<init>(RetryingMetaStoreClient.java:86)
        at org.apache.hadoop.hive.metastore.RetryingMetaStoreClient.getProxy(RetryingMetaStoreClient.java:132)
        at org.apache.hadoop.hive.metastore.RetryingMetaStoreClient.getProxy(RetryingMetaStoreClient.java:104)
        at org.apache.hadoop.hive.ql.metadata.Hive.createMetaStoreClient(Hive.java:3005)
        at org.apache.hadoop.hive.ql.metadata.Hive.getMSC(Hive.java:3024)
        at org.apache.hadoop.hive.ql.metadata.Hive.getAllDatabases(Hive.java:1234)
        at org.apache.hadoop.hive.ql.metadata.Hive.reloadFunctions(Hive.java:174)
        at org.apache.hadoop.hive.ql.metadata.Hive.<clinit>(Hive.java:166)
        at org.apache.hadoop.hive.ql.session.SessionState.start(SessionState.java:503)
        at org.apache.spark.sql.hive.client.HiveClientImpl.<init>(HiveClientImpl.scala:191)
        at sun.reflect.NativeConstructorAccessorImpl.newInstance0(Native Method)
        at sun.reflect.NativeConstructorAccessorImpl.newInstance(NativeConstructorAccessorImpl.java:62)
        at sun.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:45)
        at java.lang.reflect.Constructor.newInstance(Constructor.java:423)
        at org.apache.spark.sql.hive.client.IsolatedClientLoader.createClient(IsolatedClientLoader.scala:264)
        at org.apache.spark.sql.hive.HiveUtils$.newClientForMetadata(HiveUtils.scala:362)
        at org.apache.spark.sql.hive.HiveUtils$.newClientForMetadata(HiveUtils.scala:266)
        at org.apache.spark.sql.hive.HiveExternalCatalog.client$lzycompute(HiveExternalCatalog.scala:66)
        at org.apache.spark.sql.hive.HiveExternalCatalog.client(HiveExternalCatalog.scala:65)
        at org.apache.spark.sql.hive.HiveExternalCatalog$$anonfun$databaseExists$1.apply$mcZ$sp(HiveExternalCatalog.scala:195)
        at org.apache.spark.sql.hive.HiveExternalCatalog$$anonfun$databaseExists$1.apply(HiveExternalCatalog.scala:195)
        at org.apache.spark.sql.hive.HiveExternalCatalog$$anonfun$databaseExists$1.apply(HiveExternalCatalog.scala:195)
        at org.apache.spark.sql.hive.HiveExternalCatalog.withClient(HiveExternalCatalog.scala:97)
        at org.apache.spark.sql.hive.HiveExternalCatalog.databaseExists(HiveExternalCatalog.scala:194)
        at org.apache.spark.sql.internal.SharedState.externalCatalog$lzycompute(SharedState.scala:105)
        at org.apache.spark.sql.internal.SharedState.externalCatalog(SharedState.scala:93)
        at org.apache.spark.sql.hive.HiveSessionStateBuilder.externalCatalog(HiveSessionStateBuilder.scala:39)
        at org.apache.spark.sql.hive.HiveSessionStateBuilder.catalog$lzycompute(HiveSessionStateBuilder.scala:54)
        at org.apache.spark.sql.hive.HiveSessionStateBuilder.catalog(HiveSessionStateBuilder.scala:52)
        at org.apache.spark.sql.hive.HiveSessionStateBuilder.catalog(HiveSessionStateBuilder.scala:35)
        at org.apache.spark.sql.internal.BaseSessionStateBuilder.build(BaseSessionStateBuilder.scala:289)
        at org.apache.spark.sql.SparkSession$.org$apache$spark$sql$SparkSession$$instantiateSessionState(SparkSession.scala:1059)
        at org.apache.spark.sql.SparkSession$$anonfun$sessionState$2.apply(SparkSession.scala:137)
        at org.apache.spark.sql.SparkSession$$anonfun$sessionState$2.apply(SparkSession.scala:136)
        at scala.Option.getOrElse(Option.scala:121)
        at org.apache.spark.sql.SparkSession.sessionState$lzycompute(SparkSession.scala:136)
        at org.apache.spark.sql.SparkSession.sessionState(SparkSession.scala:133)
        at org.apache.spark.sql.SparkSession.sql(SparkSession.scala:632)
        at pl.allegro.analytics.marvel.clickstreamenrichment.ClickstreamSuite$$anonfun$beforeAll$2.apply(ClickstreamSuite.scala:26)
        at pl.allegro.analytics.marvel.clickstreamenrichment.ClickstreamSuite$$anonfun$beforeAll$2.apply(ClickstreamSuite.scala:25)
        at scala.collection.immutable.List.foreach(List.scala:392)
        at pl.allegro.analytics.marvel.clickstreamenrichment.ClickstreamSuite$class.beforeAll(ClickstreamSuite.scala:25)
        at pl.allegro.analytics.marvel.clickstreamenrichment.integration.ItemCategoryIdEnrichementIntegrationSpec.beforeAll(ItemCategoryIdEnrichementIntegrationSpec.scala:10)
        at org.scalatest.BeforeAndAfterAll$class.liftedTree1$1(BeforeAndAfterAll.scala:212)
        at org.scalatest.BeforeAndAfterAll$class.run(BeforeAndAfterAll.scala:210)
        at pl.allegro.analytics.marvel.clickstreamenrichment.integration.ItemCategoryIdEnrichementIntegrationSpec.run(ItemCategoryIdEnrichementIntegrationSpec.scala:10)
        at org.scalatest.tools.Framework.org$scalatest$tools$Framework$$runSuite(Framework.scala:314)
        at org.scalatest.tools.Framework$ScalaTestTask.execute(Framework.scala:480)
        at sbt.TestRunner.runTest$1(TestFramework.scala:76)
        at sbt.TestRunner.run(TestFramework.scala:85)
        at sbt.TestFramework$$anon$2$$anonfun$$init$$1$$anonfun$apply$8.apply(TestFramework.scala:202)
        at sbt.TestFramework$$anon$2$$anonfun$$init$$1$$anonfun$apply$8.apply(TestFramework.scala:202)
        at sbt.TestFramework$.sbt$TestFramework$$withContextLoader(TestFramework.scala:185)
        at sbt.TestFramework$$anon$2$$anonfun$$init$$1.apply(TestFramework.scala:202)
        at sbt.TestFramework$$anon$2$$anonfun$$init$$1.apply(TestFramework.scala:202)
        at sbt.TestFunction.apply(TestFramework.scala:207)
        at sbt.Tests$.sbt$Tests$$processRunnable$1(Tests.scala:239)
        at sbt.Tests$$anonfun$makeSerial$1.apply(Tests.scala:245)
        at sbt.Tests$$anonfun$makeSerial$1.apply(Tests.scala:245)
        at sbt.std.Transform$$anon$3$$anonfun$apply$2.apply(System.scala:44)
        at sbt.std.Transform$$anon$3$$anonfun$apply$2.apply(System.scala:44)
        at sbt.std.Transform$$anon$4.work(System.scala:63)
        at sbt.Execute$$anonfun$submit$1$$anonfun$apply$1.apply(Execute.scala:228)
        at sbt.Execute$$anonfun$submit$1$$anonfun$apply$1.apply(Execute.scala:228)
        at sbt.ErrorHandling$.wideConvert(ErrorHandling.scala:17)
        at sbt.Execute.work(Execute.scala:237)
        at sbt.Execute$$anonfun$submit$1.apply(Execute.scala:228)
        at sbt.Execute$$anonfun$submit$1.apply(Execute.scala:228)
        at sbt.ConcurrentRestrictions$$anon$4$$anonfun$1.apply(ConcurrentRestrictions.scala:159)
        at sbt.CompletionService$$anon$2.call(CompletionService.scala:28)
        at java.util.concurrent.FutureTask.run(FutureTask.java:266)
        at java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:511)
        at java.util.concurrent.FutureTask.run(FutureTask.java:266)
        at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149)
        at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624)
        at java.lang.Thread.run(Thread.java:748)
{noformat}

It's most likely related to https://issues.apache.org/jira/browse/DERBY-6648
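
As far as I can tell, DERBY-6648 made Derby's embedded engine check a SystemPermission( "engine", "usederbyinternals" ) at boot, while sbt runs tests in-process under the SecurityManager it installs to trap System.exit, and that manager denies the permission. A minimal sketch of one workaround (my own assumption from reading DERBY-6648, not a confirmed fix, and only acceptable if the tests don't otherwise rely on a security manager):

{code:scala}
// Hypothetical workaround sketch: clear sbt's SecurityManager before the
// embedded Derby metastore boots, so the usederbyinternals check passes.
// Must run in test setup, before SparkSession.builder().getOrCreate().
System.setSecurityManager(null)
{code}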

In my case, the following initialization is used:

{code:scala}
    val session = SparkSession.builder()
      .enableHiveSupport()
      .master("local[4]")
      .config("spark.sql.warehouse.dir", tempDir.getAbsolutePath)
      .config("spark.serializer", "org.apache.spark.serializer.KryoSerializer")
      .config("spark.sql.parquet.compression.codec", "snappy")
      .config("spark.kryo.registrator", classOf[BillingsKryoRegistrator].getCanonicalName)
      .config("spark.sql.shuffle.partitions", 10)
      .getOrCreate()

    session.sparkContext.hadoopConfiguration.setStrings("parquet.enable.summary-metadata", "false")

    session
{code}
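
The enableHiveSupport() call is what triggers the failure: with no external metastore configured, Spark boots an embedded Derby-backed Hive metastore inside the test JVM, and that is where the permission check fires.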


I've also found another report of the same issue on Stack Overflow:
https://stackoverflow.com/questions/48008343/sbt-test-does-not-work-for-spark-test
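
Until this is resolved in Spark itself, forking the test JVM also appears to sidestep the problem, since a forked JVM never has sbt's SecurityManager installed. A hedged build.sbt sketch (sbt 0.13-style syntax; an assumption on my part, not a verified fix):

{code}
// Run tests in a forked JVM, where sbt's exit-trapping SecurityManager
// is absent, so Derby's usederbyinternals check succeeds.
fork in Test := true
{code}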

> sbt test (spark - local) fail after upgrading to 2.2.1 with: 
> java.security.AccessControlException: access denied 
> org.apache.derby.security.SystemPermission( "engine", "usederbyinternals" )
> --------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------
>
>                 Key: SPARK-22918
>                 URL: https://issues.apache.org/jira/browse/SPARK-22918
>             Project: Spark
>          Issue Type: Bug
>          Components: Spark Core
>    Affects Versions: 2.2.1
>            Reporter: Damian Momot


