[ 
https://issues.apache.org/jira/browse/SPARK-24573?page=com.atlassian.jira.plugin.system.issuetabpanels:all-tabpanel
 ]

Hyukjin Kwon reassigned SPARK-24573:
------------------------------------

    Assignee: Hyukjin Kwon

> SBT Java checkstyle affecting the build
> ---------------------------------------
>
>                 Key: SPARK-24573
>                 URL: https://issues.apache.org/jira/browse/SPARK-24573
>             Project: Spark
>          Issue Type: Bug
>          Components: Project Infra
>    Affects Versions: 2.4.0
>            Reporter: Hyukjin Kwon
>            Assignee: Hyukjin Kwon
>            Priority: Major
>             Fix For: 2.4.0
>
>
> It seems the Java checkstyle step affects the build in Jenkins. I can't 
> reproduce the problem locally; it only shows up in Jenkins.
> When a PR contains Java changes, the build consistently fails during compilation as below:
> {code}
> [warn] 
> /home/jenkins/workspace/SparkPullRequestBuilder/resource-managers/kubernetes/core/src/test/scala/org/apache/spark/scheduler/cluster/k8s/ExecutorPodsAllocatorSuite.scala:160:
>  non-variable type argument 
> org.apache.spark.deploy.k8s.KubernetesExecutorSpecificConf in type 
> org.apache.spark.deploy.k8s.KubernetesConf[org.apache.spark.deploy.k8s.KubernetesExecutorSpecificConf]
>  is unchecked since it is eliminated by erasure
> [warn]         if 
> (!argument.isInstanceOf[KubernetesConf[KubernetesExecutorSpecificConf]]) {
> [warn]                                   ^
> java.util.concurrent.ExecutionException: java.lang.OutOfMemoryError: GC 
> overhead limit exceeded
>       at java.util.concurrent.FutureTask.report(FutureTask.java:122)
>       at java.util.concurrent.FutureTask.get(FutureTask.java:192)
>       at 
> sbt.ConcurrentRestrictions$$anon$4.take(ConcurrentRestrictions.scala:188)
>       at sbt.Execute.next$1(Execute.scala:85)
>       at sbt.Execute.processAll(Execute.scala:88)
>       at sbt.Execute.runKeep(Execute.scala:68)
>       at sbt.EvaluateTask$.liftedTree1$1(EvaluateTask.scala:359)
>       at sbt.EvaluateTask$.run$1(EvaluateTask.scala:358)
>       at sbt.EvaluateTask$.runTask(EvaluateTask.scala:378)
>       at sbt.Aggregation$$anonfun$3.apply(Aggregation.scala:69)
>       at sbt.Aggregation$$anonfun$3.apply(Aggregation.scala:67)
>       at sbt.EvaluateTask$.withStreams(EvaluateTask.scala:314)
>       at sbt.Aggregation$.timedRun(Aggregation.scala:67)
>       at sbt.Aggregation$.runTasks(Aggregation.scala:76)
>       at sbt.Aggregation$$anonfun$applyTasks$1.apply(Aggregation.scala:37)
>       at sbt.Aggregation$$anonfun$applyTasks$1.apply(Aggregation.scala:36)
>       at 
> sbt.Command$$anonfun$applyEffect$2$$anonfun$apply$3.apply(Command.scala:61)
>       at 
> sbt.Command$$anonfun$applyEffect$2$$anonfun$apply$3.apply(Command.scala:61)
>       at 
> sbt.Aggregation$$anonfun$evaluatingParser$4$$anonfun$apply$5.apply(Aggregation.scala:158)
>       at 
> sbt.Aggregation$$anonfun$evaluatingParser$4$$anonfun$apply$5.apply(Aggregation.scala:157)
>       at 
> sbt.Act$$anonfun$sbt$Act$$actParser0$1$$anonfun$sbt$Act$$anonfun$$evaluate$1$1$$anonfun$apply$10.apply(Act.scala:253)
>       at 
> sbt.Act$$anonfun$sbt$Act$$actParser0$1$$anonfun$sbt$Act$$anonfun$$evaluate$1$1$$anonfun$apply$10.apply(Act.scala:250)
>       at sbt.Command$.process(Command.scala:93)
>       at sbt.MainLoop$$anonfun$1$$anonfun$apply$1.apply(MainLoop.scala:96)
>       at sbt.MainLoop$$anonfun$1$$anonfun$apply$1.apply(MainLoop.scala:96)
>       at sbt.State$$anon$1.runCmd$1(State.scala:183)
>       at sbt.State$$anon$1.process(State.scala:187)
>       at sbt.MainLoop$$anonfun$1.apply(MainLoop.scala:96)
>       at sbt.MainLoop$$anonfun$1.apply(MainLoop.scala:96)
>       at sbt.ErrorHandling$.wideConvert(ErrorHandling.scala:17)
>       at sbt.MainLoop$.next(MainLoop.scala:96)
>       at sbt.MainLoop$.run(MainLoop.scala:89)
>       at sbt.MainLoop$$anonfun$runWithNewLog$1.apply(MainLoop.scala:68)
>       at sbt.MainLoop$$anonfun$runWithNewLog$1.apply(MainLoop.scala:63)
>       at sbt.Using.apply(Using.scala:24)
>       at sbt.MainLoop$.runWithNewLog(MainLoop.scala:63)
>       at sbt.MainLoop$.runAndClearLast(MainLoop.scala:46)
>       at sbt.MainLoop$.runLoggedLoop(MainLoop.scala:30)
>       at sbt.MainLoop$.runLogged(MainLoop.scala:22)
>       at sbt.StandardMain$.runManaged(Main.scala:61)
>       at sbt.xMain.run(Main.scala:35)
>       at xsbt.boot.Launch$$anonfun$run$1.apply(Launch.scala:109)
>       at xsbt.boot.Launch$.withContextLoader(Launch.scala:128)
>       at xsbt.boot.Launch$.run(Launch.scala:109)
>       at xsbt.boot.Launch$$anonfun$apply$1.apply(Launch.scala:35)
>       at xsbt.boot.Launch$.launch(Launch.scala:117)
>       at xsbt.boot.Launch$.apply(Launch.scala:18)
>       at xsbt.boot.Boot$.runImpl(Boot.scala:41)
>       at xsbt.boot.Boot$.main(Boot.scala:17)
>       at xsbt.boot.Boot.main(Boot.scala)
> Caused by: java.lang.OutOfMemoryError: GC overhead limit exceeded
>       at 
> scala.tools.nsc.symtab.classfile.ClassfileParser$ConstantPool.<init>(ClassfileParser.scala:170)
>       at 
> scala.tools.nsc.symtab.SymbolLoaders$ClassfileLoader$classfileParser$.newConstantPool(SymbolLoaders.scala:317)
>       at 
> scala.tools.nsc.symtab.classfile.ClassfileParser.parse(ClassfileParser.scala:139)
>       at 
> scala.tools.nsc.symtab.SymbolLoaders$ClassfileLoader$$anonfun$doComplete$2.apply$mcV$sp(SymbolLoaders.scala:347)
>       at 
> scala.tools.nsc.symtab.SymbolLoaders$ClassfileLoader$$anonfun$doComplete$2.apply(SymbolLoaders.scala:347)
>       at 
> scala.tools.nsc.symtab.SymbolLoaders$ClassfileLoader$$anonfun$doComplete$2.apply(SymbolLoaders.scala:347)
>       at 
> scala.reflect.internal.SymbolTable.enteringPhase(SymbolTable.scala:235)
>       at 
> scala.tools.nsc.symtab.SymbolLoaders$ClassfileLoader.doComplete(SymbolLoaders.scala:347)
>       at 
> scala.tools.nsc.symtab.SymbolLoaders$SymbolLoader.complete(SymbolLoaders.scala:211)
>       at 
> scala.tools.nsc.symtab.SymbolLoaders$SymbolLoader.load(SymbolLoaders.scala:227)
>       at scala.reflect.internal.Symbols$Symbol.typeParams(Symbols.scala:1733)
>       at 
> scala.reflect.internal.Types$class.isRawIfWithoutArgs(Types.scala:3756)
>       at 
> scala.reflect.internal.SymbolTable.isRawIfWithoutArgs(SymbolTable.scala:16)
>       at scala.reflect.internal.tpe.TypeMaps$$anon$1.apply(TypeMaps.scala:328)
>       at 
> scala.reflect.internal.tpe.TypeMaps$TypeMap.applyToSymbolInfo(TypeMaps.scala:218)
>       at 
> scala.reflect.internal.tpe.TypeMaps$TypeMap.loop$1(TypeMaps.scala:227)
>       at 
> scala.reflect.internal.tpe.TypeMaps$TypeMap.noChangeToSymbols(TypeMaps.scala:229)
>       at 
> scala.reflect.internal.tpe.TypeMaps$TypeMap.mapOver(TypeMaps.scala:243)
>       at 
> scala.reflect.internal.tpe.TypeMaps$TypeMap.mapOver(TypeMaps.scala:128)
>       at scala.reflect.internal.tpe.TypeMaps$$anon$1.apply(TypeMaps.scala:338)
>       at scala.reflect.internal.tpe.TypeMaps$$anon$1.apply(TypeMaps.scala:325)
>       at scala.reflect.internal.Symbols$Symbol.modifyInfo(Symbols.scala:1542)
>       at 
> scala.reflect.internal.Symbols$Symbol$$anonfun$cookJavaRawInfo$2.apply(Symbols.scala:1690)
>       at 
> scala.reflect.internal.Symbols$Symbol$$anonfun$cookJavaRawInfo$2.apply(Symbols.scala:1690)
>       at 
> scala.collection.TraversableLike$WithFilter$$anonfun$foreach$1.apply(TraversableLike.scala:733)
>       at scala.collection.immutable.List.foreach(List.scala:381)
>       at 
> scala.collection.TraversableLike$WithFilter.foreach(TraversableLike.scala:732)
>       at 
> scala.reflect.internal.Symbols$Symbol.cookJavaRawInfo(Symbols.scala:1690)
>       at 
> scala.tools.nsc.typechecker.Infer$Inferencer.checkAccessible(Infer.scala:270)
>       at 
> scala.tools.nsc.typechecker.Typers$Typer.scala$tools$nsc$typechecker$Typers$Typer$$makeAccessible(Typers.scala:559)
>       at 
> scala.tools.nsc.typechecker.Typers$Typer$$anonfun$106.apply(Typers.scala:4757)
>       at 
> scala.tools.nsc.typechecker.Typers$Typer$$anonfun$106.apply(Typers.scala:4757)
> [error] java.util.concurrent.ExecutionException: java.lang.OutOfMemoryError: 
> GC overhead limit exceeded
> [error] Use 'last' for the full log.
> [error] running /home/jenkins/workspace/SparkPullRequestBuilder/build/sbt 
> -Phadoop-2.6 -Pkubernetes -Phive-thriftserver -Pflume -Pkinesis-asl -Pyarn 
> -Pkafka-0-8 -Phive -Pmesos test:package streaming-kafka-0-8-assembly/assembly 
> streaming-flume-assembly/assembly streaming-kinesis-asl-assembly/assembly ; 
> received return code 1
> {code} 
> See also 
> https://amplab.cs.berkeley.edu/jenkins/job/SparkPullRequestBuilder/91980/console



--
This message was sent by Atlassian JIRA
(v7.6.3#76005)

---------------------------------------------------------------------
To unsubscribe, e-mail: issues-unsubscr...@spark.apache.org
For additional commands, e-mail: issues-h...@spark.apache.org

Reply via email to