[ 
https://issues.apache.org/jira/browse/SPARK-26842?page=com.atlassian.jira.plugin.system.issuetabpanels:all-tabpanel
 ]

Marcelo Vanzin resolved SPARK-26842.
------------------------------------
    Resolution: Invalid

Java 11 is not supported in Spark 2.4.

>  java.lang.IllegalArgumentException: Unsupported class file major version 55 
> -----------------------------------------------------------------------------
>
>                 Key: SPARK-26842
>                 URL: https://issues.apache.org/jira/browse/SPARK-26842
>             Project: Spark
>          Issue Type: Bug
>          Components: DStreams
>    Affects Versions: 2.4.0
>         Environment: Java:
> openjdk version "11.0.1" 2018-10-16 LTS
> OpenJDK Runtime Environment Zulu11.2+3 (build 11.0.1+13-LTS) OpenJDK 64-Bit 
> Server VM Zulu11.2+3 (build 11.0.1+13-LTS, mixed mode)
>  
> Maven: (Spark Streaming)
> <dependency>
>                 <groupId>org.apache.spark</groupId>
>                 <artifactId>spark-streaming-kafka-0-10_2.11</artifactId>
>                 <version>2.4.0</version>
> </dependency>
> <dependency>
>                 <groupId>org.apache.spark</groupId>
>                 <artifactId>spark-streaming_2.11</artifactId>
>                 <version>2.4.0</version>
> </dependency>
>            Reporter: Ranjit Hande
>            Priority: Major
>
> Getting following Runtime Error with Java 11:
> {"@timestamp":"2019-02-07T11:54:30.624+05:30","@version":"1","message":"Application
>  run 
> failed","logger_name":"org.springframework.boot.SpringApplication","thread_name":"main","level":"ERROR","level_value":40000,"stack_trace":"java.lang.IllegalStateException:
>  Failed to execute CommandLineRunner at 
> org.springframework.boot.SpringApplication.callRunner(SpringApplication.java:816)
>  at 
> org.springframework.boot.SpringApplication.callRunners(SpringApplication.java:797)
>  at 
> org.springframework.boot.SpringApplication.run(SpringApplication.java:324) at 
> com.avaya.measures.AgentMeasures.AgentMeasuresApplication.main(AgentMeasuresApplication.java:41)
>  at java.base/jdk.internal.reflect.NativeMethodAccessorImpl.invoke0(Native 
> Method) at 
> java.base/jdk.internal.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
>  at 
> java.base/jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
>  at java.base/java.lang.reflect.Method.invoke(Method.java:566) at 
> org.springframework.boot.loader.MainMethodRunner.run(MainMethodRunner.java:48)
>  at org.springframework.boot.loader.Launcher.launch(Launcher.java:87) at
> org.springframework.boot.loader.Launcher.launch(Launcher.java:50) at 
> org.springframework.boot.loader.JarLauncher.main(JarLauncher.java:51)\r\n*{color:#FF0000}Caused
>  by: java.lang.IllegalArgumentException: Unsupported class file major version 
> 55{color}* at 
>  org.apache.xbean.asm6.ClassReader.<init>(ClassReader.java:166) at 
> org.apache.xbean.asm6.ClassReader.<init>(ClassReader.java:148) at 
> org.apache.xbean.asm6.ClassReader.<init>(ClassReader.java:136) at 
> org.apache.xbean.asm6.ClassReader.<init>(ClassReader.java:237) at 
> org.apache.spark.util.ClosureCleaner$.getClassReader(ClosureCleaner.scala:49) 
> at 
> org.apache.spark.util.FieldAccessFinder$$anon$3$$anonfun$visitMethodInsn$2.apply(ClosureCleaner.scala:517)
>  at 
> org.apache.spark.util.FieldAccessFinder$$anon$3$$anonfun$visitMethodInsn$2.apply(ClosureCleaner.scala:500)
>  at 
> scala.collection.TraversableLike$WithFilter$$anonfun$foreach$1.apply(TraversableLike.scala:733)
>  at 
> scala.collection.mutable.HashMap$$anon$1$$anonfun$foreach$2.apply(HashMap.scala:134)
>  at 
> scala.collection.mutable.HashMap$$anon$1$$anonfun$foreach$2.apply(HashMap.scala:134)
>  at 
> scala.collection.mutable.HashTable$class.foreachEntry(HashTable.scala:236) at 
> scala.collection.mutable.HashMap.foreachEntry(HashMap.scala:40) at 
> scala.collection.mutable.HashMap$$anon$1.foreach(HashMap.scala:134) at 
> scala.collection.TraversableLike$WithFilter.foreach(TraversableLike.scala:732)
>  at 
> org.apache.spark.util.FieldAccessFinder$$anon$3.visitMethodInsn(ClosureCleaner.scala:500)
>  at org.apache.xbean.asm6.ClassReader.readCode(ClassReader.java:2175) at 
> org.apache.xbean.asm6.ClassReader.readMethod(ClassReader.java:1238) at 
> org.apache.xbean.asm6.ClassReader.accept(ClassReader.java:631) at 
> org.apache.xbean.asm6.ClassReader.accept(ClassReader.java:355) at 
> org.apache.spark.util.ClosureCleaner$$anonfun$org$apache$spark$util$ClosureCleaner$$clean$14.apply(ClosureCleaner.scala:307)
>  at 
> org.apache.spark.util.ClosureCleaner$$anonfun$org$apache$spark$util$ClosureCleaner$$clean$14.apply(ClosureCleaner.scala:306)
>  at scala.collection.immutable.List.foreach(List.scala:392) at 
> org.apache.spark.util.ClosureCleaner$.org$apache$spark$util$ClosureCleaner$$clean(ClosureCleaner.scala:306)
>  at org.apache.spark.util.ClosureCleaner$.clean(ClosureCleaner.scala:162) at 
> org.apache.spark.SparkContext.clean(SparkContext.scala:2326) at 
> org.apache.spark.SparkContext.runJob(SparkContext.scala:2100) at 
> org.apache.spark.rdd.RDD$$anonfun$take$1.apply(RDD.scala:1364) at 
> org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:151)
>  at 
> org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:112)
>  at org.apache.spark.rdd.RDD.withScope(RDD.scala:363) at 
> org.apache.spark.rdd.RDD.take(RDD.scala:1337) at 
> org.apache.spark.streaming.dstream.DStream$$anonfun$print$2$$anonfun$foreachFunc$3$1.apply(DStream.scala:735)
>  at 
> org.apache.spark.streaming.dstream.DStream$$anonfun$print$2$$anonfun$foreachFunc$3$1.apply(DStream.scala:734)
>  at 
> org.apache.spark.streaming.dstream.ForEachDStream$$anonfun$1$$anonfun$apply$mcV$sp$1.apply$mcV$sp(ForEachDStream.scala:51)
>  at 
> org.apache.spark.streaming.dstream.ForEachDStream$$anonfun$1$$anonfun$apply$mcV$sp$1.apply(ForEachDStream.scala:51)
>  at 
> org.apache.spark.streaming.dstream.ForEachDStream$$anonfun$1$$anonfun$apply$mcV$sp$1.apply(ForEachDStream.scala:51)
>  at 
> org.apache.spark.streaming.dstream.DStream.createRDDWithLocalProperties(DStream.scala:416)
>  at 
> org.apache.spark.streaming.dstream.ForEachDStream$$anonfun$1.apply$mcV$sp(ForEachDStream.scala:50)
>  at 
> org.apache.spark.streaming.dstream.ForEachDStream$$anonfun$1.apply(ForEachDStream.scala:50)
>  at 
> org.apache.spark.streaming.dstream.ForEachDStream$$anonfun$1.apply(ForEachDStream.scala:50)
>  at scala.util.Try$.apply(Try.scala:192) at 
> org.apache.spark.streaming.scheduler.Job.run(Job.scala:39) at 
> org.apache.spark.streaming.scheduler.JobScheduler$JobHandler$$anonfun$run$1.apply$mcV$sp(JobScheduler.scala:257)
>  at 
> org.apache.spark.streaming.scheduler.JobScheduler$JobHandler$$anonfun$run$1.apply(JobScheduler.scala:257)
>  at 
> org.apache.spark.streaming.scheduler.JobScheduler$JobHandler$$anonfun$run$1.apply(JobScheduler.scala:257)
>  at scala.util.DynamicVariable.withValue(DynamicVariable.scala:58) at 
> org.apache.spark.streaming.scheduler.JobScheduler$JobHandler.run(JobScheduler.scala:256)
>  at 
> java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1128)
>  at 
> java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:628)
>  at java.base/java.lang.Thread.run(Thread.java:834)\r\n"}



--
This message was sent by Atlassian JIRA
(v7.6.3#76005)

---------------------------------------------------------------------
To unsubscribe, e-mail: issues-unsubscribe@spark.apache.org
For additional commands, e-mail: issues-help@spark.apache.org

Reply via email to