This is a known issue:
https://issues.apache.org/jira/browse/SPARK-8461?filter=-1

Will be fixed soon by https://github.com/apache/spark/pull/6898

On Fri, Jun 19, 2015 at 5:50 AM, Animesh Baranawal
<animeshbarana...@gmail.com> wrote:
> I am trying to perform some insert column operations in a dataframe. Following
> is the code I used:
>
>> val df = sqlContext.read.json("examples/src/main/resources/people.json")
>> df.show() { works correctly }
>> df.withColumn("age", df.col("name") ) { works correctly }
>> df.withColumn("age", df.col("name") ).show() { gives ERROR }
>> df.withColumn("arbitrary", df.col("name") ).show() { gives ERROR }
>
> This is the ERROR LOG :
>
> """
> ERROR GenerateMutableProjection: failed to compile:
>
>       import org.apache.spark.sql.catalyst.InternalRow;
>
>       public SpecificProjection
> generate(org.apache.spark.sql.catalyst.expressions.Expression[] expr) {
>         return new SpecificProjection(expr);
>       }
>
>       class SpecificProjection extends
> org.apache.spark.sql.catalyst.expressions.codegen.BaseMutableProjection {
>
>         private org.apache.spark.sql.catalyst.expressions.Expression[]
> expressions = null;
>         private org.apache.spark.sql.catalyst.expressions.MutableRow
> mutableRow = null;
>
>         public
> SpecificProjection(org.apache.spark.sql.catalyst.expressions.Expression[]
> expr) {
>           expressions = expr;
>           mutableRow = new
> org.apache.spark.sql.catalyst.expressions.GenericMutableRow(3);
>         }
>
>         public
> org.apache.spark.sql.catalyst.expressions.codegen.BaseMutableProjection
> target(org.apache.spark.sql.catalyst.expressions.MutableRow row) {
>           mutableRow = row;
>           return this;
>         }
>
>         /* Provide immutable access to the last projected row. */
>         public InternalRow currentValue() {
>           return (InternalRow) mutableRow;
>         }
>
>         public Object apply(Object _i) {
>           InternalRow i = (InternalRow) _i;
>
>         boolean isNull0 = i.isNullAt(0);
>         long primitive1 = isNull0 ?
>             -1L : (i.getLong(0));
>
>           if(isNull0)
>             mutableRow.setNullAt(0);
>           else
>             mutableRow.setLong(0, primitive1);
>
>
>         boolean isNull2 = i.isNullAt(1);
>         org.apache.spark.unsafe.types.UTF8String primitive3 = isNull2 ?
>             null : ((org.apache.spark.unsafe.types.UTF8String)i.apply(1));
>
>           if(isNull2)
>             mutableRow.setNullAt(1);
>           else
>             mutableRow.update(1, primitive3);
>
>
>         boolean isNull4 = i.isNullAt(1);
>         org.apache.spark.unsafe.types.UTF8String primitive5 = isNull4 ?
>             null : ((org.apache.spark.unsafe.types.UTF8String)i.apply(1));
>
>           if(isNull4)
>             mutableRow.setNullAt(2);
>           else
>             mutableRow.update(2, primitive5);
>
>
>           return mutableRow;
>         }
>       }
>
> org.codehaus.commons.compiler.CompileException: Line 28, Column 35: Object
> at org.codehaus.janino.UnitCompiler.findTypeByName(UnitCompiler.java:6897)
> at org.codehaus.janino.UnitCompiler.getReferenceType(UnitCompiler.java:5331)
> at org.codehaus.janino.UnitCompiler.getReferenceType(UnitCompiler.java:5207)
> at org.codehaus.janino.UnitCompiler.getType2(UnitCompiler.java:5188)
> at org.codehaus.janino.UnitCompiler.access$12600(UnitCompiler.java:185)
> at
> org.codehaus.janino.UnitCompiler$16.visitReferenceType(UnitCompiler.java:5119)
> at org.codehaus.janino.Java$ReferenceType.accept(Java.java:2880)
> at org.codehaus.janino.UnitCompiler.getType(UnitCompiler.java:5159)
> at org.codehaus.janino.UnitCompiler.access$16700(UnitCompiler.java:185)
> at
> org.codehaus.janino.UnitCompiler$31.getParameterTypes2(UnitCompiler.java:8533)
> at org.codehaus.janino.IClass$IInvocable.getParameterTypes(IClass.java:835)
> at org.codehaus.janino.IClass$IMethod.getDescriptor2(IClass.java:1063)
> at org.codehaus.janino.IClass$IInvocable.getDescriptor(IClass.java:849)
> at org.codehaus.janino.IClass.getIMethods(IClass.java:211)
> at org.codehaus.janino.IClass.getIMethods(IClass.java:199)
> at org.codehaus.janino.UnitCompiler.compile2(UnitCompiler.java:409)
> at org.codehaus.janino.UnitCompiler.compile2(UnitCompiler.java:658)
> at org.codehaus.janino.UnitCompiler.compile2(UnitCompiler.java:662)
> at org.codehaus.janino.UnitCompiler.access$600(UnitCompiler.java:185)
> at
> org.codehaus.janino.UnitCompiler$2.visitMemberClassDeclaration(UnitCompiler.java:350)
> at org.codehaus.janino.Java$MemberClassDeclaration.accept(Java.java:1035)
> at org.codehaus.janino.UnitCompiler.compile(UnitCompiler.java:354)
> at
> org.codehaus.janino.UnitCompiler.compileDeclaredMemberTypes(UnitCompiler.java:769)
> at org.codehaus.janino.UnitCompiler.compile2(UnitCompiler.java:532)
> at org.codehaus.janino.UnitCompiler.compile2(UnitCompiler.java:393)
> at org.codehaus.janino.UnitCompiler.access$400(UnitCompiler.java:185)
> at
> org.codehaus.janino.UnitCompiler$2.visitPackageMemberClassDeclaration(UnitCompiler.java:347)
> at
> org.codehaus.janino.Java$PackageMemberClassDeclaration.accept(Java.java:1139)
> at org.codehaus.janino.UnitCompiler.compile(UnitCompiler.java:354)
> at org.codehaus.janino.UnitCompiler.compileUnit(UnitCompiler.java:322)
> at
> org.codehaus.janino.SimpleCompiler.compileToClassLoader(SimpleCompiler.java:383)
> at
> org.codehaus.janino.ClassBodyEvaluator.compileToClass(ClassBodyEvaluator.java:315)
> at org.codehaus.janino.ClassBodyEvaluator.cook(ClassBodyEvaluator.java:233)
> at org.codehaus.janino.SimpleCompiler.cook(SimpleCompiler.java:192)
> at org.codehaus.commons.compiler.Cookable.cook(Cookable.java:84)
> at org.codehaus.commons.compiler.Cookable.cook(Cookable.java:77)
> at org.codehaus.janino.ClassBodyEvaluator.<init>(ClassBodyEvaluator.java:72)
> at
> org.apache.spark.sql.catalyst.expressions.codegen.CodeGenerator.compile(CodeGenerator.scala:245)
> at
> org.apache.spark.sql.catalyst.expressions.codegen.GenerateMutableProjection$.create(GenerateMutableProjection.scala:87)
> at
> org.apache.spark.sql.catalyst.expressions.codegen.GenerateMutableProjection$.create(GenerateMutableProjection.scala:29)
> at
> org.apache.spark.sql.catalyst.expressions.codegen.CodeGenerator$$anon$1.load(CodeGenerator.scala:272)
> at
> org.spark-project.guava.cache.LocalCache$LoadingValueReference.loadFuture(LocalCache.java:3599)
> at
> org.spark-project.guava.cache.LocalCache$Segment.loadSync(LocalCache.java:2379)
> at
> org.spark-project.guava.cache.LocalCache$Segment.lockedGetOrLoad(LocalCache.java:2342)
> at
> org.spark-project.guava.cache.LocalCache$Segment.get(LocalCache.java:2257)
> at org.spark-project.guava.cache.LocalCache.get(LocalCache.java:4000)
> at org.spark-project.guava.cache.LocalCache.getOrLoad(LocalCache.java:4004)
> at
> org.spark-project.guava.cache.LocalCache$LocalLoadingCache.get(LocalCache.java:4874)
> at
> org.apache.spark.sql.catalyst.expressions.codegen.CodeGenerator.generate(CodeGenerator.scala:285)
> at
> org.apache.spark.sql.catalyst.expressions.codegen.CodeGenerator.generate(CodeGenerator.scala:282)
> at
> org.apache.spark.sql.execution.SparkPlan.newMutableProjection(SparkPlan.scala:173)
> at
> org.apache.spark.sql.execution.Project.buildProjection$lzycompute(basicOperators.scala:39)
> at
> org.apache.spark.sql.execution.Project.buildProjection(basicOperators.scala:39)
> at
> org.apache.spark.sql.execution.Project$$anonfun$1.apply(basicOperators.scala:42)
> at
> org.apache.spark.sql.execution.Project$$anonfun$1.apply(basicOperators.scala:41)
> at
> org.apache.spark.rdd.RDD$$anonfun$mapPartitions$1$$anonfun$apply$17.apply(RDD.scala:686)
> at
> org.apache.spark.rdd.RDD$$anonfun$mapPartitions$1$$anonfun$apply$17.apply(RDD.scala:686)
> at org.apache.spark.rdd.MapPartitionsRDD.compute(MapPartitionsRDD.scala:35)
> at org.apache.spark.rdd.RDD.computeOrReadCheckpoint(RDD.scala:277)
> at org.apache.spark.rdd.RDD.iterator(RDD.scala:244)
> at org.apache.spark.rdd.MapPartitionsRDD.compute(MapPartitionsRDD.scala:35)
> at org.apache.spark.rdd.RDD.computeOrReadCheckpoint(RDD.scala:277)
> at org.apache.spark.rdd.RDD.iterator(RDD.scala:244)
> at org.apache.spark.scheduler.ResultTask.runTask(ResultTask.scala:63)
> at org.apache.spark.scheduler.Task.run(Task.scala:70)
> at org.apache.spark.executor.Executor$TaskRunner.run(Executor.scala:213)
> at
> java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1145)
> at
> java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:615)
> at java.lang.Thread.run(Thread.java:745)
> Caused by: java.lang.ClassNotFoundException: Object
> at
> org.apache.spark.repl.ExecutorClassLoader.findClass(ExecutorClassLoader.scala:69)
> at java.lang.ClassLoader.loadClass(ClassLoader.java:425)
> at java.lang.ClassLoader.loadClass(ClassLoader.java:358)
> at java.lang.Class.forName0(Native Method)
> at java.lang.Class.forName(Class.java:274)
> at
> org.codehaus.janino.ClassLoaderIClassLoader.findIClass(ClassLoaderIClassLoader.java:78)
> at org.codehaus.janino.IClassLoader.loadIClass(IClassLoader.java:254)
> at org.codehaus.janino.UnitCompiler.findTypeByName(UnitCompiler.java:6893)
> ... 68 more
> Caused by: java.lang.ClassNotFoundException: Object
> at java.lang.ClassLoader.findClass(ClassLoader.java:531)
> at
> org.apache.spark.util.ParentClassLoader.findClass(ParentClassLoader.scala:26)
> at java.lang.ClassLoader.loadClass(ClassLoader.java:425)
> at
> org.apache.spark.util.ParentClassLoader.loadClass(ParentClassLoader.scala:34)
> at java.lang.ClassLoader.loadClass(ClassLoader.java:358)
> at
> org.apache.spark.util.ParentClassLoader.loadClass(ParentClassLoader.scala:30)
> at
> org.apache.spark.repl.ExecutorClassLoader.findClass(ExecutorClassLoader.scala:64)
> ... 75 more
> 15/06/19 18:17:03 ERROR Executor: Exception in task 0.0 in stage 15.0 (TID
> 16)
> java.util.concurrent.ExecutionException:
> org.codehaus.commons.compiler.CompileException: Line 28, Column 35: Object
> at
> org.spark-project.guava.util.concurrent.AbstractFuture$Sync.getValue(AbstractFuture.java:306)
> at
> org.spark-project.guava.util.concurrent.AbstractFuture$Sync.get(AbstractFuture.java:293)
> at
> org.spark-project.guava.util.concurrent.AbstractFuture.get(AbstractFuture.java:116)
> at
> org.spark-project.guava.util.concurrent.Uninterruptibles.getUninterruptibly(Uninterruptibles.java:135)
> at
> org.spark-project.guava.cache.LocalCache$Segment.getAndRecordStats(LocalCache.java:2410)
> at
> org.spark-project.guava.cache.LocalCache$Segment.loadSync(LocalCache.java:2380)
> at
> org.spark-project.guava.cache.LocalCache$Segment.lockedGetOrLoad(LocalCache.java:2342)
> at
> org.spark-project.guava.cache.LocalCache$Segment.get(LocalCache.java:2257)
> at org.spark-project.guava.cache.LocalCache.get(LocalCache.java:4000)
> at org.spark-project.guava.cache.LocalCache.getOrLoad(LocalCache.java:4004)
> at
> org.spark-project.guava.cache.LocalCache$LocalLoadingCache.get(LocalCache.java:4874)
> at
> org.apache.spark.sql.catalyst.expressions.codegen.CodeGenerator.generate(CodeGenerator.scala:285)
> at
> org.apache.spark.sql.catalyst.expressions.codegen.CodeGenerator.generate(CodeGenerator.scala:282)
> at
> org.apache.spark.sql.execution.SparkPlan.newMutableProjection(SparkPlan.scala:173)
> at
> org.apache.spark.sql.execution.Project.buildProjection$lzycompute(basicOperators.scala:39)
> at
> org.apache.spark.sql.execution.Project.buildProjection(basicOperators.scala:39)
> at
> org.apache.spark.sql.execution.Project$$anonfun$1.apply(basicOperators.scala:42)
> at
> org.apache.spark.sql.execution.Project$$anonfun$1.apply(basicOperators.scala:41)
> at
> org.apache.spark.rdd.RDD$$anonfun$mapPartitions$1$$anonfun$apply$17.apply(RDD.scala:686)
> at
> org.apache.spark.rdd.RDD$$anonfun$mapPartitions$1$$anonfun$apply$17.apply(RDD.scala:686)
> at org.apache.spark.rdd.MapPartitionsRDD.compute(MapPartitionsRDD.scala:35)
> at org.apache.spark.rdd.RDD.computeOrReadCheckpoint(RDD.scala:277)
> at org.apache.spark.rdd.RDD.iterator(RDD.scala:244)
> at org.apache.spark.rdd.MapPartitionsRDD.compute(MapPartitionsRDD.scala:35)
> at org.apache.spark.rdd.RDD.computeOrReadCheckpoint(RDD.scala:277)
> at org.apache.spark.rdd.RDD.iterator(RDD.scala:244)
> at org.apache.spark.scheduler.ResultTask.runTask(ResultTask.scala:63)
> at org.apache.spark.scheduler.Task.run(Task.scala:70)
> at org.apache.spark.executor.Executor$TaskRunner.run(Executor.scala:213)
> at
> java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1145)
> at
> java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:615)
> at java.lang.Thread.run(Thread.java:745)
> Caused by: org.codehaus.commons.compiler.CompileException: Line 28, Column
> 35: Object
> at org.codehaus.janino.UnitCompiler.findTypeByName(UnitCompiler.java:6897)
> at org.codehaus.janino.UnitCompiler.getReferenceType(UnitCompiler.java:5331)
> at org.codehaus.janino.UnitCompiler.getReferenceType(UnitCompiler.java:5207)
> at org.codehaus.janino.UnitCompiler.getType2(UnitCompiler.java:5188)
> at org.codehaus.janino.UnitCompiler.access$12600(UnitCompiler.java:185)
> at
> org.codehaus.janino.UnitCompiler$16.visitReferenceType(UnitCompiler.java:5119)
> at org.codehaus.janino.Java$ReferenceType.accept(Java.java:2880)
> at org.codehaus.janino.UnitCompiler.getType(UnitCompiler.java:5159)
> at org.codehaus.janino.UnitCompiler.access$16700(UnitCompiler.java:185)
> at
> org.codehaus.janino.UnitCompiler$31.getParameterTypes2(UnitCompiler.java:8533)
> at org.codehaus.janino.IClass$IInvocable.getParameterTypes(IClass.java:835)
> at org.codehaus.janino.IClass$IMethod.getDescriptor2(IClass.java:1063)
> at org.codehaus.janino.IClass$IInvocable.getDescriptor(IClass.java:849)
> at org.codehaus.janino.IClass.getIMethods(IClass.java:211)
> at org.codehaus.janino.IClass.getIMethods(IClass.java:199)
> at org.codehaus.janino.UnitCompiler.compile2(UnitCompiler.java:409)
> at org.codehaus.janino.UnitCompiler.compile2(UnitCompiler.java:658)
> at org.codehaus.janino.UnitCompiler.compile2(UnitCompiler.java:662)
> at org.codehaus.janino.UnitCompiler.access$600(UnitCompiler.java:185)
> at
> org.codehaus.janino.UnitCompiler$2.visitMemberClassDeclaration(UnitCompiler.java:350)
> at org.codehaus.janino.Java$MemberClassDeclaration.accept(Java.java:1035)
> at org.codehaus.janino.UnitCompiler.compile(UnitCompiler.java:354)
> at
> org.codehaus.janino.UnitCompiler.compileDeclaredMemberTypes(UnitCompiler.java:769)
> at org.codehaus.janino.UnitCompiler.compile2(UnitCompiler.java:532)
> at org.codehaus.janino.UnitCompiler.compile2(UnitCompiler.java:393)
> at org.codehaus.janino.UnitCompiler.access$400(UnitCompiler.java:185)
> at
> org.codehaus.janino.UnitCompiler$2.visitPackageMemberClassDeclaration(UnitCompiler.java:347)
> at
> org.codehaus.janino.Java$PackageMemberClassDeclaration.accept(Java.java:1139)
> at org.codehaus.janino.UnitCompiler.compile(UnitCompiler.java:354)
> at org.codehaus.janino.UnitCompiler.compileUnit(UnitCompiler.java:322)
> at
> org.codehaus.janino.SimpleCompiler.compileToClassLoader(SimpleCompiler.java:383)
> at
> org.codehaus.janino.ClassBodyEvaluator.compileToClass(ClassBodyEvaluator.java:315)
> at org.codehaus.janino.ClassBodyEvaluator.cook(ClassBodyEvaluator.java:233)
> at org.codehaus.janino.SimpleCompiler.cook(SimpleCompiler.java:192)
> at org.codehaus.commons.compiler.Cookable.cook(Cookable.java:84)
> at org.codehaus.commons.compiler.Cookable.cook(Cookable.java:77)
> at org.codehaus.janino.ClassBodyEvaluator.<init>(ClassBodyEvaluator.java:72)
> at
> org.apache.spark.sql.catalyst.expressions.codegen.CodeGenerator.compile(CodeGenerator.scala:245)
> at
> org.apache.spark.sql.catalyst.expressions.codegen.GenerateMutableProjection$.create(GenerateMutableProjection.scala:87)
> at
> org.apache.spark.sql.catalyst.expressions.codegen.GenerateMutableProjection$.create(GenerateMutableProjection.scala:29)
> at
> org.apache.spark.sql.catalyst.expressions.codegen.CodeGenerator$$anon$1.load(CodeGenerator.scala:272)
> at
> org.spark-project.guava.cache.LocalCache$LoadingValueReference.loadFuture(LocalCache.java:3599)
> at
> org.spark-project.guava.cache.LocalCache$Segment.loadSync(LocalCache.java:2379)
> ... 26 more
> Caused by: java.lang.ClassNotFoundException: Object
> at
> org.apache.spark.repl.ExecutorClassLoader.findClass(ExecutorClassLoader.scala:69)
> at java.lang.ClassLoader.loadClass(ClassLoader.java:425)
> at java.lang.ClassLoader.loadClass(ClassLoader.java:358)
> at java.lang.Class.forName0(Native Method)
> at java.lang.Class.forName(Class.java:274)
> at
> org.codehaus.janino.ClassLoaderIClassLoader.findIClass(ClassLoaderIClassLoader.java:78)
> at org.codehaus.janino.IClassLoader.loadIClass(IClassLoader.java:254)
> at org.codehaus.janino.UnitCompiler.findTypeByName(UnitCompiler.java:6893)
> ... 68 more
> Caused by: java.lang.ClassNotFoundException: Object
> at java.lang.ClassLoader.findClass(ClassLoader.java:531)
> at
> org.apache.spark.util.ParentClassLoader.findClass(ParentClassLoader.scala:26)
> at java.lang.ClassLoader.loadClass(ClassLoader.java:425)
> at
> org.apache.spark.util.ParentClassLoader.loadClass(ParentClassLoader.scala:34)
> at java.lang.ClassLoader.loadClass(ClassLoader.java:358)
> at
> org.apache.spark.util.ParentClassLoader.loadClass(ParentClassLoader.scala:30)
> at
> org.apache.spark.repl.ExecutorClassLoader.findClass(ExecutorClassLoader.scala:64)
> ... 75 more
> 15/06/19 18:17:03 WARN TaskSetManager: Lost task 0.0 in stage 15.0 (TID 16,
> localhost): java.util.concurrent.ExecutionException:
> org.codehaus.commons.compiler.CompileException: Line 28, Column 35: Object
> at
> org.spark-project.guava.util.concurrent.AbstractFuture$Sync.getValue(AbstractFuture.java:306)
> at
> org.spark-project.guava.util.concurrent.AbstractFuture$Sync.get(AbstractFuture.java:293)
> at
> org.spark-project.guava.util.concurrent.AbstractFuture.get(AbstractFuture.java:116)
> at
> org.spark-project.guava.util.concurrent.Uninterruptibles.getUninterruptibly(Uninterruptibles.java:135)
> at
> org.spark-project.guava.cache.LocalCache$Segment.getAndRecordStats(LocalCache.java:2410)
> at
> org.spark-project.guava.cache.LocalCache$Segment.loadSync(LocalCache.java:2380)
> at
> org.spark-project.guava.cache.LocalCache$Segment.lockedGetOrLoad(LocalCache.java:2342)
> at
> org.spark-project.guava.cache.LocalCache$Segment.get(LocalCache.java:2257)
> at org.spark-project.guava.cache.LocalCache.get(LocalCache.java:4000)
> at org.spark-project.guava.cache.LocalCache.getOrLoad(LocalCache.java:4004)
> at
> org.spark-project.guava.cache.LocalCache$LocalLoadingCache.get(LocalCache.java:4874)
> at
> org.apache.spark.sql.catalyst.expressions.codegen.CodeGenerator.generate(CodeGenerator.scala:285)
> at
> org.apache.spark.sql.catalyst.expressions.codegen.CodeGenerator.generate(CodeGenerator.scala:282)
> at
> org.apache.spark.sql.execution.SparkPlan.newMutableProjection(SparkPlan.scala:173)
> at
> org.apache.spark.sql.execution.Project.buildProjection$lzycompute(basicOperators.scala:39)
> at
> org.apache.spark.sql.execution.Project.buildProjection(basicOperators.scala:39)
> at
> org.apache.spark.sql.execution.Project$$anonfun$1.apply(basicOperators.scala:42)
> at
> org.apache.spark.sql.execution.Project$$anonfun$1.apply(basicOperators.scala:41)
> at
> org.apache.spark.rdd.RDD$$anonfun$mapPartitions$1$$anonfun$apply$17.apply(RDD.scala:686)
> at
> org.apache.spark.rdd.RDD$$anonfun$mapPartitions$1$$anonfun$apply$17.apply(RDD.scala:686)
> at org.apache.spark.rdd.MapPartitionsRDD.compute(MapPartitionsRDD.scala:35)
> at org.apache.spark.rdd.RDD.computeOrReadCheckpoint(RDD.scala:277)
> at org.apache.spark.rdd.RDD.iterator(RDD.scala:244)
> at org.apache.spark.rdd.MapPartitionsRDD.compute(MapPartitionsRDD.scala:35)
> at org.apache.spark.rdd.RDD.computeOrReadCheckpoint(RDD.scala:277)
> at org.apache.spark.rdd.RDD.iterator(RDD.scala:244)
> at org.apache.spark.scheduler.ResultTask.runTask(ResultTask.scala:63)
> at org.apache.spark.scheduler.Task.run(Task.scala:70)
> at org.apache.spark.executor.Executor$TaskRunner.run(Executor.scala:213)
> at
> java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1145)
> at
> java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:615)
> at java.lang.Thread.run(Thread.java:745)
> Caused by: org.codehaus.commons.compiler.CompileException: Line 28, Column
> 35: Object
> at org.codehaus.janino.UnitCompiler.findTypeByName(UnitCompiler.java:6897)
> at org.codehaus.janino.UnitCompiler.getReferenceType(UnitCompiler.java:5331)
> at org.codehaus.janino.UnitCompiler.getReferenceType(UnitCompiler.java:5207)
> at org.codehaus.janino.UnitCompiler.getType2(UnitCompiler.java:5188)
> at org.codehaus.janino.UnitCompiler.access$12600(UnitCompiler.java:185)
> at
> org.codehaus.janino.UnitCompiler$16.visitReferenceType(UnitCompiler.java:5119)
> at org.codehaus.janino.Java$ReferenceType.accept(Java.java:2880)
> at org.codehaus.janino.UnitCompiler.getType(UnitCompiler.java:5159)
> at org.codehaus.janino.UnitCompiler.access$16700(UnitCompiler.java:185)
> at
> org.codehaus.janino.UnitCompiler$31.getParameterTypes2(UnitCompiler.java:8533)
> at org.codehaus.janino.IClass$IInvocable.getParameterTypes(IClass.java:835)
> at org.codehaus.janino.IClass$IMethod.getDescriptor2(IClass.java:1063)
> at org.codehaus.janino.IClass$IInvocable.getDescriptor(IClass.java:849)
> at org.codehaus.janino.IClass.getIMethods(IClass.java:211)
> at org.codehaus.janino.IClass.getIMethods(IClass.java:199)
> at org.codehaus.janino.UnitCompiler.compile2(UnitCompiler.java:409)
> at org.codehaus.janino.UnitCompiler.compile2(UnitCompiler.java:658)
> at org.codehaus.janino.UnitCompiler.compile2(UnitCompiler.java:662)
> at org.codehaus.janino.UnitCompiler.access$600(UnitCompiler.java:185)
> at
> org.codehaus.janino.UnitCompiler$2.visitMemberClassDeclaration(UnitCompiler.java:350)
> at org.codehaus.janino.Java$MemberClassDeclaration.accept(Java.java:1035)
> at org.codehaus.janino.UnitCompiler.compile(UnitCompiler.java:354)
> at
> org.codehaus.janino.UnitCompiler.compileDeclaredMemberTypes(UnitCompiler.java:769)
> at org.codehaus.janino.UnitCompiler.compile2(UnitCompiler.java:532)
> at org.codehaus.janino.UnitCompiler.compile2(UnitCompiler.java:393)
> at org.codehaus.janino.UnitCompiler.access$400(UnitCompiler.java:185)
> at
> org.codehaus.janino.UnitCompiler$2.visitPackageMemberClassDeclaration(UnitCompiler.java:347)
> at
> org.codehaus.janino.Java$PackageMemberClassDeclaration.accept(Java.java:1139)
> at org.codehaus.janino.UnitCompiler.compile(UnitCompiler.java:354)
> at org.codehaus.janino.UnitCompiler.compileUnit(UnitCompiler.java:322)
> at
> org.codehaus.janino.SimpleCompiler.compileToClassLoader(SimpleCompiler.java:383)
> at
> org.codehaus.janino.ClassBodyEvaluator.compileToClass(ClassBodyEvaluator.java:315)
> at org.codehaus.janino.ClassBodyEvaluator.cook(ClassBodyEvaluator.java:233)
> at org.codehaus.janino.SimpleCompiler.cook(SimpleCompiler.java:192)
> at org.codehaus.commons.compiler.Cookable.cook(Cookable.java:84)
> at org.codehaus.commons.compiler.Cookable.cook(Cookable.java:77)
> at org.codehaus.janino.ClassBodyEvaluator.<init>(ClassBodyEvaluator.java:72)
> at
> org.apache.spark.sql.catalyst.expressions.codegen.CodeGenerator.compile(CodeGenerator.scala:245)
> at
> org.apache.spark.sql.catalyst.expressions.codegen.GenerateMutableProjection$.create(GenerateMutableProjection.scala:87)
> at
> org.apache.spark.sql.catalyst.expressions.codegen.GenerateMutableProjection$.create(GenerateMutableProjection.scala:29)
> at
> org.apache.spark.sql.catalyst.expressions.codegen.CodeGenerator$$anon$1.load(CodeGenerator.scala:272)
> at
> org.spark-project.guava.cache.LocalCache$LoadingValueReference.loadFuture(LocalCache.java:3599)
> at
> org.spark-project.guava.cache.LocalCache$Segment.loadSync(LocalCache.java:2379)
> ... 26 more
> Caused by: java.lang.ClassNotFoundException: Object
> at
> org.apache.spark.repl.ExecutorClassLoader.findClass(ExecutorClassLoader.scala:69)
> at java.lang.ClassLoader.loadClass(ClassLoader.java:425)
> at java.lang.ClassLoader.loadClass(ClassLoader.java:358)
> at java.lang.Class.forName0(Native Method)
> at java.lang.Class.forName(Class.java:274)
> at
> org.codehaus.janino.ClassLoaderIClassLoader.findIClass(ClassLoaderIClassLoader.java:78)
> at org.codehaus.janino.IClassLoader.loadIClass(IClassLoader.java:254)
> at org.codehaus.janino.UnitCompiler.findTypeByName(UnitCompiler.java:6893)
> ... 68 more
> Caused by: java.lang.ClassNotFoundException: Object
> at java.lang.ClassLoader.findClass(ClassLoader.java:531)
> at
> org.apache.spark.util.ParentClassLoader.findClass(ParentClassLoader.scala:26)
> at java.lang.ClassLoader.loadClass(ClassLoader.java:425)
> at
> org.apache.spark.util.ParentClassLoader.loadClass(ParentClassLoader.scala:34)
> at java.lang.ClassLoader.loadClass(ClassLoader.java:358)
> at
> org.apache.spark.util.ParentClassLoader.loadClass(ParentClassLoader.scala:30)
> at
> org.apache.spark.repl.ExecutorClassLoader.findClass(ExecutorClassLoader.scala:64)
> ... 75 more
>
> 15/06/19 18:17:03 ERROR TaskSetManager: Task 0 in stage 15.0 failed 1 times;
> aborting job
> org.apache.spark.SparkException: Job aborted due to stage failure: Task 0 in
> stage 15.0 failed 1 times, most recent failure: Lost task 0.0 in stage 15.0
> (TID 16, localhost): java.util.concurrent.ExecutionException:
> org.codehaus.commons.compiler.CompileException: Line 28, Column 35: Object
> at
> org.spark-project.guava.util.concurrent.AbstractFuture$Sync.getValue(AbstractFuture.java:306)
> at
> org.spark-project.guava.util.concurrent.AbstractFuture$Sync.get(AbstractFuture.java:293)
> at
> org.spark-project.guava.util.concurrent.AbstractFuture.get(AbstractFuture.java:116)
> at
> org.spark-project.guava.util.concurrent.Uninterruptibles.getUninterruptibly(Uninterruptibles.java:135)
> at
> org.spark-project.guava.cache.LocalCache$Segment.getAndRecordStats(LocalCache.java:2410)
> at
> org.spark-project.guava.cache.LocalCache$Segment.loadSync(LocalCache.java:2380)
> at
> org.spark-project.guava.cache.LocalCache$Segment.lockedGetOrLoad(LocalCache.java:2342)
> at
> org.spark-project.guava.cache.LocalCache$Segment.get(LocalCache.java:2257)
> at org.spark-project.guava.cache.LocalCache.get(LocalCache.java:4000)
> at org.spark-project.guava.cache.LocalCache.getOrLoad(LocalCache.java:4004)
> at
> org.spark-project.guava.cache.LocalCache$LocalLoadingCache.get(LocalCache.java:4874)
> at
> org.apache.spark.sql.catalyst.expressions.codegen.CodeGenerator.generate(CodeGenerator.scala:285)
> at
> org.apache.spark.sql.catalyst.expressions.codegen.CodeGenerator.generate(CodeGenerator.scala:282)
> at
> org.apache.spark.sql.execution.SparkPlan.newMutableProjection(SparkPlan.scala:173)
> at
> org.apache.spark.sql.execution.Project.buildProjection$lzycompute(basicOperators.scala:39)
> at
> org.apache.spark.sql.execution.Project.buildProjection(basicOperators.scala:39)
> at
> org.apache.spark.sql.execution.Project$$anonfun$1.apply(basicOperators.scala:42)
> at
> org.apache.spark.sql.execution.Project$$anonfun$1.apply(basicOperators.scala:41)
> at
> org.apache.spark.rdd.RDD$$anonfun$mapPartitions$1$$anonfun$apply$17.apply(RDD.scala:686)
> at
> org.apache.spark.rdd.RDD$$anonfun$mapPartitions$1$$anonfun$apply$17.apply(RDD.scala:686)
> at org.apache.spark.rdd.MapPartitionsRDD.compute(MapPartitionsRDD.scala:35)
> at org.apache.spark.rdd.RDD.computeOrReadCheckpoint(RDD.scala:277)
> at org.apache.spark.rdd.RDD.iterator(RDD.scala:244)
> at org.apache.spark.rdd.MapPartitionsRDD.compute(MapPartitionsRDD.scala:35)
> at org.apache.spark.rdd.RDD.computeOrReadCheckpoint(RDD.scala:277)
> at org.apache.spark.rdd.RDD.iterator(RDD.scala:244)
> at org.apache.spark.scheduler.ResultTask.runTask(ResultTask.scala:63)
> at org.apache.spark.scheduler.Task.run(Task.scala:70)
> at org.apache.spark.executor.Executor$TaskRunner.run(Executor.scala:213)
> at
> java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1145)
> at
> java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:615)
> at java.lang.Thread.run(Thread.java:745)
> Caused by: org.codehaus.commons.compiler.CompileException: Line 28, Column
> 35: Object
> at org.codehaus.janino.UnitCompiler.findTypeByName(UnitCompiler.java:6897)
> at org.codehaus.janino.UnitCompiler.getReferenceType(UnitCompiler.java:5331)
> at org.codehaus.janino.UnitCompiler.getReferenceType(UnitCompiler.java:5207)
> at org.codehaus.janino.UnitCompiler.getType2(UnitCompiler.java:5188)
> at org.codehaus.janino.UnitCompiler.access$12600(UnitCompiler.java:185)
> at
> org.codehaus.janino.UnitCompiler$16.visitReferenceType(UnitCompiler.java:5119)
> at org.codehaus.janino.Java$ReferenceType.accept(Java.java:2880)
> at org.codehaus.janino.UnitCompiler.getType(UnitCompiler.java:5159)
> at org.codehaus.janino.UnitCompiler.access$16700(UnitCompiler.java:185)
> at
> org.codehaus.janino.UnitCompiler$31.getParameterTypes2(UnitCompiler.java:8533)
> at org.codehaus.janino.IClass$IInvocable.getParameterTypes(IClass.java:835)
> at org.codehaus.janino.IClass$IMethod.getDescriptor2(IClass.java:1063)
> at org.codehaus.janino.IClass$IInvocable.getDescriptor(IClass.java:849)
> at org.codehaus.janino.IClass.getIMethods(IClass.java:211)
> at org.codehaus.janino.IClass.getIMethods(IClass.java:199)
> at org.codehaus.janino.UnitCompiler.compile2(UnitCompiler.java:409)
> at org.codehaus.janino.UnitCompiler.compile2(UnitCompiler.java:658)
> at org.codehaus.janino.UnitCompiler.compile2(UnitCompiler.java:662)
> at org.codehaus.janino.UnitCompiler.access$600(UnitCompiler.java:185)
> at
> org.codehaus.janino.UnitCompiler$2.visitMemberClassDeclaration(UnitCompiler.java:350)
> at org.codehaus.janino.Java$MemberClassDeclaration.accept(Java.java:1035)
> at org.codehaus.janino.UnitCompiler.compile(UnitCompiler.java:354)
> at
> org.codehaus.janino.UnitCompiler.compileDeclaredMemberTypes(UnitCompiler.java:769)
> at org.codehaus.janino.UnitCompiler.compile2(UnitCompiler.java:532)
> at org.codehaus.janino.UnitCompiler.compile2(UnitCompiler.java:393)
> at org.codehaus.janino.UnitCompiler.access$400(UnitCompiler.java:185)
> at
> org.codehaus.janino.UnitCompiler$2.visitPackageMemberClassDeclaration(UnitCompiler.java:347)
> at
> org.codehaus.janino.Java$PackageMemberClassDeclaration.accept(Java.java:1139)
> at org.codehaus.janino.UnitCompiler.compile(UnitCompiler.java:354)
> at org.codehaus.janino.UnitCompiler.compileUnit(UnitCompiler.java:322)
> at
> org.codehaus.janino.SimpleCompiler.compileToClassLoader(SimpleCompiler.java:383)
> at
> org.codehaus.janino.ClassBodyEvaluator.compileToClass(ClassBodyEvaluator.java:315)
> at org.codehaus.janino.ClassBodyEvaluator.cook(ClassBodyEvaluator.java:233)
> at org.codehaus.janino.SimpleCompiler.cook(SimpleCompiler.java:192)
> at org.codehaus.commons.compiler.Cookable.cook(Cookable.java:84)
> at org.codehaus.commons.compiler.Cookable.cook(Cookable.java:77)
> at org.codehaus.janino.ClassBodyEvaluator.<init>(ClassBodyEvaluator.java:72)
> at
> org.apache.spark.sql.catalyst.expressions.codegen.CodeGenerator.compile(CodeGenerator.scala:245)
> at
> org.apache.spark.sql.catalyst.expressions.codegen.GenerateMutableProjection$.create(GenerateMutableProjection.scala:87)
> at
> org.apache.spark.sql.catalyst.expressions.codegen.GenerateMutableProjection$.create(GenerateMutableProjection.scala:29)
> at
> org.apache.spark.sql.catalyst.expressions.codegen.CodeGenerator$$anon$1.load(CodeGenerator.scala:272)
> at
> org.spark-project.guava.cache.LocalCache$LoadingValueReference.loadFuture(LocalCache.java:3599)
> at
> org.spark-project.guava.cache.LocalCache$Segment.loadSync(LocalCache.java:2379)
> ... 26 more
> Caused by: java.lang.ClassNotFoundException: Object
> at
> org.apache.spark.repl.ExecutorClassLoader.findClass(ExecutorClassLoader.scala:69)
> at java.lang.ClassLoader.loadClass(ClassLoader.java:425)
> at java.lang.ClassLoader.loadClass(ClassLoader.java:358)
> at java.lang.Class.forName0(Native Method)
> at java.lang.Class.forName(Class.java:274)
> at
> org.codehaus.janino.ClassLoaderIClassLoader.findIClass(ClassLoaderIClassLoader.java:78)
> at org.codehaus.janino.IClassLoader.loadIClass(IClassLoader.java:254)
> at org.codehaus.janino.UnitCompiler.findTypeByName(UnitCompiler.java:6893)
> ... 68 more
> Caused by: java.lang.ClassNotFoundException: Object
> at java.lang.ClassLoader.findClass(ClassLoader.java:531)
> at
> org.apache.spark.util.ParentClassLoader.findClass(ParentClassLoader.scala:26)
> at java.lang.ClassLoader.loadClass(ClassLoader.java:425)
> at
> org.apache.spark.util.ParentClassLoader.loadClass(ParentClassLoader.scala:34)
> at java.lang.ClassLoader.loadClass(ClassLoader.java:358)
> at
> org.apache.spark.util.ParentClassLoader.loadClass(ParentClassLoader.scala:30)
> at
> org.apache.spark.repl.ExecutorClassLoader.findClass(ExecutorClassLoader.scala:64)
> ... 75 more
>
> Driver stacktrace:
> at
> org.apache.spark.scheduler.DAGScheduler.org$apache$spark$scheduler$DAGScheduler$$failJobAndIndependentStages(DAGScheduler.scala:1285)
> at
> org.apache.spark.scheduler.DAGScheduler$$anonfun$abortStage$1.apply(DAGScheduler.scala:1276)
> at
> org.apache.spark.scheduler.DAGScheduler$$anonfun$abortStage$1.apply(DAGScheduler.scala:1275)
> at
> scala.collection.mutable.ResizableArray$class.foreach(ResizableArray.scala:59)
> at scala.collection.mutable.ArrayBuffer.foreach(ArrayBuffer.scala:47)
> at
> org.apache.spark.scheduler.DAGScheduler.abortStage(DAGScheduler.scala:1275)
> at
> org.apache.spark.scheduler.DAGScheduler$$anonfun$handleTaskSetFailed$1.apply(DAGScheduler.scala:749)
> at
> org.apache.spark.scheduler.DAGScheduler$$anonfun$handleTaskSetFailed$1.apply(DAGScheduler.scala:749)
> at scala.Option.foreach(Option.scala:236)
> at
> org.apache.spark.scheduler.DAGScheduler.handleTaskSetFailed(DAGScheduler.scala:749)
> at
> org.apache.spark.scheduler.DAGSchedulerEventProcessLoop.onReceive(DAGScheduler.scala:1484)
> at
> org.apache.spark.scheduler.DAGSchedulerEventProcessLoop.onReceive(DAGScheduler.scala:1445)
> at org.apache.spark.util.EventLoop$$anon$1.run(EventLoop.scala:48)
> """
>
> What am I doing wrong?
> P.S. I am working on spark master branch. The same code works fine on
> pyspark.
>
>
>
> Sincerely,
> Animesh

---------------------------------------------------------------------
To unsubscribe, e-mail: user-unsubscr...@spark.apache.org
For additional commands, e-mail: user-h...@spark.apache.org

Reply via email to