[ https://issues.apache.org/jira/browse/SPARK-18147?page=com.atlassian.jira.plugin.system.issuetabpanels:comment-tabpanel&focusedCommentId=16320159#comment-16320159 ]

Alexander Chermenin commented on SPARK-18147:
---------------------------------------------

Has this been resolved in version 2.2.1? I am seeing a similar problem with
PySpark and Structured Streaming:

{noformat}
18/01/10 15:12:37 ERROR CodeGenerator: failed to compile: org.codehaus.commons.compiler.CompileException: File 'generated.java', Line 174, Column 113: Expression "isNull3" is not an rvalue
/* 001 */ public java.lang.Object generate(Object[] references) {
/* 002 */   return new SpecificUnsafeProjection(references);
/* 003 */ }
/* 004 */
/* 005 */ class SpecificUnsafeProjection extends org.apache.spark.sql.catalyst.expressions.UnsafeProjection {
/* 006 */
/* 007 */   private Object[] references;
/* 008 */   private UTF8String lastRegex;
/* 009 */   private java.util.regex.Pattern pattern;
/* 010 */   private String lastReplacement;
/* 011 */   private UTF8String lastReplacementInUTF8;
/* 012 */   private java.lang.StringBuffer result;
/* 013 */   private UTF8String lastRegex1;
/* 014 */   private java.util.regex.Pattern pattern1;
/* 015 */   private String lastReplacement1;
/* 016 */   private UTF8String lastReplacementInUTF81;
/* 017 */   private java.lang.StringBuffer result1;
/* 018 */   private UnsafeRow result2;
/* 019 */   private org.apache.spark.sql.catalyst.expressions.codegen.BufferHolder holder;
/* 020 */   private org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter rowWriter;
/* 021 */
/* 022 */   public SpecificUnsafeProjection(Object[] references) {
/* 023 */     this.references = references;
/* 024 */     lastRegex = null;
/* 025 */     pattern = null;
/* 026 */     lastReplacement = null;
/* 027 */     lastReplacementInUTF8 = null;
/* 028 */     result = new java.lang.StringBuffer();
/* 029 */     lastRegex1 = null;
/* 030 */     pattern1 = null;
/* 031 */     lastReplacement1 = null;
/* 032 */     lastReplacementInUTF81 = null;
/* 033 */     result1 = new java.lang.StringBuffer();
/* 034 */     result2 = new UnsafeRow(3);
/* 035 */     this.holder = new org.apache.spark.sql.catalyst.expressions.codegen.BufferHolder(result2, 64);
/* 036 */     this.rowWriter = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter(holder, 3);
/* 037 */
/* 038 */   }
/* 039 */
/* 040 */   public void initialize(int partitionIndex) {
/* 041 */
/* 042 */   }
/* 043 */
/* 044 */
/* 045 */   private void apply_1(InternalRow i) {
/* 046 */
/* 047 */     UTF8String[] args1 = new UTF8String[3];
/* 048 */
/* 049 */
/* 050 */     if (!false) {
/* 051 */       args1[0] = ((UTF8String) references[6]);
/* 052 */     }
/* 053 */
/* 054 */
/* 055 */     if (!false) {
/* 056 */       args1[1] = ((UTF8String) references[7]);
/* 057 */     }
/* 058 */
/* 059 */
/* 060 */     boolean isNull13 = false;
/* 061 */
/* 062 */     UTF8String value14 = i.getUTF8String(2);
/* 063 */
/* 064 */
/* 065 */     UTF8String value13 = null;
/* 066 */
/* 067 */     if (!((UTF8String) references[8]).equals(lastRegex1)) {
/* 068 */       // regex value changed
/* 069 */       lastRegex1 = ((UTF8String) references[8]).clone();
/* 070 */       pattern1 = java.util.regex.Pattern.compile(lastRegex1.toString());
/* 071 */     }
/* 072 */     if (!((UTF8String) references[9]).equals(lastReplacementInUTF81)) {
/* 073 */       // replacement string changed
/* 074 */       lastReplacementInUTF81 = ((UTF8String) references[9]).clone();
/* 075 */       lastReplacement1 = lastReplacementInUTF81.toString();
/* 076 */     }
/* 077 */     result1.delete(0, result1.length());
/* 078 */     java.util.regex.Matcher matcher1 = pattern1.matcher(value14.toString());
/* 079 */
/* 080 */     while (matcher1.find()) {
/* 081 */       matcher1.appendReplacement(result1, lastReplacement1);
/* 082 */     }
/* 083 */     matcher1.appendTail(result1);
/* 084 */     value13 = UTF8String.fromString(result1.toString());
/* 085 */     if (!false) {
/* 086 */       args1[2] = value13;
/* 087 */     }
/* 088 */
/* 089 */     UTF8String value10 = UTF8String.concat(args1);
/* 090 */     boolean isNull10 = value10 == null;
/* 091 */   }
/* 092 */
/* 093 */
/* 094 */   private void apply1_1(InternalRow i) {
/* 095 */
/* 096 */
/* 097 */     final Object value20 = null;
/* 098 */     if (true) {
/* 099 */       rowWriter.setNullAt(2);
/* 100 */     } else {
/* 101 */
/* 102 */     }
/* 103 */
/* 104 */   }
/* 105 */
/* 106 */
/* 107 */   private void apply_0(InternalRow i) {
/* 108 */
/* 109 */     UTF8String[] args = new UTF8String[3];
/* 110 */
/* 111 */
/* 112 */     if (!false) {
/* 113 */       args[0] = ((UTF8String) references[2]);
/* 114 */     }
/* 115 */
/* 116 */
/* 117 */     if (!false) {
/* 118 */       args[1] = ((UTF8String) references[3]);
/* 119 */     }
/* 120 */
/* 121 */
/* 122 */     boolean isNull6 = true;
/* 123 */     UTF8String value6 = null;
/* 124 */
/* 125 */     boolean isNull7 = i.isNullAt(1);
/* 126 */     UTF8String value7 = isNull7 ? null : (i.getUTF8String(1));
/* 127 */     if (!isNull7) {
/* 128 */
/* 129 */
/* 130 */
/* 131 */       isNull6 = false; // resultCode could change nullability.
/* 132 */
/* 133 */       if (!((UTF8String) references[4]).equals(lastRegex)) {
/* 134 */         // regex value changed
/* 135 */         lastRegex = ((UTF8String) references[4]).clone();
/* 136 */         pattern = java.util.regex.Pattern.compile(lastRegex.toString());
/* 137 */       }
/* 138 */       if (!((UTF8String) references[5]).equals(lastReplacementInUTF8)) {
/* 139 */         // replacement string changed
/* 140 */         lastReplacementInUTF8 = ((UTF8String) references[5]).clone();
/* 141 */         lastReplacement = lastReplacementInUTF8.toString();
/* 142 */       }
/* 143 */       result.delete(0, result.length());
/* 144 */       java.util.regex.Matcher matcher = pattern.matcher(value7.toString());
/* 145 */
/* 146 */       while (matcher.find()) {
/* 147 */         matcher.appendReplacement(result, lastReplacement);
/* 148 */       }
/* 149 */       matcher.appendTail(result);
/* 150 */       value6 = UTF8String.fromString(result.toString());
/* 151 */       isNull6 = false;
/* 152 */
/* 153 */
/* 154 */     }
/* 155 */     if (!isNull6) {
/* 156 */       args[2] = value6;
/* 157 */     }
/* 158 */
/* 159 */     UTF8String value3 = UTF8String.concat(args);
/* 160 */     boolean isNull3 = value3 == null;
/* 161 */   }
/* 162 */
/* 163 */
/* 164 */   private void apply1_0(InternalRow i) {
/* 165 */
/* 166 */
/* 167 */     apply_0(i);
/* 168 */     apply_1(i);
/* 169 */     apply_2(i);
/* 170 */     int varargNum = 4;
/* 171 */     int idxInVararg = 0;
/* 172 */
/* 173 */     UTF8String[] array = new UTF8String[varargNum];
/* 174 */     array[idxInVararg ++] = false ? (UTF8String) null : ((UTF8String) references[1]);array[idxInVararg ++] = isNull3 ? (UTF8String) null : value3;array[idxInVararg ++] = isNull10 ? (UTF8String) null : value10;array[idxInVararg ++] = false ? (UTF8String) null : ((UTF8String) references[10]);
/* 175 */     UTF8String value = UTF8String.concatWs(((UTF8String) references[0]), array);
/* 176 */     boolean isNull = value == null;
/* 177 */     if (isNull) {
/* 178 */       rowWriter.setNullAt(0);
/* 179 */     } else {
/* 180 */       rowWriter.write(0, value);
/* 181 */     }
/* 182 */
/* 183 */
/* 184 */     Object obj = ((Expression) references[11]).eval(i);
/* 185 */     boolean isNull19 = obj == null;
/* 186 */     UTF8String value19 = null;
/* 187 */     if (!isNull19) {
/* 188 */       value19 = (UTF8String) obj;
/* 189 */     }
/* 190 */     if (isNull19) {
/* 191 */       rowWriter.setNullAt(1);
/* 192 */     } else {
/* 193 */       rowWriter.write(1, value19);
/* 194 */     }
/* 195 */
/* 196 */   }
/* 197 */
/* 198 */
/* 199 */   private void apply_2(InternalRow i) {
/* 200 */
/* 201 */     final ArrayData value17 = null;
/* 202 */   }
/* 203 */
/* 204 */
/* 205 */   // Scala.Function1 need this
/* 206 */   public java.lang.Object apply(java.lang.Object row) {
/* 207 */     return apply((InternalRow) row);
/* 208 */   }
/* 209 */
/* 210 */   public UnsafeRow apply(InternalRow i) {
/* 211 */     holder.reset();
/* 212 */
/* 213 */     rowWriter.zeroOutNullBytes();
/* 214 */     apply1_0(i);
/* 215 */     apply1_1(i);
/* 216 */     result2.setTotalSize(holder.totalSize());
/* 217 */     return result2;
/* 218 */   }
/* 219 */ }

org.codehaus.commons.compiler.CompileException: File 'generated.java', Line 174, Column 113: Expression "isNull3" is not an rvalue
        at org.codehaus.janino.UnitCompiler.compileError(UnitCompiler.java:11004)
        at org.codehaus.janino.UnitCompiler.toRvalueOrCompileException(UnitCompiler.java:6639)
        at org.codehaus.janino.UnitCompiler.getConstantValue2(UnitCompiler.java:5001)
        at org.codehaus.janino.UnitCompiler.access$10500(UnitCompiler.java:206)
        at org.codehaus.janino.UnitCompiler$13.visitAmbiguousName(UnitCompiler.java:4984)
        at org.codehaus.janino.Java$AmbiguousName.accept(Java.java:3633)
        at org.codehaus.janino.Java$Lvalue.accept(Java.java:3563)
        at org.codehaus.janino.UnitCompiler.getConstantValue(UnitCompiler.java:4956)
        at org.codehaus.janino.UnitCompiler.getConstantValue2(UnitCompiler.java:5027)
        at org.codehaus.janino.UnitCompiler.access$9700(UnitCompiler.java:206)
        at org.codehaus.janino.UnitCompiler$13.visitConditionalExpression(UnitCompiler.java:4964)
        at org.codehaus.janino.Java$ConditionalExpression.accept(Java.java:3877)
        at org.codehaus.janino.UnitCompiler.getConstantValue(UnitCompiler.java:4956)
        at org.codehaus.janino.UnitCompiler.compileGetValue(UnitCompiler.java:4925)
        at org.codehaus.janino.UnitCompiler.compile2(UnitCompiler.java:3189)
        at org.codehaus.janino.UnitCompiler.access$5100(UnitCompiler.java:206)
        at org.codehaus.janino.UnitCompiler$9.visitAssignment(UnitCompiler.java:3143)
        at org.codehaus.janino.UnitCompiler$9.visitAssignment(UnitCompiler.java:3139)
        at org.codehaus.janino.Java$Assignment.accept(Java.java:3847)
        at org.codehaus.janino.UnitCompiler.compile(UnitCompiler.java:3139)
        at org.codehaus.janino.UnitCompiler.compile2(UnitCompiler.java:2112)
        at org.codehaus.janino.UnitCompiler.access$1700(UnitCompiler.java:206)
        at org.codehaus.janino.UnitCompiler$6.visitExpressionStatement(UnitCompiler.java:1377)
        at org.codehaus.janino.UnitCompiler$6.visitExpressionStatement(UnitCompiler.java:1370)
        at org.codehaus.janino.Java$ExpressionStatement.accept(Java.java:2558)
        at org.codehaus.janino.UnitCompiler.compile(UnitCompiler.java:1370)
        at org.codehaus.janino.UnitCompiler.compileStatements(UnitCompiler.java:1450)
        at org.codehaus.janino.UnitCompiler.compile(UnitCompiler.java:2811)
        at org.codehaus.janino.UnitCompiler.compileDeclaredMethods(UnitCompiler.java:1262)
        at org.codehaus.janino.UnitCompiler.compileDeclaredMethods(UnitCompiler.java:1234)
        at org.codehaus.janino.UnitCompiler.compile2(UnitCompiler.java:538)
        at org.codehaus.janino.UnitCompiler.compile2(UnitCompiler.java:890)
        at org.codehaus.janino.UnitCompiler.compile2(UnitCompiler.java:894)
        at org.codehaus.janino.UnitCompiler.access$600(UnitCompiler.java:206)
        at org.codehaus.janino.UnitCompiler$2.visitMemberClassDeclaration(UnitCompiler.java:377)
        at org.codehaus.janino.UnitCompiler$2.visitMemberClassDeclaration(UnitCompiler.java:369)
        at org.codehaus.janino.Java$MemberClassDeclaration.accept(Java.java:1128)
        at org.codehaus.janino.UnitCompiler.compile(UnitCompiler.java:369)
        at org.codehaus.janino.UnitCompiler.compileDeclaredMemberTypes(UnitCompiler.java:1209)
        at org.codehaus.janino.UnitCompiler.compile2(UnitCompiler.java:564)
        at org.codehaus.janino.UnitCompiler.compile2(UnitCompiler.java:420)
        at org.codehaus.janino.UnitCompiler.access$400(UnitCompiler.java:206)
        at org.codehaus.janino.UnitCompiler$2.visitPackageMemberClassDeclaration(UnitCompiler.java:374)
        at org.codehaus.janino.UnitCompiler$2.visitPackageMemberClassDeclaration(UnitCompiler.java:369)
        at org.codehaus.janino.Java$AbstractPackageMemberClassDeclaration.accept(Java.java:1309)
        at org.codehaus.janino.UnitCompiler.compile(UnitCompiler.java:369)
        at org.codehaus.janino.UnitCompiler.compileUnit(UnitCompiler.java:345)
        at org.codehaus.janino.SimpleCompiler.compileToClassLoader(SimpleCompiler.java:396)
        at org.codehaus.janino.ClassBodyEvaluator.compileToClass(ClassBodyEvaluator.java:311)
        at org.codehaus.janino.ClassBodyEvaluator.cook(ClassBodyEvaluator.java:229)
        at org.codehaus.janino.SimpleCompiler.cook(SimpleCompiler.java:196)
        at org.codehaus.commons.compiler.Cookable.cook(Cookable.java:91)
        at org.apache.spark.sql.catalyst.expressions.codegen.CodeGenerator$.org$apache$spark$sql$catalyst$expressions$codegen$CodeGenerator$$doCompile(CodeGenerator.scala:1000)
        at org.apache.spark.sql.catalyst.expressions.codegen.CodeGenerator$$anon$1.load(CodeGenerator.scala:1067)
        at org.apache.spark.sql.catalyst.expressions.codegen.CodeGenerator$$anon$1.load(CodeGenerator.scala:1064)
        at org.spark_project.guava.cache.LocalCache$LoadingValueReference.loadFuture(LocalCache.java:3599)
        at org.spark_project.guava.cache.LocalCache$Segment.loadSync(LocalCache.java:2379)
        at org.spark_project.guava.cache.LocalCache$Segment.lockedGetOrLoad(LocalCache.java:2342)
        at org.spark_project.guava.cache.LocalCache$Segment.get(LocalCache.java:2257)
        at org.spark_project.guava.cache.LocalCache.get(LocalCache.java:4000)
        at org.spark_project.guava.cache.LocalCache.getOrLoad(LocalCache.java:4004)
        at org.spark_project.guava.cache.LocalCache$LocalLoadingCache.get(LocalCache.java:4874)
        at org.apache.spark.sql.catalyst.expressions.codegen.CodeGenerator$.compile(CodeGenerator.scala:946)
        at org.apache.spark.sql.catalyst.expressions.codegen.GenerateUnsafeProjection$.create(GenerateUnsafeProjection.scala:412)
        at org.apache.spark.sql.catalyst.expressions.codegen.GenerateUnsafeProjection$.create(GenerateUnsafeProjection.scala:366)
        at org.apache.spark.sql.catalyst.expressions.codegen.GenerateUnsafeProjection$.create(GenerateUnsafeProjection.scala:32)
        at org.apache.spark.sql.catalyst.expressions.codegen.CodeGenerator.generate(CodeGenerator.scala:930)
        at org.apache.spark.sql.catalyst.expressions.UnsafeProjection$.create(Projection.scala:130)
        at org.apache.spark.sql.catalyst.expressions.UnsafeProjection$.create(Projection.scala:140)
        at org.apache.spark.sql.execution.aggregate.AggregationIterator.generateResultProjection(AggregationIterator.scala:219)
        at org.apache.spark.sql.execution.aggregate.TungstenAggregationIterator.generateResultProjection(TungstenAggregationIterator.scala:144)
        at org.apache.spark.sql.execution.aggregate.AggregationIterator.<init>(AggregationIterator.scala:266)
        at org.apache.spark.sql.execution.aggregate.TungstenAggregationIterator.<init>(TungstenAggregationIterator.scala:92)
        at org.apache.spark.sql.execution.aggregate.HashAggregateExec$$anonfun$doExecute$1$$anonfun$4.apply(HashAggregateExec.scala:106)
        at org.apache.spark.sql.execution.aggregate.HashAggregateExec$$anonfun$doExecute$1$$anonfun$4.apply(HashAggregateExec.scala:97)
        at org.apache.spark.rdd.RDD$$anonfun$mapPartitions$1$$anonfun$apply$23.apply(RDD.scala:797)
        at org.apache.spark.rdd.RDD$$anonfun$mapPartitions$1$$anonfun$apply$23.apply(RDD.scala:797)
        at org.apache.spark.rdd.MapPartitionsRDD.compute(MapPartitionsRDD.scala:38)
        at org.apache.spark.rdd.RDD.computeOrReadCheckpoint(RDD.scala:323)
        at org.apache.spark.rdd.RDD.iterator(RDD.scala:287)
        at org.apache.spark.rdd.MapPartitionsRDD.compute(MapPartitionsRDD.scala:38)
        at org.apache.spark.rdd.RDD.computeOrReadCheckpoint(RDD.scala:323)
        at org.apache.spark.rdd.RDD.iterator(RDD.scala:287)
        at org.apache.spark.scheduler.ResultTask.runTask(ResultTask.scala:87)
        at org.apache.spark.scheduler.Task.run(Task.scala:108)
        at org.apache.spark.executor.Executor$TaskRunner.run(Executor.scala:338)
        at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1142)
        at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:617)
        at java.lang.Thread.run(Thread.java:745)
18/01/10 15:12:37 ERROR Executor: Exception in task 0.0 in stage 1.0 (TID 1)
{noformat}
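
Judging from the generated code above, this looks like the same class of bug as in the description: codegen splits the projection into helper methods (apply_0, apply_1, ...), but value3/isNull3 are declared as locals of apply_0 while apply1_0 (generated line 174) still references them. Distilled to plain Java, the shape handed to Janino is roughly the following (a hypothetical sketch of the bug, intentionally non-compiling; not actual Spark source):

{noformat}
class BrokenShape {
  private void apply_0() {
    // locals of apply_0 ...
    String value3 = "x";
    boolean isNull3 = value3 == null;
  }

  private void apply1_0() {
    apply_0();
    // ... referenced from a sibling method: javac reports "cannot find symbol",
    // Janino reports: Expression "isNull3" is not an rvalue
    String slot = isNull3 ? null : value3;
  }
}
{noformat}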


> Broken Spark SQL Codegen
> ------------------------
>
>                 Key: SPARK-18147
>                 URL: https://issues.apache.org/jira/browse/SPARK-18147
>             Project: Spark
>          Issue Type: Bug
>          Components: SQL
>    Affects Versions: 2.0.1
>            Reporter: koert kuipers
>            Assignee: Liang-Chi Hsieh
>            Priority: Critical
>             Fix For: 2.1.0
>
>
> this is me deliberately trying to break spark sql codegen to uncover potential
> issues, by creating arbitrarily complex data structures using primitives,
> strings, basic collections (map, seq, option), tuples, and case classes.
> first example: nested case classes
> code:
> {noformat}
> import scala.reflect.runtime.universe.TypeTag
>
> import org.apache.spark.sql.{Encoder, Row}
> import org.apache.spark.sql.catalyst.encoders.ExpressionEncoder
> import org.apache.spark.sql.expressions.Aggregator
>
> // assumes a SparkSession named `spark` is in scope (e.g. in spark-shell)
> import spark.implicits._
>
> // aggregator that ignores its input and always emits the fixed `result`
> class ComplexResultAgg[B: TypeTag, C: TypeTag](val zero: B, result: C) extends Aggregator[Row, B, C] {
>   override def reduce(b: B, input: Row): B = b
>   override def merge(b1: B, b2: B): B = b1
>   override def finish(reduction: B): C = result
>   override def bufferEncoder: Encoder[B] = ExpressionEncoder[B]()
>   override def outputEncoder: Encoder[C] = ExpressionEncoder[C]()
> }
>
> case class Struct2(d: Double = 0.0, s1: Seq[Double] = Seq.empty, s2: Seq[Long] = Seq.empty)
> case class Struct3(a: Struct2 = Struct2(), b: Struct2 = Struct2())
>
> val df1 = Seq(("a", "aa"), ("a", "aa"), ("b", "b"), ("b", null)).toDF("x", "y").groupBy("x").agg(
>   new ComplexResultAgg("boo", Struct3()).toColumn
> )
> df1.printSchema
> df1.show
> the result is:
> {noformat}
> [info]   Cause: java.util.concurrent.ExecutionException: java.lang.Exception: failed to compile: org.codehaus.commons.compiler.CompileException: File 'generated.java', Line 33, Column 12: Expression "isNull1" is not an rvalue
> [info] /* 001 */ public java.lang.Object generate(Object[] references) {
> [info] /* 002 */   return new SpecificMutableProjection(references);
> [info] /* 003 */ }
> [info] /* 004 */
> [info] /* 005 */ class SpecificMutableProjection extends org.apache.spark.sql.catalyst.expressions.codegen.BaseMutableProjection {
> [info] /* 006 */
> [info] /* 007 */   private Object[] references;
> [info] /* 008 */   private MutableRow mutableRow;
> [info] /* 009 */   private Object[] values;
> [info] /* 010 */   private java.lang.String errMsg;
> [info] /* 011 */   private Object[] values1;
> [info] /* 012 */   private java.lang.String errMsg1;
> [info] /* 013 */   private boolean[] argIsNulls;
> [info] /* 014 */   private scala.collection.Seq argValue;
> [info] /* 015 */   private java.lang.String errMsg2;
> [info] /* 016 */   private boolean[] argIsNulls1;
> [info] /* 017 */   private scala.collection.Seq argValue1;
> [info] /* 018 */   private java.lang.String errMsg3;
> [info] /* 019 */   private java.lang.String errMsg4;
> [info] /* 020 */   private Object[] values2;
> [info] /* 021 */   private java.lang.String errMsg5;
> [info] /* 022 */   private boolean[] argIsNulls2;
> [info] /* 023 */   private scala.collection.Seq argValue2;
> [info] /* 024 */   private java.lang.String errMsg6;
> [info] /* 025 */   private boolean[] argIsNulls3;
> [info] /* 026 */   private scala.collection.Seq argValue3;
> [info] /* 027 */   private java.lang.String errMsg7;
> [info] /* 028 */   private boolean isNull_0;
> [info] /* 029 */   private InternalRow value_0;
> [info] /* 030 */
> [info] /* 031 */   private void apply_1(InternalRow i) {
> [info] /* 032 */
> [info] /* 033 */     if (isNull1) {
> [info] /* 034 */       throw new RuntimeException(errMsg3);
> [info] /* 035 */     }
> [info] /* 036 */
> [info] /* 037 */     boolean isNull24 = false;
> [info] /* 038 */     final com.tresata.spark.sql.Struct2 value24 = isNull24 ? null : (com.tresata.spark.sql.Struct2) value1.a();
> [info] /* 039 */     isNull24 = value24 == null;
> [info] /* 040 */
> [info] /* 041 */     boolean isNull23 = isNull24;
> [info] /* 042 */     final scala.collection.Seq value23 = isNull23 ? null : (scala.collection.Seq) value24.s2();
> [info] /* 043 */     isNull23 = value23 == null;
> [info] /* 044 */     argIsNulls1[0] = isNull23;
> [info] /* 045 */     argValue1 = value23;
> [info] /* 046 */
> [info] /* 047 */
> [info] /* 048 */
> [info] /* 049 */     boolean isNull22 = false;
> [info] /* 050 */     for (int idx = 0; idx < 1; idx++) {
> [info] /* 051 */       if (argIsNulls1[idx]) { isNull22 = true; break; }
> [info] /* 052 */     }
> [info] /* 053 */
> [info] /* 054 */     final ArrayData value22 = isNull22 ? null : new org.apache.spark.sql.catalyst.util.GenericArrayData(argValue1);
> [info] /* 055 */     if (isNull22) {
> [info] /* 056 */       values1[2] = null;
> [info] /* 057 */     } else {
> [info] /* 058 */       values1[2] = value22;
> [info] /* 059 */     }
> [info] /* 060 */   }
> [info] /* 061 */
> [info] /* 062 */
> [info] /* 063 */   private void apply1_1(InternalRow i) {
> [info] /* 064 */
> [info] /* 065 */     if (isNull1) {
> [info] /* 066 */       throw new RuntimeException(errMsg7);
> [info] /* 067 */     }
> [info] /* 068 */
> [info] /* 069 */     boolean isNull41 = false;
> [info] /* 070 */     final com.tresata.spark.sql.Struct2 value41 = isNull41 ? null : (com.tresata.spark.sql.Struct2) value1.b();
> [info] /* 071 */     isNull41 = value41 == null;
> [info] /* 072 */
> [info] /* 073 */     boolean isNull40 = isNull41;
> [info] /* 074 */     final scala.collection.Seq value40 = isNull40 ? null : (scala.collection.Seq) value41.s2();
> [info] /* 075 */     isNull40 = value40 == null;
> [info] /* 076 */     argIsNulls3[0] = isNull40;
> [info] /* 077 */     argValue3 = value40;
> [info] /* 078 */
> [info] /* 079 */
> [info] /* 080 */
> [info] /* 081 */     boolean isNull39 = false;
> [info] /* 082 */     for (int idx = 0; idx < 1; idx++) {
> [info] /* 083 */       if (argIsNulls3[idx]) { isNull39 = true; break; }
> [info] /* 084 */     }
> [info] /* 085 */
> [info] /* 086 */     final ArrayData value39 = isNull39 ? null : new org.apache.spark.sql.catalyst.util.GenericArrayData(argValue3);
> [info] /* 087 */     if (isNull39) {
> [info] /* 088 */       values2[2] = null;
> [info] /* 089 */     } else {
> [info] /* 090 */       values2[2] = value39;
> [info] /* 091 */     }
> [info] /* 092 */   }
> [info] /* 093 */
> [info] /* 094 */
> [info] /* 095 */   private void apply_0(InternalRow i) {
> [info] /* 096 */
> [info] /* 097 */     if (isNull1) {
> [info] /* 098 */       throw new RuntimeException(errMsg1);
> [info] /* 099 */     }
> [info] /* 100 */
> [info] /* 101 */     boolean isNull16 = false;
> [info] /* 102 */     final com.tresata.spark.sql.Struct2 value16 = isNull16 ? null : (com.tresata.spark.sql.Struct2) value1.a();
> [info] /* 103 */     isNull16 = value16 == null;
> [info] /* 104 */
> [info] /* 105 */     boolean isNull15 = isNull16;
> [info] /* 106 */     final double value15 = isNull15 ? -1.0 : value16.d();
> [info] /* 107 */     if (isNull15) {
> [info] /* 108 */       values1[0] = null;
> [info] /* 109 */     } else {
> [info] /* 110 */       values1[0] = value15;
> [info] /* 111 */     }
> [info] /* 112 */     if (isNull1) {
> [info] /* 113 */       throw new RuntimeException(errMsg2);
> [info] /* 114 */     }
> [info] /* 115 */
> [info] /* 116 */     boolean isNull20 = false;
> [info] /* 117 */     final com.tresata.spark.sql.Struct2 value20 = isNull20 ? null : (com.tresata.spark.sql.Struct2) value1.a();
> [info] /* 118 */     isNull20 = value20 == null;
> [info] /* 119 */
> [info] /* 120 */     boolean isNull19 = isNull20;
> [info] /* 121 */     final scala.collection.Seq value19 = isNull19 ? null : (scala.collection.Seq) value20.s1();
> [info] /* 122 */     isNull19 = value19 == null;
> [info] /* 123 */     argIsNulls[0] = isNull19;
> [info] /* 124 */     argValue = value19;
> [info] /* 125 */
> [info] /* 126 */
> [info] /* 127 */
> [info] /* 128 */     boolean isNull18 = false;
> [info] /* 129 */     for (int idx = 0; idx < 1; idx++) {
> [info] /* 130 */       if (argIsNulls[idx]) { isNull18 = true; break; }
> [info] /* 131 */     }
> [info] /* 132 */
> [info] /* 133 */     final ArrayData value18 = isNull18 ? null : new org.apache.spark.sql.catalyst.util.GenericArrayData(argValue);
> [info] /* 134 */     if (isNull18) {
> [info] /* 135 */       values1[1] = null;
> [info] /* 136 */     } else {
> [info] /* 137 */       values1[1] = value18;
> [info] /* 138 */     }
> [info] /* 139 */   }
> [info] /* 140 */
> [info] /* 141 */
> [info] /* 142 */   private void apply1_0(InternalRow i) {
> [info] /* 143 */
> [info] /* 144 */     if (isNull1) {
> [info] /* 145 */       throw new RuntimeException(errMsg5);
> [info] /* 146 */     }
> [info] /* 147 */
> [info] /* 148 */     boolean isNull33 = false;
> [info] /* 149 */     final com.tresata.spark.sql.Struct2 value33 = isNull33 ? null : (com.tresata.spark.sql.Struct2) value1.b();
> [info] /* 150 */     isNull33 = value33 == null;
> [info] /* 151 */
> [info] /* 152 */     boolean isNull32 = isNull33;
> [info] /* 153 */     final double value32 = isNull32 ? -1.0 : value33.d();
> [info] /* 154 */     if (isNull32) {
> [info] /* 155 */       values2[0] = null;
> [info] /* 156 */     } else {
> [info] /* 157 */       values2[0] = value32;
> [info] /* 158 */     }
> [info] /* 159 */     if (isNull1) {
> [info] /* 160 */       throw new RuntimeException(errMsg6);
> [info] /* 161 */     }
> [info] /* 162 */
> [info] /* 163 */     boolean isNull37 = false;
> [info] /* 164 */     final com.tresata.spark.sql.Struct2 value37 = isNull37 ? null : (com.tresata.spark.sql.Struct2) value1.b();
> [info] /* 165 */     isNull37 = value37 == null;
> [info] /* 166 */
> [info] /* 167 */     boolean isNull36 = isNull37;
> [info] /* 168 */     final scala.collection.Seq value36 = isNull36 ? null : (scala.collection.Seq) value37.s1();
> [info] /* 169 */     isNull36 = value36 == null;
> [info] /* 170 */     argIsNulls2[0] = isNull36;
> [info] /* 171 */     argValue2 = value36;
> [info] /* 172 */
> [info] /* 173 */
> [info] /* 174 */
> [info] /* 175 */     boolean isNull35 = false;
> [info] /* 176 */     for (int idx = 0; idx < 1; idx++) {
> [info] /* 177 */       if (argIsNulls2[idx]) { isNull35 = true; break; }
> [info] /* 178 */     }
> [info] /* 179 */
> [info] /* 180 */     final ArrayData value35 = isNull35 ? null : new org.apache.spark.sql.catalyst.util.GenericArrayData(argValue2);
> [info] /* 181 */     if (isNull35) {
> [info] /* 182 */       values2[1] = null;
> [info] /* 183 */     } else {
> [info] /* 184 */       values2[1] = value35;
> [info] /* 185 */     }
> [info] /* 186 */   }
> [info] /* 187 */
> [info] /* 188 */
> [info] /* 189 */   public SpecificMutableProjection(Object[] references) {
> [info] /* 190 */     this.references = references;
> [info] /* 191 */     mutableRow = new org.apache.spark.sql.catalyst.expressions.GenericMutableRow(1);
> [info] /* 192 */     this.values = null;
> [info] /* 193 */     this.errMsg = (java.lang.String) references[1];
> [info] /* 194 */     this.values1 = null;
> [info] /* 195 */     this.errMsg1 = (java.lang.String) references[2];
> [info] /* 196 */     argIsNulls = new boolean[1];
> [info] /* 197 */
> [info] /* 198 */     this.errMsg2 = (java.lang.String) references[3];
> [info] /* 199 */     argIsNulls1 = new boolean[1];
> [info] /* 200 */
> [info] /* 201 */     this.errMsg3 = (java.lang.String) references[4];
> [info] /* 202 */     this.errMsg4 = (java.lang.String) references[5];
> [info] /* 203 */     this.values2 = null;
> [info] /* 204 */     this.errMsg5 = (java.lang.String) references[6];
> [info] /* 205 */     argIsNulls2 = new boolean[1];
> [info] /* 206 */
> [info] /* 207 */     this.errMsg6 = (java.lang.String) references[7];
> [info] /* 208 */     argIsNulls3 = new boolean[1];
> [info] /* 209 */
> [info] /* 210 */     this.errMsg7 = (java.lang.String) references[8];
> [info] /* 211 */     this.isNull_0 = true;
> [info] /* 212 */     this.value_0 = null;
> [info] /* 213 */   }
> [info] /* 214 */
> [info] /* 215 */   public org.apache.spark.sql.catalyst.expressions.codegen.BaseMutableProjection target(MutableRow row) {
> [info] /* 216 */     mutableRow = row;
> [info] /* 217 */     return this;
> [info] /* 218 */   }
> [info] /* 219 */
> [info] /* 220 */   /* Provide immutable access to the last projected row. */
> [info] /* 221 */   public InternalRow currentValue() {
> [info] /* 222 */     return (InternalRow) mutableRow;
> [info] /* 223 */   }
> [info] /* 224 */
> [info] /* 225 */   public java.lang.Object apply(java.lang.Object _i) {
> [info] /* 226 */     InternalRow i = (InternalRow) _i;
> [info] /* 227 */
> [info] /* 228 */
> [info] /* 229 */
> [info] /* 230 */     Object obj = ((Expression) references[0]).eval(null);
> [info] /* 231 */     org.apache.spark.sql.expressions.Aggregator value2 = (org.apache.spark.sql.expressions.Aggregator) obj;
> [info] /* 232 */
> [info] /* 233 */     boolean isNull4 = i.isNullAt(0);
> [info] /* 234 */     UTF8String value4 = isNull4 ? null : (i.getUTF8String(0));
> [info] /* 235 */
> [info] /* 236 */     boolean isNull3 = isNull4;
> [info] /* 237 */     final java.lang.String value3 = isNull3 ? null : (java.lang.String) value4.toString();
> [info] /* 238 */     isNull3 = value3 == null;
> [info] /* 239 */     boolean isNull1 = false || isNull3;
> [info] /* 240 */     final com.tresata.spark.sql.Struct3 value1 = isNull1 ? null : (com.tresata.spark.sql.Struct3) value2.finish(value3);
> [info] /* 241 */     isNull1 = value1 == null;
> [info] /* 242 */
> [info] /* 243 */     boolean isNull5 = false;
> [info] /* 244 */     InternalRow value5 = null;
> [info] /* 245 */     if (!false && isNull1) {
> [info] /* 246 */
> [info] /* 247 */       final InternalRow value7 = null;
> [info] /* 248 */       isNull5 = true;
> [info] /* 249 */       value5 = value7;
> [info] /* 250 */     } else {
> [info] /* 251 */
> [info] /* 252 */       boolean isNull8 = false;
> [info] /* 253 */       this.values = new Object[2];
> [info] /* 254 */       if (isNull1) {
> [info] /* 255 */         throw new RuntimeException(errMsg);
> [info] /* 256 */       }
> [info] /* 257 */
> [info] /* 258 */       boolean isNull11 = false;
> [info] /* 259 */       final com.tresata.spark.sql.Struct2 value11 = isNull11 ? null : (com.tresata.spark.sql.Struct2) value1.a();
> [info] /* 260 */       isNull11 = value11 == null;
> [info] /* 261 */       boolean isNull9 = false;
> [info] /* 262 */       InternalRow value9 = null;
> [info] /* 263 */       if (!false && isNull11) {
> [info] /* 264 */
> [info] /* 265 */         final InternalRow value13 = null;
> [info] /* 266 */         isNull9 = true;
> [info] /* 267 */         value9 = value13;
> [info] /* 268 */       } else {
> [info] /* 269 */
> [info] /* 270 */         boolean isNull14 = false;
> [info] /* 271 */         values1 = new Object[3];apply_0(i);
> [info] /* 272 */         apply_1(i);
> [info] /* 273 */         final InternalRow value14 = new org.apache.spark.sql.catalyst.expressions.GenericInternalRow(values1);
> [info] /* 274 */         this.values1 = null;
> [info] /* 275 */         isNull9 = isNull14;
> [info] /* 276 */         value9 = value14;
> [info] /* 277 */       }
> [info] /* 278 */       if (isNull9) {
> [info] /* 279 */         values[0] = null;
> [info] /* 280 */       } else {
> [info] /* 281 */         values[0] = value9;
> [info] /* 282 */       }
> [info] /* 283 */       if (isNull1) {
> [info] /* 284 */         throw new RuntimeException(errMsg4);
> [info] /* 285 */       }
> [info] /* 286 */
> [info] /* 287 */       boolean isNull28 = false;
> [info] /* 288 */       final com.tresata.spark.sql.Struct2 value28 = isNull28 ? null : (com.tresata.spark.sql.Struct2) value1.b();
> [info] /* 289 */       isNull28 = value28 == null;
> [info] /* 290 */       boolean isNull26 = false;
> [info] /* 291 */       InternalRow value26 = null;
> [info] /* 292 */       if (!false && isNull28) {
> [info] /* 293 */
> [info] /* 294 */         final InternalRow value30 = null;
> [info] /* 295 */         isNull26 = true;
> [info] /* 296 */         value26 = value30;
> [info] /* 297 */       } else {
> [info] /* 298 */
> [info] /* 299 */         boolean isNull31 = false;
> [info] /* 300 */         values2 = new Object[3];apply1_0(i);
> [info] /* 301 */         apply1_1(i);
> [info] /* 302 */         final InternalRow value31 = new org.apache.spark.sql.catalyst.expressions.GenericInternalRow(values2);
> [info] /* 303 */         this.values2 = null;
> [info] /* 304 */         isNull26 = isNull31;
> [info] /* 305 */         value26 = value31;
> [info] /* 306 */       }
> [info] /* 307 */       if (isNull26) {
> [info] /* 308 */         values[1] = null;
> [info] /* 309 */       } else {
> [info] /* 310 */         values[1] = value26;
> [info] /* 311 */       }
> [info] /* 312 */       final InternalRow value8 = new org.apache.spark.sql.catalyst.expressions.GenericInternalRow(values);
> [info] /* 313 */       this.values = null;
> [info] /* 314 */       isNull5 = isNull8;
> [info] /* 315 */       value5 = value8;
> [info] /* 316 */     }
> [info] /* 317 */     this.isNull_0 = isNull5;
> [info] /* 318 */     this.value_0 = value5;
> [info] /* 319 */
> [info] /* 320 */     // copy all the results into MutableRow
> [info] /* 321 */
> [info] /* 322 */     if (!this.isNull_0) {
> [info] /* 323 */       mutableRow.update(0, this.value_0);
> [info] /* 324 */     } else {
> [info] /* 325 */       mutableRow.setNullAt(0);
> [info] /* 326 */     }
> [info] /* 327 */
> [info] /* 328 */     return mutableRow;
> [info] /* 329 */   }
> [info] /* 330 */ }
> {noformat}



--
This message was sent by Atlassian JIRA
(v6.4.14#64029)
