[ https://issues.apache.org/jira/browse/SPARK-36862?page=com.atlassian.jira.plugin.system.issuetabpanels:comment-tabpanel&focusedCommentId=17599013#comment-17599013 ]

Lukas Waldmann edited comment on SPARK-36862 at 9/1/22 2:55 PM:
----------------------------------------------------------------

I managed to reproduce the issue in my environment. The problem is on line 192 of the generated code: the parameter name in the method header contains an array index (see the short sketch after the generated code below).

Here is the generated code:
{code:java}
/* 001 */ public Object generate(Object[] references) {
/* 002 */ return new GeneratedIteratorForCodegenStage636(references);
/* 003 */ }
/* 004 */
/* 005 */ // codegenStageId=636
/* 006 */ final class GeneratedIteratorForCodegenStage636 extends 
org.apache.spark.sql.execution.BufferedRowIterator {
/* 007 */ private Object[] references;
/* 008 */ private scala.collection.Iterator[] inputs;
/* 009 */ private scala.collection.Iterator smj_leftInput_0;
/* 010 */ private scala.collection.Iterator smj_rightInput_0;
/* 011 */ private InternalRow smj_leftRow_0;
/* 012 */ private InternalRow smj_rightRow_0;
/* 013 */ private boolean smj_globalIsNull_0;
/* 014 */ private boolean smj_globalIsNull_1;
/* 015 */ private double smj_value_27;
/* 016 */ private 
org.apache.spark.sql.execution.ExternalAppendOnlyUnsafeRowArray smj_matches_0;
/* 017 */ private double smj_value_28;
/* 018 */ private boolean smj_isNull_25;
/* 019 */ private boolean smj_isNull_26;
/* 020 */ private boolean smj_isNull_27;
/* 021 */ private boolean smj_isNull_28;
/* 022 */ private boolean smj_isNull_29;
/* 023 */ private boolean smj_isNull_30;
/* 024 */ private boolean project_subExprIsNull_0;
/* 025 */ private 
org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[] 
smj_mutableStateArray_2 = new 
org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[2];
/* 026 */ private java.util.regex.Pattern[] project_mutableStateArray_0 = new 
java.util.regex.Pattern[1];
/* 027 */ private Decimal[] smj_mutableStateArray_1 = new Decimal[1];
/* 028 */ private String[] project_mutableStateArray_1 = new String[1];
/* 029 */ private UTF8String[] smj_mutableStateArray_0 = new UTF8String[7];
/* 030 */
/* 031 */ public GeneratedIteratorForCodegenStage636(Object[] references) {
/* 032 */ this.references = references;
/* 033 */ }
/* 034 */
/* 035 */ public void init(int index, scala.collection.Iterator[] inputs) {
/* 036 */ partitionIndex = index;
/* 037 */ this.inputs = inputs;
/* 038 */ smj_leftInput_0 = inputs[0];
/* 039 */ smj_rightInput_0 = inputs[1];
/* 040 */
/* 041 */ smj_matches_0 = new 
org.apache.spark.sql.execution.ExternalAppendOnlyUnsafeRowArray(2147483632, 
2147483647);
/* 042 */ smj_mutableStateArray_2[0] = new 
org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter(6, 192);
/* 043 */ smj_mutableStateArray_2[1] = new 
org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter(6, 192);
/* 044 */
/* 045 */ }
/* 046 */
/* 047 */ private boolean smj_findNextOuterJoinRows_0(
/* 048 */ scala.collection.Iterator leftIter,
/* 049 */ scala.collection.Iterator rightIter) {
/* 050 */ smj_leftRow_0 = null;
/* 051 */ int comp = 0;
/* 052 */ while (smj_leftRow_0 == null) {
/* 053 */ if (!leftIter.hasNext()) return false;
/* 054 */ smj_leftRow_0 = (InternalRow) leftIter.next();
/* 055 */ UTF8String smj_value_22 = smj_If_0(smj_leftRow_0);
/* 056 */ boolean smj_isNull_2 = smj_globalIsNull_1;
/* 057 */ double smj_value_2 = -1.0;
/* 058 */ if (!smj_globalIsNull_1) {
/* 059 */ final String smj_doubleStr_0 = smj_value_22.toString();
/* 060 */ try {
/* 061 */ smj_value_2 = Double.valueOf(smj_doubleStr_0);
/* 062 */ } catch (java.lang.NumberFormatException e) {
/* 063 */ final Double d = (Double) 
Cast.processFloatingPointSpecialLiterals(smj_doubleStr_0, false);
/* 064 */ if (d == null) {
/* 065 */ smj_isNull_2 = true;
/* 066 */ } else {
/* 067 */ smj_value_2 = d.doubleValue();
/* 068 */ }
/* 069 */ }
/* 070 */ }
/* 071 */ boolean smj_isNull_1 = smj_isNull_2;
/* 072 */ double smj_value_1 = -1.0;
/* 073 */
/* 074 */ if (!smj_isNull_2) {
/* 075 */ if (Double.isNaN(smj_value_2)) {
/* 076 */ smj_value_1 = Double.NaN;
/* 077 */ } else if (smj_value_2 == -0.0d) {
/* 078 */ smj_value_1 = 0.0d;
/* 079 */ } else {
/* 080 */ smj_value_1 = smj_value_2;
/* 081 */ }
/* 082 */
/* 083 */ }
/* 084 */ if (smj_isNull_1) {
/* 085 */ if (!smj_matches_0.isEmpty()) {
/* 086 */ smj_matches_0.clear();
/* 087 */ }
/* 088 */ return true;
/* 089 */ }
/* 090 */ if (!smj_matches_0.isEmpty()) {
/* 091 */ comp = 0;
/* 092 */ if (comp == 0) {
/* 093 */ comp = 
org.apache.spark.sql.catalyst.util.SQLOrderingUtil.compareDoubles(smj_value_1, 
smj_value_28);
/* 094 */ }
/* 095 */
/* 096 */ if (comp == 0) {
/* 097 */ return true;
/* 098 */ }
/* 099 */ smj_matches_0.clear();
/* 100 */ }
/* 101 */
/* 102 */ do {
/* 103 */ if (smj_rightRow_0 == null) {
/* 104 */ if (!rightIter.hasNext()) {
/* 105 */ if (!smj_matches_0.isEmpty()) {
/* 106 */ smj_value_28 = smj_value_1;
/* 107 */ }
/* 108 */ return true;
/* 109 */ }
/* 110 */ smj_rightRow_0 = (InternalRow) rightIter.next();
/* 111 */ Decimal smj_value_26 = smj_rightRow_0.getDecimal(1, 38, 0);
/* 112 */ boolean smj_isNull_23 = false;
/* 113 */ double smj_value_25 = -1.0;
/* 114 */ if (!false) {
/* 115 */ smj_value_25 = smj_value_26.toDouble();
/* 116 */ }
/* 117 */ double smj_value_24 = -1.0;
/* 118 */
/* 119 */ if (Double.isNaN(smj_value_25)) {
/* 120 */ smj_value_24 = Double.NaN;
/* 121 */ } else if (smj_value_25 == -0.0d) {
/* 122 */ smj_value_24 = 0.0d;
/* 123 */ } else {
/* 124 */ smj_value_24 = smj_value_25;
/* 125 */ }
/* 126 */ if (false) {
/* 127 */ smj_rightRow_0 = null;
/* 128 */ continue;
/* 129 */ }
/* 130 */ smj_value_27 = smj_value_24;
/* 131 */ }
/* 132 */
/* 133 */ comp = 0;
/* 134 */ if (comp == 0) {
/* 135 */ comp = 
org.apache.spark.sql.catalyst.util.SQLOrderingUtil.compareDoubles(smj_value_1, 
smj_value_27);
/* 136 */ }
/* 137 */
/* 138 */ if (comp > 0) {
/* 139 */ smj_rightRow_0 = null;
/* 140 */ } else if (comp < 0) {
/* 141 */ if (!smj_matches_0.isEmpty()) {
/* 142 */ smj_value_28 = smj_value_1;
/* 143 */ }
/* 144 */ return true;
/* 145 */ } else {
/* 146 */ smj_matches_0.add((UnsafeRow) smj_rightRow_0);
/* 147 */ smj_rightRow_0 = null;
/* 148 */ }
/* 149 */ } while (true);
/* 150 */ }
/* 151 */ return false; // unreachable
/* 152 */ }
/* 153 */
/* 154 */ private UTF8String smj_If_0(InternalRow smj_leftRow_0) {
/* 155 */ boolean smj_isNull_5 = false;
/* 156 */ boolean smj_isNull_6 = true;
/* 157 */ ArrayData smj_value_6 = null;
/* 158 */ boolean smj_isNull_7 = smj_leftRow_0.isNullAt(2);
/* 159 */ UTF8String smj_value_7 = smj_isNull_7 ?
/* 160 */ null : (smj_leftRow_0.getUTF8String(2));
/* 161 */ if (!smj_isNull_7) {
/* 162 */ smj_isNull_6 = false; // resultCode could change nullability.
/* 163 */ smj_value_6 = new 
org.apache.spark.sql.catalyst.util.GenericArrayData(smj_value_7.split(((UTF8String)
 references[0] /* literal */),-1));
/* 164 */
/* 165 */ }
/* 166 */ int smj_value_5 = smj_isNull_6 ? -1 :
/* 167 */ (smj_value_6).numElements();
/* 168 */
/* 169 */ boolean smj_value_4 = false;
/* 170 */ smj_value_4 = smj_value_5 > 2;
/* 171 */ boolean smj_isNull_3 = false;
/* 172 */ UTF8String smj_value_3 = null;
/* 173 */ if (!false && smj_value_4) {
/* 174 */ boolean smj_isNull_11 = true;
/* 175 */ UTF8String smj_value_11 = null;
/* 176 */ UTF8String smj_value_18 = smj_GetArrayItem_0(smj_leftRow_0);
/* 177 */ if (!smj_globalIsNull_0) {
/* 178 */ smj_isNull_11 = false; // resultCode could change nullability.
/* 179 */ smj_value_11 = smj_value_18.substringSQL(1, 10);
/* 180 */
/* 181 */ }
/* 182 */ smj_isNull_3 = smj_isNull_11;
/* 183 */ smj_value_3 = smj_value_11;
/* 184 */ } else {
/* 185 */ smj_isNull_3 = true;
/* 186 */ smj_value_3 = ((UTF8String)null);
/* 187 */ }
/* 188 */ smj_globalIsNull_1 = smj_isNull_3;
/* 189 */ return smj_value_3;
/* 190 */ }
/* 191 */
/* 192 */ private UTF8String 
project_subExpr_0(org.apache.spark.unsafe.types.UTF8String 
smj_mutableStateArray_0[3], boolean smj_isNull_28) {
/* 193 */ boolean project_isNull_0 = true;
/* 194 */ UTF8String project_value_0 = null;
/* 195 */
/* 196 */ if (!smj_isNull_28) {
/* 197 */ project_isNull_0 = false; // resultCode could change nullability.
/* 198 */
/* 199 */ if (!((UTF8String) references[3] /* literal 
*/).equals(smj_mutableStateArray_0[5])) {
/* 200 */ // regex value changed
/* 201 */ smj_mutableStateArray_0[5] = ((UTF8String) references[3] /* literal 
*/).clone();
/* 202 */ project_mutableStateArray_0[0] = 
java.util.regex.Pattern.compile(smj_mutableStateArray_0[5].toString());
/* 203 */ }
/* 204 */ if (!((UTF8String) references[4] /* literal 
*/).equals(smj_mutableStateArray_0[6])) {
/* 205 */ // replacement string changed
/* 206 */ smj_mutableStateArray_0[6] = ((UTF8String) references[4] /* literal 
*/).clone();
/* 207 */ project_mutableStateArray_1[0] = 
smj_mutableStateArray_0[6].toString();
/* 208 */ }
/* 209 */ String project_source_0 = smj_mutableStateArray_0[3].toString();
/* 210 */ int project_position_0 = 1 - 1;
/* 211 */ if (project_position_0 < project_source_0.length()) {
/* 212 */ java.lang.StringBuffer project_termResult_0 = new 
java.lang.StringBuffer();
/* 213 */ java.util.regex.Matcher project_matcher_0 = 
project_mutableStateArray_0[0].matcher(project_source_0);
/* 214 */ project_matcher_0.region(project_position_0, 
project_source_0.length());
/* 215 */
/* 216 */ while (project_matcher_0.find()) {
/* 217 */ project_matcher_0.appendReplacement(project_termResult_0, 
project_mutableStateArray_1[0]);
/* 218 */ }
/* 219 */ project_matcher_0.appendTail(project_termResult_0);
/* 220 */ project_value_0 = 
UTF8String.fromString(project_termResult_0.toString());
/* 221 */ project_termResult_0 = null;
/* 222 */ } else {
/* 223 */ project_value_0 = smj_mutableStateArray_0[3];
/* 224 */ }
/* 225 */ project_isNull_0 = false;
/* 226 */
/* 227 */ }
/* 228 */ project_subExprIsNull_0 = project_isNull_0;
/* 229 */ return project_value_0;
/* 230 */ }
/* 231 */
/* 232 */ private UTF8String smj_GetArrayItem_0(InternalRow smj_leftRow_0) {
/* 233 */ boolean smj_isNull_12 = true;
/* 234 */ UTF8String smj_value_12 = null;
/* 235 */ boolean smj_isNull_13 = true;
/* 236 */ ArrayData smj_value_13 = null;
/* 237 */ boolean smj_isNull_14 = smj_leftRow_0.isNullAt(2);
/* 238 */ UTF8String smj_value_14 = smj_isNull_14 ?
/* 239 */ null : (smj_leftRow_0.getUTF8String(2));
/* 240 */ if (!smj_isNull_14) {
/* 241 */ smj_isNull_13 = false; // resultCode could change nullability.
/* 242 */ smj_value_13 = new 
org.apache.spark.sql.catalyst.util.GenericArrayData(smj_value_14.split(((UTF8String)
 references[1] /* literal */),-1));
/* 243 */
/* 244 */ }
/* 245 */ if (!smj_isNull_13) {
/* 246 */ smj_isNull_12 = false; // resultCode could change nullability.
/* 247 */
/* 248 */ final int smj_index_0 = (int) 2;
/* 249 */ if (smj_index_0 >= smj_value_13.numElements() || smj_index_0 < 0) {
/* 250 */ smj_isNull_12 = true;
/* 251 */ } else if (smj_value_13.isNullAt(smj_index_0)) {
/* 252 */ smj_isNull_12 = true;
/* 253 */ }
/* 254 */ else {
/* 255 */ smj_value_12 = smj_value_13.getUTF8String(smj_index_0);
/* 256 */ }
/* 257 */
/* 258 */ }
/* 259 */ smj_globalIsNull_0 = smj_isNull_12;
/* 260 */ return smj_value_12;
/* 261 */ }
/* 262 */
/* 263 */ protected void processNext() throws java.io.IOException {
/* 264 */ while (smj_findNextOuterJoinRows_0(smj_leftInput_0, 
smj_rightInput_0)) {
/* 265 */ boolean smj_loaded_0 = false;
/* 266 */ smj_isNull_25 = smj_leftRow_0.isNullAt(0);
/* 267 */ smj_mutableStateArray_0[0] = smj_isNull_25 ? null : 
(smj_leftRow_0.getUTF8String(0));
/* 268 */ smj_isNull_26 = smj_leftRow_0.isNullAt(1);
/* 269 */ smj_mutableStateArray_0[1] = smj_isNull_26 ? null : 
(smj_leftRow_0.getUTF8String(1));
/* 270 */ smj_isNull_27 = smj_leftRow_0.isNullAt(2);
/* 271 */ smj_mutableStateArray_0[2] = smj_isNull_27 ? null : 
(smj_leftRow_0.getUTF8String(2));
/* 272 */ smj_isNull_28 = smj_leftRow_0.isNullAt(3);
/* 273 */ smj_mutableStateArray_0[3] = smj_isNull_28 ? null : 
(smj_leftRow_0.getUTF8String(3));
/* 274 */ scala.collection.Iterator<UnsafeRow> smj_iterator_0 = 
smj_matches_0.generateIterator();
/* 275 */ while (smj_iterator_0.hasNext()) {
/* 276 */ InternalRow smj_rightRow_1 = (InternalRow) smj_iterator_0.next();
/* 277 */ smj_isNull_29 = smj_rightRow_1.isNullAt(0);
/* 278 */ smj_mutableStateArray_0[4] = smj_isNull_29 ? null : 
(smj_rightRow_1.getUTF8String(0));
/* 279 */ smj_isNull_30 = false;
/* 280 */ smj_mutableStateArray_1[0] = smj_rightRow_1.getDecimal(1, 38, 0);
/* 281 */
/* 282 */ smj_loaded_0 = true;
/* 283 */ smj_writeJoinRows_0();
/* 284 */ }
/* 285 */ if (!smj_loaded_0) {
/* 286 */ smj_isNull_29 = true;
/* 287 */ smj_isNull_30 = true;
/* 288 */ smj_writeJoinRows_0();
/* 289 */ }
/* 290 */ if (shouldStop()) return;
/* 291 */ }
/* 292 */ ((org.apache.spark.sql.execution.joins.SortMergeJoinExec) 
references[5] /* plan */).cleanupResources();
/* 293 */ }
/* 294 */
/* 295 */ private void smj_writeJoinRows_0() throws java.io.IOException {
/* 296 */ ((org.apache.spark.sql.execution.metric.SQLMetric) references[2] /* 
numOutputRows */).add(1);
/* 297 */
/* 298 */ // common sub-expressions
/* 299 */
/* 300 */ UTF8String project_subExprValue_0 = 
project_subExpr_0(smj_mutableStateArray_0[3], smj_isNull_28);
/* 301 */
/* 302 */ smj_mutableStateArray_2[1].reset();
/* 303 */
/* 304 */ smj_mutableStateArray_2[1].zeroOutNullBytes();
/* 305 */
/* 306 */ if (smj_isNull_25) {
/* 307 */ smj_mutableStateArray_2[1].setNullAt(0);
/* 308 */ } else {
/* 309 */ smj_mutableStateArray_2[1].write(0, smj_mutableStateArray_0[0]);
/* 310 */ }
/* 311 */
/* 312 */ if (smj_isNull_26) {
/* 313 */ smj_mutableStateArray_2[1].setNullAt(1);
/* 314 */ } else {
/* 315 */ smj_mutableStateArray_2[1].write(1, smj_mutableStateArray_0[1]);
/* 316 */ }
/* 317 */
/* 318 */ if (project_subExprIsNull_0) {
/* 319 */ smj_mutableStateArray_2[1].setNullAt(2);
/* 320 */ } else {
/* 321 */ smj_mutableStateArray_2[1].write(2, project_subExprValue_0);
/* 322 */ }
/* 323 */
/* 324 */ if (project_subExprIsNull_0) {
/* 325 */ smj_mutableStateArray_2[1].setNullAt(3);
/* 326 */ } else {
/* 327 */ smj_mutableStateArray_2[1].write(3, project_subExprValue_0);
/* 328 */ }
/* 329 */
/* 330 */ if (smj_isNull_29) {
/* 331 */ smj_mutableStateArray_2[1].setNullAt(4);
/* 332 */ } else {
/* 333 */ smj_mutableStateArray_2[1].write(4, smj_mutableStateArray_0[4]);
/* 334 */ }
/* 335 */
/* 336 */ if (smj_isNull_25) {
/* 337 */ smj_mutableStateArray_2[1].setNullAt(5);
/* 338 */ } else {
/* 339 */ smj_mutableStateArray_2[1].write(5, smj_mutableStateArray_0[0]);
/* 340 */ }
/* 341 */ append((smj_mutableStateArray_2[1].getRow()).copy());
/* 342 */
/* 343 */ }
/* 344 */
/* 345 */ } {code}
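For context, a minimal standalone sketch (plain Java with illustrative names, not taken from Spark's codegen) of the language rule behind the failure: a formal parameter must be a plain identifier, so a header like the one on line 192, where the parameter is written as smj_mutableStateArray_0[3], stops Janino's parser at the '[' and yields the reported "')' expected instead of '['". A well-formed header names the parameter and passes the array element at the call site.
{code:java}
public class ParamNameSketch {
    private final String[] values = new String[7];

    // Does not compile -- this is what the generated header on line 192 amounts to:
    //     private String subExpr(String values[3], boolean isNull) { ... }
    // A formal parameter must be a simple identifier, so the parser fails at '['.

    // Well-formed equivalent (hypothetical names): the parameter gets a plain
    // identifier and the caller supplies the array element as the argument.
    private String subExpr(String value, boolean isNull) {
        return isNull ? null : value;
    }

    public String caller() {
        return subExpr(values[3], values[3] == null);
    }
}
{code}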
 


> ERROR CodeGenerator: failed to compile: 
> org.codehaus.commons.compiler.CompileException: File 'generated.java'
> -------------------------------------------------------------------------------------------------------------
>
>                 Key: SPARK-36862
>                 URL: https://issues.apache.org/jira/browse/SPARK-36862
>             Project: Spark
>          Issue Type: Bug
>          Components: Spark Submit, SQL
>    Affects Versions: 3.1.1
>         Environment: Spark 3.1.1 and Spark 3.1.2
> hadoop 3.2.1
>            Reporter: Magdalena Pilawska
>            Priority: Major
>
> Hi,
> I am getting the following error when running the spark-submit command:
> ERROR CodeGenerator: failed to compile: 
> org.codehaus.commons.compiler.CompileException: File 'generated.java', Line 
> 321, Column 103: ')' expected instead of '['
>  
> It fails when running a Spark SQL command on Delta Lake: 
> spark.sql(sqlTransformation)
> The template of sqlTransformation is as follows:
> MERGE INTO target_table AS d
>  USING source_table AS s 
>  on s.id = d.id
>  WHEN MATCHED AND d.hash_value <> s.hash_value
>  THEN UPDATE SET d.name = s.name, d.address = s.address
>  
> It is a permanent error for both *Spark 3.1.1* and *Spark 3.1.2*.
>  
> The same works fine with Spark 3.0.0.
>  
> Here is the full log:
> 2021-09-22 16:43:22,110 ERROR CodeGenerator: failed to compile: 
> org.codehaus.commons.compiler.CompileException: File 'generated.java', Line 
> 55, Column 103: ')' expected instead of '['2021-09-22 16:43:22,110 ERROR 
> CodeGenerator: failed to compile: 
> org.codehaus.commons.compiler.CompileException: File 'generated.java', Line 
> 55, Column 103: ')' expected instead of 
> '['org.codehaus.commons.compiler.CompileException: File 'generated.java', 
> Line 55, Column 103: ')' expected instead of '[' at 
> org.codehaus.janino.TokenStreamImpl.compileException(TokenStreamImpl.java:362)
>  at org.codehaus.janino.TokenStreamImpl.read(TokenStreamImpl.java:150) at 
> org.codehaus.janino.Parser.read(Parser.java:3703) at 
> org.codehaus.janino.Parser.parseFormalParameters(Parser.java:1622) at 
> org.codehaus.janino.Parser.parseMethodDeclarationRest(Parser.java:1518) at 
> org.codehaus.janino.Parser.parseClassBodyDeclaration(Parser.java:1028) at 
> org.codehaus.janino.Parser.parseClassBody(Parser.java:841) at 
> org.codehaus.janino.Parser.parseClassDeclarationRest(Parser.java:736) at 
> org.codehaus.janino.Parser.parseClassBodyDeclaration(Parser.java:941) at 
> org.codehaus.janino.ClassBodyEvaluator.cook(ClassBodyEvaluator.java:234) at 
> org.codehaus.janino.SimpleCompiler.cook(SimpleCompiler.java:205) at 
> org.codehaus.commons.compiler.Cookable.cook(Cookable.java:80) at 
> org.apache.spark.sql.catalyst.expressions.codegen.CodeGenerator$.org$apache$spark$sql$catalyst$expressions$codegen$CodeGenerator$$doCompile(CodeGenerator.scala:1427)
>  at 
> org.apache.spark.sql.catalyst.expressions.codegen.CodeGenerator$$anon$1.load(CodeGenerator.scala:1524)
>  at 
> org.apache.spark.sql.catalyst.expressions.codegen.CodeGenerator$$anon$1.load(CodeGenerator.scala:1521)
>  at 
> org.sparkproject.guava.cache.LocalCache$LoadingValueReference.loadFuture(LocalCache.java:3599)
>  at 
> org.sparkproject.guava.cache.LocalCache$Segment.loadSync(LocalCache.java:2379)
>  at 
> org.sparkproject.guava.cache.LocalCache$Segment.lockedGetOrLoad(LocalCache.java:2342)
>  at org.sparkproject.guava.cache.LocalCache$Segment.get(LocalCache.java:2257) 
> at org.sparkproject.guava.cache.LocalCache.get(LocalCache.java:4000) at 
> org.sparkproject.guava.cache.LocalCache.getOrLoad(LocalCache.java:4004) at 
> org.sparkproject.guava.cache.LocalCache$LocalLoadingCache.get(LocalCache.java:4874)
>  at 
> org.apache.spark.sql.catalyst.expressions.codegen.CodeGenerator$.compile(CodeGenerator.scala:1375)
>  at 
> org.apache.spark.sql.execution.WholeStageCodegenExec.liftedTree1$1(WholeStageCodegenExec.scala:721)
>  at 
> org.apache.spark.sql.execution.WholeStageCodegenExec.doExecute(WholeStageCodegenExec.scala:720)
>  at 
> org.apache.spark.sql.execution.SparkPlan.$anonfun$execute$1(SparkPlan.scala:185)
>  at 
> org.apache.spark.sql.execution.SparkPlan.$anonfun$executeQuery$1(SparkPlan.scala:223)
>  at 
> org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:151)
>  at 
> org.apache.spark.sql.execution.SparkPlan.executeQuery(SparkPlan.scala:220) at 
> org.apache.spark.sql.execution.SparkPlan.execute(SparkPlan.scala:181) at 
> org.apache.spark.sql.execution.exchange.ShuffleExchangeExec.inputRDD$lzycompute(ShuffleExchangeExec.scala:160)
>  at 
> org.apache.spark.sql.execution.exchange.ShuffleExchangeExec.inputRDD(ShuffleExchangeExec.scala:160)
>  at 
> org.apache.spark.sql.execution.exchange.ShuffleExchangeExec.mapOutputStatisticsFuture$lzycompute(ShuffleExchangeExec.scala:164)
>  at 
> org.apache.spark.sql.execution.exchange.ShuffleExchangeExec.mapOutputStatisticsFuture(ShuffleExchangeExec.scala:163)
>  at 
> org.apache.spark.sql.execution.exchange.ShuffleExchangeLike.$anonfun$materializeFuture$2(ShuffleExchangeExec.scala:100)
>  at 
> org.apache.spark.sql.catalyst.errors.package$.attachTree(package.scala:52) at 
> org.apache.spark.sql.execution.exchange.ShuffleExchangeLike.$anonfun$materializeFuture$1(ShuffleExchangeExec.scala:100)
>  at org.apache.spark.sql.util.LazyValue.getOrInit(LazyValue.scala:41) at 
> org.apache.spark.sql.execution.exchange.Exchange.getOrInitMaterializeFuture(Exchange.scala:68)
>  at 
> org.apache.spark.sql.execution.exchange.ShuffleExchangeLike.materializeFuture(ShuffleExchangeExec.scala:96)
>  at 
> org.apache.spark.sql.execution.exchange.ShuffleExchangeLike.materialize(ShuffleExchangeExec.scala:84)
>  at 
> org.apache.spark.sql.execution.exchange.ShuffleExchangeLike.materialize$(ShuffleExchangeExec.scala:83)
>  at 
> org.apache.spark.sql.execution.exchange.ShuffleExchangeExec.materialize(ShuffleExchangeExec.scala:128)
>  at 
> org.apache.spark.sql.execution.adaptive.ShuffleQueryStageExec.doMaterialize(QueryStageExec.scala:161)
>  at 
> org.apache.spark.sql.execution.adaptive.QueryStageExec.$anonfun$materialize$1(QueryStageExec.scala:74)
>  at 
> org.apache.spark.sql.execution.SparkPlan.$anonfun$executeQuery$1(SparkPlan.scala:223)
>  at 
> org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:151)
>  at 
> org.apache.spark.sql.execution.SparkPlan.executeQuery(SparkPlan.scala:220) at 
> org.apache.spark.sql.execution.adaptive.QueryStageExec.materialize(QueryStageExec.scala:74)
>  at 
> org.apache.spark.sql.execution.adaptive.MaterializeExecutable.tryStart(AdaptiveExecutable.scala:396)
>  at 
> org.apache.spark.sql.execution.adaptive.AdaptiveExecutorRuntime.startChild(AdaptiveExecutor.scala:225)
>  at 
> org.apache.spark.sql.execution.adaptive.ExecutionHelper.start(ExecutionHelper.scala:47)
>  at 
> org.apache.spark.sql.execution.adaptive.QueryStageExecutable$$anon$2.$anonfun$new$1(AdaptiveExecutable.scala:251)
>  at 
> org.apache.spark.sql.execution.adaptive.ExecutionHelper$Listener.$anonfun$onChildSuccess$2(ExecutionHelper.scala:55)
>  at 
> org.apache.spark.sql.execution.adaptive.ExecutionHelper$Listener.$anonfun$onChildSuccess$2$adapted(ExecutionHelper.scala:54)
>  at scala.Option.foreach(Option.scala:407) at 
> org.apache.spark.sql.execution.adaptive.ExecutionHelper$Listener.$anonfun$onChildSuccess$1(ExecutionHelper.scala:54)
>  at 
> org.apache.spark.sql.execution.adaptive.ExecutionHelper$Listener.$anonfun$onChildSuccess$1$adapted(ExecutionHelper.scala:53)
>  at scala.collection.mutable.ResizableArray.foreach(ResizableArray.scala:62) 
> at scala.collection.mutable.ResizableArray.foreach$(ResizableArray.scala:55) 
> at scala.collection.mutable.ArrayBuffer.foreach(ArrayBuffer.scala:49) at 
> org.apache.spark.sql.execution.adaptive.ExecutionHelper$Listener.onChildSuccess(ExecutionHelper.scala:53)
>  at 
> org.apache.spark.sql.execution.adaptive.AdaptiveExecutorRuntime.$anonfun$onActiveChildSuccess$2(AdaptiveExecutor.scala:314)
>  at 
> org.apache.spark.sql.execution.adaptive.AdaptiveExecutorRuntime.$anonfun$onActiveChildSuccess$2$adapted(AdaptiveExecutor.scala:314)
>  at scala.collection.mutable.ResizableArray.foreach(ResizableArray.scala:62) 
> at scala.collection.mutable.ResizableArray.foreach$(ResizableArray.scala:55) 
> at scala.collection.mutable.ArrayBuffer.foreach(ArrayBuffer.scala:49) at 
> org.apache.spark.sql.execution.adaptive.AdaptiveExecutorRuntime.onActiveChildSuccess(AdaptiveExecutor.scala:314)
>  at 
> org.apache.spark.sql.execution.adaptive.AdaptiveExecutorRuntime.onChildSuccess(AdaptiveExecutor.scala:284)
>  at 
> org.apache.spark.sql.execution.adaptive.AdaptiveExecutor.$anonfun$doRun$1(AdaptiveExecutor.scala:92)
>  at 
> org.apache.spark.sql.execution.adaptive.AdaptiveExecutor.$anonfun$doRun$1$adapted(AdaptiveExecutor.scala:91)
>  at scala.collection.mutable.HashMap.$anonfun$foreach$1(HashMap.scala:149) at 
> scala.collection.mutable.HashTable.foreachEntry(HashTable.scala:237) at 
> scala.collection.mutable.HashTable.foreachEntry$(HashTable.scala:230) at 
> scala.collection.mutable.HashMap.foreachEntry(HashMap.scala:44) at 
> scala.collection.mutable.HashMap.foreach(HashMap.scala:149) at 
> org.apache.spark.sql.execution.adaptive.AdaptiveExecutor.doRun(AdaptiveExecutor.scala:91)
>  at 
> org.apache.spark.sql.execution.adaptive.AdaptiveExecutor.tryRunningAndGetFuture(AdaptiveExecutor.scala:66)
>  at 
> org.apache.spark.sql.execution.adaptive.AdaptiveExecutor.execute(AdaptiveExecutor.scala:57)
>  at 
> org.apache.spark.sql.execution.adaptive.AdaptiveSparkPlanExec.$anonfun$getFinalPhysicalPlan$1(AdaptiveSparkPlanExec.scala:184)
>  at org.apache.spark.sql.SparkSession.withActive(SparkSession.scala:772) at 
> org.apache.spark.sql.execution.adaptive.AdaptiveSparkPlanExec.getFinalPhysicalPlan(AdaptiveSparkPlanExec.scala:183)
>  at 
> org.apache.spark.sql.execution.adaptive.AdaptiveSparkPlanExec.doExecute(AdaptiveSparkPlanExec.scala:434)
>  at 
> org.apache.spark.sql.execution.SparkPlan.$anonfun$execute$1(SparkPlan.scala:185)
>  at 
> org.apache.spark.sql.execution.SparkPlan.$anonfun$executeQuery$1(SparkPlan.scala:223)
>  at 
> org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:151)
>  at 
> org.apache.spark.sql.execution.SparkPlan.executeQuery(SparkPlan.scala:220) at 
> org.apache.spark.sql.execution.SparkPlan.execute(SparkPlan.scala:181) at 
> org.apache.spark.sql.delta.constraints.DeltaInvariantCheckerExec.doExecute(DeltaInvariantCheckerExec.scala:78)
>  at 
> org.apache.spark.sql.execution.SparkPlan.$anonfun$execute$1(SparkPlan.scala:185)
>  at 
> org.apache.spark.sql.execution.SparkPlan.$anonfun$executeQuery$1(SparkPlan.scala:223)
>  at 
> org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:151)
>  at 
> org.apache.spark.sql.execution.SparkPlan.executeQuery(SparkPlan.scala:220) at 
> org.apache.spark.sql.execution.SparkPlan.execute(SparkPlan.scala:181) at 
> org.apache.spark.sql.execution.datasources.FileFormatWriter$.write(FileFormatWriter.scala:177)
>  at 
> org.apache.spark.sql.delta.files.TransactionalWrite.$anonfun$writeFiles$1(TransactionalWrite.scala:192)
>  at 
> org.apache.spark.sql.catalyst.QueryPlanningTracker$.withTracker(QueryPlanningTracker.scala:107)
>  at 
> org.apache.spark.sql.execution.SQLExecution$.withTracker(SQLExecution.scala:232)
>  at 
> org.apache.spark.sql.execution.SQLExecution$.executeQuery$1(SQLExecution.scala:110)
>  at 
> org.apache.spark.sql.execution.SQLExecution$.$anonfun$withNewExecutionId$6(SQLExecution.scala:135)
>  at 
> org.apache.spark.sql.catalyst.QueryPlanningTracker$.withTracker(QueryPlanningTracker.scala:107)
>  at 
> org.apache.spark.sql.execution.SQLExecution$.withTracker(SQLExecution.scala:232)
>  at 
> org.apache.spark.sql.execution.SQLExecution$.$anonfun$withNewExecutionId$5(SQLExecution.scala:135)
>  at 
> org.apache.spark.sql.execution.SQLExecution$.withSQLConfPropagated(SQLExecution.scala:253)
>  at 
> org.apache.spark.sql.execution.SQLExecution$.$anonfun$withNewExecutionId$1(SQLExecution.scala:134)
>  at org.apache.spark.sql.SparkSession.withActive(SparkSession.scala:772) at 
> org.apache.spark.sql.execution.SQLExecution$.withNewExecutionId(SQLExecution.scala:68)
>  at 
> org.apache.spark.sql.delta.files.TransactionalWrite.writeFiles(TransactionalWrite.scala:163)
>  at 
> org.apache.spark.sql.delta.files.TransactionalWrite.writeFiles$(TransactionalWrite.scala:142)
>  at 
> org.apache.spark.sql.delta.OptimisticTransaction.writeFiles(OptimisticTransaction.scala:84)
>  at 
> org.apache.spark.sql.delta.commands.MergeIntoCommand.$anonfun$writeAllChanges$1(MergeIntoCommand.scala:552)
>  at 
> org.apache.spark.sql.delta.commands.MergeIntoCommand.recordMergeOperation(MergeIntoCommand.scala:654)
>  at 
> org.apache.spark.sql.delta.commands.MergeIntoCommand.writeAllChanges(MergeIntoCommand.scala:460)
>  at 
> org.apache.spark.sql.delta.commands.MergeIntoCommand.$anonfun$run$4(MergeIntoCommand.scala:274)
>  at 
> org.apache.spark.sql.delta.util.DeltaProgressReporter.withJobDescription(DeltaProgressReporter.scala:53)
>  at 
> org.apache.spark.sql.delta.util.DeltaProgressReporter.withStatusCode(DeltaProgressReporter.scala:32)
>  at 
> org.apache.spark.sql.delta.util.DeltaProgressReporter.withStatusCode$(DeltaProgressReporter.scala:27)
>  at 
> org.apache.spark.sql.delta.commands.MergeIntoCommand.withStatusCode(MergeIntoCommand.scala:201)
>  at 
> org.apache.spark.sql.delta.commands.MergeIntoCommand.$anonfun$run$2(MergeIntoCommand.scala:274)
>  at 
> org.apache.spark.sql.delta.commands.MergeIntoCommand.$anonfun$run$2$adapted(MergeIntoCommand.scala:255)
>  at 
> org.apache.spark.sql.delta.DeltaLog.withNewTransaction(DeltaLog.scala:187) at 
> org.apache.spark.sql.delta.commands.MergeIntoCommand.$anonfun$run$1(MergeIntoCommand.scala:255)
>  at scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23) at 
> com.databricks.spark.util.DatabricksLogging.recordOperation(DatabricksLogging.scala:77)
>  at 
> com.databricks.spark.util.DatabricksLogging.recordOperation$(DatabricksLogging.scala:67)
>  at 
> org.apache.spark.sql.delta.commands.MergeIntoCommand.recordOperation(MergeIntoCommand.scala:201)
>  at 
> org.apache.spark.sql.delta.metering.DeltaLogging.recordDeltaOperation(DeltaLogging.scala:106)
>  at 
> org.apache.spark.sql.delta.metering.DeltaLogging.recordDeltaOperation$(DeltaLogging.scala:91)
>  at 
> org.apache.spark.sql.delta.commands.MergeIntoCommand.recordDeltaOperation(MergeIntoCommand.scala:201)
>  at 
> org.apache.spark.sql.delta.commands.MergeIntoCommand.run(MergeIntoCommand.scala:253)
>  at 
> org.apache.spark.sql.execution.command.ExecutedCommandExec.sideEffectResult$lzycompute(commands.scala:70)
>  at 
> org.apache.spark.sql.execution.command.ExecutedCommandExec.sideEffectResult(commands.scala:68)
>  at 
> org.apache.spark.sql.execution.command.ExecutedCommandExec.executeCollect(commands.scala:79)
>  at org.apache.spark.sql.Dataset.$anonfun$logicalPlan$1(Dataset.scala:229) at 
> org.apache.spark.sql.Dataset.$anonfun$withAction$1(Dataset.scala:3724) at 
> org.apache.spark.sql.catalyst.QueryPlanningTracker$.withTracker(QueryPlanningTracker.scala:107)
>  at 
> org.apache.spark.sql.execution.SQLExecution$.withTracker(SQLExecution.scala:232)
>  at 
> org.apache.spark.sql.execution.SQLExecution$.executeQuery$1(SQLExecution.scala:110)
>  at 
> org.apache.spark.sql.execution.SQLExecution$.$anonfun$withNewExecutionId$6(SQLExecution.scala:135)
>  at 
> org.apache.spark.sql.catalyst.QueryPlanningTracker$.withTracker(QueryPlanningTracker.scala:107)
>  at 
> org.apache.spark.sql.execution.SQLExecution$.withTracker(SQLExecution.scala:232)
>  at 
> org.apache.spark.sql.execution.SQLExecution$.$anonfun$withNewExecutionId$5(SQLExecution.scala:135)
>  at 
> org.apache.spark.sql.execution.SQLExecution$.withSQLConfPropagated(SQLExecution.scala:253)
>  at 
> org.apache.spark.sql.execution.SQLExecution$.$anonfun$withNewExecutionId$1(SQLExecution.scala:134)
>  at org.apache.spark.sql.SparkSession.withActive(SparkSession.scala:772) at 
> org.apache.spark.sql.execution.SQLExecution$.withNewExecutionId(SQLExecution.scala:68)
>  at org.apache.spark.sql.Dataset.withAction(Dataset.scala:3722) at 
> org.apache.spark.sql.Dataset.<init>(Dataset.scala:229) at 
> org.apache.spark.sql.Dataset$.$anonfun$ofRows$2(Dataset.scala:100) at 
> org.apache.spark.sql.SparkSession.withActive(SparkSession.scala:772) at 
> org.apache.spark.sql.Dataset$.ofRows(Dataset.scala:97) at 
> org.apache.spark.sql.SparkSession.$anonfun$sql$1(SparkSession.scala:615) at 
> org.apache.spark.sql.SparkSession.withActive(SparkSession.scala:772) at 
> org.apache.spark.sql.SparkSession.sql(SparkSession.scala:610) at 
> com.aup.daab.ds.cf.external.DeltaIntegration.executeDeltaQuery(DeltaIntegration.scala:214)
>  at com.aup.daab.ds.cf.TraServs.Step$.ExecuteTransformation(Step.scala:77) at 
> com.aup.daab.ds.cf.TraServs.TraServ.$anonfun$transformData$5(TraServ.scala:268)
>  at 
> com.aup.daab.ds.cf.TraServs.TraServ.$anonfun$transformData$5$adapted(TraServ.scala:240)
>  at 
> scala.collection.IndexedSeqOptimized.foreach(IndexedSeqOptimized.scala:36) at 
> scala.collection.IndexedSeqOptimized.foreach$(IndexedSeqOptimized.scala:33) 
> at scala.collection.mutable.ArrayOps$ofRef.foreach(ArrayOps.scala:198) at 
> com.aup.daab.ds.cf.TraServs.TraServ.transformData(TraServ.scala:240) at 
> com.aup.daab.ds.cf.driver.Driver$.main(Driver.scala:147) at 
> com.aup.daab.ds.cf.driver.Driver.main(Driver.scala) at 
> sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) at 
> sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) 
> at 
> sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
>  at java.lang.reflect.Method.invoke(Method.java:498) at 
> org.apache.spark.deploy.JavaMainApplication.start(SparkApplication.scala:52) 
> at 
> org.apache.spark.deploy.SparkSubmit.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:959)
>  at org.apache.spark.deploy.SparkSubmit.doRunMain$1(SparkSubmit.scala:180) at 
> org.apache.spark.deploy.SparkSubmit.submit(SparkSubmit.scala:203) at 
> org.apache.spark.deploy.SparkSubmit.doSubmit(SparkSubmit.scala:90) at 
> org.apache.spark.deploy.SparkSubmit$$anon$2.doSubmit(SparkSubmit.scala:1038) 
> at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:1047) at 
> org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)2021-09-22 
> 16:43:22,112 WARN WholeStageCodegenExec: Whole-stage codegen disabled for 
> plan (id=9):


