Can someone help me understand this error, which occurs while running a filter on a DataFrame?
2016-07-31 21:01:57 ERROR CodeGenerator:91 - failed to compile: org.codehaus.commons.compiler.CompileException: File 'generated.java', Line 117, Column 58: Expression "mapelements_isNull" is not an rvalue /* 001 */ public Object generate(Object[] references) { /* 002 */ return new GeneratedIterator(references); /* 003 */ } /* 004 */ /* 005 */ /** Codegened pipeline for: /* 006 */ * TungstenAggregate(key=[], functions=[(count(1),mode=Partial,isDistinct=false)], output=[count#127L]) /* 007 */ +- Project /* 008 */ +- Filter (is... /* 009 */ */ /* 010 */ final class GeneratedIterator extends org.apache.spark.sql.execution.BufferedRowIterator { /* 011 */ private Object[] references; /* 012 */ private boolean agg_initAgg; /* 013 */ private boolean agg_bufIsNull; /* 014 */ private long agg_bufValue; /* 015 */ private scala.collection.Iterator inputadapter_input; /* 016 */ private Object[] deserializetoobject_values; /* 017 */ private org.apache.spark.sql.types.StructType deserializetoobject_schema; /* 018 */ private UnsafeRow deserializetoobject_result; /* 019 */ private org.apache.spark.sql.catalyst.expressions.codegen.BufferHolder deserializetoobject_holder; /* 020 */ private org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter deserializetoobject_rowWriter; /* 021 */ private UnsafeRow mapelements_result; /* 022 */ private org.apache.spark.sql.catalyst.expressions.codegen.BufferHolder mapelements_holder; /* 023 */ private org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter mapelements_rowWriter; /* 024 */ private Object[] serializefromobject_values; /* 025 */ private UnsafeRow serializefromobject_result; /* 026 */ private org.apache.spark.sql.catalyst.expressions.codegen.BufferHolder serializefromobject_holder; /* 027 */ private org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter serializefromobject_rowWriter; /* 028 */ private org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter serializefromobject_rowWriter1; /* 
029 */ private org.apache.spark.sql.execution.metric.SQLMetric filter_numOutputRows; /* 030 */ private UnsafeRow filter_result; /* 031 */ private org.apache.spark.sql.catalyst.expressions.codegen.BufferHolder filter_holder; /* 032 */ private org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter filter_rowWriter; /* 033 */ private org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter filter_rowWriter1; /* 034 */ private org.apache.spark.sql.execution.metric.SQLMetric agg_numOutputRows; /* 035 */ private org.apache.spark.sql.execution.metric.SQLMetric agg_aggTime; /* 036 */ private UnsafeRow agg_result; /* 037 */ private org.apache.spark.sql.catalyst.expressions.codegen.BufferHolder agg_holder; /* 038 */ private org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter agg_rowWriter; /* 039 */ /* 040 */ public GeneratedIterator(Object[] references) { /* 041 */ this.references = references; /* 042 */ } /* 043 */