Michael Armbrust created SPARK-8022:
---------------------------------------

             Summary: df.select(df.columnName) no longer works
                 Key: SPARK-8022
                 URL: https://issues.apache.org/jira/browse/SPARK-8022
             Project: Spark
          Issue Type: Bug
          Components: SQL
    Affects Versions: 1.4.0
            Reporter: Michael Armbrust
            Assignee: Davies Liu
            Priority: Blocker


This seems to be a pretty serious regression:

{{{
df = sqlContext.load("/home/michael/spark.json", "json")
df.select(df.committerEmail)
}}}
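Judging from the traceback below, the bracket and string spellings go through the same {{_jcols}}/{{_jseq}} path, so they are probably affected as well; worth re-checking alongside the attribute form once a fix lands (assumed equivalent, unverified):

{{{
# Other spellings of the same selection; these appear to route through the same
# _jcols/_jseq conversion as df.committerEmail (unverified assumption):
df.select(df["committerEmail"])
df.select("committerEmail")
}}}

The attribute form above fails with: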
{{{
---------------------------------------------------------------------------
AttributeError                            Traceback (most recent call last)
<ipython-input-2-76c71a2870a4> in <module>()
----> 1 df.select(df.committerEmail)

/home/ubuntu/databricks/spark/python/pyspark/sql/dataframe.pyc in select(self, *cols)
    736         [Row(name=u'Alice', age=12), Row(name=u'Bob', age=15)]
    737         """
--> 738         jdf = self._jdf.select(self._jcols(*cols))
    739         return DataFrame(jdf, self.sql_ctx)
    740 

/home/ubuntu/databricks/spark/python/pyspark/sql/dataframe.pyc in _jcols(self, *cols)
    628         if len(cols) == 1 and isinstance(cols[0], list):
    629             cols = cols[0]
--> 630         return self._jseq(cols, _to_java_column)
    631 
    632     @since("1.3.1")

/home/ubuntu/databricks/spark/python/pyspark/sql/dataframe.pyc in _jseq(self, cols, converter)
    615     def _jseq(self, cols, converter=None):
    616         """Return a JVM Seq of Columns from a list of Column or names"""
--> 617         return _to_seq(self.sql_ctx._sc, cols, converter)
    618 
    619     def _jmap(self, jm):

/home/ubuntu/databricks/spark/python/pyspark/sql/column.pyc in _to_seq(sc, cols, converter)
     58     if converter:
     59         cols = [converter(c) for c in cols]
---> 60     return sc._jvm.PythonUtils.toSeq(cols)
     61 
     62 

/home/ubuntu/databricks/spark/python/lib/py4j-0.8.2.1-src.zip/py4j/java_gateway.py in __call__(self, *args)
    527 
    528         args_command = ''.join(
--> 529                 [get_command_part(arg, self.pool) for arg in new_args])
    530 
    531         command = CALL_COMMAND_NAME +\

/home/ubuntu/databricks/spark/python/lib/py4j-0.8.2.1-src.zip/py4j/protocol.py in get_command_part(parameter, python_proxy_pool)
    263             command_part += ';' + interface
    264     else:
--> 265         command_part = REFERENCE_TYPE + parameter._get_object_id()
    266 
    267     command_part += '\n'

AttributeError: 'list' object has no attribute '_get_object_id'
}}}
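Not a fix, just a reading of the traceback: {{PythonUtils.toSeq}} ends up being called with a plain Python list, and py4j's {{get_command_part}} can only serialize primitives, strings, and {{JavaObject}} references, so it falls through to the reference branch and calls {{_get_object_id()}} on the list. A minimal sketch of the conversion that appears to be missing (the {{to_seq_sketch}} helper is hypothetical; the other names are taken from the traceback):

{{{
from py4j.java_collections import ListConverter

def to_seq_sketch(sc, cols):
    # Hypothetical illustration, not the actual patch: wrap the Python list in a
    # java.util.List before handing it to the JVM (the alternative is building the
    # py4j gateway with auto_convert enabled, which converts lists implicitly).
    jcols = ListConverter().convert(cols, sc._gateway._gateway_client)
    return sc._jvm.PythonUtils.toSeq(jcols)
}}}

Either way, the list should never reach {{get_command_part}} as a bare Python object.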


