Github user HyukjinKwon commented on a diff in the pull request: https://github.com/apache/spark/pull/19646#discussion_r148771775 --- Diff: python/pyspark/sql/tests.py --- @@ -2592,6 +2592,16 @@ def test_create_dataframe_from_array_of_long(self): df = self.spark.createDataFrame(data) self.assertEqual(df.first(), Row(longarray=[-9223372036854775808, 0, 9223372036854775807])) + @unittest.skipIf(not _have_pandas, "Pandas not installed") + def test_create_dataframe_from_pandas_with_timestamp(self): + import pandas as pd + from datetime import datetime + pdf = pd.DataFrame({"ts": [datetime(2017, 10, 31, 1, 1, 1)], + "d": [pd.Timestamp.now().date()]}) + df = self.spark.createDataFrame(pdf) --- End diff -- I was checking this PR and ran this out of curiosity. I got: ```python import pandas as pd from datetime import datetime pdf = pd.DataFrame({"ts": [datetime(2017, 10, 31, 1, 1, 1)], "d": [pd.Timestamp.now().date()]}) spark.createDataFrame(pdf, "d date, ts timestamp") ``` ``` Traceback (most recent call last): File "<stdin>", line 1, in <module> File "/.../spark/python/pyspark/sql/session.py", line 587, in createDataFrame rdd, schema = self._createFromLocal(map(prepare, data), schema) File "/.../spark/python/pyspark/sql/session.py", line 401, in _createFromLocal data = list(data) File "/.../spark/python/pyspark/sql/session.py", line 567, in prepare verify_func(obj) File "/.../spark/python/pyspark/sql/types.py", line 1411, in verify verify_value(obj) File "/.../spark/python/pyspark/sql/types.py", line 1392, in verify_struct verifier(v) File "/.../spark/python/pyspark/sql/types.py", line 1411, in verify verify_value(obj) File "/.../spark/python/pyspark/sql/types.py", line 1405, in verify_default verify_acceptable_types(obj) File "/.../spark/python/pyspark/sql/types.py", line 1300, in verify_acceptable_types % (dataType, obj, type(obj)))) TypeError: field ts: TimestampType can not accept object 1509411661000000000L in type <type 'long'>
--- --------------------------------------------------------------------- To unsubscribe, e-mail: reviews-unsubscr...@spark.apache.org For additional commands, e-mail: reviews-h...@spark.apache.org