HyukjinKwon commented on a change in pull request #27169: [SPARK-30485][SQL] Remove SQL configs deprecated in Spark 2.1 and 2.3
URL: https://github.com/apache/spark/pull/27169#discussion_r365631834
 
 

 ##########
 File path: python/pyspark/sql/tests/test_pandas_udf_scalar.py
 ##########
 @@ -724,49 +724,6 @@ def iter_check_records_per_batch(it):
                 for (r,) in result:
                     self.assertTrue(r <= 3)
 
-    def test_vectorized_udf_timestamps_respect_session_timezone(self):
-        schema = StructType([
-            StructField("idx", LongType(), True),
-            StructField("timestamp", TimestampType(), True)])
-        data = [(1, datetime(1969, 1, 1, 1, 1, 1)),
-                (2, datetime(2012, 2, 2, 2, 2, 2)),
-                (3, None),
-                (4, datetime(2100, 3, 3, 3, 3, 3))]
-        df = self.spark.createDataFrame(data, schema=schema)
-
-        scalar_internal_value = pandas_udf(
-            lambda ts: ts.apply(lambda ts: ts.value if ts is not pd.NaT else None), LongType())
-
-        @pandas_udf(LongType(), PandasUDFType.SCALAR_ITER)
-        def iter_internal_value(it):
-            for ts in it:
-                yield ts.apply(lambda ts: ts.value if ts is not pd.NaT else None)
-
-        for internal_value, udf_type in [(scalar_internal_value, PandasUDFType.SCALAR),
-                                         (iter_internal_value, PandasUDFType.SCALAR_ITER)]:
-            f_timestamp_copy = pandas_udf(lambda ts: ts, TimestampType(), udf_type)
-            timezone = "America/New_York"
-            with self.sql_conf({
 
 Review comment:
   This, too.

----------------------------------------------------------------
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.
 
For queries about this service, please contact Infrastructure at:
users@infra.apache.org


With regards,
Apache Git Services

---------------------------------------------------------------------
To unsubscribe, e-mail: reviews-unsubscribe@spark.apache.org
For additional commands, e-mail: reviews-help@spark.apache.org

Reply via email to