This is an automated email from the ASF dual-hosted git repository. gurwls223 pushed a commit to branch master in repository https://gitbox.apache.org/repos/asf/spark.git
The following commit(s) were added to refs/heads/master by this push: new 0d3093cc983 [SPARK-39029][PYTHON][TESTS] Improve the test coverage for pyspark/broadcast.py 0d3093cc983 is described below commit 0d3093cc983f8fe236cc99e546e0a154f696b3f9 Author: pralabhkumar <pralabhku...@gmail.com> AuthorDate: Wed May 4 09:14:43 2022 +0900 [SPARK-39029][PYTHON][TESTS] Improve the test coverage for pyspark/broadcast.py ### What changes were proposed in this pull request? This PR adds test cases for broadcast.py ### Why are the changes needed? To cover corner test cases and increase coverage ### Does this PR introduce _any_ user-facing change? No - test only ### How was this patch tested? CI in this PR should test it out Closes #36432 from pralabhkumar/rk_test_broadcast. Lead-authored-by: pralabhkumar <pralabhku...@gmail.com> Co-authored-by: Kumar, Pralabh <pralk...@visa.com> Signed-off-by: Hyukjin Kwon <gurwls...@apache.org> --- python/pyspark/tests/test_broadcast.py | 29 ++++++++++++++++++++++++++++- 1 file changed, 28 insertions(+), 1 deletion(-) diff --git a/python/pyspark/tests/test_broadcast.py b/python/pyspark/tests/test_broadcast.py index 56763e8d80a..8185e812e66 100644 --- a/python/pyspark/tests/test_broadcast.py +++ b/python/pyspark/tests/test_broadcast.py @@ -15,12 +15,15 @@ # limitations under the License. 
# import os +import pickle import random import time import tempfile import unittest -from pyspark import SparkConf, SparkContext +from py4j.protocol import Py4JJavaError + +from pyspark import SparkConf, SparkContext, Broadcast from pyspark.java_gateway import launch_gateway from pyspark.serializers import ChunkedStream @@ -99,6 +102,30 @@ class BroadcastTest(unittest.TestCase): finally: b.destroy() + def test_broadcast_when_sc_none(self): + # SPARK-39029 : Test case to improve test coverage of broadcast.py + # It tests the case when SparkContext is none and Broadcast is called at executor + conf = SparkConf() + conf.setMaster("local-cluster[2,1,1024]") + self.sc = SparkContext(conf=conf) + bs = self.sc.broadcast([10]) + bs_sc_none = Broadcast(sc=None, path=bs._path) + self.assertEqual(bs_sc_none.value, [10]) + + def test_broadcast_for_error_condition(self): + # SPARK-39029: Test case to improve test coverage of broadcast.py + # It tests the case when broadcast should raise error . + conf = SparkConf() + conf.setMaster("local-cluster[2,1,1024]") + self.sc = SparkContext(conf=conf) + bs = self.sc.broadcast([1]) + with self.assertRaisesRegex(pickle.PickleError, "Could.*not.*serialize.*broadcast"): + self.sc.broadcast(self.sc) + with self.assertRaisesRegex(Py4JJavaError, "RuntimeError.*Broadcast.*destroyed.*driver"): + self.sc.parallelize([1]).map(lambda x: bs.destroy()).collect() + with self.assertRaisesRegex(Py4JJavaError, "RuntimeError.*Broadcast.*unpersisted.*driver"): + self.sc.parallelize([1]).map(lambda x: bs.unpersist()).collect() + class BroadcastFrameProtocolTest(unittest.TestCase): @classmethod --------------------------------------------------------------------- To unsubscribe, e-mail: commits-unsubscr...@spark.apache.org For additional commands, e-mail: commits-h...@spark.apache.org