zero323 commented on a change in pull request #34466:
URL: https://github.com/apache/spark/pull/34466#discussion_r753822162



##########
File path: python/pyspark/context.py
##########
@@ -244,26 +269,32 @@ def _do_init(
         self.environment["PYTHONHASHSEED"] = os.environ.get("PYTHONHASHSEED", 
"0")
 
         # Create the Java SparkContext through Py4J
-        self._jsc = jsc or self._initialize_context(self._conf._jconf)
+        self._jsc = jsc or self._initialize_context(self._conf._jconf)  # 
type: ignore[attr-defined]
         # Reset the SparkConf to the one actually used by the SparkContext in 
JVM.
         self._conf = SparkConf(_jconf=self._jsc.sc().conf())
 
         # Create a single Accumulator in Java that we'll send all our updates 
through;
         # they will be passed back to us through a TCP server
-        auth_token = self._gateway.gateway_parameters.auth_token
-        self._accumulatorServer = accumulators._start_update_server(auth_token)
+        auth_token = cast(JVMView, self._gateway).gateway_parameters.auth_token
+        self._accumulatorServer = accumulators._start_update_server(  # type: 
ignore[attr-defined]
+            auth_token
+        )  # type: ignore[attr-defined]

Review comment:
       Nit: We should only need one of these ignores.




-- 
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.

To unsubscribe, e-mail: reviews-unsubscribe@spark.apache.org

For queries about this service, please contact Infrastructure at:
users@infra.apache.org



---------------------------------------------------------------------
To unsubscribe, e-mail: reviews-unsubscribe@spark.apache.org
For additional commands, e-mail: reviews-help@spark.apache.org

Reply via email to