softyoungha commented on code in PR #40633:
URL: https://github.com/apache/airflow/pull/40633#discussion_r1676157541


##########
airflow/providers/apache/spark/decorators/pyspark_submit.py:
##########
@@ -0,0 +1,213 @@
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+from __future__ import annotations
+
+import inspect
+import os
+import pickle
+import warnings
+from tempfile import TemporaryDirectory
+from textwrap import dedent
+from typing import TYPE_CHECKING, Callable, Sequence
+
+import dill
+
+from airflow.configuration import conf
+from airflow.decorators.base import DecoratedOperator, TaskDecorator, task_decorator_factory
+from airflow.exceptions import AirflowException
+from airflow.providers.apache.spark.decorators.pyspark import SPARK_CONTEXT_KEYS
+from airflow.providers.apache.spark.operators.spark_submit import SparkSubmitOperator
+from airflow.utils.python_virtualenv import write_python_script
+
+if TYPE_CHECKING:
+    from airflow.utils.context import Context
+
+
+INPUT_FILENAME = "SCRIPT__GENERATED__AIRFLOW.IN"
+
+
+class _PysparkSubmitDecoratedOperator(DecoratedOperator, SparkSubmitOperator):
+    custom_operator_name = "@task.pyspark_submit"
+
+    template_fields: Sequence[str] = (
+        "conf",
+        "files",
+        "py_files",
+        "jars",
+        "driver_class_path",
+        "packages",
+        "exclude_packages",
+        "keytab",
+        "principal",
+        "proxy_user",
+        "name",
+        "env_vars",
+        "properties_file",
+        "op_args",
+        "op_kwargs",
+    )
+
+    def __init__(
+        self,
+        python_callable: Callable,
+        op_args: Sequence | None = None,
+        op_kwargs: dict | None = None,
+        use_dill: bool = False,
+        expect_airflow: bool = False,
+        **kwargs,
+    ):
+        self.use_dill = use_dill
+        self.expect_airflow = expect_airflow
+
+        signature = inspect.signature(python_callable)
+        parameters = [
+            param.replace(default=None) if param.name in SPARK_CONTEXT_KEYS else param
+            for param in signature.parameters.values()
+        ]
+        # mypy does not understand __signature__ attribute
+        # see https://github.com/python/mypy/issues/12472
+        python_callable.__signature__ = signature.replace(parameters=parameters)  # type: ignore[attr-defined]
+
+        if kwargs.get("application"):
+            if not conf.getboolean("operators", "ALLOW_ILLEGAL_ARGUMENTS"):
+                raise AirflowException(
+                    "Invalid argument 'application' were passed to 
`@task.pyspark_submit`."
+                )
+            warnings.warn(
+                "Invalid argument 'application' were passed to 
@task.pyspark_submit.",
+                UserWarning,
+                stacklevel=2,
+            )
+        if kwargs.get("application_args"):
+            if not conf.getboolean("operators", "ALLOW_ILLEGAL_ARGUMENTS"):
+                raise AirflowException(
+                    "Invalid argument 'application_args' were passed to 
`@task.pyspark_submit`."
+                )
+            warnings.warn(
+                "Invalid argument 'application_args' were passed to 
`@task.pyspark_submit`.",
+                UserWarning,
+                stacklevel=2,
+            )
+        for key in SPARK_CONTEXT_KEYS:
+            if key in kwargs:
+                if not conf.getboolean("operators", "ALLOW_ILLEGAL_ARGUMENTS"):
+                    raise AirflowException(
+                        f"Invalid key '{key}' in op_kwargs. You don't need to 
set it because it's a "
+                        "variable that will be automatically set within the 
Python process of the Spark "
+                        "job submitted via spark-submit."
+                    )
+                warnings.warn(

Review Comment:
   Oh, I made a mistake: I should have written `if key in op_kwargs` instead of `if key in kwargs`.
   
   The part you mentioned was written with cases like the following in mind:
   ```python
   @task.pyspark_submit(
       task_id="my-task",
       ...,
   )
   def pyspark_job(sc: SparkContext, spark: SparkSession):
       ...
   
   # wrong argument case, but it works
   pyspark_job(sc=1, spark=3)
   # sc/spark will be replaced with the SparkContext/SparkSession internally during spark-submit
   ```
   
   
https://github.com/apache/airflow/blob/7807eca3266b06463ac422ded6771702db0cfa4c/airflow/providers/apache/spark/decorators/pyspark_submit.py#L173-L185
   In `get_pyspark_source`, op_args and op_kwargs are read first, and then spark and sc are injected. I thought it was necessary to inform the user that even if they pass different values for sc and spark, those values will still be overwritten internally with the SparkContext and SparkSession, roughly as in the sketch below.
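   
   A minimal sketch of that ordering (the helper `_run_user_callable` and the script shape are hypothetical, simplified from the generated script):
   ```python
   from pyspark import SparkConf
   from pyspark.sql import SparkSession
   
   
   def _run_user_callable(python_callable, op_args, op_kwargs):
       # Hypothetical, simplified stand-in for the generated script:
       # user-supplied arguments are read first...
       kwargs = dict(op_kwargs)  # e.g. {"sc": 1, "spark": 3} from the user
   
       # ...then sc/spark are injected last, so any user-supplied values
       # for them are overwritten before the callable runs.
       spark = SparkSession.builder.config(conf=SparkConf()).getOrCreate()
       if "spark" in kwargs:
           kwargs["spark"] = spark
       if "sc" in kwargs:
           kwargs["sc"] = spark.sparkContext
       return python_callable(*op_args, **kwargs)
   ```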
   `conf.getboolean('operators', 'ALLOW_ILLEGAL_ARGUMENTS')` determines whether to raise a warning or an exception.
   Is this option related to this series of checks?
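   
   Incidentally, the warn-or-raise gating repeats three times in the diff; a small helper (name hypothetical) could express the pattern once:
   ```python
   import warnings
   
   from airflow.configuration import conf
   from airflow.exceptions import AirflowException
   
   
   def _warn_or_raise(message: str) -> None:
       # Mirrors the gating in the diff above: raise unless the config flag
       # allows illegal arguments, in which case only warn.
       if not conf.getboolean("operators", "ALLOW_ILLEGAL_ARGUMENTS"):
           raise AirflowException(message)
       warnings.warn(message, UserWarning, stacklevel=3)
   ```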
   


