kaxil commented on code in PR #62645:
URL: https://github.com/apache/airflow/pull/62645#discussion_r2980132411


##########
task-sdk/src/airflow/sdk/execution_time/callback_supervisor.py:
##########
@@ -0,0 +1,344 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+"""Supervised execution of callback workloads."""
+
+from __future__ import annotations
+
+import os
+import time
+from importlib import import_module
+from typing import TYPE_CHECKING, BinaryIO, ClassVar, Protocol
+
+import attrs
+import structlog
+from pydantic import TypeAdapter
+
+from airflow.sdk.execution_time.supervisor import (
+    MIN_HEARTBEAT_INTERVAL,
+    SOCKET_CLEANUP_TIMEOUT,
+    WatchedSubprocess,
+)
+
+if TYPE_CHECKING:
+    from collections.abc import Callable
+
+    from structlog.typing import FilteringBoundLogger
+    from typing_extensions import Self
+
+    # Core (airflow.executors.workloads.base.BundleInfo) and SDK (airflow.sdk.api.datamodels._generated.BundleInfo)
+    # are structurally identical, but MyPy treats them as different types. This Protocol makes MyPy happy.
+    class _BundleInfoLike(Protocol):
+        name: str
+        version: str | None
+
+
+__all__ = ["CallbackSubprocess", "supervise_callback"]
+
+log: FilteringBoundLogger = structlog.get_logger(logger_name="callback_supervisor")
+
+
+def execute_callback(
+    callback_path: str,
+    callback_kwargs: dict,
+    log,
+) -> tuple[bool, str | None]:
+    """
+    Execute a callback function by importing and calling it, returning the success state.
+
+    Supports two patterns:
+    1. Functions - called directly with kwargs
+    2. Classes that return callable instances (like BaseNotifier) - instantiated then called with context
+
+    Example:
+        # Function callback
+        execute_callback("my_module.alert_func", {"msg": "Alert!", "context": 
{...}}, log)
+
+        # Notifier callback
+        execute_callback("airflow.providers.slack...SlackWebhookNotifier", 
{"text": "Alert!"}, log)
+
+    :param callback_path: Dot-separated import path to the callback function or class.
+    :param callback_kwargs: Keyword arguments to pass to the callback.
+    :param log: Logger instance for recording execution.
+    :return: Tuple of (success: bool, error_message: str | None)
+    """
+    from airflow.sdk._shared.module_loading import accepts_context
+
+    if not callback_path:
+        return False, "Callback path not found."
+
+    try:
+        # Import the callback callable
+        # Expected format: "module.path.to.function_or_class"
+        module_path, function_name = callback_path.rsplit(".", 1)
+        module = import_module(module_path)
+        callback_callable = getattr(module, function_name)
+
+        log.debug("Executing callback %s(%s)...", callback_path, 
callback_kwargs)
+
+        # If the callback is a callable, call it.  If it is a class, instantiate it.
+        result = callback_callable(**callback_kwargs)
+
+        # If the callback is a class then it is now instantiated and callable, call it.
+        if callable(result):
+            context = callback_kwargs.get("context", {}) if accepts_context(result) else {}

Review Comment:
   For the callable-class pattern (like `BaseNotifier`): the old code called 
`result(context)` with the real context unconditionally when the result was 
callable. The new code still calls the result, but passes the real context 
only when `accepts_context(result)` returns `True`, and an empty dict 
otherwise. If a notifier's `__call__` expects context as a positional arg and 
`accepts_context` returns `False` (e.g. because it only has `*args`), it now 
receives `{}` instead of the real context, so the behavior changes.
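   
   A minimal repro of the concern (`LegacyNotifier` is a hypothetical class 
made up for illustration; it assumes `accepts_context` inspects the signature 
for an explicit `context` parameter):
   
   ```python
   class LegacyNotifier:
       """Hypothetical notifier whose __call__ takes context positionally via *args."""

       def __init__(self, text: str):
           self.text = text

       def __call__(self, *args):
           # Old supervisor code: args == (context,) with the real context dict.
           # New code: accepts_context() sees no explicit `context` param, so
           # the supervisor passes {} instead.
           context = args[0] if args else {}
           print(f"{self.text} (dag={context.get('dag_id', '<missing>')})")


   notifier = LegacyNotifier(text="Alert!")
   notifier({"dag_id": "my_dag"})  # old behavior -> "Alert! (dag=my_dag)"
   notifier({})                    # new behavior -> "Alert! (dag=<missing>)"
   ```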



##########
task-sdk/src/airflow/sdk/execution_time/callback_supervisor.py:
##########
@@ -0,0 +1,344 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+"""Supervised execution of callback workloads."""
+
+from __future__ import annotations
+
+import os
+import time
+from importlib import import_module
+from typing import TYPE_CHECKING, BinaryIO, ClassVar, Protocol
+
+import attrs
+import structlog
+from pydantic import TypeAdapter
+
+from airflow.sdk.execution_time.supervisor import (
+    MIN_HEARTBEAT_INTERVAL,
+    SOCKET_CLEANUP_TIMEOUT,
+    WatchedSubprocess,
+)
+
+if TYPE_CHECKING:
+    from collections.abc import Callable
+
+    from structlog.typing import FilteringBoundLogger
+    from typing_extensions import Self
+
+    # Core (airflow.executors.workloads.base.BundleInfo) and SDK (airflow.sdk.api.datamodels._generated.BundleInfo)
+    # are structurally identical, but MyPy treats them as different types. This Protocol makes MyPy happy.
+    class _BundleInfoLike(Protocol):
+        name: str
+        version: str | None
+
+
+__all__ = ["CallbackSubprocess", "supervise_callback"]
+
+log: FilteringBoundLogger = structlog.get_logger(logger_name="callback_supervisor")
+
+
+def execute_callback(
+    callback_path: str,
+    callback_kwargs: dict,
+    log,
+) -> tuple[bool, str | None]:
+    """
+    Execute a callback function by importing and calling it, returning the success state.
+
+    Supports two patterns:
+    1. Functions - called directly with kwargs
+    2. Classes that return callable instances (like BaseNotifier) - instantiated then called with context
+
+    Example:
+        # Function callback
+        execute_callback("my_module.alert_func", {"msg": "Alert!", "context": 
{...}}, log)
+
+        # Notifier callback
+        execute_callback("airflow.providers.slack...SlackWebhookNotifier", 
{"text": "Alert!"}, log)
+
+    :param callback_path: Dot-separated import path to the callback function or class.
+    :param callback_kwargs: Keyword arguments to pass to the callback.
+    :param log: Logger instance for recording execution.
+    :return: Tuple of (success: bool, error_message: str | None)
+    """
+    from airflow.sdk._shared.module_loading import accepts_context
+
+    if not callback_path:
+        return False, "Callback path not found."
+
+    try:
+        # Import the callback callable
+        # Expected format: "module.path.to.function_or_class"
+        module_path, function_name = callback_path.rsplit(".", 1)
+        module = import_module(module_path)
+        callback_callable = getattr(module, function_name)
+
+        log.debug("Executing callback %s(%s)...", callback_path, 
callback_kwargs)
+
+        # If the callback is a callable, call it.  If it is a class, instantiate it.
+        result = callback_callable(**callback_kwargs)
+
+        # If the callback is a class then it is now instantiated and callable, call it.
+        if callable(result):
+            context = callback_kwargs.get("context", {}) if accepts_context(result) else {}
+            log.debug("Calling result with context for %s", callback_path)
+            result = result(context)
+
+        log.info("Callback %s executed successfully.", callback_path)
+        return True, None
+
+    except Exception as e:
+        error_msg = f"Callback execution failed: {type(e).__name__}: {str(e)}"
+        log.exception("Callback %s(%s) execution failed: %s", callback_path, 
callback_kwargs, error_msg)
+        return False, error_msg
+
+
+def _callback_subprocess_main():
+    """
+    Entry point for the callback subprocess, runs after fork.
+
+    Reads the callback path and kwargs from environment variables,
+    executes the callback, and exits with an appropriate code.
+    """
+    import json
+    import sys
+
+    log = structlog.get_logger(logger_name="callback_runner")
+
+    callback_path = os.environ.get("_AIRFLOW_CALLBACK_PATH", "")
+    callback_kwargs_json = os.environ.get("_AIRFLOW_CALLBACK_KWARGS", "{}")
+
+    if not callback_path:
+        print("No callback path found in environment", file=sys.stderr)
+        sys.exit(1)
+
+    try:
+        callback_kwargs = json.loads(callback_kwargs_json)
+    except Exception:
+        log.exception("Failed to deserialize callback kwargs")
+        sys.exit(1)
+
+    success, error_msg = execute_callback(callback_path, callback_kwargs, log)
+    if not success:
+        log.error("Callback failed", error=error_msg)
+        sys.exit(1)
+
+
+# An empty message set; the callback subprocess doesn't currently communicate back to the
+# supervisor. This means callback code cannot access runtime services like Connection.get()
+# or Variable.get() which require the supervisor to pass requests to the API server.
+# To enable this, add the needed message types here and implement _handle_request accordingly.
+# See ActivitySubprocess.decoder in supervisor.py for the full task message set and examples.
+_EmptyMessage: TypeAdapter[None] = TypeAdapter(None)
+
+
[email protected](kw_only=True)
+class CallbackSubprocess(WatchedSubprocess):
+    """
+    Supervised subprocess for executing callbacks.
+
+    Uses the WatchedSubprocess infrastructure for fork/monitor/signal handling
+    while keeping a simple lifecycle: start, run callback, exit.
+    """
+
+    decoder: ClassVar[TypeAdapter] = _EmptyMessage
+
+    @classmethod
+    def start(  # type: ignore[override]
+        cls,
+        *,
+        id: str,
+        callback_path: str,
+        callback_kwargs: dict,
+        target: Callable[[], None] = _callback_subprocess_main,
+        logger: FilteringBoundLogger | None = None,
+        **kwargs,
+    ) -> Self:
+        """Fork and start a new subprocess to execute the given callback."""
+        import json
+        from datetime import date, datetime
+        from uuid import UUID
+
+        class _ExtendedEncoder(json.JSONEncoder):
+            """Handle types that stdlib json can't serialize (UUID, datetime, 
etc.)."""
+
+            def default(self, o):
+                if isinstance(o, UUID):
+                    return str(o)
+                if isinstance(o, datetime):
+                    return o.isoformat()
+                if isinstance(o, date):
+                    return o.isoformat()
+                if hasattr(o, "__str__"):

Review Comment:
   `hasattr(o, "__str__")` is always `True` because every Python object 
inherits `__str__` from `object`. This means `super().default(o)` on the next 
line is dead code, and any unserializable type in `callback_kwargs` will be 
silently stringified instead of raising `TypeError`. This could mask bugs where 
bad data sneaks through.
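   
   A stricter sketch that keeps the useful conversions but lets genuinely 
unserializable values raise at serialization time (the class name here is 
illustrative):
   
   ```python
   import json
   from datetime import date, datetime
   from uuid import UUID


   class _StrictExtendedEncoder(json.JSONEncoder):
       def default(self, o):
           if isinstance(o, UUID):
               return str(o)
           if isinstance(o, date):  # datetime subclasses date, so this covers both
               return o.isoformat()
           # No hasattr(o, "__str__") fallback: it is True for every object.
           # Let the stdlib default() raise TypeError for anything else.
           return super().default(o)


   json.dumps({"id": UUID(int=1), "when": datetime(2024, 1, 1)}, cls=_StrictExtendedEncoder)
   # json.dumps({"conn": object()}, cls=_StrictExtendedEncoder)  # raises TypeError
   ```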



##########
airflow-core/src/airflow/executors/local_executor.py:
##########
@@ -99,73 +110,32 @@ def _run_worker(
         with unread_messages:
             unread_messages.value -= 1
 
-        # Handle different workload types
-        if isinstance(workload, workloads.ExecuteTask):
-            try:
-                _execute_work(log, workload, team_conf)
-                output.put((workload.ti.key, TaskInstanceState.SUCCESS, None))
-            except Exception as e:
-                log.exception("Task execution failed.")
-                output.put((workload.ti.key, TaskInstanceState.FAILED, e))
-
-        elif isinstance(workload, workloads.ExecuteCallback):
-            output.put((workload.callback.id, CallbackState.RUNNING, None))
-            try:
-                _execute_callback(log, workload, team_conf)
-                output.put((workload.callback.id, CallbackState.SUCCESS, None))
-            except Exception as e:
-                log.exception("Callback execution failed")
-                output.put((workload.callback.id, CallbackState.FAILED, e))
-
-        else:
-            raise ValueError(f"LocalExecutor does not know how to handle 
{type(workload)}")
-
-
-def _execute_work(log: Logger, workload: workloads.ExecuteTask, team_conf) -> None:
-    """
-    Execute command received and stores result state in queue.
-
-    :param log: Logger instance
-    :param workload: The workload to execute
-    :param team_conf: Team-specific executor configuration
-    """
-    from airflow.sdk.execution_time.supervisor import supervise
-
-    setproctitle(f"{_get_executor_process_title_prefix(team_conf.team_name)} 
{workload.ti.id}", log)
-
-    base_url = team_conf.get("api", "base_url", fallback="/")
-    # If it's a relative URL, use localhost:8080 as the default
-    if base_url.startswith("/"):
-        base_url = f"http://localhost:8080{base_url}";
-    default_execution_api_server = f"{base_url.rstrip('/')}/execution/"
-
-    # This will return the exit code of the task process, but we don't care about that, just if the
-    # _supervisor_ had an error reporting the state back (which will result in an exception.)
-    supervise(
-        # This is the "wrong" ti type, but it duck types the same. TODO: 
Create a protocol for this.
-        ti=workload.ti,  # type: ignore[arg-type]
-        dag_rel_path=workload.dag_rel_path,
-        bundle_info=workload.bundle_info,
-        token=workload.token,
-        server=team_conf.get("core", "execution_api_server_url", 
fallback=default_execution_api_server),
-        log_path=workload.log_path,
-    )
+        try:
+            _execute_workload(log, workload, team_conf)
+            output.put((workload.key, workload.success_state, None))

Review Comment:
   The old `_run_worker` sent `output.put((workload.callback.id, 
CallbackState.RUNNING, None))` before executing a callback. The new generic 
path only reports `success_state` or `failure_state`. The scheduler at 
`scheduler_job_runner.py:1214` explicitly handles `CallbackState.RUNNING` to 
update the database state. With this change, callbacks jump from `QUEUED` 
directly to `SUCCESS`/`FAILED`.
   
   Was dropping the `RUNNING` state transition intentional? If so, the 
scheduler handler for `CallbackState.RUNNING` is now dead code.
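   
   If it wasn't intentional, here is a sketch of keeping the transition in the 
generic path (`running_state` is a hypothetical optional attribute, set to 
`CallbackState.RUNNING` on callback workloads and left unset on tasks):
   
   ```python
   try:
       # Hypothetical: report an intermediate state before execution when the
       # workload defines one (e.g. CallbackState.RUNNING for callbacks).
       running_state = getattr(workload, "running_state", None)
       if running_state is not None:
           output.put((workload.key, running_state, None))
       _execute_workload(log, workload, team_conf)
       output.put((workload.key, workload.success_state, None))
   except Exception as e:
       log.exception("Workload execution failed.")
       output.put((workload.key, workload.failure_state, e))
   ```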



##########
task-sdk/src/airflow/sdk/execution_time/callback_supervisor.py:
##########
@@ -0,0 +1,344 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+"""Supervised execution of callback workloads."""
+
+from __future__ import annotations
+
+import os
+import time
+from importlib import import_module
+from typing import TYPE_CHECKING, BinaryIO, ClassVar, Protocol
+
+import attrs
+import structlog
+from pydantic import TypeAdapter
+
+from airflow.sdk.execution_time.supervisor import (
+    MIN_HEARTBEAT_INTERVAL,
+    SOCKET_CLEANUP_TIMEOUT,
+    WatchedSubprocess,
+)
+
+if TYPE_CHECKING:
+    from collections.abc import Callable
+
+    from structlog.typing import FilteringBoundLogger
+    from typing_extensions import Self
+
+    # Core (airflow.executors.workloads.base.BundleInfo) and SDK (airflow.sdk.api.datamodels._generated.BundleInfo)
+    # are structurally identical, but MyPy treats them as different types. This Protocol makes MyPy happy.
+    class _BundleInfoLike(Protocol):
+        name: str
+        version: str | None
+
+
+__all__ = ["CallbackSubprocess", "supervise_callback"]
+
+log: FilteringBoundLogger = structlog.get_logger(logger_name="callback_supervisor")
+
+
+def execute_callback(
+    callback_path: str,
+    callback_kwargs: dict,
+    log,
+) -> tuple[bool, str | None]:
+    """
+    Execute a callback function by importing and calling it, returning the success state.
+
+    Supports two patterns:
+    1. Functions - called directly with kwargs
+    2. Classes that return callable instances (like BaseNotifier) - instantiated then called with context
+
+    Example:
+        # Function callback
+        execute_callback("my_module.alert_func", {"msg": "Alert!", "context": 
{...}}, log)
+
+        # Notifier callback
+        execute_callback("airflow.providers.slack...SlackWebhookNotifier", 
{"text": "Alert!"}, log)
+
+    :param callback_path: Dot-separated import path to the callback function or class.
+    :param callback_kwargs: Keyword arguments to pass to the callback.
+    :param log: Logger instance for recording execution.
+    :return: Tuple of (success: bool, error_message: str | None)
+    """
+    from airflow.sdk._shared.module_loading import accepts_context
+
+    if not callback_path:
+        return False, "Callback path not found."
+
+    try:
+        # Import the callback callable
+        # Expected format: "module.path.to.function_or_class"
+        module_path, function_name = callback_path.rsplit(".", 1)
+        module = import_module(module_path)
+        callback_callable = getattr(module, function_name)
+
+        log.debug("Executing callback %s(%s)...", callback_path, 
callback_kwargs)
+
+        # If the callback is a callable, call it.  If it is a class, instantiate it.
+        result = callback_callable(**callback_kwargs)
+
+        # If the callback is a class then it is now instantiated and callable, call it.
+        if callable(result):
+            context = callback_kwargs.get("context", {}) if accepts_context(result) else {}
+            log.debug("Calling result with context for %s", callback_path)
+            result = result(context)
+
+        log.info("Callback %s executed successfully.", callback_path)
+        return True, None
+
+    except Exception as e:
+        error_msg = f"Callback execution failed: {type(e).__name__}: {str(e)}"
+        log.exception("Callback %s(%s) execution failed: %s", callback_path, 
callback_kwargs, error_msg)
+        return False, error_msg
+
+
+def _callback_subprocess_main():
+    """
+    Entry point for the callback subprocess, runs after fork.
+
+    Reads the callback path and kwargs from environment variables,
+    executes the callback, and exits with an appropriate code.
+    """
+    import json
+    import sys
+
+    log = structlog.get_logger(logger_name="callback_runner")
+
+    callback_path = os.environ.get("_AIRFLOW_CALLBACK_PATH", "")
+    callback_kwargs_json = os.environ.get("_AIRFLOW_CALLBACK_KWARGS", "{}")
+
+    if not callback_path:
+        print("No callback path found in environment", file=sys.stderr)
+        sys.exit(1)
+
+    try:
+        callback_kwargs = json.loads(callback_kwargs_json)
+    except Exception:
+        log.exception("Failed to deserialize callback kwargs")
+        sys.exit(1)
+
+    success, error_msg = execute_callback(callback_path, callback_kwargs, log)
+    if not success:
+        log.error("Callback failed", error=error_msg)
+        sys.exit(1)
+
+
+# An empty message set; the callback subprocess doesn't currently communicate back to the
+# supervisor. This means callback code cannot access runtime services like Connection.get()
+# or Variable.get() which require the supervisor to pass requests to the API server.
+# To enable this, add the needed message types here and implement _handle_request accordingly.
+# See ActivitySubprocess.decoder in supervisor.py for the full task message set and examples.
+_EmptyMessage: TypeAdapter[None] = TypeAdapter(None)
+
+
[email protected](kw_only=True)
+class CallbackSubprocess(WatchedSubprocess):
+    """
+    Supervised subprocess for executing callbacks.
+
+    Uses the WatchedSubprocess infrastructure for fork/monitor/signal handling
+    while keeping a simple lifecycle: start, run callback, exit.
+    """
+
+    decoder: ClassVar[TypeAdapter] = _EmptyMessage
+
+    @classmethod
+    def start(  # type: ignore[override]
+        cls,
+        *,
+        id: str,
+        callback_path: str,
+        callback_kwargs: dict,
+        target: Callable[[], None] = _callback_subprocess_main,
+        logger: FilteringBoundLogger | None = None,
+        **kwargs,
+    ) -> Self:
+        """Fork and start a new subprocess to execute the given callback."""
+        import json
+        from datetime import date, datetime
+        from uuid import UUID
+
+        class _ExtendedEncoder(json.JSONEncoder):
+            """Handle types that stdlib json can't serialize (UUID, datetime, 
etc.)."""
+
+            def default(self, o):
+                if isinstance(o, UUID):
+                    return str(o)
+                if isinstance(o, datetime):
+                    return o.isoformat()
+                if isinstance(o, date):
+                    return o.isoformat()
+                if hasattr(o, "__str__"):
+                    return str(o)
+                return super().default(o)
+
+        # Pass the callback data to the child process via environment variables.
+        # These are set before fork so the child inherits them, and cleaned up in the parent after.
+        os.environ["_AIRFLOW_CALLBACK_PATH"] = callback_path
+        os.environ["_AIRFLOW_CALLBACK_KWARGS"] = json.dumps(callback_kwargs, 
cls=_ExtendedEncoder)

Review Comment:
   Passing callback data via `os.environ` before fork works, but environment 
strings have size limits once anything `exec`s (Linux enforces 
`MAX_ARG_STRLEN` = 131072 bytes per string at `execve`). Large context dicts 
in `callback_kwargs` could hit this. The parent also has the env vars visible 
to all threads between set and cleanup.
   
   Would passing the data over the socket pair (like `ActivitySubprocess` sends 
`StartupDetails`) be more robust here?
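   
   A rough sketch of that alternative, assuming a simple length-prefixed JSON 
frame (names and framing are illustrative, not the actual `WatchedSubprocess` 
plumbing):
   
   ```python
   import json
   import os
   import socket


   def _recv_exact(sock: socket.socket, n: int) -> bytes:
       # recv() may return short reads, so loop until n bytes arrive.
       buf = b""
       while len(buf) < n:
           chunk = sock.recv(n - len(buf))
           if not chunk:
               raise EOFError("socket closed before full payload arrived")
           buf += chunk
       return buf


   parent_sock, child_sock = socket.socketpair()
   payload = json.dumps(
       {"callback_path": "my_module.alert_func", "callback_kwargs": {"msg": "Alert!"}}
   ).encode()

   if os.fork() == 0:
       # Child: read the length-prefixed payload. os.environ is never touched,
       # so there is no size limit and nothing is visible to parent threads.
       parent_sock.close()
       size = int.from_bytes(_recv_exact(child_sock, 4), "big")
       startup = json.loads(_recv_exact(child_sock, size))
       # ... hand startup["callback_path"] / ["callback_kwargs"] to execute_callback()
       os._exit(0)
   else:
       child_sock.close()
       parent_sock.sendall(len(payload).to_bytes(4, "big") + payload)
   ```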



-- 
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.

To unsubscribe, e-mail: [email protected]

For queries about this service, please contact Infrastructure at:
[email protected]
