This is an automated email from the ASF dual-hosted git repository.

potiuk pushed a commit to branch main
in repository https://gitbox.apache.org/repos/asf/airflow.git


The following commit(s) were added to refs/heads/main by this push:
     new c4cd58cc30a Add config setting to control exposing stacktraces (#51509)
c4cd58cc30a is described below

commit c4cd58cc30aeb2ba0c6cb92a5f1d00d5c25bdada
Author: Zhen-Lun (Kevin) Hong <zhenlun.hon...@gmail.com>
AuthorDate: Tue Jun 10 22:33:01 2025 +0800

    Add config setting to control exposing stacktraces (#51509)
    
    * feat: add a config setting to expose stacktraces
    
    * fix: remove the starting newline to simplify the traceback info
    
    * test: update tests to align with the config change
    
    * feat: add exception ID for better correlation between UI messages and log
entries
    
    * test: update tests
    
    * fix: use random string as exception id instead of python object id
    
    * fix: update tests with patched random strings
    
    * test: use patch fixtures in tests to prevent side effects
---
 .../src/airflow/api_fastapi/common/exceptions.py   | 23 ++++++++++++++
 .../src/airflow/config_templates/config.yml        |  6 ++++
 .../unit/api_fastapi/common/test_exceptions.py     | 35 ++++++++++++++++++++--
 .../core_api/routes/public/test_connections.py     |  2 +-
 .../core_api/routes/public/test_dag_run.py         |  9 ++----
 .../core_api/routes/public/test_pools.py           |  2 +-
 6 files changed, 66 insertions(+), 11 deletions(-)

diff --git a/airflow-core/src/airflow/api_fastapi/common/exceptions.py 
b/airflow-core/src/airflow/api_fastapi/common/exceptions.py
index 061eec55d3d..39909b7a463 100644
--- a/airflow-core/src/airflow/api_fastapi/common/exceptions.py
+++ b/airflow-core/src/airflow/api_fastapi/common/exceptions.py
@@ -17,6 +17,8 @@
 
 from __future__ import annotations
 
+import logging
+import traceback
 from abc import ABC, abstractmethod
 from enum import Enum
 from typing import Generic, TypeVar
@@ -24,8 +26,13 @@ from typing import Generic, TypeVar
 from fastapi import HTTPException, Request, status
 from sqlalchemy.exc import IntegrityError
 
+from airflow.configuration import conf
+from airflow.utils.strings import get_random_string
+
 T = TypeVar("T", bound=Exception)
 
+log = logging.getLogger(__name__)
+
 
 class BaseErrorHandler(Generic[T], ABC):
     """Base class for error handlers."""
@@ -61,12 +68,28 @@ class 
_UniqueConstraintErrorHandler(BaseErrorHandler[IntegrityError]):
     def exception_handler(self, request: Request, exc: IntegrityError):
         """Handle IntegrityError exception."""
         if self._is_dialect_matched(exc):
+            exception_id = get_random_string()
+            stacktrace = ""
+            for tb in traceback.format_tb(exc.__traceback__):
+                stacktrace += tb
+
+            log_message = f"Error with id {exception_id}\n{stacktrace}"
+            log.error(log_message)
+            if conf.get("api", "expose_stacktrace") == "True":
+                message = log_message
+            else:
+                message = (
+                    "Serious error when handling your request. Check logs for 
more details - "
+                    f"you will find it in api server when you look for ID 
{exception_id}"
+                )
+
             raise HTTPException(
                 status_code=status.HTTP_409_CONFLICT,
                 detail={
                     "reason": "Unique constraint violation",
                     "statement": str(exc.statement),
                     "orig_error": str(exc.orig),
+                    "message": message,
                 },
             )
 
diff --git a/airflow-core/src/airflow/config_templates/config.yml 
b/airflow-core/src/airflow/config_templates/config.yml
index bda494eed5d..30c6037da4a 100644
--- a/airflow-core/src/airflow/config_templates/config.yml
+++ b/airflow-core/src/airflow/config_templates/config.yml
@@ -1321,6 +1321,12 @@ api:
       type: string
       example: ~
       default: "False"
+    expose_stacktrace:
+      description: Expose stacktrace in the web server
+      version_added: ~
+      type: string
+      example: ~
+      default: "False"
     base_url:
       description: |
         The base url of the API server. Airflow cannot guess what domain or 
CNAME you are using.
diff --git a/airflow-core/tests/unit/api_fastapi/common/test_exceptions.py 
b/airflow-core/tests/unit/api_fastapi/common/test_exceptions.py
index bed77a67a27..b5136310611 100644
--- a/airflow-core/tests/unit/api_fastapi/common/test_exceptions.py
+++ b/airflow-core/tests/unit/api_fastapi/common/test_exceptions.py
@@ -16,6 +16,8 @@
 # under the License.
 from __future__ import annotations
 
+from unittest.mock import patch
+
 import pytest
 from fastapi import HTTPException, status
 from sqlalchemy.exc import IntegrityError
@@ -26,6 +28,7 @@ from airflow.models import DagRun, Pool, Variable
 from airflow.utils.session import provide_session
 from airflow.utils.state import DagRunState
 
+from tests_common.test_utils.config import conf_vars
 from tests_common.test_utils.db import clear_db_connections, clear_db_dags, 
clear_db_pools, clear_db_runs
 
 pytestmark = pytest.mark.db_test
@@ -50,6 +53,11 @@ PYTEST_MARKS_DB_DIALECT = [
         "reason": f"Test for {_DatabaseDialect.POSTGRES.value} only",
     },
 ]
+MOCKED_ID = "TgVcT3QW"
+MESSAGE = (
+    "Serious error when handling your request. Check logs for more details - "
+    f"you will find it in api server when you look for ID {MOCKED_ID}"
+)
 
 
 def generate_test_cases_parametrize(
@@ -109,6 +117,7 @@ class TestUniqueConstraintErrorHandler:
                             "reason": "Unique constraint violation",
                             "statement": "INSERT INTO slot_pool (pool, slots, 
description, include_deferred) VALUES (?, ?, ?, ?)",
                             "orig_error": "UNIQUE constraint failed: 
slot_pool.pool",
+                            "message": MESSAGE,
                         },
                     ),
                     HTTPException(
@@ -117,6 +126,7 @@ class TestUniqueConstraintErrorHandler:
                             "reason": "Unique constraint violation",
                             "statement": "INSERT INTO slot_pool (pool, slots, 
description, include_deferred) VALUES (%s, %s, %s, %s)",
                             "orig_error": "(1062, \"Duplicate entry 
'test_pool' for key 'slot_pool.slot_pool_pool_uq'\")",
+                            "message": MESSAGE,
                         },
                     ),
                     HTTPException(
@@ -125,6 +135,7 @@ class TestUniqueConstraintErrorHandler:
                             "reason": "Unique constraint violation",
                             "statement": "INSERT INTO slot_pool (pool, slots, 
description, include_deferred) VALUES (%(pool)s, %(slots)s, %(description)s, 
%(include_deferred)s) RETURNING slot_pool.id",
                             "orig_error": 'duplicate key value violates unique 
constraint "slot_pool_pool_uq"\nDETAIL:  Key (pool)=(test_pool) already 
exists.\n',
+                            "message": MESSAGE,
                         },
                     ),
                 ],
@@ -135,6 +146,7 @@ class TestUniqueConstraintErrorHandler:
                             "reason": "Unique constraint violation",
                             "statement": 'INSERT INTO variable ("key", val, 
description, is_encrypted) VALUES (?, ?, ?, ?)',
                             "orig_error": "UNIQUE constraint failed: 
variable.key",
+                            "message": MESSAGE,
                         },
                     ),
                     HTTPException(
@@ -143,6 +155,7 @@ class TestUniqueConstraintErrorHandler:
                             "reason": "Unique constraint violation",
                             "statement": "INSERT INTO variable (`key`, val, 
description, is_encrypted) VALUES (%s, %s, %s, %s)",
                             "orig_error": "(1062, \"Duplicate entry 'test_key' 
for key 'variable.variable_key_uq'\")",
+                            "message": MESSAGE,
                         },
                     ),
                     HTTPException(
@@ -151,14 +164,23 @@ class TestUniqueConstraintErrorHandler:
                             "reason": "Unique constraint violation",
                             "statement": "INSERT INTO variable (key, val, 
description, is_encrypted) VALUES (%(key)s, %(val)s, %(description)s, 
%(is_encrypted)s) RETURNING variable.id",
                             "orig_error": 'duplicate key value violates unique 
constraint "variable_key_uq"\nDETAIL:  Key (key)=(test_key) already exists.\n',
+                            "message": MESSAGE,
                         },
                     ),
                 ],
             ],
         ),
     )
+    @patch("airflow.api_fastapi.common.exceptions.get_random_string", 
return_value=MOCKED_ID)
+    @conf_vars({("api", "expose_stacktrace"): "False"})
     @provide_session
-    def test_handle_single_column_unique_constraint_error(self, session, 
table, expected_exception) -> None:
+    def test_handle_single_column_unique_constraint_error(
+        self,
+        mock_get_random_string,
+        session,
+        table,
+        expected_exception,
+    ) -> None:
         # Take Pool and Variable tables as test cases
         if table == "Pool":
             session.add(Pool(pool=TEST_POOL, slots=1, description="test pool", 
include_deferred=False))
@@ -188,6 +210,7 @@ class TestUniqueConstraintErrorHandler:
                             "reason": "Unique constraint violation",
                             "statement": "INSERT INTO dag_run (dag_id, 
queued_at, logical_date, start_date, end_date, state, run_id, creating_job_id, 
run_type, triggered_by, conf, data_interval_start, data_interval_end, 
run_after, last_scheduling_decision, log_template_id, updated_at, clear_number, 
backfill_id, bundle_version, scheduled_by_job_id, context_carrier, 
created_dag_version_id) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, 
(SELECT max(log_template.id) AS max_1 \nFROM log [...]
                             "orig_error": "UNIQUE constraint failed: 
dag_run.dag_id, dag_run.run_id",
+                            "message": MESSAGE,
                         },
                     ),
                     HTTPException(
@@ -196,6 +219,7 @@ class TestUniqueConstraintErrorHandler:
                             "reason": "Unique constraint violation",
                             "statement": "INSERT INTO dag_run (dag_id, 
queued_at, logical_date, start_date, end_date, state, run_id, creating_job_id, 
run_type, triggered_by, conf, data_interval_start, data_interval_end, 
run_after, last_scheduling_decision, log_template_id, updated_at, clear_number, 
backfill_id, bundle_version, scheduled_by_job_id, context_carrier, 
created_dag_version_id) VALUES (%s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, 
%s, %s, %s, (SELECT max(log_template.id) AS m [...]
                             "orig_error": "(1062, \"Duplicate entry 
'test_dag_id-test_run_id' for key 'dag_run.dag_run_dag_id_run_id_key'\")",
+                            "message": MESSAGE,
                         },
                     ),
                     HTTPException(
@@ -204,15 +228,22 @@ class TestUniqueConstraintErrorHandler:
                             "reason": "Unique constraint violation",
                             "statement": "INSERT INTO dag_run (dag_id, 
queued_at, logical_date, start_date, end_date, state, run_id, creating_job_id, 
run_type, triggered_by, conf, data_interval_start, data_interval_end, 
run_after, last_scheduling_decision, log_template_id, updated_at, clear_number, 
backfill_id, bundle_version, scheduled_by_job_id, context_carrier, 
created_dag_version_id) VALUES (%(dag_id)s, %(queued_at)s, %(logical_date)s, 
%(start_date)s, %(end_date)s, %(state)s, %(run_i [...]
                             "orig_error": 'duplicate key value violates unique 
constraint "dag_run_dag_id_run_id_key"\nDETAIL:  Key (dag_id, 
run_id)=(test_dag_id, test_run_id) already exists.\n',
+                            "message": MESSAGE,
                         },
                     ),
                 ],
             ],
         ),
     )
+    @patch("airflow.api_fastapi.common.exceptions.get_random_string", 
return_value=MOCKED_ID)
+    @conf_vars({("api", "expose_stacktrace"): "False"})
     @provide_session
     def test_handle_multiple_columns_unique_constraint_error(
-        self, session, table, expected_exception
+        self,
+        mock_get_random_string,
+        session,
+        table,
+        expected_exception,
     ) -> None:
         if table == "DagRun":
             session.add(
diff --git 
a/airflow-core/tests/unit/api_fastapi/core_api/routes/public/test_connections.py
 
b/airflow-core/tests/unit/api_fastapi/core_api/routes/public/test_connections.py
index 6ddf1889352..a4184e29e8d 100644
--- 
a/airflow-core/tests/unit/api_fastapi/core_api/routes/public/test_connections.py
+++ 
b/airflow-core/tests/unit/api_fastapi/core_api/routes/public/test_connections.py
@@ -306,7 +306,7 @@ class TestPostConnection(TestConnectionEndpoint):
         assert response.status_code == 409
         response_json = response.json()
         assert "detail" in response_json
-        assert list(response_json["detail"].keys()) == ["reason", "statement", 
"orig_error"]
+        assert list(response_json["detail"].keys()) == ["reason", "statement", 
"orig_error", "message"]
 
     @pytest.mark.enable_redact
     @pytest.mark.parametrize(
diff --git 
a/airflow-core/tests/unit/api_fastapi/core_api/routes/public/test_dag_run.py 
b/airflow-core/tests/unit/api_fastapi/core_api/routes/public/test_dag_run.py
index baa7e503c83..718e76784bf 100644
--- a/airflow-core/tests/unit/api_fastapi/core_api/routes/public/test_dag_run.py
+++ b/airflow-core/tests/unit/api_fastapi/core_api/routes/public/test_dag_run.py
@@ -38,12 +38,7 @@ from airflow.utils.state import DagRunState, State
 from airflow.utils.types import DagRunTriggeredByType, DagRunType
 
 from tests_common.test_utils.api_fastapi import _check_dag_run_note, 
_check_last_log
-from tests_common.test_utils.db import (
-    clear_db_dags,
-    clear_db_logs,
-    clear_db_runs,
-    clear_db_serialized_dags,
-)
+from tests_common.test_utils.db import clear_db_dags, clear_db_logs, 
clear_db_runs, clear_db_serialized_dags
 from tests_common.test_utils.format_datetime import from_datetime_to_zulu, 
from_datetime_to_zulu_without_ms
 
 if TYPE_CHECKING:
@@ -1577,7 +1572,7 @@ class TestTriggerDagRun:
         assert response.status_code == 409
         response_json = response.json()
         assert "detail" in response_json
-        assert list(response_json["detail"].keys()) == ["reason", "statement", 
"orig_error"]
+        assert list(response_json["detail"].keys()) == ["reason", "statement", 
"orig_error", "message"]
 
     @pytest.mark.usefixtures("configure_git_connection_for_dag_bundle")
     def test_should_respond_200_with_null_logical_date(self, test_client):
diff --git 
a/airflow-core/tests/unit/api_fastapi/core_api/routes/public/test_pools.py 
b/airflow-core/tests/unit/api_fastapi/core_api/routes/public/test_pools.py
index 8f6ed1a7019..2d0700ea5d1 100644
--- a/airflow-core/tests/unit/api_fastapi/core_api/routes/public/test_pools.py
+++ b/airflow-core/tests/unit/api_fastapi/core_api/routes/public/test_pools.py
@@ -417,7 +417,7 @@ class TestPostPool(TestPoolsEndpoint):
         else:
             response_json = response.json()
             assert "detail" in response_json
-            assert list(response_json["detail"].keys()) == ["reason", 
"statement", "orig_error"]
+            assert list(response_json["detail"].keys()) == ["reason", 
"statement", "orig_error", "message"]
 
         assert session.query(Pool).count() == n_pools + 1
 

Reply via email to