This is an automated email from the ASF dual-hosted git repository.
potiuk pushed a commit to branch v2-10-test
in repository https://gitbox.apache.org/repos/asf/airflow.git
The following commit(s) were added to refs/heads/v2-10-test by this push:
new 71261fe018b fix: log action get the correct request body (#45546) (#45560)
71261fe018b is described below
commit 71261fe018b0095a2ec9e4053553d46fe87acd94
Author: Pierre Jeambrun <[email protected]>
AuthorDate: Mon Jan 13 19:46:53 2025 +0100
fix: log action get the correct request body (#45546) (#45560)
* fix: log action get the correct request body (#45546)
(cherry picked from commit bae4bb1d549f20a54a2e8c27c57377a0207f393b)
* Fix log_action decorator when content type is None (#45567)
(cherry picked from commit 46304d8d7e5e0d47e3829ae51401e8a4b9bfc4ae)
---------
Co-authored-by: luoyuliuyin <[email protected]>
Co-authored-by: Daniel Standish <[email protected]>
---
airflow/www/decorators.py | 2 +-
.../endpoints/test_dag_run_endpoint.py | 56 ++++++++++++++++++++++
2 files changed, 57 insertions(+), 1 deletion(-)
diff --git a/airflow/www/decorators.py b/airflow/www/decorators.py
index 3eae5f62391..b7dc45515e2 100644
--- a/airflow/www/decorators.py
+++ b/airflow/www/decorators.py
@@ -95,7 +95,7 @@ def action_logging(func: T | None = None, event: str | None = None) -> T | Callable:
user_display = get_auth_manager().get_user_display_name()
isAPIRequest = request.blueprint == "/api/v1"
- hasJsonBody = request.headers.get("content-type") == "application/json" and request.json
+ hasJsonBody = "application/json" in request.headers.get("content-type", "") and request.json
fields_skip_logging = {
"csrf_token",
diff --git a/tests/api_connexion/endpoints/test_dag_run_endpoint.py b/tests/api_connexion/endpoints/test_dag_run_endpoint.py
index dc77648784c..7b63aca840f 100644
--- a/tests/api_connexion/endpoints/test_dag_run_endpoint.py
+++ b/tests/api_connexion/endpoints/test_dag_run_endpoint.py
@@ -16,6 +16,7 @@
# under the License.
from __future__ import annotations
+import json
import urllib
from datetime import timedelta
from unittest import mock
@@ -25,6 +26,7 @@ import time_machine
from airflow.api_connexion.exceptions import EXCEPTIONS_LINK_MAP
from airflow.datasets import Dataset
+from airflow.models import Log
from airflow.models.dag import DAG, DagModel
from airflow.models.dagrun import DagRun
from airflow.models.dataset import DatasetEvent, DatasetModel
@@ -1729,6 +1731,60 @@ class TestPatchDagRunState(TestDagRunEndpoint):
"note": None,
}
+ @pytest.mark.parametrize("state", ["failed", "success", "queued"])
+ @pytest.mark.parametrize("run_type", [state.value for state in DagRunType])
+ def test_action_logging(self, state, run_type, dag_maker, session):
+ dag_id = "TEST_DAG_ID"
+ dag_run_id = "TEST_DAG_RUN_ID"
+ with dag_maker(dag_id) as dag:
+ task = EmptyOperator(task_id="task_id", dag=dag)
+ self.app.dag_bag.bag_dag(dag, root_dag=dag)
+ dr = dag_maker.create_dagrun(run_id=dag_run_id, run_type=run_type)
+ ti = dr.get_task_instance(task_id="task_id")
+ ti.task = task
+ ti.state = State.RUNNING
+ session.merge(ti)
+ session.commit()
+
+ request_json = {"state": state}
+
+ self.client.patch(
+ f"api/v1/dags/{dag_id}/dagRuns/{dag_run_id}",
+ json=request_json,
+ environ_overrides={"REMOTE_USER": "test"},
+ )
+
+ log = (
+ session.query(Log)
+ .filter(
+ Log.dag_id == dag_id,
+ Log.run_id == dag_run_id,
+ Log.event == "api.update_dag_run_state",
+ )
+ .order_by(Log.id.desc())
+ .first()
+ )
+ assert log.extra == json.dumps(request_json)
+
+ self.client.patch(
+ f"api/v1/dags/{dag_id}/dagRuns/{dag_run_id}",
+ json=request_json,
+ environ_overrides={"REMOTE_USER": "test"},
+ headers={"content-type": "application/json; charset=utf-8"},
+ )
+
+ log = (
+ session.query(Log)
+ .filter(
+ Log.dag_id == dag_id,
+ Log.run_id == dag_run_id,
+ Log.event == "api.update_dag_run_state",
+ )
+ .order_by(Log.id.desc())
+ .first()
+ )
+ assert log.extra == json.dumps(request_json)
+
def test_schema_validation_error_raises(self, dag_maker, session):
dag_id = "TEST_DAG_ID"
dag_run_id = "TEST_DAG_RUN_ID"