jscheffl commented on code in PR #38788:
URL: https://github.com/apache/airflow/pull/38788#discussion_r1554638601


##########
tests/models/test_taskinstance.py:
##########
@@ -1249,16 +1255,16 @@ def test_depends_on_past(self, dag_maker):
             pytest.param(
                 "all_done_setup_success",
                 2,
-                _UpstreamTIStates(6, 0, 1, 0, 0, 7, 1, 0),
+                [SUCCESS, SUCCESS, SUCCESS, SUCCESS, SUCCESS, SUCCESS, FAILED],
                 True,
                 (True, None),  # is_teardown=True, expect_state=None
                 True,
                 id="is teardown one setup failed one setup success",
             ),
             pytest.param(
-                "all_done_setup_success",
+                "all_done",
                 2,
-                _UpstreamTIStates(6, 0, 1, 0, 0, 7, 1, 0),
+                [SUCCESS, SUCCESS, SUCCESS, SUCCESS, SUCCESS, SUCCESS, FAILED],

Review Comment:
   This pytest fails because the dependency context seems to come back empty after evaluation. I am not sure why; I assume something is not correctly mocked.



##########
tests/models/test_taskinstance.py:
##########
@@ -1294,48 +1300,56 @@ def test_depends_on_past(self, dag_maker):
             pytest.param(
                 "all_done_setup_success",
                 1,
-                _UpstreamTIStates(3, 0, 1, 0, 0, 4, 1, 0),
+                [SUCCESS, SUCCESS, SUCCESS, FAILED, None, SUCCESS],
                 True,
                 (True, None),  # is_teardown=True, expect_state=None
                 False,
                 id="is teardown not all done one failed",
             ),
             pytest.param(
-                "all_done_setup_success",
+                "all_done",
                 1,
-                _UpstreamTIStates(3, 0, 1, 0, 0, 4, 1, 0),
+                [SUCCESS, SUCCESS, SUCCESS, FAILED, None, SUCCESS],
                 True,
-                (False, "upstream_failed"),  # is_teardown=False, 
expect_state="upstream_failed"
+                (False, None),  # is_teardown=False, expect_state=None (as one 
task pending)
                 False,
                 id="not teardown not all done one failed",
             ),
             pytest.param(
                 "all_done_setup_success",
                 1,
-                _UpstreamTIStates(3, 1, 0, 0, 0, 4, 1, 0),
+                [SUCCESS, SUCCESS, SUCCESS, SKIPPED, None, SUCCESS],
                 True,
                 (True, None),  # is_teardown=True, expect_state=None
                 False,
-                id="not all done one skipped",
+                id="is teardown not all done one skipped",
             ),
             pytest.param(
-                "all_done_setup_success",
+                "all_done",
                 1,
-                _UpstreamTIStates(3, 1, 0, 0, 0, 4, 1, 0),
+                [SUCCESS, SUCCESS, SUCCESS, SKIPPED, None, SUCCESS],
                 True,
-                (False, "skipped"),  # is_teardown=False, 
expect_state="skipped'
+                (False, None),  # is_teardown=False, expect_state=None
                 False,
-                id="not all done one skipped",
+                id="not teardown not all done one skipped",
+            ),
+            pytest.param(
+                "all_done",
+                1,
+                [SUCCESS, SUCCESS, SUCCESS, SUCCESS, SKIPPED, SUCCESS],
+                True,
+                (False, "skipped"),  # is_teardown=False, 
expect_state="skipped'
+                True,
+                id="not teardown all done one skipped",

Review Comment:
   In this test I would have expected a skipped result, but it returns `None` after evaluation. I assume some mocks are wrong here :-(
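   To see whether the skip propagation ever fires, it might help to re-read the downstream TI right after the dep evaluation; a rough sketch, assuming `ti` is the downstream task instance, `session` is the test session, and this case runs with flag_upstream_failed=True:
   ```python
   # Hedged sketch, not from the PR: with flag_upstream_failed=True the dep
   # evaluation should flag the downstream TI to "skipped"; a state still at
   # None would mean the flagging branch never executed.
   session.refresh(ti)
   print(ti.state)
   ```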



##########
tests/models/test_taskinstance.py:
##########
@@ -1267,25 +1273,25 @@ def test_depends_on_past(self, dag_maker):
             pytest.param(
                 "all_done_setup_success",
                 2,
-                _UpstreamTIStates(6, 1, 0, 0, 0, 7, 1, 1),
+                [SUCCESS, SUCCESS, SUCCESS, SUCCESS, SUCCESS, SUCCESS, SKIPPED],
                 True,
                 (True, None),  # is_teardown=True, expect_state=None
                 True,
                 id="is teardown one setup success one setup skipped",
             ),
             pytest.param(
-                "all_done_setup_success",
+                "all_done",
                 2,
-                _UpstreamTIStates(6, 1, 0, 0, 0, 7, 1, 1),
+                [SUCCESS, SUCCESS, SUCCESS, SUCCESS, SUCCESS, SUCCESS, SKIPPED],

Review Comment:
   This pytest fails because the dependency context seems to come back empty after evaluation. I am not sure why; I assume something is not correctly mocked.



##########
tests/models/test_taskinstance.py:
##########
@@ -1347,34 +1361,38 @@ def test_check_task_dependencies(
             set_teardown, expect_state = expect_state
             assert isinstance(set_teardown, bool)
 
-        monkeypatch.setattr(_UpstreamTIStates, "calculate", lambda *_: upstream_states)
-
         # sanity checks
-        s = upstream_states
-        assert s.skipped >= s.skipped_setup
-        assert s.success >= s.success_setup
-        assert s.done == s.failed + s.success + s.removed + s.upstream_failed + s.skipped
+        assert len(upstream_states) == upstream_setups + 5
 
         with dag_maker() as dag:
             downstream = EmptyOperator(task_id="downstream", 
trigger_rule=trigger_rule)
             if set_teardown:
                 downstream.as_teardown()
+            work_tasks = []
             for i in range(5):
                 task = EmptyOperator(task_id=f"work_{i}", dag=dag)
                 task.set_downstream(downstream)
+                work_tasks.append(task)
             for i in range(upstream_setups):
                 task = EmptyOperator(task_id=f"setup_{i}", dag=dag).as_setup()
-                task.set_downstream(downstream)
+                task.set_downstream(work_tasks)

Review Comment:
   Note: I re-modelled the test DAG so that the setup tasks come before the worker tasks; otherwise each setup would be linked directly to the downstream teardown, which (1) does not make sense and (2) is explicitly not handled. A condensed sketch of the resulting wiring follows.
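   Condensed from the diff above (same ids and fixtures as the test):
   ```python
   # Condensed from the diff above: setups fan out to the work tasks, and
   # only the work tasks feed the downstream (teardown) task.
   with dag_maker() as dag:
       downstream = EmptyOperator(task_id="downstream", trigger_rule=trigger_rule)
       if set_teardown:
           downstream.as_teardown()
       work_tasks = [EmptyOperator(task_id=f"work_{i}", dag=dag) for i in range(5)]
       for task in work_tasks:
           task.set_downstream(downstream)
       for i in range(upstream_setups):
           setup = EmptyOperator(task_id=f"setup_{i}", dag=dag).as_setup()
           setup.set_downstream(work_tasks)  # setup >> work, not setup >> teardown
   ```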



-- 
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.
