msumit commented on a change in pull request #16233:
URL: https://github.com/apache/airflow/pull/16233#discussion_r649408501



##########
File path: airflow/www/views.py
##########
@@ -1803,54 +1804,104 @@ def _mark_task_instance_state(  # pylint: disable=too-many-arguments
 
         latest_execution_date = dag.get_latest_execution_date()
         if not latest_execution_date:
-            flash(f"Cannot make {state}, seem that dag {dag_id} has never run", "error")
+            flash(f"Cannot mark tasks as {state}, seem that dag {dag_id} has never run", "error")
             return redirect(origin)
 
         execution_date = timezone.parse(execution_date)
 
         from airflow.api.common.experimental.mark_tasks import set_state
 
-        if confirmed:
-            with create_session() as session:
-                altered = set_state(
-                    tasks=[task],
-                    execution_date=execution_date,
-                    upstream=upstream,
-                    downstream=downstream,
-                    future=future,
-                    past=past,
-                    state=state,
-                    commit=True,
-                    session=session,
-                )
+        with create_session() as session:
+            altered = set_state(
+                tasks=[task],
+                execution_date=execution_date,
+                upstream=upstream,
+                downstream=downstream,
+                future=future,
+                past=past,
+                state=state,
+                commit=True,
+                session=session,
+            )
 
-                # Clear downstream tasks that are in failed/upstream_failed state to resume them.
-                # Flush the session so that the tasks marked success are reflected in the db.
-                session.flush()
-                subdag = dag.partial_subset(
-                    task_ids_or_regex={task_id},
-                    include_downstream=True,
-                    include_upstream=False,
-                )
+            # Clear downstream tasks that are in failed/upstream_failed state to resume them.
+            # Flush the session so that the tasks marked success are reflected in the db.
+            session.flush()
+            subdag = dag.partial_subset(
+                task_ids_or_regex={task_id},
+                include_downstream=True,
+                include_upstream=False,
+            )
 
-                end_date = execution_date if not future else None
-                start_date = execution_date if not past else None
-
-                subdag.clear(
-                    start_date=start_date,
-                    end_date=end_date,
-                    include_subdags=True,
-                    include_parentdag=True,
-                    only_failed=True,
-                    session=session,
-                    # Exclude the task itself from being cleared
-                    exclude_task_ids={task_id},
-                )
+            end_date = execution_date if not future else None
+            start_date = execution_date if not past else None
 
-                session.commit()
+            subdag.clear(
+                start_date=start_date,
+                end_date=end_date,
+                include_subdags=True,
+                include_parentdag=True,
+                only_failed=True,
+                session=session,
+                # Exclude the task itself from being cleared
+                exclude_task_ids={task_id},
+            )
 
-            flash(f"Marked {state} on {len(altered)} task instances")
-            return redirect(origin)
+            session.commit()
+
+        flash(f"Marked {state} on {len(altered)} task instances")
+        return redirect(origin)
+
+    @expose('/confirm', methods=['GET'])
+    @auth.has_access(
+        [
+            (permissions.ACTION_CAN_READ, permissions.RESOURCE_DAG),
+            (permissions.ACTION_CAN_EDIT, permissions.RESOURCE_TASK_INSTANCE),
+        ]
+    )
+    @action_logging
+    def confirm(self):
+        """Show confirmation page for marking tasks as success or failed."""
+        args = request.args
+        dag_id = args.get('dag_id')
+        task_id = args.get('task_id')
+        execution_date = args.get('execution_date')
+        state = args.get('state')
+
+        upstream = to_boolean(args.get('failed_upstream'))

Review comment:
       IMO it's safe to assume that `None` means `False`. That saves a lot of code repetition and also safeguards against having to handle an edge case in the future.
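       For illustration, here is a minimal sketch of the idea (not the PR's actual helper): a `to_boolean`-style converter that maps `None` straight to `False`, so each flag stays a one-liner and a missing query argument needs no separate handling. The `failed_downstream` key below is hypothetical, included only to show the missing-argument case:

       ```python
       from typing import Optional


       def to_boolean(value: Optional[str]) -> bool:
           """Sketch of a to_boolean-style helper: None and falsy strings become False."""
           if value is None:
               return False
           return value.lower() in ("true", "t", "1", "y", "yes")


       # Stand-in for request.args in the view; 'failed_downstream' is deliberately absent.
       args = {"failed_upstream": "true"}

       upstream = to_boolean(args.get("failed_upstream"))      # "true" -> True
       downstream = to_boolean(args.get("failed_downstream"))  # missing -> None -> False

       assert upstream is True
       assert downstream is False
       ```

       With `None` treated as `False`, new boolean query flags can be added later without any extra None checks in the view.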




-- 
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.

For queries about this service, please contact Infrastructure at:
us...@infra.apache.org

