This is an automated email from the ASF dual-hosted git repository.

jhtimmins pushed a commit to branch v2-1-test
in repository https://gitbox.apache.org/repos/asf/airflow.git

commit 18c6baff95c4af77d72aefc0f4c8e5efed34416b
Author: Ephraim Anierobi <splendidzig...@gmail.com>
AuthorDate: Tue Jul 20 21:43:01 2021 +0100

    Switch test_backfill_job.py from unittest to pytest style (#17112)
    
    Prep work to use pytest fixtures in these tests
    
    (cherry picked from commit 960da8a9074a4fb58881ea79f7dc9bc8fd58a5c4)
---
 tests/jobs/test_backfill_job.py | 45 +++++++++++++++++++----------------------
 1 file changed, 21 insertions(+), 24 deletions(-)

diff --git a/tests/jobs/test_backfill_job.py b/tests/jobs/test_backfill_job.py
index 154b485..62fe153 100644
--- a/tests/jobs/test_backfill_job.py
+++ b/tests/jobs/test_backfill_job.py
@@ -21,12 +21,10 @@ import datetime
 import json
 import logging
 import threading
-import unittest
 from unittest.mock import patch
 
 import pytest
 import sqlalchemy
-from parameterized import parameterized
 
 from airflow import settings
 from airflow.cli import cli_parser
@@ -56,7 +54,23 @@ logger = logging.getLogger(__name__)
 DEFAULT_DATE = timezone.datetime(2016, 1, 1)
 
 
-class TestBackfillJob(unittest.TestCase):
+@pytest.fixture(scope="module")
+def dag_bag():
+    return DagBag(include_examples=True)
+
+
+class TestBackfillJob:
+    @staticmethod
+    def clean_db():
+        clear_db_runs()
+        clear_db_pools()
+
+    @pytest.fixture(autouse=True)
+    def set_instance_attrs(self, dag_bag):
+        self.clean_db()
+        self.parser = cli_parser.get_parser()
+        self.dagbag = dag_bag
+
     def _get_dummy_dag(self, dag_id, pool=Pool.DEFAULT_POOL_NAME, task_concurrency=None):
         dag = DAG(dag_id=dag_id, start_date=DEFAULT_DATE, schedule_interval='@daily')
 
@@ -73,22 +87,6 @@ class TestBackfillJob(unittest.TestCase):
                 count += 1
         return count
 
-    @classmethod
-    def setUpClass(cls):
-        cls.dagbag = DagBag(include_examples=True)
-
-    @staticmethod
-    def clean_db():
-        clear_db_runs()
-        clear_db_pools()
-
-    def setUp(self):
-        self.clean_db()
-        self.parser = cli_parser.get_parser()
-
-    def tearDown(self) -> None:
-        self.clean_db()
-
     def test_unfinished_dag_runs_set_to_failed(self):
         dag = self._get_dummy_dag('dummy_dag')
 
@@ -210,7 +208,8 @@ class TestBackfillJob(unittest.TestCase):
         session.close()
 
     @pytest.mark.backend("postgres", "mysql")
-    @parameterized.expand(
+    @pytest.mark.parametrize(
+        "dag_id, expected_execution_order",
         [
             [
                 "example_branch_operator",
@@ -246,7 +245,7 @@ class TestBackfillJob(unittest.TestCase):
                 ),
             ],
             ["latest_only", ("latest_only", "task1")],
-        ]
+        ],
     )
     def test_backfill_examples(self, dag_id, expected_execution_order):
         """
@@ -471,8 +470,6 @@ class TestBackfillJob(unittest.TestCase):
         except AirflowException:
             return
 
-        self.fail()
-
     @patch('airflow.jobs.backfill_job.BackfillJob.log')
     def test_backfill_respect_pool_limit(self, mock_log):
         session = settings.Session()
@@ -727,7 +724,7 @@ class TestBackfillJob(unittest.TestCase):
             start_date=DEFAULT_DATE,
             end_date=DEFAULT_DATE,
         )
-        with self.assertRaises(BackfillUnfinished):
+        with pytest.raises(BackfillUnfinished):
             job.run()
 
     def test_backfill_ordered_concurrent_execute(self):

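For readers less familiar with the pattern, the sketch below condenses the unittest-to-pytest moves shown in the diff above: a module-scoped fixture in place of setUpClass, an autouse fixture in place of setUp/tearDown, pytest.raises in place of self.assertRaises, and pytest.mark.parametrize in place of parameterized.expand. It is a minimal standalone illustration; the class and fixture names (TestWidget, shared_resource) are hypothetical and are not taken from the Airflow test suite.

    # Minimal sketch of the unittest-to-pytest style used in this commit.
    # Names below are illustrative only, not from tests/jobs/test_backfill_job.py.
    import pytest


    @pytest.fixture(scope="module")
    def shared_resource():
        # Replaces setUpClass: built once per test module and shared,
        # the same role the module-scoped dag_bag fixture plays in the diff.
        return {"examples": ["a", "b"]}


    class TestWidget:
        @pytest.fixture(autouse=True)
        def set_instance_attrs(self, shared_resource):
            # Replaces setUp: runs automatically before every test in the class.
            self.resource = shared_resource
            yield
            # Code after the yield replaces tearDown, if cleanup is needed.
            self.resource = None

        @pytest.mark.parametrize(
            "value, expected",
            [
                (1, 2),
                (2, 4),
            ],
        )
        def test_double(self, value, expected):
            # Replaces parameterized.expand from the parameterized package.
            assert value * 2 == expected

        def test_raises(self):
            # Replaces self.assertRaises from unittest.TestCase.
            with pytest.raises(ZeroDivisionError):
                1 / 0

One design note on the actual change: scope="module" means the DagBag (which loads the example DAGs) is built once and reused by every test in the file, preserving the behaviour that setUpClass previously provided while letting the tests receive it as an ordinary fixture argument.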