Taragolis commented on code in PR #28631:
URL: https://github.com/apache/airflow/pull/28631#discussion_r1058557413


##########
tests/providers/conftest.py:
##########
@@ -0,0 +1,75 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+
+from __future__ import annotations
+
+import pytest
+
+from tests.test_utils import db
+
+# Providers with subpackages
+INNER_PROVIDERS = {
+    "alibaba",
+    "amazon",
+    "apache",
+    "atlassian",
+    "common",
+    "dbt",
+    "facebook",
+    "google",
+    "microsoft",
+}
+PROVIDERS_PACKAGES = set()
+
+
+def get_test_provider_name(m):
+    """Extract provider name from module full qualname."""
+    _, _, name = m.__name__.partition("providers.")
+    for inner_provider in INNER_PROVIDERS:
+        if name.startswith(inner_provider):
+            return ".".join(name.split(".", 2)[:2])
+    return name.split(".", 1)[0]
+
+
+@pytest.fixture(scope="module", autouse=True)
+def _clear_db_between_providers_tests(request):
+    """Clear DB between each separate provider package test runs."""
+    provider_name = get_test_provider_name(request.module)
+    if provider_name and provider_name not in PROVIDERS_PACKAGES:
+        PROVIDERS_PACKAGES.add(provider_name)
+        db.clear_db_runs()

Review Comment:
   That's what I tried initially
   
   > Use separate clear tests helpers instead of airflow.utils.db.resetdb(). 
Locally this caused some errors with duplicate constraints.
   
   ``` console
   ❯ breeze --python 3.7 --backend postgres shell --db-reset 
   
   root@ba342530c0bc:/opt/airflow# pytest 
tests/providers/docker/operators/test_docker.py::TestDockerOperator::test_execute
 
tests/providers/slack/hooks/test_slack.py::TestSlackHook::test_token_property_deprecated
 
   
==================================================================================
 test session starts 
==================================================================================
   platform linux -- Python 3.7.16, pytest-6.2.5, py-1.11.0, pluggy-1.0.0 -- 
/usr/local/bin/python
   cachedir: .pytest_cache
   rootdir: /opt/airflow, configfile: pytest.ini
   plugins: cov-4.0.0, asyncio-0.20.3, rerunfailures-9.1.1, instafail-0.4.2, 
anyio-3.6.2, timeouts-1.2.1, xdist-3.1.0, requests-mock-1.10.0, 
capture-warnings-0.0.4, httpx-0.21.2, time-machine-2.8.2
   asyncio: mode=strict
   setup timeout: 0.0s, execution timeout: 0.0s, teardown timeout: 0.0s
   collected 2 items                                                            
                                                                                
                           
   
   
tests/providers/docker/operators/test_docker.py::TestDockerOperator::test_execute
 ERROR                                                                          
                 [ 50%]
   
tests/providers/slack/hooks/test_slack.py::TestSlackHook::test_token_property_deprecated
 ERROR                                                                          
          [100%]
   
   
========================================================================================
 ERRORS 
=========================================================================================
   ___________________________________________________________________ ERROR at 
setup of TestDockerOperator.test_execute 
___________________________________________________________________
   
   self = <sqlalchemy.engine.base.Connection object at 0xffff7dd28d50>, dialect 
= <sqlalchemy.dialects.postgresql.psycopg2.PGDialect_psycopg2 object at 
0xffff90993a10>
   constructor = <bound method DefaultExecutionContext._init_ddl of <class 
'sqlalchemy.dialects.postgresql.psycopg2.PGExecutionContext_psycopg2'>>
   statement = 'CREATE UNIQUE INDEX idx_ab_user_username ON ab_user 
(lower(username))', parameters = {}, execution_options = 
immutabledict({'autocommit': True})
   args = (<sqlalchemy.dialects.postgresql.base.PGDDLCompiler object at 
0xffff7dd36790>,), kw = {}, branched = <sqlalchemy.engine.base.Connection 
object at 0xffff7dd28d50>, yp = None
   conn = <sqlalchemy.pool.base._ConnectionFairy object at 0xffff7dd86190>, 
context = <sqlalchemy.dialects.postgresql.psycopg2.PGExecutionContext_psycopg2 
object at 0xffff7dd86310>
   cursor = <cursor object at 0xffff7df4de50; closed: -1>, evt_handled = False
   
       def _execute_context(
           self,
           dialect,
           constructor,
           statement,
           parameters,
           execution_options,
           *args,
           **kw
       ):
           """Create an :class:`.ExecutionContext` and execute, returning
           a :class:`_engine.CursorResult`."""
       
           branched = self
           if self.__branch_from:
               # if this is a "branched" connection, do everything in terms
               # of the "root" connection, *except* for .close(), which is
               # the only feature that branching provides
               self = self.__branch_from
       
           if execution_options:
               yp = execution_options.get("yield_per", None)
               if yp:
                   execution_options = execution_options.union(
                       {"stream_results": True, "max_row_buffer": yp}
                   )
       
           try:
               conn = self._dbapi_connection
               if conn is None:
                   conn = self._revalidate_connection()
       
               context = constructor(
                   dialect, self, conn, execution_options, *args, **kw
               )
           except (exc.PendingRollbackError, exc.ResourceClosedError):
               raise
           except BaseException as e:
               self._handle_dbapi_exception(
                   e, util.text_type(statement), parameters, None, None
               )
       
           if (
               self._transaction
               and not self._transaction.is_active
               or (
                   self._nested_transaction
                   and not self._nested_transaction.is_active
               )
           ):
               self._invalid_transaction()
       
           elif self._trans_context_manager:
               TransactionalContext._trans_ctx_check(self)
       
           if self._is_future and self._transaction is None:
               self._autobegin()
       
           context.pre_exec()
       
           if dialect.use_setinputsizes:
               context._set_input_sizes()
       
           cursor, statement, parameters = (
               context.cursor,
               context.statement,
               context.parameters,
           )
       
           if not context.executemany:
               parameters = parameters[0]
       
           if self._has_events or self.engine._has_events:
               for fn in self.dispatch.before_cursor_execute:
                   statement, parameters = fn(
                       self,
                       cursor,
                       statement,
                       parameters,
                       context,
                       context.executemany,
                   )
       
           if self._echo:
       
               self._log_info(statement)
       
               stats = context._get_cache_stats()
       
               if not self.engine.hide_parameters:
                   self._log_info(
                       "[%s] %r",
                       stats,
                       sql_util._repr_params(
                           parameters, batches=10, ismulti=context.executemany
                       ),
                   )
               else:
                   self._log_info(
                       "[%s] [SQL parameters hidden due to 
hide_parameters=True]"
                       % (stats,)
                   )
       
           evt_handled = False
           try:
               if context.executemany:
                   if self.dialect._has_events:
                       for fn in self.dialect.dispatch.do_executemany:
                           if fn(cursor, statement, parameters, context):
                               evt_handled = True
                               break
                   if not evt_handled:
                       self.dialect.do_executemany(
                           cursor, statement, parameters, context
                       )
               elif not parameters and context.no_parameters:
                   if self.dialect._has_events:
                       for fn in self.dialect.dispatch.do_execute_no_params:
                           if fn(cursor, statement, context):
                               evt_handled = True
                               break
                   if not evt_handled:
                       self.dialect.do_execute_no_params(
                           cursor, statement, context
                       )
               else:
                   if self.dialect._has_events:
                       for fn in self.dialect.dispatch.do_execute:
                           if fn(cursor, statement, parameters, context):
                               evt_handled = True
                               break
                   if not evt_handled:
                       self.dialect.do_execute(
   >                       cursor, statement, parameters, context
                       )
   
   /usr/local/lib/python3.7/site-packages/sqlalchemy/engine/base.py:1901: 
   _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _
   
   self = <sqlalchemy.dialects.postgresql.psycopg2.PGDialect_psycopg2 object at 
0xffff90993a10>, cursor = <cursor object at 0xffff7df4de50; closed: -1>
   statement = 'CREATE UNIQUE INDEX idx_ab_user_username ON ab_user 
(lower(username))', parameters = {}
   context = 
<sqlalchemy.dialects.postgresql.psycopg2.PGExecutionContext_psycopg2 object at 
0xffff7dd86310>
   
       def do_execute(self, cursor, statement, parameters, context=None):
   >       cursor.execute(statement, parameters)
   E       psycopg2.errors.DuplicateTable: relation "idx_ab_user_username" 
already exists
   
   /usr/local/lib/python3.7/site-packages/sqlalchemy/engine/default.py:736: 
DuplicateTable
   
   The above exception was the direct cause of the following exception:
   
   request = <SubRequest '_clear_db_between_providers_tests' for <Function 
test_execute>>
   
       @pytest.fixture(scope="module", autouse=True)
       def _clear_db_between_providers_tests(request):
           """Clear DB between each separate provider package test runs."""
           # from tests.test_utils import db
       
           provider_name = get_test_provider_name(request.module)
           if provider_name and provider_name not in _CLEAR_DB_PROVIDERS:
               _CLEAR_DB_PROVIDERS.add(provider_name)
               from airflow.utils.db import resetdb
   >           resetdb()
   
   tests/providers/conftest.py:68: 
   _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _
   airflow/utils/session.py:75: in wrapper
       return func(*args, session=session, **kwargs)
   airflow/utils/db.py:1619: in resetdb
       initdb(session=session)
   airflow/utils/session.py:72: in wrapper
       return func(*args, **kwargs)
   airflow/utils/db.py:709: in initdb
       _create_db_from_orm(session=session)
   airflow/utils/db.py:692: in _create_db_from_orm
       Base.metadata.create_all(settings.engine)
   /usr/local/lib/python3.7/site-packages/sqlalchemy/sql/schema.py:4931: in 
create_all
       ddl.SchemaGenerator, self, checkfirst=checkfirst, tables=tables
   /usr/local/lib/python3.7/site-packages/sqlalchemy/engine/base.py:3228: in 
_run_ddl_visitor
       conn._run_ddl_visitor(visitorcallable, element, **kwargs)
   /usr/local/lib/python3.7/site-packages/sqlalchemy/engine/base.py:2211: in 
_run_ddl_visitor
       visitorcallable(self.dialect, self, **kwargs).traverse_single(element)
   /usr/local/lib/python3.7/site-packages/sqlalchemy/sql/visitors.py:524: in 
traverse_single
       return meth(obj, **kw)
   /usr/local/lib/python3.7/site-packages/sqlalchemy/sql/ddl.py:855: in 
visit_metadata
       _is_metadata_operation=True,
   /usr/local/lib/python3.7/site-packages/sqlalchemy/sql/visitors.py:524: in 
traverse_single
       return meth(obj, **kw)
   /usr/local/lib/python3.7/site-packages/sqlalchemy/sql/ddl.py:907: in 
visit_table
       self.traverse_single(index, create_ok=True)
   /usr/local/lib/python3.7/site-packages/sqlalchemy/sql/visitors.py:524: in 
traverse_single
       return meth(obj, **kw)
   /usr/local/lib/python3.7/site-packages/sqlalchemy/sql/ddl.py:938: in 
visit_index
       self.connection.execute(CreateIndex(index))
   /usr/local/lib/python3.7/site-packages/sqlalchemy/engine/base.py:1380: in 
execute
       return meth(self, multiparams, params, _EMPTY_EXECUTION_OPTS)
   /usr/local/lib/python3.7/site-packages/sqlalchemy/sql/ddl.py:81: in 
_execute_on_connection
       self, multiparams, params, execution_options
   /usr/local/lib/python3.7/site-packages/sqlalchemy/engine/base.py:1478: in 
_execute_ddl
       compiled,
   /usr/local/lib/python3.7/site-packages/sqlalchemy/engine/base.py:1944: in 
_execute_context
       e, statement, parameters, cursor, context
   /usr/local/lib/python3.7/site-packages/sqlalchemy/engine/base.py:2125: in 
_handle_dbapi_exception
       sqlalchemy_exception, with_traceback=exc_info[2], from_=e
   /usr/local/lib/python3.7/site-packages/sqlalchemy/util/compat.py:211: in 
raise_
       raise exception
   /usr/local/lib/python3.7/site-packages/sqlalchemy/engine/base.py:1901: in 
_execute_context
       cursor, statement, parameters, context
   _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _
   
   self = <sqlalchemy.dialects.postgresql.psycopg2.PGDialect_psycopg2 object at 
0xffff90993a10>, cursor = <cursor object at 0xffff7df4de50; closed: -1>
   statement = 'CREATE UNIQUE INDEX idx_ab_user_username ON ab_user 
(lower(username))', parameters = {}
   context = 
<sqlalchemy.dialects.postgresql.psycopg2.PGExecutionContext_psycopg2 object at 
0xffff7dd86310>
   
       def do_execute(self, cursor, statement, parameters, context=None):
   >       cursor.execute(statement, parameters)
   E       sqlalchemy.exc.ProgrammingError: (psycopg2.errors.DuplicateTable) 
relation "idx_ab_user_username" already exists
   E       
   E       [SQL: CREATE UNIQUE INDEX idx_ab_user_username ON ab_user 
(lower(username))]
   E       (Background on this error at: https://sqlalche.me/e/14/f405)
   
   /usr/local/lib/python3.7/site-packages/sqlalchemy/engine/default.py:736: 
ProgrammingError
   
---------------------------------------------------------------------------------
 Captured stdout setup 
---------------------------------------------------------------------------------
   ========================= AIRFLOW ==========================
   Home of the user: /root
   Airflow home /root/airflow
   Initializing the DB - first time after entering the container.
   You can force re-initialization the database by adding --with-db-init switch 
to run-tests.
   [2022-12-28 19:49:00,870] {db.py:1608} INFO - Dropping tables that exist
   [2022-12-28 19:49:01,287] {migration.py:205} INFO - Context impl 
PostgresqlImpl.
   [2022-12-28 19:49:01,287] {migration.py:212} INFO - Will assume 
transactional DDL.
   [2022-12-28 19:49:01,292] {migration.py:205} INFO - Context impl 
PostgresqlImpl.
   [2022-12-28 19:49:01,292] {migration.py:212} INFO - Will assume 
transactional DDL.
   
---------------------------------------------------------------------------------
 Captured stderr setup 
---------------------------------------------------------------------------------
   INFO  [alembic.runtime.migration] Context impl PostgresqlImpl.
   INFO  [alembic.runtime.migration] Will assume transactional DDL.
   INFO  [alembic.runtime.migration] Running stamp_revision  -> 290244fb8b83
   WARNI [airflow.task.operators] Dependency <Task(BashOperator): 
create_entry_group>, delete_entry_group already registered for DAG: 
example_complex
   WARNI [airflow.task.operators] Dependency <Task(BashOperator): 
delete_entry_group>, create_entry_group already registered for DAG: 
example_complex
   WARNI [airflow.task.operators] Dependency <Task(BashOperator): 
create_entry_gcs>, delete_entry already registered for DAG: example_complex
   WARNI [airflow.task.operators] Dependency <Task(BashOperator): 
delete_entry>, create_entry_gcs already registered for DAG: example_complex
   WARNI [airflow.task.operators] Dependency <Task(BashOperator): create_tag>, 
delete_tag already registered for DAG: example_complex
   WARNI [airflow.task.operators] Dependency <Task(BashOperator): delete_tag>, 
create_tag already registered for DAG: example_complex
   WARNI [airflow.task.operators] Dependency <Task(_PythonDecoratedOperator): 
prepare_email>, send_email already registered for DAG: example_dag_decorator
   WARNI [airflow.task.operators] Dependency <Task(EmailOperator): send_email>, 
prepare_email already registered for DAG: example_dag_decorator
   WARNI [airflow.task.operators] Dependency <Task(_PythonDecoratedOperator): 
print_the_context>, log_sql_query already registered for DAG: 
example_python_operator
   WARNI [airflow.task.operators] Dependency <Task(_PythonDecoratedOperator): 
log_sql_query>, print_the_context already registered for DAG: 
example_python_operator
   WARNI [airflow.task.operators] Dependency <Task(_PythonDecoratedOperator): 
print_the_context>, log_sql_query already registered for DAG: 
example_python_operator
   WARNI [airflow.task.operators] Dependency <Task(_PythonDecoratedOperator): 
log_sql_query>, print_the_context already registered for DAG: 
example_python_operator
   WARNI [airflow.task.operators] Dependency <Task(_PythonDecoratedOperator): 
print_the_context>, log_sql_query already registered for DAG: 
example_python_operator
   WARNI [airflow.task.operators] Dependency <Task(_PythonDecoratedOperator): 
log_sql_query>, print_the_context already registered for DAG: 
example_python_operator
   WARNI [airflow.task.operators] Dependency <Task(_PythonDecoratedOperator): 
print_the_context>, log_sql_query already registered for DAG: 
example_python_operator
   WARNI [airflow.task.operators] Dependency <Task(_PythonDecoratedOperator): 
log_sql_query>, print_the_context already registered for DAG: 
example_python_operator
   WARNI [airflow.www.fab_security.manager] No user yet created, use flask fab 
command to do it.
   INFO  [alembic.runtime.migration] Context impl PostgresqlImpl.
   INFO  [alembic.runtime.migration] Will assume transactional DDL.
   INFO  [alembic.runtime.migration] Context impl PostgresqlImpl.
   INFO  [alembic.runtime.migration] Will assume transactional DDL.
   
----------------------------------------------------------------------------------
 Captured log setup 
-----------------------------------------------------------------------------------
   INFO     airflow.utils.db:db.py:1608 Dropping tables that exist
   INFO     alembic.runtime.migration:migration.py:205 Context impl 
PostgresqlImpl.
   INFO     alembic.runtime.migration:migration.py:212 Will assume 
transactional DDL.
   INFO     alembic.runtime.migration:migration.py:205 Context impl 
PostgresqlImpl.
   INFO     alembic.runtime.migration:migration.py:212 Will assume 
transactional DDL.
   ____________________________________________________________ ERROR at setup 
of TestSlackHook.test_token_property_deprecated 
_____________________________________________________________
   
   self = <sqlalchemy.engine.base.Connection object at 0xffff7e0a8e90>, dialect 
= <sqlalchemy.dialects.postgresql.psycopg2.PGDialect_psycopg2 object at 
0xffff90993a10>
   constructor = <bound method DefaultExecutionContext._init_ddl of <class 
'sqlalchemy.dialects.postgresql.psycopg2.PGExecutionContext_psycopg2'>>
   statement = 'CREATE UNIQUE INDEX idx_ab_user_username ON ab_user 
(lower(username))', parameters = {}, execution_options = 
immutabledict({'autocommit': True})
   args = (<sqlalchemy.dialects.postgresql.base.PGDDLCompiler object at 
0xffff7dd8a550>,), kw = {}, branched = <sqlalchemy.engine.base.Connection 
object at 0xffff7e0a8e90>, yp = None
   conn = <sqlalchemy.pool.base._ConnectionFairy object at 0xffff7ee13810>, 
context = <sqlalchemy.dialects.postgresql.psycopg2.PGExecutionContext_psycopg2 
object at 0xffff7e001090>
   cursor = <cursor object at 0xffff7ed00550; closed: -1>, evt_handled = False
   
       def _execute_context(
           self,
           dialect,
           constructor,
           statement,
           parameters,
           execution_options,
           *args,
           **kw
       ):
           """Create an :class:`.ExecutionContext` and execute, returning
           a :class:`_engine.CursorResult`."""
       
           branched = self
           if self.__branch_from:
               # if this is a "branched" connection, do everything in terms
               # of the "root" connection, *except* for .close(), which is
               # the only feature that branching provides
               self = self.__branch_from
       
           if execution_options:
               yp = execution_options.get("yield_per", None)
               if yp:
                   execution_options = execution_options.union(
                       {"stream_results": True, "max_row_buffer": yp}
                   )
       
           try:
               conn = self._dbapi_connection
               if conn is None:
                   conn = self._revalidate_connection()
       
               context = constructor(
                   dialect, self, conn, execution_options, *args, **kw
               )
           except (exc.PendingRollbackError, exc.ResourceClosedError):
               raise
           except BaseException as e:
               self._handle_dbapi_exception(
                   e, util.text_type(statement), parameters, None, None
               )
       
           if (
               self._transaction
               and not self._transaction.is_active
               or (
                   self._nested_transaction
                   and not self._nested_transaction.is_active
               )
           ):
               self._invalid_transaction()
       
           elif self._trans_context_manager:
               TransactionalContext._trans_ctx_check(self)
       
           if self._is_future and self._transaction is None:
               self._autobegin()
       
           context.pre_exec()
       
           if dialect.use_setinputsizes:
               context._set_input_sizes()
       
           cursor, statement, parameters = (
               context.cursor,
               context.statement,
               context.parameters,
           )
       
           if not context.executemany:
               parameters = parameters[0]
       
           if self._has_events or self.engine._has_events:
               for fn in self.dispatch.before_cursor_execute:
                   statement, parameters = fn(
                       self,
                       cursor,
                       statement,
                       parameters,
                       context,
                       context.executemany,
                   )
       
           if self._echo:
       
               self._log_info(statement)
       
               stats = context._get_cache_stats()
       
               if not self.engine.hide_parameters:
                   self._log_info(
                       "[%s] %r",
                       stats,
                       sql_util._repr_params(
                           parameters, batches=10, ismulti=context.executemany
                       ),
                   )
               else:
                   self._log_info(
                       "[%s] [SQL parameters hidden due to 
hide_parameters=True]"
                       % (stats,)
                   )
       
           evt_handled = False
           try:
               if context.executemany:
                   if self.dialect._has_events:
                       for fn in self.dialect.dispatch.do_executemany:
                           if fn(cursor, statement, parameters, context):
                               evt_handled = True
                               break
                   if not evt_handled:
                       self.dialect.do_executemany(
                           cursor, statement, parameters, context
                       )
               elif not parameters and context.no_parameters:
                   if self.dialect._has_events:
                       for fn in self.dialect.dispatch.do_execute_no_params:
                           if fn(cursor, statement, context):
                               evt_handled = True
                               break
                   if not evt_handled:
                       self.dialect.do_execute_no_params(
                           cursor, statement, context
                       )
               else:
                   if self.dialect._has_events:
                       for fn in self.dialect.dispatch.do_execute:
                           if fn(cursor, statement, parameters, context):
                               evt_handled = True
                               break
                   if not evt_handled:
                       self.dialect.do_execute(
   >                       cursor, statement, parameters, context
                       )
   
   /usr/local/lib/python3.7/site-packages/sqlalchemy/engine/base.py:1901: 
   _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _
   
   self = <sqlalchemy.dialects.postgresql.psycopg2.PGDialect_psycopg2 object at 
0xffff90993a10>, cursor = <cursor object at 0xffff7ed00550; closed: -1>
   statement = 'CREATE UNIQUE INDEX idx_ab_user_username ON ab_user 
(lower(username))', parameters = {}
   context = 
<sqlalchemy.dialects.postgresql.psycopg2.PGExecutionContext_psycopg2 object at 
0xffff7e001090>
   
       def do_execute(self, cursor, statement, parameters, context=None):
   >       cursor.execute(statement, parameters)
   E       psycopg2.errors.DuplicateTable: relation "idx_ab_user_username" 
already exists
   
   /usr/local/lib/python3.7/site-packages/sqlalchemy/engine/default.py:736: 
DuplicateTable
   
   The above exception was the direct cause of the following exception:
   
   request = <SubRequest '_clear_db_between_providers_tests' for <Function 
test_token_property_deprecated>>
   
       @pytest.fixture(scope="module", autouse=True)
       def _clear_db_between_providers_tests(request):
           """Clear DB between each separate provider package test runs."""
           # from tests.test_utils import db
       
           provider_name = get_test_provider_name(request.module)
           if provider_name and provider_name not in _CLEAR_DB_PROVIDERS:
               _CLEAR_DB_PROVIDERS.add(provider_name)
               from airflow.utils.db import resetdb
   >           resetdb()
   
   tests/providers/conftest.py:68: 
   _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _
   airflow/utils/session.py:75: in wrapper
       return func(*args, session=session, **kwargs)
   airflow/utils/db.py:1619: in resetdb
       initdb(session=session)
   airflow/utils/session.py:72: in wrapper
       return func(*args, **kwargs)
   airflow/utils/db.py:709: in initdb
       _create_db_from_orm(session=session)
   airflow/utils/db.py:692: in _create_db_from_orm
       Base.metadata.create_all(settings.engine)
   /usr/local/lib/python3.7/site-packages/sqlalchemy/sql/schema.py:4931: in 
create_all
       ddl.SchemaGenerator, self, checkfirst=checkfirst, tables=tables
   /usr/local/lib/python3.7/site-packages/sqlalchemy/engine/base.py:3228: in 
_run_ddl_visitor
       conn._run_ddl_visitor(visitorcallable, element, **kwargs)
   /usr/local/lib/python3.7/site-packages/sqlalchemy/engine/base.py:2211: in 
_run_ddl_visitor
       visitorcallable(self.dialect, self, **kwargs).traverse_single(element)
   /usr/local/lib/python3.7/site-packages/sqlalchemy/sql/visitors.py:524: in 
traverse_single
       return meth(obj, **kw)
   /usr/local/lib/python3.7/site-packages/sqlalchemy/sql/ddl.py:855: in 
visit_metadata
       _is_metadata_operation=True,
   /usr/local/lib/python3.7/site-packages/sqlalchemy/sql/visitors.py:524: in 
traverse_single
       return meth(obj, **kw)
   /usr/local/lib/python3.7/site-packages/sqlalchemy/sql/ddl.py:907: in 
visit_table
       self.traverse_single(index, create_ok=True)
   /usr/local/lib/python3.7/site-packages/sqlalchemy/sql/visitors.py:524: in 
traverse_single
       return meth(obj, **kw)
   /usr/local/lib/python3.7/site-packages/sqlalchemy/sql/ddl.py:938: in 
visit_index
       self.connection.execute(CreateIndex(index))
   /usr/local/lib/python3.7/site-packages/sqlalchemy/engine/base.py:1380: in 
execute
       return meth(self, multiparams, params, _EMPTY_EXECUTION_OPTS)
   /usr/local/lib/python3.7/site-packages/sqlalchemy/sql/ddl.py:81: in 
_execute_on_connection
       self, multiparams, params, execution_options
   /usr/local/lib/python3.7/site-packages/sqlalchemy/engine/base.py:1478: in 
_execute_ddl
       compiled,
   /usr/local/lib/python3.7/site-packages/sqlalchemy/engine/base.py:1944: in 
_execute_context
       e, statement, parameters, cursor, context
   /usr/local/lib/python3.7/site-packages/sqlalchemy/engine/base.py:2125: in 
_handle_dbapi_exception
       sqlalchemy_exception, with_traceback=exc_info[2], from_=e
   /usr/local/lib/python3.7/site-packages/sqlalchemy/util/compat.py:211: in 
raise_
       raise exception
   /usr/local/lib/python3.7/site-packages/sqlalchemy/engine/base.py:1901: in 
_execute_context
       cursor, statement, parameters, context
   _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _
   
   self = <sqlalchemy.dialects.postgresql.psycopg2.PGDialect_psycopg2 object at 
0xffff90993a10>, cursor = <cursor object at 0xffff7ed00550; closed: -1>
   statement = 'CREATE UNIQUE INDEX idx_ab_user_username ON ab_user 
(lower(username))', parameters = {}
   context = 
<sqlalchemy.dialects.postgresql.psycopg2.PGExecutionContext_psycopg2 object at 
0xffff7e001090>
   
       def do_execute(self, cursor, statement, parameters, context=None):
   >       cursor.execute(statement, parameters)
   E       sqlalchemy.exc.ProgrammingError: (psycopg2.errors.DuplicateTable) 
relation "idx_ab_user_username" already exists
   E       
   E       [SQL: CREATE UNIQUE INDEX idx_ab_user_username ON ab_user 
(lower(username))]
   E       (Background on this error at: https://sqlalche.me/e/14/f405)
   
   /usr/local/lib/python3.7/site-packages/sqlalchemy/engine/default.py:736: 
ProgrammingError
   
---------------------------------------------------------------------------------
 Captured stderr setup 
---------------------------------------------------------------------------------
   INFO  [airflow.utils.db] Dropping tables that exist
   INFO  [alembic.runtime.migration] Context impl PostgresqlImpl.
   INFO  [alembic.runtime.migration] Will assume transactional DDL.
   INFO  [alembic.runtime.migration] Context impl PostgresqlImpl.
   INFO  [alembic.runtime.migration] Will assume transactional DDL.
   
----------------------------------------------------------------------------------
 Captured log setup 
-----------------------------------------------------------------------------------
   INFO     airflow.utils.db:db.py:1608 Dropping tables that exist
   INFO     alembic.runtime.migration:migration.py:205 Context impl 
PostgresqlImpl.
   INFO     alembic.runtime.migration:migration.py:212 Will assume 
transactional DDL.
   INFO     alembic.runtime.migration:migration.py:205 Context impl 
PostgresqlImpl.
   INFO     alembic.runtime.migration:migration.py:212 Will assume 
transactional DDL.
   
===================================================================================
 warnings summary 
====================================================================================
   
tests/providers/docker/operators/test_docker.py::TestDockerOperator::test_execute
     /opt/airflow/airflow/example_dags/example_sensor_decorator.py:64: 
RemovedInAirflow3Warning: Param `schedule_interval` is deprecated and will be 
removed in a future release. Please use `schedule` instead. 
       tutorial_etl_dag = example_sensor_decorator()
   
   
tests/providers/docker/operators/test_docker.py::TestDockerOperator::test_execute
     /opt/airflow/airflow/example_dags/example_subdag_operator.py:45: 
RemovedInAirflow3Warning: This class is deprecated. Please use 
`airflow.utils.task_group.TaskGroup`.
       subdag=subdag(DAG_NAME, "section-1", dag.default_args),
   
   
tests/providers/docker/operators/test_docker.py::TestDockerOperator::test_execute
     /opt/airflow/airflow/example_dags/example_subdag_operator.py:54: 
RemovedInAirflow3Warning: This class is deprecated. Please use 
`airflow.utils.task_group.TaskGroup`.
       subdag=subdag(DAG_NAME, "section-2", dag.default_args),
   
   -- Docs: https://docs.pytest.org/en/stable/warnings.html
   
================================================================================
 short test summary info 
================================================================================
   ERROR 
tests/providers/docker/operators/test_docker.py::TestDockerOperator::test_execute
 - sqlalchemy.exc.ProgrammingError: (psycopg2.errors.DuplicateTable) relation 
"idx_ab_user_user...
   ERROR 
tests/providers/slack/hooks/test_slack.py::TestSlackHook::test_token_property_deprecated
 - sqlalchemy.exc.ProgrammingError: (psycopg2.errors.DuplicateTable) relation 
"idx_ab_us...
   
============================================================================= 3 
warnings, 2 errors in 8.28s 
=============================================================================
   ```



-- 
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.

To unsubscribe, e-mail: commits-unsubscr...@airflow.apache.org

For queries about this service, please contact Infrastructure at:
us...@infra.apache.org

Reply via email to