This is an automated email from the ASF dual-hosted git repository.

potiuk pushed a commit to branch main
in repository https://gitbox.apache.org/repos/asf/airflow.git


The following commit(s) were added to refs/heads/main by this push:
     new 1cde11a447e Move databricks provider to new structure (#46207)
1cde11a447e is described below

commit 1cde11a447e60d0738b0c317c3d3e8265360014f
Author: Josix <[email protected]>
AuthorDate: Sat Feb 8 00:24:32 2025 +0800

    Move databricks provider to new structure (#46207)
    
    * refactor(providers/databricks): move databricks provider to new structure
    
    * remove unused caplog
    
    ---------
    
    Co-authored-by: Jarek Potiuk <[email protected]>
---
 .github/boring-cyborg.yml                          |   5 +-
 dev/moving_providers/move_providers.py             |   3 +
 docs/.gitignore                                    |   1 +
 .../changelog.rst                                  |  25 ---
 providers/databricks/README.rst                    |  87 +++++++++
 .../docs}/.latest-doc-only-change.txt              |   0
 .../docs/changelog.rst}                            |   4 +-
 .../databricks/docs}/commits.rst                   |   0
 .../databricks/docs}/connections/databricks.rst    |   0
 ...icks_workflow_task_group_airflow_graph_view.png | Bin
 .../docs}/img/workflow_plugin_launch_task.png      | Bin
 .../docs}/img/workflow_plugin_single_task.png      | Bin
 .../img/workflow_run_databricks_graph_view.png     | Bin
 .../databricks/docs}/index.rst                     |   0
 .../docs}/installing-providers-from-sources.rst    |   0
 .../docs/integration-logos}/Databricks.png         | Bin
 .../databricks/docs}/operators/copy_into.rst       |   2 +-
 .../databricks/docs}/operators/index.rst           |   0
 .../databricks/docs}/operators/jobs_create.rst     |   6 +-
 .../databricks/docs}/operators/notebook.rst        |   4 +-
 .../databricks/docs}/operators/repos_create.rst    |   2 +-
 .../databricks/docs}/operators/repos_delete.rst    |   2 +-
 .../databricks/docs}/operators/repos_update.rst    |   2 +-
 .../databricks/docs}/operators/run_now.rst         |   0
 .../databricks/docs}/operators/sql.rst             |  16 +-
 .../databricks/docs}/operators/submit_run.rst      |   4 +-
 .../databricks/docs}/operators/task.rst            |   6 +-
 .../databricks/docs}/operators/workflow.rst        |   2 +-
 .../databricks/docs}/plugins/index.rst             |   0
 .../databricks/docs}/plugins/workflow.rst          |   0
 .../databricks/docs}/security.rst                  |   0
 .../providers => }/databricks/provider.yaml        |  30 +--
 providers/databricks/pyproject.toml                | 105 +++++++++++
 .../src/airflow/providers/databricks/LICENSE       | 201 +++++++++++++++++++++
 .../src/airflow/providers/databricks/__init__.py   |   0
 .../src/airflow/providers/databricks/exceptions.py |   0
 .../providers/databricks/get_provider_info.py      | 193 ++++++++++++++++++++
 .../airflow/providers/databricks/hooks/__init__.py |   0
 .../providers/databricks/hooks/databricks.py       |   0
 .../providers/databricks/hooks/databricks_base.py  |   0
 .../providers/databricks/hooks/databricks_sql.py   |   5 +-
 .../providers/databricks/operators/__init__.py     |   0
 .../providers/databricks/operators/databricks.py   |   2 +-
 .../databricks/operators/databricks_repos.py       |   0
 .../databricks/operators/databricks_sql.py         |   3 +-
 .../databricks/operators/databricks_workflow.py    |   0
 .../providers/databricks/plugins/__init__.py       |   0
 .../databricks/plugins/databricks_workflow.py      |   0
 .../providers/databricks/sensors/__init__.py       |   0
 .../databricks/sensors/databricks_partition.py     |   3 +-
 .../providers/databricks/sensors/databricks_sql.py |   0
 .../providers/databricks/triggers/__init__.py      |   0
 .../providers/databricks/triggers/databricks.py    |   0
 .../airflow/providers/databricks/utils/__init__.py |   0
 .../providers/databricks/utils/databricks.py       |   0
 .../tests/conftest.py}                             |  21 +--
 .../tests/provider_tests}/__init__.py              |   1 +
 .../tests/provider_tests}/databricks/__init__.py   |   0
 .../provider_tests}/databricks/hooks/__init__.py   |   0
 .../databricks/hooks/test_databricks.py            |   0
 .../test_databricks_azure_workload_identity.py     |   0
 ...est_databricks_azure_workload_identity_async.py |   0
 .../databricks/hooks/test_databricks_base.py       |   0
 .../databricks/hooks/test_databricks_sql.py        |   0
 .../databricks/operators/__init__.py               |   0
 .../databricks/operators/test_databricks.py        |   2 +-
 .../databricks/operators/test_databricks_copy.py   |   0
 .../databricks/operators/test_databricks_repos.py  |   0
 .../databricks/operators/test_databricks_sql.py    |   0
 .../operators/test_databricks_workflow.py          |   0
 .../provider_tests}/databricks/plugins/__init__.py |   0
 .../databricks/plugins/test_databricks_workflow.py |   0
 .../provider_tests}/databricks/sensors/__init__.py |   0
 .../sensors/test_databricks_partition.py           |   0
 .../databricks/sensors/test_databricks_sql.py      |   0
 .../provider_tests}/databricks/test_exceptions.py  |   0
 .../databricks/triggers/__init__.py                |   0
 .../databricks/triggers/test_databricks.py         |   0
 .../provider_tests}/databricks/utils/__init__.py   |   0
 .../databricks/utils/test_databricks.py            |   0
 .../tests/system/databricks/__init__.py            |   0
 .../tests/system/databricks/example_databricks.py  |   0
 .../system/databricks/example_databricks_repos.py  |   0
 .../databricks/example_databricks_sensors.py       |   0
 .../system/databricks/example_databricks_sql.py    |   0
 .../databricks/example_databricks_workflow.py      |   0
 pyproject.toml                                     |   3 +
 scripts/ci/docker-compose/remove-sources.yml       |   1 +
 scripts/ci/docker-compose/tests-sources.yml        |   1 +
 89 files changed, 639 insertions(+), 103 deletions(-)

diff --git a/.github/boring-cyborg.yml b/.github/boring-cyborg.yml
index 4d2ff366586..f9e6c9f9243 100644
--- a/.github/boring-cyborg.yml
+++ b/.github/boring-cyborg.yml
@@ -112,10 +112,7 @@ labelPRBasedOnFilePath:
     - providers/standard/**
 
   provider:databricks:
-    - providers/src/airflow/providers/databricks/**/*
-    - docs/apache-airflow-providers-databricks/**/*
-    - providers/tests/databricks/**/*
-    - providers/tests/system/databricks/**/*
+    - providers/databricks/**
 
   provider:datadog:
     - providers/datadog/**
diff --git a/dev/moving_providers/move_providers.py b/dev/moving_providers/move_providers.py
index 0481235ea9e..89efdd28321 100755
--- a/dev/moving_providers/move_providers.py
+++ b/dev/moving_providers/move_providers.py
@@ -371,6 +371,8 @@ def move_provider_yaml(provider_id: str) -> tuple[list[str], list[str], list[str
         if line.startswith("    logo: "):
             logo_path = line[len("    logo: ") :]
             logo_name = logo_path.split("/")[-1]
+            if logo_path in already_moved_logos:
+                continue
             new_logo_dir = (
                 PROVIDERS_DIR_PATH / _get_provider_only_path(provider_id) / "docs" / "integration-logos"
             )
@@ -386,6 +388,7 @@ def move_provider_yaml(provider_id: str) -> tuple[list[str], list[str], list[str
                 remove_empty_parent_dir=True,
             )
             line = f"    logo: /docs/integration-logos/{logo_name}"
+            already_moved_logos.add(logo_path)
         if line == "dependencies:" and not in_dependencies:
             in_dependencies = True
             continue
diff --git a/docs/.gitignore b/docs/.gitignore
index 9d8fc074d1b..68b03c51453 100644
--- a/docs/.gitignore
+++ b/docs/.gitignore
@@ -28,6 +28,7 @@ apache-airflow-providers-cohere
 apache-airflow-providers-common-compat
 apache-airflow-providers-common-io
 apache-airflow-providers-common-sql
+apache-airflow-providers-databricks
 apache-airflow-providers-datadog
 apache-airflow-providers-dbt-cloud
 apache-airflow-providers-dingding
diff --git a/docs/apache-airflow-providers-databricks/changelog.rst b/docs/apache-airflow-providers-databricks/changelog.rst
deleted file mode 100644
index 8e4f5f126ef..00000000000
--- a/docs/apache-airflow-providers-databricks/changelog.rst
+++ /dev/null
@@ -1,25 +0,0 @@
-
- .. Licensed to the Apache Software Foundation (ASF) under one
-    or more contributor license agreements.  See the NOTICE file
-    distributed with this work for additional information
-    regarding copyright ownership.  The ASF licenses this file
-    to you under the Apache License, Version 2.0 (the
-    "License"); you may not use this file except in compliance
-    with the License.  You may obtain a copy of the License at
-
- ..   http://www.apache.org/licenses/LICENSE-2.0
-
- .. Unless required by applicable law or agreed to in writing,
-    software distributed under the License is distributed on an
-    "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-    KIND, either express or implied.  See the License for the
-    specific language governing permissions and limitations
-    under the License.
-
- ..  NOTE! THIS FILE IS AUTOMATICALLY GENERATED AND WILL BE
-     OVERWRITTEN WHEN PREPARING PACKAGES.
-
- ..  IF YOU WANT TO MODIFY THIS FILE, YOU SHOULD MODIFY THE TEMPLATE
-     `PROVIDER_CHANGELOG_TEMPLATE.rst.jinja2` IN the `dev/breeze/src/airflow_breeze/templates` DIRECTORY
-
-.. include:: ../../providers/src/airflow/providers/databricks/CHANGELOG.rst
diff --git a/providers/databricks/README.rst b/providers/databricks/README.rst
new file mode 100644
index 00000000000..03db4020bc5
--- /dev/null
+++ b/providers/databricks/README.rst
@@ -0,0 +1,87 @@
+
+ .. Licensed to the Apache Software Foundation (ASF) under one
+    or more contributor license agreements.  See the NOTICE file
+    distributed with this work for additional information
+    regarding copyright ownership.  The ASF licenses this file
+    to you under the Apache License, Version 2.0 (the
+    "License"); you may not use this file except in compliance
+    with the License.  You may obtain a copy of the License at
+
+ ..   http://www.apache.org/licenses/LICENSE-2.0
+
+ .. Unless required by applicable law or agreed to in writing,
+    software distributed under the License is distributed on an
+    "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+    KIND, either express or implied.  See the License for the
+    specific language governing permissions and limitations
+    under the License.
+
+ .. NOTE! THIS FILE IS AUTOMATICALLY GENERATED AND WILL BE OVERWRITTEN!
+
+ .. IF YOU WANT TO MODIFY TEMPLATE FOR THIS FILE, YOU SHOULD MODIFY THE TEMPLATE
+    `PROVIDER_README_TEMPLATE.rst.jinja2` IN the `dev/breeze/src/airflow_breeze/templates` DIRECTORY
+
+
+Package ``apache-airflow-providers-databricks``
+
+Release: ``7.0.0``
+
+
+`Databricks <https://databricks.com/>`__
+
+
+Provider package
+----------------
+
+This is a provider package for ``databricks`` provider. All classes for this provider package
+are in ``airflow.providers.databricks`` python package.
+
+You can find package information and changelog for the provider
+in the `documentation <https://airflow.apache.org/docs/apache-airflow-providers-databricks/7.0.0/>`_.
+
+Installation
+------------
+
+You can install this package on top of an existing Airflow 2 installation (see ``Requirements`` below
+for the minimum Airflow version supported) via
+``pip install apache-airflow-providers-databricks``
+
+The package supports the following python versions: 3.9,3.10,3.11,3.12
+
+Requirements
+------------
+
+=======================================  ==================
+PIP package                              Version required
+=======================================  ==================
+``apache-airflow``                       ``>=2.9.0``
+``apache-airflow-providers-common-sql``  ``>=1.20.0``
+``requests``                             ``>=2.27.0,<3``
+``databricks-sql-connector``             ``>=3.0.0``
+``aiohttp``                              ``>=3.9.2,<4``
+``mergedeep``                            ``>=1.3.4``
+``pandas``                               ``>=2.1.2,<2.2``
+``pyarrow``                              ``>=14.0.1``
+=======================================  ==================
+
+Cross provider package dependencies
+-----------------------------------
+
+Those are dependencies that might be needed in order to use all the features of the package.
+You need to install the specified provider packages in order to use them.
+
+You can install such cross-provider dependencies when installing from PyPI. For example:
+
+.. code-block:: bash
+
+    pip install apache-airflow-providers-databricks[common.sql]
+
+
+============================================================================================================  ==============
+Dependent package                                                                                             Extra
+============================================================================================================  ==============
+`apache-airflow-providers-common-sql <https://airflow.apache.org/docs/apache-airflow-providers-common-sql>`_  ``common.sql``
+============================================================================================================  ==============
+
+The changelog for the provider package can be found in the
+`changelog <https://airflow.apache.org/docs/apache-airflow-providers-databricks/7.0.0/changelog.html>`_.
diff --git a/providers/src/airflow/providers/databricks/.latest-doc-only-change.txt b/providers/databricks/docs/.latest-doc-only-change.txt
similarity index 100%
rename from providers/src/airflow/providers/databricks/.latest-doc-only-change.txt
rename to providers/databricks/docs/.latest-doc-only-change.txt
diff --git a/providers/src/airflow/providers/databricks/CHANGELOG.rst b/providers/databricks/docs/changelog.rst
similarity index 99%
rename from providers/src/airflow/providers/databricks/CHANGELOG.rst
rename to providers/databricks/docs/changelog.rst
index 227d297a86f..8a2279b2745 100644
--- a/providers/src/airflow/providers/databricks/CHANGELOG.rst
+++ b/providers/databricks/docs/changelog.rst
@@ -139,7 +139,7 @@ Features
 Misc
 ~~~~
 
-* ``Removed deprecated method referance airflow.www.auth.has_access when min airflow version >= 2.8.0 (#41747)``
+* ``Removed deprecated method reference airflow.www.auth.has_access when min airflow version >= 2.8.0 (#41747)``
 * ``remove deprecated soft_fail from providers (#41710)``
 
 6.9.0
@@ -451,7 +451,7 @@ Misc
 Features
 ~~~~~~~~
 
-* ``Add "QUEUED" to RUN_LIFE_CYCLE_STATES following deployement of … (#33886)``
+* ``Add "QUEUED" to RUN_LIFE_CYCLE_STATES following deployment of … (#33886)``
 * ``allow DatabricksSubmitRunOperator to accept a pipeline name for a 
pipeline_task (#32903)``
 
 Misc
diff --git a/docs/apache-airflow-providers-databricks/commits.rst b/providers/databricks/docs/commits.rst
similarity index 100%
rename from docs/apache-airflow-providers-databricks/commits.rst
rename to providers/databricks/docs/commits.rst
diff --git a/docs/apache-airflow-providers-databricks/connections/databricks.rst b/providers/databricks/docs/connections/databricks.rst
similarity index 100%
rename from docs/apache-airflow-providers-databricks/connections/databricks.rst
rename to providers/databricks/docs/connections/databricks.rst
diff --git a/docs/apache-airflow-providers-databricks/img/databricks_workflow_task_group_airflow_graph_view.png b/providers/databricks/docs/img/databricks_workflow_task_group_airflow_graph_view.png
similarity index 100%
rename from docs/apache-airflow-providers-databricks/img/databricks_workflow_task_group_airflow_graph_view.png
rename to providers/databricks/docs/img/databricks_workflow_task_group_airflow_graph_view.png
diff --git a/docs/apache-airflow-providers-databricks/img/workflow_plugin_launch_task.png b/providers/databricks/docs/img/workflow_plugin_launch_task.png
similarity index 100%
rename from docs/apache-airflow-providers-databricks/img/workflow_plugin_launch_task.png
rename to providers/databricks/docs/img/workflow_plugin_launch_task.png
diff --git a/docs/apache-airflow-providers-databricks/img/workflow_plugin_single_task.png b/providers/databricks/docs/img/workflow_plugin_single_task.png
similarity index 100%
rename from docs/apache-airflow-providers-databricks/img/workflow_plugin_single_task.png
rename to providers/databricks/docs/img/workflow_plugin_single_task.png
diff --git a/docs/apache-airflow-providers-databricks/img/workflow_run_databricks_graph_view.png b/providers/databricks/docs/img/workflow_run_databricks_graph_view.png
similarity index 100%
rename from docs/apache-airflow-providers-databricks/img/workflow_run_databricks_graph_view.png
rename to providers/databricks/docs/img/workflow_run_databricks_graph_view.png
diff --git a/docs/apache-airflow-providers-databricks/index.rst b/providers/databricks/docs/index.rst
similarity index 100%
rename from docs/apache-airflow-providers-databricks/index.rst
rename to providers/databricks/docs/index.rst
diff --git a/docs/apache-airflow-providers-databricks/installing-providers-from-sources.rst b/providers/databricks/docs/installing-providers-from-sources.rst
similarity index 100%
rename from docs/apache-airflow-providers-databricks/installing-providers-from-sources.rst
rename to providers/databricks/docs/installing-providers-from-sources.rst
diff --git a/docs/integration-logos/databricks/Databricks.png b/providers/databricks/docs/integration-logos/Databricks.png
similarity index 100%
rename from docs/integration-logos/databricks/Databricks.png
rename to providers/databricks/docs/integration-logos/Databricks.png
diff --git a/docs/apache-airflow-providers-databricks/operators/copy_into.rst b/providers/databricks/docs/operators/copy_into.rst
similarity index 95%
rename from docs/apache-airflow-providers-databricks/operators/copy_into.rst
rename to providers/databricks/docs/operators/copy_into.rst
index 56eb20c6622..7b4ce43567a 100644
--- a/docs/apache-airflow-providers-databricks/operators/copy_into.rst
+++ b/providers/databricks/docs/operators/copy_into.rst
@@ -46,7 +46,7 @@ Importing CSV data
 
 An example usage of the DatabricksCopyIntoOperator to import CSV data into a table is as follows:
 
-.. exampleinclude:: /../../providers/tests/system/databricks/example_databricks_sql.py
+.. exampleinclude:: /../../providers/databricks/tests/system/databricks/example_databricks_sql.py
     :language: python
     :start-after: [START howto_operator_databricks_copy_into]
     :end-before: [END howto_operator_databricks_copy_into]
diff --git a/docs/apache-airflow-providers-databricks/operators/index.rst b/providers/databricks/docs/operators/index.rst
similarity index 100%
rename from docs/apache-airflow-providers-databricks/operators/index.rst
rename to providers/databricks/docs/operators/index.rst
diff --git a/docs/apache-airflow-providers-databricks/operators/jobs_create.rst b/providers/databricks/docs/operators/jobs_create.rst
similarity index 92%
rename from docs/apache-airflow-providers-databricks/operators/jobs_create.rst
rename to providers/databricks/docs/operators/jobs_create.rst
index 5a79c8244a9..ad4525196ef 100644
--- a/docs/apache-airflow-providers-databricks/operators/jobs_create.rst
+++ b/providers/databricks/docs/operators/jobs_create.rst
@@ -67,7 +67,7 @@ Specifying parameters as JSON
 
 An example usage of the DatabricksCreateJobsOperator is as follows:
 
-.. exampleinclude:: /../../providers/tests/system/databricks/example_databricks.py
+.. exampleinclude:: /../../providers/databricks/tests/system/databricks/example_databricks.py
     :language: python
     :start-after: [START howto_operator_databricks_jobs_create_json]
     :end-before: [END howto_operator_databricks_jobs_create_json]
@@ -77,7 +77,7 @@ Using named parameters
 
 You can also use named parameters to initialize the operator and run the job.
 
-.. exampleinclude:: /../../providers/tests/system/databricks/example_databricks.py
+.. exampleinclude:: /../../providers/databricks/tests/system/databricks/example_databricks.py
     :language: python
     :start-after: [START howto_operator_databricks_jobs_create_named]
     :end-before: [END howto_operator_databricks_jobs_create_named]
@@ -88,7 +88,7 @@ Pairing with DatabricksRunNowOperator
 You can use the ``job_id`` that is returned by the DatabricksCreateJobsOperator in the
 return_value XCom as an argument to the DatabricksRunNowOperator to run the job.
 
-.. exampleinclude:: /../../providers/tests/system/databricks/example_databricks.py
+.. exampleinclude:: /../../providers/databricks/tests/system/databricks/example_databricks.py
     :language: python
     :start-after: [START howto_operator_databricks_run_now]
     :end-before: [END howto_operator_databricks_run_now]
diff --git a/docs/apache-airflow-providers-databricks/operators/notebook.rst b/providers/databricks/docs/operators/notebook.rst
similarity index 89%
rename from docs/apache-airflow-providers-databricks/operators/notebook.rst
rename to providers/databricks/docs/operators/notebook.rst
index bf7b04ca74c..ba9e11b97ea 100644
--- a/docs/apache-airflow-providers-databricks/operators/notebook.rst
+++ b/providers/databricks/docs/operators/notebook.rst
@@ -31,14 +31,14 @@ Examples
 
 Running a notebook in Databricks on a new cluster
 ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
-.. exampleinclude:: /../../providers/tests/system/databricks/example_databricks.py
+.. exampleinclude:: /../../providers/databricks/tests/system/databricks/example_databricks.py
     :language: python
     :start-after: [START howto_operator_databricks_notebook_new_cluster]
     :end-before: [END howto_operator_databricks_notebook_new_cluster]
 
 Running a notebook in Databricks on an existing cluster
 ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
-.. exampleinclude:: /../../providers/tests/system/databricks/example_databricks.py
+.. exampleinclude:: /../../providers/databricks/tests/system/databricks/example_databricks.py
     :language: python
     :start-after: [START howto_operator_databricks_notebook_existing_cluster]
     :end-before: [END howto_operator_databricks_notebook_existing_cluster]
diff --git a/docs/apache-airflow-providers-databricks/operators/repos_create.rst b/providers/databricks/docs/operators/repos_create.rst
similarity index 96%
rename from docs/apache-airflow-providers-databricks/operators/repos_create.rst
rename to providers/databricks/docs/operators/repos_create.rst
index 6b60eae2787..d9608bad91c 100644
--- a/docs/apache-airflow-providers-databricks/operators/repos_create.rst
+++ b/providers/databricks/docs/operators/repos_create.rst
@@ -63,7 +63,7 @@ Create a Databricks Repo
 
 An example usage of the DatabricksReposCreateOperator is as follows:
 
-.. exampleinclude:: /../../providers/tests/system/databricks/example_databricks_repos.py
+.. exampleinclude:: /../../providers/databricks/tests/system/databricks/example_databricks_repos.py
     :language: python
     :start-after: [START howto_operator_databricks_repo_create]
     :end-before: [END howto_operator_databricks_repo_create]
diff --git a/docs/apache-airflow-providers-databricks/operators/repos_delete.rst b/providers/databricks/docs/operators/repos_delete.rst
similarity index 95%
rename from docs/apache-airflow-providers-databricks/operators/repos_delete.rst
rename to providers/databricks/docs/operators/repos_delete.rst
index 3186dd131df..09849c6ae0c 100644
--- a/docs/apache-airflow-providers-databricks/operators/repos_delete.rst
+++ b/providers/databricks/docs/operators/repos_delete.rst
@@ -55,7 +55,7 @@ Deleting Databricks Repo by specifying path
 
 An example usage of the DatabricksReposDeleteOperator is as follows:
 
-.. exampleinclude:: /../../providers/tests/system/databricks/example_databricks_repos.py
+.. exampleinclude:: /../../providers/databricks/tests/system/databricks/example_databricks_repos.py
     :language: python
     :start-after: [START howto_operator_databricks_repo_delete]
     :end-before: [END howto_operator_databricks_repo_delete]
diff --git a/docs/apache-airflow-providers-databricks/operators/repos_update.rst b/providers/databricks/docs/operators/repos_update.rst
similarity index 96%
rename from docs/apache-airflow-providers-databricks/operators/repos_update.rst
rename to providers/databricks/docs/operators/repos_update.rst
index 6893ee0107e..90f700ff581 100644
--- a/docs/apache-airflow-providers-databricks/operators/repos_update.rst
+++ b/providers/databricks/docs/operators/repos_update.rst
@@ -60,7 +60,7 @@ Updating Databricks Repo by specifying path
 
 An example usage of the DatabricksReposUpdateOperator is as follows:
 
-.. exampleinclude:: /../../providers/tests/system/databricks/example_databricks_repos.py
+.. exampleinclude:: /../../providers/databricks/tests/system/databricks/example_databricks_repos.py
     :language: python
     :start-after: [START howto_operator_databricks_repo_update]
     :end-before: [END howto_operator_databricks_repo_update]
diff --git a/docs/apache-airflow-providers-databricks/operators/run_now.rst b/providers/databricks/docs/operators/run_now.rst
similarity index 100%
rename from docs/apache-airflow-providers-databricks/operators/run_now.rst
rename to providers/databricks/docs/operators/run_now.rst
diff --git a/docs/apache-airflow-providers-databricks/operators/sql.rst b/providers/databricks/docs/operators/sql.rst
similarity index 88%
rename from docs/apache-airflow-providers-databricks/operators/sql.rst
rename to providers/databricks/docs/operators/sql.rst
index 33acacae1c6..336bb3cc01b 100644
--- a/docs/apache-airflow-providers-databricks/operators/sql.rst
+++ b/providers/databricks/docs/operators/sql.rst
@@ -49,7 +49,7 @@ Selecting data
 
 An example usage of the DatabricksSqlOperator to select data from a table is as follows:
 
-.. exampleinclude:: /../../providers/tests/system/databricks/example_databricks_sql.py
+.. exampleinclude:: /../../providers/databricks/tests/system/databricks/example_databricks_sql.py
     :language: python
     :start-after: [START howto_operator_databricks_sql_select]
     :end-before: [END howto_operator_databricks_sql_select]
@@ -59,7 +59,7 @@ Selecting data into a file
 
 An example usage of the DatabricksSqlOperator to select data from a table and store in a file is as follows:
 
-.. exampleinclude:: /../../providers/tests/system/databricks/example_databricks_sql.py
+.. exampleinclude:: /../../providers/databricks/tests/system/databricks/example_databricks_sql.py
     :language: python
     :start-after: [START howto_operator_databricks_sql_select_file]
     :end-before: [END howto_operator_databricks_sql_select_file]
@@ -69,7 +69,7 @@ Executing multiple statements
 
 An example usage of the DatabricksSqlOperator to perform multiple SQL statements is as follows:
 
-.. exampleinclude:: /../../providers/tests/system/databricks/example_databricks_sql.py
+.. exampleinclude:: /../../providers/databricks/tests/system/databricks/example_databricks_sql.py
     :language: python
     :start-after: [START howto_operator_databricks_sql_multiple]
     :end-before: [END howto_operator_databricks_sql_multiple]
@@ -80,7 +80,7 @@ Executing multiple statements from a file
 
 An example usage of the DatabricksSqlOperator to perform statements from a file is as follows:
 
-.. exampleinclude:: /../../providers/tests/system/databricks/example_databricks_sql.py
+.. exampleinclude:: /../../providers/databricks/tests/system/databricks/example_databricks_sql.py
     :language: python
     :start-after: [START howto_operator_databricks_sql_multiple_file]
     :end-before: [END howto_operator_databricks_sql_multiple_file]
@@ -107,7 +107,7 @@ Examples
 --------
 Configuring Databricks connection to be used with the Sensor.
 
-.. exampleinclude:: /../../providers/tests/system/databricks/example_databricks_sensors.py
+.. exampleinclude:: /../../providers/databricks/tests/system/databricks/example_databricks_sensors.py
     :language: python
     :dedent: 4
     :start-after: [START howto_sensor_databricks_connection_setup]
@@ -115,7 +115,7 @@ Configuring Databricks connection to be used with the Sensor.
 
 Poking the specific table with the SQL statement:
 
-.. exampleinclude:: /../../providers/tests/system/databricks/example_databricks_sensors.py
+.. exampleinclude:: /../../providers/databricks/tests/system/databricks/example_databricks_sensors.py
     :language: python
     :dedent: 4
     :start-after: [START howto_sensor_databricks_sql]
@@ -154,7 +154,7 @@ Examples
 --------
 Configuring Databricks connection to be used with the Sensor.
 
-.. exampleinclude:: /../../providers/tests/system/databricks/example_databricks_sensors.py
+.. exampleinclude:: /../../providers/databricks/tests/system/databricks/example_databricks_sensors.py
     :language: python
     :dedent: 4
     :start-after: [START howto_sensor_databricks_connection_setup]
@@ -162,7 +162,7 @@ Configuring Databricks connection to be used with the Sensor.
 
 Poking the specific table for existence of data/partition:
 
-.. exampleinclude:: /../../providers/tests/system/databricks/example_databricks_sensors.py
+.. exampleinclude:: /../../providers/databricks/tests/system/databricks/example_databricks_sensors.py
     :language: python
     :dedent: 4
     :start-after: [START howto_sensor_databricks_partition]
diff --git a/docs/apache-airflow-providers-databricks/operators/submit_run.rst b/providers/databricks/docs/operators/submit_run.rst
similarity index 96%
rename from docs/apache-airflow-providers-databricks/operators/submit_run.rst
rename to providers/databricks/docs/operators/submit_run.rst
index 7a8d13f646c..813af046127 100644
--- a/docs/apache-airflow-providers-databricks/operators/submit_run.rst
+++ b/providers/databricks/docs/operators/submit_run.rst
@@ -114,7 +114,7 @@ Specifying parameters as JSON
 
 An example usage of the DatabricksSubmitRunOperator is as follows:
 
-.. exampleinclude:: /../../providers/tests/system/databricks/example_databricks.py
+.. exampleinclude:: /../../providers/databricks/tests/system/databricks/example_databricks.py
     :language: python
     :start-after: [START howto_operator_databricks_json]
     :end-before: [END howto_operator_databricks_json]
@@ -124,7 +124,7 @@ Using named parameters
 
 You can also use named parameters to initialize the operator and run the job.
 
-.. exampleinclude:: /../../providers/tests/system/databricks/example_databricks.py
+.. exampleinclude:: /../../providers/databricks/tests/system/databricks/example_databricks.py
     :language: python
     :start-after: [START howto_operator_databricks_named]
     :end-before: [END howto_operator_databricks_named]
diff --git a/docs/apache-airflow-providers-databricks/operators/task.rst b/providers/databricks/docs/operators/task.rst
similarity index 88%
rename from docs/apache-airflow-providers-databricks/operators/task.rst
rename to providers/databricks/docs/operators/task.rst
index 47ceafe58ad..a1b83b3bb93 100644
--- a/docs/apache-airflow-providers-databricks/operators/task.rst
+++ b/providers/databricks/docs/operators/task.rst
@@ -33,21 +33,21 @@ Examples
 
 Running a notebook in Databricks using DatabricksTaskOperator
 ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
-.. exampleinclude:: /../../providers/tests/system/databricks/example_databricks.py
+.. exampleinclude:: /../../providers/databricks/tests/system/databricks/example_databricks.py
     :language: python
     :start-after: [START howto_operator_databricks_task_notebook]
     :end-before: [END howto_operator_databricks_task_notebook]
 
 Running a SQL query in Databricks using DatabricksTaskOperator
 ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
-.. exampleinclude:: /../../providers/tests/system/databricks/example_databricks.py
+.. exampleinclude:: /../../providers/databricks/tests/system/databricks/example_databricks.py
     :language: python
     :start-after: [START howto_operator_databricks_task_sql]
     :end-before: [END howto_operator_databricks_task_sql]
 
 Running a python file in Databricks in using DatabricksTaskOperator
 ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
-.. exampleinclude:: /../../providers/tests/system/databricks/example_databricks.py
+.. exampleinclude:: /../../providers/databricks/tests/system/databricks/example_databricks.py
     :language: python
     :start-after: [START howto_operator_databricks_task_python]
     :end-before: [END howto_operator_databricks_task_python]
diff --git a/docs/apache-airflow-providers-databricks/operators/workflow.rst b/providers/databricks/docs/operators/workflow.rst
similarity index 97%
rename from docs/apache-airflow-providers-databricks/operators/workflow.rst
rename to providers/databricks/docs/operators/workflow.rst
index b5c81050143..84f245ab20b 100644
--- a/docs/apache-airflow-providers-databricks/operators/workflow.rst
+++ b/providers/databricks/docs/operators/workflow.rst
@@ -45,7 +45,7 @@ Examples
 
 Example of what a DAG looks like with a DatabricksWorkflowTaskGroup
 ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
-.. exampleinclude:: /../../providers/tests/system/databricks/example_databricks_workflow.py
+.. exampleinclude:: /../../providers/databricks/tests/system/databricks/example_databricks_workflow.py
     :language: python
     :start-after: [START howto_databricks_workflow_notebook]
     :end-before: [END howto_databricks_workflow_notebook]
diff --git a/docs/apache-airflow-providers-databricks/plugins/index.rst b/providers/databricks/docs/plugins/index.rst
similarity index 100%
rename from docs/apache-airflow-providers-databricks/plugins/index.rst
rename to providers/databricks/docs/plugins/index.rst
diff --git a/docs/apache-airflow-providers-databricks/plugins/workflow.rst b/providers/databricks/docs/plugins/workflow.rst
similarity index 100%
rename from docs/apache-airflow-providers-databricks/plugins/workflow.rst
rename to providers/databricks/docs/plugins/workflow.rst
diff --git a/docs/apache-airflow-providers-databricks/security.rst b/providers/databricks/docs/security.rst
similarity index 100%
rename from docs/apache-airflow-providers-databricks/security.rst
rename to providers/databricks/docs/security.rst
diff --git a/providers/src/airflow/providers/databricks/provider.yaml b/providers/databricks/provider.yaml
similarity index 84%
rename from providers/src/airflow/providers/databricks/provider.yaml
rename to providers/databricks/provider.yaml
index 0efe1474ec1..950c223a74f 100644
--- a/providers/src/airflow/providers/databricks/provider.yaml
+++ b/providers/databricks/provider.yaml
@@ -72,31 +72,6 @@ versions:
   - 1.0.1
   - 1.0.0
 
-dependencies:
-  - apache-airflow>=2.9.0
-  - apache-airflow-providers-common-sql>=1.20.0
-  - requests>=2.27.0,<3
-  - databricks-sql-connector>=3.0.0
-  - aiohttp>=3.9.2, <4
-  - mergedeep>=1.3.4
-  - pandas>=2.1.2,<2.2
-  - pyarrow>=14.0.1
-
-
-additional-extras:
-  # pip install apache-airflow-providers-databricks[sdk]
-  - name: sdk
-    description: Install Databricks SDK
-    dependencies:
-      - databricks-sdk==0.10.0
-  - name: azure-identity
-    description: Install Azure Identity client library
-    dependencies:
-      - azure-identity>=1.3.1
-
-devel-dependencies:
-  - deltalake>=0.12.0
-
 integrations:
   - integration-name: Databricks
     external-doc-url: https://databricks.com/
@@ -106,14 +81,13 @@ integrations:
       - /docs/apache-airflow-providers-databricks/operators/submit_run.rst
       - /docs/apache-airflow-providers-databricks/operators/run_now.rst
       - /docs/apache-airflow-providers-databricks/operators/task.rst
-    logo: /integration-logos/databricks/Databricks.png
+    logo: /docs/integration-logos/Databricks.png
     tags: [service]
   - integration-name: Databricks SQL
     external-doc-url: https://databricks.com/product/databricks-sql
     how-to-guide:
       - /docs/apache-airflow-providers-databricks/operators/sql.rst
       - /docs/apache-airflow-providers-databricks/operators/copy_into.rst
-    logo: /integration-logos/databricks/Databricks.png
     tags: [service]
   - integration-name: Databricks Repos
     external-doc-url: https://docs.databricks.com/repos/index.html
@@ -121,13 +95,11 @@ integrations:
       - /docs/apache-airflow-providers-databricks/operators/repos_create.rst
       - /docs/apache-airflow-providers-databricks/operators/repos_update.rst
       - /docs/apache-airflow-providers-databricks/operators/repos_delete.rst
-    logo: /integration-logos/databricks/Databricks.png
     tags: [service]
   - integration-name: Databricks Workflow
     external-doc-url: https://docs.databricks.com/en/workflows/index.html
     how-to-guide:
       - /docs/apache-airflow-providers-databricks/operators/workflow.rst
-    logo: /integration-logos/databricks/Databricks.png
     tags: [service]
 
 operators:
diff --git a/providers/databricks/pyproject.toml b/providers/databricks/pyproject.toml
new file mode 100644
index 00000000000..49152151e9c
--- /dev/null
+++ b/providers/databricks/pyproject.toml
@@ -0,0 +1,105 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+
+# NOTE! THIS FILE IS AUTOMATICALLY GENERATED AND WILL BE OVERWRITTEN!
+
+# IF YOU WANT TO MODIFY THIS FILE EXCEPT DEPENDENCIES, YOU SHOULD MODIFY THE TEMPLATE
+# `pyproject_TEMPLATE.toml.jinja2` IN the `dev/breeze/src/airflow_breeze/templates` DIRECTORY
+[build-system]
+requires = ["flit_core==3.10.1"]
+build-backend = "flit_core.buildapi"
+
+[project]
+name = "apache-airflow-providers-databricks"
+version = "7.0.0"
+description = "Provider package apache-airflow-providers-databricks for Apache 
Airflow"
+readme = "README.rst"
+authors = [
+    {name="Apache Software Foundation", email="[email protected]"},
+]
+maintainers = [
+    {name="Apache Software Foundation", email="[email protected]"},
+]
+keywords = [ "airflow-provider", "databricks", "airflow", "integration" ]
+classifiers = [
+    "Development Status :: 5 - Production/Stable",
+    "Environment :: Console",
+    "Environment :: Web Environment",
+    "Intended Audience :: Developers",
+    "Intended Audience :: System Administrators",
+    "Framework :: Apache Airflow",
+    "Framework :: Apache Airflow :: Provider",
+    "License :: OSI Approved :: Apache Software License",
+    "Programming Language :: Python :: 3.9",
+    "Programming Language :: Python :: 3.10",
+    "Programming Language :: Python :: 3.11",
+    "Programming Language :: Python :: 3.12",
+    "Topic :: System :: Monitoring",
+]
+requires-python = "~=3.9"
+
+# The dependencies should be modified in place in the generated file
+# Any change in the dependencies is preserved when the file is regenerated
+dependencies = [
+    "apache-airflow>=2.9.0",
+    "apache-airflow-providers-common-sql>=1.20.0",
+    "requests>=2.27.0,<3",
+    "databricks-sql-connector>=3.0.0",
+    "aiohttp>=3.9.2, <4",
+    "mergedeep>=1.3.4",
+    "pandas>=2.1.2,<2.2",
+    "pyarrow>=14.0.1",
+]
+
+# The optional dependencies should be modified in place in the generated file
+# Any change in the dependencies is preserved when the file is regenerated
+[project.optional-dependencies]
+# pip install apache-airflow-providers-databricks[sdk]
+"sdk" = [
+    "databricks-sdk==0.10.0",
+]
+"azure-identity" = [
+    "azure-identity>=1.3.1",
+]
+
+# The dependency groups should be modified in place in the generated file
+# Any change in the dependencies is preserved when the file is regenerated
+[dependency-groups]
+dev = [
+    "deltalake>=0.12.0",
+]
+
+[project.urls]
+"Documentation" = 
"https://airflow.apache.org/docs/apache-airflow-providers-databricks/7.0.0";
+"Changelog" = 
"https://airflow.apache.org/docs/apache-airflow-providers-databricks/7.0.0/changelog.html";
+"Bug Tracker" = "https://github.com/apache/airflow/issues";
+"Source Code" = "https://github.com/apache/airflow";
+"Slack Chat" = "https://s.apache.org/airflow-slack";
+"Twitter" = "https://x.com/ApacheAirflow";
+"YouTube" = "https://www.youtube.com/channel/UCSXwxpWZQ7XZ1WL3wqevChA/";
+
+[project.entry-points."apache_airflow_provider"]
+provider_info = "airflow.providers.databricks.get_provider_info:get_provider_info"
+
+[project.entry-points."airflow.plugins"]
+databricks_workflow = "airflow.providers.databricks.plugins.databricks_workflow:DatabricksWorkflowPlugin"
+
+[tool.flit.module]
+name = "airflow.providers.databricks"
+
+[tool.pytest.ini_options]
+ignore = "tests/system/"
diff --git a/providers/databricks/src/airflow/providers/databricks/LICENSE b/providers/databricks/src/airflow/providers/databricks/LICENSE
new file mode 100644
index 00000000000..11069edd790
--- /dev/null
+++ b/providers/databricks/src/airflow/providers/databricks/LICENSE
@@ -0,0 +1,201 @@
+                              Apache License
+                        Version 2.0, January 2004
+                     http://www.apache.org/licenses/
+
+TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
+
+1. Definitions.
+
+   "License" shall mean the terms and conditions for use, reproduction,
+   and distribution as defined by Sections 1 through 9 of this document.
+
+   "Licensor" shall mean the copyright owner or entity authorized by
+   the copyright owner that is granting the License.
+
+   "Legal Entity" shall mean the union of the acting entity and all
+   other entities that control, are controlled by, or are under common
+   control with that entity. For the purposes of this definition,
+   "control" means (i) the power, direct or indirect, to cause the
+   direction or management of such entity, whether by contract or
+   otherwise, or (ii) ownership of fifty percent (50%) or more of the
+   outstanding shares, or (iii) beneficial ownership of such entity.
+
+   "You" (or "Your") shall mean an individual or Legal Entity
+   exercising permissions granted by this License.
+
+   "Source" form shall mean the preferred form for making modifications,
+   including but not limited to software source code, documentation
+   source, and configuration files.
+
+   "Object" form shall mean any form resulting from mechanical
+   transformation or translation of a Source form, including but
+   not limited to compiled object code, generated documentation,
+   and conversions to other media types.
+
+   "Work" shall mean the work of authorship, whether in Source or
+   Object form, made available under the License, as indicated by a
+   copyright notice that is included in or attached to the work
+   (an example is provided in the Appendix below).
+
+   "Derivative Works" shall mean any work, whether in Source or Object
+   form, that is based on (or derived from) the Work and for which the
+   editorial revisions, annotations, elaborations, or other modifications
+   represent, as a whole, an original work of authorship. For the purposes
+   of this License, Derivative Works shall not include works that remain
+   separable from, or merely link (or bind by name) to the interfaces of,
+   the Work and Derivative Works thereof.
+
+   "Contribution" shall mean any work of authorship, including
+   the original version of the Work and any modifications or additions
+   to that Work or Derivative Works thereof, that is intentionally
+   submitted to Licensor for inclusion in the Work by the copyright owner
+   or by an individual or Legal Entity authorized to submit on behalf of
+   the copyright owner. For the purposes of this definition, "submitted"
+   means any form of electronic, verbal, or written communication sent
+   to the Licensor or its representatives, including but not limited to
+   communication on electronic mailing lists, source code control systems,
+   and issue tracking systems that are managed by, or on behalf of, the
+   Licensor for the purpose of discussing and improving the Work, but
+   excluding communication that is conspicuously marked or otherwise
+   designated in writing by the copyright owner as "Not a Contribution."
+
+   "Contributor" shall mean Licensor and any individual or Legal Entity
+   on behalf of whom a Contribution has been received by Licensor and
+   subsequently incorporated within the Work.
+
+2. Grant of Copyright License. Subject to the terms and conditions of
+   this License, each Contributor hereby grants to You a perpetual,
+   worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+   copyright license to reproduce, prepare Derivative Works of,
+   publicly display, publicly perform, sublicense, and distribute the
+   Work and such Derivative Works in Source or Object form.
+
+3. Grant of Patent License. Subject to the terms and conditions of
+   this License, each Contributor hereby grants to You a perpetual,
+   worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+   (except as stated in this section) patent license to make, have made,
+   use, offer to sell, sell, import, and otherwise transfer the Work,
+   where such license applies only to those patent claims licensable
+   by such Contributor that are necessarily infringed by their
+   Contribution(s) alone or by combination of their Contribution(s)
+   with the Work to which such Contribution(s) was submitted. If You
+   institute patent litigation against any entity (including a
+   cross-claim or counterclaim in a lawsuit) alleging that the Work
+   or a Contribution incorporated within the Work constitutes direct
+   or contributory patent infringement, then any patent licenses
+   granted to You under this License for that Work shall terminate
+   as of the date such litigation is filed.
+
+4. Redistribution. You may reproduce and distribute copies of the
+   Work or Derivative Works thereof in any medium, with or without
+   modifications, and in Source or Object form, provided that You
+   meet the following conditions:
+
+   (a) You must give any other recipients of the Work or
+       Derivative Works a copy of this License; and
+
+   (b) You must cause any modified files to carry prominent notices
+       stating that You changed the files; and
+
+   (c) You must retain, in the Source form of any Derivative Works
+       that You distribute, all copyright, patent, trademark, and
+       attribution notices from the Source form of the Work,
+       excluding those notices that do not pertain to any part of
+       the Derivative Works; and
+
+   (d) If the Work includes a "NOTICE" text file as part of its
+       distribution, then any Derivative Works that You distribute must
+       include a readable copy of the attribution notices contained
+       within such NOTICE file, excluding those notices that do not
+       pertain to any part of the Derivative Works, in at least one
+       of the following places: within a NOTICE text file distributed
+       as part of the Derivative Works; within the Source form or
+       documentation, if provided along with the Derivative Works; or,
+       within a display generated by the Derivative Works, if and
+       wherever such third-party notices normally appear. The contents
+       of the NOTICE file are for informational purposes only and
+       do not modify the License. You may add Your own attribution
+       notices within Derivative Works that You distribute, alongside
+       or as an addendum to the NOTICE text from the Work, provided
+       that such additional attribution notices cannot be construed
+       as modifying the License.
+
+   You may add Your own copyright statement to Your modifications and
+   may provide additional or different license terms and conditions
+   for use, reproduction, or distribution of Your modifications, or
+   for any such Derivative Works as a whole, provided Your use,
+   reproduction, and distribution of the Work otherwise complies with
+   the conditions stated in this License.
+
+5. Submission of Contributions. Unless You explicitly state otherwise,
+   any Contribution intentionally submitted for inclusion in the Work
+   by You to the Licensor shall be under the terms and conditions of
+   this License, without any additional terms or conditions.
+   Notwithstanding the above, nothing herein shall supersede or modify
+   the terms of any separate license agreement you may have executed
+   with Licensor regarding such Contributions.
+
+6. Trademarks. This License does not grant permission to use the trade
+   names, trademarks, service marks, or product names of the Licensor,
+   except as required for reasonable and customary use in describing the
+   origin of the Work and reproducing the content of the NOTICE file.
+
+7. Disclaimer of Warranty. Unless required by applicable law or
+   agreed to in writing, Licensor provides the Work (and each
+   Contributor provides its Contributions) on an "AS IS" BASIS,
+   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+   implied, including, without limitation, any warranties or conditions
+   of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
+   PARTICULAR PURPOSE. You are solely responsible for determining the
+   appropriateness of using or redistributing the Work and assume any
+   risks associated with Your exercise of permissions under this License.
+
+8. Limitation of Liability. In no event and under no legal theory,
+   whether in tort (including negligence), contract, or otherwise,
+   unless required by applicable law (such as deliberate and grossly
+   negligent acts) or agreed to in writing, shall any Contributor be
+   liable to You for damages, including any direct, indirect, special,
+   incidental, or consequential damages of any character arising as a
+   result of this License or out of the use or inability to use the
+   Work (including but not limited to damages for loss of goodwill,
+   work stoppage, computer failure or malfunction, or any and all
+   other commercial damages or losses), even if such Contributor
+   has been advised of the possibility of such damages.
+
+9. Accepting Warranty or Additional Liability. While redistributing
+   the Work or Derivative Works thereof, You may choose to offer,
+   and charge a fee for, acceptance of support, warranty, indemnity,
+   or other liability obligations and/or rights consistent with this
+   License. However, in accepting such obligations, You may act only
+   on Your own behalf and on Your sole responsibility, not on behalf
+   of any other Contributor, and only if You agree to indemnify,
+   defend, and hold each Contributor harmless for any liability
+   incurred by, or claims asserted against, such Contributor by reason
+   of your accepting any such warranty or additional liability.
+
+END OF TERMS AND CONDITIONS
+
+APPENDIX: How to apply the Apache License to your work.
+
+   To apply the Apache License to your work, attach the following
+   boilerplate notice, with the fields enclosed by brackets "[]"
+   replaced with your own identifying information. (Don't include
+   the brackets!)  The text should be enclosed in the appropriate
+   comment syntax for the file format. We also recommend that a
+   file or class name and description of purpose be included on the
+   same "printed page" as the copyright notice for easier
+   identification within third-party archives.
+
+Copyright [yyyy] [name of copyright owner]
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
diff --git a/providers/src/airflow/providers/databricks/__init__.py b/providers/databricks/src/airflow/providers/databricks/__init__.py
similarity index 100%
rename from providers/src/airflow/providers/databricks/__init__.py
rename to providers/databricks/src/airflow/providers/databricks/__init__.py
diff --git a/providers/src/airflow/providers/databricks/exceptions.py b/providers/databricks/src/airflow/providers/databricks/exceptions.py
similarity index 100%
rename from providers/src/airflow/providers/databricks/exceptions.py
rename to providers/databricks/src/airflow/providers/databricks/exceptions.py
diff --git a/providers/databricks/src/airflow/providers/databricks/get_provider_info.py b/providers/databricks/src/airflow/providers/databricks/get_provider_info.py
new file mode 100644
index 00000000000..d18af30cdb2
--- /dev/null
+++ b/providers/databricks/src/airflow/providers/databricks/get_provider_info.py
@@ -0,0 +1,193 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+
+# NOTE! THIS FILE IS AUTOMATICALLY GENERATED AND WILL BE OVERWRITTEN!
+#
+# IF YOU WANT TO MODIFY THIS FILE, YOU SHOULD MODIFY THE TEMPLATE
+# `get_provider_info_TEMPLATE.py.jinja2` IN the `dev/breeze/src/airflow_breeze/templates` DIRECTORY
+
+
+def get_provider_info():
+    return {
+        "package-name": "apache-airflow-providers-databricks",
+        "name": "Databricks",
+        "description": "`Databricks <https://databricks.com/>`__\n",
+        "state": "ready",
+        "source-date-epoch": 1734533222,
+        "versions": [
+            "7.0.0",
+            "6.13.0",
+            "6.12.0",
+            "6.11.0",
+            "6.10.0",
+            "6.9.0",
+            "6.8.0",
+            "6.7.0",
+            "6.6.0",
+            "6.5.0",
+            "6.4.0",
+            "6.3.0",
+            "6.2.0",
+            "6.1.0",
+            "6.0.0",
+            "5.0.1",
+            "5.0.0",
+            "4.7.0",
+            "4.6.0",
+            "4.5.0",
+            "4.4.0",
+            "4.3.3",
+            "4.3.2",
+            "4.3.1",
+            "4.3.0",
+            "4.2.0",
+            "4.1.0",
+            "4.0.1",
+            "4.0.0",
+            "3.4.0",
+            "3.3.0",
+            "3.2.0",
+            "3.1.0",
+            "3.0.0",
+            "2.7.0",
+            "2.6.0",
+            "2.5.0",
+            "2.4.0",
+            "2.3.0",
+            "2.2.0",
+            "2.1.0",
+            "2.0.2",
+            "2.0.1",
+            "2.0.0",
+            "1.0.1",
+            "1.0.0",
+        ],
+        "integrations": [
+            {
+                "integration-name": "Databricks",
+                "external-doc-url": "https://databricks.com/";,
+                "how-to-guide": [
+                    
"/docs/apache-airflow-providers-databricks/operators/jobs_create.rst",
+                    
"/docs/apache-airflow-providers-databricks/operators/notebook.rst",
+                    
"/docs/apache-airflow-providers-databricks/operators/submit_run.rst",
+                    
"/docs/apache-airflow-providers-databricks/operators/run_now.rst",
+                    
"/docs/apache-airflow-providers-databricks/operators/task.rst",
+                ],
+                "logo": "/docs/integration-logos/Databricks.png",
+                "tags": ["service"],
+            },
+            {
+                "integration-name": "Databricks SQL",
+                "external-doc-url": 
"https://databricks.com/product/databricks-sql";,
+                "how-to-guide": [
+                    
"/docs/apache-airflow-providers-databricks/operators/sql.rst",
+                    
"/docs/apache-airflow-providers-databricks/operators/copy_into.rst",
+                ],
+                "tags": ["service"],
+            },
+            {
+                "integration-name": "Databricks Repos",
+                "external-doc-url": 
"https://docs.databricks.com/repos/index.html";,
+                "how-to-guide": [
+                    
"/docs/apache-airflow-providers-databricks/operators/repos_create.rst",
+                    
"/docs/apache-airflow-providers-databricks/operators/repos_update.rst",
+                    
"/docs/apache-airflow-providers-databricks/operators/repos_delete.rst",
+                ],
+                "tags": ["service"],
+            },
+            {
+                "integration-name": "Databricks Workflow",
+                "external-doc-url": 
"https://docs.databricks.com/en/workflows/index.html";,
+                "how-to-guide": 
["/docs/apache-airflow-providers-databricks/operators/workflow.rst"],
+                "tags": ["service"],
+            },
+        ],
+        "operators": [
+            {
+                "integration-name": "Databricks",
+                "python-modules": 
["airflow.providers.databricks.operators.databricks"],
+            },
+            {
+                "integration-name": "Databricks SQL",
+                "python-modules": 
["airflow.providers.databricks.operators.databricks_sql"],
+            },
+            {
+                "integration-name": "Databricks Repos",
+                "python-modules": 
["airflow.providers.databricks.operators.databricks_repos"],
+            },
+            {
+                "integration-name": "Databricks Workflow",
+                "python-modules": 
["airflow.providers.databricks.operators.databricks_workflow"],
+            },
+        ],
+        "hooks": [
+            {
+                "integration-name": "Databricks",
+                "python-modules": [
+                    "airflow.providers.databricks.hooks.databricks",
+                    "airflow.providers.databricks.hooks.databricks_base",
+                ],
+            },
+            {
+                "integration-name": "Databricks SQL",
+                "python-modules": 
["airflow.providers.databricks.hooks.databricks_sql"],
+            },
+        ],
+        "triggers": [
+            {
+                "integration-name": "Databricks",
+                "python-modules": 
["airflow.providers.databricks.triggers.databricks"],
+            }
+        ],
+        "sensors": [
+            {
+                "integration-name": "Databricks",
+                "python-modules": [
+                    "airflow.providers.databricks.sensors.databricks_sql",
+                    
"airflow.providers.databricks.sensors.databricks_partition",
+                ],
+            }
+        ],
+        "connection-types": [
+            {
+                "hook-class-name": 
"airflow.providers.databricks.hooks.databricks.DatabricksHook",
+                "connection-type": "databricks",
+            }
+        ],
+        "plugins": [
+            {
+                "name": "databricks_workflow",
+                "plugin-class": 
"airflow.providers.databricks.plugins.databricks_workflow.DatabricksWorkflowPlugin",
+            }
+        ],
+        "extra-links": 
["airflow.providers.databricks.operators.databricks.DatabricksJobRunLink"],
+        "dependencies": [
+            "apache-airflow>=2.9.0",
+            "apache-airflow-providers-common-sql>=1.20.0",
+            "requests>=2.27.0,<3",
+            "databricks-sql-connector>=3.0.0",
+            "aiohttp>=3.9.2, <4",
+            "mergedeep>=1.3.4",
+            "pandas>=2.1.2,<2.2",
+            "pyarrow>=14.0.1",
+        ],
+        "optional-dependencies": {
+            "sdk": ["databricks-sdk==0.10.0"],
+            "azure-identity": ["azure-identity>=1.3.1"],
+        },
+        "devel-dependencies": ["deltalake>=0.12.0"],
+    }
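
The generated module above is plain data: a consumer can import it and read the
provider metadata directly. A minimal sketch of such a lookup (illustrative only,
not Airflow's actual provider-discovery code, and it assumes the package is
installed):

    from airflow.providers.databricks.get_provider_info import get_provider_info

    info = get_provider_info()
    # Connection types registered by this provider -> ["databricks"]
    conn_types = [c["connection-type"] for c in info["connection-types"]]
    # Runtime dependency pins, e.g. "apache-airflow>=2.9.0"
    print(conn_types, info["dependencies"])
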
diff --git a/providers/src/airflow/providers/databricks/hooks/__init__.py b/providers/databricks/src/airflow/providers/databricks/hooks/__init__.py
similarity index 100%
rename from providers/src/airflow/providers/databricks/hooks/__init__.py
rename to providers/databricks/src/airflow/providers/databricks/hooks/__init__.py
diff --git a/providers/src/airflow/providers/databricks/hooks/databricks.py b/providers/databricks/src/airflow/providers/databricks/hooks/databricks.py
similarity index 100%
rename from providers/src/airflow/providers/databricks/hooks/databricks.py
rename to providers/databricks/src/airflow/providers/databricks/hooks/databricks.py
diff --git a/providers/src/airflow/providers/databricks/hooks/databricks_base.py b/providers/databricks/src/airflow/providers/databricks/hooks/databricks_base.py
similarity index 100%
rename from providers/src/airflow/providers/databricks/hooks/databricks_base.py
rename to providers/databricks/src/airflow/providers/databricks/hooks/databricks_base.py
diff --git a/providers/src/airflow/providers/databricks/hooks/databricks_sql.py b/providers/databricks/src/airflow/providers/databricks/hooks/databricks_sql.py
similarity index 99%
rename from providers/src/airflow/providers/databricks/hooks/databricks_sql.py
rename to providers/databricks/src/airflow/providers/databricks/hooks/databricks_sql.py
index b0f52798d9f..7dc15fdf591 100644
--- a/providers/src/airflow/providers/databricks/hooks/databricks_sql.py
+++ b/providers/databricks/src/airflow/providers/databricks/hooks/databricks_sql.py
@@ -31,14 +31,13 @@ from typing import (
     overload,
 )
 
-from databricks import sql  # type: ignore[attr-defined]
-from databricks.sql.types import Row
-
 from airflow.exceptions import AirflowException
 from airflow.models.connection import Connection as AirflowConnection
 from airflow.providers.common.sql.hooks.sql import DbApiHook, return_single_query_results
 from airflow.providers.databricks.exceptions import DatabricksSqlExecutionError, DatabricksSqlExecutionTimeout
 from airflow.providers.databricks.hooks.databricks_base import BaseDatabricksHook
+from databricks import sql  # type: ignore[attr-defined]
+from databricks.sql.types import Row
 
 if TYPE_CHECKING:
     from databricks.sql.client import Connection
diff --git a/providers/src/airflow/providers/databricks/operators/__init__.py b/providers/databricks/src/airflow/providers/databricks/operators/__init__.py
similarity index 100%
rename from providers/src/airflow/providers/databricks/operators/__init__.py
rename to providers/databricks/src/airflow/providers/databricks/operators/__init__.py
diff --git a/providers/src/airflow/providers/databricks/operators/databricks.py b/providers/databricks/src/airflow/providers/databricks/operators/databricks.py
similarity index 99%
rename from providers/src/airflow/providers/databricks/operators/databricks.py
rename to providers/databricks/src/airflow/providers/databricks/operators/databricks.py
index 3c121c49a9e..75035b13fc3 100644
--- a/providers/src/airflow/providers/databricks/operators/databricks.py
+++ b/providers/databricks/src/airflow/providers/databricks/operators/databricks.py
@@ -1114,7 +1114,7 @@ class DatabricksTaskBaseOperator(BaseOperator, ABC):
             raise ValueError("Must specify either existing_cluster_id, 
new_cluster or environments.")
         return run_json
 
-    def _launch_job(self, context: Context | None = None) -> int:
+    def _launch_job(self, context: Context | None = None) -> int | None:
         """Launch the job on Databricks."""
         run_json = self._get_run_json()
         self.databricks_run_id = self._hook.submit_run(run_json)
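
The annotation above is widened to int | None, so callers of _launch_job can no
longer assume a run id is always produced. A hypothetical defensive call site,
shown only to illustrate the new contract (the operator variable and the error
handling are made up, not part of this commit):

    run_id: int | None = operator._launch_job(context=context)
    if run_id is None:
        raise RuntimeError("Databricks did not return a run id")
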
diff --git a/providers/src/airflow/providers/databricks/operators/databricks_repos.py b/providers/databricks/src/airflow/providers/databricks/operators/databricks_repos.py
similarity index 100%
rename from providers/src/airflow/providers/databricks/operators/databricks_repos.py
rename to providers/databricks/src/airflow/providers/databricks/operators/databricks_repos.py
diff --git a/providers/src/airflow/providers/databricks/operators/databricks_sql.py b/providers/databricks/src/airflow/providers/databricks/operators/databricks_sql.py
similarity index 99%
rename from providers/src/airflow/providers/databricks/operators/databricks_sql.py
rename to providers/databricks/src/airflow/providers/databricks/operators/databricks_sql.py
index a4cb062c9b8..06a4c2f4456 100644
--- a/providers/src/airflow/providers/databricks/operators/databricks_sql.py
+++ b/providers/databricks/src/airflow/providers/databricks/operators/databricks_sql.py
@@ -24,12 +24,11 @@ import json
 from collections.abc import Sequence
 from typing import TYPE_CHECKING, Any, ClassVar
 
-from databricks.sql.utils import ParamEscaper
-
 from airflow.exceptions import AirflowException
 from airflow.models import BaseOperator
 from airflow.providers.common.sql.operators.sql import SQLExecuteQueryOperator
 from airflow.providers.databricks.hooks.databricks_sql import DatabricksSqlHook
+from databricks.sql.utils import ParamEscaper
 
 if TYPE_CHECKING:
     from airflow.utils.context import Context
diff --git a/providers/src/airflow/providers/databricks/operators/databricks_workflow.py b/providers/databricks/src/airflow/providers/databricks/operators/databricks_workflow.py
similarity index 100%
rename from providers/src/airflow/providers/databricks/operators/databricks_workflow.py
rename to providers/databricks/src/airflow/providers/databricks/operators/databricks_workflow.py
diff --git a/providers/src/airflow/providers/databricks/plugins/__init__.py b/providers/databricks/src/airflow/providers/databricks/plugins/__init__.py
similarity index 100%
copy from providers/src/airflow/providers/databricks/plugins/__init__.py
copy to providers/databricks/src/airflow/providers/databricks/plugins/__init__.py
diff --git a/providers/src/airflow/providers/databricks/plugins/databricks_workflow.py b/providers/databricks/src/airflow/providers/databricks/plugins/databricks_workflow.py
similarity index 100%
rename from providers/src/airflow/providers/databricks/plugins/databricks_workflow.py
rename to providers/databricks/src/airflow/providers/databricks/plugins/databricks_workflow.py
diff --git a/providers/src/airflow/providers/databricks/sensors/__init__.py b/providers/databricks/src/airflow/providers/databricks/sensors/__init__.py
similarity index 100%
rename from providers/src/airflow/providers/databricks/sensors/__init__.py
rename to providers/databricks/src/airflow/providers/databricks/sensors/__init__.py
diff --git a/providers/src/airflow/providers/databricks/sensors/databricks_partition.py b/providers/databricks/src/airflow/providers/databricks/sensors/databricks_partition.py
similarity index 99%
rename from providers/src/airflow/providers/databricks/sensors/databricks_partition.py
rename to providers/databricks/src/airflow/providers/databricks/sensors/databricks_partition.py
index df9f8cf3fe1..b3a10a9e946 100644
--- a/providers/src/airflow/providers/databricks/sensors/databricks_partition.py
+++ b/providers/databricks/src/airflow/providers/databricks/sensors/databricks_partition.py
@@ -25,12 +25,11 @@ from datetime import datetime
 from functools import cached_property
 from typing import TYPE_CHECKING, Any, Callable
 
-from databricks.sql.utils import ParamEscaper
-
 from airflow.exceptions import AirflowException
 from airflow.providers.common.sql.hooks.sql import fetch_all_handler
 from airflow.providers.databricks.hooks.databricks_sql import DatabricksSqlHook
 from airflow.sensors.base import BaseSensorOperator
+from databricks.sql.utils import ParamEscaper
 
 if TYPE_CHECKING:
     try:
diff --git a/providers/src/airflow/providers/databricks/sensors/databricks_sql.py b/providers/databricks/src/airflow/providers/databricks/sensors/databricks_sql.py
similarity index 100%
rename from providers/src/airflow/providers/databricks/sensors/databricks_sql.py
rename to providers/databricks/src/airflow/providers/databricks/sensors/databricks_sql.py
diff --git a/providers/src/airflow/providers/databricks/triggers/__init__.py b/providers/databricks/src/airflow/providers/databricks/triggers/__init__.py
similarity index 100%
rename from providers/src/airflow/providers/databricks/triggers/__init__.py
rename to providers/databricks/src/airflow/providers/databricks/triggers/__init__.py
diff --git a/providers/src/airflow/providers/databricks/triggers/databricks.py b/providers/databricks/src/airflow/providers/databricks/triggers/databricks.py
similarity index 100%
rename from providers/src/airflow/providers/databricks/triggers/databricks.py
rename to providers/databricks/src/airflow/providers/databricks/triggers/databricks.py
diff --git a/providers/src/airflow/providers/databricks/utils/__init__.py b/providers/databricks/src/airflow/providers/databricks/utils/__init__.py
similarity index 100%
rename from providers/src/airflow/providers/databricks/utils/__init__.py
rename to providers/databricks/src/airflow/providers/databricks/utils/__init__.py
diff --git a/providers/src/airflow/providers/databricks/utils/databricks.py b/providers/databricks/src/airflow/providers/databricks/utils/databricks.py
similarity index 100%
rename from providers/src/airflow/providers/databricks/utils/databricks.py
rename to providers/databricks/src/airflow/providers/databricks/utils/databricks.py
diff --git a/providers/tests/databricks/hooks/test_databricks_base.py b/providers/databricks/tests/conftest.py
similarity index 62%
copy from providers/tests/databricks/hooks/test_databricks_base.py
copy to providers/databricks/tests/conftest.py
index fdc019c1b5c..068fe6bbf5a 100644
--- a/providers/tests/databricks/hooks/test_databricks_base.py
+++ b/providers/databricks/tests/conftest.py
@@ -1,4 +1,3 @@
-#
 # Licensed to the Apache Software Foundation (ASF) under one
 # or more contributor license agreements.  See the NOTICE file
 # distributed with this work for additional information
@@ -17,17 +16,17 @@
 # under the License.
 from __future__ import annotations
 
-import pytest
+import pathlib
 
-from airflow.providers.databricks.hooks.databricks_base import BaseDatabricksHook
+import pytest
 
-DEFAULT_CONN_ID = "databricks_default"
+pytest_plugins = "tests_common.pytest_plugin"
 
 
-class TestBaseDatabricksHook:
-    def test_init_exception(self):
-        """
-        Tests handling incorrect parameters passed to ``__init__``
-        """
-        with pytest.raises(ValueError, match="Retry limit must be greater than or equal to 1"):
-            BaseDatabricksHook(databricks_conn_id=DEFAULT_CONN_ID, retry_limit=0)
+@pytest.hookimpl(tryfirst=True)
+def pytest_configure(config: pytest.Config) -> None:
+    deprecations_ignore_path = pathlib.Path(__file__).parent.joinpath("deprecations_ignore.yml")
+    dep_path = [deprecations_ignore_path] if deprecations_ignore_path.exists() else []
+    config.inicfg["airflow_deprecations_ignore"] = (
+        config.inicfg.get("airflow_deprecations_ignore", []) + dep_path  # type: ignore[assignment,operator]
+    )
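
The tryfirst=True hook implementation above runs before other plugins'
pytest_configure, so the per-provider deprecations_ignore.yml is appended to the
airflow_deprecations_ignore ini option before other configure hooks run. The same
pattern works for any list-valued ini option; a minimal standalone sketch (the
option name my_ignore_files is made up for illustration):

    import pathlib

    import pytest

    @pytest.hookimpl(tryfirst=True)
    def pytest_configure(config: pytest.Config) -> None:
        # Extend a list-valued ini option early, before other plugins consume it.
        extra = pathlib.Path(__file__).parent / "ignore.yml"
        existing = config.inicfg.get("my_ignore_files", [])
        config.inicfg["my_ignore_files"] = existing + ([extra] if extra.exists() else [])
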
diff --git a/providers/src/airflow/providers/databricks/plugins/__init__.py b/providers/databricks/tests/provider_tests/__init__.py
similarity index 90%
rename from providers/src/airflow/providers/databricks/plugins/__init__.py
rename to providers/databricks/tests/provider_tests/__init__.py
index 13a83393a91..e8fd2285643 100644
--- a/providers/src/airflow/providers/databricks/plugins/__init__.py
+++ b/providers/databricks/tests/provider_tests/__init__.py
@@ -14,3 +14,4 @@
 # KIND, either express or implied.  See the License for the
 # specific language governing permissions and limitations
 # under the License.
+__path__ = __import__("pkgutil").extend_path(__path__, __name__)  # type: ignore
diff --git a/providers/tests/databricks/__init__.py b/providers/databricks/tests/provider_tests/databricks/__init__.py
similarity index 100%
rename from providers/tests/databricks/__init__.py
rename to providers/databricks/tests/provider_tests/databricks/__init__.py
diff --git a/providers/tests/databricks/hooks/__init__.py b/providers/databricks/tests/provider_tests/databricks/hooks/__init__.py
similarity index 100%
rename from providers/tests/databricks/hooks/__init__.py
rename to providers/databricks/tests/provider_tests/databricks/hooks/__init__.py
diff --git a/providers/tests/databricks/hooks/test_databricks.py b/providers/databricks/tests/provider_tests/databricks/hooks/test_databricks.py
similarity index 100%
rename from providers/tests/databricks/hooks/test_databricks.py
rename to providers/databricks/tests/provider_tests/databricks/hooks/test_databricks.py
diff --git a/providers/tests/databricks/hooks/test_databricks_azure_workload_identity.py b/providers/databricks/tests/provider_tests/databricks/hooks/test_databricks_azure_workload_identity.py
similarity index 100%
rename from providers/tests/databricks/hooks/test_databricks_azure_workload_identity.py
rename to providers/databricks/tests/provider_tests/databricks/hooks/test_databricks_azure_workload_identity.py
diff --git a/providers/tests/databricks/hooks/test_databricks_azure_workload_identity_async.py b/providers/databricks/tests/provider_tests/databricks/hooks/test_databricks_azure_workload_identity_async.py
similarity index 100%
rename from providers/tests/databricks/hooks/test_databricks_azure_workload_identity_async.py
rename to providers/databricks/tests/provider_tests/databricks/hooks/test_databricks_azure_workload_identity_async.py
diff --git a/providers/tests/databricks/hooks/test_databricks_base.py b/providers/databricks/tests/provider_tests/databricks/hooks/test_databricks_base.py
similarity index 100%
rename from providers/tests/databricks/hooks/test_databricks_base.py
rename to providers/databricks/tests/provider_tests/databricks/hooks/test_databricks_base.py
diff --git a/providers/tests/databricks/hooks/test_databricks_sql.py b/providers/databricks/tests/provider_tests/databricks/hooks/test_databricks_sql.py
similarity index 100%
rename from providers/tests/databricks/hooks/test_databricks_sql.py
rename to providers/databricks/tests/provider_tests/databricks/hooks/test_databricks_sql.py
diff --git a/providers/tests/databricks/operators/__init__.py b/providers/databricks/tests/provider_tests/databricks/operators/__init__.py
similarity index 100%
rename from providers/tests/databricks/operators/__init__.py
rename to providers/databricks/tests/provider_tests/databricks/operators/__init__.py
diff --git a/providers/tests/databricks/operators/test_databricks.py b/providers/databricks/tests/provider_tests/databricks/operators/test_databricks.py
similarity index 99%
rename from providers/tests/databricks/operators/test_databricks.py
rename to providers/databricks/tests/provider_tests/databricks/operators/test_databricks.py
index ac5e556e48d..5ff686ea0aa 100644
--- a/providers/tests/databricks/operators/test_databricks.py
+++ b/providers/databricks/tests/provider_tests/databricks/operators/test_databricks.py
@@ -2208,7 +2208,7 @@ class TestDatabricksNotebookOperator:
         exception_message = "Both new_cluster and existing_cluster_id are set. 
Only one should be set."
         assert str(exc_info.value) == exception_message
 
-    def test_both_new_and_existing_cluster_unset(self, caplog):
+    def test_both_new_and_existing_cluster_unset(self):
         operator = DatabricksNotebookOperator(
             task_id="test_task",
             notebook_path="test_path",
diff --git a/providers/tests/databricks/operators/test_databricks_copy.py b/providers/databricks/tests/provider_tests/databricks/operators/test_databricks_copy.py
similarity index 100%
rename from providers/tests/databricks/operators/test_databricks_copy.py
rename to providers/databricks/tests/provider_tests/databricks/operators/test_databricks_copy.py
diff --git a/providers/tests/databricks/operators/test_databricks_repos.py b/providers/databricks/tests/provider_tests/databricks/operators/test_databricks_repos.py
similarity index 100%
rename from providers/tests/databricks/operators/test_databricks_repos.py
rename to providers/databricks/tests/provider_tests/databricks/operators/test_databricks_repos.py
diff --git a/providers/tests/databricks/operators/test_databricks_sql.py b/providers/databricks/tests/provider_tests/databricks/operators/test_databricks_sql.py
similarity index 100%
rename from providers/tests/databricks/operators/test_databricks_sql.py
rename to providers/databricks/tests/provider_tests/databricks/operators/test_databricks_sql.py
diff --git a/providers/tests/databricks/operators/test_databricks_workflow.py b/providers/databricks/tests/provider_tests/databricks/operators/test_databricks_workflow.py
similarity index 100%
rename from providers/tests/databricks/operators/test_databricks_workflow.py
rename to providers/databricks/tests/provider_tests/databricks/operators/test_databricks_workflow.py
diff --git a/providers/tests/databricks/plugins/__init__.py b/providers/databricks/tests/provider_tests/databricks/plugins/__init__.py
similarity index 100%
rename from providers/tests/databricks/plugins/__init__.py
rename to providers/databricks/tests/provider_tests/databricks/plugins/__init__.py
diff --git a/providers/tests/databricks/plugins/test_databricks_workflow.py b/providers/databricks/tests/provider_tests/databricks/plugins/test_databricks_workflow.py
similarity index 100%
rename from providers/tests/databricks/plugins/test_databricks_workflow.py
rename to providers/databricks/tests/provider_tests/databricks/plugins/test_databricks_workflow.py
diff --git a/providers/tests/databricks/sensors/__init__.py b/providers/databricks/tests/provider_tests/databricks/sensors/__init__.py
similarity index 100%
rename from providers/tests/databricks/sensors/__init__.py
rename to providers/databricks/tests/provider_tests/databricks/sensors/__init__.py
diff --git a/providers/tests/databricks/sensors/test_databricks_partition.py b/providers/databricks/tests/provider_tests/databricks/sensors/test_databricks_partition.py
similarity index 100%
rename from providers/tests/databricks/sensors/test_databricks_partition.py
rename to providers/databricks/tests/provider_tests/databricks/sensors/test_databricks_partition.py
diff --git a/providers/tests/databricks/sensors/test_databricks_sql.py b/providers/databricks/tests/provider_tests/databricks/sensors/test_databricks_sql.py
similarity index 100%
rename from providers/tests/databricks/sensors/test_databricks_sql.py
rename to providers/databricks/tests/provider_tests/databricks/sensors/test_databricks_sql.py
diff --git a/providers/tests/databricks/test_exceptions.py b/providers/databricks/tests/provider_tests/databricks/test_exceptions.py
similarity index 100%
rename from providers/tests/databricks/test_exceptions.py
rename to providers/databricks/tests/provider_tests/databricks/test_exceptions.py
diff --git a/providers/tests/databricks/triggers/__init__.py b/providers/databricks/tests/provider_tests/databricks/triggers/__init__.py
similarity index 100%
rename from providers/tests/databricks/triggers/__init__.py
rename to providers/databricks/tests/provider_tests/databricks/triggers/__init__.py
diff --git a/providers/tests/databricks/triggers/test_databricks.py b/providers/databricks/tests/provider_tests/databricks/triggers/test_databricks.py
similarity index 100%
rename from providers/tests/databricks/triggers/test_databricks.py
rename to providers/databricks/tests/provider_tests/databricks/triggers/test_databricks.py
diff --git a/providers/tests/databricks/utils/__init__.py b/providers/databricks/tests/provider_tests/databricks/utils/__init__.py
similarity index 100%
rename from providers/tests/databricks/utils/__init__.py
rename to providers/databricks/tests/provider_tests/databricks/utils/__init__.py
diff --git a/providers/tests/databricks/utils/test_databricks.py b/providers/databricks/tests/provider_tests/databricks/utils/test_databricks.py
similarity index 100%
rename from providers/tests/databricks/utils/test_databricks.py
rename to providers/databricks/tests/provider_tests/databricks/utils/test_databricks.py
diff --git a/providers/tests/system/databricks/__init__.py b/providers/databricks/tests/system/databricks/__init__.py
similarity index 100%
rename from providers/tests/system/databricks/__init__.py
rename to providers/databricks/tests/system/databricks/__init__.py
diff --git a/providers/tests/system/databricks/example_databricks.py b/providers/databricks/tests/system/databricks/example_databricks.py
similarity index 100%
rename from providers/tests/system/databricks/example_databricks.py
rename to providers/databricks/tests/system/databricks/example_databricks.py
diff --git a/providers/tests/system/databricks/example_databricks_repos.py b/providers/databricks/tests/system/databricks/example_databricks_repos.py
similarity index 100%
rename from providers/tests/system/databricks/example_databricks_repos.py
rename to providers/databricks/tests/system/databricks/example_databricks_repos.py
diff --git a/providers/tests/system/databricks/example_databricks_sensors.py b/providers/databricks/tests/system/databricks/example_databricks_sensors.py
similarity index 100%
rename from providers/tests/system/databricks/example_databricks_sensors.py
rename to providers/databricks/tests/system/databricks/example_databricks_sensors.py
diff --git a/providers/tests/system/databricks/example_databricks_sql.py b/providers/databricks/tests/system/databricks/example_databricks_sql.py
similarity index 100%
rename from providers/tests/system/databricks/example_databricks_sql.py
rename to providers/databricks/tests/system/databricks/example_databricks_sql.py
diff --git a/providers/tests/system/databricks/example_databricks_workflow.py b/providers/databricks/tests/system/databricks/example_databricks_workflow.py
similarity index 100%
rename from providers/tests/system/databricks/example_databricks_workflow.py
rename to providers/databricks/tests/system/databricks/example_databricks_workflow.py
diff --git a/pyproject.toml b/pyproject.toml
index ea6face2d29..1314f1e3db0 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -667,6 +667,7 @@ dev = [
   "apache-airflow-providers-common-io",
   "apache-airflow-providers-common-sql",
   "apache-airflow-providers-docker",
+  "apache-airflow-providers-databricks",
   "apache-airflow-providers-datadog",
   "apache-airflow-providers-dbt-cloud",
   "apache-airflow-providers-dingding",
@@ -763,6 +764,7 @@ apache-airflow-providers-cohere = { workspace = true }
 apache-airflow-providers-common-compat = { workspace = true }
 apache-airflow-providers-common-io = { workspace = true }
 apache-airflow-providers-common-sql = { workspace = true }
+apache-airflow-providers-databricks = { workspace = true }
 apache-airflow-providers-datadog = { workspace = true }
 apache-airflow-providers-dbt-cloud = { workspace = true }
 apache-airflow-providers-dingding = { workspace = true }
@@ -858,6 +860,7 @@ members = [
     "providers/common/compat",
     "providers/common/io",
     "providers/common/sql",
+    "providers/databricks",
     "providers/datadog",
     "providers/dbt/cloud",
     "providers/dingding",
diff --git a/scripts/ci/docker-compose/remove-sources.yml b/scripts/ci/docker-compose/remove-sources.yml
index 17e4e53627a..08eb51ac546 100644
--- a/scripts/ci/docker-compose/remove-sources.yml
+++ b/scripts/ci/docker-compose/remove-sources.yml
@@ -60,6 +60,7 @@ services:
       - ../../../empty:/opt/airflow/providers/common/compat/src
       - ../../../empty:/opt/airflow/providers/common/io/src
       - ../../../empty:/opt/airflow/providers/common/sql/src
+      - ../../../empty:/opt/airflow/providers/databricks/src
       - ../../../empty:/opt/airflow/providers/datadog/src
       - ../../../empty:/opt/airflow/providers/dbt/cloud/src
       - ../../../empty:/opt/airflow/providers/dingding/src
diff --git a/scripts/ci/docker-compose/tests-sources.yml b/scripts/ci/docker-compose/tests-sources.yml
index 43ff5ad79e4..ae66e0abfb0 100644
--- a/scripts/ci/docker-compose/tests-sources.yml
+++ b/scripts/ci/docker-compose/tests-sources.yml
@@ -67,6 +67,7 @@ services:
      - ../../../providers/common/compat/tests:/opt/airflow/providers/common/compat/tests
      - ../../../providers/common/io/tests:/opt/airflow/providers/common/io/tests
      - ../../../providers/common/sql/tests:/opt/airflow/providers/common/sql/tests
+      - ../../../providers/databricks/tests:/opt/airflow/providers/databricks/tests
      - ../../../providers/datadog/tests:/opt/airflow/providers/datadog/tests
      - ../../../providers/dbt/cloud/tests:/opt/airflow/providers/dbt/cloud/tests
      - ../../../providers/dingding/tests:/opt/airflow/providers/dingding/tests
