This is an automated email from the ASF dual-hosted git repository.

potiuk pushed a commit to branch main
in repository https://gitbox.apache.org/repos/asf/airflow.git


The following commit(s) were added to refs/heads/main by this push:
     new 32fc4be502 Migrate Google example DAG mysql_to_gcs to new design AIP-47 (#24540)
32fc4be502 is described below

commit 32fc4be502b3d1b60681e38b7b6947445987ec19
Author: Chenglong Yan <[email protected]>
AuthorDate: Mon Jun 20 06:56:08 2022 +0800

    Migrate Google example DAG mysql_to_gcs to new design AIP-47 (#24540)
    
    related: #22447, #22430
---
 .../cloud/example_dags/example_mysql_to_gcs.py     | 40 -----------
 .../operators/transfer/mysql_to_gcs.rst            |  2 +-
 .../cloud/transfers/test_mysql_to_gcs_system.py    | 81 ----------------------
 .../google/cloud/gcs/example_mysql_to_gcs.py       | 74 ++++++++++++++++++++
 4 files changed, 75 insertions(+), 122 deletions(-)

diff --git a/airflow/providers/google/cloud/example_dags/example_mysql_to_gcs.py b/airflow/providers/google/cloud/example_dags/example_mysql_to_gcs.py
deleted file mode 100644
index c8c798bc89..0000000000
--- a/airflow/providers/google/cloud/example_dags/example_mysql_to_gcs.py
+++ /dev/null
@@ -1,40 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements.  See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership.  The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License.  You may obtain a copy of the License at
-#
-#   http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing,
-# software distributed under the License is distributed on an
-# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-# KIND, either express or implied.  See the License for the
-# specific language governing permissions and limitations
-# under the License.
-
-import os
-from datetime import datetime
-
-from airflow import models
-from airflow.providers.google.cloud.transfers.mysql_to_gcs import MySQLToGCSOperator
-
-GCS_BUCKET = os.environ.get("GCP_GCS_BUCKET", "example-airflow-mysql-gcs")
-FILENAME = 'test_file'
-
-SQL_QUERY = "SELECT * from test_table"
-
-with models.DAG(
-    'example_mysql_to_gcs',
-    schedule_interval='@once',
-    start_date=datetime(2021, 1, 1),
-    catchup=False,
-    tags=['example'],
-) as dag:
-    # [START howto_operator_mysql_to_gcs]
-    upload = MySQLToGCSOperator(
-        task_id='mysql_to_gcs', sql=SQL_QUERY, bucket=GCS_BUCKET, filename=FILENAME, export_format='csv'
-    )
-    # [END howto_operator_mysql_to_gcs]
diff --git a/docs/apache-airflow-providers-google/operators/transfer/mysql_to_gcs.rst b/docs/apache-airflow-providers-google/operators/transfer/mysql_to_gcs.rst
index 6323c1eb03..65f69448b8 100644
--- a/docs/apache-airflow-providers-google/operators/transfer/mysql_to_gcs.rst
+++ b/docs/apache-airflow-providers-google/operators/transfer/mysql_to_gcs.rst
@@ -38,7 +38,7 @@ When you use this operator, you can optionally compress the data being uploaded
 
 Below is an example of using this operator to upload data to GCS.
 
-.. exampleinclude:: /../../airflow/providers/google/cloud/example_dags/example_mysql_to_gcs.py
+.. exampleinclude:: /../../tests/system/providers/google/cloud/gcs/example_mysql_to_gcs.py
     :language: python
     :dedent: 0
     :start-after: [START howto_operator_mysql_to_gcs]
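
Not part of this patch: the documentation text in the hunk above notes that the uploaded data can optionally be compressed. The following is a minimal sketch of such a configuration, assuming the provider's transfer operator exposes gzip and mysql_conn_id parameters (neither appears in this diff):

    from airflow.providers.google.cloud.transfers.mysql_to_gcs import MySQLToGCSOperator

    # Sketch only: the same operator used in the example DAG, with compression enabled.
    # 'gzip' and 'mysql_conn_id' are assumed parameter names, not shown in this commit.
    upload_compressed = MySQLToGCSOperator(
        task_id="mysql_to_gcs_gzip",
        mysql_conn_id="mysql_default",   # assumed default MySQL connection id
        sql="SELECT * FROM test_table",
        bucket="example-bucket",         # hypothetical bucket name
        filename="test_file.csv.gz",
        export_format="csv",
        gzip=True,                       # assumed flag that gzip-compresses the uploaded file
    )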
diff --git a/tests/providers/google/cloud/transfers/test_mysql_to_gcs_system.py b/tests/providers/google/cloud/transfers/test_mysql_to_gcs_system.py
deleted file mode 100644
index 08aeac929c..0000000000
--- a/tests/providers/google/cloud/transfers/test_mysql_to_gcs_system.py
+++ /dev/null
@@ -1,81 +0,0 @@
-#
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements.  See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership.  The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License.  You may obtain a copy of the License at
-#
-#   http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing,
-# software distributed under the License is distributed on an
-# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-# KIND, either express or implied.  See the License for the
-# specific language governing permissions and limitations
-# under the License.
-import pytest
-from psycopg2 import OperationalError, ProgrammingError
-
-from airflow.providers.google.cloud.example_dags.example_mysql_to_gcs import GCS_BUCKET
-from airflow.providers.mysql.hooks.mysql import MySqlHook
-from tests.providers.google.cloud.utils.gcp_authenticator import GCP_GCS_KEY
-from tests.test_utils.gcp_system_helpers import CLOUD_DAG_FOLDER, GoogleSystemTest, provide_gcp_context
-
-CREATE_QUERY = """
-CREATE TABLE test_table
-(
-    id int auto_increment primary key,
-    params json
-);
-"""
-
-LOAD_QUERY = """
-INSERT INTO test_table (id, params)
-VALUES
-   (
-      1, '{ "customer": "Lily Bush", "items": {"product": "Diaper","qty": 24}}'
-   ),
-   (
-      2, '{ "customer": "Josh William", "items": {"product": "Toy Car","qty": 1}}'
-   ),
-   (
-      3, '{ "customer": "Mary Clark", "items": {"product": "Toy Train","qty": 2}}'
-   );
-"""
-DELETE_QUERY = "DROP TABLE test_table;"
-
-
[email protected]("mysql")
[email protected]_file(GCP_GCS_KEY)
-class MySQLToGCSSystemTest(GoogleSystemTest):
-    @staticmethod
-    def init_db():
-        try:
-            hook = MySqlHook()
-            hook.run(CREATE_QUERY)
-            hook.run(LOAD_QUERY)
-        except (OperationalError, ProgrammingError):
-            pass
-
-    @staticmethod
-    def drop_db():
-        hook = MySqlHook()
-        hook.run(DELETE_QUERY)
-
-    @provide_gcp_context(GCP_GCS_KEY)
-    def setUp(self):
-        super().setUp()
-        self.create_gcs_bucket(GCS_BUCKET)
-        self.init_db()
-
-    @provide_gcp_context(GCP_GCS_KEY)
-    def test_run_example_dag(self):
-        self.run_dag('example_mysql_to_gcs', CLOUD_DAG_FOLDER)
-
-    @provide_gcp_context(GCP_GCS_KEY)
-    def tearDown(self):
-        self.delete_gcs_bucket(GCS_BUCKET)
-        self.drop_db()
-        super().tearDown()
diff --git a/tests/system/providers/google/cloud/gcs/example_mysql_to_gcs.py b/tests/system/providers/google/cloud/gcs/example_mysql_to_gcs.py
new file mode 100644
index 0000000000..a891a72a3b
--- /dev/null
+++ b/tests/system/providers/google/cloud/gcs/example_mysql_to_gcs.py
@@ -0,0 +1,74 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+
+import os
+from datetime import datetime
+
+from airflow import models
+from airflow.providers.google.cloud.operators.gcs import GCSCreateBucketOperator, GCSDeleteBucketOperator
+from airflow.providers.google.cloud.transfers.mysql_to_gcs import MySQLToGCSOperator
+from airflow.utils.trigger_rule import TriggerRule
+
+ENV_ID = os.environ.get("SYSTEM_TESTS_ENV_ID")
+PROJECT_ID = os.environ.get("SYSTEM_TESTS_GCP_PROJECT")
+DAG_ID = "example_mysql_to_gcs"
+
+BUCKET_NAME = f"bucket_{DAG_ID}_{ENV_ID}"
+FILENAME = 'test_file'
+
+SQL_QUERY = "SELECT * from test_table"
+
+with models.DAG(
+    DAG_ID,
+    schedule_interval='@once',
+    start_date=datetime(2021, 1, 1),
+    catchup=False,
+    tags=['example', 'mysql'],
+) as dag:
+    create_bucket = GCSCreateBucketOperator(
+        task_id="create_bucket", bucket_name=BUCKET_NAME, project_id=PROJECT_ID
+    )
+
+    # [START howto_operator_mysql_to_gcs]
+    upload_mysql_to_gcs = MySQLToGCSOperator(
+        task_id='mysql_to_gcs', sql=SQL_QUERY, bucket=BUCKET_NAME, filename=FILENAME, export_format='csv'
+    )
+    # [END howto_operator_mysql_to_gcs]
+
+    delete_bucket = GCSDeleteBucketOperator(
+        task_id="delete_bucket", bucket_name=BUCKET_NAME, 
trigger_rule=TriggerRule.ALL_DONE
+    )
+
+    (
+        # TEST SETUP
+        create_bucket
+        # TEST BODY
+        >> upload_mysql_to_gcs
+        # TEST TEARDOWN
+        >> delete_bucket
+    )
+
+    from tests.system.utils.watcher import watcher
+
+    # This test needs watcher in order to properly mark success/failure
+    # when "tearDown" task with trigger rule is part of the DAG
+    list(dag.tasks) >> watcher()
+
+from tests.system.utils import get_test_run  # noqa: E402
+
+# Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest)
+test_run = get_test_run(dag)
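
Not part of this patch: the comment above points at tests/system/README.md#run_via_pytest. As a hedged sketch of how the new system test is typically driven, the snippet below shows the environment variables the DAG file reads; the values and the pytest invocation are assumptions about the local setup, not verified commands.

    import os

    # Sketch only (values hypothetical): the example DAG reads these variables
    # to build the bucket name and project id before pytest collects the
    # test_run object produced by get_test_run(dag) above.
    os.environ["SYSTEM_TESTS_ENV_ID"] = "my-env"
    os.environ["SYSTEM_TESTS_GCP_PROJECT"] = "my-gcp-project"

    # Then, per the README reference in the diff:
    #   pytest tests/system/providers/google/cloud/gcs/example_mysql_to_gcs.py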
