RosterIn commented on a change in pull request #6578: [DEPENDS ON #6575][DEPENDS ON #6577][AIRFLOW-5907] Add S3 to MySql Operator
URL: https://github.com/apache/airflow/pull/6578#discussion_r346816678
 
 

 ##########
 File path: airflow/operators/s3_to_mysql.py
 ##########
 @@ -0,0 +1,65 @@
+from airflow.hooks.mysql_hook import MySqlHook
+from airflow.models import BaseOperator
+from airflow.providers.aws.hooks.s3 import S3Hook
+from airflow.utils.decorators import apply_defaults
+
+
+class S3ToMySqlTransfer(BaseOperator):
+    """
+    Loads a file from S3 into a MySQL table.
+
+    :param s3_source_key: The path to the file (S3 key) that will be loaded into MySQL.
+    :type s3_source_key: str
+    :param mysql_table: The MySQL table into which the data will be loaded.
+    :type mysql_table: str
+    :param delimiter: The delimiter for the file.
+    :type delimiter: str
+    :param header_rows: The number of header rows in the input file.
+    :type header_rows: int
+    :param aws_conn_id: The S3 connection that contains the credentials for the S3 bucket.
+    :type aws_conn_id: str
+    :param mysql_conn_id: The MySQL connection that contains the credentials for the MySQL database.
+    """
+
+    template_fields = ('s3_source_key',)
+    template_ext = ()
+    ui_color = '#f4a460'
+
+    @apply_defaults
+    def __init__(self,
+                 s3_source_key,
+                 mysql_table,
+                 *args,
+                 delimiter=',',
+                 header_rows=1,
+                 aws_conn_id='s3_default',
+                 mysql_conn_id='mysql_default',
+                 **kwargs):
+        super().__init__(*args, **kwargs)
+        self.s3_source_key = s3_source_key
+        self.mysql_table = mysql_table
+        self.delimiter = delimiter
+        self.header_rows = header_rows
+        self.aws_conn_id = aws_conn_id
+        self.mysql_conn_id = mysql_conn_id
+
+    def execute(self, context):
+        """
+        Executes the transfer operation from S3 to MySQL.
+
+        :param context: The context provided when the operator is executed.
+        :type context: dict
+        """
+        self.log.info('Loading %s to MySql table %s...', self.s3_source_key, self.mysql_table)
+
+        s3_hook = S3Hook(aws_conn_id=self.aws_conn_id)
+        # Download the S3 object to a local temporary file.
+        file = s3_hook.download_file(key=self.s3_source_key)
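
For reference, here is a minimal sketch of how the operator in this diff might be wired into a DAG; the DAG id, task id, and S3 key format are illustrative assumptions, not taken from the PR:

```python
from datetime import datetime

from airflow import DAG
from airflow.operators.s3_to_mysql import S3ToMySqlTransfer

# Hypothetical DAG for illustration only.
with DAG(dag_id='s3_to_mysql_example',
         start_date=datetime(2019, 11, 1),
         schedule_interval=None) as dag:
    load_orders = S3ToMySqlTransfer(
        task_id='load_orders',
        s3_source_key='s3://my-bucket/exports/orders.csv',  # assumed full S3 URL
        mysql_table='orders',
        delimiter=',',
        header_rows=1,
        aws_conn_id='s3_default',
        mysql_conn_id='mysql_default',
    )
```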
 
 Review comment:
   You download the file to local disk, which is expected.
   Do you think you could also add an attempt to delete the file once the load is completed (and if the delete fails, skip it like nothing happened)?
   There may be GBs of data downloaded to the local disk. Even though it lands in the tmp folder, which is cleaned up automatically, it doesn't hurt to try to delete the leftovers.
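   Something along these lines would do, as a rough sketch. Note that I'm assuming the load step uses `MySqlHook.bulk_load`, since the loading code isn't shown in the excerpt above; the actual call in the PR may differ:

```python
import os

def execute(self, context):
    self.log.info('Loading %s to MySql table %s...', self.s3_source_key, self.mysql_table)

    s3_hook = S3Hook(aws_conn_id=self.aws_conn_id)
    file = s3_hook.download_file(key=self.s3_source_key)

    try:
        # Assumed load step for illustration; the hooks are imported at module top.
        mysql_hook = MySqlHook(mysql_conn_id=self.mysql_conn_id)
        mysql_hook.bulk_load(table=self.mysql_table, tmp_file=file)
    finally:
        # Best-effort cleanup: if the delete fails, log it and move on
        # so the task result is not affected.
        try:
            os.remove(file)
        except OSError:
            self.log.warning('Unable to delete temporary file %s', file)
```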

----------------------------------------------------------------
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.
 
For queries about this service, please contact Infrastructure at:
us...@infra.apache.org


With regards,
Apache Git Services
