This is an automated email from the ASF dual-hosted git repository.

jedcunningham pushed a commit to branch main
in repository https://gitbox.apache.org/repos/asf/airflow.git


The following commit(s) were added to refs/heads/main by this push:
     new f5ad26d  Fixup string concatenations (#19099)
f5ad26d is described below

commit f5ad26dcdd7bcb724992528dce71056965b94d26
Author: blag <b...@users.noreply.github.com>
AuthorDate: Thu Oct 21 15:56:51 2021 -0700

    Fixup string concatenations (#19099)
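
    Python concatenates adjacent string literals at compile time, so an
    explicit "+" between two literals is redundant. A minimal sketch of the
    pattern applied throughout this commit (the snippet reuses the filename
    and fileloc names from the dagcode.py hunk below, but is illustrative
    rather than part of the diff):

        # before: adjacent literals joined with "+", values filled in via .format()
        message = (
            "Filename '{}' causes a hash collision in the "
            + "database with '{}'. Please rename the file."
        ).format(filename, fileloc)

        # after: implicit concatenation of adjacent f-string literals
        message = (
            f"Filename '{filename}' causes a hash collision in the "
            f"database with '{fileloc}'. Please rename the file."
        )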
---
 .pre-commit-config.yaml                            |   2 +-
 airflow/models/dagcode.py                          |   7 +-
 airflow/models/taskinstance.py                     |   4 +-
 airflow/operators/google_api_to_s3_transfer.py     |   8 +-
 airflow/providers/alibaba/cloud/sensors/oss_key.py |   4 +-
 airflow/providers/amazon/aws/hooks/s3.py           |   8 +-
 airflow/providers/amazon/aws/sensors/s3_key.py     |   4 +-
 airflow/providers/databricks/hooks/databricks.py   |   2 +-
 .../example_dags/example_docker_copy_data.py       |   2 +-
 airflow/providers/yandex/hooks/yandex.py           |   2 +-
 airflow/sensors/smart_sensor.py                    |   2 +-
 dev/provider_packages/prepare_provider_packages.py |   2 +-
 .../pre_commit_check_extras_have_providers.py      |   2 +-
 .../amazon/aws/utils/eks_test_constants.py         |  61 ++++++------
 .../apache/hive/operators/test_hive_stats.py       |  16 +--
 .../apache/spark/hooks/test_spark_submit.py        | 109 +++++++++++----------
 16 files changed, 121 insertions(+), 114 deletions(-)

diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml
index e652f95..72a5ec6 100644
--- a/.pre-commit-config.yaml
+++ b/.pre-commit-config.yaml
@@ -563,7 +563,7 @@ repos:
         additional_dependencies:
           - 'PyYAML==5.3.1'
           - 'jsonschema==3.2.0'
-          - 'tabulate==0.8.7'
+          - 'tabulate==0.8.8'
           - 'jsonpath-ng==1.5.3'
           - 'rich==10.9.0'
       - id: mermaid
diff --git a/airflow/models/dagcode.py b/airflow/models/dagcode.py
index fa5f7e1..7322ba9 100644
--- a/airflow/models/dagcode.py
+++ b/airflow/models/dagcode.py
@@ -94,10 +94,11 @@ class DagCode(Base):
             hashes_to_filelocs = {DagCode.dag_fileloc_hash(fileloc): fileloc for fileloc in filelocs}
             message = ""
             for fileloc in conflicting_filelocs:
+                filename = hashes_to_filelocs[DagCode.dag_fileloc_hash(fileloc)]
                 message += (
-                    "Filename '{}' causes a hash collision in the "
-                    + "database with '{}'. Please rename the file."
-                ).format(hashes_to_filelocs[DagCode.dag_fileloc_hash(fileloc)], fileloc)
+                    f"Filename '{filename}' causes a hash collision in the "
+                    f"database with '{fileloc}'. Please rename the file."
+                )
             raise AirflowException(message)
 
         existing_filelocs = {dag_code.fileloc for dag_code in existing_orm_dag_codes}
diff --git a/airflow/models/taskinstance.py b/airflow/models/taskinstance.py
index c3484ee..b9745cc 100644
--- a/airflow/models/taskinstance.py
+++ b/airflow/models/taskinstance.py
@@ -1267,8 +1267,8 @@ class TaskInstance(Base, LoggingMixin):
     def _log_state(self, lead_msg: str = ''):
         self.log.info(
             '%sMarking task as %s.'
-            + ' dag_id=%s, task_id=%s,'
-            + ' execution_date=%s, start_date=%s, end_date=%s',
+            ' dag_id=%s, task_id=%s,'
+            ' execution_date=%s, start_date=%s, end_date=%s',
             lead_msg,
             self.state.upper(),
             self.dag_id,
diff --git a/airflow/operators/google_api_to_s3_transfer.py b/airflow/operators/google_api_to_s3_transfer.py
index b08ba75..9566cdd 100644
--- a/airflow/operators/google_api_to_s3_transfer.py
+++ b/airflow/operators/google_api_to_s3_transfer.py
@@ -40,10 +40,10 @@ class GoogleApiToS3Transfer(GoogleApiToS3Operator):
 
     def __init__(self, **kwargs):
         warnings.warn(
-            """This class is deprecated.
-            Please use
-            `airflow.providers.amazon.aws.transfers."""
-            + "google_api_to_s3_transfer.GoogleApiToS3Operator`.",
+            "This class is deprecated. "
+            "Please use "
+            "`airflow.providers.amazon.aws.transfers."
+            "google_api_to_s3_transfer.GoogleApiToS3Operator`.",
             DeprecationWarning,
             stacklevel=3,
         )
diff --git a/airflow/providers/alibaba/cloud/sensors/oss_key.py b/airflow/providers/alibaba/cloud/sensors/oss_key.py
index 22bd8e9..9949aff 100644
--- a/airflow/providers/alibaba/cloud/sensors/oss_key.py
+++ b/airflow/providers/alibaba/cloud/sensors/oss_key.py
@@ -76,8 +76,8 @@ class OSSKeySensor(BaseSensorOperator):
             if parsed_url.scheme != '' or parsed_url.netloc != '':
                 raise AirflowException(
                     'If bucket_name is provided, bucket_key'
-                    + ' should be relative path from root'
-                    + ' level, rather than a full oss:// url'
+                    ' should be relative path from root'
+                    ' level, rather than a full oss:// url'
                 )
 
         self.log.info('Poking for key : oss://%s/%s', self.bucket_name, self.bucket_key)
diff --git a/airflow/providers/amazon/aws/hooks/s3.py b/airflow/providers/amazon/aws/hooks/s3.py
index 84f3794..4a2a15b 100644
--- a/airflow/providers/amazon/aws/hooks/s3.py
+++ b/airflow/providers/amazon/aws/hooks/s3.py
@@ -716,8 +716,8 @@ class S3Hook(AwsBaseHook):
             if parsed_url.scheme != '' or parsed_url.netloc != '':
                 raise AirflowException(
                     'If dest_bucket_name is provided, '
-                    + 'dest_bucket_key should be relative path '
-                    + 'from root level, rather than a full s3:// url'
+                    'dest_bucket_key should be relative path '
+                    'from root level, rather than a full s3:// url'
                 )
 
         if source_bucket_name is None:
@@ -727,8 +727,8 @@ class S3Hook(AwsBaseHook):
             if parsed_url.scheme != '' or parsed_url.netloc != '':
                 raise AirflowException(
                     'If source_bucket_name is provided, '
-                    + 'source_bucket_key should be relative path '
-                    + 'from root level, rather than a full s3:// url'
+                    'source_bucket_key should be relative path '
+                    'from root level, rather than a full s3:// url'
                 )
 
         copy_source = {'Bucket': source_bucket_name, 'Key': source_bucket_key, 'VersionId': source_version_id}
diff --git a/airflow/providers/amazon/aws/sensors/s3_key.py b/airflow/providers/amazon/aws/sensors/s3_key.py
index 9a7af08..57cc0df 100644
--- a/airflow/providers/amazon/aws/sensors/s3_key.py
+++ b/airflow/providers/amazon/aws/sensors/s3_key.py
@@ -89,8 +89,8 @@ class S3KeySensor(BaseSensorOperator):
             if parsed_url.scheme != '' or parsed_url.netloc != '':
                 raise AirflowException(
                     'If bucket_name is provided, bucket_key'
-                    + ' should be relative path from root'
-                    + ' level, rather than a full s3:// url'
+                    ' should be relative path from root'
+                    ' level, rather than a full s3:// url'
                 )
 
         self.log.info('Poking for key : s3://%s/%s', self.bucket_name, self.bucket_key)
diff --git a/airflow/providers/databricks/hooks/databricks.py b/airflow/providers/databricks/hooks/databricks.py
index 5e7fc35..5e843a4 100644
--- a/airflow/providers/databricks/hooks/databricks.py
+++ b/airflow/providers/databricks/hooks/databricks.py
@@ -216,7 +216,7 @@ class DatabricksHook(BaseHook):
 
             if attempt_num == self.retry_limit:
                 raise AirflowException(
-                    ('API requests to Databricks failed {} times. ' + 'Giving up.').format(self.retry_limit)
+                    f'API requests to Databricks failed {self.retry_limit} times. Giving up.'
                 )
 
             attempt_num += 1
diff --git a/airflow/providers/docker/example_dags/example_docker_copy_data.py b/airflow/providers/docker/example_dags/example_docker_copy_data.py
index f03c4be..5ce78d0 100644
--- a/airflow/providers/docker/example_dags/example_docker_copy_data.py
+++ b/airflow/providers/docker/example_dags/example_docker_copy_data.py
@@ -75,7 +75,7 @@ t_move = DockerOperator(
         "/bin/bash",
         "-c",
         "/bin/sleep 30; "
-        "/bin/mv {{ params.source_location }}/" + f"{t_view.output}" + " {{ params.target_location }};"
+        "/bin/mv {{ params.source_location }}/" + str(t_view.output) + " {{ params.target_location }};"
         "/bin/echo '{{ params.target_location }}/" + f"{t_view.output}';",
     ],
     task_id="move_data",
diff --git a/airflow/providers/yandex/hooks/yandex.py b/airflow/providers/yandex/hooks/yandex.py
index ee1ae0d..06049e5 100644
--- a/airflow/providers/yandex/hooks/yandex.py
+++ b/airflow/providers/yandex/hooks/yandex.py
@@ -119,7 +119,7 @@ class YandexCloudBaseHook(BaseHook):
         if not (service_account_json or oauth_token or service_account_json_path):
             raise AirflowException(
                 'No credentials are found in connection. Specify either service account '
-                + 'authentication JSON or user OAuth token in Yandex.Cloud connection'
+                'authentication JSON or user OAuth token in Yandex.Cloud connection'
             )
         if service_account_json_path:
             with open(service_account_json_path) as infile:
diff --git a/airflow/sensors/smart_sensor.py b/airflow/sensors/smart_sensor.py
index 1e8c827..e042c61 100644
--- a/airflow/sensors/smart_sensor.py
+++ b/airflow/sensors/smart_sensor.py
@@ -125,7 +125,7 @@ class SensorWork:
         log_id = "-".join(
             [si.dag_id, si.task_id, si.execution_date.strftime("%Y_%m_%dT%H_%M_%S_%f"), str(si.try_number)]
         )
-        logger = logging.getLogger('airflow.task' + '.' + log_id)
+        logger = logging.getLogger(f'airflow.task.{log_id}')
 
         if len(logger.handlers) == 0:
             handler = self.create_new_task_handler()
diff --git a/dev/provider_packages/prepare_provider_packages.py b/dev/provider_packages/prepare_provider_packages.py
index a56b529..c3c7554 100755
--- a/dev/provider_packages/prepare_provider_packages.py
+++ b/dev/provider_packages/prepare_provider_packages.py
@@ -1238,7 +1238,7 @@ def validate_provider_info_with_runtime_schema(provider_info: Dict[str, Any]) ->
         console.print("[red]Provider info not validated against runtime schema[/]")
         raise Exception(
             "Error when validating schema. The schema must be compatible with "
-            + "airflow/provider_info.schema.json.",
+            "airflow/provider_info.schema.json.",
             ex,
         )
 
diff --git a/scripts/ci/pre_commit/pre_commit_check_extras_have_providers.py b/scripts/ci/pre_commit/pre_commit_check_extras_have_providers.py
index 20088b7..7c1b7eb 100755
--- a/scripts/ci/pre_commit/pre_commit_check_extras_have_providers.py
+++ b/scripts/ci/pre_commit/pre_commit_check_extras_have_providers.py
@@ -71,7 +71,7 @@ def check_all_providers_are_listed_in_setup_py() -> None:
         if provider_name not in ALL_PROVIDERS:
             errors.append(
                 f"The provider {provider_name} is missing in setup.py "
-                + "[bold]PROVIDERS_REQUIREMENTS[/]: [red]NOK[/]"
+                "[bold]PROVIDERS_REQUIREMENTS[/]: [red]NOK[/]"
             )
 
 
diff --git a/tests/providers/amazon/aws/utils/eks_test_constants.py b/tests/providers/amazon/aws/utils/eks_test_constants.py
index 1a5985c..4624cd4 100644
--- a/tests/providers/amazon/aws/utils/eks_test_constants.py
+++ b/tests/providers/amazon/aws/utils/eks_test_constants.py
@@ -187,10 +187,10 @@ class PageCount:
 
 
 FARGATE_PROFILE_UUID_PATTERN: str = (
-    "(?P<fargate_uuid>[-0-9a-z]{8}-[-0-9a-z]{4}-[-0-9a-z]{4}-[-0-9a-z]{4}-[-0-9a-z]{12})"
+    r"(?P<fargate_uuid>[-0-9a-z]{8}-[-0-9a-z]{4}-[-0-9a-z]{4}-[-0-9a-z]{4}-[-0-9a-z]{12})"
 )
 NODEGROUP_UUID_PATTERN: str = (
-    "(?P<nodegroup_uuid>[-0-9a-z]{8}-[-0-9a-z]{4}-[-0-9a-z]{4}-[-0-9a-z]{4}-[-0-9a-z]{12})"
+    r"(?P<nodegroup_uuid>[-0-9a-z]{8}-[-0-9a-z]{4}-[-0-9a-z]{4}-[-0-9a-z]{4}-[-0-9a-z]{12})"
 )
 
 
@@ -198,38 +198,41 @@ class RegExTemplates:
     """The compiled RegEx patterns used in testing."""
 
     CLUSTER_ARN: Pattern = re.compile(
-        "arn:"
-        + "(?P<partition>.+):"
-        + "eks:"
-        + "(?P<region>[-0-9a-zA-Z]+):"
-        + "(?P<account_id>[0-9]{12}):"
-        + "cluster/"
-        + "(?P<cluster_name>.+)"
+        r"""arn:
+        (?P<partition>.+):
+        eks:
+        (?P<region>[-0-9a-zA-Z]+):
+        (?P<account_id>[0-9]{12}):
+        cluster/
+        (?P<cluster_name>.+)""",
+        re.VERBOSE,
     )
     FARGATE_PROFILE_ARN: Pattern = re.compile(
-        "arn:"
-        + "(?P<partition>.+):"
-        + "eks:"
-        + "(?P<region>[-0-9a-zA-Z]+):"
-        + "(?P<account_id>[0-9]{12}):"
-        + "fargateprofile/"
-        + "(?P<cluster_name>.+)/"
-        + "(?P<fargate_name>.+)/"
-        + FARGATE_PROFILE_UUID_PATTERN
+        r"""arn:
+        (?P<partition>.+):
+        eks:
+        (?P<region>[-0-9a-zA-Z]+):
+        (?P<account_id>[0-9]{12}):
+        fargateprofile/
+        (?P<cluster_name>.+)/
+        (?P<fargate_name>.+)/"""
+        + FARGATE_PROFILE_UUID_PATTERN,
+        re.VERBOSE,
     )
     NODEGROUP_ARN: Pattern = re.compile(
-        "arn:"
-        + "(?P<partition>.+):"
-        + "eks:"
-        + "(?P<region>[-0-9a-zA-Z]+):"
-        + "(?P<account_id>[0-9]{12}):"
-        + "nodegroup/"
-        + "(?P<cluster_name>.+)/"
-        + "(?P<nodegroup_name>.+)/"
-        + NODEGROUP_UUID_PATTERN
+        r"""arn:
+        (?P<partition>.+):
+        eks:
+        (?P<region>[-0-9a-zA-Z]+):
+        (?P<account_id>[0-9]{12}):
+        nodegroup/
+        (?P<cluster_name>.+)/
+        (?P<nodegroup_name>.+)/"""
+        + NODEGROUP_UUID_PATTERN,
+        re.VERBOSE,
     )
-    NODEGROUP_ASG_NAME_PATTERN: Pattern = re.compile("eks-" + NODEGROUP_UUID_PATTERN)
-    NODEGROUP_SECURITY_GROUP_NAME_PATTERN: Pattern = re.compile("sg-" + "([-0-9a-z]{17})")
+    NODEGROUP_ASG_NAME_PATTERN: Pattern = re.compile(f"eks-{NODEGROUP_UUID_PATTERN}")
+    NODEGROUP_SECURITY_GROUP_NAME_PATTERN: Pattern = re.compile(r"sg-([-0-9a-z]{17})")
 
 
 class MethodNames:
diff --git a/tests/providers/apache/hive/operators/test_hive_stats.py b/tests/providers/apache/hive/operators/test_hive_stats.py
index 02dbdd8..b51420b 100644
--- a/tests/providers/apache/hive/operators/test_hive_stats.py
+++ b/tests/providers/apache/hive/operators/test_hive_stats.py
@@ -308,17 +308,19 @@ class TestHiveStatsCollectionOperator(TestHiveEnvironment):
                 op.run(start_date=DEFAULT_DATE, end_date=DEFAULT_DATE, ignore_ti_state=True)
 
         select_count_query = (
-            "SELECT COUNT(*) AS __count FROM airflow."
-            + "static_babynames_partitioned WHERE ds = '2015-01-01';"
+            "SELECT COUNT(*) AS __count "
+            "FROM airflow.static_babynames_partitioned "
+            "WHERE ds = '2015-01-01';"
         )
         mock_presto_hook.get_first.assert_called_with(hql=select_count_query)
 
         expected_stats_select_query = (
-            "SELECT 1 FROM hive_stats WHERE table_name='airflow."
-            + "static_babynames_partitioned' AND "
-            + "partition_repr='{\"ds\": \"2015-01-01\"}' AND "
-            + "dttm='2015-01-01T00:00:00+00:00' "
-            + "LIMIT 1;"
+            "SELECT 1 "
+            "FROM hive_stats "
+            "WHERE table_name='airflow.static_babynames_partitioned' "
+            "  AND partition_repr='{\"ds\": \"2015-01-01\"}' "
+            "  AND dttm='2015-01-01T00:00:00+00:00' "
+            "LIMIT 1;"
         )
 
         raw_stats_select_query = mock_mysql_hook.get_records.call_args_list[0][0][0]
diff --git a/tests/providers/apache/spark/hooks/test_spark_submit.py b/tests/providers/apache/spark/hooks/test_spark_submit.py
index 070719a..be47bfb 100644
--- a/tests/providers/apache/spark/hooks/test_spark_submit.py
+++ b/tests/providers/apache/spark/hooks/test_spark_submit.py
@@ -88,9 +88,11 @@ class TestSparkSubmitHook(unittest.TestCase):
                 conn_id='spark_k8s_cluster',
                 conn_type='spark',
                 host='k8s://https://k8s-master',
-                extra='{"spark-home": "/opt/spark", '
-                + '"deploy-mode": "cluster", '
-                + '"namespace": "mynamespace"}',
+                extra=(
+                    '{"spark-home": "/opt/spark", '
+                    ' "deploy-mode": "cluster", '
+                    ' "namespace": "mynamespace"}'
+                ),
             )
         )
         db.merge_conn(
@@ -120,7 +122,7 @@ class TestSparkSubmitHook(unittest.TestCase):
                 conn_id='spark_binary_and_home_set',
                 conn_type='spark',
                 host='yarn',
-                extra='{"spark-home": "/path/to/spark_home", ' + '"spark-binary": "custom-spark-submit"}',
+                extra='{"spark-home": "/path/to/spark_home", "spark-binary": "custom-spark-submit"}',
             )
         )
         db.merge_conn(
@@ -628,11 +630,11 @@ class TestSparkSubmitHook(unittest.TestCase):
         log_lines = [
             'SPARK_MAJOR_VERSION is set to 2, using Spark2',
             'WARN NativeCodeLoader: Unable to load native-hadoop library for your '
-            + 'platform... using builtin-java classes where applicable',
+            'platform... using builtin-java classes where applicable',
             'WARN DomainSocketFactory: The short-circuit local reads feature cannot '
             'be used because libhadoop cannot be loaded.',
             'INFO Client: Requesting a new application from cluster with 10 NodeManagers',
-            'INFO Client: Submitting application application_1486558679801_1820 ' + 'to ResourceManager',
+            'INFO Client: Submitting application application_1486558679801_1820 to ResourceManager',
         ]
         # When
         hook._process_spark_submit_log(log_lines)
@@ -646,25 +648,25 @@ class TestSparkSubmitHook(unittest.TestCase):
         hook = SparkSubmitHook(conn_id='spark_k8s_cluster')
         log_lines = [
             'INFO  LoggingPodStatusWatcherImpl:54 - State changed, new state:'
-            + 'pod name: spark-pi-edf2ace37be7353a958b38733a12f8e6-driver'
-            + 'namespace: default'
-            + 'labels: spark-app-selector -> spark-465b868ada474bda82ccb84ab2747fcd,'
-            + 'spark-role -> driver'
-            + 'pod uid: ba9c61f6-205f-11e8-b65f-d48564c88e42'
-            + 'creation time: 2018-03-05T10:26:55Z'
-            + 'service account name: spark'
-            + 'volumes: spark-init-properties, download-jars-volume,'
-            + 'download-files-volume, spark-token-2vmlm'
-            + 'node name: N/A'
-            + 'start time: N/A'
-            + 'container images: N/A'
-            + 'phase: Pending'
-            + 'status: []'
-            + '2018-03-05 11:26:56 INFO  LoggingPodStatusWatcherImpl:54 - State changed,'
-            + ' new state:'
-            + 'pod name: spark-pi-edf2ace37be7353a958b38733a12f8e6-driver'
-            + 'namespace: default'
-            + 'Exit code: 999'
+            'pod name: spark-pi-edf2ace37be7353a958b38733a12f8e6-driver'
+            'namespace: default'
+            'labels: spark-app-selector -> spark-465b868ada474bda82ccb84ab2747fcd,'
+            'spark-role -> driver'
+            'pod uid: ba9c61f6-205f-11e8-b65f-d48564c88e42'
+            'creation time: 2018-03-05T10:26:55Z'
+            'service account name: spark'
+            'volumes: spark-init-properties, download-jars-volume,'
+            'download-files-volume, spark-token-2vmlm'
+            'node name: N/A'
+            'start time: N/A'
+            'container images: N/A'
+            'phase: Pending'
+            'status: []'
+            '2018-03-05 11:26:56 INFO  LoggingPodStatusWatcherImpl:54 - State changed,'
+            ' new state:'
+            'pod name: spark-pi-edf2ace37be7353a958b38733a12f8e6-driver'
+            'namespace: default'
+            'Exit code: 999'
         ]
 
         # When
@@ -693,7 +695,7 @@ class TestSparkSubmitHook(unittest.TestCase):
             '17/11/28 11:14:15 INFO RestSubmissionClient: Submitting a request '
             'to launch an application in spark://spark-standalone-master:6066',
             '17/11/28 11:14:15 INFO RestSubmissionClient: Submission successfully '
-            + 'created as driver-20171128111415-0001. Polling submission state...',
+            'created as driver-20171128111415-0001. Polling submission state...',
         ]
         # When
         hook._process_spark_submit_log(log_lines)
@@ -707,9 +709,8 @@ class TestSparkSubmitHook(unittest.TestCase):
         hook = SparkSubmitHook(conn_id='spark_standalone_cluster')
         log_lines = [
             'Submitting a request for the status of submission '
-            + 'driver-20171128111415-0001 in spark://spark-standalone-master:6066',
-            '17/11/28 11:15:37 INFO RestSubmissionClient: Server responded with '
-            + 'SubmissionStatusResponse:',
+            'driver-20171128111415-0001 in spark://spark-standalone-master:6066',
+            '17/11/28 11:15:37 INFO RestSubmissionClient: Server responded with SubmissionStatusResponse:',
             '{',
             '"action" : "SubmissionStatusResponse",',
             '"driverState" : "RUNNING",',
@@ -738,12 +739,12 @@ class TestSparkSubmitHook(unittest.TestCase):
         log_lines = [
             'SPARK_MAJOR_VERSION is set to 2, using Spark2',
             'WARN NativeCodeLoader: Unable to load native-hadoop library for your '
-            + 'platform... using builtin-java classes where applicable',
+            'platform... using builtin-java classes where applicable',
             'WARN DomainSocketFactory: The short-circuit local reads feature cannot '
-            + 'be used because libhadoop cannot be loaded.',
+            'be used because libhadoop cannot be loaded.',
             'INFO Client: Requesting a new application from cluster with 10 '
-            + 'NodeManagerapplication_1486558679801_1820s',
-            'INFO Client: Submitting application application_1486558679801_1820 ' + 'to ResourceManager',
+            'NodeManagerapplication_1486558679801_1820s',
+            'INFO Client: Submitting application application_1486558679801_1820 to ResourceManager',
         ]
         env = {"PATH": "hadoop/bin"}
         hook = SparkSubmitHook(conn_id='spark_yarn_cluster', env_vars=env)
@@ -792,9 +793,9 @@ class TestSparkSubmitHook(unittest.TestCase):
         log_lines = [
             'Running Spark using the REST application submission protocol.',
             '17/11/28 11:14:15 INFO RestSubmissionClient: Submitting a request '
-            + 'to launch an application in spark://spark-standalone-master:6066',
+            'to launch an application in spark://spark-standalone-master:6066',
             '17/11/28 11:14:15 INFO RestSubmissionClient: Submission successfully '
-            + 'created as driver-20171128111415-0001. Polling submission state...',
+            'created as driver-20171128111415-0001. Polling submission state...',
         ]
         hook = SparkSubmitHook(conn_id='spark_standalone_cluster')
         hook._process_spark_submit_log(log_lines)
@@ -821,25 +822,25 @@ class TestSparkSubmitHook(unittest.TestCase):
         hook = SparkSubmitHook(conn_id='spark_k8s_cluster')
         log_lines = [
             'INFO  LoggingPodStatusWatcherImpl:54 - State changed, new state:'
-            + 'pod name: spark-pi-edf2ace37be7353a958b38733a12f8e6-driver'
-            + 'namespace: default'
-            + 'labels: spark-app-selector -> spark-465b868ada474bda82ccb84ab2747fcd,'
-            + 'spark-role -> driver'
-            + 'pod uid: ba9c61f6-205f-11e8-b65f-d48564c88e42'
-            + 'creation time: 2018-03-05T10:26:55Z'
-            + 'service account name: spark'
-            + 'volumes: spark-init-properties, download-jars-volume,'
-            + 'download-files-volume, spark-token-2vmlm'
-            + 'node name: N/A'
-            + 'start time: N/A'
-            + 'container images: N/A'
-            + 'phase: Pending'
-            + 'status: []'
-            + '2018-03-05 11:26:56 INFO  LoggingPodStatusWatcherImpl:54 - State changed,'
-            + ' new state:'
-            + 'pod name: spark-pi-edf2ace37be7353a958b38733a12f8e6-driver'
-            + 'namespace: default'
-            + 'Exit code: 0'
+            'pod name: spark-pi-edf2ace37be7353a958b38733a12f8e6-driver'
+            'namespace: default'
+            'labels: spark-app-selector -> spark-465b868ada474bda82ccb84ab2747fcd,'
+            'spark-role -> driver'
+            'pod uid: ba9c61f6-205f-11e8-b65f-d48564c88e42'
+            'creation time: 2018-03-05T10:26:55Z'
+            'service account name: spark'
+            'volumes: spark-init-properties, download-jars-volume,'
+            'download-files-volume, spark-token-2vmlm'
+            'node name: N/A'
+            'start time: N/A'
+            'container images: N/A'
+            'phase: Pending'
+            'status: []'
+            '2018-03-05 11:26:56 INFO  LoggingPodStatusWatcherImpl:54 - State changed,'
+            ' new state:'
+            'pod name: spark-pi-edf2ace37be7353a958b38733a12f8e6-driver'
+            'namespace: default'
+            'Exit code: 0'
         ]
         hook._process_spark_submit_log(log_lines)
         hook.submit()
