This is an automated email from the ASF dual-hosted git repository.

onikolas pushed a commit to branch main
in repository https://gitbox.apache.org/repos/asf/airflow.git


The following commit(s) were added to refs/heads/main by this push:
     new 5f8ceb2ff3 shorten poke intervals on systems tests (#29183)
5f8ceb2ff3 is described below

commit 5f8ceb2ff372894909c814eb45f68dde1581b766
Author: Raphaël Vandon <114772123+vandonr-...@users.noreply.github.com>
AuthorDate: Thu Jan 26 13:34:52 2023 -0800

    shorten poke intervals on systems tests (#29183)
    
    shorten sleep time on athena sys test. 2 minutes faster.
    
    set shorter polling times of fast ops in batch sys test - 4min faster
    
    shorter polling on cloudformation systest - 1min30s faster
    
    shorter polling time on lambda sys test - 1 min faster
    
    shorter wait on quicksight
    
    shorter waits for sagemaker test
    
    shorten wait time for step_functions, 50s faster
    
    shorter wait on dms. This one is important so that we don't miss the 
running status
    
    set sleep times out of doc
---
 tests/system/providers/amazon/aws/example_athena.py         | 5 +++++
 tests/system/providers/amazon/aws/example_batch.py          | 4 ++++
 tests/system/providers/amazon/aws/example_cloudformation.py | 2 ++
 tests/system/providers/amazon/aws/example_dms.py            | 2 ++
 tests/system/providers/amazon/aws/example_lambda.py         | 1 +
 tests/system/providers/amazon/aws/example_quicksight.py     | 1 +
 tests/system/providers/amazon/aws/example_sagemaker.py      | 2 ++
 tests/system/providers/amazon/aws/example_step_functions.py | 1 +
 8 files changed, 18 insertions(+)

diff --git a/tests/system/providers/amazon/aws/example_athena.py 
b/tests/system/providers/amazon/aws/example_athena.py
index e4bc8190f8..3d2487d7b5 100644
--- a/tests/system/providers/amazon/aws/example_athena.py
+++ b/tests/system/providers/amazon/aws/example_athena.py
@@ -102,6 +102,7 @@ with DAG(
         query=query_create_database,
         database=athena_database,
         output_location=f"s3://{s3_bucket}/",
+        sleep_time=1,
     )
 
     create_table = AthenaOperator(
@@ -109,6 +110,7 @@ with DAG(
         query=query_create_table,
         database=athena_database,
         output_location=f"s3://{s3_bucket}/",
+        sleep_time=1,
     )
 
     # [START howto_operator_athena]
@@ -119,6 +121,7 @@ with DAG(
         output_location=f"s3://{s3_bucket}/",
     )
     # [END howto_operator_athena]
+    read_table.sleep_time = 1
 
     # [START howto_sensor_athena]
     await_query = AthenaSensor(
@@ -133,6 +136,7 @@ with DAG(
         database=athena_database,
         output_location=f"s3://{s3_bucket}/",
         trigger_rule=TriggerRule.ALL_DONE,
+        sleep_time=1,
     )
 
     drop_database = AthenaOperator(
@@ -141,6 +145,7 @@ with DAG(
         database=athena_database,
         output_location=f"s3://{s3_bucket}/",
         trigger_rule=TriggerRule.ALL_DONE,
+        sleep_time=1,
     )
 
     delete_s3_bucket = S3DeleteBucketOperator(
diff --git a/tests/system/providers/amazon/aws/example_batch.py 
b/tests/system/providers/amazon/aws/example_batch.py
index 474fa6be8c..1937127fcd 100644
--- a/tests/system/providers/amazon/aws/example_batch.py
+++ b/tests/system/providers/amazon/aws/example_batch.py
@@ -178,6 +178,7 @@ with DAG(
         compute_environment=batch_job_compute_environment_name,
     )
     # [END howto_sensor_batch_compute_environment]
+    wait_for_compute_environment_valid.poke_interval = 1
 
     # [START howto_sensor_batch_job_queue]
     wait_for_job_queue_valid = BatchJobQueueSensor(
@@ -185,6 +186,7 @@ with DAG(
         job_queue=batch_job_queue_name,
     )
     # [END howto_sensor_batch_job_queue]
+    wait_for_job_queue_valid.poke_interval = 1
 
     # [START howto_operator_batch]
     submit_batch_job = BatchOperator(
@@ -209,11 +211,13 @@ with DAG(
     wait_for_compute_environment_disabled = BatchComputeEnvironmentSensor(
         task_id="wait_for_compute_environment_disabled",
         compute_environment=batch_job_compute_environment_name,
+        poke_interval=1,
     )
 
     wait_for_job_queue_modified = BatchJobQueueSensor(
         task_id="wait_for_job_queue_modified",
         job_queue=batch_job_queue_name,
+        poke_interval=1,
     )
 
     wait_for_job_queue_deleted = BatchJobQueueSensor(
diff --git a/tests/system/providers/amazon/aws/example_cloudformation.py 
b/tests/system/providers/amazon/aws/example_cloudformation.py
index a7c5d9d727..fc6e04d422 100644
--- a/tests/system/providers/amazon/aws/example_cloudformation.py
+++ b/tests/system/providers/amazon/aws/example_cloudformation.py
@@ -79,6 +79,7 @@ with DAG(
         stack_name=cloudformation_stack_name,
     )
     # [END howto_sensor_cloudformation_create_stack]
+    wait_for_stack_create.poke_interval = 10
 
     # [START howto_operator_cloudformation_delete_stack]
     delete_stack = CloudFormationDeleteStackOperator(
@@ -94,6 +95,7 @@ with DAG(
         stack_name=cloudformation_stack_name,
     )
     # [END howto_sensor_cloudformation_delete_stack]
+    wait_for_stack_delete.poke_interval = 10
 
     chain(
         # TEST SETUP
diff --git a/tests/system/providers/amazon/aws/example_dms.py 
b/tests/system/providers/amazon/aws/example_dms.py
index fea57af171..8e13e17c32 100644
--- a/tests/system/providers/amazon/aws/example_dms.py
+++ b/tests/system/providers/amazon/aws/example_dms.py
@@ -352,6 +352,7 @@ with DAG(
         replication_task_arn=task_arn,
         target_statuses=["running"],
         termination_statuses=["stopped", "deleting", "failed"],
+        poke_interval=10,
     )
 
     # [START howto_operator_dms_stop_task]
@@ -368,6 +369,7 @@ with DAG(
         replication_task_arn=task_arn,
     )
     # [END howto_sensor_dms_task_completed]
+    await_task_stop.poke_interval = 10
 
     # [START howto_operator_dms_delete_task]
     delete_task = DmsDeleteTaskOperator(
diff --git a/tests/system/providers/amazon/aws/example_lambda.py 
b/tests/system/providers/amazon/aws/example_lambda.py
index e1222d91bf..505681f6b2 100644
--- a/tests/system/providers/amazon/aws/example_lambda.py
+++ b/tests/system/providers/amazon/aws/example_lambda.py
@@ -97,6 +97,7 @@ with models.DAG(
         function_name=lambda_function_name,
     )
     # [END howto_sensor_lambda_function_state]
+    wait_lambda_function_state.poke_interval = 1
 
     # [START howto_operator_invoke_lambda_function]
     invoke_lambda_function = AwsLambdaInvokeFunctionOperator(
diff --git a/tests/system/providers/amazon/aws/example_quicksight.py 
b/tests/system/providers/amazon/aws/example_quicksight.py
index 3095cf01bc..bc3cbd2abd 100644
--- a/tests/system/providers/amazon/aws/example_quicksight.py
+++ b/tests/system/providers/amazon/aws/example_quicksight.py
@@ -190,6 +190,7 @@ with DAG(
         ingestion_id=ingestion_id,
     )
     # [END howto_sensor_quicksight]
+    await_job.poke_interval = 10
 
     delete_bucket = S3DeleteBucketOperator(
         task_id="delete_s3_bucket",
diff --git a/tests/system/providers/amazon/aws/example_sagemaker.py 
b/tests/system/providers/amazon/aws/example_sagemaker.py
index 0d4d498e4c..9506970446 100644
--- a/tests/system/providers/amazon/aws/example_sagemaker.py
+++ b/tests/system/providers/amazon/aws/example_sagemaker.py
@@ -527,6 +527,7 @@ with DAG(
     # [START howto_sensor_sagemaker_auto_ml]
     await_automl = 
SageMakerAutoMLSensor(job_name=test_setup["auto_ml_job_name"], 
task_id="await_auto_ML")
     # [END howto_sensor_sagemaker_auto_ml]
+    await_automl.poke_interval = 10
 
     # [START howto_operator_sagemaker_start_pipeline]
     start_pipeline1 = SageMakerStartPipelineOperator(
@@ -553,6 +554,7 @@ with DAG(
         pipeline_exec_arn=start_pipeline2.output,
     )
     # [END howto_sensor_sagemaker_pipeline]
+    await_pipeline2.poke_interval = 10
 
     # [START howto_operator_sagemaker_experiment]
     create_experiment = SageMakerCreateExperimentOperator(
diff --git a/tests/system/providers/amazon/aws/example_step_functions.py 
b/tests/system/providers/amazon/aws/example_step_functions.py
index e287463547..b33a25e48b 100644
--- a/tests/system/providers/amazon/aws/example_step_functions.py
+++ b/tests/system/providers/amazon/aws/example_step_functions.py
@@ -92,6 +92,7 @@ with DAG(
         task_id="wait_for_execution", execution_arn=execution_arn
     )
     # [END howto_sensor_step_function_execution]
+    wait_for_execution.poke_interval = 1
 
     # [START howto_operator_step_function_get_execution_output]
     get_execution_output = StepFunctionGetExecutionOutputOperator(

Reply via email to