This is an automated email from the ASF dual-hosted git repository.

damccorm pushed a commit to branch users/damccorm/v2-removal
in repository https://gitbox.apache.org/repos/asf/beam.git

commit aa735cec94e520fb895c22694df82f335b83eac6
Author: Danny McCormick <dannymccorm...@google.com>
AuthorDate: Wed Jul 12 16:01:46 2023 -0400

    Finish removing v1/v2 splits
---
 ...stCommit_Python_ValidatesRunner_Dataflow.groovy |  4 ++--
 .../runinference_metrics/pipeline/options.py       |  1 -
 .../apache_beam/io/gcp/bigquery_read_it_test.py    |  5 +---
 .../python/apache_beam/options/pipeline_options.py |  3 +--
 .../runners/dataflow/dataflow_metrics_test.py      |  1 -
 .../transforms/combinefn_lifecycle_test.py         | 22 ------------------
 sdks/python/scripts/run_integration_test.sh        | 27 ----------------------
 sdks/python/test-suites/dataflow/build.gradle      |  8 +++----
 sdks/python/test-suites/dataflow/common.gradle     | 11 ---------
 sdks/python/test-suites/gradle.properties          |  4 ++--
 10 files changed, 10 insertions(+), 76 deletions(-)

diff --git 
a/.test-infra/jenkins/job_PostCommit_Python_ValidatesRunner_Dataflow.groovy 
b/.test-infra/jenkins/job_PostCommit_Python_ValidatesRunner_Dataflow.groovy
index 13e78617d2c..db052a0046c 100644
--- a/.test-infra/jenkins/job_PostCommit_Python_ValidatesRunner_Dataflow.groovy
+++ b/.test-infra/jenkins/job_PostCommit_Python_ValidatesRunner_Dataflow.groovy
@@ -36,8 +36,8 @@ 
PostcommitJobBuilder.postCommitJob('beam_PostCommit_Py_VR_Dataflow', 'Run Python
       steps {
         gradle {
           rootBuildScriptDir(commonJobProperties.checkoutDir)
-          
tasks(':sdks:python:test-suites:dataflow:validatesRunnerBatchTestsV2')
-          
tasks(':sdks:python:test-suites:dataflow:validatesRunnerStreamingTestsV2')
+          tasks(':sdks:python:test-suites:dataflow:validatesRunnerBatchTests')
+          
tasks(':sdks:python:test-suites:dataflow:validatesRunnerStreamingTests')
           switches('-PuseWheelDistribution')
           commonJobProperties.setGradleSwitches(delegate)
         }
diff --git 
a/sdks/python/apache_beam/examples/inference/runinference_metrics/pipeline/options.py
 
b/sdks/python/apache_beam/examples/inference/runinference_metrics/pipeline/options.py
index b32200ed733..1966100430e 100644
--- 
a/sdks/python/apache_beam/examples/inference/runinference_metrics/pipeline/options.py
+++ 
b/sdks/python/apache_beam/examples/inference/runinference_metrics/pipeline/options.py
@@ -61,7 +61,6 @@ def get_pipeline_options(
     flags = [
         "--experiment=worker_accelerator=type:nvidia-tesla-p4;count:1;"\
           "install-nvidia-driver",
-        "--experiment=use_runner_v2",
     ]
     dataflow_options.update({
         "sdk_container_image": cfg.DOCKER_IMG,
diff --git a/sdks/python/apache_beam/io/gcp/bigquery_read_it_test.py 
b/sdks/python/apache_beam/io/gcp/bigquery_read_it_test.py
index 248e0849cdd..0672cd5ea16 100644
--- a/sdks/python/apache_beam/io/gcp/bigquery_read_it_test.py
+++ b/sdks/python/apache_beam/io/gcp/bigquery_read_it_test.py
@@ -792,10 +792,7 @@ class ReadAllBQTests(BigQueryReadIntegrationTests):
   @skip(['PortableRunner', 'FlinkRunner'])
   @pytest.mark.it_postcommit
   def test_read_queries(self):
-    # TODO(https://github.com/apache/beam/issues/20610): Remove experiment when
-    # tests run on r_v2.
-    args = self.args + ["--experiments=use_runner_v2"]
-    with beam.Pipeline(argv=args) as p:
+    with beam.Pipeline(argv=self.args) as p:
       result = (
           p
           | beam.Create([
diff --git a/sdks/python/apache_beam/options/pipeline_options.py 
b/sdks/python/apache_beam/options/pipeline_options.py
index d56b464e71c..d36b335ae29 100644
--- a/sdks/python/apache_beam/options/pipeline_options.py
+++ b/sdks/python/apache_beam/options/pipeline_options.py
@@ -1151,8 +1151,7 @@ class DebugOptions(PipelineOptions):
         help=(
             'Number of threads per worker to use on the runner. If left '
             'unspecified, the runner will compute an appropriate number of '
-            'threads to use. Currently only enabled for DataflowRunner when '
-            'experiment \'use_runner_v2\' is enabled.'))
+            'threads to use.'))
 
   def add_experiment(self, experiment):
     # pylint: disable=access-member-before-definition
diff --git a/sdks/python/apache_beam/runners/dataflow/dataflow_metrics_test.py 
b/sdks/python/apache_beam/runners/dataflow/dataflow_metrics_test.py
index 06e1585ffc4..86e71f9c1ed 100644
--- a/sdks/python/apache_beam/runners/dataflow/dataflow_metrics_test.py
+++ b/sdks/python/apache_beam/runners/dataflow/dataflow_metrics_test.py
@@ -503,7 +503,6 @@ class TestDataflowMetrics(unittest.TestCase):
         self.ONLY_COUNTERS_LIST)
 
     pipeline_options = PipelineOptions([
-        '--experiments=use_runner_v2',
         '--experiments=use_portable_job_submission',
         '--temp_location=gs://any-location/temp',
         '--project=dummy_project',
diff --git a/sdks/python/apache_beam/transforms/combinefn_lifecycle_test.py 
b/sdks/python/apache_beam/transforms/combinefn_lifecycle_test.py
index 6834c1d6174..bb2f7a07567 100644
--- a/sdks/python/apache_beam/transforms/combinefn_lifecycle_test.py
+++ b/sdks/python/apache_beam/transforms/combinefn_lifecycle_test.py
@@ -36,39 +36,17 @@ from apache_beam.transforms.combinefn_lifecycle_pipeline 
import run_combine
 from apache_beam.transforms.combinefn_lifecycle_pipeline import run_pardo
 
 
-def skip_unless_v2(fn):
-  @wraps(fn)
-  def wrapped(*args, **kwargs):
-    self = args[0]
-    options = self.pipeline.get_pipeline_options()
-    standard_options = options.view_as(StandardOptions)
-    experiments = options.view_as(DebugOptions).experiments or []
-
-    if 'DataflowRunner' in standard_options.runner and \
-       'use_runner_v2' not in experiments:
-      self.skipTest(
-          'CombineFn.setup and CombineFn.teardown are not supported. '
-          'Please use Dataflow Runner V2.')
-    else:
-      return fn(*args, **kwargs)
-
-  return wrapped
-
-
 @pytest.mark.it_validatesrunner
 class CombineFnLifecycleTest(unittest.TestCase):
   def setUp(self):
     self.pipeline = TestPipeline(is_integration_test=True)
 
-  @skip_unless_v2
   def test_combine(self):
     run_combine(self.pipeline)
 
-  @skip_unless_v2
   def test_non_liftable_combine(self):
     run_combine(self.pipeline, lift_combiners=False)
 
-  @skip_unless_v2
   def test_combining_value_state(self):
     if ('DataflowRunner' in self.pipeline.get_pipeline_options().view_as(
         StandardOptions).runner):
diff --git a/sdks/python/scripts/run_integration_test.sh 
b/sdks/python/scripts/run_integration_test.sh
index 508d9f50421..4f29ed5a4ad 100755
--- a/sdks/python/scripts/run_integration_test.sh
+++ b/sdks/python/scripts/run_integration_test.sh
@@ -133,16 +133,6 @@ case $key in
         shift # past argument
         shift # past value
         ;;
-    --runner_v2)
-        RUNNER_V2="$2"
-        shift # past argument
-        shift # past value
-        ;;
-    --disable_runner_v2)
-        DISABLE_RUNNER_V2="$2"
-        shift # past argument
-        shift # past value
-        ;;
     --kms_key_name)
         KMS_KEY_NAME="$2"
         shift # past argument
@@ -244,23 +234,6 @@ if [[ -z $PIPELINE_OPTS ]]; then
     opts+=("--streaming")
   fi
 
-  # Add --runner_v2 if provided
-  if [[ "$RUNNER_V2" = true ]]; then
-    opts+=("--experiments=use_runner_v2")
-    if [[ "$STREAMING" = true ]]; then
-      # Dataflow Runner V2 only supports streaming engine.
-      opts+=("--enable_streaming_engine")
-    else
-      opts+=("--experiments=beam_fn_api")
-    fi
-
-  fi
-
-  # Add --disable_runner_v2 if provided
-  if [[ "$DISABLE_RUNNER_V2" = true ]]; then
-    opts+=("--experiments=disable_runner_v2")
-  fi
-
   if [[ ! -z "$KMS_KEY_NAME" ]]; then
     opts+=(
       "--kms_key_name=$KMS_KEY_NAME"
diff --git a/sdks/python/test-suites/dataflow/build.gradle 
b/sdks/python/test-suites/dataflow/build.gradle
index 50d35774ffc..08f03e207f3 100644
--- a/sdks/python/test-suites/dataflow/build.gradle
+++ b/sdks/python/test-suites/dataflow/build.gradle
@@ -42,14 +42,14 @@ task chicagoTaxiExample {
   }
 }
 
-task validatesRunnerBatchTestsV2 {
-  getVersionsAsList('dataflow_validates_runner_batch_tests_V2').each {
+task validatesRunnerBatchTests {
+  getVersionsAsList('dataflow_validates_runner_batch_tests').each {
     
dependsOn.add(":sdks:python:test-suites:dataflow:py${getVersionSuffix(it)}:validatesRunnerBatchTests")
   }
 }
 
-task validatesRunnerStreamingTestsV2 {
-  getVersionsAsList('dataflow_validates_runner_streaming_tests_V2').each {
+task validatesRunnerStreamingTests {
+  getVersionsAsList('dataflow_validates_runner_streaming_tests').each {
     
dependsOn.add(":sdks:python:test-suites:dataflow:py${getVersionSuffix(it)}:validatesRunnerStreamingTests")
   }
 }
diff --git a/sdks/python/test-suites/dataflow/common.gradle 
b/sdks/python/test-suites/dataflow/common.gradle
index 44257b09c01..12c44013185 100644
--- a/sdks/python/test-suites/dataflow/common.gradle
+++ b/sdks/python/test-suites/dataflow/common.gradle
@@ -181,7 +181,6 @@ task examples {
     def argMap = [
             "test_opts": testOpts + ["--numprocesses=8", "--dist=loadfile"],
             "sdk_location": project.ext.sdkLocation,
-            "runner_v2": "true",
             "suite": "postCommitIT-df${pythonVersionSuffix}-xdist",
             "collect": "examples_postcommit and not no_xdist and not 
sickbay_dataflow"
     ]
@@ -197,7 +196,6 @@ task examples {
     def argMap = [
             "test_opts": testOpts,
             "sdk_location": project.ext.sdkLocation,
-            "runner_v2": "true",
             "suite": "postCommitIT-df${pythonVersionSuffix}-no-xdist",
             "collect": "examples_postcommit and no_xdist and not 
sickbay_dataflow"
     ]
@@ -220,13 +218,6 @@ task validatesRunnerBatchTests {
         "collect": "it_validatesrunner and not no_sickbay_batch"
     ]
 
-    if (project.hasProperty('useRunnerV2')) {
-      argMap.put("runner_v2", "true")
-    }
-
-    if (project.hasProperty('disableRunnerV2')) {
-      argMap.put("disable_runner_v2", "true")
-    }
     def cmdArgs = mapToArgString(argMap)
     exec {
       executable 'sh'
@@ -247,7 +238,6 @@ task validatesRunnerStreamingTests {
                 "sdk_location": project.ext.sdkLocation,
                 "suite": 
"validatesRunnerStreamingTests-df${pythonVersionSuffix}-xdist",
                 "collect": "it_validatesrunner and not no_sickbay_streaming 
and not no_xdist",
-                "runner_v2": "true",
                 ]
 
     def cmdArgs = mapToArgString(argMap)
@@ -265,7 +255,6 @@ task validatesRunnerStreamingTests {
                 "sdk_location": project.ext.sdkLocation,
                 "suite": 
"validatesRunnerStreamingTests-df${pythonVersionSuffix}-noxdist",
                 "collect": "it_validatesrunner and not no_sickbay_streaming 
and no_xdist",
-                "runner_v2": "true",
                 ]
 
     def cmdArgs = mapToArgString(argMap)
diff --git a/sdks/python/test-suites/gradle.properties 
b/sdks/python/test-suites/gradle.properties
index af3b16d2e30..72fc651733d 100644
--- a/sdks/python/test-suites/gradle.properties
+++ b/sdks/python/test-suites/gradle.properties
@@ -28,8 +28,8 @@ dataflow_mongodbio_it_task_py_versions=3.8
 dataflow_chicago_taxi_example_task_py_versions=3.8
 
 # TODO: Enable following tests after making sure we have enough capacity.
-dataflow_validates_runner_batch_tests_V2=3.8,3.11
-dataflow_validates_runner_streaming_tests_V2=3.8,3.11
+dataflow_validates_runner_batch_tests=3.8,3.11
+dataflow_validates_runner_streaming_tests=3.8,3.11
 dataflow_examples_postcommit_py_versions=3.11
 # TFX_BSL is not yet supported on Python 3.10.
 dataflow_cloudml_benchmark_tests_py_versions=3.9

Reply via email to the mailing list.