This is an automated email from the ASF dual-hosted git repository.

lcwik pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/beam.git


The following commit(s) were added to refs/heads/master by this push:
     new 9ac6711  [BEAM-9846] Clean up references to unused native BQ source/sink experiment in Dataflow
     new 837f4a3  Merge pull request #11562 from lukecwik/beam9846
9ac6711 is described below

commit 9ac67111921e53e4c4ca769e821f1d81132572a5
Author: Luke Cwik <lc...@google.com>
AuthorDate: Tue Apr 28 12:42:16 2020 -0700

    [BEAM-9846] Clean up references to unused native BQ source/sink experiment in Dataflow
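    For reference, the removed tests opted into the native BigQuery path by placing two
    experiment flags on the pipeline options. A minimal sketch of that pattern is below;
    it is not part of this commit and assumes only Beam's ExperimentalOptions interface
    plus the experiment names that appear in the removed lines.

        import java.util.Arrays;
        import org.apache.beam.sdk.options.ExperimentalOptions;
        import org.apache.beam.sdk.options.PipelineOptionsFactory;

        public class ExperimentSketch {
          public static void main(String[] args) {
            // Parse command-line args into options that expose setExperiments().
            ExperimentalOptions options =
                PipelineOptionsFactory.fromArgs(args).as(ExperimentalOptions.class);
            // Experiment names taken from the code removed by this commit.
            options.setExperiments(
                Arrays.asList("enable_custom_bigquery_sink", "enable_custom_bigquery_source"));
          }
        }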
---
 .../beam/sdk/io/gcp/bigquery/BigQueryIOReadIT.java | 27 +++----------
 .../sdk/io/gcp/bigquery/BigQueryToTableIT.java     | 44 ----------------------
 2 files changed, 5 insertions(+), 66 deletions(-)

diff --git a/sdks/java/io/google-cloud-platform/src/test/java/org/apache/beam/sdk/io/gcp/bigquery/BigQueryIOReadIT.java b/sdks/java/io/google-cloud-platform/src/test/java/org/apache/beam/sdk/io/gcp/bigquery/BigQueryIOReadIT.java
index cdbc5f6..40d6de2 100644
--- a/sdks/java/io/google-cloud-platform/src/test/java/org/apache/beam/sdk/io/gcp/bigquery/BigQueryIOReadIT.java
+++ b/sdks/java/io/google-cloud-platform/src/test/java/org/apache/beam/sdk/io/gcp/bigquery/BigQueryIOReadIT.java
@@ -29,7 +29,6 @@ import org.apache.beam.sdk.testing.TestPipeline;
 import org.apache.beam.sdk.testing.TestPipelineOptions;
 import org.apache.beam.sdk.transforms.Count;
 import org.apache.beam.sdk.values.PCollection;
-import org.apache.beam.vendor.guava.v26_0_jre.com.google.common.collect.ImmutableList;
 import org.apache.beam.vendor.guava.v26_0_jre.com.google.common.collect.ImmutableMap;
 import org.junit.Test;
 import org.junit.runner.RunWith;
@@ -68,17 +67,13 @@ public class BigQueryIOReadIT {
     void setNumRecords(long numRecords);
   }
 
-  private void setupTestEnvironment(String recordSize, boolean enableCustomBigquery) {
+  private void setupTestEnvironment(String recordSize) {
     PipelineOptionsFactory.register(BigQueryIOReadOptions.class);
     options = TestPipeline.testingPipelineOptions().as(BigQueryIOReadOptions.class);
     options.setNumRecords(numOfRecords.get(recordSize));
     options.setTempLocation(options.getTempRoot() + "/temp-it/");
     project = TestPipeline.testingPipelineOptions().as(GcpOptions.class).getProject();
     options.setInputTable(project + ":" + datasetId + "." + tablePrefix + recordSize);
-    if (enableCustomBigquery) {
-      options.setExperiments(
-          ImmutableList.of("enable_custom_bigquery_sink", "enable_custom_bigquery_source"));
-    }
   }
 
   private void runBigQueryIOReadPipeline() {
@@ -93,37 +88,25 @@ public class BigQueryIOReadIT {
 
   @Test
   public void testBigQueryReadEmpty() throws Exception {
-    setupTestEnvironment("empty", false);
+    setupTestEnvironment("empty");
     runBigQueryIOReadPipeline();
   }
 
   @Test
   public void testBigQueryRead1M() throws Exception {
-    setupTestEnvironment("1M", false);
+    setupTestEnvironment("1M");
     runBigQueryIOReadPipeline();
   }
 
   @Test
   public void testBigQueryRead1G() throws Exception {
-    setupTestEnvironment("1G", false);
+    setupTestEnvironment("1G");
     runBigQueryIOReadPipeline();
   }
 
   @Test
   public void testBigQueryRead1T() throws Exception {
-    setupTestEnvironment("1T", false);
-    runBigQueryIOReadPipeline();
-  }
-
-  @Test
-  public void testBigQueryReadEmptyCustom() throws Exception {
-    setupTestEnvironment("empty", true);
-    runBigQueryIOReadPipeline();
-  }
-
-  @Test
-  public void testBigQueryRead1TCustom() throws Exception {
-    setupTestEnvironment("1T", true);
+    setupTestEnvironment("1T");
     runBigQueryIOReadPipeline();
   }
 }
diff --git a/sdks/java/io/google-cloud-platform/src/test/java/org/apache/beam/sdk/io/gcp/bigquery/BigQueryToTableIT.java b/sdks/java/io/google-cloud-platform/src/test/java/org/apache/beam/sdk/io/gcp/bigquery/BigQueryToTableIT.java
index 79266d7..0cdd987 100644
--- a/sdks/java/io/google-cloud-platform/src/test/java/org/apache/beam/sdk/io/gcp/bigquery/BigQueryToTableIT.java
+++ b/sdks/java/io/google-cloud-platform/src/test/java/org/apache/beam/sdk/io/gcp/bigquery/BigQueryToTableIT.java
@@ -45,7 +45,6 @@ import org.apache.beam.sdk.options.Description;
 import org.apache.beam.sdk.options.ExperimentalOptions;
 import org.apache.beam.sdk.options.PipelineOptionsFactory;
 import org.apache.beam.sdk.options.Validation;
-import org.apache.beam.sdk.testing.DataflowPortabilityApiUnsupported;
 import org.apache.beam.sdk.testing.TestPipeline;
 import org.apache.beam.sdk.testing.TestPipelineOptions;
 import org.apache.beam.sdk.transforms.Reshuffle;
@@ -59,7 +58,6 @@ import org.joda.time.Duration;
 import org.junit.AfterClass;
 import org.junit.BeforeClass;
 import org.junit.Test;
-import org.junit.experimental.categories.Category;
 import org.junit.runner.RunWith;
 import org.junit.runners.JUnit4;
 import org.slf4j.Logger;
@@ -324,46 +322,4 @@ public class BigQueryToTableIT {
 
     this.verifyStandardQueryRes(outputTable);
   }
-
-  @Test
-  @Category(DataflowPortabilityApiUnsupported.class)
-  public void testNewTypesQueryWithoutReshuffleWithCustom() throws Exception {
-    final String outputTable =
-        project + ":" + BIG_QUERY_DATASET_ID + "." + "testNewTypesQueryWithoutReshuffleWithCustom";
-    BigQueryToTableOptions options = this.setupNewTypesQueryTest(outputTable);
-    options.setExperiments(
-        ImmutableList.of("enable_custom_bigquery_sink", "enable_custom_bigquery_source"));
-
-    this.runBigQueryToTablePipeline(options);
-
-    this.verifyNewTypesQueryRes(outputTable);
-  }
-
-  @Test
-  @Category(DataflowPortabilityApiUnsupported.class)
-  public void testLegacyQueryWithoutReshuffleWithCustom() throws Exception {
-    final String outputTable =
-        project + ":" + BIG_QUERY_DATASET_ID + "." + "testLegacyQueryWithoutReshuffleWithCustom";
-    BigQueryToTableOptions options = this.setupLegacyQueryTest(outputTable);
-    options.setExperiments(
-        ImmutableList.of("enable_custom_bigquery_sink", "enable_custom_bigquery_source"));
-
-    this.runBigQueryToTablePipeline(options);
-
-    this.verifyLegacyQueryRes(outputTable);
-  }
-
-  @Test
-  @Category(DataflowPortabilityApiUnsupported.class)
-  public void testStandardQueryWithoutReshuffleWithCustom() throws Exception {
-    final String outputTable =
-        project + ":" + BIG_QUERY_DATASET_ID + "." + "testStandardQueryWithoutReshuffleWithCustom";
-    BigQueryToTableOptions options = this.setupStandardQueryTest(outputTable);
-    options.setExperiments(
-        ImmutableList.of("enable_custom_bigquery_sink", "enable_custom_bigquery_source"));
-
-    this.runBigQueryToTablePipeline(options);
-
-    this.verifyStandardQueryRes(outputTable);
-  }
 }
