This is an automated email from the ASF dual-hosted git repository.

altay pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/beam.git


The following commit(s) were added to refs/heads/master by this push:
     new 37b76b6  Add aliases for machine_type and disk_type flags to match existing documentation (#8603)
37b76b6 is described below

commit 37b76b67b5d0cbd92e6a3fadee67f9fcf93cbc5d
Author: Ahmet Altay <aal...@gmail.com>
AuthorDate: Tue Jun 11 14:22:42 2019 -0700

    Add aliases for machine_type and disk_type flags to match existing documentation (#8603)
    
    * Add aliases for machine_type and disk_type flags to match existing documentation
---
 sdks/python/apache_beam/options/pipeline_options.py      |  4 ++--
 sdks/python/apache_beam/options/pipeline_options_test.py | 13 +++++++++++++
 2 files changed, 15 insertions(+), 2 deletions(-)
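
The aliases rely on stock argparse behavior: add_argument() accepts several
option strings for a single dest, and any of them writes to the same
attribute. A minimal sketch of that mechanism on its own, outside Beam (the
machine type value below is illustrative only):

    import argparse

    parser = argparse.ArgumentParser()
    # Two option strings, one destination: both flags set 'machine_type'.
    parser.add_argument(
        '--worker_machine_type', '--machine_type',
        dest='machine_type',
        default=None)

    args = parser.parse_args(['--machine_type', 'n1-standard-4'])
    print(args.machine_type)  # n1-standard-4
    args = parser.parse_args(['--worker_machine_type', 'n1-standard-4'])
    print(args.machine_type)  # n1-standard-4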

diff --git a/sdks/python/apache_beam/options/pipeline_options.py b/sdks/python/apache_beam/options/pipeline_options.py
index fde1a7e..1155018 100644
--- a/sdks/python/apache_beam/options/pipeline_options.py
+++ b/sdks/python/apache_beam/options/pipeline_options.py
@@ -558,7 +558,7 @@ class WorkerOptions(PipelineOptions):
         help=
         ('If and how to autoscale the workerpool.'))
     parser.add_argument(
-        '--worker_machine_type',
+        '--worker_machine_type', '--machine_type',
         dest='machine_type',
         default=None,
         help=('Machine type to create Dataflow worker VMs as. See '
@@ -574,7 +574,7 @@ class WorkerOptions(PipelineOptions):
        ('Remote worker disk size, in gigabytes, or 0 to use the default size. '
          'If not set, the Dataflow service will use a reasonable default.'))
     parser.add_argument(
-        '--worker_disk_type',
+        '--worker_disk_type', '--disk_type',
         dest='disk_type',
         default=None,
         help=('Specifies what type of persistent disk should be used.'))
diff --git a/sdks/python/apache_beam/options/pipeline_options_test.py b/sdks/python/apache_beam/options/pipeline_options_test.py
index 5c51725..e082c8d 100644
--- a/sdks/python/apache_beam/options/pipeline_options_test.py
+++ b/sdks/python/apache_beam/options/pipeline_options_test.py
@@ -28,6 +28,7 @@ from apache_beam.options.pipeline_options import DebugOptions
 from apache_beam.options.pipeline_options import PipelineOptions
 from apache_beam.options.pipeline_options import ProfilingOptions
 from apache_beam.options.pipeline_options import TypeOptions
+from apache_beam.options.pipeline_options import WorkerOptions
 from apache_beam.options.value_provider import RuntimeValueProvider
 from apache_beam.options.value_provider import StaticValueProvider
 from apache_beam.transforms.display import DisplayData
@@ -252,6 +253,18 @@ class PipelineOptionsTest(unittest.TestCase):
     options = PipelineOptions(flags=[''])
     self.assertEqual(options.get_all_options()['experiments'], None)
 
+  def test_worker_options(self):
+    options = PipelineOptions(['--machine_type', 'abc', '--disk_type', 'def'])
+    worker_options = options.view_as(WorkerOptions)
+    self.assertEqual(worker_options.machine_type, 'abc')
+    self.assertEqual(worker_options.disk_type, 'def')
+
+    options = PipelineOptions(
+        ['--worker_machine_type', 'abc', '--worker_disk_type', 'def'])
+    worker_options = options.view_as(WorkerOptions)
+    self.assertEqual(worker_options.machine_type, 'abc')
+    self.assertEqual(worker_options.disk_type, 'def')
+
   def test_option_modifications_are_shared_between_views(self):
     pipeline_options = PipelineOptions([
         '--mock_option', 'value', '--mock_flag',
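
With the aliases in place, either spelling should reach WorkerOptions. A
short usage sketch mirroring the new test (the machine type value is
illustrative only):

    from apache_beam.options.pipeline_options import PipelineOptions
    from apache_beam.options.pipeline_options import WorkerOptions

    # '--machine_type' now parses as an alias of '--worker_machine_type'.
    options = PipelineOptions(['--machine_type', 'n1-standard-4'])
    print(options.view_as(WorkerOptions).machine_type)  # n1-standard-4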
