This is an automated email from the ASF dual-hosted git repository.

pabloem pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/beam.git


The following commit(s) were added to refs/heads/master by this push:
     new 375bd3a  Fixing all lint raising-format-tuple warnings.
375bd3a is described below

commit 375bd3a6a53ba3ba7c965278dcb322875e1b4dca
Author: Pablo <pabl...@google.com>
AuthorDate: Thu Jun 14 15:30:55 2018 -0700

    Fixing all lint raising-format-tuple warnings.
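
    For context: pylint's raising-format-tuple warning (W0715) fires when an
    exception constructor is passed a format string plus separate arguments,
    logging-style. Exception constructors do not interpolate '%' placeholders,
    so the extra arguments just become additional exception args and the
    message is left unformatted. A minimal before/after sketch of the pattern
    applied throughout this commit (illustrative values, not taken from the
    diff below):

        # Before: triggers raising-format-tuple. The '%s' is never
        # filled in; str(exc) is "('Unknown codec: %s', 'snappy')".
        raise ValueError('Unknown codec: %s', 'snappy')

        # After: interpolate explicitly before raising, so that
        # str(exc) is "Unknown codec: snappy".
        raise ValueError('Unknown codec: %s' % 'snappy')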
---
 sdks/python/apache_beam/io/avroio.py               | 10 ++---
 sdks/python/apache_beam/io/filebasedsink.py        |  3 +-
 sdks/python/apache_beam/io/filebasedsource_test.py |  2 +-
 sdks/python/apache_beam/io/gcp/bigquery.py         |  4 +-
 sdks/python/apache_beam/io/gcp/gcsfilesystem.py    |  8 ++--
 .../python/apache_beam/io/hadoopfilesystem_test.py |  3 +-
 sdks/python/apache_beam/io/iobase.py               |  2 +-
 sdks/python/apache_beam/io/restriction_trackers.py | 18 ++++----
 sdks/python/apache_beam/io/source_test_utils.py    | 52 +++++++++++-----------
 .../apache_beam/io/source_test_utils_test.py       |  2 +-
 sdks/python/apache_beam/io/textio.py               |  6 +--
 sdks/python/apache_beam/pipeline.py                | 15 ++++---
 sdks/python/apache_beam/pipeline_test.py           |  2 +-
 sdks/python/apache_beam/runners/common.py          |  8 ++--
 .../runners/dataflow/dataflow_runner.py            |  3 +-
 .../runners/dataflow/internal/apiclient.py         |  4 +-
 .../runners/direct/transform_evaluator.py          |  2 +-
 .../apache_beam/runners/portability/stager.py      |  4 +-
 sdks/python/apache_beam/utils/timestamp.py         |  4 +-
 19 files changed, 76 insertions(+), 76 deletions(-)

diff --git a/sdks/python/apache_beam/io/avroio.py b/sdks/python/apache_beam/io/avroio.py
index 30fc890..de883e1 100644
--- a/sdks/python/apache_beam/io/avroio.py
+++ b/sdks/python/apache_beam/io/avroio.py
@@ -195,8 +195,8 @@ class _AvroUtils(object):
                                             datafile.META_SCHEMA, decoder)
     if header.get('magic') != datafile.MAGIC:
       raise ValueError('Not an Avro file. File header should start with %s but'
-                       'started with %s instead.', datafile.MAGIC,
-                       header.get('magic'))
+                       'started with %s instead.'
+                       % (datafile.MAGIC, header.get('magic')))
 
     meta = header['meta']
 
@@ -236,8 +236,8 @@ class _AvroUtils(object):
     sync_marker = decoder.read(len(expected_sync_marker))
     if sync_marker != expected_sync_marker:
       raise ValueError('Unexpected sync marker (actual "%s" vs expected "%s"). '
-                       'Maybe the underlying avro file is corrupted?',
-                       sync_marker, expected_sync_marker)
+                       'Maybe the underlying avro file is corrupted?'
+                       % (sync_marker, expected_sync_marker))
     size = f.tell() - offset
     return _AvroBlock(block_bytes, num_records, codec, schema, offset, size)
 
@@ -314,7 +314,7 @@ class _AvroBlock(object):
       avroio.BinaryDecoder(cStringIO.StringIO(data[-4:])).check_crc32(result)
       return result
     else:
-      raise ValueError('Unknown codec: %r', codec)
+      raise ValueError('Unknown codec: %r' % codec)
 
   def num_records(self):
     return self._num_records
diff --git a/sdks/python/apache_beam/io/filebasedsink.py b/sdks/python/apache_beam/io/filebasedsink.py
index 126eb86..4c587b9 100644
--- a/sdks/python/apache_beam/io/filebasedsink.py
+++ b/sdks/python/apache_beam/io/filebasedsink.py
@@ -163,8 +163,7 @@ class FileBasedSink(iobase.Sink):
       if base_path == new_base_path:
         raise ValueError('Cannot create a temporary directory for root path '
                          'prefix %s. Please specify a file path prefix with '
-                         'at least two components.',
-                         file_path_prefix)
+                         'at least two components.' % file_path_prefix)
     path_components = [base_path,
                        'beam-temp-' + last_component + '-' + uuid.uuid1().hex]
     return FileSystems.join(*path_components)
diff --git a/sdks/python/apache_beam/io/filebasedsource_test.py b/sdks/python/apache_beam/io/filebasedsource_test.py
index 0110c3f..c567b24 100644
--- a/sdks/python/apache_beam/io/filebasedsource_test.py
+++ b/sdks/python/apache_beam/io/filebasedsource_test.py
@@ -99,7 +99,7 @@ def write_data(
       elif eol == EOL.LF_WITH_NOTHING_AT_LAST_LINE:
         sep = '' if i == (num_lines - 1) else sep_values[0]
       else:
-        raise ValueError('Received unknown value %s for eol.', eol)
+        raise ValueError('Received unknown value %s for eol.' % eol)
 
       f.write(data + sep)
 
diff --git a/sdks/python/apache_beam/io/gcp/bigquery.py b/sdks/python/apache_beam/io/gcp/bigquery.py
index 78955af..41d0833 100644
--- a/sdks/python/apache_beam/io/gcp/bigquery.py
+++ b/sdks/python/apache_beam/io/gcp/bigquery.py
@@ -715,7 +715,7 @@ class BigQueryWriter(dataflow_io.NativeSinkWriter):
       self.rows_buffer = []
       if not passed:
         raise RuntimeError('Could not successfully insert rows to BigQuery'
-                           ' table [%s:%s.%s]. Errors: %s'%
+                           ' table [%s:%s.%s]. Errors: %s' %
                            (self.project_id, self.dataset_id,
                             self.table_id, errors))
 
@@ -1177,7 +1177,7 @@ class BigQueryWrapper(object):
       elif value is None:
         if not field.mode == 'NULLABLE':
           raise ValueError('Received \'None\' as the value for the field %s '
-                           'but the field is not NULLABLE.', field.name)
+                           'but the field is not NULLABLE.' % field.name)
         result[field.name] = None
       else:
         result[field.name] = self._convert_cell_value_to_dict(value, field)
diff --git a/sdks/python/apache_beam/io/gcp/gcsfilesystem.py b/sdks/python/apache_beam/io/gcp/gcsfilesystem.py
index 60a0420..b861446 100644
--- a/sdks/python/apache_beam/io/gcp/gcsfilesystem.py
+++ b/sdks/python/apache_beam/io/gcp/gcsfilesystem.py
@@ -51,7 +51,7 @@ class GCSFileSystem(FileSystem):
     Returns: full path after combining all the passed components
     """
     if not basepath.startswith(GCSFileSystem.GCS_PREFIX):
-      raise ValueError('Basepath %r must be GCS path.', basepath)
+      raise ValueError('Basepath %r must be GCS path.' % basepath)
     path = basepath
     for p in paths:
       path = path.rstrip('/') + '/' + p.lstrip('/')
@@ -72,7 +72,7 @@ class GCSFileSystem(FileSystem):
     """
     path = path.strip()
     if not path.startswith(GCSFileSystem.GCS_PREFIX):
-      raise ValueError('Path %r must be GCS path.', path)
+      raise ValueError('Path %r must be GCS path.' % path)
 
     prefix_len = len(GCSFileSystem.GCS_PREFIX)
     last_sep = path[prefix_len:].rfind('/')
@@ -84,7 +84,7 @@ class GCSFileSystem(FileSystem):
     elif last_sep < 0:
       return (path, '')
     else:
-      raise ValueError('Invalid path: %s', path)
+      raise ValueError('Invalid path: %s' % path)
 
   def mkdirs(self, path):
     """Recursively create directories for the provided path.
@@ -177,7 +177,7 @@ class GCSFileSystem(FileSystem):
       """Recursively copy the file tree from the source to the destination
       """
       if not destination.startswith(GCSFileSystem.GCS_PREFIX):
-        raise ValueError('Destination %r must be GCS path.', destination)
+        raise ValueError('Destination %r must be GCS path.' % destination)
       # Use copy_tree if the path ends with / as it is a directory
       if source.endswith('/'):
         gcsio.GcsIO().copytree(source, destination)
diff --git a/sdks/python/apache_beam/io/hadoopfilesystem_test.py b/sdks/python/apache_beam/io/hadoopfilesystem_test.py
index 59cd0b1..d35b8d5 100644
--- a/sdks/python/apache_beam/io/hadoopfilesystem_test.py
+++ b/sdks/python/apache_beam/io/hadoopfilesystem_test.py
@@ -121,7 +121,8 @@ class FakeHdfs(object):
     fs = self.status(path, strict=False)
     if (fs is not None and
         fs[hdfs._FILE_STATUS_TYPE] == hdfs._FILE_STATUS_TYPE_FILE):
-      raise ValueError('list must be called on a directory, got file: %s', path)
+      raise ValueError(
+          'list must be called on a directory, got file: %s' % path)
 
     result = []
     for file in self.files.itervalues():
diff --git a/sdks/python/apache_beam/io/iobase.py b/sdks/python/apache_beam/io/iobase.py
index faf33b5..612bcba 100644
--- a/sdks/python/apache_beam/io/iobase.py
+++ b/sdks/python/apache_beam/io/iobase.py
@@ -930,7 +930,7 @@ class Write(ptransform.PTransform):
       return pcoll | self.sink
     else:
       raise ValueError('A sink must inherit iobase.Sink, iobase.NativeSink, '
-                       'or be a PTransform. Received : %r', self.sink)
+                       'or be a PTransform. Received : %r' % self.sink)
 
 
 class WriteImpl(ptransform.PTransform):
diff --git a/sdks/python/apache_beam/io/restriction_trackers.py b/sdks/python/apache_beam/io/restriction_trackers.py
index 3e49e26..8aeecba 100644
--- a/sdks/python/apache_beam/io/restriction_trackers.py
+++ b/sdks/python/apache_beam/io/restriction_trackers.py
@@ -29,7 +29,7 @@ class OffsetRange(object):
     if start > stop:
       raise ValueError(
           'Start offset must be not be larger than the stop offset. '
-          'Received %d and %d respectively.', start, stop)
+          'Received %d and %d respectively.' % (start, stop))
     self.start = start
     self.stop = stop
 
@@ -78,10 +78,10 @@ class OffsetRestrictionTracker(RestrictionTracker):
       if self._last_claim_attempt < self._range.stop - 1:
         raise ValueError(
+            'OffsetRestrictionTracker is not done since work in range [%s, %s) '
-            'has not been claimed.',
-            self._last_claim_attempt if self._last_claim_attempt is not None
-            else self._range.start,
-            self._range.stop)
+            'has not been claimed.'
+            % (self._last_claim_attempt if self._last_claim_attempt is not None
+               else self._range.start,
+               self._range.stop))
 
   def current_restriction(self):
     with self._lock:
@@ -100,15 +100,15 @@ class OffsetRestrictionTracker(RestrictionTracker):
       if self._last_claim_attempt and position <= self._last_claim_attempt:
         raise ValueError(
             'Positions claimed should strictly increase. Trying to claim '
-            'position %d while last claim attempt was %d.',
-            position, self._last_claim_attempt)
+            'position %d while last claim attempt was %d.'
+            % (position, self._last_claim_attempt))
 
       self._last_claim_attempt = position
       if position < self._range.start:
         raise ValueError(
             'Position to be claimed cannot be smaller than the start position '
-            'of the range. Tried to claim position %r for the range [%r, %r)',
-            position, self._range.start, self._range.stop)
+            'of the range. Tried to claim position %r for the range [%r, %r)'
+            % (position, self._range.start, self._range.stop))
 
       if position >= self._range.start and position < self._range.stop:
         self._current_position = position
diff --git a/sdks/python/apache_beam/io/source_test_utils.py b/sdks/python/apache_beam/io/source_test_utils.py
index e4d2f6f..05e6e9c 100644
--- a/sdks/python/apache_beam/io/source_test_utils.py
+++ b/sdks/python/apache_beam/io/source_test_utils.py
@@ -138,7 +138,7 @@ def assert_sources_equal_reference_source(reference_source_info, sources_info):
     raise ValueError('reference_source_info must a three-tuple where first'
                      'item of the tuple gives a '
                      'iobase.BoundedSource. Received: %r'
-                     , reference_source_info)
+                     % reference_source_info)
   reference_records = read_from_source(
       *reference_source_info)
 
@@ -152,22 +152,22 @@ def assert_sources_equal_reference_source(reference_source_info, sources_info):
       raise ValueError('source_info must a three tuple where first'
                        'item of the tuple gives a '
                        'iobase.BoundedSource. Received: %r'
-                       , source_info)
+                       % source_info)
     if (type(reference_source_info[0].default_output_coder()) !=
         type(source_info[0].default_output_coder())):
       raise ValueError(
           'Reference source %r and the source %r must use the same coder. '
-          'They are using %r and %r respectively instead.',
-          reference_source_info[0], source_info[0],
-          type(reference_source_info[0].default_output_coder()),
-          type(source_info[0].default_output_coder()))
+          'They are using %r and %r respectively instead.'
+          % (reference_source_info[0], source_info[0],
+             type(reference_source_info[0].default_output_coder()),
+             type(source_info[0].default_output_coder())))
     source_records.extend(read_from_source(*source_info))
 
   if len(reference_records) != len(source_records):
     raise ValueError(
         'Reference source must produce the same number of records as the '
-        'list of sources. Number of records were %d and %d instead.',
-        len(reference_records), len(source_records))
+        'list of sources. Number of records were %d and %d instead.'
+        % (len(reference_records), len(source_records)))
 
   if sorted(reference_records) != sorted(source_records):
     raise ValueError(
@@ -202,7 +202,7 @@ def assert_reentrant_reads_succeed(source_info):
   if len(expected_values) < 2:
     raise ValueError('Source is too trivial since it produces only %d '
                      'values. Please give a source that reads at least 2 '
-                     'values.', len(expected_values))
+                     'values.' % len(expected_values))
 
   for i in range(1, len(expected_values) - 1):
     read_iter = source.read(source.get_range_tracker(
@@ -222,14 +222,14 @@ def assert_reentrant_reads_succeed(source_info):
     if sorted(original_read) != sorted(expected_values):
       raise ValueError('Source did not produce expected values when '
                        'performing a reentrant read after reading %d values. '
-                       'Expected %r received %r.',
-                       i, expected_values, original_read)
+                       'Expected %r received %r.'
+                       % (i, expected_values, original_read))
 
     if sorted(reentrant_read) != sorted(expected_values):
       raise ValueError('A reentrant read of source after reading %d values '
                        'did not produce expected values. Expected %r '
-                       'received %r.',
-                       i, expected_values, reentrant_read)
+                       'received %r.'
+                       % (i, expected_values, reentrant_read))
 
 
 def assert_split_at_fraction_behavior(source, num_items_to_read_before_split,
@@ -291,8 +291,8 @@ def _assert_split_at_fraction_behavior(
     if range_tracker.stop_position() != split_result[0]:
       raise ValueError('After a successful split, the stop position of the '
                        'RangeTracker must be the same as the returned split '
-                       'position. Observed %r and %r which are different.',
-                       range_tracker.stop_position() % (split_result[0],))
+                       'position. Observed %r and %r which are different.'
+                       % (range_tracker.stop_position(), split_result[0]))
 
     if split_fraction < 0 or split_fraction > 1:
       raise ValueError('Split fraction must be within the range [0,1]',
@@ -320,7 +320,7 @@ def _assert_split_at_fraction_behavior(
 
   elif (expected_outcome !=
         ExpectedSplitOutcome.MUST_BE_CONSISTENT_IF_SUCCEEDS):
-    raise ValueError('Unknown type of expected outcome: %r'%
+    raise ValueError('Unknown type of expected outcome: %r' %
                      expected_outcome)
   current_items.extend([value for value in reader_iter])
 
@@ -360,19 +360,19 @@ def _verify_single_split_fraction_result(
                      'range of the primary source %r determined '
                      'by performing dynamic work rebalancing at fraction '
                      '%r produced different values. Expected '
-                     'these sources to produce the same list of values.',
-                     source,
-                     _range_to_str(*primary_range),
-                     split_fraction
+                     'these sources to produce the same list of values.'
+                     % (source,
+                        _range_to_str(*primary_range),
+                        split_fraction)
                     )
 
   if expected_items != total_items:
     raise ValueError('Items obtained by reading the source %r for primary '
                      'and residual ranges %s and %s did not produce the '
-                     'expected list of values.',
-                     source,
-                     _range_to_str(*primary_range),
-                     _range_to_str(*residual_range))
+                     'expected list of values.'
+                     % (source,
+                        _range_to_str(*primary_range),
+                        _range_to_str(*residual_range)))
 
   result = (len(primary_items),
             len(residual_items) if split_successful else -1)
@@ -526,10 +526,10 @@ def assert_split_at_fraction_exhaustive(
 
   expected_items = read_from_source(source, start_position, stop_position)
   if not expected_items:
-    raise ValueError('Source %r is empty.', source)
+    raise ValueError('Source %r is empty.' % source)
 
   if len(expected_items) == 1:
-    raise ValueError('Source %r only reads a single item.', source)
+    raise ValueError('Source %r only reads a single item.' % source)
 
   all_non_trivial_fractions = []
 
diff --git a/sdks/python/apache_beam/io/source_test_utils_test.py b/sdks/python/apache_beam/io/source_test_utils_test.py
index af2d4b8..9a619c4 100644
--- a/sdks/python/apache_beam/io/source_test_utils_test.py
+++ b/sdks/python/apache_beam/io/source_test_utils_test.py
@@ -58,7 +58,7 @@ class SourceTestUtilsTest(unittest.TestCase):
     if len(sources_info) < 2:
       raise ValueError('Test is too trivial since splitting only generated %d'
                        'bundles. Please adjust the test so that at least '
-                       'two splits get generated.', len(sources_info))
+                       'two splits get generated.' % len(sources_info))
 
     source_test_utils.assert_sources_equal_reference_source(
         (reference_source, None, None), sources_info)
diff --git a/sdks/python/apache_beam/io/textio.py b/sdks/python/apache_beam/io/textio.py
index bfe4b9f..f5fd2da 100644
--- a/sdks/python/apache_beam/io/textio.py
+++ b/sdks/python/apache_beam/io/textio.py
@@ -77,7 +77,7 @@ class _TextSource(filebasedsource.FileBasedSource):
       assert isinstance(value, integer_types)
       if value > len(self._data):
         raise ValueError('Cannot set position to %d since it\'s larger than '
-                         'size of data %d.', value, len(self._data))
+                         'size of data %d.' % (value, len(self._data)))
       self._position = value
 
     def reset(self):
@@ -121,8 +121,8 @@ class _TextSource(filebasedsource.FileBasedSource):
     self._coder = coder
     self._buffer_size = buffer_size
     if skip_header_lines < 0:
-      raise ValueError('Cannot skip negative number of header lines: %d',
-                       skip_header_lines)
+      raise ValueError('Cannot skip negative number of header lines: %d'
+                       % skip_header_lines)
     elif skip_header_lines > 10:
       logging.warning(
           'Skipping %d header lines. Skipping large number of header '
diff --git a/sdks/python/apache_beam/pipeline.py b/sdks/python/apache_beam/pipeline.py
index 1a2c9de..30e8272 100644
--- a/sdks/python/apache_beam/pipeline.py
+++ b/sdks/python/apache_beam/pipeline.py
@@ -121,13 +121,14 @@ class Pipeline(object):
       else:
         raise ValueError(
             'Parameter options, if specified, must be of type PipelineOptions. '
-            'Received : %r', options)
+            'Received : %r' % options)
     elif argv is not None:
       if isinstance(argv, list):
         self._options = PipelineOptions(argv)
       else:
         raise ValueError(
-            'Parameter argv, if specified, must be a list. Received : %r', argv)
+            'Parameter argv, if specified, must be a list. Received : %r'
+            % argv)
     else:
       self._options = PipelineOptions([])
 
@@ -233,8 +234,8 @@ class Pipeline(object):
             raise NotImplementedError(
                 'PTransform overriding is only supported for PTransforms that '
                 'have a single input. Tried to replace input of '
-                'AppliedPTransform %r that has %d inputs',
-                original_transform_node, len(inputs))
+                'AppliedPTransform %r that has %d inputs'
+                % (original_transform_node, len(inputs)))
           elif len(inputs) == 1:
             input_node = inputs[0]
           elif len(inputs) == 0:
@@ -272,7 +273,7 @@ class Pipeline(object):
                 'PTransform overriding is only supported for PTransforms that '
                 'have a single output. Tried to replace output of '
                 'AppliedPTransform %r with %r.'
-                , original_transform_node, new_output)
+                % (original_transform_node, new_output))
 
           # Recording updated outputs. This cannot be done in the same visitor
           # since if we dynamically update output type here, we'll run into
@@ -356,8 +357,8 @@ class Pipeline(object):
     class ReplacementValidator(PipelineVisitor):
       def visit_transform(self, transform_node):
         if override.matches(transform_node):
-          raise RuntimeError('Transform node %r was not replaced as expected.',
-                             transform_node)
+          raise RuntimeError('Transform node %r was not replaced as expected.'
+                             % transform_node)
 
     self.visit(ReplacementValidator())
 
diff --git a/sdks/python/apache_beam/pipeline_test.py b/sdks/python/apache_beam/pipeline_test.py
index d870665..72791fc 100644
--- a/sdks/python/apache_beam/pipeline_test.py
+++ b/sdks/python/apache_beam/pipeline_test.py
@@ -334,7 +334,7 @@ class PipelineTest(unittest.TestCase):
       def get_replacement_transform(self, ptransform):
         if isinstance(ptransform, DoubleParDo):
           return TripleParDo()
-        raise ValueError('Unsupported type of transform: %r', ptransform)
+        raise ValueError('Unsupported type of transform: %r' % ptransform)
 
     def get_overrides(unused_pipeline_options):
       return [MyParDoOverride()]
diff --git a/sdks/python/apache_beam/runners/common.py b/sdks/python/apache_beam/runners/common.py
index 45e0ed5..43bbfcf 100644
--- a/sdks/python/apache_beam/runners/common.py
+++ b/sdks/python/apache_beam/runners/common.py
@@ -148,8 +148,8 @@ class MethodWrapper(object):
 
     if not isinstance(obj_to_invoke, (DoFn, RestrictionProvider)):
       raise ValueError('\'obj_to_invoke\' has to be either a \'DoFn\' or '
-                       'a \'RestrictionProvider\'. Received %r instead.',
-                       obj_to_invoke)
+                       'a \'RestrictionProvider\'. Received %r instead.'
+                       % obj_to_invoke)
 
     args, _, _, defaults = core.get_function_arguments(
         obj_to_invoke, method_name)
@@ -321,7 +321,7 @@ def _find_param_with_default(
       not (default_as_value or default_as_type)):
     raise ValueError(
         'Exactly one of \'default_as_value\' and \'default_as_type\' should be '
-        'provided. Received %r and %r.', default_as_value, default_as_type)
+        'provided. Received %r and %r.' % (default_as_value, default_as_type))
 
   defaults = method.defaults
   default_as_value = default_as_value
@@ -450,7 +450,7 @@ class PerWindowInvoker(DoFnInvoker):
       if not restriction_tracker_param:
         raise ValueError(
             'A RestrictionTracker %r was provided but DoFn does not have a '
-            'RestrictionTrackerParam defined', restriction_tracker)
+            'RestrictionTrackerParam defined' % restriction_tracker)
       additional_kwargs[restriction_tracker_param] = restriction_tracker
     if self.has_windowed_inputs and len(windowed_value.windows) != 1:
       for w in windowed_value.windows:
diff --git a/sdks/python/apache_beam/runners/dataflow/dataflow_runner.py b/sdks/python/apache_beam/runners/dataflow/dataflow_runner.py
index 81ccd70..2e5ed79 100644
--- a/sdks/python/apache_beam/runners/dataflow/dataflow_runner.py
+++ b/sdks/python/apache_beam/runners/dataflow/dataflow_runner.py
@@ -809,8 +809,7 @@ class DataflowRunner(PipelineRunner):
                           transform.source.flatten_results)
       else:
         raise ValueError('BigQuery source %r must specify either a table or'
-                         ' a query',
-                         transform.source)
+                         ' a query' % transform.source)
     elif transform.source.format == 'pubsub':
       standard_options = (
           transform_node.inputs[0].pipeline.options.view_as(StandardOptions))
diff --git a/sdks/python/apache_beam/runners/dataflow/internal/apiclient.py b/sdks/python/apache_beam/runners/dataflow/internal/apiclient.py
index 92eefbd..784166c 100644
--- a/sdks/python/apache_beam/runners/dataflow/internal/apiclient.py
+++ b/sdks/python/apache_beam/runners/dataflow/internal/apiclient.py
@@ -704,8 +704,8 @@ class DataflowApplicationClient(object):
             .JOB_MESSAGE_ERROR)
       else:
         raise RuntimeError(
-            'Unexpected value for minimum_importance argument: %r',
-            minimum_importance)
+            'Unexpected value for minimum_importance argument: %r'
+            % minimum_importance)
     response = self._client.projects_locations_jobs_messages.List(request)
     return response.jobMessages, response.nextPageToken
 
diff --git a/sdks/python/apache_beam/runners/direct/transform_evaluator.py b/sdks/python/apache_beam/runners/direct/transform_evaluator.py
index 984baca..4b39214 100644
--- a/sdks/python/apache_beam/runners/direct/transform_evaluator.py
+++ b/sdks/python/apache_beam/runners/direct/transform_evaluator.py
@@ -263,7 +263,7 @@ class _TransformEvaluator(object):
 
   def process_element(self, element):
     """Processes a new element as part of the current bundle."""
-    raise NotImplementedError('%s do not process elements.', type(self))
+    raise NotImplementedError('%s do not process elements.' % type(self))
 
   def finish_bundle(self):
     """Finishes the bundle and produces output."""
diff --git a/sdks/python/apache_beam/runners/portability/stager.py b/sdks/python/apache_beam/runners/portability/stager.py
index cf2f0ea..a6bfa49 100644
--- a/sdks/python/apache_beam/runners/portability/stager.py
+++ b/sdks/python/apache_beam/runners/portability/stager.py
@@ -240,8 +240,8 @@ class Stager(object):
           resources.append(sdk_staged_filename)
         else:
           if setup_options.sdk_location == 'default':
-            raise RuntimeError('Cannot find default Beam SDK tar file "%s"',
-                               sdk_path)
+            raise RuntimeError('Cannot find default Beam SDK tar file "%s"'
+                               % sdk_path)
           elif not setup_options.sdk_location:
             logging.info('Beam SDK will not be staged since --sdk_location '
                          'is empty.')
diff --git a/sdks/python/apache_beam/utils/timestamp.py b/sdks/python/apache_beam/utils/timestamp.py
index e762861..e0c4093 100644
--- a/sdks/python/apache_beam/utils/timestamp.py
+++ b/sdks/python/apache_beam/utils/timestamp.py
@@ -92,7 +92,7 @@ class Timestamp(object):
       dt: A ``datetime.datetime`` object in UTC (offset-aware).
     """
     if dt.tzinfo != pytz.utc:
-      raise ValueError('dt not in UTC: %s', dt)
+      raise ValueError('dt not in UTC: %s' % dt)
     duration = dt - cls._epoch_datetime_utc()
     return Timestamp(duration.total_seconds())
 
@@ -106,7 +106,7 @@ class Timestamp(object):
     dt_args = []
     match = cls.RFC_3339_RE.match(rfc3339)
     if match is None:
-      raise ValueError('Could not parse RFC 3339 string: %s', rfc3339)
+      raise ValueError('Could not parse RFC 3339 string: %s' % rfc3339)
     for s in match.groups():
       if s is not None:
         dt_args.append(int(s))

-- 
To stop receiving notification emails like this one, please contact
pabl...@apache.org.
