Hello community,

here is the log from the commit of package python-google-apitools for 
openSUSE:Factory checked in at 2017-08-22 11:09:08
++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
Comparing /work/SRC/openSUSE:Factory/python-google-apitools (Old)
 and      /work/SRC/openSUSE:Factory/.python-google-apitools.new (New)
++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++

Package is "python-google-apitools"

Tue Aug 22 11:09:08 2017 rev:2 rq:517860 version:0.5.11

Changes:
--------
--- 
/work/SRC/openSUSE:Factory/python-google-apitools/python-google-apitools.changes
    2017-02-28 23:48:36.705852906 +0100
+++ 
/work/SRC/openSUSE:Factory/.python-google-apitools.new/python-google-apitools.changes
       2017-08-22 11:09:14.739610344 +0200
@@ -1,0 +2,31 @@
+Fri Jul 28 07:59:02 UTC 2017 - alarr...@suse.com
+
+- Update to version 0.5.11
+  * A fix to test mock to include service name and a small feature
+    to allow for easier conversion of python dict into message which
+    represents proto map. 
+- Update to version 0.5.10
+  * Merge pull request #157 from sb2nov/sb_update_version
+- Update to version 0.5.9
+  * Various fixes for lint, code generation, GCE metadata and ADC logic.
+- Update to version 0.5.8
+  * Allow custom error handling etc in the case of batch requests,
+    similar to what is possible in single request mode.
+- Update to version 0.5.7
+  * Updates encoding.py to not to set/reset global logger.
+  * Treat exceptions accessing GCE credential cache file as a cache miss.
+  * Add limit support for limiting size of batch request.
+- Rebase apitools-test-disable-requires-internet.patch
+- Rebase python-google-apitools.changes
+- Add create-from_old_oauth2client-module.patch to create a new module 
+  from_old_oauth2client that will contain missing files from
+  google-oauth2client. Since google-apitools requires some files that
+  were removed in google-oauth2client 4.0.0, we add those missing files
+  as per solution 1 offered by oauth2client developers at
+  https://github.com/google/oauth2client/issues/470 . The files
+  are unmodified from what was released in google-oauth2client 3.0.0
+  (the last release containing them).
+- Added fix-imports.patch to adapt the import statements of the files
+  added to from_old_oauth2client.
+
+-------------------------------------------------------------------

Old:
----
  apitools-0.5.6.tar.gz

New:
----
  apitools-0.5.11.tar.gz
  create-from_old_oauth2client-module.patch
  fix-imports.patch
  locked_file.py
  multistore_file.py
  util.py

++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++

Other differences:
------------------
++++++ python-google-apitools.spec ++++++
--- /var/tmp/diff_new_pack.4veSXs/_old  2017-08-22 11:09:15.551496026 +0200
+++ /var/tmp/diff_new_pack.4veSXs/_new  2017-08-22 11:09:15.563494337 +0200
@@ -1,7 +1,7 @@
 #
 # spec file for package python-google-apitools
 #
-# Copyright (c) 2016 SUSE LINUX GmbH, Nuernberg, Germany.
+# Copyright (c) 2017 SUSE LINUX GmbH, Nuernberg, Germany.
 #
 # All modifications and additions to the file contributed by third parties
 # remain the property of their copyright owners, unless otherwise agreed
@@ -17,28 +17,35 @@
 
 
 Name:           python-google-apitools
-Version:        0.5.6
+Version:        0.5.11
 Release:        0
 Url:            https://github.com/google/apitools
 Summary:        Google API utils
 License:        Apache-2.0
 Group:          Development/Languages/Python
 Source:         apitools-%{version}.tar.gz
+Source1:        
https://raw.githubusercontent.com/google/oauth2client/6807d7d263118c5db30e5f692c6605cb2a849260/oauth2client/contrib/locked_file.py
+Source2:        
https://raw.githubusercontent.com/google/oauth2client/6807d7d263118c5db30e5f692c6605cb2a849260/oauth2client/contrib/multistore_file.py
+Source3:        
https://raw.githubusercontent.com/google/oauth2client/6807d7d263118c5db30e5f692c6605cb2a849260/oauth2client/util.py
 Patch1:         apitools-test-clientgen.patch
 Patch2:         apitools-test-fix-path-gen_client.patch
 Patch3:         apitools-test-disable-requires-internet.patch
+# PATCH-FIX-OPENSUSE create-from_old_oauth2client-module.patch - Create 
from_old_oauth2client submodule to store needed files from oauth2client not 
available in the latest version
+Patch4:         create-from_old_oauth2client-module.patch
+# PATCH-FIX-OPENSUSE fix-imports.patch - Fix imports so missing files are 
found in the from_old_oauth2client submodule
+Patch5:         fix-imports.patch
+Requires:       python-gflags           >= 2.0
+Requires:       python-google-apputils  >= 0.4.0
 Requires:       python-httplib2         >= 0.8
 Requires:       python-oauth2client     >= 1.5.2
 Requires:       python-oauth2client-gce >= 1.5.2
 Requires:       python-setuptools       >= 18.5
-Requires:       python-google-apputils  >= 0.4.0
-Requires:       python-gflags           >= 2.0
+BuildRequires:  python-gflags           >= 2.0
+BuildRequires:  python-google-apputils  >= 0.4.0
 BuildRequires:  python-httplib2         >= 0.8
 BuildRequires:  python-oauth2client     >= 1.5.2
 BuildRequires:  python-oauth2client-gce >= 1.5.2
 BuildRequires:  python-setuptools       >= 18.5
-BuildRequires:  python-google-apputils  >= 0.4.0
-BuildRequires:  python-gflags           >= 2.0
 # For testing
 BuildRequires:  python-mock             >= 1.0.1
 BuildRequires:  python-nose
@@ -59,6 +66,9 @@
 %patch1
 %patch2
 %patch3
+%patch4 -p1
+cp -a %{S:1} %{S:2} %{S:3} apitools/base/py/from_old_oauth2client
+%patch5 -p1
 find . -name "*.py" | xargs sed -i 's|#!/usr/bin/env python||'
 
 %build
@@ -77,3 +87,5 @@
 %doc LICENSE README.rst
 %{python_sitelib}/*
 %{_bindir}/gen_client
+
+%changelog

++++++ apitools-0.5.6.tar.gz -> apitools-0.5.11.tar.gz ++++++
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' 
old/apitools-0.5.6/apitools/base/protorpclite/messages.py 
new/apitools-0.5.11/apitools/base/protorpclite/messages.py
--- old/apitools-0.5.6/apitools/base/protorpclite/messages.py   2016-12-03 
14:35:46.000000000 +0100
+++ new/apitools-0.5.11/apitools/base/protorpclite/messages.py  2017-05-08 
17:44:29.000000000 +0200
@@ -169,6 +169,7 @@
 LAST_RESERVED_FIELD_NUMBER = 19999
 
 
+# pylint: disable=no-value-for-parameter
 class _DefinitionClass(type):
     """Base meta-class used for definition meta-classes.
 
@@ -250,8 +251,7 @@
         outer_definition_name = cls.outer_definition_name()
         if outer_definition_name is None:
             return six.text_type(cls.__name__)
-        else:
-            return u'%s.%s' % (outer_definition_name, cls.__name__)
+        return u'%s.%s' % (outer_definition_name, cls.__name__)
 
     def outer_definition_name(cls):
         """Helper method for creating outer definition name.
@@ -264,8 +264,7 @@
         outer_definition = cls.message_definition()
         if not outer_definition:
             return util.get_package_for_module(cls.__module__)
-        else:
-            return outer_definition.definition_name()
+        return outer_definition.definition_name()
 
     def definition_package(cls):
         """Helper method for creating creating the package of a definition.
@@ -276,8 +275,7 @@
         outer_definition = cls.message_definition()
         if not outer_definition:
             return util.get_package_for_module(cls.__module__)
-        else:
-            return outer_definition.definition_package()
+        return outer_definition.definition_package()
 
 
 class _EnumClass(_DefinitionClass):
@@ -1103,8 +1101,7 @@
         message_class = self.__field.message_definition()
         if message_class is None:
             return self.__field, None, None
-        else:
-            return None, message_class, self.__field.number
+        return None, message_class, self.__field.number
 
     def __setstate__(self, state):
         """Enable unpickling.
@@ -1299,8 +1296,7 @@
             if self.repeated:
                 value = FieldList(self, value)
             else:
-                value = (  # pylint: disable=redefined-variable-type
-                    self.validate(value))
+                value = self.validate(value)
             message_instance._Message__tags[self.number] = value
 
     def __get__(self, message_instance, message_class):
@@ -1310,8 +1306,7 @@
         result = message_instance._Message__tags.get(self.number)
         if result is None:
             return self.default
-        else:
-            return result
+        return result
 
     def validate_element(self, value):
         """Validate single element of field.
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' 
old/apitools-0.5.6/apitools/base/protorpclite/messages_test.py 
new/apitools-0.5.11/apitools/base/protorpclite/messages_test.py
--- old/apitools-0.5.6/apitools/base/protorpclite/messages_test.py      
2016-12-03 14:35:46.000000000 +0100
+++ new/apitools-0.5.11/apitools/base/protorpclite/messages_test.py     
2017-05-08 17:44:29.000000000 +0200
@@ -851,7 +851,6 @@
         field = messages.FloatField(1)
         self.assertEquals(type(field.validate_element(12)), float)
         self.assertEquals(type(field.validate_element(12.0)), float)
-        # pylint: disable=redefined-variable-type
         field = messages.IntegerField(1)
         self.assertEquals(type(field.validate_element(12)), int)
         self.assertRaises(messages.ValidationError,
@@ -1659,7 +1658,6 @@
             messages.ValidationError,
             "Field val is repeated. Found: <SubMessage>",
             setattr, message, 'val', SubMessage())
-        # pylint: disable=redefined-variable-type
         message.val = [SubMessage()]
         message_field.validate(message)
 
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' 
old/apitools-0.5.6/apitools/base/protorpclite/protojson.py 
new/apitools-0.5.11/apitools/base/protorpclite/protojson.py
--- old/apitools-0.5.6/apitools/base/protorpclite/protojson.py  2016-12-03 
14:35:46.000000000 +0100
+++ new/apitools-0.5.11/apitools/base/protorpclite/protojson.py 2017-05-08 
17:44:29.000000000 +0200
@@ -125,8 +125,8 @@
                     unknown_key)
                 result[unknown_key] = unrecognized_field
             return result
-        else:
-            return super(MessageJSONEncoder, self).default(value)
+
+        return super(MessageJSONEncoder, self).default(value)
 
 
 class ProtoJson(object):
@@ -272,13 +272,6 @@
                 variant = self.__find_variant(value)
                 if variant:
                     message.set_unrecognized_field(key, value, variant)
-                else:
-                    logging.warning(
-                        'No variant found for unrecognized field: %s', key)
-                continue
-
-            # This is just for consistency with the old behavior.
-            if value == []:
                 continue
 
             if field.repeated:
@@ -289,6 +282,9 @@
                                for item in value]
                 setattr(message, field.name, valid_value)
             else:
+                # This is just for consistency with the old behavior.
+                if value == []:
+                    continue
                 setattr(message, field.name, self.decode_field(field, value))
 
         return message
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' 
old/apitools-0.5.6/apitools/base/protorpclite/protojson_test.py 
new/apitools-0.5.11/apitools/base/protorpclite/protojson_test.py
--- old/apitools-0.5.6/apitools/base/protorpclite/protojson_test.py     
2016-12-03 14:35:46.000000000 +0100
+++ new/apitools-0.5.11/apitools/base/protorpclite/protojson_test.py    
2017-05-08 17:44:29.000000000 +0200
@@ -401,6 +401,16 @@
             MyMessage, '{"a_repeated_custom": [1, 2, 3]}')
         self.assertEquals(MyMessage(a_repeated_custom=[1, 2, 3]), message)
 
+    def testDecodeRepeatedEmpty(self):
+        message = protojson.decode_message(
+            MyMessage, '{"a_repeated": []}')
+        self.assertEquals(MyMessage(a_repeated=[]), message)
+
+    def testDecodeNone(self):
+        message = protojson.decode_message(
+            MyMessage, '{"an_integer": []}')
+        self.assertEquals(MyMessage(an_integer=None), message)
+
     def testDecodeBadBase64BytesField(self):
         """Test decoding improperly encoded base64 bytes value."""
         self.assertRaisesWithRegexpMatch(
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' 
old/apitools-0.5.6/apitools/base/protorpclite/test_util.py 
new/apitools-0.5.11/apitools/base/protorpclite/test_util.py
--- old/apitools-0.5.6/apitools/base/protorpclite/test_util.py  2016-12-03 
14:35:46.000000000 +0100
+++ new/apitools-0.5.11/apitools/base/protorpclite/test_util.py 2017-05-08 
17:44:29.000000000 +0200
@@ -641,5 +641,4 @@
         module_file = inspect.getfile(module_attribute)
         default = os.path.basename(module_file).split('.')[0]
         return default
-    else:
-        return module_attribute.__module__
+    return module_attribute.__module__
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/apitools-0.5.6/apitools/base/protorpclite/util.py 
new/apitools-0.5.11/apitools/base/protorpclite/util.py
--- old/apitools-0.5.6/apitools/base/protorpclite/util.py       2016-12-03 
14:35:46.000000000 +0100
+++ new/apitools-0.5.11/apitools/base/protorpclite/util.py      2017-05-08 
17:44:29.000000000 +0200
@@ -189,8 +189,7 @@
                 split_name = os.path.splitext(base_name)
                 if len(split_name) == 1:
                     return six.text_type(base_name)
-                else:
-                    return u'.'.join(split_name[:-1])
+                return u'.'.join(split_name[:-1])
 
         return six.text_type(module.__name__)
 
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/apitools-0.5.6/apitools/base/py/__init__.py 
new/apitools-0.5.11/apitools/base/py/__init__.py
--- old/apitools-0.5.6/apitools/base/py/__init__.py     2016-12-03 
14:35:46.000000000 +0100
+++ new/apitools-0.5.11/apitools/base/py/__init__.py    2017-05-08 
17:44:29.000000000 +0200
@@ -17,6 +17,7 @@
 """Top-level imports for apitools base files."""
 
 # pylint:disable=wildcard-import
+# pylint:disable=redefined-builtin
 from apitools.base.py.base_api import *
 from apitools.base.py.batch import *
 from apitools.base.py.credentials_lib import *
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/apitools-0.5.6/apitools/base/py/app2.py 
new/apitools-0.5.11/apitools/base/py/app2.py
--- old/apitools-0.5.6/apitools/base/py/app2.py 2016-12-03 14:35:46.000000000 
+0100
+++ new/apitools-0.5.11/apitools/base/py/app2.py        2017-05-08 
17:44:29.000000000 +0200
@@ -15,6 +15,7 @@
 # limitations under the License.
 
 """Appcommands-compatible command class with extra fixins."""
+from __future__ import absolute_import
 from __future__ import print_function
 
 import cmd
@@ -25,11 +26,11 @@
 import traceback
 import types
 
-import six
-
+import gflags as flags
 from google.apputils import app
 from google.apputils import appcommands
-import gflags as flags
+import six
+
 
 __all__ = [
     'NewCmd',
@@ -50,8 +51,7 @@
         return s.encode('ascii')
     elif isinstance(s, str):
         return s.decode('ascii')
-    else:
-        return six.text_type(s).encode('ascii', 'backslashreplace')
+    return six.text_type(s).encode('ascii', 'backslashreplace')
 
 
 class NewCmd(appcommands.Cmd):
@@ -91,8 +91,7 @@
     def _GetFlag(self, flagname):
         if flagname in self._command_flags:
             return self._command_flags[flagname]
-        else:
-            return None
+        return None
 
     def Run(self, argv):
         """Run this command.
@@ -129,8 +128,7 @@
 
         if self._debug_mode:
             return self.RunDebug(args, {})
-        else:
-            return self.RunSafely(args, {})
+        return self.RunSafely(args, {})
 
     def RunCmdLoop(self, argv):
         """Hook for use in cmd.Cmd-based command shells."""
@@ -220,7 +218,7 @@
     def last_return_code(self):
         return self._last_return_code
 
-    def _set_prompt(self):
+    def _set_prompt(self):  # pylint: disable=invalid-name
         self.prompt = self._default_prompt
 
     def do_EOF(self, *unused_args):  # pylint: disable=invalid-name
@@ -306,11 +304,14 @@
         names.remove('do_EOF')
         return names
 
-    def do_help(self, command_name):
+    def do_help(self, arg):
         """Print the help for command_name (if present) or general help."""
 
+        command_name = arg
+
         # TODO(craigcitro): Add command-specific flags.
         def FormatOneCmd(name, command, command_names):
+            """Format one command."""
             indent_size = appcommands.GetMaxCommandLength() + 3
             if len(command_names) > 1:
                 indent = ' ' * indent_size
@@ -322,12 +323,11 @@
                 first_line = '%-*s%s' % (indent_size,
                                          name + ':', first_help_line)
                 return '\n'.join((first_line, rest))
-            else:
-                default_indent = '  '
-                return '\n' + flags.TextWrap(
-                    command.CommandGetHelp('', cmd_names=command_names),
-                    indent=default_indent,
-                    firstline_indent=default_indent) + '\n'
+            default_indent = '  '
+            return '\n' + flags.TextWrap(
+                command.CommandGetHelp('', cmd_names=command_names),
+                indent=default_indent,
+                firstline_indent=default_indent) + '\n'
 
         if not command_name:
             print('\nHelp for commands:\n')
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/apitools-0.5.6/apitools/base/py/base_api.py 
new/apitools-0.5.11/apitools/base/py/base_api.py
--- old/apitools-0.5.6/apitools/base/py/base_api.py     2016-12-03 
14:35:46.000000000 +0100
+++ new/apitools-0.5.11/apitools/base/py/base_api.py    2017-05-08 
17:44:29.000000000 +0200
@@ -340,6 +340,7 @@
     @property
     def _default_global_params(self):
         if self.__default_global_params is None:
+            # pylint: disable=not-callable
             self.__default_global_params = self.params_type()
         return self.__default_global_params
 
@@ -605,11 +606,10 @@
                 request_url=http_response.request_url)
         if self.__client.response_type_model == 'json':
             return http_response.content
-        else:
-            response_type = _LoadClass(method_config.response_type_name,
-                                       self.__client.MESSAGES_MODULE)
-            return self.__client.DeserializeMessage(
-                response_type, http_response.content)
+        response_type = _LoadClass(method_config.response_type_name,
+                                   self.__client.MESSAGES_MODULE)
+        return self.__client.DeserializeMessage(
+            response_type, http_response.content)
 
     def __SetBaseHeaders(self, http_request, client):
         """Fill in the basic headers on http_request."""
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/apitools-0.5.6/apitools/base/py/base_cli.py 
new/apitools-0.5.11/apitools/base/py/base_cli.py
--- old/apitools-0.5.6/apitools/base/py/base_cli.py     2016-12-03 
14:35:46.000000000 +0100
+++ new/apitools-0.5.11/apitools/base/py/base_cli.py    2017-05-08 
17:44:29.000000000 +0200
@@ -16,6 +16,8 @@
 
 """Base script for generated CLI."""
 
+from __future__ import absolute_import
+
 import atexit
 import code
 import logging
@@ -24,8 +26,8 @@
 import rlcompleter
 import sys
 
-from google.apputils import appcommands
 import gflags as flags
+from google.apputils import appcommands
 
 from apitools.base.py import encoding
 from apitools.base.py import exceptions
@@ -97,15 +99,13 @@
         if ('(' in readline.get_line_buffer() or
                 not callable(val)):
             return word
-        else:
-            return word + '('
+        return word + '('
 
     def complete(self, text, state):
         if not readline.get_line_buffer().strip():
             if not state:
                 return '  '
-            else:
-                return None
+            return None
         return rlcompleter.Completer.complete(self, text, state)
 
 
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/apitools-0.5.6/apitools/base/py/batch.py 
new/apitools-0.5.11/apitools/base/py/batch.py
--- old/apitools-0.5.6/apitools/base/py/batch.py        2016-12-03 
14:35:46.000000000 +0100
+++ new/apitools-0.5.11/apitools/base/py/batch.py       2017-05-08 
17:44:29.000000000 +0200
@@ -28,6 +28,7 @@
 import six
 from six.moves import http_client
 from six.moves import urllib_parse
+from six.moves import range  # pylint: disable=redefined-builtin
 
 from apitools.base.py import exceptions
 from apitools.base.py import http_wrapper
@@ -180,7 +181,8 @@
             http_request, self.retryable_codes, service, method_config)
         self.api_requests.append(api_request)
 
-    def Execute(self, http, sleep_between_polls=5, max_retries=5):
+    def Execute(self, http, sleep_between_polls=5, max_retries=5,
+                max_batch_size=None, batch_request_callback=None):
         """Execute all of the requests in the batch.
 
         Args:
@@ -190,33 +192,44 @@
           max_retries: Max retries. Any requests that have not succeeded by
               this number of retries simply report the last response or
               exception, whatever it happened to be.
+          max_batch_size: int, if specified requests will be split in batches
+              of given size.
+          batch_request_callback: function of (http_response, exception) passed
+              to BatchHttpRequest which will be run on any given results.
 
         Returns:
           List of ApiCalls.
         """
         requests = [request for request in self.api_requests
                     if not request.terminal_state]
+        batch_size = max_batch_size or len(requests)
 
         for attempt in range(max_retries):
             if attempt:
                 time.sleep(sleep_between_polls)
 
-            # Create a batch_http_request object and populate it with
-            # incomplete requests.
-            batch_http_request = BatchHttpRequest(batch_url=self.batch_url)
-            for request in requests:
-                batch_http_request.Add(
-                    request.http_request, request.HandleResponse)
-            batch_http_request.Execute(http)
+            for i in range(0, len(requests), batch_size):
+                # Create a batch_http_request object and populate it with
+                # incomplete requests.
+                batch_http_request = BatchHttpRequest(
+                    batch_url=self.batch_url,
+                    callback=batch_request_callback
+                )
+                for request in itertools.islice(requests,
+                                                i, i + batch_size):
+                    batch_http_request.Add(
+                        request.http_request, request.HandleResponse)
+                batch_http_request.Execute(http)
+
+                if hasattr(http.request, 'credentials'):
+                    if any(request.authorization_failed
+                           for request in itertools.islice(requests,
+                                                           i, i + batch_size)):
+                        http.request.credentials.refresh(http)
 
             # Collect retryable requests.
             requests = [request for request in self.api_requests if not
                         request.terminal_state]
-
-            if hasattr(http.request, 'credentials'):
-                if any(request.authorization_failed for request in requests):
-                    http.request.credentials.refresh(http)
-
             if not requests:
                 break
 
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/apitools-0.5.6/apitools/base/py/batch_test.py 
new/apitools-0.5.11/apitools/base/py/batch_test.py
--- old/apitools-0.5.6/apitools/base/py/batch_test.py   2016-12-03 
14:35:46.000000000 +0100
+++ new/apitools-0.5.11/apitools/base/py/batch_test.py  2017-05-08 
17:44:29.000000000 +0200
@@ -19,6 +19,7 @@
 
 import mock
 from six.moves import http_client
+from six.moves import range  # pylint:disable=redefined-builtin
 from six.moves.urllib import parse
 import unittest2
 
@@ -93,8 +94,7 @@
             self.assertEqual(expected_request.http_method, request.http_method)
             if isinstance(response, list):
                 return response.pop(0)
-            else:
-                return response
+            return response
 
         mock_request.side_effect = CheckRequest
 
@@ -149,9 +149,19 @@
                                   exceptions.HttpError)
 
     def testSingleRequestInBatch(self):
+        desired_url = 'https://www.example.com'
+
+        callback_was_called = []
+
+        def _Callback(response, exception):
+            self.assertEqual({'status': '200'}, response.info)
+            self.assertEqual('content', response.content)
+            self.assertEqual(desired_url, response.request_url)
+            self.assertIsNone(exception)
+            callback_was_called.append(1)
+
         mock_service = FakeService()
 
-        desired_url = 'https://www.example.com'
         batch_api_request = batch.BatchApiRequest(batch_url=desired_url)
         # The request to be added. The actual request sent will be somewhat
         # larger, as this is added to a batch.
@@ -184,7 +194,8 @@
                 'desired_request': desired_request,
             })
 
-            api_request_responses = batch_api_request.Execute(FakeHttp())
+            api_request_responses = batch_api_request.Execute(
+                FakeHttp(), batch_request_callback=_Callback)
 
             self.assertEqual(1, len(api_request_responses))
             self.assertEqual(1, mock_request.call_count)
@@ -195,6 +206,68 @@
             self.assertEqual({'status': '200'}, response.info)
             self.assertEqual('content', response.content)
             self.assertEqual(desired_url, response.request_url)
+        self.assertEquals(1, len(callback_was_called))
+
+    def _MakeResponse(self, number_of_parts):
+        return http_wrapper.Response(
+            info={
+                'status': '200',
+                'content-type': 'multipart/mixed; boundary="boundary"',
+            },
+            content='--boundary\n' + '--boundary\n'.join(
+                textwrap.dedent("""\
+                    content-type: text/plain
+                    content-id: <id+{0}>
+
+                    HTTP/1.1 200 OK
+                    response {0} content
+
+                    """)
+                .format(i) for i in range(number_of_parts)) + '--boundary--',
+            request_url=None,
+        )
+
+    def _MakeSampleRequest(self, url, name):
+        return http_wrapper.Request(url, 'POST', {
+            'content-type': 'multipart/mixed; boundary="None"',
+            'content-length': 80,
+        }, '{0} {1}'.format(name, 'x' * (79 - len(name))))
+
+    def testMultipleRequestInBatchWithMax(self):
+        mock_service = FakeService()
+
+        desired_url = 'https://www.example.com'
+        batch_api_request = batch.BatchApiRequest(batch_url=desired_url)
+
+        number_of_requests = 10
+        max_batch_size = 3
+        for i in range(number_of_requests):
+            batch_api_request.Add(
+                mock_service, 'unused', None,
+                {'desired_request': self._MakeSampleRequest(
+                    desired_url, 'Sample-{0}'.format(i))})
+
+        responses = []
+        for i in range(0, number_of_requests, max_batch_size):
+            responses.append(
+                self._MakeResponse(
+                    min(number_of_requests - i, max_batch_size)))
+        with mock.patch.object(http_wrapper, 'MakeRequest',
+                               autospec=True) as mock_request:
+            self.__ConfigureMock(
+                mock_request,
+                expected_request=http_wrapper.Request(desired_url, 'POST', {
+                    'content-type': 'multipart/mixed; boundary="None"',
+                    'content-length': 1142,
+                }, 'x' * 1142),
+                response=responses)
+            api_request_responses = batch_api_request.Execute(
+                FakeHttp(), max_batch_size=max_batch_size)
+
+        self.assertEqual(number_of_requests, len(api_request_responses))
+        self.assertEqual(
+            -(-number_of_requests // max_batch_size),
+            mock_request.call_count)
 
     def testRefreshOnAuthFailure(self):
         mock_service = FakeService()
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/apitools-0.5.6/apitools/base/py/credentials_lib.py 
new/apitools-0.5.11/apitools/base/py/credentials_lib.py
--- old/apitools-0.5.6/apitools/base/py/credentials_lib.py      2016-12-03 
14:35:46.000000000 +0100
+++ new/apitools-0.5.11/apitools/base/py/credentials_lib.py     2017-05-08 
17:44:29.000000000 +0200
@@ -209,10 +209,11 @@
 def _GceMetadataRequest(relative_url, use_metadata_ip=False):
     """Request the given url from the GCE metadata service."""
     if use_metadata_ip:
-        base_url = 'http://169.254.169.254/'
+        base_url = os.environ.get('GCE_METADATA_IP', '169.254.169.254')
     else:
-        base_url = 'http://metadata.google.internal/'
-    url = base_url + 'computeMetadata/v1/' + relative_url
+        base_url = os.environ.get(
+            'GCE_METADATA_ROOT', 'metadata.google.internal')
+    url = 'http://' + base_url + '/computeMetadata/v1/' + relative_url
     # Extra header requirement can be found here:
     # https://developers.google.com/compute/docs/metadata
     headers = {'Metadata-Flavor': 'Google'}
@@ -302,6 +303,11 @@
                             if (creds['scopes'] in
                                     (None, cached_creds['scopes'])):
                                 scopes = cached_creds['scopes']
+                except KeyboardInterrupt:
+                    raise
+                except:  # pylint: disable=bare-except
+                    # Treat exceptions as a cache miss.
+                    pass
                 finally:
                     cache_file.unlock_and_close()
         return scopes
@@ -331,6 +337,11 @@
                         # If it's not locked, the locking process will
                         # write the same data to the file, so just
                         # continue.
+                except KeyboardInterrupt:
+                    raise
+                except:  # pylint: disable=bare-except
+                    # Treat exceptions as a cache miss.
+                    pass
                 finally:
                     cache_file.unlock_and_close()
 
@@ -368,6 +379,7 @@
         return util.NormalizeScopes(scope.strip()
                                     for scope in response.readlines())
 
+    # pylint: disable=arguments-differ
     def _refresh(self, do_request):
         """Refresh self.access_token.
 
@@ -645,6 +657,8 @@
     # cloud-platform, our scopes are a subset of cloud scopes, and the
     # ADC will work.
     cp = 'https://www.googleapis.com/auth/cloud-platform'
+    if credentials is None:
+        return None
     if not isinstance(credentials, gc) or cp in scopes:
         return credentials.create_scoped(scopes)
     return None
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' 
old/apitools-0.5.6/apitools/base/py/credentials_lib_test.py 
new/apitools-0.5.11/apitools/base/py/credentials_lib_test.py
--- old/apitools-0.5.6/apitools/base/py/credentials_lib_test.py 2016-12-03 
14:35:46.000000000 +0100
+++ new/apitools-0.5.11/apitools/base/py/credentials_lib_test.py        
2017-05-08 17:44:29.000000000 +0200
@@ -80,6 +80,13 @@
             # The urllib module does weird things with header case.
             self.assertEqual('Google', req.get_header('Metadata-flavor'))
 
+    def testGetAdcNone(self):
+        # Tests that we correctly return None when ADC aren't present in
+        # the well-known file.
+        creds = credentials_lib._GetApplicationDefaultCredentials(
+            client_info={'scope': ''})
+        self.assertIsNone(creds)
+
 
 class TestGetRunFlowFlags(unittest2.TestCase):
 
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/apitools-0.5.6/apitools/base/py/encoding.py 
new/apitools-0.5.11/apitools/base/py/encoding.py
--- old/apitools-0.5.6/apitools/base/py/encoding.py     2016-12-03 
14:35:46.000000000 +0100
+++ new/apitools-0.5.11/apitools/base/py/encoding.py    2017-05-08 
17:44:29.000000000 +0200
@@ -20,7 +20,6 @@
 import collections
 import datetime
 import json
-import logging
 import os
 import sys
 
@@ -119,6 +118,18 @@
     return json.loads(MessageToJson(message))
 
 
+def DictToProtoMap(properties, additional_property_type, sort_items=False):
+    """Convert the given dictionary to an AdditionalProperty message."""
+    items = properties.items()
+    if sort_items:
+        items = sorted(items)
+    map_ = []
+    for key, value in items:
+        map_.append(additional_property_type.AdditionalProperty(
+            key=key, value=value))
+    return additional_property_type(additional_properties=map_)
+
+
 def PyValueToMessage(message_type, value):
     """Convert the given python value to a message of type message_type."""
     return JsonToMessage(message_type, json.dumps(value))
@@ -276,16 +287,9 @@
         if message_type in _CUSTOM_MESSAGE_CODECS:
             return _CUSTOM_MESSAGE_CODECS[
                 message_type].decoder(encoded_message)
-        # We turn off the default logging in protorpc. We may want to
-        # remove this later.
-        old_level = logging.getLogger().level
-        logging.getLogger().setLevel(logging.ERROR)
-        try:
-            result = _DecodeCustomFieldNames(message_type, encoded_message)
-            result = super(_ProtoJsonApiTools, self).decode_message(
-                message_type, result)
-        finally:
-            logging.getLogger().setLevel(old_level)
+        result = _DecodeCustomFieldNames(message_type, encoded_message)
+        result = super(_ProtoJsonApiTools, self).decode_message(
+            message_type, result)
         result = _ProcessUnknownEnums(result, encoded_message)
         result = _ProcessUnknownMessages(result, encoded_message)
         return _DecodeUnknownFields(result, encoded_message)
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/apitools-0.5.6/apitools/base/py/encoding_test.py 
new/apitools-0.5.11/apitools/base/py/encoding_test.py
--- old/apitools-0.5.6/apitools/base/py/encoding_test.py        2016-12-03 
14:35:46.000000000 +0100
+++ new/apitools-0.5.11/apitools/base/py/encoding_test.py       2017-05-08 
17:44:29.000000000 +0200
@@ -198,7 +198,6 @@
             '{"nested": {"additional_properties": []}}',
             encoding.MessageToJson(
                 msg, include_fields=['nested.additional_properties']))
-        # pylint: disable=redefined-variable-type
         msg = ExtraNestedMessage(nested=msg)
         self.assertEqual(
             '{"nested": {"nested": null}}',
@@ -464,3 +463,42 @@
         encoded_msg = '{"a": [{"one": 1}]}'
         msg = encoding.JsonToMessage(RepeatedJsonValueMessage, encoded_msg)
         self.assertEqual(encoded_msg, encoding.MessageToJson(msg))
+
+    def testDictToProtoMap(self):
+        dict_ = {'key': 'value'}
+
+        encoded_msg = encoding.DictToProtoMap(dict_,
+                                              AdditionalPropertiesMessage)
+        expected_msg = AdditionalPropertiesMessage()
+        expected_msg.additional_properties = [
+            AdditionalPropertiesMessage.AdditionalProperty(
+                key='key', value='value')
+        ]
+        self.assertEqual(encoded_msg, expected_msg)
+
+    def testDictToProtoMapSorted(self):
+        tuples = [('key{0:02}'.format(i), 'value') for i in range(100)]
+        dict_ = dict(tuples)
+
+        encoded_msg = encoding.DictToProtoMap(dict_,
+                                              AdditionalPropertiesMessage,
+                                              sort_items=True)
+        expected_msg = AdditionalPropertiesMessage()
+        expected_msg.additional_properties = [
+            AdditionalPropertiesMessage.AdditionalProperty(
+                key=key, value=value)
+            for key, value in tuples
+        ]
+        self.assertEqual(encoded_msg, expected_msg)
+
+    def testDictToProtoMapNumeric(self):
+        dict_ = {'key': 1}
+
+        encoded_msg = encoding.DictToProtoMap(dict_,
+                                              AdditionalIntPropertiesMessage)
+        expected_msg = AdditionalIntPropertiesMessage()
+        expected_msg.additional_properties = [
+            AdditionalIntPropertiesMessage.AdditionalProperty(
+                key='key', value=1)
+        ]
+        self.assertEqual(encoded_msg, expected_msg)
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/apitools-0.5.6/apitools/base/py/testing/mock.py 
new/apitools-0.5.11/apitools/base/py/testing/mock.py
--- old/apitools-0.5.6/apitools/base/py/testing/mock.py 2016-12-03 
14:35:46.000000000 +0100
+++ new/apitools-0.5.11/apitools/base/py/testing/mock.py        2017-05-08 
17:44:29.000000000 +0200
@@ -186,6 +186,7 @@
     """A mocked API service method."""
 
     def __init__(self, key, mocked_client, real_method):
+        self.__name__ = real_method.__name__
         self.__key = key
         self.__mocked_client = mocked_client
         self.__real_method = real_method
@@ -245,8 +246,7 @@
 def _MakeMockedService(api_name, collection_name,
                        mock_client, service, real_service):
     class MockedService(base_api.BaseApiService):
-        def __init__(self, real_client):
-            super(MockedService, self).__init__(real_client)
+        pass
 
     for method in service.GetMethodsList():
         real_method = None
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/apitools-0.5.6/apitools/base/py/transfer.py 
new/apitools-0.5.11/apitools/base/py/transfer.py
--- old/apitools-0.5.6/apitools/base/py/transfer.py     2016-12-03 
14:35:46.000000000 +0100
+++ new/apitools-0.5.11/apitools/base/py/transfer.py    2017-05-08 
17:44:29.000000000 +0200
@@ -271,9 +271,8 @@
     def __str__(self):
         if not self.initialized:
             return 'Download (uninitialized)'
-        else:
-            return 'Download with %d/%s bytes transferred from url %s' % (
-                self.progress, self.total_size, self.url)
+        return 'Download with %d/%s bytes transferred from url %s' % (
+            self.progress, self.total_size, self.url)
 
     def ConfigureRequest(self, http_request, url_builder):
         url_builder.query_params['alt'] = 'media'
@@ -648,9 +647,8 @@
     def __str__(self):
         if not self.initialized:
             return 'Upload (uninitialized)'
-        else:
-            return 'Upload with %d/%s bytes transferred for url %s' % (
-                self.progress, self.total_size or '???', self.url)
+        return 'Upload with %d/%s bytes transferred for url %s' % (
+            self.progress, self.total_size or '???', self.url)
 
     @property
     def strategy(self):
@@ -850,8 +848,7 @@
         # go ahead and pump the bytes now.
         if self.auto_transfer:
             return self.StreamInChunks()
-        else:
-            return http_response
+        return http_response
 
     def __GetLastByte(self, range_header):
         _, _, end = range_header.partition('-')
@@ -992,7 +989,6 @@
             # https://code.google.com/p/httplib2/issues/detail?id=176 which can
             # cause httplib2 to skip bytes on 401's for file objects.
             # Rework this solution to be more general.
-            # pylint: disable=redefined-variable-type
             body_stream = body_stream.read(self.chunksize)
         else:
             end = min(start + self.chunksize, self.total_size)
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/apitools-0.5.6/apitools/base/py/util.py 
new/apitools-0.5.11/apitools/base/py/util.py
--- old/apitools-0.5.6/apitools/base/py/util.py 2016-12-03 14:35:46.000000000 
+0100
+++ new/apitools-0.5.11/apitools/base/py/util.py        2017-05-08 
17:44:29.000000000 +0200
@@ -61,9 +61,12 @@
     Returns:
       True iff we're running on a GCE instance.
     """
+    metadata_url = 'http://{}'.format(
+        os.environ.get('GCE_METADATA_ROOT', 'metadata.google.internal'))
     try:
         o = urllib_request.build_opener(urllib_request.ProxyHandler({})).open(
-            urllib_request.Request('http://metadata.google.internal'))
+            urllib_request.Request(
+                metadata_url, headers={'Metadata-Flavor': 'Google'}))
     except urllib_error.URLError:
         return False
     return (o.getcode() == http_client.OK and
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' 
old/apitools-0.5.6/apitools/gen/client_generation_test.py 
new/apitools-0.5.11/apitools/gen/client_generation_test.py
--- old/apitools-0.5.6/apitools/gen/client_generation_test.py   2016-12-03 
14:35:46.000000000 +0100
+++ new/apitools-0.5.11/apitools/gen/client_generation_test.py  2017-05-08 
17:44:29.000000000 +0200
@@ -22,6 +22,7 @@
 
 import unittest2
 
+from apitools.gen import gen_client
 from apitools.gen import test_utils
 
 
@@ -55,7 +56,7 @@
                 ]
                 logging.info('Testing API %s with command line: %s',
                              api, ' '.join(args))
-                retcode = subprocess.call(args)
+                retcode = gen_client.main(args)
                 if retcode == 128:
                     logging.error('Failed to fetch discovery doc, continuing.')
                     continue
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/apitools-0.5.6/apitools/gen/gen_client_lib.py 
new/apitools-0.5.11/apitools/gen/gen_client_lib.py
--- old/apitools-0.5.6/apitools/gen/gen_client_lib.py   2016-12-03 
14:35:46.000000000 +0100
+++ new/apitools-0.5.11/apitools/gen/gen_client_lib.py  2017-05-08 
17:44:29.000000000 +0200
@@ -30,8 +30,14 @@
 
 def _ApitoolsVersion():
     """Returns version of the currently installed google-apitools package."""
-    import pkg_resources
-    return pkg_resources.get_distribution('google-apitools').version
+    try:
+        import pkg_resources
+    except ImportError:
+        return 'X.X.X'
+    try:
+        return pkg_resources.get_distribution('google-apitools').version
+    except pkg_resources.DistributionNotFound:
+        return 'X.X.X'
 
 
 def _StandardQueryParametersSchema(discovery_doc):
@@ -84,7 +90,7 @@
             self.__root_package, self.__base_files_package,
             self.__protorpc_package)
         schemas = self.__discovery_doc.get('schemas', {})
-        for schema_name, schema in schemas.items():
+        for schema_name, schema in sorted(schemas.items()):
             self.__message_registry.AddDescriptorFromSchema(
                 schema_name, schema)
 
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/apitools-0.5.6/apitools/gen/message_registry.py 
new/apitools-0.5.11/apitools/gen/message_registry.py
--- old/apitools-0.5.6/apitools/gen/message_registry.py 2016-12-03 
14:35:46.000000000 +0100
+++ new/apitools-0.5.11/apitools/gen/message_registry.py        2017-05-08 
17:44:29.000000000 +0200
@@ -441,8 +441,7 @@
                     entry_name_hint, items.get('items'), parent_name)
                 return TypeInfo(type_name=entry_type_name,
                                 variant=messages.Variant.MESSAGE)
-            else:
-                return self.__GetTypeInfo(items, entry_name_hint)
+            return self.__GetTypeInfo(items, entry_name_hint)
         elif type_name == 'any':
             self.__AddImport('from %s import extra_types' %
                              self.__base_files_package)
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' 
'--exclude=.svnignore' old/apitools-0.5.6/setup.py new/apitools-0.5.11/setup.py
--- old/apitools-0.5.6/setup.py 2016-12-03 14:35:46.000000000 +0100
+++ new/apitools-0.5.11/setup.py        2017-05-08 17:44:29.000000000 +0200
@@ -30,7 +30,6 @@
 REQUIRED_PACKAGES = [
     'httplib2>=0.8',
     'oauth2client>=1.5.2,<4.0.0dev',
-    'setuptools>=18.5',
     'six>=1.9.0',
     ]
 
@@ -54,7 +53,7 @@
 if py_version < '2.7':
     REQUIRED_PACKAGES.append('argparse>=1.2.1')
 
-_APITOOLS_VERSION = '0.5.6'
+_APITOOLS_VERSION = '0.5.11'
 
 with open('README.rst') as fileobj:
     README = fileobj.read()

++++++ apitools-test-disable-requires-internet.patch ++++++
--- /var/tmp/diff_new_pack.4veSXs/_old  2017-08-22 11:09:15.915444780 +0200
+++ /var/tmp/diff_new_pack.4veSXs/_new  2017-08-22 11:09:15.919444217 +0200
@@ -97,10 +97,10 @@
 +except:
 +    has_network = False
 +
+ from apitools.gen import gen_client
  from apitools.gen import test_utils
  
- 
-@@ -42,6 +48,8 @@ class ClientGenerationTest(unittest2.Tes
+@@ -43,6 +49,8 @@ class ClientGenerationTest(unittest2.Tes
      @test_utils.SkipOnWindows
      @test_utils.RunOnlyOnPython27
      def testGeneration(self):

++++++ create-from_old_oauth2client-module.patch ++++++
Index: apitools-0.5.11/apitools/base/py/from_old_oauth2client/__init__.py
===================================================================
--- /dev/null
+++ apitools-0.5.11/apitools/base/py/from_old_oauth2client/__init__.py
@@ -0,0 +1,1 @@
+
++++++ fix-imports.patch ++++++
Index: apitools-0.5.11/apitools/base/py/from_old_oauth2client/locked_file.py
===================================================================
--- apitools-0.5.11.orig/apitools/base/py/from_old_oauth2client/locked_file.py
+++ apitools-0.5.11/apitools/base/py/from_old_oauth2client/locked_file.py
@@ -37,7 +37,7 @@ import logging
 import os
 import time
 
-from oauth2client import util
+from . import util
 
 
 __author__ = 'ca...@google.com (David T McWherter)'
Index: apitools-0.5.11/apitools/base/py/from_old_oauth2client/multistore_file.py
===================================================================
--- 
apitools-0.5.11.orig/apitools/base/py/from_old_oauth2client/multistore_file.py
+++ apitools-0.5.11/apitools/base/py/from_old_oauth2client/multistore_file.py
@@ -51,8 +51,8 @@ import os
 import threading
 
 from oauth2client import client
-from oauth2client import util
-from oauth2client.contrib import locked_file
+from . import util
+from . import locked_file
 
 __author__ = 'jb...@google.com (Joe Beda)'
 
Index: apitools-0.5.11/apitools/base/py/credentials_lib.py
===================================================================
--- apitools-0.5.11.orig/apitools/base/py/credentials_lib.py
+++ apitools-0.5.11/apitools/base/py/credentials_lib.py
@@ -47,12 +47,18 @@ except ImportError:
 try:
     from oauth2client.contrib import locked_file
 except ImportError:
-    from oauth2client import locked_file
+    try:
+        from oauth2client import locked_file
+    except ImportError:
+        from apitools.base.py.from_old_oauth2client import locked_file
 
 try:
     from oauth2client.contrib import multistore_file
 except ImportError:
-    from oauth2client import multistore_file
+    try:
+        from oauth2client import multistore_file
+    except ImportError:
+        from apitools.base.py.from_old_oauth2client import multistore_file
 
 try:
     import gflags
++++++ locked_file.py ++++++
# Copyright 2014 Google Inc. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#      http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

"""Locked file interface that should work on Unix and Windows pythons.

This module first tries to use fcntl locking to ensure serialized access
to a file, then falls back on a lock file if that is unavailable.

Usage::

    f = LockedFile('filename', 'r+b', 'rb')
    f.open_and_lock()
    if f.is_locked():
      print('Acquired filename with r+b mode')
      f.file_handle().write('locked data')
    else:
      print('Acquired filename with rb mode')
    f.unlock_and_close()

"""

from __future__ import print_function

import errno
import logging
import os
import time

from oauth2client import util


__author__ = 'ca...@google.com (David T McWherter)'

# Module-level logger; used below to report lock-acquisition timeouts.
logger = logging.getLogger(__name__)


class CredentialsFileSymbolicLinkError(Exception):
    """Raised when a credentials file turns out to be a symbolic link."""


class AlreadyLockedException(Exception):
    """Raised on an attempt to lock a file that is already locked."""


def validate_file(filename):
    """Reject credentials files that are symbolic links.

    Raises:
        CredentialsFileSymbolicLinkError: if *filename* is a symlink.
    """
    if not os.path.islink(filename):
        return
    raise CredentialsFileSymbolicLinkError(
        'File: {0} is a symbolic link.'.format(filename))


class _Opener(object):
    """Base class for different locking primitives."""

    def __init__(self, filename, mode, fallback_mode):
        """Create an Opener.

        Args:
            filename: string, The pathname of the file.
            mode: string, The preferred mode to access the file with.
            fallback_mode: string, The mode to use if locking fails.
        """
        self._locked = False
        self._filename = filename
        self._mode = mode
        self._fallback_mode = fallback_mode
        self._fh = None
        self._lock_fd = None

    def is_locked(self):
        """Was the file locked."""
        return self._locked

    def file_handle(self):
        """The file handle to the file. Valid only after opened."""
        return self._fh

    def filename(self):
        """The filename that is being locked."""
        return self._filename

    def open_and_lock(self, timeout, delay):
        """Open the file and lock it.

        Args:
            timeout: float, How long to try to lock for.
            delay: float, How long to wait between retries.
        """
        pass

    def unlock_and_close(self):
        """Unlock and close the file."""
        pass


class _PosixOpener(_Opener):
    """Lock files using Posix advisory lock files."""

    def open_and_lock(self, timeout, delay):
        """Open the file and lock it.

        Tries to create a .lock file next to the file we're trying to open.

        Args:
            timeout: float, How long to try to lock for.
            delay: float, How long to wait between retries.

        Raises:
            AlreadyLockedException: if the lock is already acquired.
            IOError: if the open fails.
            CredentialsFileSymbolicLinkError if the file is a symbolic link.
        """
        if self._locked:
            raise AlreadyLockedException(
                'File {0} is already locked'.format(self._filename))
        self._locked = False

        validate_file(self._filename)
        try:
            self._fh = open(self._filename, self._mode)
        except IOError as e:
            # If we can't access with _mode, try _fallback_mode and don't lock.
            if e.errno == errno.EACCES:
                self._fh = open(self._filename, self._fallback_mode)
                return
            # BUGFIX: any other open failure (e.g. ENOENT) was previously
            # swallowed, leaving self._fh unset and continuing into the
            # locking loop in a half-open state. Propagate it instead, as
            # the documented contract ("IOError: if the open fails") says.
            raise

        lock_filename = self._posix_lockfile(self._filename)
        start_time = time.time()
        while True:
            try:
                # O_CREAT|O_EXCL makes creation of the sentinel atomic:
                # exactly one process can create it at a time.
                self._lock_fd = os.open(lock_filename,
                                        os.O_CREAT | os.O_EXCL | os.O_RDWR)
                self._locked = True
                break

            except OSError as e:
                if e.errno != errno.EEXIST:
                    raise
                if (time.time() - start_time) >= timeout:
                    # logger.warn is a deprecated alias of logger.warning.
                    logger.warning('Could not acquire lock %s in %s seconds',
                                   lock_filename, timeout)
                    # Close the file and open in fallback_mode.
                    if self._fh:
                        self._fh.close()
                    self._fh = open(self._filename, self._fallback_mode)
                    return
                time.sleep(delay)

    def unlock_and_close(self):
        """Unlock a file by removing the .lock file, and close the handle."""
        if self._locked:
            lock_filename = self._posix_lockfile(self._filename)
            os.close(self._lock_fd)
            os.unlink(lock_filename)
            self._locked = False
            self._lock_fd = None
        if self._fh:
            self._fh.close()

    def _posix_lockfile(self, filename):
        """The name of the lock file to use for posix locking."""
        return '{0}.lock'.format(filename)


class LockedFile(object):
    """A file that can be opened with exclusive access."""

    @util.positional(4)
    def __init__(self, filename, mode, fallback_mode, use_native_locking=True):
        """Construct a LockedFile.

        Args:
            filename: string, The path of the file to open.
            mode: string, The mode to try to open the file with.
            fallback_mode: string, The mode to use if locking fails.
            use_native_locking: bool, Whether or not fcntl/win32 locking is
                                used.
        """
        opener = None
        if use_native_locking:
            # Prefer the platform-native primitives when importable;
            # quietly fall through on ImportError.
            try:
                from oauth2client.contrib._win32_opener import _Win32Opener
                opener = _Win32Opener(filename, mode, fallback_mode)
            except ImportError:
                try:
                    from oauth2client.contrib._fcntl_opener import _FcntlOpener
                    opener = _FcntlOpener(filename, mode, fallback_mode)
                except ImportError:
                    pass

        if opener is None:
            # Portable fallback based on ".lock" sentinel files.
            opener = _PosixOpener(filename, mode, fallback_mode)

        self._opener = opener

    def filename(self):
        """The path this LockedFile was constructed with."""
        return self._opener._filename

    def file_handle(self):
        """The handle of the opened file."""
        return self._opener.file_handle()

    def is_locked(self):
        """Whether the file was successfully locked."""
        return self._opener.is_locked()

    def open_and_lock(self, timeout=0, delay=0.05):
        """Open the file, trying to lock it.

        Args:
            timeout: float, The number of seconds to try to acquire the lock.
            delay: float, The number of seconds to wait between retry attempts.

        Raises:
            AlreadyLockedException: if the lock is already acquired.
            IOError: if the open fails.
        """
        self._opener.open_and_lock(timeout, delay)

    def unlock_and_close(self):
        """Unlock and close the file."""
        self._opener.unlock_and_close()
++++++ multistore_file.py ++++++
# Copyright 2014 Google Inc. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#      http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

"""Multi-credential file store with lock support.

This module implements a JSON credential store where multiple
credentials can be stored in one file. That file supports locking
both in a single process and across processes.

The credentials themselves are keyed off of:

* client_id
* user_agent
* scope

The format of the stored data is like so::

    {
      'file_version': 1,
      'data': [
          {
              'key': {
                  'clientId': '<client id>',
                  'userAgent': '<user agent>',
                  'scope': '<scope>'
              },
              'credential': {
                  # JSON serialized Credentials.
              }
          }
      ]
    }

"""

import errno
import json
import logging
import os
import threading

from oauth2client import client
from oauth2client import util
from oauth2client.contrib import locked_file

__author__ = 'jb...@google.com (Joe Beda)'

# Module-level logger; named after the module for standard filtering.
logger = logging.getLogger(__name__)

# Emitted once at import time: the upstream oauth2client module this file
# was taken from has been deprecated in favor of multiprocess_file_storage.
logger.warning(
    'The oauth2client.contrib.multistore_file module has been deprecated and '
    'will be removed in the next release of oauth2client. Please migrate to '
    'multiprocess_file_storage.')

# A dict from 'filename'->_MultiStore instances
_multistores = {}
# Guards all reads/writes of _multistores across threads.
_multistores_lock = threading.Lock()


class Error(Exception):
    """Root of the exception hierarchy for this module."""


class NewerCredentialStoreError(Error):
    """Raised when the credential store's version is newer than supported."""


def _dict_to_tuple_key(dictionary):
    """Converts a dictionary to a tuple that can be used as an immutable key.

    The resulting key is always sorted so that logically equivalent
    dictionaries always produce an identical tuple for a key.

    Args:
        dictionary: the dictionary to use as the key.

    Returns:
        A tuple representing the dictionary in it's naturally sorted ordering.
    """
    return tuple(sorted(dictionary.items()))


@util.positional(4)
def get_credential_storage(filename, client_id, user_agent, scope,
                           warn_on_readonly=True):
    """Get a Storage instance for a credential.

    Args:
        filename: The JSON file storing a set of credentials
        client_id: The client_id for the credential
        user_agent: The user agent for the credential
        scope: string or iterable of strings, Scope(s) being requested
        warn_on_readonly: if True, log a warning if the store is readonly

    Returns:
        An object derived from client.Storage for getting/setting the
        credential.
    """
    # Build the legacy composite key from the individual parameters.
    legacy_key = {
        'clientId': client_id,
        'userAgent': user_agent,
        'scope': util.scopes_to_string(scope),
    }
    return get_credential_storage_custom_key(
        filename, legacy_key, warn_on_readonly=warn_on_readonly)


@util.positional(2)
def get_credential_storage_custom_string_key(filename, key_string,
                                             warn_on_readonly=True):
    """Get a Storage instance for a credential using a single string as a key.

    Allows you to provide a string as a custom key that will be used for
    credential storage and retrieval.

    Args:
        filename: The JSON file storing a set of credentials
        key_string: A string to use as the key for storing this credential.
        warn_on_readonly: if True, log a warning if the store is readonly

    Returns:
        An object derived from client.Storage for getting/setting the
        credential.
    """
    # Wrap the string in a one-entry dict and delegate to the dict-key API.
    return get_credential_storage_custom_key(
        filename, {'key': key_string}, warn_on_readonly=warn_on_readonly)


@util.positional(2)
def get_credential_storage_custom_key(filename, key_dict,
                                      warn_on_readonly=True):
    """Get a Storage instance for a credential using a dictionary as a key.

    Allows you to provide a dictionary as a custom key that will be used for
    credential storage and retrieval.

    Args:
        filename: The JSON file storing a set of credentials
        key_dict: A dictionary to use as the key for storing this credential.
                  There is no ordering of the keys in the dictionary. Logically
                  equivalent dictionaries will produce equivalent storage keys.
        warn_on_readonly: if True, log a warning if the store is readonly

    Returns:
        An object derived from client.Storage for getting/setting the
        credential.
    """
    store = _get_multistore(filename, warn_on_readonly=warn_on_readonly)
    return store._get_storage(_dict_to_tuple_key(key_dict))


@util.positional(1)
def get_all_credential_keys(filename, warn_on_readonly=True):
    """Gets all the registered credential keys in the given Multistore.

    Args:
        filename: The JSON file storing a set of credentials
        warn_on_readonly: if True, log a warning if the store is readonly

    Returns:
        A list of the credential keys present in the file.  They are returned
        as dictionaries that can be passed into
        get_credential_storage_custom_key to get the actual credentials.
    """
    store = _get_multistore(filename, warn_on_readonly=warn_on_readonly)
    # The multistore lock is not reentrant, so hold it only for the read.
    store._lock()
    try:
        return store._get_all_credential_keys()
    finally:
        store._unlock()


@util.positional(1)
def _get_multistore(filename, warn_on_readonly=True):
    """A helper method to initialize the multistore with proper locking.

    Args:
        filename: The JSON file storing a set of credentials
        warn_on_readonly: if True, log a warning if the store is readonly

    Returns:
        A multistore object
    """
    filename = os.path.expanduser(filename)
    # threading.Lock supports the context-manager protocol, which replaces
    # the manual acquire()/try/finally/release() dance.
    with _multistores_lock:
        multistore = _multistores.get(filename)
        if multistore is None:
            # Only construct a _MultiStore on a cache miss: its constructor
            # touches the filesystem (_create_file_if_needed), which the
            # previous setdefault() call performed even on a cache hit.
            multistore = _MultiStore(filename,
                                     warn_on_readonly=warn_on_readonly)
            _multistores[filename] = multistore
    return multistore


class _MultiStore(object):
    """A file backed store for multiple credentials.

    A single JSON file holds many credentials, each addressed by a key
    tuple. All access must go through the locking helpers (_lock/_unlock),
    which serialize both threads in this process and other processes via
    a file lock.
    """

    @util.positional(2)
    def __init__(self, filename, warn_on_readonly=True):
        """Initialize the class.

        This will create the file if necessary.

        Args:
            filename: The JSON file backing this store.
            warn_on_readonly: if True, log a warning when the store can
                              only be opened read-only.
        """
        self._file = locked_file.LockedFile(filename, 'r+', 'r')
        self._thread_lock = threading.Lock()
        self._read_only = False
        self._warn_on_readonly = warn_on_readonly

        self._create_file_if_needed()

        # Cache of deserialized store. This is only valid after the
        # _MultiStore is locked or _refresh_data_cache is called. This is
        # of the form of:
        #
        # ((key, value), (key, value)...) -> OAuth2Credential
        #
        # If this is None, then the store hasn't been read yet.
        self._data = None

    class _Storage(client.Storage):
        """A Storage object that can read/write a single credential."""

        def __init__(self, multistore, key):
            self._multistore = multistore
            self._key = key

        def acquire_lock(self):
            """Acquires any lock necessary to access this Storage.

            This lock is not reentrant.
            """
            self._multistore._lock()

        def release_lock(self):
            """Release the Storage lock.

            Trying to release a lock that isn't held will result in a
            RuntimeError.
            """
            self._multistore._unlock()

        def locked_get(self):
            """Retrieve credential.

            The Storage lock must be held when this is called.

            Returns:
                oauth2client.client.Credentials
            """
            credential = self._multistore._get_credential(self._key)
            if credential:
                # Re-attach this storage so refreshed tokens get written
                # back to the multistore automatically.
                credential.set_store(self)
            return credential

        def locked_put(self, credentials):
            """Write a credential.

            The Storage lock must be held when this is called.

            Args:
                credentials: Credentials, the credentials to store.
            """
            self._multistore._update_credential(self._key, credentials)

        def locked_delete(self):
            """Delete a credential.

            The Storage lock must be held when this is called.
            """
            self._multistore._delete_credential(self._key)

    def _create_file_if_needed(self):
        """Create an empty file if necessary.

        This method will not initialize the file. Instead it implements a
        simple version of "touch" to ensure the file has been created.
        """
        if not os.path.exists(self._file.filename()):
            # Restrict the new file to owner read/write (0600) since it
            # will hold credentials.
            old_umask = os.umask(0o177)
            try:
                open(self._file.filename(), 'a+b').close()
            finally:
                os.umask(old_umask)

    def _lock(self):
        """Lock the entire multistore."""
        self._thread_lock.acquire()
        try:
            self._file.open_and_lock()
        except (IOError, OSError) as e:
            if e.errno == errno.ENOSYS:
                logger.warning('File system does not support locking the '
                               'credentials file.')
            elif e.errno == errno.ENOLCK:
                logger.warning('File system is out of resources for writing '
                               'the credentials file (is your disk full?).')
            elif e.errno == errno.EDEADLK:
                logger.warning('Lock contention on multistore file, opening '
                               'in read-only mode.')
            elif e.errno == errno.EACCES:
                logger.warning('Cannot access credentials file.')
            else:
                # Unexpected failure: release the thread lock before
                # propagating, otherwise every later caller deadlocks.
                self._thread_lock.release()
                raise
        if not self._file.is_locked():
            self._read_only = True
            if self._warn_on_readonly:
                logger.warning('The credentials file (%s) is not writable. '
                               'Opening in read-only mode. Any refreshed '
                               'credentials will only be '
                               'valid for this run.', self._file.filename())

        if os.path.getsize(self._file.filename()) == 0:
            logger.debug('Initializing empty multistore file')
            # The multistore is empty so write out an empty file.
            self._data = {}
            self._write()
        elif not self._read_only or self._data is None:
            # Only refresh the data if we are read/write or we haven't
            # cached the data yet. If we are readonly, we assume it isn't
            # changing out from under us and that we only have to read it
            # once. This prevents us from whacking any new access keys that
            # we have cached in memory but were unable to write out.
            self._refresh_data_cache()

    def _unlock(self):
        """Release the lock on the multistore."""
        self._file.unlock_and_close()
        self._thread_lock.release()

    def _locked_json_read(self):
        """Get the raw content of the multistore file.

        The multistore must be locked when this is called.

        Returns:
            The contents of the multistore decoded as JSON.
        """
        assert self._thread_lock.locked()
        self._file.file_handle().seek(0)
        return json.load(self._file.file_handle())

    def _locked_json_write(self, data):
        """Write a JSON serializable data structure to the multistore.

        The multistore must be locked when this is called.

        Args:
            data: The data to be serialized and written.
        """
        assert self._thread_lock.locked()
        if self._read_only:
            # Silently drop writes when read-only; refreshed credentials
            # then only live for this process (warned about in _lock).
            return
        self._file.file_handle().seek(0)
        json.dump(data, self._file.file_handle(),
                  sort_keys=True, indent=2, separators=(',', ': '))
        # Remove any stale trailing bytes from a previously-longer file.
        self._file.file_handle().truncate()

    def _refresh_data_cache(self):
        """Refresh the contents of the multistore.

        The multistore must be locked when this is called.

        Raises:
            NewerCredentialStoreError: Raised when a newer client has written
            the store.
        """
        self._data = {}
        try:
            raw_data = self._locked_json_read()
        except Exception:
            # Deliberate best-effort: a corrupt store is treated as empty
            # and will be overwritten on the next write.
            logger.warning('Credential data store could not be loaded. '
                           'Will ignore and overwrite.')
            return

        version = 0
        try:
            version = raw_data['file_version']
        except Exception:
            logger.warning('Missing version for credential data store. It '
                           'may be corrupt or an old version. Overwriting.')
        if version > 1:
            raise NewerCredentialStoreError(
                'Credential file has file_version of {0}. '
                'Only file_version of 1 is supported.'.format(version))

        credentials = []
        try:
            credentials = raw_data['data']
        except (TypeError, KeyError):
            pass

        for cred_entry in credentials:
            try:
                key, credential = self._decode_credential_from_json(cred_entry)
                self._data[key] = credential
            except Exception:
                # If something goes wrong loading a credential, just ignore
                # it. Catch Exception (not bare except) so that
                # KeyboardInterrupt/SystemExit still propagate.
                logger.info('Error decoding credential, skipping',
                            exc_info=True)

    def _decode_credential_from_json(self, cred_entry):
        """Load a credential from our JSON serialization.

        Args:
            cred_entry: A dict entry from the data member of our format

        Returns:
            (key, cred) where the key is the key tuple and the cred is the
            OAuth2Credential object.
        """
        raw_key = cred_entry['key']
        key = _dict_to_tuple_key(raw_key)
        credential = client.Credentials.new_from_json(
            json.dumps(cred_entry['credential']))
        return (key, credential)

    def _write(self):
        """Write the cached data back out.

        The multistore must be locked.
        """
        raw_data = {'file_version': 1}
        raw_creds = []
        raw_data['data'] = raw_creds
        for (cred_key, cred) in self._data.items():
            raw_key = dict(cred_key)
            raw_cred = json.loads(cred.to_json())
            raw_creds.append({'key': raw_key, 'credential': raw_cred})
        self._locked_json_write(raw_data)

    def _get_all_credential_keys(self):
        """Gets all the registered credential keys in the multistore.

        Returns:
            A list of dictionaries corresponding to all the keys currently
            registered
        """
        return [dict(key) for key in self._data.keys()]

    def _get_credential(self, key):
        """Get a credential from the multistore.

        The multistore must be locked.

        Args:
            key: The key used to retrieve the credential

        Returns:
            The credential specified or None if not present
        """
        return self._data.get(key, None)

    def _update_credential(self, key, cred):
        """Update a credential and write the multistore.

        This must be called when the multistore is locked.

        Args:
            key: The key used to retrieve the credential
            cred: The OAuth2Credential to update/set
        """
        self._data[key] = cred
        self._write()

    def _delete_credential(self, key):
        """Delete a credential and write the multistore.

        This must be called when the multistore is locked.

        Args:
            key: The key used to retrieve the credential
        """
        try:
            del self._data[key]
        except KeyError:
            # Deleting a missing credential is a no-op.
            pass
        self._write()

    def _get_storage(self, key):
        """Get a Storage object to get/set a credential.

        This Storage is a 'view' into the multistore.

        Args:
            key: The key used to retrieve the credential

        Returns:
            A Storage object that can be used to get/set this cred
        """
        return self._Storage(self, key)
++++++ util.py ++++++
# Copyright 2014 Google Inc. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

"""Common utility library."""

import functools
import inspect
import logging

import six
from six.moves import urllib


# Module authorship metadata. NOTE(review): the addresses look truncated
# by the mailing-list extraction this file came from -- confirm originals.
__author__ = [
    'ra...@google.com (Rafe Kaplan)',
    'gu...@google.com (Guido van Rossum)',
]

# Public API of this utility module.
__all__ = [
    'positional',
    'POSITIONAL_WARNING',
    'POSITIONAL_EXCEPTION',
    'POSITIONAL_IGNORE',
]

# Module-level logger used by the positional() enforcement below.
logger = logging.getLogger(__name__)

# Enforcement modes for the positional() decorator: log a warning, raise
# a TypeError, or silently ignore violations, respectively.
POSITIONAL_WARNING = 'WARNING'
POSITIONAL_EXCEPTION = 'EXCEPTION'
POSITIONAL_IGNORE = 'IGNORE'
# The set of all valid enforcement modes.
POSITIONAL_SET = frozenset([POSITIONAL_WARNING, POSITIONAL_EXCEPTION,
                            POSITIONAL_IGNORE])

# Global switch controlling positional() behavior; defaults to warning.
positional_parameters_enforcement = POSITIONAL_WARNING


def positional(max_positional_args):
    """A decorator to declare that only the first N arguments may be positional.

    This decorator makes it easy to support Python 3 style keyword-only
    parameters. For example, in Python 3 it is possible to write::

        def fn(pos1, *, kwonly1=None, kwonly2=None):
            ...

    All named parameters after ``*`` must be a keyword::

        fn(10, 'kw1', 'kw2')  # Raises exception.
        fn(10, kwonly1='kw1')  # Ok.

    Example
    ^^^^^^^

    To define a function like above, do::

        @positional(1)
        def fn(pos1, kwonly1=None, kwonly2=None):
            ...

    If no default value is provided to a keyword argument, it becomes a
    required keyword argument::

        @positional(0)
        def fn(required_kw):
            ...

    This must be called with the keyword parameter::

        fn()  # Raises exception.
        fn(10)  # Raises exception.
        fn(required_kw=10)  # Ok.

    When defining instance or class methods always remember to account for
    ``self`` and ``cls``::

        class MyClass(object):

            @positional(2)
            def my_method(self, pos1, kwonly1=None):
                ...

            @classmethod
            @positional(2)
            def my_method(cls, pos1, kwonly1=None):
                ...

    The positional decorator behavior is controlled by
    ``util.positional_parameters_enforcement``, which may be set to
    ``POSITIONAL_EXCEPTION``, ``POSITIONAL_WARNING`` or
    ``POSITIONAL_IGNORE`` to raise an exception, log a warning, or do
    nothing, respectively, if a declaration is violated.

    Args:
        max_positional_args: Maximum number of positional arguments. All
                             parameters after this index must be keyword
                             only. May also be the function itself when
                             used as a bare ``@positional`` decorator.

    Returns:
        A decorator that prevents using arguments after max_positional_args
        from being used as positional parameters.

    Raises:
        TypeError: if a key-word only argument is provided as a positional
                   parameter, but only if
                   util.positional_parameters_enforcement is set to
                   POSITIONAL_EXCEPTION.
    """

    def positional_decorator(wrapped):
        # Wrap ``wrapped`` so a call with too many positional arguments is
        # reported according to positional_parameters_enforcement.
        @functools.wraps(wrapped)
        def positional_wrapper(*args, **kwargs):
            if len(args) > max_positional_args:
                plural_s = ''
                if max_positional_args != 1:
                    plural_s = 's'
                message = ('{function}() takes at most {args_max} positional '
                           'argument{plural} ({args_given} given)'.format(
                               function=wrapped.__name__,
                               args_max=max_positional_args,
                               args_given=len(args),
                               plural=plural_s))
                if positional_parameters_enforcement == POSITIONAL_EXCEPTION:
                    raise TypeError(message)
                elif positional_parameters_enforcement == POSITIONAL_WARNING:
                    logger.warning(message)
            return wrapped(*args, **kwargs)
        return positional_wrapper

    if isinstance(max_positional_args, six.integer_types):
        return positional_decorator
    else:
        # Used directly as ``@positional`` (no argument): every parameter
        # with a default value becomes keyword-only.
        if hasattr(inspect, 'getfullargspec'):
            # inspect.getargspec was removed in Python 3.11.
            spec = inspect.getfullargspec(max_positional_args)
        else:  # Python 2 fallback.
            spec = inspect.getargspec(max_positional_args)
        args, defaults = spec[0], spec[3]
        # defaults is None (not an empty sequence) when the function has no
        # defaulted parameters; guard against len(None).
        defaults = defaults or ()
        return positional(len(args) - len(defaults))(max_positional_args)


def scopes_to_string(scopes):
    """Converts scope value to a string.

    If scopes is a string then it is simply passed through. If scopes is an
    iterable then a string is returned that is all the individual scopes
    concatenated with spaces.

    Args:
        scopes: string or iterable of strings, the scopes.

    Returns:
        The scopes formatted as a single string.
    """
    if not isinstance(scopes, six.string_types):
        return ' '.join(scopes)
    return scopes


def string_to_scopes(scopes):
    """Converts a stringified scope value to a list.

    If scopes is a list then it is simply passed through. If scopes is a
    string then a list of each individual scope is returned.

    Args:
        scopes: a string or iterable of strings, the scopes.

    Returns:
        The scopes in a list.
    """
    if not scopes:
        return []
    if isinstance(scopes, six.string_types):
        return scopes.split(' ')
    return scopes


def _add_query_parameter(url, name, value):
    """Adds a query parameter to a url.

    Replaces the current value if it already exists in the URL.

    Args:
        url: string, url to add the query parameter to.
        name: string, query parameter name.
        value: string, query parameter value.

    Returns:
        Updated query parameter. Does not update the url if value is None.
    """
    if value is None:
        return url
    else:
        parsed = list(urllib.parse.urlparse(url))
        q = dict(urllib.parse.parse_qsl(parsed[4]))
        q[name] = value
        parsed[4] = urllib.parse.urlencode(q)
        return urllib.parse.urlunparse(parsed)

Reply via email to