http://git-wip-us.apache.org/repos/asf/incubator-senssoft-tap/blob/6a81d1e7/env2/lib/python2.7/site-packages/pip/_vendor/requests/packages/urllib3/packages/ordered_dict.py
----------------------------------------------------------------------
diff --git a/env2/lib/python2.7/site-packages/pip/_vendor/requests/packages/urllib3/packages/ordered_dict.py b/env2/lib/python2.7/site-packages/pip/_vendor/requests/packages/urllib3/packages/ordered_dict.py
deleted file mode 100644
index 4479363..0000000
--- a/env2/lib/python2.7/site-packages/pip/_vendor/requests/packages/urllib3/packages/ordered_dict.py
+++ /dev/null
@@ -1,259 +0,0 @@
-# Backport of OrderedDict() class that runs on Python 2.4, 2.5, 2.6, 2.7 and pypy.
-# Passes Python2.7's test suite and incorporates all the latest updates.
-# Copyright 2009 Raymond Hettinger, released under the MIT License.
-# http://code.activestate.com/recipes/576693/
-try:
-    from thread import get_ident as _get_ident
-except ImportError:
-    from dummy_thread import get_ident as _get_ident
-
-try:
-    from _abcoll import KeysView, ValuesView, ItemsView
-except ImportError:
-    pass
-
-
-class OrderedDict(dict):
-    'Dictionary that remembers insertion order'
-    # An inherited dict maps keys to values.
-    # The inherited dict provides __getitem__, __len__, __contains__, and get.
-    # The remaining methods are order-aware.
-    # Big-O running times for all methods are the same as for regular dictionaries.
-
-    # The internal self.__map dictionary maps keys to links in a doubly linked list.
-    # The circular doubly linked list starts and ends with a sentinel element.
-    # The sentinel element never gets deleted (this simplifies the algorithm).
-    # Each link is stored as a list of length three:  [PREV, NEXT, KEY].
-
-    def __init__(self, *args, **kwds):
-        '''Initialize an ordered dictionary.  Signature is the same as for
-        regular dictionaries, but keyword arguments are not recommended
-        because their insertion order is arbitrary.
-
-        '''
-        if len(args) > 1:
-            raise TypeError('expected at most 1 arguments, got %d' % len(args))
-        try:
-            self.__root
-        except AttributeError:
-            self.__root = root = []                     # sentinel node
-            root[:] = [root, root, None]
-            self.__map = {}
-        self.__update(*args, **kwds)
-
-    def __setitem__(self, key, value, dict_setitem=dict.__setitem__):
-        'od.__setitem__(i, y) <==> od[i]=y'
-        # Setting a new item creates a new link which goes at the end of the linked
-        # list, and the inherited dictionary is updated with the new key/value pair.
-        if key not in self:
-            root = self.__root
-            last = root[0]
-            last[1] = root[0] = self.__map[key] = [last, root, key]
-        dict_setitem(self, key, value)
-
-    def __delitem__(self, key, dict_delitem=dict.__delitem__):
-        'od.__delitem__(y) <==> del od[y]'
-        # Deleting an existing item uses self.__map to find the link which is
-        # then removed by updating the links in the predecessor and successor nodes.
-        dict_delitem(self, key)
-        link_prev, link_next, key = self.__map.pop(key)
-        link_prev[1] = link_next
-        link_next[0] = link_prev
-
-    def __iter__(self):
-        'od.__iter__() <==> iter(od)'
-        root = self.__root
-        curr = root[1]
-        while curr is not root:
-            yield curr[2]
-            curr = curr[1]
-
-    def __reversed__(self):
-        'od.__reversed__() <==> reversed(od)'
-        root = self.__root
-        curr = root[0]
-        while curr is not root:
-            yield curr[2]
-            curr = curr[0]
-
-    def clear(self):
-        'od.clear() -> None.  Remove all items from od.'
-        try:
-            for node in self.__map.itervalues():
-                del node[:]
-            root = self.__root
-            root[:] = [root, root, None]
-            self.__map.clear()
-        except AttributeError:
-            pass
-        dict.clear(self)
-
-    def popitem(self, last=True):
-        '''od.popitem() -> (k, v), return and remove a (key, value) pair.
-        Pairs are returned in LIFO order if last is true or FIFO order if false.
-
-        '''
-        if not self:
-            raise KeyError('dictionary is empty')
-        root = self.__root
-        if last:
-            link = root[0]
-            link_prev = link[0]
-            link_prev[1] = root
-            root[0] = link_prev
-        else:
-            link = root[1]
-            link_next = link[1]
-            root[1] = link_next
-            link_next[0] = root
-        key = link[2]
-        del self.__map[key]
-        value = dict.pop(self, key)
-        return key, value
-
-    # -- the following methods do not depend on the internal structure --
-
-    def keys(self):
-        'od.keys() -> list of keys in od'
-        return list(self)
-
-    def values(self):
-        'od.values() -> list of values in od'
-        return [self[key] for key in self]
-
-    def items(self):
-        'od.items() -> list of (key, value) pairs in od'
-        return [(key, self[key]) for key in self]
-
-    def iterkeys(self):
-        'od.iterkeys() -> an iterator over the keys in od'
-        return iter(self)
-
-    def itervalues(self):
-        'od.itervalues -> an iterator over the values in od'
-        for k in self:
-            yield self[k]
-
-    def iteritems(self):
-        'od.iteritems -> an iterator over the (key, value) items in od'
-        for k in self:
-            yield (k, self[k])
-
-    def update(*args, **kwds):
-        '''od.update(E, **F) -> None.  Update od from dict/iterable E and F.
-
-        If E is a dict instance, does:           for k in E: od[k] = E[k]
-        If E has a .keys() method, does:         for k in E.keys(): od[k] = E[k]
-        Or if E is an iterable of items, does:   for k, v in E: od[k] = v
-        In either case, this is followed by:     for k, v in F.items(): od[k] = v
-
-        '''
-        if len(args) > 2:
-            raise TypeError('update() takes at most 2 positional '
-                            'arguments (%d given)' % (len(args),))
-        elif not args:
-            raise TypeError('update() takes at least 1 argument (0 given)')
-        self = args[0]
-        # Make progressively weaker assumptions about "other"
-        other = ()
-        if len(args) == 2:
-            other = args[1]
-        if isinstance(other, dict):
-            for key in other:
-                self[key] = other[key]
-        elif hasattr(other, 'keys'):
-            for key in other.keys():
-                self[key] = other[key]
-        else:
-            for key, value in other:
-                self[key] = value
-        for key, value in kwds.items():
-            self[key] = value
-
-    __update = update  # let subclasses override update without breaking __init__
-
-    __marker = object()
-
-    def pop(self, key, default=__marker):
-        '''od.pop(k[,d]) -> v, remove specified key and return the corresponding value.
-        If key is not found, d is returned if given, otherwise KeyError is raised.
-
-        '''
-        if key in self:
-            result = self[key]
-            del self[key]
-            return result
-        if default is self.__marker:
-            raise KeyError(key)
-        return default
-
-    def setdefault(self, key, default=None):
-        'od.setdefault(k[,d]) -> od.get(k,d), also set od[k]=d if k not in od'
-        if key in self:
-            return self[key]
-        self[key] = default
-        return default
-
-    def __repr__(self, _repr_running={}):
-        'od.__repr__() <==> repr(od)'
-        call_key = id(self), _get_ident()
-        if call_key in _repr_running:
-            return '...'
-        _repr_running[call_key] = 1
-        try:
-            if not self:
-                return '%s()' % (self.__class__.__name__,)
-            return '%s(%r)' % (self.__class__.__name__, self.items())
-        finally:
-            del _repr_running[call_key]
-
-    def __reduce__(self):
-        'Return state information for pickling'
-        items = [[k, self[k]] for k in self]
-        inst_dict = vars(self).copy()
-        for k in vars(OrderedDict()):
-            inst_dict.pop(k, None)
-        if inst_dict:
-            return (self.__class__, (items,), inst_dict)
-        return self.__class__, (items,)
-
-    def copy(self):
-        'od.copy() -> a shallow copy of od'
-        return self.__class__(self)
-
-    @classmethod
-    def fromkeys(cls, iterable, value=None):
-        '''OD.fromkeys(S[, v]) -> New ordered dictionary with keys from S
-        and values equal to v (which defaults to None).
-
-        '''
-        d = cls()
-        for key in iterable:
-            d[key] = value
-        return d
-
-    def __eq__(self, other):
-        '''od.__eq__(y) <==> od==y.  Comparison to another OD is order-sensitive
-        while comparison to a regular mapping is order-insensitive.
-
-        '''
-        if isinstance(other, OrderedDict):
-            return len(self)==len(other) and self.items() == other.items()
-        return dict.__eq__(self, other)
-
-    def __ne__(self, other):
-        return not self == other
-
-    # -- the following methods are only used in Python 2.7 --
-
-    def viewkeys(self):
-        "od.viewkeys() -> a set-like object providing a view on od's keys"
-        return KeysView(self)
-
-    def viewvalues(self):
-        "od.viewvalues() -> an object providing a view on od's values"
-        return ValuesView(self)
-
-    def viewitems(self):
-        "od.viewitems() -> a set-like object providing a view on od's items"
-        return ItemsView(self)

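For context on the file being removed: this backport mirrors the stdlib collections.OrderedDict API, so a minimal usage sketch of the ordering behaviour its docstrings describe is below. The vendored import path is the one deleted above and only resolves in trees that still carry it; collections.OrderedDict is the drop-in fallback.

    # Minimal sketch, assuming either the vendored path above or the stdlib class.
    try:
        from pip._vendor.requests.packages.urllib3.packages.ordered_dict import OrderedDict
    except ImportError:
        from collections import OrderedDict  # equivalent API on modern Pythons

    od = OrderedDict()
    od['b'] = 1
    od['a'] = 2
    od['c'] = 3
    print(list(od))                # ['b', 'a', 'c'] -- insertion order preserved
    print(od.popitem())            # ('c', 3)  -- LIFO by default
    print(od.popitem(last=False))  # ('b', 1)  -- FIFO when last=False
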
http://git-wip-us.apache.org/repos/asf/incubator-senssoft-tap/blob/6a81d1e7/env2/lib/python2.7/site-packages/pip/_vendor/requests/packages/urllib3/packages/six.py
----------------------------------------------------------------------
diff --git a/env2/lib/python2.7/site-packages/pip/_vendor/requests/packages/urllib3/packages/six.py b/env2/lib/python2.7/site-packages/pip/_vendor/requests/packages/urllib3/packages/six.py
deleted file mode 100644
index 190c023..0000000
--- a/env2/lib/python2.7/site-packages/pip/_vendor/requests/packages/urllib3/packages/six.py
+++ /dev/null
@@ -1,868 +0,0 @@
-"""Utilities for writing code that runs on Python 2 and 3"""
-
-# Copyright (c) 2010-2015 Benjamin Peterson
-#
-# Permission is hereby granted, free of charge, to any person obtaining a copy
-# of this software and associated documentation files (the "Software"), to deal
-# in the Software without restriction, including without limitation the rights
-# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
-# copies of the Software, and to permit persons to whom the Software is
-# furnished to do so, subject to the following conditions:
-#
-# The above copyright notice and this permission notice shall be included in all
-# copies or substantial portions of the Software.
-#
-# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
-# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
-# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
-# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
-# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
-# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
-# SOFTWARE.
-
-from __future__ import absolute_import
-
-import functools
-import itertools
-import operator
-import sys
-import types
-
-__author__ = "Benjamin Peterson <benja...@python.org>"
-__version__ = "1.10.0"
-
-
-# Useful for very coarse version differentiation.
-PY2 = sys.version_info[0] == 2
-PY3 = sys.version_info[0] == 3
-PY34 = sys.version_info[0:2] >= (3, 4)
-
-if PY3:
-    string_types = str,
-    integer_types = int,
-    class_types = type,
-    text_type = str
-    binary_type = bytes
-
-    MAXSIZE = sys.maxsize
-else:
-    string_types = basestring,
-    integer_types = (int, long)
-    class_types = (type, types.ClassType)
-    text_type = unicode
-    binary_type = str
-
-    if sys.platform.startswith("java"):
-        # Jython always uses 32 bits.
-        MAXSIZE = int((1 << 31) - 1)
-    else:
-        # It's possible to have sizeof(long) != sizeof(Py_ssize_t).
-        class X(object):
-
-            def __len__(self):
-                return 1 << 31
-        try:
-            len(X())
-        except OverflowError:
-            # 32-bit
-            MAXSIZE = int((1 << 31) - 1)
-        else:
-            # 64-bit
-            MAXSIZE = int((1 << 63) - 1)
-        del X
-
-
-def _add_doc(func, doc):
-    """Add documentation to a function."""
-    func.__doc__ = doc
-
-
-def _import_module(name):
-    """Import module, returning the module after the last dot."""
-    __import__(name)
-    return sys.modules[name]
-
-
-class _LazyDescr(object):
-
-    def __init__(self, name):
-        self.name = name
-
-    def __get__(self, obj, tp):
-        result = self._resolve()
-        setattr(obj, self.name, result)  # Invokes __set__.
-        try:
-            # This is a bit ugly, but it avoids running this again by
-            # removing this descriptor.
-            delattr(obj.__class__, self.name)
-        except AttributeError:
-            pass
-        return result
-
-
-class MovedModule(_LazyDescr):
-
-    def __init__(self, name, old, new=None):
-        super(MovedModule, self).__init__(name)
-        if PY3:
-            if new is None:
-                new = name
-            self.mod = new
-        else:
-            self.mod = old
-
-    def _resolve(self):
-        return _import_module(self.mod)
-
-    def __getattr__(self, attr):
-        _module = self._resolve()
-        value = getattr(_module, attr)
-        setattr(self, attr, value)
-        return value
-
-
-class _LazyModule(types.ModuleType):
-
-    def __init__(self, name):
-        super(_LazyModule, self).__init__(name)
-        self.__doc__ = self.__class__.__doc__
-
-    def __dir__(self):
-        attrs = ["__doc__", "__name__"]
-        attrs += [attr.name for attr in self._moved_attributes]
-        return attrs
-
-    # Subclasses should override this
-    _moved_attributes = []
-
-
-class MovedAttribute(_LazyDescr):
-
-    def __init__(self, name, old_mod, new_mod, old_attr=None, new_attr=None):
-        super(MovedAttribute, self).__init__(name)
-        if PY3:
-            if new_mod is None:
-                new_mod = name
-            self.mod = new_mod
-            if new_attr is None:
-                if old_attr is None:
-                    new_attr = name
-                else:
-                    new_attr = old_attr
-            self.attr = new_attr
-        else:
-            self.mod = old_mod
-            if old_attr is None:
-                old_attr = name
-            self.attr = old_attr
-
-    def _resolve(self):
-        module = _import_module(self.mod)
-        return getattr(module, self.attr)
-
-
-class _SixMetaPathImporter(object):
-
-    """
-    A meta path importer to import six.moves and its submodules.
-
-    This class implements a PEP302 finder and loader. It should be compatible
-    with Python 2.5 and all existing versions of Python3
-    """
-
-    def __init__(self, six_module_name):
-        self.name = six_module_name
-        self.known_modules = {}
-
-    def _add_module(self, mod, *fullnames):
-        for fullname in fullnames:
-            self.known_modules[self.name + "." + fullname] = mod
-
-    def _get_module(self, fullname):
-        return self.known_modules[self.name + "." + fullname]
-
-    def find_module(self, fullname, path=None):
-        if fullname in self.known_modules:
-            return self
-        return None
-
-    def __get_module(self, fullname):
-        try:
-            return self.known_modules[fullname]
-        except KeyError:
-            raise ImportError("This loader does not know module " + fullname)
-
-    def load_module(self, fullname):
-        try:
-            # in case of a reload
-            return sys.modules[fullname]
-        except KeyError:
-            pass
-        mod = self.__get_module(fullname)
-        if isinstance(mod, MovedModule):
-            mod = mod._resolve()
-        else:
-            mod.__loader__ = self
-        sys.modules[fullname] = mod
-        return mod
-
-    def is_package(self, fullname):
-        """
-        Return true, if the named module is a package.
-
-        We need this method to get correct spec objects with
-        Python 3.4 (see PEP451)
-        """
-        return hasattr(self.__get_module(fullname), "__path__")
-
-    def get_code(self, fullname):
-        """Return None
-
-        Required, if is_package is implemented"""
-        self.__get_module(fullname)  # eventually raises ImportError
-        return None
-    get_source = get_code  # same as get_code
-
-_importer = _SixMetaPathImporter(__name__)
-
-
-class _MovedItems(_LazyModule):
-
-    """Lazy loading of moved objects"""
-    __path__ = []  # mark as package
-
-
-_moved_attributes = [
-    MovedAttribute("cStringIO", "cStringIO", "io", "StringIO"),
-    MovedAttribute("filter", "itertools", "builtins", "ifilter", "filter"),
-    MovedAttribute("filterfalse", "itertools", "itertools", "ifilterfalse", 
"filterfalse"),
-    MovedAttribute("input", "__builtin__", "builtins", "raw_input", "input"),
-    MovedAttribute("intern", "__builtin__", "sys"),
-    MovedAttribute("map", "itertools", "builtins", "imap", "map"),
-    MovedAttribute("getcwd", "os", "os", "getcwdu", "getcwd"),
-    MovedAttribute("getcwdb", "os", "os", "getcwd", "getcwdb"),
-    MovedAttribute("range", "__builtin__", "builtins", "xrange", "range"),
-    MovedAttribute("reload_module", "__builtin__", "importlib" if PY34 else 
"imp", "reload"),
-    MovedAttribute("reduce", "__builtin__", "functools"),
-    MovedAttribute("shlex_quote", "pipes", "shlex", "quote"),
-    MovedAttribute("StringIO", "StringIO", "io"),
-    MovedAttribute("UserDict", "UserDict", "collections"),
-    MovedAttribute("UserList", "UserList", "collections"),
-    MovedAttribute("UserString", "UserString", "collections"),
-    MovedAttribute("xrange", "__builtin__", "builtins", "xrange", "range"),
-    MovedAttribute("zip", "itertools", "builtins", "izip", "zip"),
-    MovedAttribute("zip_longest", "itertools", "itertools", "izip_longest", 
"zip_longest"),
-    MovedModule("builtins", "__builtin__"),
-    MovedModule("configparser", "ConfigParser"),
-    MovedModule("copyreg", "copy_reg"),
-    MovedModule("dbm_gnu", "gdbm", "dbm.gnu"),
-    MovedModule("_dummy_thread", "dummy_thread", "_dummy_thread"),
-    MovedModule("http_cookiejar", "cookielib", "http.cookiejar"),
-    MovedModule("http_cookies", "Cookie", "http.cookies"),
-    MovedModule("html_entities", "htmlentitydefs", "html.entities"),
-    MovedModule("html_parser", "HTMLParser", "html.parser"),
-    MovedModule("http_client", "httplib", "http.client"),
-    MovedModule("email_mime_multipart", "email.MIMEMultipart", 
"email.mime.multipart"),
-    MovedModule("email_mime_nonmultipart", "email.MIMENonMultipart", 
"email.mime.nonmultipart"),
-    MovedModule("email_mime_text", "email.MIMEText", "email.mime.text"),
-    MovedModule("email_mime_base", "email.MIMEBase", "email.mime.base"),
-    MovedModule("BaseHTTPServer", "BaseHTTPServer", "http.server"),
-    MovedModule("CGIHTTPServer", "CGIHTTPServer", "http.server"),
-    MovedModule("SimpleHTTPServer", "SimpleHTTPServer", "http.server"),
-    MovedModule("cPickle", "cPickle", "pickle"),
-    MovedModule("queue", "Queue"),
-    MovedModule("reprlib", "repr"),
-    MovedModule("socketserver", "SocketServer"),
-    MovedModule("_thread", "thread", "_thread"),
-    MovedModule("tkinter", "Tkinter"),
-    MovedModule("tkinter_dialog", "Dialog", "tkinter.dialog"),
-    MovedModule("tkinter_filedialog", "FileDialog", "tkinter.filedialog"),
-    MovedModule("tkinter_scrolledtext", "ScrolledText", 
"tkinter.scrolledtext"),
-    MovedModule("tkinter_simpledialog", "SimpleDialog", 
"tkinter.simpledialog"),
-    MovedModule("tkinter_tix", "Tix", "tkinter.tix"),
-    MovedModule("tkinter_ttk", "ttk", "tkinter.ttk"),
-    MovedModule("tkinter_constants", "Tkconstants", "tkinter.constants"),
-    MovedModule("tkinter_dnd", "Tkdnd", "tkinter.dnd"),
-    MovedModule("tkinter_colorchooser", "tkColorChooser",
-                "tkinter.colorchooser"),
-    MovedModule("tkinter_commondialog", "tkCommonDialog",
-                "tkinter.commondialog"),
-    MovedModule("tkinter_tkfiledialog", "tkFileDialog", "tkinter.filedialog"),
-    MovedModule("tkinter_font", "tkFont", "tkinter.font"),
-    MovedModule("tkinter_messagebox", "tkMessageBox", "tkinter.messagebox"),
-    MovedModule("tkinter_tksimpledialog", "tkSimpleDialog",
-                "tkinter.simpledialog"),
-    MovedModule("urllib_parse", __name__ + ".moves.urllib_parse", 
"urllib.parse"),
-    MovedModule("urllib_error", __name__ + ".moves.urllib_error", 
"urllib.error"),
-    MovedModule("urllib", __name__ + ".moves.urllib", __name__ + 
".moves.urllib"),
-    MovedModule("urllib_robotparser", "robotparser", "urllib.robotparser"),
-    MovedModule("xmlrpc_client", "xmlrpclib", "xmlrpc.client"),
-    MovedModule("xmlrpc_server", "SimpleXMLRPCServer", "xmlrpc.server"),
-]
-# Add windows specific modules.
-if sys.platform == "win32":
-    _moved_attributes += [
-        MovedModule("winreg", "_winreg"),
-    ]
-
-for attr in _moved_attributes:
-    setattr(_MovedItems, attr.name, attr)
-    if isinstance(attr, MovedModule):
-        _importer._add_module(attr, "moves." + attr.name)
-del attr
-
-_MovedItems._moved_attributes = _moved_attributes
-
-moves = _MovedItems(__name__ + ".moves")
-_importer._add_module(moves, "moves")
-
-
-class Module_six_moves_urllib_parse(_LazyModule):
-
-    """Lazy loading of moved objects in six.moves.urllib_parse"""
-
-
-_urllib_parse_moved_attributes = [
-    MovedAttribute("ParseResult", "urlparse", "urllib.parse"),
-    MovedAttribute("SplitResult", "urlparse", "urllib.parse"),
-    MovedAttribute("parse_qs", "urlparse", "urllib.parse"),
-    MovedAttribute("parse_qsl", "urlparse", "urllib.parse"),
-    MovedAttribute("urldefrag", "urlparse", "urllib.parse"),
-    MovedAttribute("urljoin", "urlparse", "urllib.parse"),
-    MovedAttribute("urlparse", "urlparse", "urllib.parse"),
-    MovedAttribute("urlsplit", "urlparse", "urllib.parse"),
-    MovedAttribute("urlunparse", "urlparse", "urllib.parse"),
-    MovedAttribute("urlunsplit", "urlparse", "urllib.parse"),
-    MovedAttribute("quote", "urllib", "urllib.parse"),
-    MovedAttribute("quote_plus", "urllib", "urllib.parse"),
-    MovedAttribute("unquote", "urllib", "urllib.parse"),
-    MovedAttribute("unquote_plus", "urllib", "urllib.parse"),
-    MovedAttribute("urlencode", "urllib", "urllib.parse"),
-    MovedAttribute("splitquery", "urllib", "urllib.parse"),
-    MovedAttribute("splittag", "urllib", "urllib.parse"),
-    MovedAttribute("splituser", "urllib", "urllib.parse"),
-    MovedAttribute("uses_fragment", "urlparse", "urllib.parse"),
-    MovedAttribute("uses_netloc", "urlparse", "urllib.parse"),
-    MovedAttribute("uses_params", "urlparse", "urllib.parse"),
-    MovedAttribute("uses_query", "urlparse", "urllib.parse"),
-    MovedAttribute("uses_relative", "urlparse", "urllib.parse"),
-]
-for attr in _urllib_parse_moved_attributes:
-    setattr(Module_six_moves_urllib_parse, attr.name, attr)
-del attr
-
-Module_six_moves_urllib_parse._moved_attributes = _urllib_parse_moved_attributes
-
-_importer._add_module(Module_six_moves_urllib_parse(__name__ + ".moves.urllib_parse"),
-                      "moves.urllib_parse", "moves.urllib.parse")
-
-
-class Module_six_moves_urllib_error(_LazyModule):
-
-    """Lazy loading of moved objects in six.moves.urllib_error"""
-
-
-_urllib_error_moved_attributes = [
-    MovedAttribute("URLError", "urllib2", "urllib.error"),
-    MovedAttribute("HTTPError", "urllib2", "urllib.error"),
-    MovedAttribute("ContentTooShortError", "urllib", "urllib.error"),
-]
-for attr in _urllib_error_moved_attributes:
-    setattr(Module_six_moves_urllib_error, attr.name, attr)
-del attr
-
-Module_six_moves_urllib_error._moved_attributes = _urllib_error_moved_attributes
-
-_importer._add_module(Module_six_moves_urllib_error(__name__ + ".moves.urllib.error"),
-                      "moves.urllib_error", "moves.urllib.error")
-
-
-class Module_six_moves_urllib_request(_LazyModule):
-
-    """Lazy loading of moved objects in six.moves.urllib_request"""
-
-
-_urllib_request_moved_attributes = [
-    MovedAttribute("urlopen", "urllib2", "urllib.request"),
-    MovedAttribute("install_opener", "urllib2", "urllib.request"),
-    MovedAttribute("build_opener", "urllib2", "urllib.request"),
-    MovedAttribute("pathname2url", "urllib", "urllib.request"),
-    MovedAttribute("url2pathname", "urllib", "urllib.request"),
-    MovedAttribute("getproxies", "urllib", "urllib.request"),
-    MovedAttribute("Request", "urllib2", "urllib.request"),
-    MovedAttribute("OpenerDirector", "urllib2", "urllib.request"),
-    MovedAttribute("HTTPDefaultErrorHandler", "urllib2", "urllib.request"),
-    MovedAttribute("HTTPRedirectHandler", "urllib2", "urllib.request"),
-    MovedAttribute("HTTPCookieProcessor", "urllib2", "urllib.request"),
-    MovedAttribute("ProxyHandler", "urllib2", "urllib.request"),
-    MovedAttribute("BaseHandler", "urllib2", "urllib.request"),
-    MovedAttribute("HTTPPasswordMgr", "urllib2", "urllib.request"),
-    MovedAttribute("HTTPPasswordMgrWithDefaultRealm", "urllib2", 
"urllib.request"),
-    MovedAttribute("AbstractBasicAuthHandler", "urllib2", "urllib.request"),
-    MovedAttribute("HTTPBasicAuthHandler", "urllib2", "urllib.request"),
-    MovedAttribute("ProxyBasicAuthHandler", "urllib2", "urllib.request"),
-    MovedAttribute("AbstractDigestAuthHandler", "urllib2", "urllib.request"),
-    MovedAttribute("HTTPDigestAuthHandler", "urllib2", "urllib.request"),
-    MovedAttribute("ProxyDigestAuthHandler", "urllib2", "urllib.request"),
-    MovedAttribute("HTTPHandler", "urllib2", "urllib.request"),
-    MovedAttribute("HTTPSHandler", "urllib2", "urllib.request"),
-    MovedAttribute("FileHandler", "urllib2", "urllib.request"),
-    MovedAttribute("FTPHandler", "urllib2", "urllib.request"),
-    MovedAttribute("CacheFTPHandler", "urllib2", "urllib.request"),
-    MovedAttribute("UnknownHandler", "urllib2", "urllib.request"),
-    MovedAttribute("HTTPErrorProcessor", "urllib2", "urllib.request"),
-    MovedAttribute("urlretrieve", "urllib", "urllib.request"),
-    MovedAttribute("urlcleanup", "urllib", "urllib.request"),
-    MovedAttribute("URLopener", "urllib", "urllib.request"),
-    MovedAttribute("FancyURLopener", "urllib", "urllib.request"),
-    MovedAttribute("proxy_bypass", "urllib", "urllib.request"),
-]
-for attr in _urllib_request_moved_attributes:
-    setattr(Module_six_moves_urllib_request, attr.name, attr)
-del attr
-
-Module_six_moves_urllib_request._moved_attributes = _urllib_request_moved_attributes
-
-_importer._add_module(Module_six_moves_urllib_request(__name__ + ".moves.urllib.request"),
-                      "moves.urllib_request", "moves.urllib.request")
-
-
-class Module_six_moves_urllib_response(_LazyModule):
-
-    """Lazy loading of moved objects in six.moves.urllib_response"""
-
-
-_urllib_response_moved_attributes = [
-    MovedAttribute("addbase", "urllib", "urllib.response"),
-    MovedAttribute("addclosehook", "urllib", "urllib.response"),
-    MovedAttribute("addinfo", "urllib", "urllib.response"),
-    MovedAttribute("addinfourl", "urllib", "urllib.response"),
-]
-for attr in _urllib_response_moved_attributes:
-    setattr(Module_six_moves_urllib_response, attr.name, attr)
-del attr
-
-Module_six_moves_urllib_response._moved_attributes = _urllib_response_moved_attributes
-
-_importer._add_module(Module_six_moves_urllib_response(__name__ + ".moves.urllib.response"),
-                      "moves.urllib_response", "moves.urllib.response")
-
-
-class Module_six_moves_urllib_robotparser(_LazyModule):
-
-    """Lazy loading of moved objects in six.moves.urllib_robotparser"""
-
-
-_urllib_robotparser_moved_attributes = [
-    MovedAttribute("RobotFileParser", "robotparser", "urllib.robotparser"),
-]
-for attr in _urllib_robotparser_moved_attributes:
-    setattr(Module_six_moves_urllib_robotparser, attr.name, attr)
-del attr
-
-Module_six_moves_urllib_robotparser._moved_attributes = _urllib_robotparser_moved_attributes
-
-_importer._add_module(Module_six_moves_urllib_robotparser(__name__ + ".moves.urllib.robotparser"),
-                      "moves.urllib_robotparser", "moves.urllib.robotparser")
-
-
-class Module_six_moves_urllib(types.ModuleType):
-
-    """Create a six.moves.urllib namespace that resembles the Python 3 
namespace"""
-    __path__ = []  # mark as package
-    parse = _importer._get_module("moves.urllib_parse")
-    error = _importer._get_module("moves.urllib_error")
-    request = _importer._get_module("moves.urllib_request")
-    response = _importer._get_module("moves.urllib_response")
-    robotparser = _importer._get_module("moves.urllib_robotparser")
-
-    def __dir__(self):
-        return ['parse', 'error', 'request', 'response', 'robotparser']
-
-_importer._add_module(Module_six_moves_urllib(__name__ + ".moves.urllib"),
-                      "moves.urllib")
-
-
-def add_move(move):
-    """Add an item to six.moves."""
-    setattr(_MovedItems, move.name, move)
-
-
-def remove_move(name):
-    """Remove item from six.moves."""
-    try:
-        delattr(_MovedItems, name)
-    except AttributeError:
-        try:
-            del moves.__dict__[name]
-        except KeyError:
-            raise AttributeError("no such move, %r" % (name,))
-
-
-if PY3:
-    _meth_func = "__func__"
-    _meth_self = "__self__"
-
-    _func_closure = "__closure__"
-    _func_code = "__code__"
-    _func_defaults = "__defaults__"
-    _func_globals = "__globals__"
-else:
-    _meth_func = "im_func"
-    _meth_self = "im_self"
-
-    _func_closure = "func_closure"
-    _func_code = "func_code"
-    _func_defaults = "func_defaults"
-    _func_globals = "func_globals"
-
-
-try:
-    advance_iterator = next
-except NameError:
-    def advance_iterator(it):
-        return it.next()
-next = advance_iterator
-
-
-try:
-    callable = callable
-except NameError:
-    def callable(obj):
-        return any("__call__" in klass.__dict__ for klass in type(obj).__mro__)
-
-
-if PY3:
-    def get_unbound_function(unbound):
-        return unbound
-
-    create_bound_method = types.MethodType
-
-    def create_unbound_method(func, cls):
-        return func
-
-    Iterator = object
-else:
-    def get_unbound_function(unbound):
-        return unbound.im_func
-
-    def create_bound_method(func, obj):
-        return types.MethodType(func, obj, obj.__class__)
-
-    def create_unbound_method(func, cls):
-        return types.MethodType(func, None, cls)
-
-    class Iterator(object):
-
-        def next(self):
-            return type(self).__next__(self)
-
-    callable = callable
-_add_doc(get_unbound_function,
-         """Get the function out of a possibly unbound function""")
-
-
-get_method_function = operator.attrgetter(_meth_func)
-get_method_self = operator.attrgetter(_meth_self)
-get_function_closure = operator.attrgetter(_func_closure)
-get_function_code = operator.attrgetter(_func_code)
-get_function_defaults = operator.attrgetter(_func_defaults)
-get_function_globals = operator.attrgetter(_func_globals)
-
-
-if PY3:
-    def iterkeys(d, **kw):
-        return iter(d.keys(**kw))
-
-    def itervalues(d, **kw):
-        return iter(d.values(**kw))
-
-    def iteritems(d, **kw):
-        return iter(d.items(**kw))
-
-    def iterlists(d, **kw):
-        return iter(d.lists(**kw))
-
-    viewkeys = operator.methodcaller("keys")
-
-    viewvalues = operator.methodcaller("values")
-
-    viewitems = operator.methodcaller("items")
-else:
-    def iterkeys(d, **kw):
-        return d.iterkeys(**kw)
-
-    def itervalues(d, **kw):
-        return d.itervalues(**kw)
-
-    def iteritems(d, **kw):
-        return d.iteritems(**kw)
-
-    def iterlists(d, **kw):
-        return d.iterlists(**kw)
-
-    viewkeys = operator.methodcaller("viewkeys")
-
-    viewvalues = operator.methodcaller("viewvalues")
-
-    viewitems = operator.methodcaller("viewitems")
-
-_add_doc(iterkeys, "Return an iterator over the keys of a dictionary.")
-_add_doc(itervalues, "Return an iterator over the values of a dictionary.")
-_add_doc(iteritems,
-         "Return an iterator over the (key, value) pairs of a dictionary.")
-_add_doc(iterlists,
-         "Return an iterator over the (key, [values]) pairs of a dictionary.")
-
-
-if PY3:
-    def b(s):
-        return s.encode("latin-1")
-
-    def u(s):
-        return s
-    unichr = chr
-    import struct
-    int2byte = struct.Struct(">B").pack
-    del struct
-    byte2int = operator.itemgetter(0)
-    indexbytes = operator.getitem
-    iterbytes = iter
-    import io
-    StringIO = io.StringIO
-    BytesIO = io.BytesIO
-    _assertCountEqual = "assertCountEqual"
-    if sys.version_info[1] <= 1:
-        _assertRaisesRegex = "assertRaisesRegexp"
-        _assertRegex = "assertRegexpMatches"
-    else:
-        _assertRaisesRegex = "assertRaisesRegex"
-        _assertRegex = "assertRegex"
-else:
-    def b(s):
-        return s
-    # Workaround for standalone backslash
-
-    def u(s):
-        return unicode(s.replace(r'\\', r'\\\\'), "unicode_escape")
-    unichr = unichr
-    int2byte = chr
-
-    def byte2int(bs):
-        return ord(bs[0])
-
-    def indexbytes(buf, i):
-        return ord(buf[i])
-    iterbytes = functools.partial(itertools.imap, ord)
-    import StringIO
-    StringIO = BytesIO = StringIO.StringIO
-    _assertCountEqual = "assertItemsEqual"
-    _assertRaisesRegex = "assertRaisesRegexp"
-    _assertRegex = "assertRegexpMatches"
-_add_doc(b, """Byte literal""")
-_add_doc(u, """Text literal""")
-
-
-def assertCountEqual(self, *args, **kwargs):
-    return getattr(self, _assertCountEqual)(*args, **kwargs)
-
-
-def assertRaisesRegex(self, *args, **kwargs):
-    return getattr(self, _assertRaisesRegex)(*args, **kwargs)
-
-
-def assertRegex(self, *args, **kwargs):
-    return getattr(self, _assertRegex)(*args, **kwargs)
-
-
-if PY3:
-    exec_ = getattr(moves.builtins, "exec")
-
-    def reraise(tp, value, tb=None):
-        if value is None:
-            value = tp()
-        if value.__traceback__ is not tb:
-            raise value.with_traceback(tb)
-        raise value
-
-else:
-    def exec_(_code_, _globs_=None, _locs_=None):
-        """Execute code in a namespace."""
-        if _globs_ is None:
-            frame = sys._getframe(1)
-            _globs_ = frame.f_globals
-            if _locs_ is None:
-                _locs_ = frame.f_locals
-            del frame
-        elif _locs_ is None:
-            _locs_ = _globs_
-        exec("""exec _code_ in _globs_, _locs_""")
-
-    exec_("""def reraise(tp, value, tb=None):
-    raise tp, value, tb
-""")
-
-
-if sys.version_info[:2] == (3, 2):
-    exec_("""def raise_from(value, from_value):
-    if from_value is None:
-        raise value
-    raise value from from_value
-""")
-elif sys.version_info[:2] > (3, 2):
-    exec_("""def raise_from(value, from_value):
-    raise value from from_value
-""")
-else:
-    def raise_from(value, from_value):
-        raise value
-
-
-print_ = getattr(moves.builtins, "print", None)
-if print_ is None:
-    def print_(*args, **kwargs):
-        """The new-style print function for Python 2.4 and 2.5."""
-        fp = kwargs.pop("file", sys.stdout)
-        if fp is None:
-            return
-
-        def write(data):
-            if not isinstance(data, basestring):
-                data = str(data)
-            # If the file has an encoding, encode unicode with it.
-            if (isinstance(fp, file) and
-                    isinstance(data, unicode) and
-                    fp.encoding is not None):
-                errors = getattr(fp, "errors", None)
-                if errors is None:
-                    errors = "strict"
-                data = data.encode(fp.encoding, errors)
-            fp.write(data)
-        want_unicode = False
-        sep = kwargs.pop("sep", None)
-        if sep is not None:
-            if isinstance(sep, unicode):
-                want_unicode = True
-            elif not isinstance(sep, str):
-                raise TypeError("sep must be None or a string")
-        end = kwargs.pop("end", None)
-        if end is not None:
-            if isinstance(end, unicode):
-                want_unicode = True
-            elif not isinstance(end, str):
-                raise TypeError("end must be None or a string")
-        if kwargs:
-            raise TypeError("invalid keyword arguments to print()")
-        if not want_unicode:
-            for arg in args:
-                if isinstance(arg, unicode):
-                    want_unicode = True
-                    break
-        if want_unicode:
-            newline = unicode("\n")
-            space = unicode(" ")
-        else:
-            newline = "\n"
-            space = " "
-        if sep is None:
-            sep = space
-        if end is None:
-            end = newline
-        for i, arg in enumerate(args):
-            if i:
-                write(sep)
-            write(arg)
-        write(end)
-if sys.version_info[:2] < (3, 3):
-    _print = print_
-
-    def print_(*args, **kwargs):
-        fp = kwargs.get("file", sys.stdout)
-        flush = kwargs.pop("flush", False)
-        _print(*args, **kwargs)
-        if flush and fp is not None:
-            fp.flush()
-
-_add_doc(reraise, """Reraise an exception.""")
-
-if sys.version_info[0:2] < (3, 4):
-    def wraps(wrapped, assigned=functools.WRAPPER_ASSIGNMENTS,
-              updated=functools.WRAPPER_UPDATES):
-        def wrapper(f):
-            f = functools.wraps(wrapped, assigned, updated)(f)
-            f.__wrapped__ = wrapped
-            return f
-        return wrapper
-else:
-    wraps = functools.wraps
-
-
-def with_metaclass(meta, *bases):
-    """Create a base class with a metaclass."""
-    # This requires a bit of explanation: the basic idea is to make a dummy
-    # metaclass for one level of class instantiation that replaces itself with
-    # the actual metaclass.
-    class metaclass(meta):
-
-        def __new__(cls, name, this_bases, d):
-            return meta(name, bases, d)
-    return type.__new__(metaclass, 'temporary_class', (), {})
-
-
-def add_metaclass(metaclass):
-    """Class decorator for creating a class with a metaclass."""
-    def wrapper(cls):
-        orig_vars = cls.__dict__.copy()
-        slots = orig_vars.get('__slots__')
-        if slots is not None:
-            if isinstance(slots, str):
-                slots = [slots]
-            for slots_var in slots:
-                orig_vars.pop(slots_var)
-        orig_vars.pop('__dict__', None)
-        orig_vars.pop('__weakref__', None)
-        return metaclass(cls.__name__, cls.__bases__, orig_vars)
-    return wrapper
-
-
-def python_2_unicode_compatible(klass):
-    """
-    A decorator that defines __unicode__ and __str__ methods under Python 2.
-    Under Python 3 it does nothing.
-
-    To support Python 2 and 3 with a single code base, define a __str__ method
-    returning text and apply this decorator to the class.
-    """
-    if PY2:
-        if '__str__' not in klass.__dict__:
-            raise ValueError("@python_2_unicode_compatible cannot be applied "
-                             "to %s because it doesn't define __str__()." %
-                             klass.__name__)
-        klass.__unicode__ = klass.__str__
-        klass.__str__ = lambda self: self.__unicode__().encode('utf-8')
-    return klass
-
-
-# Complete the moves implementation.
-# This code is at the end of this module to speed up module loading.
-# Turn this module into a package.
-__path__ = []  # required for PEP 302 and PEP 451
-__package__ = __name__  # see PEP 366 @ReservedAssignment
-if globals().get("__spec__") is not None:
-    __spec__.submodule_search_locations = []  # PEP 451 @UndefinedVariable
-# Remove other six meta path importers, since they cause problems. This can
-# happen if six is removed from sys.modules and then reloaded. (Setuptools does
-# this for some reason.)
-if sys.meta_path:
-    for i, importer in enumerate(sys.meta_path):
-        # Here's some real nastiness: Another "instance" of the six module might
-        # be floating around. Therefore, we can't use isinstance() to check for
-        # the six meta path importer, since the other six instance will have
-        # inserted an importer with different class.
-        if (type(importer).__name__ == "_SixMetaPathImporter" and
-                importer.name == __name__):
-            del sys.meta_path[i]
-            break
-    del i, importer
-# Finally, add the importer to the meta path import hook.
-sys.meta_path.append(_importer)

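For context on six.py being removed: it exposes renamed stdlib locations through six.moves and a set of small 2/3 shims (b(), u(), iteritems(), binary_type/text_type, and so on). A short sketch of that style of use, assuming the standalone PyPI six distribution rather than this vendored copy:

    import six
    from six.moves.urllib.parse import urlparse  # urlparse module on Py2, urllib.parse on Py3

    parts = urlparse('http://example.com/path?q=1')
    assert parts.scheme == 'http' and parts.netloc == 'example.com'

    data = six.b('raw bytes')      # byte literal on both major versions
    text = six.u('text literal')   # text literal on both major versions
    assert isinstance(data, six.binary_type)
    assert isinstance(text, six.text_type)

    for key, value in six.iteritems({'answer': 42}):  # dict.iteritems() on Py2, dict.items() on Py3
        print(key, value)
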
http://git-wip-us.apache.org/repos/asf/incubator-senssoft-tap/blob/6a81d1e7/env2/lib/python2.7/site-packages/pip/_vendor/requests/packages/urllib3/packages/ssl_match_hostname/__init__.py
----------------------------------------------------------------------
diff --git a/env2/lib/python2.7/site-packages/pip/_vendor/requests/packages/urllib3/packages/ssl_match_hostname/__init__.py b/env2/lib/python2.7/site-packages/pip/_vendor/requests/packages/urllib3/packages/ssl_match_hostname/__init__.py
deleted file mode 100644
index dd59a75..0000000
--- a/env2/lib/python2.7/site-packages/pip/_vendor/requests/packages/urllib3/packages/ssl_match_hostname/__init__.py
+++ /dev/null
@@ -1,13 +0,0 @@
-try:
-    # Python 3.2+
-    from ssl import CertificateError, match_hostname
-except ImportError:
-    try:
-        # Backport of the function from a pypi module
-        from backports.ssl_match_hostname import CertificateError, match_hostname
-    except ImportError:
-        # Our vendored copy
-        from ._implementation import CertificateError, match_hostname
-
-# Not needed, but documenting what we provide.
-__all__ = ('CertificateError', 'match_hostname')

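The fallback above re-exports CertificateError and match_hostname from whichever backend is available (stdlib ssl, the backports.ssl_match_hostname package, or the vendored _implementation), and callers use the pair the same way regardless of the source. A sketch against the stdlib names, which are deprecated and eventually removed on newer Pythons, so this is illustrative only:

    from ssl import CertificateError, match_hostname  # same names the fallback provides

    cert = {'subjectAltName': (('DNS', 'example.com'),)}  # shape of SSLSocket.getpeercert()
    match_hostname(cert, 'example.com')   # success: returns None
    try:
        match_hostname(cert, 'attacker.test')
    except CertificateError as exc:
        print('mismatch:', exc)
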
http://git-wip-us.apache.org/repos/asf/incubator-senssoft-tap/blob/6a81d1e7/env2/lib/python2.7/site-packages/pip/_vendor/requests/packages/urllib3/packages/ssl_match_hostname/_implementation.py
----------------------------------------------------------------------
diff --git a/env2/lib/python2.7/site-packages/pip/_vendor/requests/packages/urllib3/packages/ssl_match_hostname/_implementation.py b/env2/lib/python2.7/site-packages/pip/_vendor/requests/packages/urllib3/packages/ssl_match_hostname/_implementation.py
deleted file mode 100644
index 52f4287..0000000
--- a/env2/lib/python2.7/site-packages/pip/_vendor/requests/packages/urllib3/packages/ssl_match_hostname/_implementation.py
+++ /dev/null
@@ -1,105 +0,0 @@
-"""The match_hostname() function from Python 3.3.3, essential when using 
SSL."""
-
-# Note: This file is under the PSF license as the code comes from the python
-# stdlib.   http://docs.python.org/3/license.html
-
-import re
-
-__version__ = '3.4.0.2'
-
-class CertificateError(ValueError):
-    pass
-
-
-def _dnsname_match(dn, hostname, max_wildcards=1):
-    """Matching according to RFC 6125, section 6.4.3
-
-    http://tools.ietf.org/html/rfc6125#section-6.4.3
-    """
-    pats = []
-    if not dn:
-        return False
-
-    # Ported from python3-syntax:
-    # leftmost, *remainder = dn.split(r'.')
-    parts = dn.split(r'.')
-    leftmost = parts[0]
-    remainder = parts[1:]
-
-    wildcards = leftmost.count('*')
-    if wildcards > max_wildcards:
-        # Issue #17980: avoid denials of service by refusing more
-        # than one wildcard per fragment.  A survey of established
-        # policy among SSL implementations showed it to be a
-        # reasonable choice.
-        raise CertificateError(
-            "too many wildcards in certificate DNS name: " + repr(dn))
-
-    # speed up common case w/o wildcards
-    if not wildcards:
-        return dn.lower() == hostname.lower()
-
-    # RFC 6125, section 6.4.3, subitem 1.
-    # The client SHOULD NOT attempt to match a presented identifier in which
-    # the wildcard character comprises a label other than the left-most label.
-    if leftmost == '*':
-        # When '*' is a fragment by itself, it matches a non-empty dotless
-        # fragment.
-        pats.append('[^.]+')
-    elif leftmost.startswith('xn--') or hostname.startswith('xn--'):
-        # RFC 6125, section 6.4.3, subitem 3.
-        # The client SHOULD NOT attempt to match a presented identifier
-        # where the wildcard character is embedded within an A-label or
-        # U-label of an internationalized domain name.
-        pats.append(re.escape(leftmost))
-    else:
-        # Otherwise, '*' matches any dotless string, e.g. www*
-        pats.append(re.escape(leftmost).replace(r'\*', '[^.]*'))
-
-    # add the remaining fragments, ignore any wildcards
-    for frag in remainder:
-        pats.append(re.escape(frag))
-
-    pat = re.compile(r'\A' + r'\.'.join(pats) + r'\Z', re.IGNORECASE)
-    return pat.match(hostname)
-
-
-def match_hostname(cert, hostname):
-    """Verify that *cert* (in decoded format as returned by
-    SSLSocket.getpeercert()) matches the *hostname*.  RFC 2818 and RFC 6125
-    rules are followed, but IP addresses are not accepted for *hostname*.
-
-    CertificateError is raised on failure. On success, the function
-    returns nothing.
-    """
-    if not cert:
-        raise ValueError("empty or no certificate")
-    dnsnames = []
-    san = cert.get('subjectAltName', ())
-    for key, value in san:
-        if key == 'DNS':
-            if _dnsname_match(value, hostname):
-                return
-            dnsnames.append(value)
-    if not dnsnames:
-        # The subject is only checked when there is no dNSName entry
-        # in subjectAltName
-        for sub in cert.get('subject', ()):
-            for key, value in sub:
-                # XXX according to RFC 2818, the most specific Common Name
-                # must be used.
-                if key == 'commonName':
-                    if _dnsname_match(value, hostname):
-                        return
-                    dnsnames.append(value)
-    if len(dnsnames) > 1:
-        raise CertificateError("hostname %r "
-            "doesn't match either of %s"
-            % (hostname, ', '.join(map(repr, dnsnames))))
-    elif len(dnsnames) == 1:
-        raise CertificateError("hostname %r "
-            "doesn't match %r"
-            % (hostname, dnsnames[0]))
-    else:
-        raise CertificateError("no appropriate commonName or "
-            "subjectAltName fields were found")

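The wildcard rules that _dnsname_match documents (at most one '*' per left-most fragment, matching a single dotless label) can be exercised end to end through match_hostname. A sketch against the 3.3-era behaviour shown above; newer stdlib versions are stricter and ultimately dropped the function, so treat the exact error messages as illustrative:

    from ssl import CertificateError, match_hostname  # or the vendored _implementation above

    wildcard = {'subjectAltName': (('DNS', '*.example.com'),)}
    match_hostname(wildcard, 'www.example.com')        # '*' matches one dotless label
    try:
        match_hostname(wildcard, 'a.b.example.com')    # two labels: rejected
    except CertificateError as exc:
        print('rejected:', exc)

    greedy = {'subjectAltName': (('DNS', '*.*.example.com'),)}
    try:
        match_hostname(greedy, 'a.b.example.com')      # more than one wildcard per fragment: refused
    except CertificateError as exc:
        print('rejected:', exc)
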
http://git-wip-us.apache.org/repos/asf/incubator-senssoft-tap/blob/6a81d1e7/env2/lib/python2.7/site-packages/pip/_vendor/requests/packages/urllib3/poolmanager.py
----------------------------------------------------------------------
diff --git a/env2/lib/python2.7/site-packages/pip/_vendor/requests/packages/urllib3/poolmanager.py b/env2/lib/python2.7/site-packages/pip/_vendor/requests/packages/urllib3/poolmanager.py
deleted file mode 100644
index 7ed00b1..0000000
--- a/env2/lib/python2.7/site-packages/pip/_vendor/requests/packages/urllib3/poolmanager.py
+++ /dev/null
@@ -1,367 +0,0 @@
-from __future__ import absolute_import
-import collections
-import functools
-import logging
-
-try:  # Python 3
-    from urllib.parse import urljoin
-except ImportError:
-    from urlparse import urljoin
-
-from ._collections import RecentlyUsedContainer
-from .connectionpool import HTTPConnectionPool, HTTPSConnectionPool
-from .connectionpool import port_by_scheme
-from .exceptions import LocationValueError, MaxRetryError, ProxySchemeUnknown
-from .request import RequestMethods
-from .util.url import parse_url
-from .util.retry import Retry
-
-
-__all__ = ['PoolManager', 'ProxyManager', 'proxy_from_url']
-
-
-log = logging.getLogger(__name__)
-
-SSL_KEYWORDS = ('key_file', 'cert_file', 'cert_reqs', 'ca_certs',
-                'ssl_version', 'ca_cert_dir')
-
-# The base fields to use when determining what pool to get a connection from;
-# these do not rely on the ``connection_pool_kw`` and can be determined by the
-# URL and potentially the ``urllib3.connection.port_by_scheme`` dictionary.
-#
-# All custom key schemes should include the fields in this key at a minimum.
-BasePoolKey = collections.namedtuple('BasePoolKey', ('scheme', 'host', 'port'))
-
-# The fields to use when determining what pool to get a HTTP and HTTPS
-# connection from. All additional fields must be present in the PoolManager's
-# ``connection_pool_kw`` instance variable.
-HTTPPoolKey = collections.namedtuple(
-    'HTTPPoolKey', BasePoolKey._fields + ('timeout', 'retries', 'strict',
-                                          'block', 'source_address')
-)
-HTTPSPoolKey = collections.namedtuple(
-    'HTTPSPoolKey', HTTPPoolKey._fields + SSL_KEYWORDS
-)
-
-
-def _default_key_normalizer(key_class, request_context):
-    """
-    Create a pool key of type ``key_class`` for a request.
-
-    According to RFC 3986, both the scheme and host are case-insensitive.
-    Therefore, this function normalizes both before constructing the pool
-    key for an HTTPS request. If you wish to change this behaviour, provide
-    alternate callables to ``key_fn_by_scheme``.
-
-    :param key_class:
-        The class to use when constructing the key. This should be a namedtuple
-        with the ``scheme`` and ``host`` keys at a minimum.
-
-    :param request_context:
-        A dictionary-like object that contains the context for a request.
-        It should contain a key for each field in the :class:`HTTPPoolKey`
-    """
-    context = {}
-    for key in key_class._fields:
-        context[key] = request_context.get(key)
-    context['scheme'] = context['scheme'].lower()
-    context['host'] = context['host'].lower()
-    return key_class(**context)
-
-
-# A dictionary that maps a scheme to a callable that creates a pool key.
-# This can be used to alter the way pool keys are constructed, if desired.
-# Each PoolManager makes a copy of this dictionary so they can be configured
-# globally here, or individually on the instance.
-key_fn_by_scheme = {
-    'http': functools.partial(_default_key_normalizer, HTTPPoolKey),
-    'https': functools.partial(_default_key_normalizer, HTTPSPoolKey),
-}
-
-pool_classes_by_scheme = {
-    'http': HTTPConnectionPool,
-    'https': HTTPSConnectionPool,
-}
-
-
-class PoolManager(RequestMethods):
-    """
-    Allows for arbitrary requests while transparently keeping track of
-    necessary connection pools for you.
-
-    :param num_pools:
-        Number of connection pools to cache before discarding the least
-        recently used pool.
-
-    :param headers:
-        Headers to include with all requests, unless other headers are given
-        explicitly.
-
-    :param \**connection_pool_kw:
-        Additional parameters are used to create fresh
-        :class:`urllib3.connectionpool.ConnectionPool` instances.
-
-    Example::
-
-        >>> manager = PoolManager(num_pools=2)
-        >>> r = manager.request('GET', 'http://google.com/')
-        >>> r = manager.request('GET', 'http://google.com/mail')
-        >>> r = manager.request('GET', 'http://yahoo.com/')
-        >>> len(manager.pools)
-        2
-
-    """
-
-    proxy = None
-
-    def __init__(self, num_pools=10, headers=None, **connection_pool_kw):
-        RequestMethods.__init__(self, headers)
-        self.connection_pool_kw = connection_pool_kw
-        self.pools = RecentlyUsedContainer(num_pools,
-                                           dispose_func=lambda p: p.close())
-
-        # Locally set the pool classes and keys so other PoolManagers can
-        # override them.
-        self.pool_classes_by_scheme = pool_classes_by_scheme
-        self.key_fn_by_scheme = key_fn_by_scheme.copy()
-
-    def __enter__(self):
-        return self
-
-    def __exit__(self, exc_type, exc_val, exc_tb):
-        self.clear()
-        # Return False to re-raise any potential exceptions
-        return False
-
-    def _new_pool(self, scheme, host, port):
-        """
-        Create a new :class:`ConnectionPool` based on host, port and scheme.
-
-        This method is used to actually create the connection pools handed out
-        by :meth:`connection_from_url` and companion methods. It is intended
-        to be overridden for customization.
-        """
-        pool_cls = self.pool_classes_by_scheme[scheme]
-        kwargs = self.connection_pool_kw
-        if scheme == 'http':
-            kwargs = self.connection_pool_kw.copy()
-            for kw in SSL_KEYWORDS:
-                kwargs.pop(kw, None)
-
-        return pool_cls(host, port, **kwargs)
-
-    def clear(self):
-        """
-        Empty our store of pools and direct them all to close.
-
-        This will not affect in-flight connections, but they will not be
-        re-used after completion.
-        """
-        self.pools.clear()
-
-    def connection_from_host(self, host, port=None, scheme='http'):
-        """
-        Get a :class:`ConnectionPool` based on the host, port, and scheme.
-
-        If ``port`` isn't given, it will be derived from the ``scheme`` using
-        ``urllib3.connectionpool.port_by_scheme``.
-        """
-
-        if not host:
-            raise LocationValueError("No host specified.")
-
-        request_context = self.connection_pool_kw.copy()
-        request_context['scheme'] = scheme or 'http'
-        if not port:
-            port = port_by_scheme.get(request_context['scheme'].lower(), 80)
-        request_context['port'] = port
-        request_context['host'] = host
-
-        return self.connection_from_context(request_context)
-
-    def connection_from_context(self, request_context):
-        """
-        Get a :class:`ConnectionPool` based on the request context.
-
-        ``request_context`` must at least contain the ``scheme`` key and its
-        value must be a key in ``key_fn_by_scheme`` instance variable.
-        """
-        scheme = request_context['scheme'].lower()
-        pool_key_constructor = self.key_fn_by_scheme[scheme]
-        pool_key = pool_key_constructor(request_context)
-
-        return self.connection_from_pool_key(pool_key)
-
-    def connection_from_pool_key(self, pool_key):
-        """
-        Get a :class:`ConnectionPool` based on the provided pool key.
-
-        ``pool_key`` should be a namedtuple that only contains immutable
-        objects. At a minimum it must have the ``scheme``, ``host``, and
-        ``port`` fields.
-        """
-        with self.pools.lock:
-            # If the scheme, host, or port doesn't match existing open
-            # connections, open a new ConnectionPool.
-            pool = self.pools.get(pool_key)
-            if pool:
-                return pool
-
-            # Make a fresh ConnectionPool of the desired type
-            pool = self._new_pool(pool_key.scheme, pool_key.host, pool_key.port)
-            self.pools[pool_key] = pool
-
-        return pool
-
-    def connection_from_url(self, url):
-        """
-        Similar to :func:`urllib3.connectionpool.connection_from_url` but
-        doesn't pass any additional parameters to the
-        :class:`urllib3.connectionpool.ConnectionPool` constructor.
-
-        Additional parameters are taken from the :class:`.PoolManager`
-        constructor.
-        """
-        u = parse_url(url)
-        return self.connection_from_host(u.host, port=u.port, scheme=u.scheme)
-
-    def urlopen(self, method, url, redirect=True, **kw):
-        """
-        Same as :meth:`urllib3.connectionpool.HTTPConnectionPool.urlopen`
-        with custom cross-host redirect logic and only sends the request-uri
-        portion of the ``url``.
-
-        The given ``url`` parameter must be absolute, such that an appropriate
-        :class:`urllib3.connectionpool.ConnectionPool` can be chosen for it.
-        """
-        u = parse_url(url)
-        conn = self.connection_from_host(u.host, port=u.port, scheme=u.scheme)
-
-        kw['assert_same_host'] = False
-        kw['redirect'] = False
-        if 'headers' not in kw:
-            kw['headers'] = self.headers
-
-        if self.proxy is not None and u.scheme == "http":
-            response = conn.urlopen(method, url, **kw)
-        else:
-            response = conn.urlopen(method, u.request_uri, **kw)
-
-        redirect_location = redirect and response.get_redirect_location()
-        if not redirect_location:
-            return response
-
-        # Support relative URLs for redirecting.
-        redirect_location = urljoin(url, redirect_location)
-
-        # RFC 7231, Section 6.4.4
-        if response.status == 303:
-            method = 'GET'
-
-        retries = kw.get('retries')
-        if not isinstance(retries, Retry):
-            retries = Retry.from_int(retries, redirect=redirect)
-
-        try:
-            retries = retries.increment(method, url, response=response, _pool=conn)
-        except MaxRetryError:
-            if retries.raise_on_redirect:
-                raise
-            return response
-
-        kw['retries'] = retries
-        kw['redirect'] = redirect
-
-        log.info("Redirecting %s -> %s", url, redirect_location)
-        return self.urlopen(method, redirect_location, **kw)
-
-
-class ProxyManager(PoolManager):
-    """
-    Behaves just like :class:`PoolManager`, but sends all requests through
-    the defined proxy, using the CONNECT method for HTTPS URLs.
-
-    :param proxy_url:
-        The URL of the proxy to be used.
-
-    :param proxy_headers:
-        A dictionary containing headers that will be sent to the proxy. For
-        HTTP requests they are sent with each request, while for HTTPS/CONNECT
-        they are sent only once. Could be used for proxy authentication.
-
-    Example:
-        >>> proxy = urllib3.ProxyManager('http://localhost:3128/')
-        >>> r1 = proxy.request('GET', 'http://google.com/')
-        >>> r2 = proxy.request('GET', 'http://httpbin.org/')
-        >>> len(proxy.pools)
-        1
-        >>> r3 = proxy.request('GET', 'https://httpbin.org/')
-        >>> r4 = proxy.request('GET', 'https://twitter.com/')
-        >>> len(proxy.pools)
-        3
-
-    """
-
-    def __init__(self, proxy_url, num_pools=10, headers=None,
-                 proxy_headers=None, **connection_pool_kw):
-
-        if isinstance(proxy_url, HTTPConnectionPool):
-            proxy_url = '%s://%s:%i' % (proxy_url.scheme, proxy_url.host,
-                                        proxy_url.port)
-        proxy = parse_url(proxy_url)
-        if not proxy.port:
-            port = port_by_scheme.get(proxy.scheme, 80)
-            proxy = proxy._replace(port=port)
-
-        if proxy.scheme not in ("http", "https"):
-            raise ProxySchemeUnknown(proxy.scheme)
-
-        self.proxy = proxy
-        self.proxy_headers = proxy_headers or {}
-
-        connection_pool_kw['_proxy'] = self.proxy
-        connection_pool_kw['_proxy_headers'] = self.proxy_headers
-
-        super(ProxyManager, self).__init__(
-            num_pools, headers, **connection_pool_kw)
-
-    def connection_from_host(self, host, port=None, scheme='http'):
-        if scheme == "https":
-            return super(ProxyManager, self).connection_from_host(
-                host, port, scheme)
-
-        return super(ProxyManager, self).connection_from_host(
-            self.proxy.host, self.proxy.port, self.proxy.scheme)
-
-    def _set_proxy_headers(self, url, headers=None):
-        """
-        Sets headers needed by proxies: specifically, the Accept and Host
-        headers. Only sets headers not provided by the user.
-        """
-        headers_ = {'Accept': '*/*'}
-
-        netloc = parse_url(url).netloc
-        if netloc:
-            headers_['Host'] = netloc
-
-        if headers:
-            headers_.update(headers)
-        return headers_
-
-    def urlopen(self, method, url, redirect=True, **kw):
-        "Same as HTTP(S)ConnectionPool.urlopen, ``url`` must be absolute."
-        u = parse_url(url)
-
-        if u.scheme == "http":
-            # For proxied HTTPS requests, httplib sets the necessary headers
-            # on the CONNECT to the proxy. For HTTP, we'll definitely
-            # need to set 'Host' at the very least.
-            headers = kw.get('headers', self.headers)
-            kw['headers'] = self._set_proxy_headers(url, headers)
-
-        return super(ProxyManager, self).urlopen(method, url, redirect=redirect, **kw)
-
-
-def proxy_from_url(url, **kw):
-    return ProxyManager(proxy_url=url, **kw)
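
For orientation, a minimal sketch of how the PoolManager/ProxyManager API
removed above is typically used. A standalone urllib3 1.x install is assumed,
and the host names, proxy address and variable names are illustrative only,
not taken from this commit:

    import urllib3

    # One manager serves many hosts; pools are keyed by scheme, host and port,
    # as connection_from_host() / connection_from_pool_key() describe.
    pm = urllib3.PoolManager(num_pools=10, maxsize=4)
    r = pm.request('GET', 'http://httpbin.org/redirect/1')
    print(r.status)  # cross-host redirects were handled by urlopen()

    # ProxyManager routes every request through the proxy; HTTPS uses CONNECT.
    hdrs = urllib3.util.make_headers(proxy_basic_auth='user:passwd')
    proxy = urllib3.ProxyManager('http://localhost:3128/', proxy_headers=hdrs)
    r2 = proxy.request('GET', 'http://example.com/')

A single manager keeps at most num_pools per-host pools alive; clear() is what
empties that store without touching in-flight connections.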

http://git-wip-us.apache.org/repos/asf/incubator-senssoft-tap/blob/6a81d1e7/env2/lib/python2.7/site-packages/pip/_vendor/requests/packages/urllib3/request.py
----------------------------------------------------------------------
diff --git 
a/env2/lib/python2.7/site-packages/pip/_vendor/requests/packages/urllib3/request.py
 
b/env2/lib/python2.7/site-packages/pip/_vendor/requests/packages/urllib3/request.py
deleted file mode 100644
index d5aa62d..0000000
--- 
a/env2/lib/python2.7/site-packages/pip/_vendor/requests/packages/urllib3/request.py
+++ /dev/null
@@ -1,151 +0,0 @@
-from __future__ import absolute_import
-try:
-    from urllib.parse import urlencode
-except ImportError:
-    from urllib import urlencode
-
-from .filepost import encode_multipart_formdata
-
-
-__all__ = ['RequestMethods']
-
-
-class RequestMethods(object):
-    """
-    Convenience mixin for classes that implement a :meth:`urlopen` method, such
-    as :class:`~urllib3.connectionpool.HTTPConnectionPool` and
-    :class:`~urllib3.poolmanager.PoolManager`.
-
-    Provides behavior for making common types of HTTP request methods and
-    decides which type of request field encoding to use.
-
-    Specifically,
-
-    :meth:`.request_encode_url` is for sending requests whose fields are
-    encoded in the URL (such as GET, HEAD, DELETE).
-
-    :meth:`.request_encode_body` is for sending requests whose fields are
-    encoded in the *body* of the request using multipart or www-form-urlencoded
-    (such as for POST, PUT, PATCH).
-
-    :meth:`.request` is for making any kind of request; it will look up the
-    appropriate encoding format and use one of the above two methods to make
-    the request.
-
-    Initializer parameters:
-
-    :param headers:
-        Headers to include with all requests, unless other headers are given
-        explicitly.
-    """
-
-    _encode_url_methods = set(['DELETE', 'GET', 'HEAD', 'OPTIONS'])
-
-    def __init__(self, headers=None):
-        self.headers = headers or {}
-
-    def urlopen(self, method, url, body=None, headers=None,
-                encode_multipart=True, multipart_boundary=None,
-                **kw):  # Abstract
-        raise NotImplementedError("Classes extending RequestMethods must "
-                                  "implement their own ``urlopen`` method.")
-
-    def request(self, method, url, fields=None, headers=None, **urlopen_kw):
-        """
-        Make a request using :meth:`urlopen` with the appropriate encoding of
-        ``fields`` based on the ``method`` used.
-
-        This is a convenience method that requires the least amount of manual
-        effort. It can be used in most situations, while still having the
-        option to drop down to more specific methods when necessary, such as
-        :meth:`request_encode_url`, :meth:`request_encode_body`,
-        or even the lowest level :meth:`urlopen`.
-        """
-        method = method.upper()
-
-        if method in self._encode_url_methods:
-            return self.request_encode_url(method, url, fields=fields,
-                                           headers=headers,
-                                           **urlopen_kw)
-        else:
-            return self.request_encode_body(method, url, fields=fields,
-                                            headers=headers,
-                                            **urlopen_kw)
-
-    def request_encode_url(self, method, url, fields=None, headers=None,
-                           **urlopen_kw):
-        """
-        Make a request using :meth:`urlopen` with the ``fields`` encoded in
-        the url. This is useful for request methods like GET, HEAD, DELETE, etc.
-        """
-        if headers is None:
-            headers = self.headers
-
-        extra_kw = {'headers': headers}
-        extra_kw.update(urlopen_kw)
-
-        if fields:
-            url += '?' + urlencode(fields)
-
-        return self.urlopen(method, url, **extra_kw)
-
-    def request_encode_body(self, method, url, fields=None, headers=None,
-                            encode_multipart=True, multipart_boundary=None,
-                            **urlopen_kw):
-        """
-        Make a request using :meth:`urlopen` with the ``fields`` encoded in
-        the body. This is useful for request methods like POST, PUT, PATCH, etc.
-
-        When ``encode_multipart=True`` (default), then
-        :meth:`urllib3.filepost.encode_multipart_formdata` is used to encode
-        the payload with the appropriate content type. Otherwise
-        :meth:`urllib.urlencode` is used with the
-        'application/x-www-form-urlencoded' content type.
-
-        Multipart encoding must be used when posting files, and it's reasonably
-        safe to use it in other times too. However, it may break request
-        signing, such as with OAuth.
-
-        Supports an optional ``fields`` parameter of key/value strings AND
-        key/filetuple. A filetuple is a (filename, data, MIME type) tuple where
-        the MIME type is optional. For example::
-
-            fields = {
-                'foo': 'bar',
-                'fakefile': ('foofile.txt', 'contents of foofile'),
-                'realfile': ('barfile.txt', open('realfile').read()),
-                'typedfile': ('bazfile.bin', open('bazfile').read(),
-                              'image/jpeg'),
-                'nonamefile': 'contents of nonamefile field',
-            }
-
-        When uploading a file, providing a filename (the first parameter of the
-        tuple) is optional but recommended to best mimic the behavior of browsers.
-
-        Note that if ``headers`` are supplied, the 'Content-Type' header will
-        be overwritten because it depends on the dynamic random boundary string
-        which is used to compose the body of the request. The random boundary
-        string can be explicitly set with the ``multipart_boundary`` parameter.
-        """
-        if headers is None:
-            headers = self.headers
-
-        extra_kw = {'headers': {}}
-
-        if fields:
-            if 'body' in urlopen_kw:
-                raise TypeError(
-                    "request got values for both 'fields' and 'body', can only specify one.")
-
-            if encode_multipart:
-                body, content_type = encode_multipart_formdata(fields, boundary=multipart_boundary)
-            else:
-                body, content_type = urlencode(fields), 'application/x-www-form-urlencoded'
-
-            extra_kw['body'] = body
-            extra_kw['headers'] = {'Content-Type': content_type}
-
-        extra_kw['headers'].update(headers)
-        extra_kw.update(urlopen_kw)
-
-        return self.urlopen(method, url, **extra_kw)
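
To illustrate the encoding dispatch documented above, here is a short sketch,
again against a standalone urllib3 1.x install; the URLs and field names are
examples only:

    import urllib3

    pm = urllib3.PoolManager()

    # GET/HEAD/DELETE/OPTIONS: fields are urlencoded into the query string.
    r = pm.request('GET', 'http://httpbin.org/get', fields={'q': 'senssoft'})

    # POST/PUT/PATCH: fields are multipart-encoded by default; a
    # (filename, data[, mime_type]) tuple uploads file contents.
    r2 = pm.request('POST', 'http://httpbin.org/post',
                    fields={'name': 'example',
                            'attachment': ('notes.txt', 'contents of notes')})

    # Opting out of multipart in favour of application/x-www-form-urlencoded:
    r3 = pm.request_encode_body('POST', 'http://httpbin.org/post',
                                fields={'name': 'example'},
                                encode_multipart=False)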

http://git-wip-us.apache.org/repos/asf/incubator-senssoft-tap/blob/6a81d1e7/env2/lib/python2.7/site-packages/pip/_vendor/requests/packages/urllib3/response.py
----------------------------------------------------------------------
diff --git 
a/env2/lib/python2.7/site-packages/pip/_vendor/requests/packages/urllib3/response.py
 
b/env2/lib/python2.7/site-packages/pip/_vendor/requests/packages/urllib3/response.py
deleted file mode 100644
index 5567903..0000000
--- 
a/env2/lib/python2.7/site-packages/pip/_vendor/requests/packages/urllib3/response.py
+++ /dev/null
@@ -1,530 +0,0 @@
-from __future__ import absolute_import
-from contextlib import contextmanager
-import zlib
-import io
-from socket import timeout as SocketTimeout
-from socket import error as SocketError
-
-from ._collections import HTTPHeaderDict
-from .exceptions import (
-    ProtocolError, DecodeError, ReadTimeoutError, ResponseNotChunked
-)
-from .packages.six import string_types as basestring, binary_type, PY3
-from .packages.six.moves import http_client as httplib
-from .connection import HTTPException, BaseSSLError
-from .util.response import is_fp_closed, is_response_to_head
-
-
-class DeflateDecoder(object):
-
-    def __init__(self):
-        self._first_try = True
-        self._data = binary_type()
-        self._obj = zlib.decompressobj()
-
-    def __getattr__(self, name):
-        return getattr(self._obj, name)
-
-    def decompress(self, data):
-        if not data:
-            return data
-
-        if not self._first_try:
-            return self._obj.decompress(data)
-
-        self._data += data
-        try:
-            return self._obj.decompress(data)
-        except zlib.error:
-            self._first_try = False
-            self._obj = zlib.decompressobj(-zlib.MAX_WBITS)
-            try:
-                return self.decompress(self._data)
-            finally:
-                self._data = None
-
-
-class GzipDecoder(object):
-
-    def __init__(self):
-        self._obj = zlib.decompressobj(16 + zlib.MAX_WBITS)
-
-    def __getattr__(self, name):
-        return getattr(self._obj, name)
-
-    def decompress(self, data):
-        if not data:
-            return data
-        return self._obj.decompress(data)
-
-
-def _get_decoder(mode):
-    if mode == 'gzip':
-        return GzipDecoder()
-
-    return DeflateDecoder()
-
-
-class HTTPResponse(io.IOBase):
-    """
-    HTTP Response container.
-
-    Backwards-compatible to httplib's HTTPResponse but the response ``body`` is
-    loaded and decoded on-demand when the ``data`` property is accessed.  This
-    class is also compatible with the Python standard library's :mod:`io`
-    module, and can hence be treated as a readable object in the context of that
-    framework.
-
-    Extra parameters for behaviour not present in httplib.HTTPResponse:
-
-    :param preload_content:
-        If True, the response's body will be preloaded during construction.
-
-    :param decode_content:
-        If True, will attempt to decode the body based on the
-        'content-encoding' header (e.g. 'gzip' and 'deflate'); otherwise the
-        raw data is returned.
-
-    :param original_response:
-        When this HTTPResponse wrapper is generated from an httplib.HTTPResponse
-        object, it's convenient to include the original for debug purposes. It's
-        otherwise unused.
-    """
-
-    CONTENT_DECODERS = ['gzip', 'deflate']
-    REDIRECT_STATUSES = [301, 302, 303, 307, 308]
-
-    def __init__(self, body='', headers=None, status=0, version=0, reason=None,
-                 strict=0, preload_content=True, decode_content=True,
-                 original_response=None, pool=None, connection=None):
-
-        if isinstance(headers, HTTPHeaderDict):
-            self.headers = headers
-        else:
-            self.headers = HTTPHeaderDict(headers)
-        self.status = status
-        self.version = version
-        self.reason = reason
-        self.strict = strict
-        self.decode_content = decode_content
-
-        self._decoder = None
-        self._body = None
-        self._fp = None
-        self._original_response = original_response
-        self._fp_bytes_read = 0
-
-        if body and isinstance(body, (basestring, binary_type)):
-            self._body = body
-
-        self._pool = pool
-        self._connection = connection
-
-        if hasattr(body, 'read'):
-            self._fp = body
-
-        # Are we using the chunked-style of transfer encoding?
-        self.chunked = False
-        self.chunk_left = None
-        tr_enc = self.headers.get('transfer-encoding', '').lower()
-        # Don't incur the penalty of creating a list and then discarding it
-        encodings = (enc.strip() for enc in tr_enc.split(","))
-        if "chunked" in encodings:
-            self.chunked = True
-
-        # If requested, preload the body.
-        if preload_content and not self._body:
-            self._body = self.read(decode_content=decode_content)
-
-    def get_redirect_location(self):
-        """
-        Should we redirect and where to?
-
-        :returns: Truthy redirect location string if we got a redirect status
-            code and valid location. ``None`` if redirect status and no
-            location. ``False`` if not a redirect status code.
-        """
-        if self.status in self.REDIRECT_STATUSES:
-            return self.headers.get('location')
-
-        return False
-
-    def release_conn(self):
-        if not self._pool or not self._connection:
-            return
-
-        self._pool._put_conn(self._connection)
-        self._connection = None
-
-    @property
-    def data(self):
-        # For backwards-compat with urllib3 0.4 and earlier.
-        if self._body:
-            return self._body
-
-        if self._fp:
-            return self.read(cache_content=True)
-
-    @property
-    def connection(self):
-        return self._connection
-
-    def tell(self):
-        """
-        Obtain the number of bytes pulled over the wire so far. May differ from
-        the amount of content returned by :meth:`HTTPResponse.read` if bytes
-        are encoded on the wire (e.g, compressed).
-        """
-        return self._fp_bytes_read
-
-    def _init_decoder(self):
-        """
-        Set up the _decoder attribute if necessary.
-        """
-        # Note: content-encoding value should be case-insensitive, per RFC 7230
-        # Section 3.2
-        content_encoding = self.headers.get('content-encoding', '').lower()
-        if self._decoder is None and content_encoding in self.CONTENT_DECODERS:
-            self._decoder = _get_decoder(content_encoding)
-
-    def _decode(self, data, decode_content, flush_decoder):
-        """
-        Decode the data passed in and potentially flush the decoder.
-        """
-        try:
-            if decode_content and self._decoder:
-                data = self._decoder.decompress(data)
-        except (IOError, zlib.error) as e:
-            content_encoding = self.headers.get('content-encoding', '').lower()
-            raise DecodeError(
-                "Received response with content-encoding: %s, but "
-                "failed to decode it." % content_encoding, e)
-
-        if flush_decoder and decode_content:
-            data += self._flush_decoder()
-
-        return data
-
-    def _flush_decoder(self):
-        """
-        Flushes the decoder. Should only be called if the decoder is actually
-        being used.
-        """
-        if self._decoder:
-            buf = self._decoder.decompress(b'')
-            return buf + self._decoder.flush()
-
-        return b''
-
-    @contextmanager
-    def _error_catcher(self):
-        """
-        Catch low-level python exceptions, instead re-raising urllib3
-        variants, so that low-level exceptions are not leaked in the
-        high-level api.
-
-        On exit, release the connection back to the pool.
-        """
-        clean_exit = False
-
-        try:
-            try:
-                yield
-
-            except SocketTimeout:
-                # FIXME: Ideally we'd like to include the url in the ReadTimeoutError but
-                # there is yet no clean way to get at it from this context.
-                raise ReadTimeoutError(self._pool, None, 'Read timed out.')
-
-            except BaseSSLError as e:
-                # FIXME: Is there a better way to differentiate between SSLErrors?
-                if 'read operation timed out' not in str(e):  # Defensive:
-                    # This shouldn't happen but just in case we're missing an edge
-                    # case, let's avoid swallowing SSL errors.
-                    raise
-
-                raise ReadTimeoutError(self._pool, None, 'Read timed out.')
-
-            except (HTTPException, SocketError) as e:
-                # This includes IncompleteRead.
-                raise ProtocolError('Connection broken: %r' % e, e)
-
-            # If no exception is thrown, we should avoid cleaning up
-            # unnecessarily.
-            clean_exit = True
-        finally:
-            # If we didn't terminate cleanly, we need to throw away our
-            # connection.
-            if not clean_exit:
-                # The response may not be closed but we're not going to use it
-                # anymore so close it now to ensure that the connection is
-                # released back to the pool.
-                if self._original_response:
-                    self._original_response.close()
-
-                # Closing the response may not actually be sufficient to close
-                # everything, so if we have a hold of the connection close that
-                # too.
-                if self._connection:
-                    self._connection.close()
-
-            # If we hold the original response but it's closed now, we should
-            # return the connection back to the pool.
-            if self._original_response and self._original_response.isclosed():
-                self.release_conn()
-
-    def read(self, amt=None, decode_content=None, cache_content=False):
-        """
-        Similar to :meth:`httplib.HTTPResponse.read`, but with two additional
-        parameters: ``decode_content`` and ``cache_content``.
-
-        :param amt:
-            How much of the content to read. If specified, caching is skipped
-            because it doesn't make sense to cache partial content as the full
-            response.
-
-        :param decode_content:
-            If True, will attempt to decode the body based on the
-            'content-encoding' header.
-
-        :param cache_content:
-            If True, will save the returned data such that the same result is
-            returned regardless of the state of the underlying file object. This
-            is useful if you want the ``.data`` property to continue working
-            after having ``.read()`` the file object. (Overridden if ``amt`` is
-            set.)
-        """
-        self._init_decoder()
-        if decode_content is None:
-            decode_content = self.decode_content
-
-        if self._fp is None:
-            return
-
-        flush_decoder = False
-        data = None
-
-        with self._error_catcher():
-            if amt is None:
-                # cStringIO doesn't like amt=None
-                data = self._fp.read()
-                flush_decoder = True
-            else:
-                cache_content = False
-                data = self._fp.read(amt)
-                if amt != 0 and not data:  # Platform-specific: Buggy versions of Python.
-                    # Close the connection when no data is returned
-                    #
-                    # This is redundant to what httplib/http.client _should_
-                    # already do.  However, versions of python released before
-                    # December 15, 2012 (http://bugs.python.org/issue16298) do
-                    # not properly close the connection in all cases. There is
-                    # no harm in redundantly calling close.
-                    self._fp.close()
-                    flush_decoder = True
-
-        if data:
-            self._fp_bytes_read += len(data)
-
-            data = self._decode(data, decode_content, flush_decoder)
-
-            if cache_content:
-                self._body = data
-
-        return data
-
-    def stream(self, amt=2**16, decode_content=None):
-        """
-        A generator wrapper for the read() method. A call will block until
-        ``amt`` bytes have been read from the connection or until the
-        connection is closed.
-
-        :param amt:
-            How much of the content to read. The generator will return up to
-            this much data per iteration, but may return less. This is particularly
-            likely when using compressed data. However, the empty string will
-            never be returned.
-
-        :param decode_content:
-            If True, will attempt to decode the body based on the
-            'content-encoding' header.
-        """
-        if self.chunked:
-            for line in self.read_chunked(amt, decode_content=decode_content):
-                yield line
-        else:
-            while not is_fp_closed(self._fp):
-                data = self.read(amt=amt, decode_content=decode_content)
-
-                if data:
-                    yield data
-
-    @classmethod
-    def from_httplib(ResponseCls, r, **response_kw):
-        """
-        Given an :class:`httplib.HTTPResponse` instance ``r``, return a
-        corresponding :class:`urllib3.response.HTTPResponse` object.
-
-        Remaining parameters are passed to the HTTPResponse constructor, along
-        with ``original_response=r``.
-        """
-        headers = r.msg
-
-        if not isinstance(headers, HTTPHeaderDict):
-            if PY3:  # Python 3
-                headers = HTTPHeaderDict(headers.items())
-            else:  # Python 2
-                headers = HTTPHeaderDict.from_httplib(headers)
-
-        # HTTPResponse objects in Python 3 don't have a .strict attribute
-        strict = getattr(r, 'strict', 0)
-        resp = ResponseCls(body=r,
-                           headers=headers,
-                           status=r.status,
-                           version=r.version,
-                           reason=r.reason,
-                           strict=strict,
-                           original_response=r,
-                           **response_kw)
-        return resp
-
-    # Backwards-compatibility methods for httplib.HTTPResponse
-    def getheaders(self):
-        return self.headers
-
-    def getheader(self, name, default=None):
-        return self.headers.get(name, default)
-
-    # Overrides from io.IOBase
-    def close(self):
-        if not self.closed:
-            self._fp.close()
-
-        if self._connection:
-            self._connection.close()
-
-    @property
-    def closed(self):
-        if self._fp is None:
-            return True
-        elif hasattr(self._fp, 'closed'):
-            return self._fp.closed
-        elif hasattr(self._fp, 'isclosed'):  # Python 2
-            return self._fp.isclosed()
-        else:
-            return True
-
-    def fileno(self):
-        if self._fp is None:
-            raise IOError("HTTPResponse has no file to get a fileno from")
-        elif hasattr(self._fp, "fileno"):
-            return self._fp.fileno()
-        else:
-            raise IOError("The file-like object this HTTPResponse is wrapped "
-                          "around has no file descriptor")
-
-    def flush(self):
-        if self._fp is not None and hasattr(self._fp, 'flush'):
-            return self._fp.flush()
-
-    def readable(self):
-        # This method is required for `io` module compatibility.
-        return True
-
-    def readinto(self, b):
-        # This method is required for `io` module compatibility.
-        temp = self.read(len(b))
-        if len(temp) == 0:
-            return 0
-        else:
-            b[:len(temp)] = temp
-            return len(temp)
-
-    def _update_chunk_length(self):
-        # First, we'll figure out length of a chunk and then
-        # we'll try to read it from socket.
-        if self.chunk_left is not None:
-            return
-        line = self._fp.fp.readline()
-        line = line.split(b';', 1)[0]
-        try:
-            self.chunk_left = int(line, 16)
-        except ValueError:
-            # Invalid chunked protocol response, abort.
-            self.close()
-            raise httplib.IncompleteRead(line)
-
-    def _handle_chunk(self, amt):
-        returned_chunk = None
-        if amt is None:
-            chunk = self._fp._safe_read(self.chunk_left)
-            returned_chunk = chunk
-            self._fp._safe_read(2)  # Toss the CRLF at the end of the chunk.
-            self.chunk_left = None
-        elif amt < self.chunk_left:
-            value = self._fp._safe_read(amt)
-            self.chunk_left = self.chunk_left - amt
-            returned_chunk = value
-        elif amt == self.chunk_left:
-            value = self._fp._safe_read(amt)
-            self._fp._safe_read(2)  # Toss the CRLF at the end of the chunk.
-            self.chunk_left = None
-            returned_chunk = value
-        else:  # amt > self.chunk_left
-            returned_chunk = self._fp._safe_read(self.chunk_left)
-            self._fp._safe_read(2)  # Toss the CRLF at the end of the chunk.
-            self.chunk_left = None
-        return returned_chunk
-
-    def read_chunked(self, amt=None, decode_content=None):
-        """
-        Similar to :meth:`HTTPResponse.read`, but with an additional
-        parameter: ``decode_content``.
-
-        :param decode_content:
-            If True, will attempt to decode the body based on the
-            'content-encoding' header.
-        """
-        self._init_decoder()
-        # FIXME: Rewrite this method and make it a class with a better structured logic.
-        if not self.chunked:
-            raise ResponseNotChunked(
-                "Response is not chunked. "
-                "Header 'transfer-encoding: chunked' is missing.")
-
-        # Don't bother reading the body of a HEAD request.
-        if self._original_response and is_response_to_head(self._original_response):
-            self._original_response.close()
-            return
-
-        with self._error_catcher():
-            while True:
-                self._update_chunk_length()
-                if self.chunk_left == 0:
-                    break
-                chunk = self._handle_chunk(amt)
-                decoded = self._decode(chunk, decode_content=decode_content,
-                                       flush_decoder=False)
-                if decoded:
-                    yield decoded
-
-            if decode_content:
-                # On CPython and PyPy, we should never need to flush the
-                # decoder. However, on Jython we *might* need to, so
-                # let's defensively do it anyway.
-                decoded = self._flush_decoder()
-                if decoded:  # Platform-specific: Jython.
-                    yield decoded
-
-            # Chunk content ends with \r\n: discard it.
-            while True:
-                line = self._fp.fp.readline()
-                if not line:
-                    # Some sites may not end with '\r\n'.
-                    break
-                if line == b'\r\n':
-                    break
-
-            # We read everything; close the "file".
-            if self._original_response:
-                self._original_response.close()
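
A brief, illustrative sketch of the streaming path documented above, assuming
a standalone urllib3 1.x install; the URL and chunk size are arbitrary
examples, not part of this commit:

    import urllib3

    pm = urllib3.PoolManager()

    # preload_content=False defers reading the body; stream() then yields
    # decoded chunks (gzip/deflate are handled by the decoders shown above).
    resp = pm.request('GET', 'http://httpbin.org/bytes/65536',
                      preload_content=False)
    try:
        for chunk in resp.stream(2**14):
            pass  # process each chunk here
    finally:
        # Hand the connection back to the pool once the body is consumed.
        resp.release_conn()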

http://git-wip-us.apache.org/repos/asf/incubator-senssoft-tap/blob/6a81d1e7/env2/lib/python2.7/site-packages/pip/_vendor/requests/packages/urllib3/util/__init__.py
----------------------------------------------------------------------
diff --git 
a/env2/lib/python2.7/site-packages/pip/_vendor/requests/packages/urllib3/util/__init__.py
 
b/env2/lib/python2.7/site-packages/pip/_vendor/requests/packages/urllib3/util/__init__.py
deleted file mode 100644
index 4778cf9..0000000
--- 
a/env2/lib/python2.7/site-packages/pip/_vendor/requests/packages/urllib3/util/__init__.py
+++ /dev/null
@@ -1,46 +0,0 @@
-from __future__ import absolute_import
-# For backwards compatibility, provide imports that used to be here.
-from .connection import is_connection_dropped
-from .request import make_headers
-from .response import is_fp_closed
-from .ssl_ import (
-    SSLContext,
-    HAS_SNI,
-    IS_PYOPENSSL,
-    assert_fingerprint,
-    resolve_cert_reqs,
-    resolve_ssl_version,
-    ssl_wrap_socket,
-)
-from .timeout import (
-    current_time,
-    Timeout,
-)
-
-from .retry import Retry
-from .url import (
-    get_host,
-    parse_url,
-    split_first,
-    Url,
-)
-
-__all__ = (
-    'HAS_SNI',
-    'IS_PYOPENSSL',
-    'SSLContext',
-    'Retry',
-    'Timeout',
-    'Url',
-    'assert_fingerprint',
-    'current_time',
-    'is_connection_dropped',
-    'is_fp_closed',
-    'get_host',
-    'parse_url',
-    'make_headers',
-    'resolve_cert_reqs',
-    'resolve_ssl_version',
-    'split_first',
-    'ssl_wrap_socket',
-)
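
The re-exports above are the usual entry points for tuning requests. A small
illustrative sketch follows; the user-agent string and URL are placeholders:

    from urllib3.util import Retry, Timeout, make_headers, parse_url

    timeout = Timeout(connect=2.0, read=7.0)
    retries = Retry(total=3, redirect=2, backoff_factor=0.3)
    headers = make_headers(keep_alive=True, user_agent='example-agent/0.1')

    u = parse_url('https://example.com:8443/metrics?window=5m')
    # u.scheme == 'https', u.host == 'example.com', u.port == 8443

The resulting Timeout, Retry and header objects are what PoolManager and the
connection pools accept as keyword arguments.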

