http://git-wip-us.apache.org/repos/asf/incubator-senssoft-tap/blob/6a81d1e7/env2/lib/python2.7/site-packages/docker/api/container.py
----------------------------------------------------------------------
diff --git a/env2/lib/python2.7/site-packages/docker/api/container.py 
b/env2/lib/python2.7/site-packages/docker/api/container.py
deleted file mode 100644
index b8507d8..0000000
--- a/env2/lib/python2.7/site-packages/docker/api/container.py
+++ /dev/null
@@ -1,460 +0,0 @@
-import six
-import warnings
-from datetime import datetime
-
-from .. import errors
-from .. import utils
-from ..utils.utils import create_networking_config, create_endpoint_config
-
-
-class ContainerApiMixin(object):
-    @utils.check_resource
-    def attach(self, container, stdout=True, stderr=True,
-               stream=False, logs=False):
-        params = {
-            'logs': logs and 1 or 0,
-            'stdout': stdout and 1 or 0,
-            'stderr': stderr and 1 or 0,
-            'stream': stream and 1 or 0
-        }
-
-        headers = {
-            'Connection': 'Upgrade',
-            'Upgrade': 'tcp'
-        }
-
-        u = self._url("/containers/{0}/attach", container)
-        response = self._post(u, headers=headers, params=params, stream=stream)
-
-        return self._read_from_socket(response, stream)
-
-    @utils.check_resource
-    def attach_socket(self, container, params=None, ws=False):
-        if params is None:
-            params = {
-                'stdout': 1,
-                'stderr': 1,
-                'stream': 1
-            }
-
-        if ws:
-            return self._attach_websocket(container, params)
-
-        headers = {
-            'Connection': 'Upgrade',
-            'Upgrade': 'tcp'
-        }
-
-        u = self._url("/containers/{0}/attach", container)
-        return self._get_raw_response_socket(
-            self.post(
-                u, None, params=self._attach_params(params), stream=True,
-                headers=headers
-            )
-        )
-
-    @utils.check_resource
-    def commit(self, container, repository=None, tag=None, message=None,
-               author=None, changes=None, conf=None):
-        params = {
-            'container': container,
-            'repo': repository,
-            'tag': tag,
-            'comment': message,
-            'author': author,
-            'changes': changes
-        }
-        u = self._url("/commit")
-        return self._result(self._post_json(u, data=conf, params=params),
-                            json=True)
-
-    def containers(self, quiet=False, all=False, trunc=False, latest=False,
-                   since=None, before=None, limit=-1, size=False,
-                   filters=None):
-        params = {
-            'limit': 1 if latest else limit,
-            'all': 1 if all else 0,
-            'size': 1 if size else 0,
-            'trunc_cmd': 1 if trunc else 0,
-            'since': since,
-            'before': before
-        }
-        if filters:
-            params['filters'] = utils.convert_filters(filters)
-        u = self._url("/containers/json")
-        res = self._result(self._get(u, params=params), True)
-
-        if quiet:
-            return [{'Id': x['Id']} for x in res]
-        if trunc:
-            for x in res:
-                x['Id'] = x['Id'][:12]
-        return res
-
-    @utils.check_resource
-    def copy(self, container, resource):
-        if utils.version_gte(self._version, '1.20'):
-            warnings.warn(
-                'Client.copy() is deprecated for API version >= 1.20, '
-                'please use get_archive() instead',
-                DeprecationWarning
-            )
-        res = self._post_json(
-            self._url("/containers/{0}/copy".format(container)),
-            data={"Resource": resource},
-            stream=True
-        )
-        self._raise_for_status(res)
-        return res.raw
-
-    def create_container(self, image, command=None, hostname=None, user=None,
-                         detach=False, stdin_open=False, tty=False,
-                         mem_limit=None, ports=None, environment=None,
-                         dns=None, volumes=None, volumes_from=None,
-                         network_disabled=False, name=None, entrypoint=None,
-                         cpu_shares=None, working_dir=None, domainname=None,
-                         memswap_limit=None, cpuset=None, host_config=None,
-                         mac_address=None, labels=None, volume_driver=None,
-                         stop_signal=None, networking_config=None):
-
-        if isinstance(volumes, six.string_types):
-            volumes = [volumes, ]
-
-        if host_config and utils.compare_version('1.15', self._version) < 0:
-            raise errors.InvalidVersion(
-                'host_config is not supported in API < 1.15'
-            )
-
-        config = self.create_container_config(
-            image, command, hostname, user, detach, stdin_open,
-            tty, mem_limit, ports, environment, dns, volumes, volumes_from,
-            network_disabled, entrypoint, cpu_shares, working_dir, domainname,
-            memswap_limit, cpuset, host_config, mac_address, labels,
-            volume_driver, stop_signal, networking_config,
-        )
-        return self.create_container_from_config(config, name)
-
-    def create_container_config(self, *args, **kwargs):
-        return utils.create_container_config(self._version, *args, **kwargs)
-
-    def create_container_from_config(self, config, name=None):
-        u = self._url("/containers/create")
-        params = {
-            'name': name
-        }
-        res = self._post_json(u, data=config, params=params)
-        return self._result(res, True)
-
-    def create_host_config(self, *args, **kwargs):
-        if not kwargs:
-            kwargs = {}
-        if 'version' in kwargs:
-            raise TypeError(
-                "create_host_config() got an unexpected "
-                "keyword argument 'version'"
-            )
-        kwargs['version'] = self._version
-        return utils.create_host_config(*args, **kwargs)
-
-    def create_networking_config(self, *args, **kwargs):
-        return create_networking_config(*args, **kwargs)
-
-    def create_endpoint_config(self, *args, **kwargs):
-        return create_endpoint_config(self._version, *args, **kwargs)
-
-    @utils.check_resource
-    def diff(self, container):
-        return self._result(
-            self._get(self._url("/containers/{0}/changes", container)), True
-        )
-
-    @utils.check_resource
-    def export(self, container):
-        res = self._get(
-            self._url("/containers/{0}/export", container), stream=True
-        )
-        self._raise_for_status(res)
-        return res.raw
-
-    @utils.check_resource
-    @utils.minimum_version('1.20')
-    def get_archive(self, container, path):
-        params = {
-            'path': path
-        }
-        url = self._url('/containers/{0}/archive', container)
-        res = self._get(url, params=params, stream=True)
-        self._raise_for_status(res)
-        encoded_stat = res.headers.get('x-docker-container-path-stat')
-        return (
-            res.raw,
-            utils.decode_json_header(encoded_stat) if encoded_stat else None
-        )
-
-    @utils.check_resource
-    def inspect_container(self, container):
-        return self._result(
-            self._get(self._url("/containers/{0}/json", container)), True
-        )
-
-    @utils.check_resource
-    def kill(self, container, signal=None):
-        url = self._url("/containers/{0}/kill", container)
-        params = {}
-        if signal is not None:
-            if not isinstance(signal, six.string_types):
-                signal = int(signal)
-            params['signal'] = signal
-        res = self._post(url, params=params)
-
-        self._raise_for_status(res)
-
-    @utils.check_resource
-    def logs(self, container, stdout=True, stderr=True, stream=False,
-             timestamps=False, tail='all', since=None, follow=None):
-        if utils.compare_version('1.11', self._version) >= 0:
-            if follow is None:
-                follow = stream
-            params = {'stderr': stderr and 1 or 0,
-                      'stdout': stdout and 1 or 0,
-                      'timestamps': timestamps and 1 or 0,
-                      'follow': follow and 1 or 0,
-                      }
-            if utils.compare_version('1.13', self._version) >= 0:
-                if tail != 'all' and (not isinstance(tail, int) or tail < 0):
-                    tail = 'all'
-                params['tail'] = tail
-
-            if since is not None:
-                if utils.compare_version('1.19', self._version) < 0:
-                    raise errors.InvalidVersion(
-                        'since is not supported in API < 1.19'
-                    )
-                else:
-                    if isinstance(since, datetime):
-                        params['since'] = utils.datetime_to_timestamp(since)
-                    elif (isinstance(since, int) and since > 0):
-                        params['since'] = since
-            url = self._url("/containers/{0}/logs", container)
-            res = self._get(url, params=params, stream=stream)
-            return self._get_result(container, stream, res)
-        return self.attach(
-            container,
-            stdout=stdout,
-            stderr=stderr,
-            stream=stream,
-            logs=True
-        )
-
-    @utils.check_resource
-    def pause(self, container):
-        url = self._url('/containers/{0}/pause', container)
-        res = self._post(url)
-        self._raise_for_status(res)
-
-    @utils.check_resource
-    def port(self, container, private_port):
-        res = self._get(self._url("/containers/{0}/json", container))
-        self._raise_for_status(res)
-        json_ = res.json()
-        private_port = str(private_port)
-        h_ports = None
-
-        # Port settings is None when the container is running with
-        # network_mode=host.
-        port_settings = json_.get('NetworkSettings', {}).get('Ports')
-        if port_settings is None:
-            return None
-
-        if '/' in private_port:
-            return port_settings.get(private_port)
-
-        h_ports = port_settings.get(private_port + '/tcp')
-        if h_ports is None:
-            h_ports = port_settings.get(private_port + '/udp')
-
-        return h_ports
-
-    @utils.check_resource
-    @utils.minimum_version('1.20')
-    def put_archive(self, container, path, data):
-        params = {'path': path}
-        url = self._url('/containers/{0}/archive', container)
-        res = self._put(url, params=params, data=data)
-        self._raise_for_status(res)
-        return res.status_code == 200
-
-    @utils.check_resource
-    def remove_container(self, container, v=False, link=False, force=False):
-        params = {'v': v, 'link': link, 'force': force}
-        res = self._delete(
-            self._url("/containers/{0}", container), params=params
-        )
-        self._raise_for_status(res)
-
-    @utils.minimum_version('1.17')
-    @utils.check_resource
-    def rename(self, container, name):
-        url = self._url("/containers/{0}/rename", container)
-        params = {'name': name}
-        res = self._post(url, params=params)
-        self._raise_for_status(res)
-
-    @utils.check_resource
-    def resize(self, container, height, width):
-        params = {'h': height, 'w': width}
-        url = self._url("/containers/{0}/resize", container)
-        res = self._post(url, params=params)
-        self._raise_for_status(res)
-
-    @utils.check_resource
-    def restart(self, container, timeout=10):
-        params = {'t': timeout}
-        url = self._url("/containers/{0}/restart", container)
-        res = self._post(url, params=params)
-        self._raise_for_status(res)
-
-    @utils.check_resource
-    def start(self, container, binds=None, port_bindings=None, lxc_conf=None,
-              publish_all_ports=None, links=None, privileged=None,
-              dns=None, dns_search=None, volumes_from=None, network_mode=None,
-              restart_policy=None, cap_add=None, cap_drop=None, devices=None,
-              extra_hosts=None, read_only=None, pid_mode=None, ipc_mode=None,
-              security_opt=None, ulimits=None):
-
-        if utils.compare_version('1.10', self._version) < 0:
-            if dns is not None:
-                raise errors.InvalidVersion(
-                    'dns is only supported for API version >= 1.10'
-                )
-            if volumes_from is not None:
-                raise errors.InvalidVersion(
-                    'volumes_from is only supported for API version >= 1.10'
-                )
-
-        if utils.compare_version('1.15', self._version) < 0:
-            if security_opt is not None:
-                raise errors.InvalidVersion(
-                    'security_opt is only supported for API version >= 1.15'
-                )
-            if ipc_mode:
-                raise errors.InvalidVersion(
-                    'ipc_mode is only supported for API version >= 1.15'
-                )
-
-        if utils.compare_version('1.17', self._version) < 0:
-            if read_only is not None:
-                raise errors.InvalidVersion(
-                    'read_only is only supported for API version >= 1.17'
-                )
-            if pid_mode is not None:
-                raise errors.InvalidVersion(
-                    'pid_mode is only supported for API version >= 1.17'
-                )
-
-        if utils.compare_version('1.18', self._version) < 0:
-            if ulimits is not None:
-                raise errors.InvalidVersion(
-                    'ulimits is only supported for API version >= 1.18'
-                )
-
-        start_config_kwargs = dict(
-            binds=binds, port_bindings=port_bindings, lxc_conf=lxc_conf,
-            publish_all_ports=publish_all_ports, links=links, dns=dns,
-            privileged=privileged, dns_search=dns_search, cap_add=cap_add,
-            cap_drop=cap_drop, volumes_from=volumes_from, devices=devices,
-            network_mode=network_mode, restart_policy=restart_policy,
-            extra_hosts=extra_hosts, read_only=read_only, pid_mode=pid_mode,
-            ipc_mode=ipc_mode, security_opt=security_opt, ulimits=ulimits
-        )
-        start_config = None
-
-        if any(v is not None for v in start_config_kwargs.values()):
-            if utils.compare_version('1.15', self._version) > 0:
-                warnings.warn(
-                    'Passing host config parameters in start() is deprecated. '
-                    'Please use host_config in create_container instead!',
-                    DeprecationWarning
-                )
-            start_config = self.create_host_config(**start_config_kwargs)
-
-        url = self._url("/containers/{0}/start", container)
-        res = self._post_json(url, data=start_config)
-        self._raise_for_status(res)
-
-    @utils.minimum_version('1.17')
-    @utils.check_resource
-    def stats(self, container, decode=None, stream=True):
-        url = self._url("/containers/{0}/stats", container)
-        if stream:
-            return self._stream_helper(self._get(url, stream=True),
-                                       decode=decode)
-        else:
-            return self._result(self._get(url, params={'stream': False}),
-                                json=True)
-
-    @utils.check_resource
-    def stop(self, container, timeout=10):
-        params = {'t': timeout}
-        url = self._url("/containers/{0}/stop", container)
-
-        res = self._post(url, params=params,
-                         timeout=(timeout + (self.timeout or 0)))
-        self._raise_for_status(res)
-
-    @utils.check_resource
-    def top(self, container, ps_args=None):
-        u = self._url("/containers/{0}/top", container)
-        params = {}
-        if ps_args is not None:
-            params['ps_args'] = ps_args
-        return self._result(self._get(u, params=params), True)
-
-    @utils.check_resource
-    def unpause(self, container):
-        url = self._url('/containers/{0}/unpause', container)
-        res = self._post(url)
-        self._raise_for_status(res)
-
-    @utils.minimum_version('1.22')
-    @utils.check_resource
-    def update_container(
-        self, container, blkio_weight=None, cpu_period=None, cpu_quota=None,
-        cpu_shares=None, cpuset_cpus=None, cpuset_mems=None, mem_limit=None,
-        mem_reservation=None, memswap_limit=None, kernel_memory=None
-    ):
-        url = self._url('/containers/{0}/update', container)
-        data = {}
-        if blkio_weight:
-            data['BlkioWeight'] = blkio_weight
-        if cpu_period:
-            data['CpuPeriod'] = cpu_period
-        if cpu_shares:
-            data['CpuShares'] = cpu_shares
-        if cpu_quota:
-            data['CpuQuota'] = cpu_quota
-        if cpuset_cpus:
-            data['CpusetCpus'] = cpuset_cpus
-        if cpuset_mems:
-            data['CpusetMems'] = cpuset_mems
-        if mem_limit:
-            data['Memory'] = utils.parse_bytes(mem_limit)
-        if mem_reservation:
-            data['MemoryReservation'] = utils.parse_bytes(mem_reservation)
-        if memswap_limit:
-            data['MemorySwap'] = utils.parse_bytes(memswap_limit)
-        if kernel_memory:
-            data['KernelMemory'] = utils.parse_bytes(kernel_memory)
-
-        res = self._post_json(url, data=data)
-        return self._result(res, True)
-
-    @utils.check_resource
-    def wait(self, container, timeout=None):
-        url = self._url("/containers/{0}/wait", container)
-        res = self._post(url, timeout=timeout)
-        self._raise_for_status(res)
-        json_ = res.json()
-        if 'StatusCode' in json_:
-            return json_['StatusCode']
-        return -1

http://git-wip-us.apache.org/repos/asf/incubator-senssoft-tap/blob/6a81d1e7/env2/lib/python2.7/site-packages/docker/api/daemon.py
----------------------------------------------------------------------
diff --git a/env2/lib/python2.7/site-packages/docker/api/daemon.py 
b/env2/lib/python2.7/site-packages/docker/api/daemon.py
deleted file mode 100644
index 9ebe73c..0000000
--- a/env2/lib/python2.7/site-packages/docker/api/daemon.py
+++ /dev/null
@@ -1,76 +0,0 @@
-import os
-import warnings
-from datetime import datetime
-
-from ..auth import auth
-from ..constants import INSECURE_REGISTRY_DEPRECATION_WARNING
-from ..utils import utils
-
-
-class DaemonApiMixin(object):
-    def events(self, since=None, until=None, filters=None, decode=None):
-        if isinstance(since, datetime):
-            since = utils.datetime_to_timestamp(since)
-
-        if isinstance(until, datetime):
-            until = utils.datetime_to_timestamp(until)
-
-        if filters:
-            filters = utils.convert_filters(filters)
-
-        params = {
-            'since': since,
-            'until': until,
-            'filters': filters
-        }
-
-        return self._stream_helper(
-            self.get(self._url('/events'), params=params, stream=True),
-            decode=decode
-        )
-
-    def info(self):
-        return self._result(self._get(self._url("/info")), True)
-
-    def login(self, username, password=None, email=None, registry=None,
-              reauth=False, insecure_registry=False, dockercfg_path=None):
-        if insecure_registry:
-            warnings.warn(
-                INSECURE_REGISTRY_DEPRECATION_WARNING.format('login()'),
-                DeprecationWarning
-            )
-
-        # If we don't have any auth data so far, try reloading the config file
-        # one more time in case anything showed up in there.
-        # If dockercfg_path is passed check to see if the config file exists,
-        # if so load that config.
-        if dockercfg_path and os.path.exists(dockercfg_path):
-            self._auth_configs = auth.load_config(dockercfg_path)
-        elif not self._auth_configs:
-            self._auth_configs = auth.load_config()
-
-        authcfg = auth.resolve_authconfig(self._auth_configs, registry)
-        # If we found an existing auth config for this registry and username
-        # combination, we can return it immediately unless reauth is requested.
-        if authcfg and authcfg.get('username', None) == username \
-                and not reauth:
-            return authcfg
-
-        req_data = {
-            'username': username,
-            'password': password,
-            'email': email,
-            'serveraddress': registry,
-        }
-
-        response = self._post_json(self._url('/auth'), data=req_data)
-        if response.status_code == 200:
-            self._auth_configs[registry or auth.INDEX_NAME] = req_data
-        return self._result(response, json=True)
-
-    def ping(self):
-        return self._result(self._get(self._url('/_ping')))
-
-    def version(self, api_version=True):
-        url = self._url("/version", versioned_api=api_version)
-        return self._result(self._get(url), json=True)

http://git-wip-us.apache.org/repos/asf/incubator-senssoft-tap/blob/6a81d1e7/env2/lib/python2.7/site-packages/docker/api/exec_api.py
----------------------------------------------------------------------
diff --git a/env2/lib/python2.7/site-packages/docker/api/exec_api.py 
b/env2/lib/python2.7/site-packages/docker/api/exec_api.py
deleted file mode 100644
index 6e49996..0000000
--- a/env2/lib/python2.7/site-packages/docker/api/exec_api.py
+++ /dev/null
@@ -1,81 +0,0 @@
-import six
-
-from .. import errors
-from .. import utils
-
-
-class ExecApiMixin(object):
-    @utils.minimum_version('1.15')
-    @utils.check_resource
-    def exec_create(self, container, cmd, stdout=True, stderr=True,
-                    stdin=False, tty=False, privileged=False, user=''):
-        if privileged and utils.compare_version('1.19', self._version) < 0:
-            raise errors.InvalidVersion(
-                'Privileged exec is not supported in API < 1.19'
-            )
-        if user and utils.compare_version('1.19', self._version) < 0:
-            raise errors.InvalidVersion(
-                'User-specific exec is not supported in API < 1.19'
-            )
-        if isinstance(cmd, six.string_types):
-            cmd = utils.split_command(cmd)
-
-        data = {
-            'Container': container,
-            'User': user,
-            'Privileged': privileged,
-            'Tty': tty,
-            'AttachStdin': stdin,
-            'AttachStdout': stdout,
-            'AttachStderr': stderr,
-            'Cmd': cmd
-        }
-
-        url = self._url('/containers/{0}/exec', container)
-        res = self._post_json(url, data=data)
-        return self._result(res, True)
-
-    @utils.minimum_version('1.16')
-    def exec_inspect(self, exec_id):
-        if isinstance(exec_id, dict):
-            exec_id = exec_id.get('Id')
-        res = self._get(self._url("/exec/{0}/json", exec_id))
-        return self._result(res, True)
-
-    @utils.minimum_version('1.15')
-    def exec_resize(self, exec_id, height=None, width=None):
-        if isinstance(exec_id, dict):
-            exec_id = exec_id.get('Id')
-
-        params = {'h': height, 'w': width}
-        url = self._url("/exec/{0}/resize", exec_id)
-        res = self._post(url, params=params)
-        self._raise_for_status(res)
-
-    @utils.minimum_version('1.15')
-    def exec_start(self, exec_id, detach=False, tty=False, stream=False,
-                   socket=False):
-        # we want opened socket if socket == True
-        if isinstance(exec_id, dict):
-            exec_id = exec_id.get('Id')
-
-        data = {
-            'Tty': tty,
-            'Detach': detach
-        }
-
-        headers = {} if detach else {
-            'Connection': 'Upgrade',
-            'Upgrade': 'tcp'
-        }
-
-        res = self._post_json(
-            self._url('/exec/{0}/start', exec_id),
-            headers=headers,
-            data=data,
-            stream=True
-        )
-
-        if socket:
-            return self._get_raw_response_socket(res)
-        return self._read_from_socket(res, stream)

http://git-wip-us.apache.org/repos/asf/incubator-senssoft-tap/blob/6a81d1e7/env2/lib/python2.7/site-packages/docker/api/image.py
----------------------------------------------------------------------
diff --git a/env2/lib/python2.7/site-packages/docker/api/image.py 
b/env2/lib/python2.7/site-packages/docker/api/image.py
deleted file mode 100644
index 7f25f9d..0000000
--- a/env2/lib/python2.7/site-packages/docker/api/image.py
+++ /dev/null
@@ -1,270 +0,0 @@
-import logging
-import os
-import six
-import warnings
-
-from ..auth import auth
-from ..constants import INSECURE_REGISTRY_DEPRECATION_WARNING
-from .. import utils
-from .. import errors
-
-log = logging.getLogger(__name__)
-
-
-class ImageApiMixin(object):
-
-    @utils.check_resource
-    def get_image(self, image):
-        res = self._get(self._url("/images/{0}/get", image), stream=True)
-        self._raise_for_status(res)
-        return res.raw
-
-    @utils.check_resource
-    def history(self, image):
-        res = self._get(self._url("/images/{0}/history", image))
-        return self._result(res, True)
-
-    def images(self, name=None, quiet=False, all=False, viz=False,
-               filters=None):
-        if viz:
-            if utils.compare_version('1.7', self._version) >= 0:
-                raise Exception('Viz output is not supported in API >= 1.7!')
-            return self._result(self._get(self._url("images/viz")))
-        params = {
-            'filter': name,
-            'only_ids': 1 if quiet else 0,
-            'all': 1 if all else 0,
-        }
-        if filters:
-            params['filters'] = utils.convert_filters(filters)
-        res = self._result(self._get(self._url("/images/json"), params=params),
-                           True)
-        if quiet:
-            return [x['Id'] for x in res]
-        return res
-
-    def import_image(self, src=None, repository=None, tag=None, image=None,
-                     changes=None, stream_src=False):
-        if not (src or image):
-            raise errors.DockerException(
-                'Must specify src or image to import from'
-            )
-        u = self._url('/images/create')
-
-        params = _import_image_params(
-            repository, tag, image,
-            src=(src if isinstance(src, six.string_types) else None),
-            changes=changes
-        )
-        headers = {'Content-Type': 'application/tar'}
-
-        if image or params.get('fromSrc') != '-':  # from image or URL
-            return self._result(
-                self._post(u, data=None, params=params)
-            )
-        elif isinstance(src, six.string_types):  # from file path
-            with open(src, 'rb') as f:
-                return self._result(
-                    self._post(
-                        u, data=f, params=params, headers=headers, timeout=None
-                    )
-                )
-        else:  # from raw data
-            if stream_src:
-                headers['Transfer-Encoding'] = 'chunked'
-            return self._result(
-                self._post(u, data=src, params=params, headers=headers)
-            )
-
-    def import_image_from_data(self, data, repository=None, tag=None,
-                               changes=None):
-        u = self._url('/images/create')
-        params = _import_image_params(
-            repository, tag, src='-', changes=changes
-        )
-        headers = {'Content-Type': 'application/tar'}
-        return self._result(
-            self._post(
-                u, data=data, params=params, headers=headers, timeout=None
-            )
-        )
-        return self.import_image(
-            src=data, repository=repository, tag=tag, changes=changes
-        )
-
-    def import_image_from_file(self, filename, repository=None, tag=None,
-                               changes=None):
-        return self.import_image(
-            src=filename, repository=repository, tag=tag, changes=changes
-        )
-
-    def import_image_from_stream(self, stream, repository=None, tag=None,
-                                 changes=None):
-        return self.import_image(
-            src=stream, stream_src=True, repository=repository, tag=tag,
-            changes=changes
-        )
-
-    def import_image_from_url(self, url, repository=None, tag=None,
-                              changes=None):
-        return self.import_image(
-            src=url, repository=repository, tag=tag, changes=changes
-        )
-
-    def import_image_from_image(self, image, repository=None, tag=None,
-                                changes=None):
-        return self.import_image(
-            image=image, repository=repository, tag=tag, changes=changes
-        )
-
-    @utils.check_resource
-    def insert(self, image, url, path):
-        if utils.compare_version('1.12', self._version) >= 0:
-            raise errors.DeprecatedMethod(
-                'insert is not available for API version >=1.12'
-            )
-        api_url = self._url("/images/{0}/insert", image)
-        params = {
-            'url': url,
-            'path': path
-        }
-        return self._result(self._post(api_url, params=params))
-
-    @utils.check_resource
-    def inspect_image(self, image):
-        return self._result(
-            self._get(self._url("/images/{0}/json", image)), True
-        )
-
-    def load_image(self, data):
-        res = self._post(self._url("/images/load"), data=data)
-        self._raise_for_status(res)
-
-    def pull(self, repository, tag=None, stream=False,
-             insecure_registry=False, auth_config=None, decode=False):
-        if insecure_registry:
-            warnings.warn(
-                INSECURE_REGISTRY_DEPRECATION_WARNING.format('pull()'),
-                DeprecationWarning
-            )
-
-        if not tag:
-            repository, tag = utils.parse_repository_tag(repository)
-        registry, repo_name = auth.resolve_repository_name(repository)
-
-        params = {
-            'tag': tag,
-            'fromImage': repository
-        }
-        headers = {}
-
-        if utils.compare_version('1.5', self._version) >= 0:
-            if auth_config is None:
-                header = auth.get_config_header(self, registry)
-                if header:
-                    headers['X-Registry-Auth'] = header
-            else:
-                log.debug('Sending supplied auth config')
-                headers['X-Registry-Auth'] = auth.encode_header(auth_config)
-
-        response = self._post(
-            self._url('/images/create'), params=params, headers=headers,
-            stream=stream, timeout=None
-        )
-
-        self._raise_for_status(response)
-
-        if stream:
-            return self._stream_helper(response, decode=decode)
-
-        return self._result(response)
-
-    def push(self, repository, tag=None, stream=False,
-             insecure_registry=False, auth_config=None, decode=False):
-        if insecure_registry:
-            warnings.warn(
-                INSECURE_REGISTRY_DEPRECATION_WARNING.format('push()'),
-                DeprecationWarning
-            )
-
-        if not tag:
-            repository, tag = utils.parse_repository_tag(repository)
-        registry, repo_name = auth.resolve_repository_name(repository)
-        u = self._url("/images/{0}/push", repository)
-        params = {
-            'tag': tag
-        }
-        headers = {}
-
-        if utils.compare_version('1.5', self._version) >= 0:
-            if auth_config is None:
-                header = auth.get_config_header(self, registry)
-                if header:
-                    headers['X-Registry-Auth'] = header
-            else:
-                log.debug('Sending supplied auth config')
-                headers['X-Registry-Auth'] = auth.encode_header(auth_config)
-
-        response = self._post_json(
-            u, None, headers=headers, stream=stream, params=params
-        )
-
-        self._raise_for_status(response)
-
-        if stream:
-            return self._stream_helper(response, decode=decode)
-
-        return self._result(response)
-
-    @utils.check_resource
-    def remove_image(self, image, force=False, noprune=False):
-        params = {'force': force, 'noprune': noprune}
-        res = self._delete(self._url("/images/{0}", image), params=params)
-        self._raise_for_status(res)
-
-    def search(self, term):
-        return self._result(
-            self._get(self._url("/images/search"), params={'term': term}),
-            True
-        )
-
-    @utils.check_resource
-    def tag(self, image, repository, tag=None, force=False):
-        params = {
-            'tag': tag,
-            'repo': repository,
-            'force': 1 if force else 0
-        }
-        url = self._url("/images/{0}/tag", image)
-        res = self._post(url, params=params)
-        self._raise_for_status(res)
-        return res.status_code == 201
-
-
-def is_file(src):
-    try:
-        return (
-            isinstance(src, six.string_types) and
-            os.path.isfile(src)
-        )
-    except TypeError:  # a data string will make isfile() raise a TypeError
-        return False
-
-
-def _import_image_params(repo, tag, image=None, src=None,
-                         changes=None):
-    params = {
-        'repo': repo,
-        'tag': tag,
-    }
-    if image:
-        params['fromImage'] = image
-    elif src and not is_file(src):
-        params['fromSrc'] = src
-    else:
-        params['fromSrc'] = '-'
-
-    if changes:
-        params['changes'] = changes
-
-    return params

http://git-wip-us.apache.org/repos/asf/incubator-senssoft-tap/blob/6a81d1e7/env2/lib/python2.7/site-packages/docker/api/network.py
----------------------------------------------------------------------
diff --git a/env2/lib/python2.7/site-packages/docker/api/network.py 
b/env2/lib/python2.7/site-packages/docker/api/network.py
deleted file mode 100644
index 0ee0dab..0000000
--- a/env2/lib/python2.7/site-packages/docker/api/network.py
+++ /dev/null
@@ -1,107 +0,0 @@
-import json
-
-from ..errors import InvalidVersion
-from ..utils import check_resource, minimum_version
-from ..utils import version_lt
-
-
class NetworkApiMixin(object):
    """Client methods for the daemon's ``/networks`` endpoints (API >= 1.21)."""

    @minimum_version('1.21')
    def networks(self, names=None, ids=None):
        """List networks, optionally filtered by name and/or id."""
        list_filters = {}
        if names:
            list_filters['name'] = names
        if ids:
            list_filters['id'] = ids

        response = self._get(
            self._url("/networks"),
            params={'filters': json.dumps(list_filters)}
        )
        return self._result(response, json=True)

    @minimum_version('1.21')
    def create_network(self, name, driver=None, options=None, ipam=None,
                       check_duplicate=None, internal=False, labels=None,
                       enable_ipv6=False):
        """Create a network.

        Raises:
            TypeError: options/labels is not a dict.
            InvalidVersion: a feature newer than the negotiated API is used.
        """
        if options is not None and not isinstance(options, dict):
            raise TypeError('options must be a dictionary')

        payload = {
            'Name': name,
            'Driver': driver,
            'Options': options,
            'IPAM': ipam,
            'CheckDuplicate': check_duplicate
        }

        if labels is not None:
            if version_lt(self._version, '1.23'):
                raise InvalidVersion(
                    'network labels were introduced in API 1.23'
                )
            if not isinstance(labels, dict):
                raise TypeError('labels must be a dictionary')
            payload["Labels"] = labels

        if enable_ipv6:
            if version_lt(self._version, '1.23'):
                raise InvalidVersion(
                    'enable_ipv6 was introduced in API 1.23'
                )
            payload['EnableIPv6'] = True

        if internal:
            if version_lt(self._version, '1.22'):
                raise InvalidVersion('Internal networks are not '
                                     'supported in API version < 1.22')
            payload['Internal'] = True

        response = self._post_json(self._url("/networks/create"), data=payload)
        return self._result(response, json=True)

    @minimum_version('1.21')
    def remove_network(self, net_id):
        """Delete the network identified by *net_id*."""
        response = self._delete(self._url("/networks/{0}", net_id))
        self._raise_for_status(response)

    @minimum_version('1.21')
    def inspect_network(self, net_id):
        """Return the JSON description of a single network."""
        response = self._get(self._url("/networks/{0}", net_id))
        return self._result(response, json=True)

    @check_resource
    @minimum_version('1.21')
    def connect_container_to_network(self, container, net_id,
                                     ipv4_address=None, ipv6_address=None,
                                     aliases=None, links=None,
                                     link_local_ips=None):
        """Attach *container* to a network with optional endpoint settings."""
        payload = {
            "Container": container,
            "EndpointConfig": self.create_endpoint_config(
                aliases=aliases, links=links, ipv4_address=ipv4_address,
                ipv6_address=ipv6_address, link_local_ips=link_local_ips
            ),
        }
        response = self._post_json(
            self._url("/networks/{0}/connect", net_id), data=payload
        )
        self._raise_for_status(response)

    @check_resource
    @minimum_version('1.21')
    def disconnect_container_from_network(self, container, net_id,
                                          force=False):
        """Detach *container* from a network; *force* requires API >= 1.22."""
        payload = {"Container": container}
        if force:
            if version_lt(self._version, '1.22'):
                raise InvalidVersion(
                    'Forced disconnect was introduced in API 1.22'
                )
            payload['Force'] = force
        response = self._post_json(
            self._url("/networks/{0}/disconnect", net_id), data=payload
        )
        self._raise_for_status(response)

http://git-wip-us.apache.org/repos/asf/incubator-senssoft-tap/blob/6a81d1e7/env2/lib/python2.7/site-packages/docker/api/service.py
----------------------------------------------------------------------
diff --git a/env2/lib/python2.7/site-packages/docker/api/service.py 
b/env2/lib/python2.7/site-packages/docker/api/service.py
deleted file mode 100644
index baebbad..0000000
--- a/env2/lib/python2.7/site-packages/docker/api/service.py
+++ /dev/null
@@ -1,105 +0,0 @@
-from .. import errors
-from .. import utils
-from ..auth import auth
-
-
class ServiceApiMixin(object):
    """Client methods for swarm services and tasks (API >= 1.24)."""

    @utils.minimum_version('1.24')
    def create_service(
            self, task_template, name=None, labels=None, mode=None,
            update_config=None, networks=None, endpoint_config=None
    ):
        """Create a service from *task_template*.

        ContainerSpec.Image is mandatory; registry credentials for that
        image (when configured) are forwarded via the X-Registry-Auth header.
        """
        image = task_template.get('ContainerSpec', {}).get('Image', None)
        if image is None:
            raise errors.DockerException(
                'Missing mandatory Image key in ContainerSpec'
            )

        headers = {}
        registry, repo_name = auth.resolve_repository_name(image)
        auth_header = auth.get_config_header(self, registry)
        if auth_header:
            headers['X-Registry-Auth'] = auth_header

        payload = {
            'Name': name,
            'Labels': labels,
            'TaskTemplate': task_template,
            'Mode': mode,
            'UpdateConfig': update_config,
            'Networks': networks,
            'Endpoint': endpoint_config
        }
        response = self._post_json(
            self._url('/services/create'), data=payload, headers=headers
        )
        return self._result(response, True)

    @utils.minimum_version('1.24')
    @utils.check_resource
    def inspect_service(self, service):
        """Return the JSON description of a single service."""
        return self._result(
            self._get(self._url('/services/{0}', service)), True
        )

    @utils.minimum_version('1.24')
    @utils.check_resource
    def inspect_task(self, task):
        """Return the JSON description of a single task."""
        return self._result(self._get(self._url('/tasks/{0}', task)), True)

    @utils.minimum_version('1.24')
    @utils.check_resource
    def remove_service(self, service):
        """Delete a service; returns True on success."""
        response = self._delete(self._url('/services/{0}', service))
        self._raise_for_status(response)
        return True

    @utils.minimum_version('1.24')
    def services(self, filters=None):
        """List services, optionally narrowed by *filters*."""
        query = {
            'filters': utils.convert_filters(filters) if filters else None
        }
        return self._result(
            self._get(self._url('/services'), params=query), True
        )

    @utils.minimum_version('1.24')
    def tasks(self, filters=None):
        """List tasks, optionally narrowed by *filters*."""
        query = {
            'filters': utils.convert_filters(filters) if filters else None
        }
        return self._result(
            self._get(self._url('/tasks'), params=query), True
        )

    @utils.minimum_version('1.24')
    @utils.check_resource
    def update_service(self, service, version, task_template=None, name=None,
                       labels=None, mode=None, update_config=None,
                       networks=None, endpoint_config=None):
        """Update an existing service.

        *version* must be the service's current version counter (see
        inspect_service) to guard against concurrent modification. Only the
        non-None fields are sent. Returns True on success.
        """
        payload = {}
        headers = {}
        if name is not None:
            payload['Name'] = name
        if labels is not None:
            payload['Labels'] = labels
        if mode is not None:
            payload['Mode'] = mode
        if task_template is not None:
            image = task_template.get('ContainerSpec', {}).get('Image', None)
            if image is not None:
                # Forward registry credentials for the (possibly new) image.
                registry, repo_name = auth.resolve_repository_name(image)
                auth_header = auth.get_config_header(self, registry)
                if auth_header:
                    headers['X-Registry-Auth'] = auth_header
            payload['TaskTemplate'] = task_template
        if update_config is not None:
            payload['UpdateConfig'] = update_config
        if networks is not None:
            payload['Networks'] = networks
        if endpoint_config is not None:
            payload['Endpoint'] = endpoint_config

        response = self._post_json(
            self._url('/services/{0}/update', service), data=payload,
            params={'version': version}, headers=headers
        )
        self._raise_for_status(response)
        return True

http://git-wip-us.apache.org/repos/asf/incubator-senssoft-tap/blob/6a81d1e7/env2/lib/python2.7/site-packages/docker/api/swarm.py
----------------------------------------------------------------------
diff --git a/env2/lib/python2.7/site-packages/docker/api/swarm.py 
b/env2/lib/python2.7/site-packages/docker/api/swarm.py
deleted file mode 100644
index d099364..0000000
--- a/env2/lib/python2.7/site-packages/docker/api/swarm.py
+++ /dev/null
@@ -1,78 +0,0 @@
-from .. import utils
-import logging
-log = logging.getLogger(__name__)
-
-
class SwarmApiMixin(object):
    """Client methods for swarm-mode cluster management (API >= 1.24)."""

    def create_swarm_spec(self, *args, **kwargs):
        """Convenience constructor for a SwarmSpec to pass to init_swarm()."""
        return utils.SwarmSpec(*args, **kwargs)

    @utils.minimum_version('1.24')
    def init_swarm(self, advertise_addr=None, listen_addr='0.0.0.0:2377',
                   force_new_cluster=False, swarm_spec=None):
        """Initialize a new swarm with this node as manager; returns True."""
        if swarm_spec is not None and not isinstance(swarm_spec, dict):
            raise TypeError('swarm_spec must be a dictionary')

        payload = {
            'AdvertiseAddr': advertise_addr,
            'ListenAddr': listen_addr,
            'ForceNewCluster': force_new_cluster,
            'Spec': swarm_spec,
        }
        response = self._post_json(self._url('/swarm/init'), data=payload)
        self._raise_for_status(response)
        return True

    @utils.minimum_version('1.24')
    def inspect_swarm(self):
        """Return the JSON description of the current swarm."""
        return self._result(self._get(self._url('/swarm')), True)

    @utils.check_resource
    @utils.minimum_version('1.24')
    def inspect_node(self, node_id):
        """Return the JSON description of a single swarm node."""
        return self._result(self._get(self._url('/nodes/{0}', node_id)), True)

    @utils.minimum_version('1.24')
    def join_swarm(self, remote_addrs, join_token, listen_addr=None,
                   advertise_addr=None):
        """Join an existing swarm; returns True on success."""
        payload = {
            "RemoteAddrs": remote_addrs,
            "ListenAddr": listen_addr,
            "JoinToken": join_token,
            "AdvertiseAddr": advertise_addr,
        }
        response = self._post_json(self._url('/swarm/join'), data=payload)
        self._raise_for_status(response)
        return True

    @utils.minimum_version('1.24')
    def leave_swarm(self, force=False):
        """Leave the swarm; returns True on success."""
        response = self._post(
            self._url('/swarm/leave'), params={'force': force}
        )
        self._raise_for_status(response)
        return True

    @utils.minimum_version('1.24')
    def nodes(self, filters=None):
        """List swarm nodes, optionally narrowed by *filters*."""
        query = {}
        if filters:
            query['filters'] = utils.convert_filters(filters)
        return self._result(self._get(self._url('/nodes'), params=query), True)

    @utils.minimum_version('1.24')
    def update_swarm(self, version, swarm_spec=None, rotate_worker_token=False,
                     rotate_manager_token=False):
        """Update swarm configuration and/or rotate join tokens.

        *version* must match the swarm's current version counter.
        """
        query = {
            'rotateWorkerToken': rotate_worker_token,
            'rotateManagerToken': rotate_manager_token,
            'version': version
        }
        response = self._post_json(
            self._url('/swarm/update'), data=swarm_spec, params=query
        )
        self._raise_for_status(response)
        return True

http://git-wip-us.apache.org/repos/asf/incubator-senssoft-tap/blob/6a81d1e7/env2/lib/python2.7/site-packages/docker/api/volume.py
----------------------------------------------------------------------
diff --git a/env2/lib/python2.7/site-packages/docker/api/volume.py 
b/env2/lib/python2.7/site-packages/docker/api/volume.py
deleted file mode 100644
index afc72cb..0000000
--- a/env2/lib/python2.7/site-packages/docker/api/volume.py
+++ /dev/null
@@ -1,46 +0,0 @@
-from .. import errors
-from .. import utils
-
-
class VolumeApiMixin(object):
    """Client methods for the daemon's ``/volumes`` endpoints (API >= 1.21)."""

    @utils.minimum_version('1.21')
    def volumes(self, filters=None):
        """List volumes, optionally narrowed by *filters*."""
        params = {
            'filters': utils.convert_filters(filters) if filters else None
        }
        url = self._url('/volumes')
        return self._result(self._get(url, params=params), True)

    @utils.minimum_version('1.21')
    def create_volume(self, name, driver=None, driver_opts=None, labels=None):
        """Create a named volume.

        Raises:
            TypeError: driver_opts or labels is not a dict.
            errors.InvalidVersion: labels used against an API older than 1.23.
        """
        url = self._url('/volumes/create')
        if driver_opts is not None and not isinstance(driver_opts, dict):
            raise TypeError('driver_opts must be a dictionary')

        data = {
            'Name': name,
            'Driver': driver,
            'DriverOpts': driver_opts,
        }

        if labels is not None:
            # Consistency fix: the network/service mixins spell this check as
            # version_lt(self._version, '1.23'); the previous
            # compare_version('1.23', self._version) < 0 is equivalent but
            # much harder to read.
            if utils.version_lt(self._version, '1.23'):
                raise errors.InvalidVersion(
                    'volume labels were introduced in API 1.23'
                )
            if not isinstance(labels, dict):
                raise TypeError('labels must be a dictionary')
            data["Labels"] = labels

        return self._result(self._post_json(url, data=data), True)

    @utils.minimum_version('1.21')
    def inspect_volume(self, name):
        """Return the JSON description of a single volume."""
        url = self._url('/volumes/{0}', name)
        return self._result(self._get(url), True)

    @utils.minimum_version('1.21')
    def remove_volume(self, name):
        """Delete a volume by name."""
        url = self._url('/volumes/{0}', name)
        resp = self._delete(url)
        self._raise_for_status(resp)

http://git-wip-us.apache.org/repos/asf/incubator-senssoft-tap/blob/6a81d1e7/env2/lib/python2.7/site-packages/docker/auth/__init__.py
----------------------------------------------------------------------
diff --git a/env2/lib/python2.7/site-packages/docker/auth/__init__.py 
b/env2/lib/python2.7/site-packages/docker/auth/__init__.py
deleted file mode 100644
index 6fc83f8..0000000
--- a/env2/lib/python2.7/site-packages/docker/auth/__init__.py
+++ /dev/null
@@ -1,8 +0,0 @@
-from .auth import (
-    INDEX_NAME,
-    INDEX_URL,
-    encode_header,
-    load_config,
-    resolve_authconfig,
-    resolve_repository_name,
-)  # flake8: noqa
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/incubator-senssoft-tap/blob/6a81d1e7/env2/lib/python2.7/site-packages/docker/auth/auth.py
----------------------------------------------------------------------
diff --git a/env2/lib/python2.7/site-packages/docker/auth/auth.py 
b/env2/lib/python2.7/site-packages/docker/auth/auth.py
deleted file mode 100644
index dc0baea..0000000
--- a/env2/lib/python2.7/site-packages/docker/auth/auth.py
+++ /dev/null
@@ -1,303 +0,0 @@
-import base64
-import json
-import logging
-import os
-
-import dockerpycreds
-import six
-
-from .. import errors
-
# Canonical name and v1 endpoint URL of the default (Docker Hub) index.
INDEX_NAME = 'docker.io'
INDEX_URL = 'https://{0}/v1/'.format(INDEX_NAME)
# Modern and legacy on-disk locations of the CLI credential/config files.
DOCKER_CONFIG_FILENAME = os.path.join('.docker', 'config.json')
LEGACY_DOCKER_CONFIG_FILENAME = '.dockercfg'
# Sentinel username meaning "the secret is an identity token, not a password".
TOKEN_USERNAME = '<token>'

log = logging.getLogger(__name__)
-
-
def resolve_repository_name(repo_name):
    """Split *repo_name* and return ``(resolved index name, remote name)``.

    Raises:
        errors.InvalidRepository: the name embeds a URL scheme, or the index
            portion begins/ends with a hyphen.
    """
    if '://' in repo_name:
        raise errors.InvalidRepository(
            'Repository name cannot contain a scheme ({0})'.format(repo_name)
        )

    index_name, remote_name = split_repo_name(repo_name)
    has_edge_hyphen = index_name[0] == '-' or index_name[-1] == '-'
    if has_edge_hyphen:
        raise errors.InvalidRepository(
            'Invalid index name ({0}). Cannot begin or end with a'
            ' hyphen.'.format(index_name)
        )

    return resolve_index_name(index_name), remote_name
-
-
def resolve_index_name(index_name):
    """Normalize an index host; 'index.docker.io' collapses to 'docker.io'."""
    hostname = convert_to_hostname(index_name)
    if hostname == 'index.' + INDEX_NAME:
        return INDEX_NAME
    return hostname
-
-
def get_config_header(client, registry):
    """Return the encoded X-Registry-Auth header value for *registry*, or None."""
    log.debug('Looking for auth config')
    if not client._auth_configs:
        log.debug(
            "No auth config in memory - loading from filesystem"
        )
        client._auth_configs = load_config()

    authcfg = resolve_authconfig(client._auth_configs, registry)
    if not authcfg:
        # A missing entry is fine: anonymous (read-only) pulls are allowed,
        # so the header is only sent when credentials actually exist.
        log.debug('No auth config found')
        return None

    log.debug('Found auth config')
    # encode_header expects the dict format used throughout this module:
    # username, password, serveraddress, email.
    return encode_header(authcfg)
-
-
def split_repo_name(repo_name):
    """Split ``host/name`` into ``(index host, remote name)``.

    Names whose first component does not look like a hostname (no dot, no
    colon, not 'localhost') — e.g. 'ubuntu' or 'user/foobar' — belong to the
    default docker index.
    """
    head, sep, tail = repo_name.partition('/')
    looks_like_host = '.' in head or ':' in head or head == 'localhost'
    if sep and looks_like_host:
        return head, tail
    return INDEX_NAME, repo_name
-
-
def resolve_authconfig(authconfig, registry=None):
    """
    Returns the authentication data from the given auth configuration for a
    specific registry. As with the Docker client, legacy entries in the config
    with full URLs are stripped down to hostnames before checking for a match.
    Returns None if no match was found.
    """
    if 'credsStore' in authconfig:
        log.debug(
            'Using credentials store "{0}"'.format(authconfig['credsStore'])
        )
        return _resolve_authconfig_credstore(
            authconfig, registry, authconfig['credsStore']
        )

    # Default to the public index server
    registry = resolve_index_name(registry) if registry else INDEX_NAME
    log.debug("Looking for auth entry for {0}".format(repr(registry)))

    try:
        entry = authconfig[registry]
    except KeyError:
        pass
    else:
        log.debug("Found {0}".format(repr(registry)))
        return entry

    # Legacy entries may be keyed by full URL; compare by resolved hostname.
    for key, conf in six.iteritems(authconfig):
        if resolve_index_name(key) == registry:
            log.debug("Found {0}".format(repr(key)))
            return conf

    log.debug("No entry found")
    return None
-
-
def _resolve_authconfig_credstore(authconfig, registry, credstore_name):
    """Fetch credentials for *registry* from the external credential store."""
    if not registry or registry == INDEX_NAME:
        # The ecosystem is a little schizophrenic with index.docker.io VS
        # docker.io - in that case, it seems the full URL is necessary.
        registry = 'https://index.docker.io/v1/'
    log.debug("Looking for auth entry for {0}".format(repr(registry)))
    store = dockerpycreds.Store(credstore_name)
    try:
        data = store.get(registry)
    except dockerpycreds.CredentialsNotFound:
        log.debug('No entry found')
        return None
    except dockerpycreds.StoreError as e:
        raise errors.DockerException(
            'Credentials store error: {0}'.format(repr(e))
        )

    result = {
        'ServerAddress': registry,
    }
    if data['Username'] == TOKEN_USERNAME:
        # The <token> sentinel marks the secret as an identity token.
        result['IdentityToken'] = data['Secret']
    else:
        result['Username'] = data['Username']
        result['Password'] = data['Secret']
    return result
-
-
def convert_to_hostname(url):
    """Strip an http(s) scheme and any path, leaving only ``host[:port]``."""
    stripped = url.replace('http://', '').replace('https://', '')
    return stripped.split('/', 1)[0]
-
-
def decode_auth(auth):
    """Decode a base64 ``user:password`` blob into a unicode 2-tuple."""
    if isinstance(auth, six.string_types):
        auth = auth.encode('ascii')
    decoded = base64.b64decode(auth)
    # Split on the first colon only: passwords may themselves contain colons.
    user, secret = decoded.split(b':', 1)
    return user.decode('utf8'), secret.decode('utf8')
-
-
def encode_header(auth):
    """JSON-serialize *auth* and return it URL-safe base64 encoded (bytes)."""
    return base64.urlsafe_b64encode(json.dumps(auth).encode('ascii'))
-
-
def parse_auth(entries, raise_on_error=False):
    """
    Parses authentication entries

    Args:
      entries:        Dict of authentication entries.
      raise_on_error: If set to true, an invalid format will raise
                      InvalidConfigFile

    Returns:
      Authentication registry.
    """

    conf = {}
    for registry, entry in six.iteritems(entries):
        if not isinstance(entry, dict):
            log.debug(
                'Config entry for key {0} is not auth config'.format(registry)
            )
            # We sometimes fall back to parsing the whole config as if it was
            # the auth config by itself, for legacy purposes. In that case, we
            # fail silently and return an empty conf if any of the keys is not
            # formatted properly.
            if raise_on_error:
                raise errors.InvalidConfigFile(
                    'Invalid configuration for registry {0}'.format(registry)
                )
            return {}
        if 'identitytoken' in entry:
            log.debug('Found an IdentityToken entry for registry {0}'.format(
                registry
            ))
            conf[registry] = {
                'IdentityToken': entry['identitytoken']
            }
            continue  # Other values are irrelevant if we have a token, skip.

        if 'auth' not in entry:
            # Starting with engine v1.11 (API 1.23), an empty dictionary is
            # a valid value in the auths config.
            # https://github.com/docker/compose/issues/3265
            # Bug fix: the '{0}' placeholder was previously never formatted,
            # so the literal '{0}' appeared in the log instead of the registry.
            log.debug(
                'Auth data for {0} is absent. Client might be using a '
                'credentials store instead.'.format(registry)
            )
            conf[registry] = {}
            continue

        username, password = decode_auth(entry['auth'])
        log.debug(
            'Found entry (registry={0}, username={1})'
            .format(repr(registry), repr(username))
        )

        conf[registry] = {
            'username': username,
            'password': password,
            'email': entry.get('email'),
            'serveraddress': registry,
        }
    return conf
-
-
def find_config_file(config_path=None):
    """Return the first existing Docker config file path, or None.

    Search order: explicit *config_path*, $DOCKER_CONFIG, the modern
    ~/.docker/config.json, then the legacy ~/.dockercfg.
    """
    environment_path = os.path.join(
        os.environ.get('DOCKER_CONFIG'),
        os.path.basename(DOCKER_CONFIG_FILENAME)
    ) if os.environ.get('DOCKER_CONFIG') else None

    # Materialize the filter result: on Python 3 `filter` returns a lazy
    # iterator, so the debug line below would log "<filter object ...>"
    # instead of the actual candidate list. (This codebase targets both
    # Python 2 and 3 via six.)
    paths = list(filter(None, [
        config_path,  # 1
        environment_path,  # 2
        os.path.join(os.path.expanduser('~'), DOCKER_CONFIG_FILENAME),  # 3
        os.path.join(
            os.path.expanduser('~'), LEGACY_DOCKER_CONFIG_FILENAME
        )  # 4
    ]))

    log.debug("Trying paths: {0}".format(repr(paths)))

    for path in paths:
        if os.path.exists(path):
            log.debug("Found file at path: {0}".format(path))
            return path

    log.debug("No config file found")

    return None
-
-
def load_config(config_path=None):
    """
    Loads authentication data from a Docker configuration file in the given
    root directory or if config_path is passed use given path.
    Lookup priority:
        explicit config_path parameter > DOCKER_CONFIG environment variable >
        ~/.docker/config.json > ~/.dockercfg
    """
    config_file = find_config_file(config_path)

    if not config_file:
        # No file anywhere in the lookup chain: behave as "no credentials".
        return {}

    # First attempt: modern JSON config (config.json style).
    try:
        with open(config_file) as f:
            data = json.load(f)
            res = {}
            if data.get('auths'):
                log.debug("Found 'auths' section")
                res.update(parse_auth(data['auths'], raise_on_error=True))
            if data.get('HttpHeaders'):
                log.debug("Found 'HttpHeaders' section")
                res.update({'HttpHeaders': data['HttpHeaders']})
            if data.get('credsStore'):
                log.debug("Found 'credsStore' section")
                res.update({'credsStore': data['credsStore']})
            if res:
                return res
            else:
                # Valid JSON but none of the known sections: re-read the whole
                # document treating it as a bare auth dict (legacy layout).
                log.debug("Couldn't find 'auths' or 'HttpHeaders' sections")
                f.seek(0)
                return parse_auth(json.load(f))
    except (IOError, KeyError, ValueError) as e:
        # Likely missing new Docker config file or it's in an
        # unknown format, continue to attempt to read old location
        # and format.
        log.debug(e)

    # Second attempt: legacy .dockercfg "key = value" line format, where the
    # first value is the base64 auth blob and the second is the email.
    log.debug("Attempting to parse legacy auth file format")
    try:
        data = []
        with open(config_file) as f:
            for line in f.readlines():
                data.append(line.strip().split(' = ')[1])
            if len(data) < 2:
                # Not enough data
                raise errors.InvalidConfigFile(
                    'Invalid or empty configuration file!'
                )

        username, password = decode_auth(data[0])
        return {
            INDEX_NAME: {
                'username': username,
                'password': password,
                'email': data[1],
                'serveraddress': INDEX_URL,
            }
        }
    except Exception as e:
        # Best-effort: any legacy-parse failure falls through to empty config.
        log.debug(e)
        pass

    log.debug("All parsing attempts failed - returning empty config")
    return {}

http://git-wip-us.apache.org/repos/asf/incubator-senssoft-tap/blob/6a81d1e7/env2/lib/python2.7/site-packages/docker/client.py
----------------------------------------------------------------------
diff --git a/env2/lib/python2.7/site-packages/docker/client.py 
b/env2/lib/python2.7/site-packages/docker/client.py
deleted file mode 100644
index 3fa19e0..0000000
--- a/env2/lib/python2.7/site-packages/docker/client.py
+++ /dev/null
@@ -1,406 +0,0 @@
-import json
-import struct
-from functools import partial
-
-import requests
-import requests.exceptions
-import six
-import websocket
-
-
-from . import api
-from . import constants
-from . import errors
-from .auth import auth
-from .ssladapter import ssladapter
-from .tls import TLSConfig
-from .transport import UnixAdapter
-from .utils import utils, check_resource, update_headers, kwargs_from_env
-from .utils.socket import frames_iter
-try:
-    from .transport import NpipeAdapter
-except ImportError:
-    pass
-
-
def from_env(**kwargs):
    """Module-level convenience wrapper around ``Client.from_env()``."""
    return Client.from_env(**kwargs)
-
-
-class Client(
-        requests.Session,
-        api.BuildApiMixin,
-        api.ContainerApiMixin,
-        api.DaemonApiMixin,
-        api.ExecApiMixin,
-        api.ImageApiMixin,
-        api.NetworkApiMixin,
-        api.ServiceApiMixin,
-        api.SwarmApiMixin,
-        api.VolumeApiMixin):
    def __init__(self, base_url=None, version=None,
                 timeout=constants.DEFAULT_TIMEOUT_SECONDS, tls=False,
                 user_agent=constants.DEFAULT_USER_AGENT,
                 num_pools=constants.DEFAULT_NUM_POOLS):
        """Set up the transport (HTTP(S), unix socket, or npipe) and API version.

        Args:
            base_url: Daemon endpoint; unix- and npipe-style URLs are remapped
                onto a custom 'http+docker://' adapter.
            version: API version string, 'auto' to query the daemon, or None
                for the library default.
            timeout: Default per-request timeout in seconds.
            tls: bool or TLSConfig instance; requires an explicit base_url.
            user_agent: Value sent in the User-Agent header.
            num_pools: Connection pool size for the custom adapters.

        Raises:
            errors.TLSParameterError: tls requested without a base_url.
            errors.DockerException: unsupported scheme, missing npipe support,
                or a non-string/non-None version.
        """
        super(Client, self).__init__()

        if tls and not base_url:
            raise errors.TLSParameterError(
                'If using TLS, the base_url argument must be provided.'
            )

        self.base_url = base_url
        self.timeout = timeout
        self.headers['User-Agent'] = user_agent

        self._auth_configs = auth.load_config()

        base_url = utils.parse_host(
            base_url, constants.IS_WINDOWS_PLATFORM, tls=bool(tls)
        )
        if base_url.startswith('http+unix://'):
            self._custom_adapter = UnixAdapter(
                base_url, timeout, num_pools=num_pools
            )
            self.mount('http+docker://', self._custom_adapter)
            self._unmount('http://', 'https://')
            # Placeholder hostname; actual traffic goes through the adapter
            # mounted on the http+docker:// scheme above.
            self.base_url = 'http+docker://localunixsocket'
        elif base_url.startswith('npipe://'):
            if not constants.IS_WINDOWS_PLATFORM:
                raise errors.DockerException(
                    'The npipe:// protocol is only supported on Windows'
                )
            try:
                self._custom_adapter = NpipeAdapter(
                    base_url, timeout, num_pools=num_pools
                )
            except NameError:
                # NpipeAdapter is imported at module level inside a
                # try/except ImportError; NameError here means that import
                # failed (pypiwin32 not installed).
                raise errors.DockerException(
                    'Install pypiwin32 package to enable npipe:// support'
                )
            self.mount('http+docker://', self._custom_adapter)
            self.base_url = 'http+docker://localnpipe'
        else:
            # Use SSLAdapter for the ability to specify SSL version
            if isinstance(tls, TLSConfig):
                tls.configure_client(self)
            elif tls:
                self._custom_adapter = ssladapter.SSLAdapter(
                    pool_connections=num_pools
                )
                self.mount('https://', self._custom_adapter)
            self.base_url = base_url

        # version detection needs to be after unix adapter mounting
        if version is None:
            self._version = constants.DEFAULT_DOCKER_API_VERSION
        elif isinstance(version, six.string_types):
            if version.lower() == 'auto':
                self._version = self._retrieve_server_version()
            else:
                self._version = version
        else:
            raise errors.DockerException(
                'Version parameter must be a string or None. Found {0}'.format(
                    type(version).__name__
                )
            )
-
-    @classmethod
-    def from_env(cls, **kwargs):
-        version = kwargs.pop('version', None)
-        return cls(version=version, **kwargs_from_env(**kwargs))
-
-    def _retrieve_server_version(self):
-        try:
-            return self.version(api_version=False)["ApiVersion"]
-        except KeyError:
-            raise errors.DockerException(
-                'Invalid response from docker daemon: key "ApiVersion"'
-                ' is missing.'
-            )
-        except Exception as e:
-            raise errors.DockerException(
-                'Error while fetching server API version: {0}'.format(e)
-            )
-
-    def _set_request_timeout(self, kwargs):
-        """Prepare the kwargs for an HTTP request by inserting the timeout
-        parameter, if not already present."""
-        kwargs.setdefault('timeout', self.timeout)
-        return kwargs
-
-    @update_headers
-    def _post(self, url, **kwargs):
-        return self.post(url, **self._set_request_timeout(kwargs))
-
-    @update_headers
-    def _get(self, url, **kwargs):
-        return self.get(url, **self._set_request_timeout(kwargs))
-
-    @update_headers
-    def _put(self, url, **kwargs):
-        return self.put(url, **self._set_request_timeout(kwargs))
-
-    @update_headers
-    def _delete(self, url, **kwargs):
-        return self.delete(url, **self._set_request_timeout(kwargs))
-
-    def _url(self, pathfmt, *args, **kwargs):
-        for arg in args:
-            if not isinstance(arg, six.string_types):
-                raise ValueError(
-                    'Expected a string but found {0} ({1}) '
-                    'instead'.format(arg, type(arg))
-                )
-
-        quote_f = partial(six.moves.urllib.parse.quote_plus, safe="/:")
-        args = map(quote_f, args)
-
-        if kwargs.get('versioned_api', True):
-            return '{0}/v{1}{2}'.format(
-                self.base_url, self._version, pathfmt.format(*args)
-            )
-        else:
-            return '{0}{1}'.format(self.base_url, pathfmt.format(*args))
-
-    def _raise_for_status(self, response, explanation=None):
-        """Raises stored :class:`APIError`, if one occurred."""
-        try:
-            response.raise_for_status()
-        except requests.exceptions.HTTPError as e:
-            if e.response.status_code == 404:
-                raise errors.NotFound(e, response, explanation=explanation)
-            raise errors.APIError(e, response, explanation=explanation)
-
-    def _result(self, response, json=False, binary=False):
-        assert not (json and binary)
-        self._raise_for_status(response)
-
-        if json:
-            return response.json()
-        if binary:
-            return response.content
-        return response.text
-
-    def _post_json(self, url, data, **kwargs):
-        # Go <1.1 can't unserialize null to a string
-        # so we do this disgusting thing here.
-        data2 = {}
-        if data is not None:
-            for k, v in six.iteritems(data):
-                if v is not None:
-                    data2[k] = v
-
-        if 'headers' not in kwargs:
-            kwargs['headers'] = {}
-        kwargs['headers']['Content-Type'] = 'application/json'
-        return self._post(url, data=json.dumps(data2), **kwargs)
-
-    def _attach_params(self, override=None):
-        return override or {
-            'stdout': 1,
-            'stderr': 1,
-            'stream': 1
-        }
-
-    @check_resource
-    def _attach_websocket(self, container, params=None):
-        url = self._url("/containers/{0}/attach/ws", container)
-        req = requests.Request("POST", url, params=self._attach_params(params))
-        full_url = req.prepare().url
-        full_url = full_url.replace("http://", "ws://", 1)
-        full_url = full_url.replace("https://", "wss://", 1)
-        return self._create_websocket_connection(full_url)
-
-    def _create_websocket_connection(self, url):
-        return websocket.create_connection(url)
-
-    def _get_raw_response_socket(self, response):
-        self._raise_for_status(response)
-        if self.base_url == "http+docker://localnpipe":
-            sock = response.raw._fp.fp.raw.sock
-        elif six.PY3:
-            sock = response.raw._fp.fp.raw
-            if self.base_url.startswith("https://"):
-                sock = sock._sock
-        else:
-            sock = response.raw._fp.fp._sock
-        try:
-            # Keep a reference to the response to stop it being garbage
-            # collected. If the response is garbage collected, it will
-            # close TLS sockets.
-            sock._response = response
-        except AttributeError:
-            # UNIX sockets can't have attributes set on them, but that's
-            # fine because we won't be doing TLS over them
-            pass
-
-        return sock
-
-    def _stream_helper(self, response, decode=False):
-        """Generator for data coming from a chunked-encoded HTTP response."""
-        if response.raw._fp.chunked:
-            reader = response.raw
-            while not reader.closed:
-                # this read call will block until we get a chunk
-                data = reader.read(1)
-                if not data:
-                    break
-                if reader._fp.chunk_left:
-                    data += reader.read(reader._fp.chunk_left)
-                if decode:
-                    if six.PY3:
-                        data = data.decode('utf-8')
-                    # remove the trailing newline
-                    data = data.strip()
-                    # split the data at any newlines
-                    data_list = data.split("\r\n")
-                    # load and yield each line seperately
-                    for data in data_list:
-                        data = json.loads(data)
-                        yield data
-                else:
-                    yield data
-        else:
-            # Response isn't chunked, meaning we probably
-            # encountered an error immediately
-            yield self._result(response, json=decode)
-
-    def _multiplexed_buffer_helper(self, response):
-        """A generator of multiplexed data blocks read from a buffered
-        response."""
-        buf = self._result(response, binary=True)
-        walker = 0
-        while True:
-            if len(buf[walker:]) < 8:
-                break
-            _, length = struct.unpack_from('>BxxxL', buf[walker:])
-            start = walker + constants.STREAM_HEADER_SIZE_BYTES
-            end = start + length
-            walker = end
-            yield buf[start:end]
-
-    def _multiplexed_response_stream_helper(self, response):
-        """A generator of multiplexed data blocks coming from a response
-        stream."""
-
-        # Disable timeout on the underlying socket to prevent
-        # Read timed out(s) for long running processes
-        socket = self._get_raw_response_socket(response)
-        self._disable_socket_timeout(socket)
-
-        while True:
-            header = response.raw.read(constants.STREAM_HEADER_SIZE_BYTES)
-            if not header:
-                break
-            _, length = struct.unpack('>BxxxL', header)
-            if not length:
-                continue
-            data = response.raw.read(length)
-            if not data:
-                break
-            yield data
-
-    def _stream_raw_result_old(self, response):
-        ''' Stream raw output for API versions below 1.6 '''
-        self._raise_for_status(response)
-        for line in response.iter_lines(chunk_size=1,
-                                        decode_unicode=True):
-            # filter out keep-alive new lines
-            if line:
-                yield line
-
-    def _stream_raw_result(self, response):
-        ''' Stream result for TTY-enabled container above API 1.6 '''
-        self._raise_for_status(response)
-        for out in response.iter_content(chunk_size=1, decode_unicode=True):
-            yield out
-
-    def _read_from_socket(self, response, stream):
-        socket = self._get_raw_response_socket(response)
-
-        if stream:
-            return frames_iter(socket)
-        else:
-            return six.binary_type().join(frames_iter(socket))
-
-    def _disable_socket_timeout(self, socket):
-        """ Depending on the combination of python version and whether we're
-        connecting over http or https, we might need to access _sock, which
-        may or may not exist; or we may need to just settimeout on socket
-        itself, which also may or may not have settimeout on it. To avoid
-        missing the correct one, we try both.
-
-        We also do not want to set the timeout if it is already disabled, as
-        you run the risk of changing a socket that was non-blocking to
-        blocking, for example when using gevent.
-        """
-        sockets = [socket, getattr(socket, '_sock', None)]
-
-        for s in sockets:
-            if not hasattr(s, 'settimeout'):
-                continue
-
-            timeout = -1
-
-            if hasattr(s, 'gettimeout'):
-                timeout = s.gettimeout()
-
-            # Don't change the timeout if it is already disabled.
-            if timeout is None or timeout == 0.0:
-                continue
-
-            s.settimeout(None)
-
-    def _get_result(self, container, stream, res):
-        cont = self.inspect_container(container)
-        return self._get_result_tty(stream, res, cont['Config']['Tty'])
-
-    def _get_result_tty(self, stream, res, is_tty):
-        # Stream multi-plexing was only introduced in API v1.6. Anything
-        # before that needs old-style streaming.
-        if utils.compare_version('1.6', self._version) < 0:
-            return self._stream_raw_result_old(res)
-
-        # We should also use raw streaming (without keep-alives)
-        # if we're dealing with a tty-enabled container.
-        if is_tty:
-            return self._stream_raw_result(res) if stream else \
-                self._result(res, binary=True)
-
-        self._raise_for_status(res)
-        sep = six.binary_type()
-        if stream:
-            return self._multiplexed_response_stream_helper(res)
-        else:
-            return sep.join(
-                [x for x in self._multiplexed_buffer_helper(res)]
-            )
-
-    def _unmount(self, *args):
-        for proto in args:
-            self.adapters.pop(proto)
-
-    def get_adapter(self, url):
-        try:
-            return super(Client, self).get_adapter(url)
-        except requests.exceptions.InvalidSchema as e:
-            if self._custom_adapter:
-                return self._custom_adapter
-            else:
-                raise e
-
-    @property
-    def api_version(self):
-        return self._version
-
-
-class AutoVersionClient(Client):
-    def __init__(self, *args, **kwargs):
-        if 'version' in kwargs and kwargs['version']:
-            raise errors.DockerException(
-                'Can not specify version for AutoVersionClient'
-            )
-        kwargs['version'] = 'auto'
-        super(AutoVersionClient, self).__init__(*args, **kwargs)

http://git-wip-us.apache.org/repos/asf/incubator-senssoft-tap/blob/6a81d1e7/env2/lib/python2.7/site-packages/docker/constants.py
----------------------------------------------------------------------
diff --git a/env2/lib/python2.7/site-packages/docker/constants.py 
b/env2/lib/python2.7/site-packages/docker/constants.py
deleted file mode 100644
index 0c9a020..0000000
--- a/env2/lib/python2.7/site-packages/docker/constants.py
+++ /dev/null
@@ -1,18 +0,0 @@
-import sys
-from .version import version
-
-DEFAULT_DOCKER_API_VERSION = '1.24'
-DEFAULT_TIMEOUT_SECONDS = 60
-STREAM_HEADER_SIZE_BYTES = 8
-CONTAINER_LIMITS_KEYS = [
-    'memory', 'memswap', 'cpushares', 'cpusetcpus'
-]
-
-INSECURE_REGISTRY_DEPRECATION_WARNING = \
-    'The `insecure_registry` argument to {} ' \
-    'is deprecated and non-functional. Please remove it.'
-
-IS_WINDOWS_PLATFORM = (sys.platform == 'win32')
-
-DEFAULT_USER_AGENT = "docker-py/{0}".format(version)
-DEFAULT_NUM_POOLS = 25

http://git-wip-us.apache.org/repos/asf/incubator-senssoft-tap/blob/6a81d1e7/env2/lib/python2.7/site-packages/docker/errors.py
----------------------------------------------------------------------
diff --git a/env2/lib/python2.7/site-packages/docker/errors.py 
b/env2/lib/python2.7/site-packages/docker/errors.py
deleted file mode 100644
index 97be802..0000000
--- a/env2/lib/python2.7/site-packages/docker/errors.py
+++ /dev/null
@@ -1,75 +0,0 @@
-import requests
-
-
-class APIError(requests.exceptions.HTTPError):
-    def __init__(self, message, response, explanation=None):
-        # requests 1.2 supports response as a keyword argument, but
-        # requests 1.1 doesn't
-        super(APIError, self).__init__(message)
-        self.response = response
-
-        self.explanation = explanation
-
-        if self.explanation is None and response.content:
-            self.explanation = response.content.strip()
-
-    def __str__(self):
-        message = super(APIError, self).__str__()
-
-        if self.is_client_error():
-            message = '{0} Client Error: {1}'.format(
-                self.response.status_code, self.response.reason)
-
-        elif self.is_server_error():
-            message = '{0} Server Error: {1}'.format(
-                self.response.status_code, self.response.reason)
-
-        if self.explanation:
-            message = '{0} ("{1}")'.format(message, self.explanation)
-
-        return message
-
-    def is_client_error(self):
-        return 400 <= self.response.status_code < 500
-
-    def is_server_error(self):
-        return 500 <= self.response.status_code < 600
-
-
-class DockerException(Exception):
-    pass
-
-
-class NotFound(APIError):
-    pass
-
-
-class InvalidVersion(DockerException):
-    pass
-
-
-class InvalidRepository(DockerException):
-    pass
-
-
-class InvalidConfigFile(DockerException):
-    pass
-
-
-class DeprecatedMethod(DockerException):
-    pass
-
-
-class TLSParameterError(DockerException):
-    def __init__(self, msg):
-        self.msg = msg
-
-    def __str__(self):
-        return self.msg + (". TLS configurations should map the Docker CLI "
-                           "client configurations. See "
-                           "https://docs.docker.com/engine/articles/https/ "
-                           "for API details.")
-
-
-class NullResource(DockerException, ValueError):
-    pass

http://git-wip-us.apache.org/repos/asf/incubator-senssoft-tap/blob/6a81d1e7/env2/lib/python2.7/site-packages/docker/ssladapter/__init__.py
----------------------------------------------------------------------
diff --git a/env2/lib/python2.7/site-packages/docker/ssladapter/__init__.py 
b/env2/lib/python2.7/site-packages/docker/ssladapter/__init__.py
deleted file mode 100644
index 1a5e1bb..0000000
--- a/env2/lib/python2.7/site-packages/docker/ssladapter/__init__.py
+++ /dev/null
@@ -1 +0,0 @@
-from .ssladapter import SSLAdapter # flake8: noqa

http://git-wip-us.apache.org/repos/asf/incubator-senssoft-tap/blob/6a81d1e7/env2/lib/python2.7/site-packages/docker/ssladapter/ssladapter.py
----------------------------------------------------------------------
diff --git a/env2/lib/python2.7/site-packages/docker/ssladapter/ssladapter.py 
b/env2/lib/python2.7/site-packages/docker/ssladapter/ssladapter.py
deleted file mode 100644
index e17dfad..0000000
--- a/env2/lib/python2.7/site-packages/docker/ssladapter/ssladapter.py
+++ /dev/null
@@ -1,66 +0,0 @@
-""" Resolves OpenSSL issues in some servers:
-      https://lukasa.co.uk/2013/01/Choosing_SSL_Version_In_Requests/
-      https://github.com/kennethreitz/requests/pull/799
-"""
-import sys
-
-from distutils.version import StrictVersion
-from requests.adapters import HTTPAdapter
-
-try:
-    import requests.packages.urllib3 as urllib3
-except ImportError:
-    import urllib3
-
-
-PoolManager = urllib3.poolmanager.PoolManager
-
-# Monkey-patching match_hostname with a version that supports
-# IP-address checking. Not necessary for Python 3.5 and above
-if sys.version_info[0] < 3 or sys.version_info[1] < 5:
-    from backports.ssl_match_hostname import match_hostname
-    urllib3.connection.match_hostname = match_hostname
-
-
-class SSLAdapter(HTTPAdapter):
-    '''An HTTPS Transport Adapter that uses an arbitrary SSL version.'''
-    def __init__(self, ssl_version=None, assert_hostname=None,
-                 assert_fingerprint=None, **kwargs):
-        self.ssl_version = ssl_version
-        self.assert_hostname = assert_hostname
-        self.assert_fingerprint = assert_fingerprint
-        super(SSLAdapter, self).__init__(**kwargs)
-
-    def init_poolmanager(self, connections, maxsize, block=False):
-        kwargs = {
-            'num_pools': connections,
-            'maxsize': maxsize,
-            'block': block,
-            'assert_hostname': self.assert_hostname,
-            'assert_fingerprint': self.assert_fingerprint,
-        }
-        if self.ssl_version and self.can_override_ssl_version():
-            kwargs['ssl_version'] = self.ssl_version
-
-        self.poolmanager = PoolManager(**kwargs)
-
-    def get_connection(self, *args, **kwargs):
-        """
-        Ensure assert_hostname is set correctly on our pool
-
-        We already take care of a normal poolmanager via init_poolmanager
-
-        But we still need to take care of when there is a proxy poolmanager
-        """
-        conn = super(SSLAdapter, self).get_connection(*args, **kwargs)
-        if conn.assert_hostname != self.assert_hostname:
-            conn.assert_hostname = self.assert_hostname
-        return conn
-
-    def can_override_ssl_version(self):
-        urllib_ver = urllib3.__version__.split('-')[0]
-        if urllib_ver is None:
-            return False
-        if urllib_ver == 'dev':
-            return True
-        return StrictVersion(urllib_ver) > StrictVersion('1.5')

http://git-wip-us.apache.org/repos/asf/incubator-senssoft-tap/blob/6a81d1e7/env2/lib/python2.7/site-packages/docker/tls.py
----------------------------------------------------------------------
diff --git a/env2/lib/python2.7/site-packages/docker/tls.py 
b/env2/lib/python2.7/site-packages/docker/tls.py
deleted file mode 100644
index 7abfa60..0000000
--- a/env2/lib/python2.7/site-packages/docker/tls.py
+++ /dev/null
@@ -1,75 +0,0 @@
-import os
-import ssl
-
-from . import errors
-from .ssladapter import ssladapter
-
-
-class TLSConfig(object):
-    cert = None
-    ca_cert = None
-    verify = None
-    ssl_version = None
-
-    def __init__(self, client_cert=None, ca_cert=None, verify=None,
-                 ssl_version=None, assert_hostname=None,
-                 assert_fingerprint=None):
-        # Argument compatibility/mapping with
-        # https://docs.docker.com/engine/articles/https/
-        # This diverges from the Docker CLI in that users can specify 'tls'
-        # here, but also disable any public/default CA pool verification by
-        # leaving tls_verify=False
-
-        self.assert_hostname = assert_hostname
-        self.assert_fingerprint = assert_fingerprint
-
-        # TLS v1.0 seems to be the safest default; SSLv23 fails in mysterious
-        # ways: https://github.com/docker/docker-py/issues/963
-
-        self.ssl_version = ssl_version or ssl.PROTOCOL_TLSv1
-
-        # "tls" and "tls_verify" must have both or neither cert/key files
-        # In either case, Alert the user when both are expected, but any are
-        # missing.
-
-        if client_cert:
-            try:
-                tls_cert, tls_key = client_cert
-            except ValueError:
-                raise errors.TLSParameterError(
-                    'client_config must be a tuple of'
-                    ' (client certificate, key file)'
-                )
-
-            if not (tls_cert and tls_key) or (not os.path.isfile(tls_cert) or
-               not os.path.isfile(tls_key)):
-                raise errors.TLSParameterError(
-                    'Path to a certificate and key files must be provided'
-                    ' through the client_config param'
-                )
-            self.cert = (tls_cert, tls_key)
-
-        # If verify is set, make sure the cert exists
-        self.verify = verify
-        self.ca_cert = ca_cert
-        if self.verify and self.ca_cert and not os.path.isfile(self.ca_cert):
-            raise errors.TLSParameterError(
-                'Invalid CA certificate provided for `tls_ca_cert`.'
-            )
-
-    def configure_client(self, client):
-        client.ssl_version = self.ssl_version
-
-        if self.verify and self.ca_cert:
-            client.verify = self.ca_cert
-        else:
-            client.verify = self.verify
-
-        if self.cert:
-            client.cert = self.cert
-
-        client.mount('https://', ssladapter.SSLAdapter(
-            ssl_version=self.ssl_version,
-            assert_hostname=self.assert_hostname,
-            assert_fingerprint=self.assert_fingerprint,
-        ))

http://git-wip-us.apache.org/repos/asf/incubator-senssoft-tap/blob/6a81d1e7/env2/lib/python2.7/site-packages/docker/transport/__init__.py
----------------------------------------------------------------------
diff --git a/env2/lib/python2.7/site-packages/docker/transport/__init__.py 
b/env2/lib/python2.7/site-packages/docker/transport/__init__.py
deleted file mode 100644
index 46dfdf8..0000000
--- a/env2/lib/python2.7/site-packages/docker/transport/__init__.py
+++ /dev/null
@@ -1,7 +0,0 @@
-# flake8: noqa
-from .unixconn import UnixAdapter
-try:
-    from .npipeconn import NpipeAdapter
-    from .npipesocket import NpipeSocket
-except ImportError:
-    pass
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/incubator-senssoft-tap/blob/6a81d1e7/env2/lib/python2.7/site-packages/docker/transport/npipeconn.py
----------------------------------------------------------------------
diff --git a/env2/lib/python2.7/site-packages/docker/transport/npipeconn.py 
b/env2/lib/python2.7/site-packages/docker/transport/npipeconn.py
deleted file mode 100644
index 017738e..0000000
--- a/env2/lib/python2.7/site-packages/docker/transport/npipeconn.py
+++ /dev/null
@@ -1,103 +0,0 @@
-import six
-import requests.adapters
-
-from .. import constants
-from .npipesocket import NpipeSocket
-
-if six.PY3:
-    import http.client as httplib
-else:
-    import httplib
-
-try:
-    import requests.packages.urllib3 as urllib3
-except ImportError:
-    import urllib3
-
-RecentlyUsedContainer = urllib3._collections.RecentlyUsedContainer
-
-
-class NpipeHTTPConnection(httplib.HTTPConnection, object):
-    def __init__(self, npipe_path, timeout=60):
-        super(NpipeHTTPConnection, self).__init__(
-            'localhost', timeout=timeout
-        )
-        self.npipe_path = npipe_path
-        self.timeout = timeout
-
-    def connect(self):
-        sock = NpipeSocket()
-        sock.settimeout(self.timeout)
-        sock.connect(self.npipe_path)
-        self.sock = sock
-
-
-class NpipeHTTPConnectionPool(urllib3.connectionpool.HTTPConnectionPool):
-    def __init__(self, npipe_path, timeout=60, maxsize=10):
-        super(NpipeHTTPConnectionPool, self).__init__(
-            'localhost', timeout=timeout, maxsize=maxsize
-        )
-        self.npipe_path = npipe_path
-        self.timeout = timeout
-
-    def _new_conn(self):
-        return NpipeHTTPConnection(
-            self.npipe_path, self.timeout
-        )
-
-    # When re-using connections, urllib3 tries to call select() on our
-    # NpipeSocket instance, causing a crash. To circumvent this, we override
-    # _get_conn, where that check happens.
-    def _get_conn(self, timeout):
-        conn = None
-        try:
-            conn = self.pool.get(block=self.block, timeout=timeout)
-
-        except AttributeError:  # self.pool is None
-            raise urllib3.exceptions.ClosedPoolError(self, "Pool is closed.")
-
-        except six.moves.queue.Empty:
-            if self.block:
-                raise urllib3.exceptions.EmptyPoolError(
-                    self,
-                    "Pool reached maximum size and no more "
-                    "connections are allowed."
-                )
-            pass  # Oh well, we'll create a new connection then
-
-        return conn or self._new_conn()
-
-
-class NpipeAdapter(requests.adapters.HTTPAdapter):
-    def __init__(self, base_url, timeout=60,
-                 num_pools=constants.DEFAULT_NUM_POOLS):
-        self.npipe_path = base_url.replace('npipe://', '')
-        self.timeout = timeout
-        self.pools = RecentlyUsedContainer(
-            num_pools, dispose_func=lambda p: p.close()
-        )
-        super(NpipeAdapter, self).__init__()
-
-    def get_connection(self, url, proxies=None):
-        with self.pools.lock:
-            pool = self.pools.get(url)
-            if pool:
-                return pool
-
-            pool = NpipeHTTPConnectionPool(
-                self.npipe_path, self.timeout
-            )
-            self.pools[url] = pool
-
-        return pool
-
-    def request_url(self, request, proxies):
-        # The select_proxy utility in requests errors out when the provided URL
-        # doesn't have a hostname, like is the case when using a UNIX socket.
-        # Since proxies are an irrelevant notion in the case of UNIX sockets
-        # anyway, we simply return the path URL directly.
-        # See also: https://github.com/docker/docker-py/issues/811
-        return request.path_url
-
-    def close(self):
-        self.pools.clear()


Reply via email to