http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/3895f8ca/aria/parser/utils/daemon.py
----------------------------------------------------------------------
diff --git a/aria/parser/utils/daemon.py b/aria/parser/utils/daemon.py
deleted file mode 100644
index b47eea1..0000000
--- a/aria/parser/utils/daemon.py
+++ /dev/null
@@ -1,70 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements.  See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License.  You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-from __future__ import absolute_import  # so we can import standard 'daemon'
-
-try:
-    import os
-    import signal
-    from time import sleep
-    from .console import puts, Colored
-    from daemon import DaemonContext
-    from daemon.pidfile import TimeoutPIDLockFile
-    from daemon.runner import is_pidfile_stale
-
-    def start_daemon(pidfile_path, log_path, acquire_timeout=5):
-        pidfile = TimeoutPIDLockFile(pidfile_path, acquire_timeout=acquire_timeout)
-        if is_pidfile_stale(pidfile):
-            pidfile.break_lock()
-        if pidfile.is_locked():
-            pid = pidfile.read_pid()
-            if pid is not None:
-                puts(Colored.red('Already running at pid: %d' % pid))
-            else:
-                puts(Colored.red('Already running'))
-            return None
-        logfile = open(log_path, 'w+t')
-        puts(Colored.blue('Starting'))
-        return DaemonContext(pidfile=pidfile, stdout=logfile, stderr=logfile)
-
-    def stop_daemon(pidfile_path, acquire_timeout=5):
-        pidfile = TimeoutPIDLockFile(pidfile_path, acquire_timeout=acquire_timeout)
-        pid = pidfile.read_pid()
-        if pid is not None:
-            puts(Colored.blue('Stopping pid: %d' % pid))
-            os.kill(pid, signal.SIGTERM)
-            while pidfile.is_locked():
-                puts(Colored.cyan('Waiting...'))
-                sleep(0.1)
-            puts(Colored.blue('Stopped'))
-        else:
-            puts(Colored.red('Not running'))
-
-    def status_daemon(pidfile_path, acquire_timeout=5):
-    pid = TimeoutPIDLockFile(pidfile_path, acquire_timeout=acquire_timeout).read_pid()
-        if pid is not None:
-            puts(Colored.blue('Running at pid: %d' % pid))
-        else:
-            puts(Colored.blue('Not running'))
-
-except ImportError:
-    def start_daemon(*args, **kwargs):
-        puts(Colored.red('Cannot start daemon in this environment'))
-
-    def stop_daemon(*args, **kwargs):
-        puts(Colored.red('Not running'))
-
-    def status_daemon(*args, **kwargs):
-        puts(Colored.blue('Not running'))
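
For reference, the removed helpers compose the third-party python-daemon package's
DaemonContext with a timed PID lock file. A minimal sketch of how such a context is
typically driven, assuming python-daemon is installed (the run_forever workload and
the paths are hypothetical):

    import time
    from daemon import DaemonContext
    from daemon.pidfile import TimeoutPIDLockFile

    def run_forever():  # hypothetical daemonized workload
        while True:
            time.sleep(1)

    pidfile = TimeoutPIDLockFile('/tmp/example.pid', acquire_timeout=5)
    with DaemonContext(pidfile=pidfile):  # forks, detaches, acquires the lock
        run_forever()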

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/3895f8ca/aria/parser/utils/exceptions.py
----------------------------------------------------------------------
diff --git a/aria/parser/utils/exceptions.py b/aria/parser/utils/exceptions.py
deleted file mode 100644
index 0370bb3..0000000
--- a/aria/parser/utils/exceptions.py
+++ /dev/null
@@ -1,64 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements.  See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License.  You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import sys
-import linecache
-
-from clint.textui import indent
-from .console import (puts, Colored)
-
-
-def print_exception(e, full=True, cause=False, traceback=None):
-    """
-    Prints the exception with nice colors and such.
-    """
-    def format_heading(e):
-        return '%s%s: %s' % (Colored.red('Caused by ') if cause else '', Colored.red(
-            e.__class__.__name__, bold=True), Colored.red(e))
-
-    puts(format_heading(e))
-    if full:
-        if cause:
-            if traceback:
-                print_traceback(traceback)
-        else:
-            print_traceback()
-    if hasattr(e, 'cause') and e.cause:
-        traceback = e.cause_traceback if hasattr(e, 'cause_traceback') else None
-        print_exception(e.cause, full=full, cause=True, traceback=traceback)
-
-def print_traceback(traceback=None):
-    """
-    Prints the traceback with nice colors and such.
-    """
-
-    if traceback is None:
-        _, _, traceback = sys.exc_info()
-    while traceback is not None:
-        frame = traceback.tb_frame
-        lineno = traceback.tb_lineno
-        code = frame.f_code
-        filename = code.co_filename
-        name = code.co_name
-        with indent(2):
-            puts('File "%s", line %s, in %s' % (Colored.blue(filename),
-                                                Colored.cyan(lineno),
-                                                Colored.cyan(name)))
-            linecache.checkcache(filename)
-            line = linecache.getline(filename, lineno, frame.f_globals)
-            if line:
-                with indent(2):
-                    puts(Colored.black(line.strip()))
-        traceback = traceback.tb_next
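
The removed print_traceback walks the traceback chain by hand via tb_frame/tb_lineno
and linecache; the standard traceback module performs the same walk. A minimal sketch
of the equivalent, minus the colors:

    import sys
    import traceback

    def print_plain_traceback():
        # extract_tb yields (filename, lineno, name, line) per frame
        _, _, tb = sys.exc_info()
        for filename, lineno, name, line in traceback.extract_tb(tb):
            print('File "%s", line %s, in %s' % (filename, lineno, name))
            if line:
                print('  ' + line.strip())

    try:
        1 / 0
    except ZeroDivisionError:
        print_plain_traceback()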

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/3895f8ca/aria/parser/utils/formatting.py
----------------------------------------------------------------------
diff --git a/aria/parser/utils/formatting.py b/aria/parser/utils/formatting.py
deleted file mode 100644
index 0a7b34d..0000000
--- a/aria/parser/utils/formatting.py
+++ /dev/null
@@ -1,207 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements.  See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License.  You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-from __future__ import absolute_import  # so we can import standard 'collections'
-
-import json
-from types import MethodType
-try:
-    from collections import OrderedDict
-except ImportError:
-    from ordereddict import OrderedDict
-from ruamel import yaml  # @UnresolvedImport
-
-from aria.parser.utils.collections import (FrozenList, FrozenDict, StrictList, StrictDict)
-
-# Add our types to ruamel.yaml (for round trips)
-yaml.representer.RoundTripRepresenter.add_representer(
-    FrozenList, yaml.representer.RoundTripRepresenter.represent_list)
-yaml.representer.RoundTripRepresenter.add_representer(
-    FrozenDict, yaml.representer.RoundTripRepresenter.represent_dict)
-yaml.representer.RoundTripRepresenter.add_representer(
-    StrictList, yaml.representer.RoundTripRepresenter.represent_list)
-yaml.representer.RoundTripRepresenter.add_representer(
-    StrictDict, yaml.representer.RoundTripRepresenter.represent_dict)
-
-# Without this, ruamel.yaml will output "!!omap" types, which is
-# technically correct but unnecessarily verbose for our uses
-yaml.representer.RoundTripRepresenter.add_representer(
-    OrderedDict, yaml.representer.RoundTripRepresenter.represent_dict)
-
-
-class JsonAsRawEncoder(json.JSONEncoder):
-    """
-    A :class:`JSONEncoder` that will use the :code:`as_raw` property of objects
-    if available.
-    """
-    def raw_encoder_default(self, obj):
-        try:
-            return iter(obj)
-        except TypeError:
-            if hasattr(obj, 'as_raw'):
-                return as_raw(obj)
-            return str(obj)
-        return super(JsonAsRawEncoder, self).default(obj)
-
-    def __init__(self, *args, **kwargs):
-        kwargs['default'] = self.raw_encoder_default
-        super(JsonAsRawEncoder, self).__init__(*args, **kwargs)
-
-
-class YamlAsRawDumper(yaml.dumper.RoundTripDumper):  # pylint: disable=too-many-ancestors
-    """
-    A :class:`RoundTripDumper` that will use the :code:`as_raw` property of objects
-    if available.
-    """
-
-    def represent_data(self, data):
-        if hasattr(data, 'as_raw'):
-            data = as_raw(data)
-        return super(YamlAsRawDumper, self).represent_data(data)
-
-
-def full_type_name(value):
-    """
-    The full class name of a type or object.
-    """
-
-    if not isinstance(value, type):
-        value = value.__class__
-    module = str(value.__module__)
-    name = str(value.__name__)
-    return name if module == '__builtin__' else '%s.%s' % (module, name)
-
-
-def safe_str(value):
-    """
-    Like :code:`str` coercion, but makes sure that Unicode strings are properly
-    encoded, and will never return None.
-    """
-
-    try:
-        return str(value)
-    except UnicodeEncodeError:
-        return unicode(value).encode('utf8')
-
-
-def safe_repr(value):
-    """
-    Like :code:`repr`, but calls :code:`as_raw` and :code:`as_agnostic` first.
-    """
-
-    return repr(as_agnostic(as_raw(value)))
-
-
-def string_list_as_string(strings):
-    """
-    Nice representation of a list of strings.
-    """
-
-    return ', '.join('"%s"' % safe_str(v) for v in strings)
-
-
-def as_raw(value):
-    """
-    Converts values using their :code:`as_raw` property, if it exists, recursively.
-    """
-
-    if hasattr(value, 'as_raw'):
-        value = value.as_raw
-        if isinstance(value, MethodType):
-            # Old-style Python classes don't support properties
-            value = value()
-    elif isinstance(value, list):
-        value = list(value)
-        for i, _ in enumerate(value):
-            value[i] = as_raw(value[i])
-    elif isinstance(value, dict):
-        value = dict(value)
-        for k, v in value.iteritems():
-            value[k] = as_raw(v)
-    return value
-
-
-def as_raw_list(value):
-    """
-    Assuming value is a list, converts its values using :code:`as_raw`.
-    """
-
-    if value is None:
-        return []
-    if isinstance(value, dict):
-        value = value.itervalues()
-    return [as_raw(v) for v in value]
-
-
-def as_raw_dict(value):
-    """
-    Assuming value is a dict, converts its values using :code:`as_raw`.
-    The keys are left as is.
-    """
-
-    if value is None:
-        return OrderedDict()
-    return OrderedDict((
-        (k, as_raw(v)) for k, v in value.iteritems()))
-
-
-def as_agnostic(value):
-    """
-    Converts subclasses of list and dict to standard lists and dicts, and Unicode strings
-    to non-Unicode if possible, recursively.
-
-    Useful for creating human-readable output of structures.
-    """
-
-    if isinstance(value, unicode):
-        try:
-            value = str(value)
-        except UnicodeEncodeError:
-            pass
-    elif isinstance(value, list):
-        value = list(value)
-    elif isinstance(value, dict):
-        value = dict(value)
-
-    if isinstance(value, list):
-        for i, _ in enumerate(value):
-            value[i] = as_agnostic(value[i])
-    elif isinstance(value, dict):
-        for k, v in value.iteritems():
-            value[k] = as_agnostic(v)
-
-    return value
-
-
-def json_dumps(value, indent=2):
-    """
-    JSON dumps that supports Unicode and the :code:`as_raw` property of objects
-    if available.
-    """
-
-    return json.dumps(value, indent=indent, ensure_ascii=False, cls=JsonAsRawEncoder)
-
-
-def yaml_dumps(value, indent=2):
-    """
-    YAML dumps that supports Unicode and the :code:`as_raw` property of objects
-    if available.
-    """
-
-    return yaml.dump(value, indent=indent, allow_unicode=True, Dumper=YamlAsRawDumper)
-
-
-def yaml_loads(value):
-    return yaml.load(value, Loader=yaml.SafeLoader)
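
The JsonAsRawEncoder/as_raw pattern above hooks object-to-primitive conversion into
the json module. A self-contained sketch of the same idea (the Point class is
illustrative only):

    import json

    class Point(object):  # illustrative object exposing an as_raw view
        def __init__(self, x, y):
            self.x, self.y = x, y

        @property
        def as_raw(self):
            return {'x': self.x, 'y': self.y}

    class AsRawEncoder(json.JSONEncoder):
        def default(self, obj):
            # Fall back to the as_raw view for otherwise unserializable objects
            if hasattr(obj, 'as_raw'):
                return obj.as_raw
            return super(AsRawEncoder, self).default(obj)

    print(json.dumps({'p': Point(1, 2)}, cls=AsRawEncoder))  # {"p": {"x": 1, "y": 2}}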

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/3895f8ca/aria/parser/utils/imports.py
----------------------------------------------------------------------
diff --git a/aria/parser/utils/imports.py b/aria/parser/utils/imports.py
deleted file mode 100644
index 8f97156..0000000
--- a/aria/parser/utils/imports.py
+++ /dev/null
@@ -1,51 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements.  See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License.  You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-def import_fullname(name, paths=None):
-    """
-    Imports a variable or class based on a full name, optionally searching for it in the paths.
-    """
-    paths = paths or []
-    if name is None:
-        return None
-
-    def do_import(name):
-        if name and ('.' in name):
-            module_name, name = name.rsplit('.', 1)
-            return getattr(__import__(module_name, fromlist=[name], level=0), name)
-        else:
-            raise ImportError('import not found: %s' % name)
-
-    try:
-        return do_import(name)
-    except ImportError:
-        for path in paths:
-            try:
-                return do_import('%s.%s' % (path, name))
-            except Exception as e:
-                raise ImportError('cannot import %s, because %s' % (name, e))
-
-    raise ImportError('import not found: %s' % name)
-
-def import_modules(name):
-    """
-    Imports a module and all its sub-modules, recursively.
-    Relies on modules defining a 'MODULES' attribute listing their sub-module names.
-    """
-
-    module = __import__(name, fromlist=['MODULES'], level=0)
-    if hasattr(module, 'MODULES'):
-        for module_ in module.MODULES:
-            import_modules('%s.%s' % (name, module_))
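
The removed import_fullname resolves a dotted name through __import__; the standard
importlib module offers the same resolution more directly. A sketch:

    import importlib

    def import_by_name(fullname):
        # Split 'package.module.attr' into a module path and an attribute name
        module_name, _, attr = fullname.rpartition('.')
        module = importlib.import_module(module_name)
        return getattr(module, attr)

    # Resolves the standard library's OrderedDict class, for example
    print(import_by_name('collections.OrderedDict'))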

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/3895f8ca/aria/parser/utils/openclose.py
----------------------------------------------------------------------
diff --git a/aria/parser/utils/openclose.py b/aria/parser/utils/openclose.py
deleted file mode 100644
index 19740eb..0000000
--- a/aria/parser/utils/openclose.py
+++ /dev/null
@@ -1,32 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements.  See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License.  You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-class OpenClose(object):
-    """
-    Wraps an object that has open() and close() methods to support the "with" keyword.
-    """
-
-    def __init__(self, wrapped):
-        self.wrapped = wrapped
-
-    def __enter__(self):
-        if hasattr(self.wrapped, 'open'):
-            self.wrapped.open()
-        return self.wrapped
-
-    def __exit__(self, the_type, value, traceback):
-        if hasattr(self.wrapped, 'close'):
-            self.wrapped.close()
-        return False
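
OpenClose is the open()-side analogue of the standard contextlib.closing, which only
handles close(). A generator-based equivalent, with a hypothetical Connection class
for demonstration:

    from contextlib import contextmanager

    @contextmanager
    def open_close(wrapped):
        # Same duck-typed protocol as the OpenClose class above
        if hasattr(wrapped, 'open'):
            wrapped.open()
        try:
            yield wrapped
        finally:
            if hasattr(wrapped, 'close'):
                wrapped.close()

    class Connection(object):  # hypothetical object with open/close methods
        def open(self):
            print('opened')
        def close(self):
            print('closed')

    with open_close(Connection()):
        pass  # prints 'opened', then 'closed' on exit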

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/3895f8ca/aria/parser/utils/rest_client.py
----------------------------------------------------------------------
diff --git a/aria/parser/utils/rest_client.py b/aria/parser/utils/rest_client.py
deleted file mode 100644
index 905e372..0000000
--- a/aria/parser/utils/rest_client.py
+++ /dev/null
@@ -1,59 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements.  See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License.  You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import json
-import urllib2
-
-def call_rest(url, payload=None, with_payload_method='PUT'):
-    """
-    REST call with JSON decoding of the response and JSON payloads.
-    """
-
-    if payload:
-        if not isinstance(payload, basestring):
-            payload = json.dumps(payload)
-        # PUT or POST
-        response = urllib2.urlopen(MethodRequest(
-            url,
-            payload,
-            {'Content-Type': 'application/json'}, method=with_payload_method))
-    else:
-        # GET
-        response = urllib2.urlopen(url)
-    response = response.read().decode()
-    return json.loads(response)
-
-#
-# Utils
-#
-
-class MethodRequest(urllib2.Request):
-    """
-    Workaround to support all HTTP methods.
-
-    From `here <https://gist.github.com/logic/2715756>`__.
-    """
-
-    def __init__(self, *args, **kwargs):
-        if 'method' in kwargs:
-            self._method = kwargs['method']
-            del kwargs['method']
-        else:
-            self._method = None
-        urllib2.Request.__init__(self, *args, **kwargs)
-
-    def get_method(self, *args, **kwargs):
-        return self._method if self._method is not None else urllib2.Request.get_method(
-            self, *args, **kwargs)
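
MethodRequest works around urllib2's lack of a method argument by overriding
get_method; on Python 3, urllib.request.Request accepts the method natively, so no
subclass is needed. A sketch against a hypothetical endpoint:

    import json
    from urllib.request import Request, urlopen  # Python 3

    request = Request(
        'http://localhost:8080/items/1',  # hypothetical endpoint
        data=json.dumps({'name': 'example'}).encode('utf8'),
        headers={'Content-Type': 'application/json'},
        method='PUT')
    response = urlopen(request)
    print(json.loads(response.read().decode()))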

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/3895f8ca/aria/parser/utils/rest_server.py
----------------------------------------------------------------------
diff --git a/aria/parser/utils/rest_server.py b/aria/parser/utils/rest_server.py
deleted file mode 100644
index 17823f1..0000000
--- a/aria/parser/utils/rest_server.py
+++ /dev/null
@@ -1,252 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements.  See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License.  You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-from __future__ import absolute_import  # so we can import standard 'collections'
-
-import os
-import re
-import shutil
-import json
-import sys
-import BaseHTTPServer
-try:
-    from collections import OrderedDict
-except ImportError:
-    from ordereddict import OrderedDict
-from ..utils import (puts, Colored)
-
-class RestServer(object):
-    """
-    Straightforward REST server.
-
-    Supports custom handling of all HTTP verbs, with special (optional) support for JSON, as well
-    as serving straightforward static files via GET.
-
-    Properties:
-
-    * :code:`configuration`: An optional configuration object
-    * :code:`port`: HTTP server port
-    * :code:`routes`: :class:`OrderedDict` of routes (see below)
-    * :code:`static_root`: Root directory for static files
-    * :code:`json_encoder`: :class:`JSONEncoder` for responses
-    * :code:`json_decoder`: :class:`JSONDecoder` for requests
-    * :code:`unicode`: True to support Unicode
-
-    The route keys are regular expressions for matching the path. They are checked in order, which
-    is why it's important to use :class:`OrderedDict`.
-
-    The route values are dicts with the following optional fields:
-
-    * :code:`GET`: Function to handle GET for this route
-    * :code:`PUT`: Function to handle PUT for this route
-    * :code:`POST`: Function to handle POST for this route
-    * :code:`DELETE`: Function to handle DELETE for this route
-    * :code:`file`: Attach a static file to this route; it is the path to
-            the file to return relative to :code:`static_root` (if :code:`file` is
-            set then :code:`GET`/:code:`PUT`/:code:`POST`/:code:`DELETE` are ignored)
-    * :code:`media_type`: Media type to set for responses to this
-            route (except error messages, which will be in "text/plain")
-
-    The :code:`GET`/:code:`PUT`/:code:`POST`/:code:`DELETE` handler functions all receive a single
-    argument: an instance of :class:`RestRequestHandler`.
-
-    If you return None, then a 404 error will be generated. Otherwise, it will be a 200 response
-    with the return value written to it. If the :code:`media_type` for the route was set to
-    "application/json", then the return value will first be encoded into JSON using the configured
-    :code:`json_encoder`.
-
-    If you want to write the response yourself, set :code:`handled=True` on the
-    :class:`RestRequestHandler`, which will cause the return value to be ignored (you won't have to
-    return anything). If all you want to do is send an error message, then use
-    :code:`send_plain_text_response`.
-
-    If you raise an (uncaught) exception, then a 500 error will be generated with the exception
-    message.
-
-    To get the payload (for :code:`PUT`/:code:`POST`) use :code:`payload` on the
-    :class:`RestRequestHandler` for plain text, or :code:`json_payload` to use the configured
-    :code:`json_decoder`. Note that it's up to you to check for JSON decoding exceptions and return
-    an appropriate 400 error message.
-    """
-
-    def __init__(self):
-        self.configuration = None
-        self.port = 8080
-        self.routes = OrderedDict()
-        self.static_root = '.'
-        self.json_encoder = json.JSONEncoder(ensure_ascii=False, separators=(',', ':'))
-        self.json_decoder = json.JSONDecoder(object_pairs_hook=OrderedDict)
-        self.unicode = True
-
-    def start(self, daemon=False):
-        """
-        Starts the REST server.
-        """
-
-        if self.unicode:
-            # Fixes issues with decoding HTTP responses
-            # (Not such a great solution! But there doesn't seem to be a better way)
-            reload(sys)
-            sys.setdefaultencoding('utf8')  # @UndefinedVariable
-
-        http_server = BaseHTTPServer.HTTPServer(('', self.port), rest_request_handler(self))
-        if daemon:
-            print 'Running HTTP server daemon at port %d' % self.port
-        else:
-            puts(Colored.red('Running HTTP server at port %d, use CTRL-C to exit' % self.port))
-        try:
-            http_server.serve_forever()
-        except KeyboardInterrupt:
-            pass
-        puts(Colored.red('Stopping HTTP server'))
-        http_server.server_close()
-
-class RestRequestHandler(BaseHTTPServer.BaseHTTPRequestHandler):
-    """
-    Handler for :class:`RestServer`.
-    """
-
-    def __init__(self, rest_server, *args, **kwargs):
-        self.rest_server = rest_server
-        self.handled = False
-        self.matched_re = None
-        self.matched_route = None
-        # Old-style Python classes don't support super
-        BaseHTTPServer.BaseHTTPRequestHandler.__init__(self, *args, **kwargs)
-
-    @property
-    def content_length(self):
-        return int(self.headers.getheader('content-length', 0))
-
-    @property
-    def payload(self):
-        return self.rfile.read(self.content_length)
-
-    @property
-    def json_payload(self):
-        return self.rest_server.json_decoder.decode(self.payload)
-
-    def match_route(self):
-        for path_re, route in self.rest_server.routes.iteritems():
-            if re.match(path_re, self.path):
-                return path_re, route
-        return None, None
-
-    def send_plain_text_response(self, status, content):
-        self.send_response(status)
-        self.send_header('Content-type', 'text/plain')
-        self.end_headers()
-        self.wfile.write(content)
-        self.handled = True
-
-    def send_content_type(self, route=None):
-        if route is None:
-            _, route = self.match_route()
-        media_type = route.get('media_type')
-        if media_type is not None:
-            self.send_header('Content-type', media_type)
-        return media_type
-
-    def _handle_file(self, method):
-        if method != 'GET':
-            self.send_plain_text_response(405, '%s is not supported\n' % method)
-            return
-
-        try:
-            matched_route_file = open(os.path.join(
-                self.rest_server.static_root,
-                self.matched_route['file']))
-            try:
-                self.send_response(200)
-                self.send_content_type(self.matched_route)
-                self.end_headers()
-                shutil.copyfileobj(matched_route_file, self.wfile)
-            finally:
-                matched_route_file.close()
-        except IOError:
-            self.send_plain_text_response(404, 'Not found\n')
-        return
-
-    def handle_method(self, method):
-        # pylint: disable=too-many-return-statements
-        self.matched_re, self.matched_route = self.match_route()
-
-        if self.matched_route is None:
-            self.send_plain_text_response(404, 'Not found\n')
-            return
-
-        if method == 'HEAD':
-            self.send_response(200)
-            self.send_content_type(self.matched_route)
-            self.end_headers()
-            return
-
-        if 'file' in self.matched_route:
-            self._handle_file(method)
-            return
-
-        if method not in self.matched_route:
-            self.send_plain_text_response(405, '%s is not supported\n' % method)
-            return
-
-        try:
-            content = self.matched_route[method](self)
-        except Exception as e:
-            self.send_plain_text_response(500, 'Internal error: %s\n' % e)
-            return
-
-        if self.handled:
-            return
-
-        if content is None:
-            self.send_plain_text_response(404, 'Not found\n')
-            return
-
-        self.send_response(200)
-        media_type = self.send_content_type(self.matched_route)
-        self.end_headers()
-
-        if method == 'DELETE':
-            # No content for DELETE
-            return
-
-        if media_type == 'application/json':
-            self.wfile.write(self.rest_server.json_encoder.encode(content))
-        else:
-            self.wfile.write(content)
-
-    # BaseHTTPRequestHandler
-    # pylint: disable=invalid-name
-    def do_HEAD(self):
-        self.handle_method('HEAD')
-
-    def do_GET(self):
-        self.handle_method('GET')
-
-    def do_POST(self):
-        self.handle_method('POST')
-
-    def do_PUT(self):
-        self.handle_method('PUT')
-
-    def do_DELETE(self):
-        self.handle_method('DELETE')
-
-#
-# Utils
-#
-
-def rest_request_handler(rest_server):
-    return lambda *args, **kwargs: RestRequestHandler(rest_server, *args, **kwargs)
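
Per the docstring above, routes map path regexes to verb handlers and are matched in
insertion order. A hypothetical configuration sketch for the removed RestServer
(paths and handler names are illustrative):

    from collections import OrderedDict

    def get_status(handler):  # hypothetical GET handler
        return {'status': 'ok'}  # JSON-encoded because of media_type below

    server = RestServer()
    server.port = 8080
    server.routes = OrderedDict([
        # Checked in order, so list the more specific patterns first
        (r'^/api/status$', {'GET': get_status, 'media_type': 'application/json'}),
        (r'^/$', {'file': 'index.html', 'media_type': 'text/html'}),
    ])
    server.start()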

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/3895f8ca/aria/parser/utils/threading.py
----------------------------------------------------------------------
diff --git a/aria/parser/utils/threading.py b/aria/parser/utils/threading.py
deleted file mode 100644
index 575d011..0000000
--- a/aria/parser/utils/threading.py
+++ /dev/null
@@ -1,252 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements.  See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License.  You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-from __future__ import absolute_import  # so we can import standard 'threading'
-
-import itertools
-import multiprocessing
-from threading import (Thread, Lock)
-from Queue import (Queue, Full, Empty)
-
-from .exceptions import print_exception
-
-class ExecutorException(Exception):
-    pass
-
-class DaemonThread(Thread):
-    def __init__(self, *args, **kwargs):
-        super(DaemonThread, self).__init__(*args, **kwargs)
-        self.daemon = True
-
-    def run(self):
-        """
-        We're overriding `Thread.run` in order to avoid annoying (but harmless) error
-        messages during shutdown. The problem is that CPython nullifies the
-        global state _before_ shutting down daemon threads, so that exceptions
-        might happen, and then `Thread.__bootstrap_inner` prints them out.
-
-        Our solution is to swallow these exceptions here.
-
-        The side effect is that uncaught exceptions in our own thread code will _not_
-        be printed out as usual, so it's our responsibility to catch them in our
-        code.
-        """
-
-        try:
-            super(DaemonThread, self).run()
-        except SystemExit as e:
-            # This exception should be bubbled up
-            raise e
-        except BaseException:
-            # Exceptions might occur in daemon threads during interpreter shutdown
-            pass
-
-# https://gist.github.com/tliron/81dd915166b0bfc64be08b4f8e22c835
-class FixedThreadPoolExecutor(object):
-    """
-    Executes tasks in a fixed thread pool.
-
-    Makes sure to gather all returned results and thrown exceptions in one place, in order of task
-    submission.
-
-    Example::
-
-        def sum(arg1, arg2):
-            return arg1 + arg2
-
-        executor = FixedThreadPoolExecutor(10)
-        try:
-            for value in range(100):
-                executor.submit(sum, value, value)
-            executor.drain()
-        except:
-            executor.close()
-        executor.raise_first()
-        print executor.returns
-
-    You can also use it with the Python "with" keyword, in which case you don't need to call "close"
-    explicitly::
-
-        with FixedThreadPoolExecutor(10) as executor:
-            for value in range(100):
-                executor.submit(sum, value, value)
-            executor.drain()
-            executor.raise_first()
-            print executor.returns
-    """
-
-    _CYANIDE = object()  # Special task marker used to kill worker threads.
-
-    def __init__(self,
-                 size=multiprocessing.cpu_count() * 2 + 1,
-                 timeout=None,
-                 print_exceptions=False):
-        """
-        :param size: Number of threads in the pool (fixed).
-        :param timeout: Timeout in seconds for all
-               blocking operations. (Defaults to none, meaning no timeout)
-        :param print_exceptions: Set to true in order to
-               print exceptions from tasks. (Defaults to false)
-        """
-
-        self.size = size
-        self.timeout = timeout
-        self.print_exceptions = print_exceptions
-
-        self._tasks = Queue()
-        self._returns = {}
-        self._exceptions = {}
-        self._id_creator = itertools.count()
-        self._lock = Lock() # for console output
-
-        self._workers = []
-        for index in range(size):
-            worker = DaemonThread(
-                name='%s%d' % (self.__class__.__name__, index),
-                target=self._thread_worker)
-            worker.start()
-            self._workers.append(worker)
-
-    def submit(self, func, *args, **kwargs):
-        """
-        Submit a task for execution.
-
-        The task will be called ASAP on the next available worker thread in the pool.
-
-        Will raise an :class:`ExecutorException` if the task cannot be submitted.
-        """
-
-        try:
-            self._tasks.put((self._id_creator.next(), func, args, kwargs), timeout=self.timeout)
-        except Full:
-            raise ExecutorException('cannot submit task: queue is full')
-
-    def close(self):
-        """
-        Blocks until all current tasks finish execution and all worker threads are dead.
-
-        You cannot submit tasks anymore after calling this.
-
-        This is called automatically upon exit if you are using the "with" keyword.
-        """
-
-        self.drain()
-        while self.is_alive:
-            try:
-                self._tasks.put(self._CYANIDE, timeout=self.timeout)
-            except Full:
-                raise ExecutorException('cannot close executor: a thread seems to be hanging')
-        self._workers = None
-
-    def drain(self):
-        """
-        Blocks until all current tasks finish execution, but leaves the worker threads alive.
-        """
-
-        self._tasks.join()  # oddly, the API does not support a timeout parameter
-
-    @property
-    def is_alive(self):
-        """
-        True if any of the worker threads are alive.
-        """
-
-        for worker in self._workers:
-            if worker.is_alive():
-                return True
-        return False
-
-    @property
-    def returns(self):
-        """
-        The returned values from all tasks, in order of submission.
-        """
-
-        return [self._returns[k] for k in sorted(self._returns)]
-
-    @property
-    def exceptions(self):
-        """
-        The raised exceptions from all tasks, in order of submission.
-        """
-
-        return [self._exceptions[k] for k in sorted(self._exceptions)]
-
-    def raise_first(self):
-        """
-        If exceptions were thrown by any task, then the first one will be raised.
-
-        This is rather arbitrary: proper handling would involve iterating all the
-        exceptions. However, if you want to use the "raise" mechanism, you are
-        limited to raising only one of them.
-        """
-
-        exceptions = self.exceptions
-        if exceptions:
-            raise exceptions[0]
-
-    def _thread_worker(self):
-        while True:
-            if not self._execute_next_task():
-                break
-
-    def _execute_next_task(self):
-        try:
-            task = self._tasks.get(timeout=self.timeout)
-        except Empty:
-            # Happens if timeout is reached
-            return True
-        if task == self._CYANIDE:
-            # Time to die :(
-            return False
-        self._execute_task(*task)
-        return True
-
-    def _execute_task(self, task_id, func, args, kwargs):
-        try:
-            result = func(*args, **kwargs)
-            self._returns[task_id] = result
-        except Exception as e:
-            self._exceptions[task_id] = e
-            if self.print_exceptions:
-                with self._lock:
-                    print_exception(e)
-        self._tasks.task_done()
-
-    def __enter__(self):
-        return self
-
-    def __exit__(self, the_type, value, traceback):
-        self.close()
-        return False
-
-class LockedList(list):
-    """
-    A list that supports the "with" keyword with a built-in lock.
-
-    Though Python lists are thread-safe in that they will not raise exceptions
-    during concurrent access, they do not guarantee atomicity. This class will
-    let you gain atomicity when needed.
-    """
-
-    def __init__(self, *args, **kwargs):
-        super(LockedList, self).__init__(*args, **kwargs)
-        self.lock = Lock()
-
-    def __enter__(self):
-        return self.lock.__enter__()
-
-    def __exit__(self, the_type, value, traceback):
-        return self.lock.__exit__(the_type, value, traceback)
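
The removed executor predates easy access to concurrent.futures; the standard
ThreadPoolExecutor (bundled with Python 3, available on Python 2 via the 'futures'
backport) covers the same submit-and-gather pattern. A sketch:

    from concurrent.futures import ThreadPoolExecutor

    def add(arg1, arg2):
        return arg1 + arg2

    with ThreadPoolExecutor(max_workers=10) as executor:
        futures = [executor.submit(add, value, value) for value in range(100)]
        # result() blocks and re-raises any exception thrown inside the task
        returns = [future.result() for future in futures]
    print(returns[:5])  # [0, 2, 4, 6, 8]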

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/3895f8ca/aria/parser/utils/uris.py
----------------------------------------------------------------------
diff --git a/aria/parser/utils/uris.py b/aria/parser/utils/uris.py
deleted file mode 100644
index 1686517..0000000
--- a/aria/parser/utils/uris.py
+++ /dev/null
@@ -1,28 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements.  See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License.  You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import os
-import urlparse
-
-def as_file(uri):
-    """
-    If the URI is a file (either the :code:`file` scheme or no scheme), then returns the absolute
-    path. Otherwise, returns None.
-    """
-
-    url = urlparse.urlparse(uri)
-    if (not url.scheme) or (url.scheme == 'file'):
-        return os.path.abspath(url.path)
-    return None
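
A quick demonstration of the removed as_file logic with the standard parser
(urlparse is urllib.parse on Python 3):

    import os
    import urlparse  # urllib.parse on Python 3

    for uri in ('file:///tmp/blueprint.yaml',   # file scheme: absolute path
                'templates/blueprint.yaml',     # no scheme: treated as a file
                'http://example.org/blueprint.yaml'):  # other scheme: None
        url = urlparse.urlparse(uri)
        if (not url.scheme) or (url.scheme == 'file'):
            print(os.path.abspath(url.path))
        else:
            print(None)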

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/3895f8ca/aria/parser/validation/context.py
----------------------------------------------------------------------
diff --git a/aria/parser/validation/context.py b/aria/parser/validation/context.py
index e0355e3..d81f1cd 100644
--- a/aria/parser/validation/context.py
+++ b/aria/parser/validation/context.py
@@ -14,7 +14,12 @@
 # limitations under the License.
 
 from .issue import Issue
-from ..utils import (LockedList, FrozenList, print_exception, puts, Colored, indent, as_raw)
+from ...utils.threading import LockedList
+from ...utils.collections import FrozenList
+from ...utils.exceptions import print_exception
+from ...utils.console import puts, Colored, indent
+from ...utils.formatting import as_raw
+
 
 class ValidationContext(object):
     """

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/3895f8ca/aria/parser/validation/issue.py
----------------------------------------------------------------------
diff --git a/aria/parser/validation/issue.py b/aria/parser/validation/issue.py
index 7c73b1c..f001efc 100644
--- a/aria/parser/validation/issue.py
+++ b/aria/parser/validation/issue.py
@@ -15,11 +15,9 @@
 
 from __future__ import absolute_import  # so we can import standard 'collections'
 
-try:
-    from collections import OrderedDict
-except ImportError:
-    from ordereddict import OrderedDict
-from ..utils import full_type_name
+from ...utils.collections import OrderedDict
+from ...utils.formatting import full_type_name
+
 
 class Issue(object):
     PLATFORM = 0

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/3895f8ca/aria/storage/structures.py
----------------------------------------------------------------------
diff --git a/aria/storage/structures.py b/aria/storage/structures.py
index a833d99..b02366e 100644
--- a/aria/storage/structures.py
+++ b/aria/storage/structures.py
@@ -32,7 +32,7 @@ from uuid import uuid4
 
 from .exceptions import StorageError
 from ..logger import LoggerMixin
-from ..tools.validation import ValidatorMixin
+from ..utils.validation import ValidatorMixin
 
 __all__ = (
     'uuid_generator',

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/3895f8ca/aria/tools/__init__.py
----------------------------------------------------------------------
diff --git a/aria/tools/__init__.py b/aria/tools/__init__.py
deleted file mode 100644
index 320b445..0000000
--- a/aria/tools/__init__.py
+++ /dev/null
@@ -1,20 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements.  See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License.  You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-from .lru_cache import lru_cache
-from .module import load_attribute
-from .plugin import plugin_installer
-from .process import Process
-from .validation import validate_function_arguments, ValidatorMixin

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/3895f8ca/aria/tools/application.py
----------------------------------------------------------------------
diff --git a/aria/tools/application.py b/aria/tools/application.py
deleted file mode 100644
index b1a7fcc..0000000
--- a/aria/tools/application.py
+++ /dev/null
@@ -1,294 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements.  See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License.  You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-"""
-Convenience storage related tools.
-# TODO rename module name
-"""
-
-import json
-import os
-import shutil
-import tarfile
-import tempfile
-from datetime import datetime
-
-from aria.storage.exceptions import StorageError
-from aria.logger import LoggerMixin
-
-
-class StorageManager(LoggerMixin):
-    """
-    Convenience wrapper to simplify work with the lower level storage mechanism
-    """
-
-    def __init__(
-            self,
-            model_storage,
-            resource_storage,
-            blueprint_path,
-            blueprint_id,
-            blueprint_plan,
-            deployment_id,
-            deployment_plan,
-            **kwargs):
-        super(StorageManager, self).__init__(**kwargs)
-        self.model_storage = model_storage
-        self.resource_storage = resource_storage
-        self.blueprint_path = blueprint_path
-        self.blueprint_id = blueprint_id
-        self.blueprint_plan = blueprint_plan
-        self.deployment_id = deployment_id
-        self.deployment_plan = deployment_plan
-
-    @classmethod
-    def from_deployment(
-            cls,
-            model_storage,
-            resource_storage,
-            deployment_id,
-            deployment_plan):
-        """
-        Create a StorageManager from a deployment
-        """
-        return cls(
-            model_storage=model_storage,
-            resource_storage=resource_storage,
-            deployment_id=deployment_id,
-            deployment_plan=deployment_plan,
-            blueprint_path=None,
-            blueprint_plan=None,
-            blueprint_id=None
-        )
-
-    @classmethod
-    def from_blueprint(
-            cls,
-            model_storage,
-            resource_storage,
-            blueprint_path,
-            blueprint_id,
-            blueprint_plan):
-        """
-        Create a StorageManager from a blueprint
-        """
-        return cls(
-            model_storage=model_storage,
-            resource_storage=resource_storage,
-            blueprint_path=blueprint_path,
-            blueprint_plan=blueprint_plan,
-            blueprint_id=blueprint_id,
-            deployment_id=None,
-            deployment_plan=None)
-
-    def create_blueprint_storage(self, source, main_file_name=None):
-        """
-        create blueprint model & resource
-        """
-        assert self.blueprint_path and self.blueprint_id
-        assert hasattr(self.resource_storage, 'blueprint')
-        assert hasattr(self.model_storage, 'blueprint')
-
-        self.logger.debug('creating blueprint resource storage entry')
-        self.resource_storage.blueprint.upload(
-            entry_id=self.blueprint_id,
-            source=os.path.dirname(source))
-        self.logger.debug('created blueprint resource storage entry')
-
-        self.logger.debug('creating blueprint model storage entry')
-        now = datetime.utcnow()
-        blueprint = self.model_storage.blueprint.model_cls(
-            plan=self.blueprint_plan,
-            id=self.blueprint_id,
-            description=self.blueprint_plan.get('description'),
-            created_at=now,
-            updated_at=now,
-            main_file_name=main_file_name,
-        )
-        self.model_storage.blueprint.store(blueprint)
-        self.logger.debug('created blueprint model storage entry')
-
-    def create_nodes_storage(self):
-        """
-        create nodes model
-        """
-        assert self.blueprint_path and self.blueprint_id
-        assert hasattr(self.model_storage, 'node')
-        assert hasattr(self.model_storage, 'relationship')
-
-        for node in self.blueprint_plan['nodes']:
-            node_copy = node.copy()
-            for field in ('name',
-                          'deployment_plugins_to_install',
-                          'interfaces',
-                          'instances'):
-                node_copy.pop(field)
-            scalable = node_copy.pop('capabilities')['scalable']['properties']
-            for index, relationship in enumerate(node_copy['relationships']):
-                relationship = self.model_storage.relationship.model_cls(**relationship)
-                self.model_storage.relationship.store(relationship)
-                node_copy['relationships'][index] = relationship
-
-            node_copy = self.model_storage.node.model_cls(
-                blueprint_id=self.blueprint_id,
-                planned_number_of_instances=scalable['current_instances'],
-                deploy_number_of_instances=scalable['default_instances'],
-                min_number_of_instances=scalable['min_instances'],
-                max_number_of_instances=scalable['max_instances'],
-                number_of_instances=scalable['current_instances'],
-                **node_copy)
-            self.model_storage.node.store(node_copy)
-
-    def create_deployment_storage(self):
-        """
-        create deployment model & resource
-        """
-        assert self.deployment_id and self.deployment_plan
-
-        assert hasattr(self.resource_storage, 'blueprint')
-        assert hasattr(self.resource_storage, 'deployment')
-        assert hasattr(self.model_storage, 'deployment')
-
-        self.logger.debug('creating deployment resource storage entry')
-        temp_dir = tempfile.mkdtemp()
-        try:
-            self.resource_storage.blueprint.download(
-                entry_id=self.blueprint_id,
-                destination=temp_dir)
-            self.resource_storage.deployment.upload(
-                entry_id=self.deployment_id,
-                source=temp_dir)
-        finally:
-            shutil.rmtree(temp_dir, ignore_errors=True)
-        self.logger.debug('created deployment resource storage entry')
-
-        self.logger.debug('creating deployment model storage entry')
-        now = datetime.utcnow()
-        deployment = self.model_storage.deployment.model_cls(
-            id=self.deployment_id,
-            blueprint_id=self.blueprint_id,
-            description=self.deployment_plan['description'],
-            workflows=self.deployment_plan['workflows'],
-            inputs=self.deployment_plan['inputs'],
-            policy_types=self.deployment_plan['policy_types'],
-            policy_triggers=self.deployment_plan['policy_triggers'],
-            groups=self.deployment_plan['groups'],
-            scaling_groups=self.deployment_plan['scaling_groups'],
-            outputs=self.deployment_plan['outputs'],
-            created_at=now,
-            updated_at=now
-        )
-        self.model_storage.deployment.store(deployment)
-        self.logger.debug('created deployment model storage entry')
-
-    def create_node_instances_storage(self):
-        """
-        create node_instances model
-        """
-        assert self.deployment_id and self.deployment_plan
-        assert hasattr(self.model_storage, 'node_instance')
-        assert hasattr(self.model_storage, 'relationship_instance')
-
-        self.logger.debug('creating node-instances model storage entries')
-        for node_instance in self.deployment_plan['node_instances']:
-            node_model = self.model_storage.node.get(node_instance['node_id'])
-            relationship_instances = []
-
-            for index, relationship_instance in enumerate(node_instance['relationships']):
-                relationship_instance_model = self.model_storage.relationship_instance.model_cls(
-                    relationship=node_model.relationships[index],
-                    target_name=relationship_instance['target_name'],
-                    type=relationship_instance['type'],
-                    target_id=relationship_instance['target_id'])
-                relationship_instances.append(relationship_instance_model)
-                self.model_storage.relationship_instance.store(relationship_instance_model)
-
-            node_instance_model = self.model_storage.node_instance.model_cls(
-                node=node_model,
-                id=node_instance['id'],
-                runtime_properties={},
-                state=self.model_storage.node_instance.model_cls.UNINITIALIZED,
-                deployment_id=self.deployment_id,
-                version='1.0',
-                relationship_instances=relationship_instances)
-
-            self.model_storage.node_instance.store(node_instance_model)
-        self.logger.debug('created node-instances model storage entries')
-
-    def create_plugin_storage(self, plugin_id, source):
-        """
-        create plugin model & resource
-        """
-        assert hasattr(self.model_storage, 'plugin')
-        assert hasattr(self.resource_storage, 'plugin')
-
-        self.logger.debug('creating plugin resource storage entry')
-        self.resource_storage.plugin.upload(entry_id=plugin_id, source=source)
-        self.logger.debug('created plugin resource storage entry')
-
-        self.logger.debug('creating plugin model storage entry')
-        plugin = _load_plugin_from_archive(source)
-        build_props = plugin.get('build_server_os_properties')
-        now = datetime.utcnow()
-
-        plugin = self.model_storage.plugin.model_cls(
-            id=plugin_id,
-            package_name=plugin.get('package_name'),
-            package_version=plugin.get('package_version'),
-            archive_name=plugin.get('archive_name'),
-            package_source=plugin.get('package_source'),
-            supported_platform=plugin.get('supported_platform'),
-            distribution=build_props.get('distribution'),
-            distribution_version=build_props.get('distribution_version'),
-            distribution_release=build_props.get('distribution_release'),
-            wheels=plugin.get('wheels'),
-            excluded_wheels=plugin.get('excluded_wheels'),
-            supported_py_versions=plugin.get('supported_python_versions'),
-            uploaded_at=now
-        )
-        self.model_storage.plugin.store(plugin)
-        self.logger.debug('created plugin model storage entry')
-
-
-def _load_plugin_from_archive(tar_source):
-    if not tarfile.is_tarfile(tar_source):
-        # TODO: go over the exceptions
-        raise StorageError(
-            'the provided tar archive can not be read.')
-
-    with tarfile.open(tar_source) as tar:
-        tar_members = tar.getmembers()
-        # a wheel plugin will contain exactly one sub directory
-        if not tar_members:
-            raise StorageError(
-                'archive file structure malformed. expecting exactly one '
-                'sub directory; got none.')
-        package_json_path = os.path.join(tar_members[0].name,
-                                         'package.json')
-        try:
-            package_member = tar.getmember(package_json_path)
-        except KeyError:
-            raise StorageError("'package.json' was not found under {0}"
-                               .format(package_json_path))
-        try:
-            package_json = tar.extractfile(package_member)
-        except tarfile.ExtractError as e:
-            raise StorageError(str(e))
-        try:
-            return json.load(package_json)
-        except ValueError as e:
-            raise StorageError("'package.json' is not a valid json: "
-                               "{json_str}. error is {error}"
-                               .format(json_str=package_json.read(), error=str(e)))
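
_load_plugin_from_archive reads a package.json from the single top-level directory
that a plugin tarball is expected to contain. A minimal standalone sketch of that
extraction, with the error handling elided ('plugin.tar' is a hypothetical path):

    import json
    import os
    import tarfile

    def read_package_json(tar_path):
        # Assumes the layout described above: exactly one top-level
        # directory holding a package.json metadata file
        with tarfile.open(tar_path) as tar:
            root = tar.getmembers()[0].name
            member = tar.getmember(os.path.join(root, 'package.json'))
            return json.load(tar.extractfile(member))

    # print(read_package_json('plugin.tar'))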

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/3895f8ca/aria/tools/lru_cache.py
----------------------------------------------------------------------
diff --git a/aria/tools/lru_cache.py b/aria/tools/lru_cache.py
deleted file mode 100755
index bb39b90..0000000
--- a/aria/tools/lru_cache.py
+++ /dev/null
@@ -1,136 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements.  See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License.  You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-"""
-Function lru_cache implementation for python 2.7
-(In Python 3 this decorator is in functools)
-"""
-
-from time import time
-from functools import partial, wraps
-from itertools import imap
-
-try:
-    from collections import OrderedDict
-except ImportError:
-    from ordereddict import OrderedDict
-
-
-class _LRUCache(object):
-
-    def __init__(self, input_func, max_size, timeout):
-        self._input_func = input_func
-        self._max_size = max_size
-        self._timeout = timeout
-
-        # This will store the cache for this function, format:
-        # {caller1 : [OrderedDict1, last_refresh_time1],
-        #  caller2 : [OrderedDict2, last_refresh_time2]}.
-        # In case of an instance method -
-        # the caller is the instance,
-        # in case called from a regular function - the caller is None.
-        self._caches_dict = {}
-
-    @staticmethod
-    def _prepare_key(*args, **kwargs):
-        kwargs_key = "".join(
-            imap(lambda x: str(x) + str(type(kwargs[x])) + str(kwargs[x]),
-                 sorted(kwargs)))
-        return "".join(imap(lambda x: str(type(x)) + str(x), args)) + 
kwargs_key
-
-    def cache_clear(self, caller=None):
-        """
-        Clears the cache, optionally, only for a specific caller
-        """
-        # Remove the cache for the caller, only if exists:
-        if caller in self._caches_dict:
-            del self._caches_dict[caller]
-            self._caches_dict[caller] = (OrderedDict(), time())
-
-    def __get__(self, obj, _):
-        """ Called for instance methods """
-        return_func = partial(self._cache_wrapper, obj)
-        return_func.cache_clear = partial(self.cache_clear, obj)
-        # Wrap the returned function so it maintains the docstring
-        # and the name of the original function:
-        return wraps(self._input_func)(return_func)
-
-    def __call__(self, *args, **kwargs):
-        """ Called for regular functions """
-        return self._cache_wrapper(None, *args, **kwargs)
-    # Set the cache_clear function in the __call__ operator:
-    __call__.cache_clear = cache_clear
-
-    def _cache_wrapper(self, caller, *args, **kwargs):
-        # Create a unique key including the types
-        # (in order to differentiate between 1 and '1'):
-        key = self._prepare_key(*args, **kwargs)
-
-        # Check if caller exists, if not create one:
-        if caller not in self._caches_dict:
-            self._caches_dict[caller] = (OrderedDict(), time())
-        else:
-            # Validate in case the refresh time has passed:
-            if self._timeout is not None and time() - self._caches_dict[caller][1] > self._timeout:
-                self.cache_clear(caller)
-
-        # Check if the key exists, if so - return it:
-        cur_caller_cache_dict = self._caches_dict[caller][0]
-        if key in cur_caller_cache_dict:
-            return cur_caller_cache_dict[key]
-
-        # Validate we didn't exceed the max_size:
-        if len(cur_caller_cache_dict) >= self._max_size:
-            # Delete the first item in the dict:
-            cur_caller_cache_dict.popitem(False)
-
-        # Call the function and store the data in the cache
-        # (call it with the caller in case it's an instance method - ternary condition):
-        cur_caller_cache_dict[key] = self._input_func(caller, *args, **kwargs) \
-            if caller is not None else self._input_func(*args, **kwargs)
-        return cur_caller_cache_dict[key]
-
-
-def lru_cache(maxsize=255, timeout=None):
-    """
-    lru_cache(maxsize=255, timeout=None)
-    Returns a decorator which returns an instance (a descriptor).
-
-    Purpose:
-        This decorator factory will wrap a function / instance method,
-        and will supply a caching mechanism to the function.
-        For every given set of input parameters it will store the result in a
-        queue of maxsize size, and will return the cached ret_val if the same
-        parameters are passed again.
-
-    Notes:
-        * If an instance method is wrapped,
-          each instance will have its own cache and its own timeout.
-        * The wrapped function will have a cache_clear attribute inserted into it,
-          which may be called to clear its specific cache.
-        * The wrapped function will maintain the original function's docstring
-          and name (via functools.wraps).
-        * The type of the wrapped function will no longer be that of a function,
-          but either an instance of _LRUCache or a functools.partial type.
-
-    :param maxsize: The cache size limit.
-                    Anything added above that will delete the first values
-                    entered (FIFO). This size is per instance, thus 1000
-                    instances with a maxsize of 255 will contain at most
-                    255K elements.
-    :type maxsize: int
-    :param timeout: Every n seconds the cache is deleted, regardless of usage.
-                    If None, the cache will never be refreshed.
-    :type timeout: int, float, None
-
-    """
-    return lambda input_func: wraps(input_func)(_LRUCache(input_func, maxsize, timeout))

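A brief usage sketch for the decorator above (the function and class names are
invented for illustration); note the type-aware cache keys, the per-instance
caches, and the injected cache_clear:

    @lru_cache(maxsize=2, timeout=60)
    def concat(a, b):
        return str(a) + str(b)

    concat(1, 2)          # computed, cached under a key that includes types
    concat(1, 2)          # cache hit
    concat('1', '2')      # cache miss: '1' is keyed separately from 1
    concat.cache_clear()  # empties this function's cache

    class Squares(object):
        @lru_cache(maxsize=100)
        def square(self, n):
            return n * n

    s = Squares()
    s.square(3)             # cached per instance (the __get__ descriptor path)
    s.square.cache_clear()  # clears only this instance's cache
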
http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/3895f8ca/aria/tools/module.py
----------------------------------------------------------------------
diff --git a/aria/tools/module.py b/aria/tools/module.py
deleted file mode 100644
index 3afc0ff..0000000
--- a/aria/tools/module.py
+++ /dev/null
@@ -1,38 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements.  See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License.  You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-"""
-Utility methods for dynamically loading python code
-"""
-
-import importlib
-
-
-def load_attribute(attribute_path):
-    """
-    Dynamically load an attribute based on the path to it.
-    e.g. some_package.some_module.some_attribute will load some_attribute
-    from the some_package.some_module module
-    """
-    module_name, attribute_name = attribute_path.rsplit('.', 1)
-    try:
-        module = importlib.import_module(module_name)
-        return getattr(module, attribute_name)
-    except ImportError:
-        # TODO: handle
-        raise
-    except AttributeError:
-        # TODO: handle
-        raise

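Usage amounts to resolving a dotted path; for example, against the standard
library:

    join = load_attribute('os.path.join')
    print(join('a', 'b'))  # 'a/b' on POSIX

A misspelled module or attribute name surfaces as the re-raised ImportError or
AttributeError from the TODO branches above.
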
http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/3895f8ca/aria/tools/plugin.py
----------------------------------------------------------------------
diff --git a/aria/tools/plugin.py b/aria/tools/plugin.py
deleted file mode 100644
index bb2b974..0000000
--- a/aria/tools/plugin.py
+++ /dev/null
@@ -1,39 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements.  See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License.  You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-"""
-Contains utility methods that enable dynamic python code loading
-# TODO: merge with tools.module
-"""
-
-import os
-from importlib import import_module
-
-
-def plugin_installer(path, plugin_suffix, package=None, callback=None):
-    """
-    Load each module under ``path`` that ends with ``plugin_suffix``.
-    If ``callback`` is supplied, call it with each loaded module.
-    """
-    assert callback is None or callable(callback)
-    plugin_suffix = '{0}.py'.format(plugin_suffix)
-
-    for file_name in os.listdir(path):
-        if not file_name.endswith(plugin_suffix):
-            continue
-        module_name = '{0}.{1}'.format(package, file_name[:-3]) if package else file_name[:-3]
-        module = import_module(module_name)
-        if callback:
-            callback(module)

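A hedged usage sketch (the directory layout and module names are invented):
given a ./plugins directory containing db_plugin.py and cache_plugin.py, the
following imports both and collects them through the callback. Note that the
modules themselves must be importable, i.e. the directory has to be on
sys.path, since import_module is given a module name rather than a file path:

    import sys
    sys.path.insert(0, './plugins')

    loaded = []
    plugin_installer(
        path='./plugins',
        plugin_suffix='_plugin',  # matches files ending in '_plugin.py'
        callback=loaded.append)
    print([module.__name__ for module in loaded])
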
http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/3895f8ca/aria/tools/process.py
----------------------------------------------------------------------
diff --git a/aria/tools/process.py b/aria/tools/process.py
deleted file mode 100644
index b9586b6..0000000
--- a/aria/tools/process.py
+++ /dev/null
@@ -1,186 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements.  See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License.  You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-"""
-Subprocess utility methods
-"""
-
-import os
-import subprocess
-from signal import SIGKILL
-from time import sleep
-
-from aria.logger import LoggerMixin
-from aria.orchestrator.workflows.exceptions import ExecutorException, ProcessException
-
-
-class Process(LoggerMixin):
-    """
-    Subprocess wrapper
-    """
-
-    def __init__(
-            self,
-            args,
-            stdout=subprocess.PIPE,
-            stderr=subprocess.PIPE,
-            cwd=None,
-            env=None,
-            **kwargs):
-        """
-        Subprocess wrapper
-        """
-        super(Process, self).__init__(**kwargs)
-        self.args = args
-        self.cwd = cwd
-        self.env = env
-        self.process = None
-        self._stdout = stdout
-        self._stderr = stderr
-
-    def __repr__(self):
-        return '{cls.__name__}(args={self.args}, cwd={self.cwd})'.format(
-            cls=self.__class__, self=self)
-
-    def __getattr__(self, item):
-        return getattr(self.process, item)
-
-    @property
-    def name(self):
-        """
-        The process name
-        """
-        return self.args[0]
-
-    @property
-    def pid(self):
-        """
-        The process pid (if running)
-        """
-        if self.is_running():
-            return self.process.pid
-
-    @property
-    def stdout(self):
-        """
-        The process stdout
-        """
-        assert self.process, 'Need to run before calling this method'
-        return self.process.stdout
-
-    @property
-    def stderr(self):
-        """
-        The process stderr
-        """
-        assert self.process, 'Need to run before calling this method'
-        return self.process.stderr
-
-    @property
-    def return_code(self):
-        """
-        The process return code. Will wait for the process to end if it hasn't already
-        """
-        if self.process is None:
-            return None
-        if self.is_running():
-            raise ExecutorException(
-                'Can not get return code while process is still running')
-        if self.process.returncode is None:
-            self.wait()
-        return self.process.returncode
-
-    def terminate(self):
-        """
-        Terminates the process by sending it a SIGTERM. If the process did not
-        stop after that, sends a SIGKILL at 1 second intervals, up to 10 times.
-        """
-        if self.process is not None and self.process.poll() is None:
-            self.logger.debug('terminating process {0:d} ({1})'.format(self.process.pid, self.name))
-            self.process.terminate()
-            sleep(1)
-        kill_attempts = 0
-        while self.process is not None and self.process.poll() is None and kill_attempts < 10:
-            self.logger.debug('trying to kill process {0:d}'.format(self.process.pid))
-            self.process.kill()
-            sleep(1)
-            kill_attempts += 1
-
-    def kill(self):
-        """
-        Kill the process by sending a SIGKILL to it
-        """
-        if self.is_running():
-            os.killpg(os.getpgid(self.pid), SIGKILL)
-
-    def is_running(self):
-        """
-        Returns ``True`` if the process is currently running
-        """
-        return self.process.poll() is None if self.process else False
-
-    def wait(self):
-        """
-        Block until process finishes
-        """
-        assert self.process, 'Need to run before calling this method'
-        self.process.wait()
-
-    def run(self, nice=None, universal_newlines=True):
-        """
-        Run the child process. This call does not block.
-        """
-        self.logger.debug('Running child process: {0}'.format(' '.join(self.args)))
-        self.process = subprocess.Popen(
-            self.args,
-            cwd=self.cwd,
-            env=self.env,
-            stdout=self._stdout,
-            stderr=self._stderr,
-            close_fds=os.name != 'nt',
-            preexec_fn=lambda: os.nice(nice) if nice else None,
-            universal_newlines=universal_newlines)
-
-    def run_in_shell(self, nice=None, universal_newlines=True):
-        """
-        Run the child process in a shell. This call does not block.
-        """
-        command = ' '.join(self.args)
-        self.logger.debug('Running child process in shell: {0}'.format(command))
-        self.process = subprocess.Popen(
-            command,
-            shell=True,
-            cwd=self.cwd,
-            env=self.env,
-            stdout=self._stdout,
-            stderr=self._stderr,
-            close_fds=os.name != 'nt',
-            preexec_fn=lambda: os.nice(nice) if nice else None,
-            universal_newlines=universal_newlines)
-
-    def raise_failure(self):
-        """
-        Raise a ProcessException if the process terminated with a non-zero
-        return code. Will wait for the process to finish if it hasn't already
-        """
-        if self.is_running():
-            self.wait()
-        if self.return_code == 0:
-            return
-        raise ProcessException(
-            command=self.args,
-            stderr=self.stderr.read(),
-            stdout=self.stdout.read(),
-            return_code=self.return_code)

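A quick sketch of driving the wrapper above (the command is chosen purely for
illustration, and LoggerMixin is assumed to need no extra constructor
arguments here):

    p = Process(['echo', 'hello'])
    p.run()                 # non-blocking Popen under the hood
    p.wait()                # block until the child exits
    print(p.return_code)    # 0
    print(p.stdout.read())  # 'hello\n'
    p.raise_failure()       # no-op here, since the return code is zero
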
http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/3895f8ca/aria/tools/validation.py
----------------------------------------------------------------------
diff --git a/aria/tools/validation.py b/aria/tools/validation.py
deleted file mode 100644
index a33f7a2..0000000
--- a/aria/tools/validation.py
+++ /dev/null
@@ -1,95 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements.  See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License.  You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-"""
-Contains validation related utilities
-"""
-
-
-class ValidatorMixin(object):
-    """
-    A mixin that should be added to classes that require validating user input
-    """
-
-    _ARGUMENT_TYPE_MESSAGE = '{name} argument must be {type} based, got {arg!r}'
-    _ARGUMENT_CHOICE_MESSAGE = '{name} argument must be in {choices}, got {arg!r}'
-
-    @classmethod
-    def validate_in_choice(cls, name, argument, choices):
-        """
-        Validate ``argument`` is in ``choices``
-        """
-        if argument not in choices:
-            raise TypeError(cls._ARGUMENT_CHOICE_MESSAGE.format(
-                name=name, choices=choices, arg=argument))
-
-    @classmethod
-    def validate_type(cls, argument_name, argument, expected_type):
-        """
-        Validate ``argument`` is a subclass of ``expected_type``
-        """
-        if not issubclass(argument, expected_type):
-            raise TypeError(cls._ARGUMENT_TYPE_MESSAGE.format(
-                name=argument_name, type=expected_type, arg=argument))
-
-    @classmethod
-    def validate_instance(cls, argument_name, argument, expected_type):
-        """
-        Validate ``argument`` is an instance of ``expected_type``
-        """
-        if not isinstance(argument, expected_type):
-            raise TypeError(cls._ARGUMENT_TYPE_MESSAGE.format(
-                name=argument_name, type=expected_type, arg=argument))
-
-    @classmethod
-    def validate_callable(cls, argument_name, argument):
-        """
-        Validate ``argument`` is callable
-        """
-        if not callable(argument):
-            raise TypeError(cls._ARGUMENT_TYPE_MESSAGE.format(
-                name=argument_name, type='callable', arg=argument))
-
-
-def validate_function_arguments(func, func_kwargs):
-    """
-    Validates all required arguments are supplied to ``func`` and that no
-    additional arguments are supplied
-    """
-
-    _kwargs_flags = 8
-
-    has_kwargs = func.func_code.co_flags & _kwargs_flags != 0
-    args_count = func.func_code.co_argcount
-
-    # all args without the ones with default values
-    args = func.func_code.co_varnames[:args_count]
-    # defaults apply to the trailing arguments, so slice off the last
-    # len(func_defaults) names
-    non_default_args = (args[:args_count - len(func.func_defaults)]
-                        if func.func_defaults else args)
-
-    # Check if any args without default values is missing in the func_kwargs
-    for arg in non_default_args:
-        if arg not in func_kwargs:
-            raise ValueError(
-                "The argument '{arg}' doest not have a default value, and it "
-                "isn't passed to {func.__name__}".format(arg=arg, func=func))
-
-    # check if there are any extra kwargs
-    extra_kwargs = [arg for arg in func_kwargs.keys() if arg not in args]
-
-    # assert that the function has kwargs
-    if extra_kwargs and not has_kwargs:
-        raise ValueError("The following extra kwargs were supplied: 
{extra_kwargs}".format(
-            extra_kwargs=extra_kwargs
-        ))

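Two hedged usage sketches for the utilities above (the class, function and
argument names are invented; the func_code access means this targets
Python 2):

    class Task(ValidatorMixin):
        def __init__(self, mode, on_done):
            self.validate_in_choice('mode', mode, ('fast', 'safe'))
            self.validate_callable('on_done', on_done)
            self.mode, self.on_done = mode, on_done

    Task('fast', lambda: None)    # passes validation
    # Task('slow', lambda: None)  # would raise TypeError: not in choices

    def greet(name, punctuation='!'):
        return 'hello ' + name + punctuation

    validate_function_arguments(greet, {'name': 'aria'})        # passes
    # validate_function_arguments(greet, {'punctuation': '?'})  # would raise
    # ValueError, since 'name' has no default and was not passed
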
http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/3895f8ca/aria/utils/__init__.py
----------------------------------------------------------------------
diff --git a/aria/utils/__init__.py b/aria/utils/__init__.py
new file mode 100644
index 0000000..ae1e83e
--- /dev/null
+++ b/aria/utils/__init__.py
@@ -0,0 +1,14 @@
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/3895f8ca/aria/utils/application.py
----------------------------------------------------------------------
diff --git a/aria/utils/application.py b/aria/utils/application.py
new file mode 100644
index 0000000..b1a7fcc
--- /dev/null
+++ b/aria/utils/application.py
@@ -0,0 +1,294 @@
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""
+Convenience storage-related tools.
+# TODO rename module name
+"""
+
+import json
+import os
+import shutil
+import tarfile
+import tempfile
+from datetime import datetime
+
+from aria.storage.exceptions import StorageError
+from aria.logger import LoggerMixin
+
+
+class StorageManager(LoggerMixin):
+    """
+    Convenience wrapper to simplify work with the lower level storage mechanism
+    """
+
+    def __init__(
+            self,
+            model_storage,
+            resource_storage,
+            blueprint_path,
+            blueprint_id,
+            blueprint_plan,
+            deployment_id,
+            deployment_plan,
+            **kwargs):
+        super(StorageManager, self).__init__(**kwargs)
+        self.model_storage = model_storage
+        self.resource_storage = resource_storage
+        self.blueprint_path = blueprint_path
+        self.blueprint_id = blueprint_id
+        self.blueprint_plan = blueprint_plan
+        self.deployment_id = deployment_id
+        self.deployment_plan = deployment_plan
+
+    @classmethod
+    def from_deployment(
+            cls,
+            model_storage,
+            resource_storage,
+            deployment_id,
+            deployment_plan):
+        """
+        Create a StorageManager from a deployment
+        """
+        return cls(
+            model_storage=model_storage,
+            resource_storage=resource_storage,
+            deployment_id=deployment_id,
+            deployment_plan=deployment_plan,
+            blueprint_path=None,
+            blueprint_plan=None,
+            blueprint_id=None
+        )
+
+    @classmethod
+    def from_blueprint(
+            cls,
+            model_storage,
+            resource_storage,
+            blueprint_path,
+            blueprint_id,
+            blueprint_plan):
+        """
+        Create a StorageManager from a blueprint
+        """
+        return cls(
+            model_storage=model_storage,
+            resource_storage=resource_storage,
+            blueprint_path=blueprint_path,
+            blueprint_plan=blueprint_plan,
+            blueprint_id=blueprint_id,
+            deployment_id=None,
+            deployment_plan=None)
+
+    def create_blueprint_storage(self, source, main_file_name=None):
+        """
+        create blueprint model & resource
+        """
+        assert self.blueprint_path and self.blueprint_id
+        assert hasattr(self.resource_storage, 'blueprint')
+        assert hasattr(self.model_storage, 'blueprint')
+
+        self.logger.debug('creating blueprint resource storage entry')
+        self.resource_storage.blueprint.upload(
+            entry_id=self.blueprint_id,
+            source=os.path.dirname(source))
+        self.logger.debug('created blueprint resource storage entry')
+
+        self.logger.debug('creating blueprint model storage entry')
+        now = datetime.utcnow()
+        blueprint = self.model_storage.blueprint.model_cls(
+            plan=self.blueprint_plan,
+            id=self.blueprint_id,
+            description=self.blueprint_plan.get('description'),
+            created_at=now,
+            updated_at=now,
+            main_file_name=main_file_name,
+        )
+        self.model_storage.blueprint.store(blueprint)
+        self.logger.debug('created blueprint model storage entry')
+
+    def create_nodes_storage(self):
+        """
+        create nodes model
+        """
+        assert self.blueprint_path and self.blueprint_id
+        assert hasattr(self.model_storage, 'node')
+        assert hasattr(self.model_storage, 'relationship')
+
+        for node in self.blueprint_plan['nodes']:
+            node_copy = node.copy()
+            for field in ('name',
+                          'deployment_plugins_to_install',
+                          'interfaces',
+                          'instances'):
+                node_copy.pop(field)
+            scalable = node_copy.pop('capabilities')['scalable']['properties']
+            for index, relationship in enumerate(node_copy['relationships']):
+                relationship = self.model_storage.relationship.model_cls(**relationship)
+                self.model_storage.relationship.store(relationship)
+                node_copy['relationships'][index] = relationship
+
+            node_copy = self.model_storage.node.model_cls(
+                blueprint_id=self.blueprint_id,
+                planned_number_of_instances=scalable['current_instances'],
+                deploy_number_of_instances=scalable['default_instances'],
+                min_number_of_instances=scalable['min_instances'],
+                max_number_of_instances=scalable['max_instances'],
+                number_of_instances=scalable['current_instances'],
+                **node_copy)
+            self.model_storage.node.store(node_copy)
+
+    def create_deployment_storage(self):
+        """
+        create deployment model & resource
+        """
+        assert self.deployment_id and self.deployment_plan
+
+        assert hasattr(self.resource_storage, 'blueprint')
+        assert hasattr(self.resource_storage, 'deployment')
+        assert hasattr(self.model_storage, 'deployment')
+
+        self.logger.debug('creating deployment resource storage entry')
+        temp_dir = tempfile.mkdtemp()
+        try:
+            self.resource_storage.blueprint.download(
+                entry_id=self.blueprint_id,
+                destination=temp_dir)
+            self.resource_storage.deployment.upload(
+                entry_id=self.deployment_id,
+                source=temp_dir)
+        finally:
+            shutil.rmtree(temp_dir, ignore_errors=True)
+        self.logger.debug('created deployment resource storage entry')
+
+        self.logger.debug('creating deployment model storage entry')
+        now = datetime.utcnow()
+        deployment = self.model_storage.deployment.model_cls(
+            id=self.deployment_id,
+            blueprint_id=self.blueprint_id,
+            description=self.deployment_plan['description'],
+            workflows=self.deployment_plan['workflows'],
+            inputs=self.deployment_plan['inputs'],
+            policy_types=self.deployment_plan['policy_types'],
+            policy_triggers=self.deployment_plan['policy_triggers'],
+            groups=self.deployment_plan['groups'],
+            scaling_groups=self.deployment_plan['scaling_groups'],
+            outputs=self.deployment_plan['outputs'],
+            created_at=now,
+            updated_at=now
+        )
+        self.model_storage.deployment.store(deployment)
+        self.logger.debug('created deployment model storage entry')
+
+    def create_node_instances_storage(self):
+        """
+        create node_instances model
+        """
+        assert self.deployment_id and self.deployment_plan
+        assert hasattr(self.model_storage, 'node_instance')
+        assert hasattr(self.model_storage, 'relationship_instance')
+
+        self.logger.debug('creating node-instances model storage entries')
+        for node_instance in self.deployment_plan['node_instances']:
+            node_model = self.model_storage.node.get(node_instance['node_id'])
+            relationship_instances = []
+
+            for index, relationship_instance in enumerate(node_instance['relationships']):
+                relationship_instance_model = self.model_storage.relationship_instance.model_cls(
+                    relationship=node_model.relationships[index],
+                    target_name=relationship_instance['target_name'],
+                    type=relationship_instance['type'],
+                    target_id=relationship_instance['target_id'])
+                relationship_instances.append(relationship_instance_model)
+                self.model_storage.relationship_instance.store(relationship_instance_model)
+
+            node_instance_model = self.model_storage.node_instance.model_cls(
+                node=node_model,
+                id=node_instance['id'],
+                runtime_properties={},
+                state=self.model_storage.node_instance.model_cls.UNINITIALIZED,
+                deployment_id=self.deployment_id,
+                version='1.0',
+                relationship_instances=relationship_instances)
+
+            self.model_storage.node_instance.store(node_instance_model)
+        self.logger.debug('created node-instances model storage entries')
+
+    def create_plugin_storage(self, plugin_id, source):
+        """
+        create plugin model & resource
+        """
+        assert hasattr(self.model_storage, 'plugin')
+        assert hasattr(self.resource_storage, 'plugin')
+
+        self.logger.debug('creating plugin resource storage entry')
+        self.resource_storage.plugin.upload(entry_id=plugin_id, source=source)
+        self.logger.debug('created plugin resource storage entry')
+
+        self.logger.debug('creating plugin model storage entry')
+        plugin = _load_plugin_from_archive(source)
+        build_props = plugin.get('build_server_os_properties')
+        now = datetime.utcnow()
+
+        plugin = self.model_storage.plugin.model_cls(
+            id=plugin_id,
+            package_name=plugin.get('package_name'),
+            package_version=plugin.get('package_version'),
+            archive_name=plugin.get('archive_name'),
+            package_source=plugin.get('package_source'),
+            supported_platform=plugin.get('supported_platform'),
+            distribution=build_props.get('distribution'),
+            distribution_version=build_props.get('distribution_version'),
+            distribution_release=build_props.get('distribution_release'),
+            wheels=plugin.get('wheels'),
+            excluded_wheels=plugin.get('excluded_wheels'),
+            supported_py_versions=plugin.get('supported_python_versions'),
+            uploaded_at=now
+        )
+        self.model_storage.plugin.store(plugin)
+        self.logger.debug('created plugin model storage entry')
+
+
+def _load_plugin_from_archive(tar_source):
+    if not tarfile.is_tarfile(tar_source):
+        # TODO: go over the exceptions
+        raise StorageError(
+            'the provided tar archive can not be read.')
+
+    with tarfile.open(tar_source) as tar:
+        tar_members = tar.getmembers()
+        # a wheel plugin will contain exactly one sub directory
+        if not tar_members:
+            raise StorageError(
+                'archive file structure malformed. expecting exactly one '
+                'sub directory; got none.')
+        package_json_path = os.path.join(tar_members[0].name,
+                                         'package.json')
+        try:
+            package_member = tar.getmember(package_json_path)
+        except KeyError:
+            raise StorageError("'package.json' was not found under {0}"
+                               .format(package_json_path))
+        try:
+            package_json = tar.extractfile(package_member)
+        except tarfile.ExtractError as e:
+            raise StorageError(str(e))
+        # read once, so the raw content is still available for the error message
+        package_json_str = package_json.read()
+        try:
+            return json.loads(package_json_str)
+        except ValueError as e:
+            raise StorageError("'package.json' is not a valid json: "
+                               "{json_str}. error is {error}"
+                               .format(json_str=package_json_str, error=str(e)))

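A hedged sketch of driving the new StorageManager (the storage objects, ids
and parsed plan below are placeholders, assumed to have been set up
elsewhere):

    manager = StorageManager.from_blueprint(
        model_storage=model_storage,        # assumed: initialized model storage
        resource_storage=resource_storage,  # assumed: initialized resource storage
        blueprint_path='/tmp/my-blueprint/blueprint.yaml',
        blueprint_id='my-blueprint',
        blueprint_plan=parsed_plan)         # assumed: the parser's plan dict

    manager.create_blueprint_storage(
        source='/tmp/my-blueprint/blueprint.yaml',
        main_file_name='blueprint.yaml')
    manager.create_nodes_storage()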
