http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/6a4dc43f/aria/parser/tools/rest.py
----------------------------------------------------------------------
diff --git a/aria/parser/tools/rest.py b/aria/parser/tools/rest.py
new file mode 100644
index 0000000..d4997d8
--- /dev/null
+++ b/aria/parser/tools/rest.py
@@ -0,0 +1,262 @@
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import os
+import urllib
+from collections import OrderedDict
+from urlparse import (urlparse, parse_qs)
+
+from ..loading import LiteralLocation
+from .. import install_aria_extensions
+from .utils import (CommonArgumentParser,
+                    create_context_from_namespace)
+from ..consumption import (ConsumerChain, Read, Validate, Model, Inputs, Instance)
+from ..utils import (RestServer, JsonAsRawEncoder, print_exception, start_daemon, stop_daemon,
+                     status_daemon, puts, Colored)
+
+VALIDATE_PATH = 'validate'
+INDIRECT_VALIDATE_PATH = 'indirect/validate'
+MODEL_PATH = 'model'
+INDIRECT_MODEL_PATH = 'indirect/model'
+INSTANCE_PATH = 'instance'
+INDIRECT_INSTANCE_PATH = 'indirect/instance'
+
+DEFAULT_PORT = 8080
+
+#
+# Utils
+#
+
+class Configuration(object):
+    def __init__(self, arguments):
+        self.arguments = arguments
+
+    def create_context(self, uri):
+        return create_context_from_namespace(self.arguments, uri=uri)
+
+def parse_path(handler):
+    parsed = urlparse(urllib.unquote(handler.path))
+    uri = parsed.path[len(handler.matched_re):]
+    query = parse_qs(parsed.query, keep_blank_values=True)
+    return uri, query
+
+def parse_indirect_payload(handler):
+    try:
+        payload = handler.json_payload
+    except BaseException:
+        handler.send_plain_text_response(400, 'Payload is not JSON\n')
+        return None, None
+
+    for key in payload.iterkeys():
+        if key not in ('uri', 'inputs'):
+            handler.send_plain_text_response(400, 'Payload has unsupported field: %s\n' % key)
+            return None, None
+
+    try:
+        uri = payload['uri']
+    except BaseException:
+        handler.send_plain_text_response(400, 'Payload does not have required "uri" field\n')
+        return None, None
+
+    inputs = payload.get('inputs')
+
+    return uri, inputs
+
+def validate(handler, uri):
+    context = handler.rest_server.configuration.create_context(uri)
+    ConsumerChain(context, (Read, Validate)).consume()
+    return context
+
+def model(handler, uri):
+    context = handler.rest_server.configuration.create_context(uri)
+    ConsumerChain(context, (Read, Validate, Model)).consume()
+    return context
+
+def instance(handler, uri, inputs):
+    context = handler.rest_server.configuration.create_context(uri)
+    if inputs:
+        if isinstance(inputs, dict):
+            for name, value in inputs.iteritems():
+                context.modeling.set_input(name, value)
+        else:
+            context.args.append('--inputs=%s' % inputs)
+    ConsumerChain(context, (Read, Validate, Model, Inputs, Instance)).consume()
+    return context
+
+def issues(context):
+    return {'issues': context.validation.issues_as_raw}
+
+#
+# Handlers
+#
+
+# Validate
+
+def validate_get(handler):
+    uri, _ = parse_path(handler)
+    context = validate(handler, uri)
+    return issues(context) if context.validation.has_issues else {}
+
+def validate_post(handler):
+    payload = handler.payload
+    context = validate(handler, LiteralLocation(payload))
+    return issues(context) if context.validation.has_issues else {}
+
+def indirect_validate_post(handler):
+    uri, _ = parse_indirect_payload(handler)
+    if uri is None:
+        return None
+    context = validate(handler, uri)
+    return issues(context) if context.validation.has_issues else {}
+
+# Model
+
+def model_get(handler):
+    uri, _ = parse_path(handler)
+    context = model(handler, uri)
+    return issues(context) if context.validation.has_issues else {
+        'types': context.modeling.types_as_raw,
+        'model': context.modeling.model_as_raw
+    }
+
+def model_post(handler):
+    payload = handler.payload
+    context = model(handler, LiteralLocation(payload))
+    return issues(context) if context.validation.has_issues else {
+        'types': context.modeling.types_as_raw,
+        'model': context.modeling.model_as_raw
+    }
+
+def indirect_model_post(handler):
+    uri, _ = parse_indirect_payload(handler)
+    if uri is None:
+        return None
+    context = model(handler, uri)
+    return issues(context) if context.validation.has_issues else {
+        'types': context.modeling.types_as_raw,
+        'model': context.modeling.model_as_raw
+    }
+
+# Instance
+
+def instance_get(handler):
+    uri, query = parse_path(handler)
+    inputs = query.get('inputs')
+    if inputs:
+        inputs = inputs[0]
+    context = instance(handler, uri, inputs)
+    return issues(context) if context.validation.has_issues else {
+        'types': context.modeling.types_as_raw,
+        'model': context.modeling.model_as_raw,
+        'instance': context.modeling.instance_as_raw
+    }
+
+def instance_post(handler):
+    _, query = parse_path(handler)
+    inputs = query.get('inputs')
+    if inputs:
+        inputs = inputs[0]
+    payload = handler.payload
+    context = instance(handler, LiteralLocation(payload), inputs)
+    return issues(context) if context.validation.has_issues else {
+        'types': context.modeling.types_as_raw,
+        'model': context.modeling.model_as_raw,
+        'instance': context.modeling.instance_as_raw
+    }
+
+def indirect_instance_post(handler):
+    uri, inputs = parse_indirect_payload(handler)
+    if uri is None:
+        return None
+    context = instance(handler, uri, inputs)
+    return issues(context) if context.validation.has_issues else {
+        'types': context.modeling.types_as_raw,
+        'model': context.modeling.model_as_raw,
+        'instance': context.modeling.instance_as_raw
+    }
+
+#
+# Server
+#
+
+ROUTES = OrderedDict((
+    ('^/$', {'file': 'index.html', 'media_type': 'text/html'}),
+    ('^/' + VALIDATE_PATH, {'GET': validate_get,
+                            'POST': validate_post,
+                            'media_type': 'application/json'}),
+    ('^/' + MODEL_PATH, {'GET': model_get, 'POST': model_post, 'media_type': 'application/json'}),
+    ('^/' + INSTANCE_PATH, {'GET': instance_get,
+                            'POST': instance_post,
+                            'media_type': 'application/json'}),
+    ('^/' + INDIRECT_VALIDATE_PATH, {'POST': indirect_validate_post,
+                                     'media_type': 'application/json'}),
+    ('^/' + INDIRECT_MODEL_PATH, {'POST': indirect_model_post, 'media_type': 'application/json'}),
+    ('^/' + INDIRECT_INSTANCE_PATH, {'POST': indirect_instance_post,
+                                     'media_type': 'application/json'})))
+
+class ArgumentParser(CommonArgumentParser):
+    def __init__(self):
+        super(ArgumentParser, self).__init__(description='REST Server', prog='aria-rest')
+        self.add_argument('command',
+                          nargs='?',
+                          help='daemon command: start, stop, restart, or status')
+        self.add_argument('--port', type=int, default=DEFAULT_PORT, help='HTTP port')
+        self.add_argument('--root', help='web root directory')
+        self.add_argument('--rundir',
+                          help='pid and log files directory for daemons (defaults to user home)')
+
+def main():
+    try:
+        install_aria_extensions()
+
+        arguments, _ = ArgumentParser().parse_known_args()
+
+        rest_server = RestServer()
+        rest_server.configuration = Configuration(arguments)
+        rest_server.port = arguments.port
+        rest_server.routes = ROUTES
+        rest_server.static_root = arguments.root or os.path.join(os.path.dirname(__file__), 'web')
+        rest_server.json_encoder = JsonAsRawEncoder(ensure_ascii=False, separators=(',', ':'))
+
+        if arguments.command:
+            rundir = os.path.abspath(arguments.rundir or os.path.expanduser('~'))
+            pidfile_path = os.path.join(rundir, 'aria-rest.pid')
+
+            def start():
+                log_path = os.path.join(rundir, 'aria-rest.log')
+                context = start_daemon(pidfile_path, log_path)
+                if context is not None:
+                    with context:
+                        rest_server.start(daemon=True)
+
+            if arguments.command == 'start':
+                start()
+            elif arguments.command == 'stop':
+                stop_daemon(pidfile_path)
+            elif arguments.command == 'restart':
+                stop_daemon(pidfile_path)
+                start()
+            elif arguments.command == 'status':
+                status_daemon(pidfile_path)
+            else:
+                puts(Colored.red('Unknown command: %s' % arguments.command))
+        else:
+            rest_server.start()
+
+    except Exception as e:
+        print_exception(e)
+
+if __name__ == '__main__':
+    main()
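
A quick usage sketch of the endpoints added above (not part of the commit; host, port and
the blueprint URI are placeholders, assuming the server was started with the defaults):

    import json
    import urllib2

    # GET: the blueprint URI is appended to the route path
    print(urllib2.urlopen('http://localhost:8080/validate/blueprint.yaml').read())

    # POST to an indirect route: the URI and inputs are sent as a JSON payload
    request = urllib2.Request('http://localhost:8080/indirect/instance',
                              data=json.dumps({'uri': 'blueprint.yaml',
                                               'inputs': {'key': 'value'}}))
    print(urllib2.urlopen(request).read())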

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/6a4dc43f/aria/parser/tools/spec.py
----------------------------------------------------------------------
diff --git a/aria/parser/tools/spec.py b/aria/parser/tools/spec.py
new file mode 100644
index 0000000..ecb4010
--- /dev/null
+++ b/aria/parser/tools/spec.py
@@ -0,0 +1,60 @@
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import csv
+import sys
+
+from .utils import BaseArgumentParser
+from ..utils import (print_exception, import_modules, puts, Colored, indent)
+from .. import (install_aria_extensions, DSL_SPECIFICATION_PACKAGES, DSL_SPECIFICATION,
+                iter_spec)
+
+class ArgumentParser(BaseArgumentParser):
+    def __init__(self):
+        super(ArgumentParser, self).__init__(description='Specification Tool', prog='aria-spec')
+        self.add_argument('--csv', action='store_true', help='output as CSV')
+
+def main():
+    try:
+        args, _ = ArgumentParser().parse_known_args()
+
+        install_aria_extensions()
+
+        # Make sure that all @dsl_specification decorators are processed
+        for pkg in DSL_SPECIFICATION_PACKAGES:
+            import_modules(pkg)
+
+        if args.csv:
+            writer = csv.writer(sys.stdout, quoting=csv.QUOTE_ALL)
+            writer.writerow(('Specification', 'Section', 'Code', 'URL'))
+            for spec in sorted(DSL_SPECIFICATION):
+                for section, details in iter_spec(spec):
+                    writer.writerow((spec, section, details['code'], details['url']))
+
+        else:
+            for spec in sorted(DSL_SPECIFICATION):
+                puts(Colored.cyan(spec))
+                with indent(2):
+                    for section, details in iter_spec(spec):
+                        puts(Colored.blue(section))
+                        with indent(2):
+                            for k, v in details.iteritems():
+                                puts('%s: %s' % (Colored.magenta(k), v))
+
+    except Exception as e:
+        print_exception(e)
+
+if __name__ == '__main__':
+    main()

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/6a4dc43f/aria/parser/tools/utils.py
----------------------------------------------------------------------
diff --git a/aria/parser/tools/utils.py b/aria/parser/tools/utils.py
new file mode 100644
index 0000000..9080e43
--- /dev/null
+++ b/aria/parser/tools/utils.py
@@ -0,0 +1,73 @@
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from .. import VERSION
+from ..consumption import ConsumptionContext
+from ..loading import (UriLocation, URI_LOADER_PREFIXES)
+from ..utils import (ArgumentParser, import_fullname, cachedmethod)
+
+class BaseArgumentParser(ArgumentParser):
+    def __init__(self, description, **kwargs):
+        super(BaseArgumentParser, self).__init__(
+            description='%s for ARIA version %s' % (description, VERSION), **kwargs)
+
+class CommonArgumentParser(BaseArgumentParser):
+    def __init__(self, description, **kwargs):
+        super(CommonArgumentParser, self).__init__(description, **kwargs)
+
+        self.add_argument('--loader-source',
+                          default='aria.loading.DefaultLoaderSource',
+                          help='loader source class for the parser')
+        self.add_argument('--reader-source',
+                          default='aria.reading.DefaultReaderSource',
+                          help='reader source class for the parser')
+        self.add_argument('--presenter-source',
+                          default='aria.presentation.DefaultPresenterSource',
+                          help='presenter source class for the parser')
+        self.add_argument('--presenter', help='force use of this presenter class in parser')
+        self.add_argument('--prefix', nargs='*', help='prefixes for imports')
+        self.add_flag_argument('debug',
+                               help_true='print debug info',
+                               help_false='don\'t print debug info')
+        self.add_flag_argument('cached-methods',
+                               help_true='enable cached methods',
+                               help_false='disable cached methods',
+                               default=True)
+
+    def parse_known_args(self, args=None, namespace=None):
+        namespace, args = super(CommonArgumentParser, self).parse_known_args(args, namespace)
+
+        if namespace.prefix:
+            for prefix in namespace.prefix:
+                URI_LOADER_PREFIXES.append(prefix)
+
+        cachedmethod.ENABLED = namespace.cached_methods
+
+        return namespace, args
+
+def create_context_from_namespace(namespace, **kwargs):
+    args = vars(namespace).copy()
+    args.update(kwargs)
+    return create_context(**args)
+
+def create_context(uri, loader_source, reader_source, presenter_source, presenter, debug, **kwargs):
+    context = ConsumptionContext()
+    context.loading.loader_source = import_fullname(loader_source)()
+    context.reading.reader_source = import_fullname(reader_source)()
+    context.presentation.location = UriLocation(uri) if isinstance(uri, basestring) else uri
+    context.presentation.presenter_source = import_fullname(presenter_source)()
+    context.presentation.presenter_class = import_fullname(presenter)
+    context.presentation.print_exceptions = debug
+    return context
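
For illustration only, here is how create_context might be called directly, using the same
defaults that CommonArgumentParser supplies (the blueprint URI is a placeholder):

    context = create_context(
        uri='blueprint.yaml',
        loader_source='aria.loading.DefaultLoaderSource',
        reader_source='aria.reading.DefaultReaderSource',
        presenter_source='aria.presentation.DefaultPresenterSource',
        presenter=None,   # let the presenter source choose
        debug=False)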

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/6a4dc43f/aria/parser/tools/web/index.html
----------------------------------------------------------------------
diff --git a/aria/parser/tools/web/index.html b/aria/parser/tools/web/index.html
new file mode 100644
index 0000000..31b459d
--- /dev/null
+++ b/aria/parser/tools/web/index.html
@@ -0,0 +1,8 @@
+<html>
+<head>
+    <title>ARIA REST Service</title>
+</head>
+<body>
+    <h1>ARIA REST Service</h1>
+</body>
+</html>

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/6a4dc43f/aria/parser/utils/__init__.py
----------------------------------------------------------------------
diff --git a/aria/parser/utils/__init__.py b/aria/parser/utils/__init__.py
new file mode 100644
index 0000000..ba42565
--- /dev/null
+++ b/aria/parser/utils/__init__.py
@@ -0,0 +1,81 @@
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from clint.textui import indent
+
+from .uris import as_file
+from .openclose import OpenClose
+from .rest_client import call_rest
+from .argparse import ArgumentParser
+from .console import (puts, Colored)
+from .caching import (cachedmethod, HasCachedMethods)
+from .imports import (import_fullname, import_modules)
+from .rest_server import (RestServer, RestRequestHandler)
+from .exceptions import (print_exception, print_traceback)
+from .daemon import (start_daemon, stop_daemon, status_daemon)
+from .threading import (ExecutorException, FixedThreadPoolExecutor, LockedList)
+from .collections import (FrozenList, EMPTY_READ_ONLY_LIST, FrozenDict, EMPTY_READ_ONLY_DICT,
+                          StrictList, StrictDict, merge, prune, deepcopy_with_locators,
+                          copy_locators, is_removable)
+from .formatting import (JsonAsRawEncoder, YamlAsRawDumper, full_type_name, safe_str, safe_repr,
+                         string_list_as_string, as_raw, as_raw_list, as_raw_dict, as_agnostic,
+                         json_dumps, yaml_dumps, yaml_loads)
+
+__all__ = (
+    'OpenClose',
+    'cachedmethod',
+    'HasCachedMethods',
+    'JsonAsRawEncoder',
+    'YamlAsRawDumper',
+    'full_type_name',
+    'safe_str',
+    'safe_repr',
+    'string_list_as_string',
+    'as_raw',
+    'as_raw_list',
+    'as_raw_dict',
+    'as_agnostic',
+    'json_dumps',
+    'yaml_dumps',
+    'yaml_loads',
+    'FrozenList',
+    'EMPTY_READ_ONLY_LIST',
+    'FrozenDict',
+    'EMPTY_READ_ONLY_DICT',
+    'StrictList',
+    'StrictDict',
+    'merge',
+    'prune',
+    'deepcopy_with_locators',
+    'copy_locators',
+    'is_removable',
+    'print_exception',
+    'print_traceback',
+    'import_fullname',
+    'import_modules',
+    'ExecutorException',
+    'FixedThreadPoolExecutor',
+    'LockedList',
+    'as_file',
+    'ArgumentParser',
+    'puts',
+    'Colored',
+    'indent',
+    'RestServer',
+    'RestRequestHandler',
+    'call_rest',
+    'start_daemon',
+    'stop_daemon',
+    'status_daemon')

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/6a4dc43f/aria/parser/utils/argparse.py
----------------------------------------------------------------------
diff --git a/aria/parser/utils/argparse.py b/aria/parser/utils/argparse.py
new file mode 100644
index 0000000..071752d
--- /dev/null
+++ b/aria/parser/utils/argparse.py
@@ -0,0 +1,113 @@
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from __future__ import absolute_import  # so we can import standard 'argparse'
+
+from argparse import ArgumentParser as BaseArgumentParser
+
+class ArgumentParser(BaseArgumentParser):
+    """
+    Enhanced argument parser.
+
+    Applied patch to fix `this issue <https://bugs.python.org/issue22433>`__.
+    """
+
+    def add_flag_argument(self, name, help_true=None, help_false=None, default=False):
+        """
+        Adds a flag argument as two arguments: :code:`--my-flag` and :code:`--no-my-flag`.
+        """
+
+        dest = name.replace('-', '_')
+
+        if default:
+            if help_true is not None:
+                help_true += ' (default)'
+            else:
+                help_true = '(default)'
+        else:
+            if help_false is not None:
+                help_false += ' (default)'
+            else:
+                help_false = '(default)'
+
+        group = self.add_mutually_exclusive_group()
+        group.add_argument('--%s' % name, action='store_true', help=help_true)
+        group.add_argument('--no-%s' % name, dest=dest, action='store_false', help=help_false)
+
+        self.set_defaults(**{dest: default})
+
+    def _parse_optional(self, arg_string):
+
+        if self._is_positional(arg_string):
+            return None
+
+        # if the option string is present in the parser, return the action
+        if arg_string in self._option_string_actions:
+            action = self._option_string_actions[arg_string]
+            return action, arg_string, None
+
+        # if the option string before the "=" is present, return the action
+        if '=' in arg_string:
+            option_string, explicit_arg = arg_string.split('=', 1)
+            if option_string in self._option_string_actions:
+                action = self._option_string_actions[option_string]
+                return action, option_string, explicit_arg
+
+        # search through all possible prefixes of the option string
+        # and all actions in the parser for possible interpretations
+        option_tuples = self._get_option_tuples(arg_string)
+
+        # if multiple actions match, the option string was ambiguous
+        if len(option_tuples) > 1:
+            options = ', '.join(
+                [option_string for action, option_string, explicit_arg in option_tuples])
+            tup = arg_string, options
+            self.error('ambiguous option: %s could match %s' % tup)
+
+        # if exactly one action matched, this segmentation is good,
+        # so return the parsed action
+        elif len(option_tuples) == 1:
+            option_tuple, = option_tuples
+            return option_tuple
+
+        # if it was not found as an option, but it looks like a negative
+        # number, it was meant to be positional
+        # unless there are negative-number-like options
+        if self._negative_number_matcher.match(arg_string):
+            if not self._has_negative_number_optionals:
+                return None
+
+        # it was meant to be an optional but there is no such option
+        # in this parser (though it might be a valid option in a subparser)
+        return None, arg_string, None
+
+    def _is_positional(self, arg_string):
+        # if it's an empty string, it was meant to be a positional
+        if not arg_string:
+            return True
+
+        # if it doesn't start with a prefix, it was meant to be positional
+        if not arg_string[0] in self.prefix_chars:
+            return True
+
+        # if it's just a single character, it was meant to be positional
+        if len(arg_string) == 1:
+            return True
+
+        # if it contains a space, it was meant to be a positional
+        if ' ' in arg_string and arg_string[0] not in self.prefix_chars:
+            return True
+
+        return False
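
A small sketch of the flag pair that add_flag_argument produces (illustrative only):

    from aria.parser.utils.argparse import ArgumentParser

    parser = ArgumentParser(description='example')
    parser.add_flag_argument('cached-methods',
                             help_true='enable cached methods',
                             help_false='disable cached methods',
                             default=True)
    namespace, _ = parser.parse_known_args(['--no-cached-methods'])
    print(namespace.cached_methods)  # False; without the flag it defaults to True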

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/6a4dc43f/aria/parser/utils/caching.py
----------------------------------------------------------------------
diff --git a/aria/parser/utils/caching.py b/aria/parser/utils/caching.py
new file mode 100644
index 0000000..0b21560
--- /dev/null
+++ b/aria/parser/utils/caching.py
@@ -0,0 +1,132 @@
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from __future__ import absolute_import  # so we can import standard 'collections' and 'threading'
+
+from threading import Lock
+from functools import partial
+from collections import OrderedDict
+
+
+
+class cachedmethod(object):  # pylint: disable=invalid-name
+    """
+    Decorator for caching method return values.
+
+    The implementation is thread-safe.
+
+    Supports :code:`cache_info` to be compatible with Python 3's :code:`functools.lru_cache`.
+    Note that the statistics are combined for all instances of the class.
+
+    Won't use the cache if not called when bound to an object, allowing you to override the cache.
+
+    Adapted from `this solution
+    <http://code.activestate.com/recipes/577452-a-memoize-decorator-for-instance-methods/>`__.
+    """
+
+    ENABLED = True
+
+    def __init__(self, func):
+        self.func = func
+        self.hits = 0
+        self.misses = 0
+        self.lock = Lock()
+
+    def cache_info(self):
+        with self.lock:
+            return (self.hits, self.misses, None, self.misses)
+
+    def reset_cache_info(self):
+        with self.lock:
+            self.hits = 0
+            self.misses = 0
+
+    def __get__(self, instance, owner):
+        if instance is None:
+            # Don't use cache if not bound to an object
+            # Note: This is also a way for callers to override the cache
+            return self.func
+        return partial(self, instance)
+
+    def __call__(self, *args, **kwargs):
+        if not self.ENABLED:
+            return self.func(*args, **kwargs)
+
+        instance = args[0]
+        cache = instance.get_method_cache()
+
+        key = (self.func, args[1:], frozenset(kwargs.items()))
+
+        try:
+            with self.lock:
+                return_value = cache[key]
+                self.hits += 1
+        except KeyError:
+            return_value = self.func(*args, **kwargs)
+            with self.lock:
+                cache[key] = return_value
+                self.misses += 1
+            # Another thread may override our cache entry here, so we need to read
+            # it again to make sure all threads use the same return value
+            return_value = cache.get(key, return_value)
+
+        return return_value
+
+class HasCachedMethods(object):
+    """
+    Provides convenience methods for working with :class:`cachedmethod`.
+    """
+
+    def __init__(self, method_cache=None):
+        self._method_cache = method_cache or {}
+
+    def get_method_cache(self):
+        return self._method_cache
+
+    @property
+    def _method_cache_info(self):
+        """
+        The cache infos of all cached methods.
+
+        :rtype: dict of str, 4-tuple
+        """
+
+        cached_info = OrderedDict()
+        for k, v in self.__class__.__dict__.iteritems():
+            if isinstance(v, property):
+                # The property getter might be cached
+                v = v.fget
+            if hasattr(v, 'cache_info'):
+                cached_info[k] = v.cache_info()
+        return cached_info
+
+    def _reset_method_cache(self):
+        """
+        Resets the caches of all cached methods.
+        """
+
+        if hasattr(self, '_method_cache'):
+            self._method_cache = {}
+
+        # Note: Another thread may already be storing entries in the cache here.
+        # But it's not a big deal! It only means that our cache_info isn't
+        # guaranteed to be accurate.
+
+        for entry in self.__class__.__dict__.itervalues():
+            if isinstance(entry, property):
+                # The property getter might be cached
+                entry = entry.fget
+            if hasattr(entry, 'reset_cache_info'):
+                entry.reset_cache_info()
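
For reference, a minimal sketch of cachedmethod used together with HasCachedMethods (the
Fibonacci class is made up for illustration):

    class Fibonacci(HasCachedMethods):
        @cachedmethod
        def calc(self, n):
            return n if n < 2 else self.calc(n - 1) + self.calc(n - 2)

    f = Fibonacci()
    print(f.calc(30))            # each distinct argument is computed only once
    print(f._method_cache_info)  # cache_info tuple per cached method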

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/6a4dc43f/aria/parser/utils/collections.py
----------------------------------------------------------------------
diff --git a/aria/parser/utils/collections.py b/aria/parser/utils/collections.py
new file mode 100644
index 0000000..d4b461d
--- /dev/null
+++ b/aria/parser/utils/collections.py
@@ -0,0 +1,283 @@
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from __future__ import absolute_import  # so we can import standard 'collections'
+
+from copy import deepcopy
+from collections import OrderedDict
+
+def cls_name(cls):
+    module = str(cls.__module__)
+    name = str(cls.__name__)
+    return name if module == '__builtin__' else '%s.%s' % (module, name)
+
+class FrozenList(list):
+    """
+    An immutable list.
+
+    After initialization it will raise :class:`TypeError` exceptions if modification
+    is attempted.
+
+    Note that objects stored in the list may not be immutable.
+    """
+    def __init__(self, *args, **kwargs):
+        self.locked = False
+        super(FrozenList, self).__init__(*args, **kwargs)
+        self.locked = True
+
+    def __setitem__(self, index, value):
+        if self.locked:
+            raise TypeError('frozen list')
+        return super(FrozenList, self).__setitem__(index, value)
+
+    def __delitem__(self, index):
+        if self.locked:
+            raise TypeError('frozen list')
+        return super(FrozenList, self).__delitem__(index)
+
+    def __iadd__(self, values):
+        if self.locked:
+            raise TypeError('frozen list')
+        return super(FrozenList, self).__iadd__(values)
+
+    def __deepcopy__(self, memo):
+        res = [deepcopy(v, memo) for v in self]
+        return FrozenList(res)
+
+    def append(self, value):
+        if self.locked:
+            raise TypeError('frozen list')
+        return super(FrozenList, self).append(value)
+
+    def extend(self, values):
+        if self.locked:
+            raise TypeError('frozen list')
+        return super(FrozenList, self).extend(values)
+
+    def insert(self, index, value):
+        if self.locked:
+            raise TypeError('frozen list')
+        return super(FrozenList, self).insert(index, value)
+
+EMPTY_READ_ONLY_LIST = FrozenList()
+
+class FrozenDict(OrderedDict):
+    """
+    An immutable ordered dict.
+
+    After initialization it will raise :class:`TypeError` exceptions if modification
+    is attempted.
+
+    Note that objects stored in the dict may not be immutable.
+    """
+
+    def __init__(self, *args, **kwargs):
+        self.locked = False
+        super(FrozenDict, self).__init__(*args, **kwargs)
+        self.locked = True
+
+    def __setitem__(self, key, value, **_):
+        if self.locked:
+            raise TypeError('frozen dict')
+        return super(FrozenDict, self).__setitem__(key, value)
+
+    def __delitem__(self, key, **_):
+        if self.locked:
+            raise TypeError('frozen dict')
+        return super(FrozenDict, self).__delitem__(key)
+
+    def __deepcopy__(self, memo):
+        res = [(deepcopy(k, memo), deepcopy(v, memo)) for k, v in self.iteritems()]
+        return FrozenDict(res)
+
+EMPTY_READ_ONLY_DICT = FrozenDict()
+
+class StrictList(list):
+    """
+    A list that raises :class:`TypeError` exceptions when objects of the wrong type are inserted.
+    """
+
+    def __init__(self,
+                 items=None,
+                 value_class=None,
+                 wrapper_function=None,
+                 unwrapper_function=None):
+        super(StrictList, self).__init__()
+        if isinstance(items, StrictList):
+            self.value_class = items.value_class
+            self.wrapper_function = items.wrapper_function
+            self.unwrapper_function = items.unwrapper_function
+        self.value_class = value_class
+        self.wrapper_function = wrapper_function
+        self.unwrapper_function = unwrapper_function
+        if items:
+            for item in items:
+                self.append(item)
+
+    def _wrap(self, value):
+        if (self.value_class is not None) and (not isinstance(value, self.value_class)):
+            raise TypeError('value must be a "%s": %s' % (cls_name(self.value_class), repr(value)))
+        if self.wrapper_function is not None:
+            value = self.wrapper_function(value)
+        return value
+
+    def _unwrap(self, value):
+        if self.unwrapper_function is not None:
+            value = self.unwrapper_function(value)
+        return value
+
+    def __getitem__(self, index):
+        value = super(StrictList, self).__getitem__(index)
+        value = self._unwrap(value)
+        return value
+
+    def __setitem__(self, index, value):
+        value = self._wrap(value)
+        return super(StrictList, self).__setitem__(index, value)
+
+    def __iadd__(self, values):
+        values = [self._wrap(v) for v in values]
+        return super(StrictList, self).__iadd__(values)
+
+    def append(self, value):
+        value = self._wrap(value)
+        return super(StrictList, self).append(value)
+
+    def extend(self, values):
+        values = [self._wrap(v) for v in values]
+        return super(StrictList, self).extend(values)
+
+    def insert(self, index, value):
+        value = self._wrap(value)
+        return super(StrictList, self).insert(index, value)
+
+class StrictDict(OrderedDict):
+    """
+    An ordered dict that raises :class:`TypeError` exceptions
+    when keys or values of the wrong type are used.
+    """
+
+    def __init__(self,
+                 items=None,
+                 key_class=None,
+                 value_class=None,
+                 wrapper_function=None,
+                 unwrapper_function=None):
+        super(StrictDict, self).__init__()
+        if isinstance(items, StrictDict):
+            self.key_class = items.key_class
+            self.value_class = items.value_class
+            self.wrapper_function = items.wrapper_function
+            self.unwrapper_function = items.unwrapper_function
+        self.key_class = key_class
+        self.value_class = value_class
+        self.wrapper_function = wrapper_function
+        self.unwrapper_function = unwrapper_function
+        if items:
+            for k, v in items:
+                self[k] = v
+
+    def __getitem__(self, key):
+        if (self.key_class is not None) and (not isinstance(key, self.key_class)):
+            raise TypeError('key must be a "%s": %s' % (cls_name(self.key_class), repr(key)))
+        value = super(StrictDict, self).__getitem__(key)
+        if self.unwrapper_function is not None:
+            value = self.unwrapper_function(value)
+        return value
+
+    def __setitem__(self, key, value, **_):
+        if (self.key_class is not None) and (not isinstance(key, self.key_class)):
+            raise TypeError('key must be a "%s": %s' % (cls_name(self.key_class), repr(key)))
+        if (self.value_class is not None) and (not isinstance(value, self.value_class)):
+            raise TypeError('value must be a "%s": %s' % (cls_name(self.value_class), repr(value)))
+        if self.wrapper_function is not None:
+            value = self.wrapper_function(value)
+        return super(StrictDict, self).__setitem__(key, value)
+
+def merge(dict_a, dict_b, path=None, strict=False):
+    """
+    Merges dicts, recursively.
+    """
+
+    # TODO: a.add_yaml_merge(b), see https://bitbucket.org/ruamel/yaml/src/
+    # TODO: 86622a1408e0f171a12e140d53c4ffac4b6caaa3/comments.py?fileviewer=file-view-default
+
+    path = path or []
+    for key, value_b in dict_b.iteritems():
+        if key in dict_a:
+            value_a = dict_a[key]
+            if isinstance(value_a, dict) and isinstance(value_b, dict):
+                merge(value_a, value_b, path + [str(key)], strict)
+            elif value_a != value_b:
+                if strict:
+                    raise ValueError('dict merge conflict at %s' % '.'.join(path + [str(key)]))
+                else:
+                    dict_a[key] = value_b
+        else:
+            dict_a[key] = value_b
+    return dict_a
+
+def is_removable(_container, _key, v):
+    return (v is None) or ((isinstance(v, dict) or isinstance(v, list)) and (len(v) == 0))
+
+def prune(value, is_removable_function=is_removable):
+    """
+    Deletes :code:`None` and empty lists and dicts, recursively.
+    """
+
+    if isinstance(value, list):
+        # iterate indexes in reverse so deletions don't skip elements
+        for i in reversed(range(len(value))):
+            v = value[i]
+            if is_removable_function(value, i, v):
+                del value[i]
+            else:
+                prune(v, is_removable_function)
+    elif isinstance(value, dict):
+        # items() returns a copy, so it is safe to delete keys while iterating
+        for k, v in value.items():
+            if is_removable_function(value, k, v):
+                del value[k]
+            else:
+                prune(v, is_removable_function)
+
+    return value
+
+def deepcopy_with_locators(value):
+    """
+    Like :code:`deepcopy`, but also copies over locators.
+    """
+
+    res = deepcopy(value)
+    copy_locators(res, value)
+    return res
+
+def copy_locators(target, source):
+    """
+    Copies over :code:`_locator` for all elements, recursively.
+
+    Assumes that target and source have exactly the same list/dict structure.
+    """
+
+    locator = getattr(source, '_locator', None)
+    if locator is not None:
+        try:
+            setattr(target, '_locator', locator)
+        except AttributeError:
+            pass
+
+    if isinstance(target, list) and isinstance(source, list):
+        for i, _ in enumerate(target):
+            copy_locators(target[i], source[i])
+    elif isinstance(target, dict) and isinstance(source, dict):
+        for k, v in target.iteritems():
+            copy_locators(v, source[k])
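
A brief sketch of the collection helpers above (illustrative values only):

    d = StrictDict(key_class=str, value_class=int)
    d['a'] = 1          # OK
    # d['b'] = 'x'      # would raise TypeError (wrong value type)

    defaults = {'a': 1, 'b': {'c': 2}}
    overrides = {'b': {'c': 3}, 'd': None}
    merged = merge(defaults, overrides)
    print(prune(merged))  # {'a': 1, 'b': {'c': 3}} -- the None value is pruned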

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/6a4dc43f/aria/parser/utils/console.py
----------------------------------------------------------------------
diff --git a/aria/parser/utils/console.py b/aria/parser/utils/console.py
new file mode 100644
index 0000000..15c01e2
--- /dev/null
+++ b/aria/parser/utils/console.py
@@ -0,0 +1,60 @@
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from clint.textui.core import STDOUT
+from clint.textui import puts as _puts
+from clint.textui.colored import ColoredString as _ColoredString
+
+from .formatting import safe_str
+
+class ColoredString(_ColoredString):
+    def __init__(self, color, str_, always_color=False, bold=False):
+        super(ColoredString, self).__init__(color, safe_str(str_), always_color, bold)
+
+def puts(string='', newline=True, stream=STDOUT):
+    _puts(safe_str(string), newline, stream)
+
+class Colored(object):
+    @staticmethod
+    def black(string, always=False, bold=False):
+        return ColoredString('BLACK', string, always_color=always, bold=bold)
+
+    @staticmethod
+    def red(string, always=False, bold=False):
+        return ColoredString('RED', string, always_color=always, bold=bold)
+
+    @staticmethod
+    def green(string, always=False, bold=False):
+        return ColoredString('GREEN', string, always_color=always, bold=bold)
+
+    @staticmethod
+    def yellow(string, always=False, bold=False):
+        return ColoredString('YELLOW', string, always_color=always, bold=bold)
+
+    @staticmethod
+    def blue(string, always=False, bold=False):
+        return ColoredString('BLUE', string, always_color=always, bold=bold)
+
+    @staticmethod
+    def magenta(string, always=False, bold=False):
+        return ColoredString('MAGENTA', string, always_color=always, bold=bold)
+
+    @staticmethod
+    def cyan(string, always=False, bold=False):
+        return ColoredString('CYAN', string, always_color=always, bold=bold)
+
+    @staticmethod
+    def white(string, always=False, bold=False):
+        return ColoredString('WHITE', string, always_color=always, bold=bold)

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/6a4dc43f/aria/parser/utils/daemon.py
----------------------------------------------------------------------
diff --git a/aria/parser/utils/daemon.py b/aria/parser/utils/daemon.py
new file mode 100644
index 0000000..c9cbd35
--- /dev/null
+++ b/aria/parser/utils/daemon.py
@@ -0,0 +1,70 @@
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from __future__ import absolute_import  # so we can import standard 'daemon'
+
+try:
+    from .console import puts, Colored
+    from daemon import DaemonContext
+    from daemon.pidfile import TimeoutPIDLockFile
+    from daemon.runner import is_pidfile_stale
+    from time import sleep
+    import os
+    import signal
+
+    def start_daemon(pidfile_path, log_path, acquire_timeout=5):
+        pidfile = TimeoutPIDLockFile(pidfile_path, acquire_timeout=acquire_timeout)
+        if is_pidfile_stale(pidfile):
+            pidfile.break_lock()
+        if pidfile.is_locked():
+            pid = pidfile.read_pid()
+            if pid is not None:
+                puts(Colored.red('Already running at pid: %d' % pid))
+            else:
+                puts(Colored.red('Already running'))
+            return None
+        logfile = open(log_path, 'w+t')
+        puts(Colored.blue('Starting'))
+        return DaemonContext(pidfile=pidfile, stdout=logfile, stderr=logfile)
+
+    def stop_daemon(pidfile_path, acquire_timeout=5):
+        pidfile = TimeoutPIDLockFile(pidfile_path, acquire_timeout=acquire_timeout)
+        pid = pidfile.read_pid()
+        if pid is not None:
+            puts(Colored.blue('Stopping pid: %d' % pid))
+            os.kill(pid, signal.SIGTERM)
+            while pidfile.is_locked():
+                puts(Colored.cyan('Waiting...'))
+                sleep(0.1)
+            puts(Colored.blue('Stopped'))
+        else:
+            puts(Colored.red('Not running'))
+
+    def status_daemon(pidfile_path, acquire_timeout=5):
+        pid = TimeoutPIDLockFile(pidfile_path, acquire_timeout=acquire_timeout).read_pid()
+        if pid is not None:
+            puts(Colored.blue('Running at pid: %d' % pid))
+        else:
+            puts(Colored.blue('Not running'))
+
+except ImportError:
+    def start_daemon(*args, **kwargs):
+        puts(Colored.red('Cannot start daemon in this environment'))
+
+    def stop_daemon(*args, **kwargs):
+        puts(Colored.red('Not running'))
+
+    def status_daemon(*args, **kwargs):
+        puts(Colored.blue('Not running'))

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/6a4dc43f/aria/parser/utils/exceptions.py
----------------------------------------------------------------------
diff --git a/aria/parser/utils/exceptions.py b/aria/parser/utils/exceptions.py
new file mode 100644
index 0000000..0370bb3
--- /dev/null
+++ b/aria/parser/utils/exceptions.py
@@ -0,0 +1,64 @@
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import sys
+import linecache
+
+from clint.textui import indent
+from .console import (puts, Colored)
+
+
+def print_exception(e, full=True, cause=False, traceback=None):
+    """
+    Prints the exception with nice colors and such.
+    """
+    def format_heading(e):
+        return '%s%s: %s' % (Colored.red('Caused by ') if cause else '', Colored.red(
+            e.__class__.__name__, bold=True), Colored.red(e))
+
+    puts(format_heading(e))
+    if full:
+        if cause:
+            if traceback:
+                print_traceback(traceback)
+        else:
+            print_traceback()
+    if hasattr(e, 'cause') and e.cause:
+        traceback = e.cause_traceback if hasattr(e, 'cause_traceback') else None
+        print_exception(e.cause, full=full, cause=True, traceback=traceback)
+
+def print_traceback(traceback=None):
+    """
+    Prints the traceback with nice colors and such.
+    """
+
+    if traceback is None:
+        _, _, traceback = sys.exc_info()
+    while traceback is not None:
+        frame = traceback.tb_frame
+        lineno = traceback.tb_lineno
+        code = frame.f_code
+        filename = code.co_filename
+        name = code.co_name
+        with indent(2):
+            puts('File "%s", line %s, in %s' % (Colored.blue(filename),
+                                                Colored.cyan(lineno),
+                                                Colored.cyan(name)))
+            linecache.checkcache(filename)
+            line = linecache.getline(filename, lineno, frame.f_globals)
+            if line:
+                with indent(2):
+                    puts(Colored.black(line.strip()))
+        traceback = traceback.tb_next

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/6a4dc43f/aria/parser/utils/formatting.py
----------------------------------------------------------------------
diff --git a/aria/parser/utils/formatting.py b/aria/parser/utils/formatting.py
new file mode 100644
index 0000000..222dac9
--- /dev/null
+++ b/aria/parser/utils/formatting.py
@@ -0,0 +1,205 @@
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from __future__ import absolute_import  # so we can import standard 'collections'
+
+import json
+from types import MethodType
+from collections import OrderedDict
+
+from ruamel import yaml  # @UnresolvedImport
+
+from aria.parser.utils.collections import (FrozenList, FrozenDict, StrictList, StrictDict)
+
+# Add our types to ruamel.yaml (for round trips)
+yaml.representer.RoundTripRepresenter.add_representer(
+    FrozenList, yaml.representer.RoundTripRepresenter.represent_list)
+yaml.representer.RoundTripRepresenter.add_representer(
+    FrozenDict, yaml.representer.RoundTripRepresenter.represent_dict)
+yaml.representer.RoundTripRepresenter.add_representer(
+    StrictList, yaml.representer.RoundTripRepresenter.represent_list)
+yaml.representer.RoundTripRepresenter.add_representer(
+    StrictDict, yaml.representer.RoundTripRepresenter.represent_dict)
+
+# Without this, ruamel.yaml will output "!!omap" types, which is
+# technically correct but unnecessarily verbose for our uses
+yaml.representer.RoundTripRepresenter.add_representer(
+    OrderedDict, yaml.representer.RoundTripRepresenter.represent_dict)
+
+
+class JsonAsRawEncoder(json.JSONEncoder):
+    """
+    A :class:`JSONEncoder` that will use the :code:`as_raw` property of objects
+    if available.
+    """
+    def raw_encoder_default(self, obj):
+        try:
+            return iter(obj)
+        except TypeError:
+            if hasattr(obj, 'as_raw'):
+                return as_raw(obj)
+            return str(obj)
+        return super(JsonAsRawEncoder, self).default(obj)
+
+    def __init__(self, *args, **kwargs):
+        kwargs['default'] = self.raw_encoder_default
+        super(JsonAsRawEncoder, self).__init__(*args, **kwargs)
+
+
+class YamlAsRawDumper(yaml.dumper.RoundTripDumper):  # pylint: disable=too-many-ancestors
+    """
+    A :class:`RoundTripDumper` that will use the :code:`as_raw` property of objects
+    if available.
+    """
+
+    def represent_data(self, data):
+        if hasattr(data, 'as_raw'):
+            data = as_raw(data)
+        return super(YamlAsRawDumper, self).represent_data(data)
+
+
+def full_type_name(value):
+    """
+    The full class name of a type or object.
+    """
+
+    if not isinstance(value, type):
+        value = value.__class__
+    module = str(value.__module__)
+    name = str(value.__name__)
+    return name if module == '__builtin__' else '%s.%s' % (module, name)
+
+
+def safe_str(value):
+    """
+    Like :code:`str` coercion, but makes sure that Unicode strings are properly
+    encoded, and will never return None.
+    """
+
+    try:
+        return str(value)
+    except UnicodeEncodeError:
+        return unicode(value).encode('utf8')
+
+
+def safe_repr(value):
+    """
+    Like :code:`repr`, but calls :code:`as_raw` and :code:`as_agnostic` first.
+    """
+
+    return repr(as_agnostic(as_raw(value)))
+
+
+def string_list_as_string(strings):
+    """
+    Nice representation of a list of strings.
+    """
+
+    return ', '.join('"%s"' % safe_str(v) for v in strings)
+
+
+def as_raw(value):
+    """
+    Converts values using their :code:`as_raw` property, if it exists, recursively.
+    """
+
+    if hasattr(value, 'as_raw'):
+        value = value.as_raw
+        if isinstance(value, MethodType):
+            # Old-style Python classes don't support properties
+            value = value()
+    elif isinstance(value, list):
+        value = list(value)
+        for i, _ in enumerate(value):
+            value[i] = as_raw(value[i])
+    elif isinstance(value, dict):
+        value = dict(value)
+        for k, v in value.iteritems():
+            value[k] = as_raw(v)
+    return value
+
+
+def as_raw_list(value):
+    """
+    Assuming value is a list, converts its values using :code:`as_raw`.
+    """
+
+    if value is None:
+        return []
+    if isinstance(value, dict):
+        value = value.itervalues()
+    return [as_raw(v) for v in value]
+
+
+def as_raw_dict(value):
+    """
+    Assuming value is a dict, converts its values using :code:`as_raw`.
+    The keys are left as is.
+    """
+
+    if value is None:
+        return OrderedDict()
+    return OrderedDict((
+        (k, as_raw(v)) for k, v in value.iteritems()))
+
+
+def as_agnostic(value):
+    """
+    Converts subclasses of list and dict to standard lists and dicts, and Unicode strings
+    to non-Unicode if possible, recursively.
+
+    Useful for creating human-readable output of structures.
+    """
+
+    if isinstance(value, unicode):
+        try:
+            value = str(value)
+        except UnicodeEncodeError:
+            pass
+    elif isinstance(value, list):
+        value = list(value)
+    elif isinstance(value, dict):
+        value = dict(value)
+
+    if isinstance(value, list):
+        for i, _ in enumerate(value):
+            value[i] = as_agnostic(value[i])
+    elif isinstance(value, dict):
+        for k, v in value.iteritems():
+            value[k] = as_agnostic(v)
+
+    return value
+
+
+def json_dumps(value, indent=2):
+    """
+    JSON dumps that supports Unicode and the :code:`as_raw` property of objects
+    if available.
+    """
+
+    return json.dumps(value, indent=indent, ensure_ascii=False, cls=JsonAsRawEncoder)
+
+
+def yaml_dumps(value, indent=2):
+    """
+    YAML dumps that supports Unicode and the :code:`as_raw` property of objects
+    if available.
+    """
+
+    return yaml.dump(value, indent=indent, allow_unicode=True, Dumper=YamlAsRawDumper)
+
+
+def yaml_loads(value):
+    return yaml.load(value, Loader=yaml.SafeLoader)
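
To illustrate the as_raw hook that JsonAsRawEncoder and YamlAsRawDumper rely on (the
Endpoint class is a made-up example):

    class Endpoint(object):
        def __init__(self, host, port):
            self.host, self.port = host, port

        @property
        def as_raw(self):
            return {'host': self.host, 'port': self.port}

    print(json_dumps({'endpoint': Endpoint('localhost', 8080)}))
    # the endpoint is serialized via the dict returned by its as_raw property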

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/6a4dc43f/aria/parser/utils/imports.py
----------------------------------------------------------------------
diff --git a/aria/parser/utils/imports.py b/aria/parser/utils/imports.py
new file mode 100644
index 0000000..8f97156
--- /dev/null
+++ b/aria/parser/utils/imports.py
@@ -0,0 +1,51 @@
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+def import_fullname(name, paths=None):
+    """
+    Imports a variable or class based on a full name, optionally searching for it in the paths.
+    """
+    paths = paths or []
+    if name is None:
+        return None
+
+    def do_import(name):
+        if name and ('.' in name):
+            module_name, name = name.rsplit('.', 1)
+            return getattr(__import__(module_name, fromlist=[name], level=0), name)
+        else:
+            raise ImportError('import not found: %s' % name)
+
+    try:
+        return do_import(name)
+    except ImportError:
+        for path in paths:
+            try:
+                return do_import('%s.%s' % (path, name))
+            except Exception:
+                # Try the next search path; the final raise below reports the failure
+                continue
+
+    raise ImportError('import not found: %s' % name)
+
+def import_modules(name):
+    """
+    Imports a module and all its sub-modules, recursively.
+    Relies on modules defining a 'MODULES' attribute listing their sub-module names.
+    """
+
+    module = __import__(name, fromlist=['MODULES'], level=0)
+    if hasattr(module, 'MODULES'):
+        for module_ in module.MODULES:
+            import_modules('%s.%s' % (name, module_))
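
A hedged sketch of resolving a dotted name with import_fullname (module path taken from the diff
header above; the resolved class is just an example):

    from aria.parser.utils.imports import import_fullname

    # Tries 'OrderedDict' as a full name first, then '<path>.OrderedDict' for each search path
    ordered_dict_class = import_fullname('OrderedDict', paths=['collections'])
    print ordered_dict_class  # <class 'collections.OrderedDict'>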

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/6a4dc43f/aria/parser/utils/openclose.py
----------------------------------------------------------------------
diff --git a/aria/parser/utils/openclose.py b/aria/parser/utils/openclose.py
new file mode 100644
index 0000000..19740eb
--- /dev/null
+++ b/aria/parser/utils/openclose.py
@@ -0,0 +1,32 @@
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+class OpenClose(object):
+    """
+    Wraps an object that has open() and close() methods to support the "with" keyword.
+    """
+
+    def __init__(self, wrapped):
+        self.wrapped = wrapped
+
+    def __enter__(self):
+        if hasattr(self.wrapped, 'open'):
+            self.wrapped.open()
+        return self.wrapped
+
+    def __exit__(self, the_type, value, traceback):
+        if hasattr(self.wrapped, 'close'):
+            self.wrapped.close()
+        return False
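
A hedged sketch of wrapping an arbitrary resource with OpenClose; the Session class here is
hypothetical and stands in for anything exposing open()/close():

    from aria.parser.utils.openclose import OpenClose

    class Session(object):  # hypothetical resource with open()/close()
        def open(self):
            print 'session opened'
        def close(self):
            print 'session closed'

    with OpenClose(Session()) as session:
        print 'using', session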

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/6a4dc43f/aria/parser/utils/rest_client.py
----------------------------------------------------------------------
diff --git a/aria/parser/utils/rest_client.py b/aria/parser/utils/rest_client.py
new file mode 100644
index 0000000..905e372
--- /dev/null
+++ b/aria/parser/utils/rest_client.py
@@ -0,0 +1,59 @@
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import json
+import urllib2
+
+def call_rest(url, payload=None, with_payload_method='PUT'):
+    """
+    REST call with JSON decoding of the response and JSON payloads.
+    """
+
+    if payload:
+        if not isinstance(payload, basestring):
+            payload = json.dumps(payload)
+        # PUT or POST
+        response = urllib2.urlopen(MethodRequest(
+            url,
+            payload,
+            {'Content-Type': 'application/json'}, method=with_payload_method))
+    else:
+        # GET
+        response = urllib2.urlopen(url)
+    response = response.read().decode()
+    return json.loads(response)
+
+#
+# Utils
+#
+
+class MethodRequest(urllib2.Request):
+    """
+    Workaround to support all HTTP methods.
+
+    From `here <https://gist.github.com/logic/2715756>`__.
+    """
+
+    def __init__(self, *args, **kwargs):
+        if 'method' in kwargs:
+            self._method = kwargs['method']
+            del kwargs['method']
+        else:
+            self._method = None
+        urllib2.Request.__init__(self, *args, **kwargs)
+
+    def get_method(self, *args, **kwargs):
+        return self._method if self._method is not None else urllib2.Request.get_method(
+            self, *args, **kwargs)
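
A hedged sketch of call_rest; the URLs and payload are illustrative only and assume a JSON-speaking
server (such as the one in rest_server.py below) is listening locally:

    from aria.parser.utils.rest_client import call_rest

    # GET (no payload): the JSON response body is decoded into Python objects
    result = call_rest('http://localhost:8080/hello')

    # POST with a JSON payload (with_payload_method defaults to 'PUT')
    result = call_rest('http://localhost:8080/hello',
                       payload={'name': 'world'},
                       with_payload_method='POST')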

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/6a4dc43f/aria/parser/utils/rest_server.py
----------------------------------------------------------------------
diff --git a/aria/parser/utils/rest_server.py b/aria/parser/utils/rest_server.py
new file mode 100644
index 0000000..9e842e7
--- /dev/null
+++ b/aria/parser/utils/rest_server.py
@@ -0,0 +1,250 @@
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from __future__ import absolute_import  # so we can import standard 'collections'
+
+import os
+import re
+import shutil
+import json
+import sys
+import BaseHTTPServer
+from collections import OrderedDict
+
+from ..utils import (puts, Colored)
+
+class RestServer(object):
+    """
+    Straightforward REST server.
+
+    Supports custom handling of all HTTP verbs, with special (optional) support for JSON, as well
+    as serving straightforward static files via GET.
+
+    Properties:
+
+    * :code:`configuration`: An optional configuration object
+    * :code:`port`: HTTP server port
+    * :code:`routes`: :class:`OrderedDict` of routes (see below)
+    * :code:`static_root`: Root directory for static files
+    * :code:`json_encoder`: :class:`JSONEncoder` for responses
+    * :code:`json_decoder`: :class:`JSONDecoder` for requests
+    * :code:`unicode`: True to support Unicode
+
+    The route keys are regular expressions for matching the path. They are checked in order, which
+    is why it's important to use :class:`OrderedDict`.
+
+    The route values are dicts with the following optional fields:
+
+    * :code:`GET`: Function to handle GET for this route
+    * :code:`PUT`: Function to handle PUT for this route
+    * :code:`POST`: Function to handle POST for this route
+    * :code:`DELETE`: Function to handle DELETE for this route
+    * :code:`file`: Attach a static file to this route; it is the path to
+            the file to return relative to :code:`static_root` (if :code:`file` is
+            set then :code:`GET`/:code:`PUT`/:code:`POST`/:code:`DELETE` are ignored)
+    * :code:`media_type`: Media type to set for responses to this
+            route (except error messages, which will be in "text/plain")
+
+    The :code:`GET`/:code:`PUT`/:code:`POST`/:code:`DELETE` handler functions all receive a single
+    argument: an instance of :class:`RestRequestHandler`.
+
+    If you return None, then a 404 error will be generated. Otherwise, a 200 response will be
+    generated and the return value will be written to it. If the :code:`media_type` for the route
+    was set to "application/json", then the return value will first be encoded into JSON using the
+    configured :code:`json_encoder`.
+
+    If you want to write the response yourself, set :code:`handled=True` on the
+    :class:`RestRequestHandler`, which will cause the return value to be ignored (you won't have to
+    return anything). If all you want to do is send an error message, then use
+    :code:`send_plain_text_response`.
+
+    If you raise an (uncaught) exception, then a 500 error will be generated with the exception
+    message.
+
+    To get the payload (for :code:`PUT`/:code:`POST`) use :code:`payload` on the
+    :class:`RestRequestHandler` for plain text, or :code:`json_payload` to use the configured
+    :code:`json_decoder`. Note that it's up to you to check for JSON decoding exceptions and return
+    an appropriate 400 error message.
+    """
+
+    def __init__(self):
+        self.configuration = None
+        self.port = 8080
+        self.routes = OrderedDict()
+        self.static_root = '.'
+        self.json_encoder = json.JSONEncoder(ensure_ascii=False, separators=(',', ':'))
+        self.json_decoder = json.JSONDecoder(object_pairs_hook=OrderedDict)
+        self.unicode = True
+
+    def start(self, daemon=False):
+        """
+        Starts the REST server.
+        """
+
+        if self.unicode:
+            # Fixes issues with decoding HTTP responses
+            # (Not such a great solution! But there doesn't seem to be a better way)
+            reload(sys)
+            sys.setdefaultencoding('utf8')  # @UndefinedVariable
+
+        http_server = BaseHTTPServer.HTTPServer(('', self.port), rest_request_handler(self))
+        if daemon:
+            print 'Running HTTP server daemon at port %d' % self.port
+        else:
+            puts(Colored.red('Running HTTP server at port %d, use CTRL-C to exit' % self.port))
+        try:
+            http_server.serve_forever()
+        except KeyboardInterrupt:
+            pass
+        puts(Colored.red('Stopping HTTP server'))
+        http_server.server_close()
+
+class RestRequestHandler(BaseHTTPServer.BaseHTTPRequestHandler):
+    """
+    Handler for :class:`RestServer`.
+    """
+
+    def __init__(self, rest_server, *args, **kwargs):
+        self.rest_server = rest_server
+        self.handled = False
+        self.matched_re = None
+        self.matched_route = None
+        # Old-style Python classes don't support super
+        BaseHTTPServer.BaseHTTPRequestHandler.__init__(self, *args, **kwargs)
+
+    @property
+    def content_length(self):
+        return int(self.headers.getheader('content-length', 0))
+
+    @property
+    def payload(self):
+        return self.rfile.read(self.content_length)
+
+    @property
+    def json_payload(self):
+        return self.rest_server.json_decoder.decode(self.payload)
+
+    def match_route(self):
+        for path_re, route in self.rest_server.routes.iteritems():
+            if re.match(path_re, self.path):
+                return path_re, route
+        return None, None
+
+    def send_plain_text_response(self, status, content):
+        self.send_response(status)
+        self.send_header('Content-type', 'text/plain')
+        self.end_headers()
+        self.wfile.write(content)
+        self.handled = True
+
+    def send_content_type(self, route=None):
+        if route is None:
+            _, route = self.match_route()
+        media_type = route.get('media_type')
+        if media_type is not None:
+            self.send_header('Content-type', media_type)
+        return media_type
+
+    def _handle_file(self, method):
+        if method != 'GET':
+            self.send_plain_text_response(405, '%s is not supported\n' % method)
+            return
+
+        try:
+            matched_route_file = open(os.path.join(
+                self.rest_server.static_root,
+                self.matched_route['file']))
+            try:
+                self.send_response(200)
+                self.send_content_type(self.matched_route)
+                self.end_headers()
+                shutil.copyfileobj(matched_route_file, self.wfile)
+            finally:
+                matched_route_file.close()
+        except IOError:
+            self.send_plain_text_response(404, 'Not found\n')
+        return
+
+    def handle_method(self, method):
+        # pylint: disable=too-many-return-statements
+        self.matched_re, self.matched_route = self.match_route()
+
+        if self.matched_route is None:
+            self.send_plain_text_response(404, 'Not found\n')
+            return
+
+        if method == 'HEAD':
+            self.send_response(200)
+            self.send_content_type(self.matched_route)
+            self.end_headers()
+            return
+
+        if 'file' in self.matched_route:
+            self._handle_file(method)
+            return
+
+        if method not in self.matched_route:
+            self.send_plain_text_response(405, '%s is not supported\n' % method)
+            return
+
+        try:
+            content = self.matched_route[method](self)
+        except Exception as e:
+            self.send_plain_text_response(500, 'Internal error: %s\n' % e)
+            return
+
+        if self.handled:
+            return
+
+        if content is None:
+            self.send_plain_text_response(404, 'Not found\n')
+            return
+
+        self.send_response(200)
+        media_type = self.send_content_type(self.matched_route)
+        self.end_headers()
+
+        if method == 'DELETE':
+            # No content for DELETE
+            return
+
+        if media_type == 'application/json':
+            self.wfile.write(self.rest_server.json_encoder.encode(content))
+        else:
+            self.wfile.write(content)
+
+    # BaseHTTPRequestHandler
+    # pylint: disable=invalid-name
+    def do_HEAD(self):
+        self.handle_method('HEAD')
+
+    def do_GET(self):
+        self.handle_method('GET')
+
+    def do_POST(self):
+        self.handle_method('POST')
+
+    def do_PUT(self):
+        self.handle_method('PUT')
+
+    def do_DELETE(self):
+        self.handle_method('DELETE')
+
+#
+# Utils
+#
+
+def rest_request_handler(rest_server):
+    return lambda *args, **kwargs: RestRequestHandler(rest_server, *args, **kwargs)
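
A hedged sketch of wiring routes into RestServer, following the route-dict conventions documented
in the class docstring above (the paths, handler, and static file are illustrative):

    from collections import OrderedDict
    from aria.parser.utils.rest_server import RestServer

    def hello(handler):                    # handlers receive the RestRequestHandler
        return {'hello': 'world'}          # encoded via json_encoder for JSON routes

    rest_server = RestServer()
    rest_server.port = 8080
    rest_server.static_root = './web'
    rest_server.routes = OrderedDict((
        ('^/hello$', {'GET': hello, 'media_type': 'application/json'}),
        ('^/$', {'file': 'index.html', 'media_type': 'text/html'})))
    rest_server.start()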

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/6a4dc43f/aria/parser/utils/threading.py
----------------------------------------------------------------------
diff --git a/aria/parser/utils/threading.py b/aria/parser/utils/threading.py
new file mode 100644
index 0000000..575d011
--- /dev/null
+++ b/aria/parser/utils/threading.py
@@ -0,0 +1,252 @@
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from __future__ import absolute_import  # so we can import standard 'threading'
+
+import itertools
+import multiprocessing
+from threading import (Thread, Lock)
+from Queue import (Queue, Full, Empty)
+
+from .exceptions import print_exception
+
+class ExecutorException(Exception):
+    pass
+
+class DaemonThread(Thread):
+    def __init__(self, *args, **kwargs):
+        super(DaemonThread, self).__init__(*args, **kwargs)
+        self.daemon = True
+
+    def run(self):
+        """
+        We're overriding `Thread.run` in order to avoid annoying (but harmless) error
+        messages during shutdown. The problem is that CPython nullifies the
+        global state _before_ shutting down daemon threads, so that exceptions
+        might happen, and then `Thread.__bootstrap_inner` prints them out.
+
+        Our solution is to swallow these exceptions here.
+
+        The side effect is that uncaught exceptions in our own thread code will _not_
+        be printed out as usual, so it's our responsibility to catch them in our
+        code.
+        """
+
+        try:
+            super(DaemonThread, self).run()
+        except SystemExit as e:
+            # This exception should be bubbled up
+            raise e
+        except BaseException:
+            # Exceptions might occur in daemon threads during interpreter shutdown
+            pass
+
+# https://gist.github.com/tliron/81dd915166b0bfc64be08b4f8e22c835
+class FixedThreadPoolExecutor(object):
+    """
+    Executes tasks in a fixed thread pool.
+
+    Makes sure to gather all returned results and thrown exceptions in one place, in order of task
+    submission.
+
+    Example::
+
+        def sum(arg1, arg2):
+            return arg1 + arg2
+
+        executor = FixedThreadPoolExecutor(10)
+        try:
+            for value in range(100):
+                executor.submit(sum, value, value)
+            executor.drain()
+        except:
+            executor.close()
+        executor.raise_first()
+        print executor.returns
+
+    You can also use it with the Python "with" keyword, in which case you don't need to call "close"
+    explicitly::
+
+        with FixedThreadPoolExecutor(10) as executor:
+            for value in range(100):
+                executor.submit(sum, value, value)
+            executor.drain()
+            executor.raise_first()
+            print executor.returns
+    """
+
+    _CYANIDE = object()  # Special task marker used to kill worker threads.
+
+    def __init__(self,
+                 size=multiprocessing.cpu_count() * 2 + 1,
+                 timeout=None,
+                 print_exceptions=False):
+        """
+        :param size: Number of threads in the pool (fixed).
+        :param timeout: Timeout in seconds for all
+               blocking operations. (Defaults to none, meaning no timeout)
+        :param print_exceptions: Set to true in order to
+               print exceptions from tasks. (Defaults to false)
+        """
+
+        self.size = size
+        self.timeout = timeout
+        self.print_exceptions = print_exceptions
+
+        self._tasks = Queue()
+        self._returns = {}
+        self._exceptions = {}
+        self._id_creator = itertools.count()
+        self._lock = Lock() # for console output
+
+        self._workers = []
+        for index in range(size):
+            worker = DaemonThread(
+                name='%s%d' % (self.__class__.__name__, index),
+                target=self._thread_worker)
+            worker.start()
+            self._workers.append(worker)
+
+    def submit(self, func, *args, **kwargs):
+        """
+        Submit a task for execution.
+
+        The task will be called ASAP on the next available worker thread in the pool.
+
+        Will raise an :class:`ExecutorException` if the task cannot be submitted.
+        """
+
+        try:
+            self._tasks.put((self._id_creator.next(), func, args, kwargs), timeout=self.timeout)
+        except Full:
+            raise ExecutorException('cannot submit task: queue is full')
+
+    def close(self):
+        """
+        Blocks until all current tasks finish execution and all worker threads are dead.
+
+        You cannot submit tasks anymore after calling this.
+
+        This is called automatically upon exit if you are using the "with" keyword.
+        """
+
+        self.drain()
+        while self.is_alive:
+            try:
+                self._tasks.put(self._CYANIDE, timeout=self.timeout)
+            except Full:
+                raise ExecutorException('cannot close executor: a thread seems to be hanging')
+        self._workers = None
+
+    def drain(self):
+        """
+        Blocks until all current tasks finish execution, but leaves the worker threads alive.
+        """
+
+        self._tasks.join()  # oddly, the API does not support a timeout parameter
+
+    @property
+    def is_alive(self):
+        """
+        True if any of the worker threads are alive.
+        """
+
+        for worker in self._workers:
+            if worker.is_alive():
+                return True
+        return False
+
+    @property
+    def returns(self):
+        """
+        The returned values from all tasks, in order of submission.
+        """
+
+        return [self._returns[k] for k in sorted(self._returns)]
+
+    @property
+    def exceptions(self):
+        """
+        The raised exceptions from all tasks, in order of submission.
+        """
+
+        return [self._exceptions[k] for k in sorted(self._exceptions)]
+
+    def raise_first(self):
+        """
+        If exceptions were thrown by any task, then the first one will be raised.
+
+        This is rather arbitrary: proper handling would involve iterating all the
+        exceptions. However, if you want to use the "raise" mechanism, you are
+        limited to raising only one of them.
+        """
+
+        exceptions = self.exceptions
+        if exceptions:
+            raise exceptions[0]
+
+    def _thread_worker(self):
+        while True:
+            if not self._execute_next_task():
+                break
+
+    def _execute_next_task(self):
+        try:
+            task = self._tasks.get(timeout=self.timeout)
+        except Empty:
+            # Happens if timeout is reached
+            return True
+        if task == self._CYANIDE:
+            # Time to die :(
+            return False
+        self._execute_task(*task)
+        return True
+
+    def _execute_task(self, task_id, func, args, kwargs):
+        try:
+            result = func(*args, **kwargs)
+            self._returns[task_id] = result
+        except Exception as e:
+            self._exceptions[task_id] = e
+            if self.print_exceptions:
+                with self._lock:
+                    print_exception(e)
+        self._tasks.task_done()
+
+    def __enter__(self):
+        return self
+
+    def __exit__(self, the_type, value, traceback):
+        self.close()
+        return False
+
+class LockedList(list):
+    """
+    A list that supports the "with" keyword with a built-in lock.
+
+    Though Python lists are thread-safe in that they will not raise exceptions
+    during concurrent access, they do not guarantee atomicity. This class will
+    let you gain atomicity when needed.
+    """
+
+    def __init__(self, *args, **kwargs):
+        super(LockedList, self).__init__(*args, **kwargs)
+        self.lock = Lock()
+
+    def __enter__(self):
+        return self.lock.__enter__()
+
+    def __exit__(self, the_type, value, traceback):
+        return self.lock.__exit__(the_type, value, traceback)
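
The FixedThreadPoolExecutor docstring above already carries usage examples; for LockedList, a
hedged sketch of the intended "with" pattern (the item and helper function are illustrative):

    from aria.parser.utils.threading import LockedList

    shared = LockedList()

    def add_once(item):
        with shared:                # acquires the built-in lock
            if item not in shared:  # check-then-append becomes atomic
                shared.append(item)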

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/6a4dc43f/aria/parser/utils/uris.py
----------------------------------------------------------------------
diff --git a/aria/parser/utils/uris.py b/aria/parser/utils/uris.py
new file mode 100644
index 0000000..1686517
--- /dev/null
+++ b/aria/parser/utils/uris.py
@@ -0,0 +1,28 @@
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import os
+import urlparse
+
+def as_file(uri):
+    """
+    If the URI is a file (either the :code:`file` scheme or no scheme), then returns the absolute
+    path. Otherwise, returns None.
+    """
+
+    url = urlparse.urlparse(uri)
+    if (not url.scheme) or (url.scheme == 'file'):
+        return os.path.abspath(url.path)
+    return None
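
A hedged sketch of as_file covering the three cases the docstring describes (the paths are
examples only):

    from aria.parser.utils.uris import as_file

    print as_file('file:///opt/blueprints/node.yaml')  # '/opt/blueprints/node.yaml'
    print as_file('blueprints/node.yaml')              # absolute path under the current directory
    print as_file('http://example.com/node.yaml')      # None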

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/6a4dc43f/aria/parser/validation/__init__.py
----------------------------------------------------------------------
diff --git a/aria/parser/validation/__init__.py b/aria/parser/validation/__init__.py
new file mode 100644
index 0000000..fead43b
--- /dev/null
+++ b/aria/parser/validation/__init__.py
@@ -0,0 +1,21 @@
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from .issue import Issue
+from .context import ValidationContext
+
+__all__ = (
+    'ValidationContext',
+    'Issue')

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/6a4dc43f/aria/parser/validation/context.py
----------------------------------------------------------------------
diff --git a/aria/parser/validation/context.py b/aria/parser/validation/context.py
new file mode 100644
index 0000000..e0355e3
--- /dev/null
+++ b/aria/parser/validation/context.py
@@ -0,0 +1,79 @@
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from .issue import Issue
+from ..utils import (LockedList, FrozenList, print_exception, puts, Colored, indent, as_raw)
+
+class ValidationContext(object):
+    """
+    Properties:
+
+    * :code:`allow_unknown_fields`: When False (the default) will report an issue
+            if an unknown field is used
+    * :code:`allow_primitive_coersion`: When False (the default) will not attempt to
+            coerce primitive field types
+    * :code:`max_level`: Maximum validation level to report (default is all)
+    """
+
+    def __init__(self):
+        self.allow_unknown_fields = False
+        self.allow_primitive_coersion = False
+        self.max_level = Issue.ALL
+
+        self._issues = LockedList()
+
+    def report(self, message=None, exception=None, location=None, line=None,
+               column=None, locator=None, snippet=None, level=Issue.PLATFORM, issue=None):
+        if issue is None:
+            issue = Issue(message, exception, location, line, column, locator, snippet, level)
+
+        # Avoid duplicate issues
+        with self._issues:
+            for i in self._issues:
+                if str(i) == str(issue):
+                    return
+
+            self._issues.append(issue)
+
+    @property
+    def has_issues(self):
+        return len(self._issues) > 0
+
+    @property
+    def issues(self):
+        issues = [i for i in self._issues if i.level <= self.max_level]
+        issues.sort(key=lambda i: (i.level, i.location, i.line, i.column, i.message))
+        return FrozenList(issues)
+
+    @property
+    def issues_as_raw(self):
+        return [as_raw(i) for i in self.issues]
+
+    def dump_issues(self):
+        issues = self.issues
+        if issues:
+            puts(Colored.blue('Validation issues:', bold=True))
+            with indent(2):
+                for issue in issues:
+                    puts(Colored.blue(issue.heading_as_str))
+                    details = issue.details_as_str
+                    if details:
+                        with indent(3):
+                            puts(details)
+                    if issue.exception is not None:
+                        with indent(3):
+                            print_exception(issue.exception)
+            return True
+        return False
