Generifying ARIA models

Project: http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/commit/6e1f1260
Tree: http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/tree/6e1f1260
Diff: http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/diff/6e1f1260

Branch: refs/heads/ARIA-39-Genericize-storage-models
Commit: 6e1f1260f2e1ed10c4f44e2f4dd3dd165e58c9ba
Parents: 5cf84ee
Author: mxmrlv <mxm...@gmail.com>
Authored: Mon Dec 12 00:50:09 2016 +0200
Committer: mxmrlv <mxm...@gmail.com>
Committed: Tue Dec 20 16:59:50 2016 +0200

----------------------------------------------------------------------
 aria/__init__.py                                |  25 +-
 aria/orchestrator/context/workflow.py           |  10 +-
 aria/orchestrator/workflows/api/task.py         |  10 +-
 aria/orchestrator/workflows/builtin/heal.py     |  11 +-
 aria/orchestrator/workflows/core/engine.py      |  17 +-
 aria/orchestrator/workflows/core/task.py        |  23 +-
 aria/storage/__init__.py                        |  12 +-
 aria/storage/base_model.py                      | 677 +++++++++++++++++++
 aria/storage/core.py                            |   2 +-
 aria/storage/model.py                           | 110 +++
 aria/storage/models.py                          | 575 ----------------
 aria/storage/structure.py                       | 180 +++++
 aria/storage/structures.py                      | 244 -------
 aria/storage/type.py                            | 123 ++++
 tests/mock/models.py                            |  49 +-
 tests/orchestrator/context/test_toolbelt.py     |   8 +-
 .../orchestrator/workflows/builtin/test_heal.py |   4 +-
 .../orchestrator/workflows/core/test_engine.py  |  10 +-
 .../workflows/executor/test_executor.py         |   7 +-
 .../workflows/executor/test_process_executor.py |   6 +-
 tests/storage/__init__.py                       |   7 +-
 tests/storage/test_model_storage.py             |  64 +-
 tests/storage/test_models.py                    | 195 +++---
 23 files changed, 1306 insertions(+), 1063 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/6e1f1260/aria/__init__.py
----------------------------------------------------------------------
diff --git a/aria/__init__.py b/aria/__init__.py
index 0f7bec6..a39d58b 100644
--- a/aria/__init__.py
+++ b/aria/__init__.py
@@ -64,22 +64,21 @@ def application_model_storage(api, api_kwargs=None):
     Initiate model storage for the supplied storage driver
     """
     models = [
-        storage.models.Plugin,
-        storage.models.ProviderContext,
+        storage.model.Plugin,
 
-        storage.models.Blueprint,
-        storage.models.Deployment,
-        storage.models.DeploymentUpdate,
-        storage.models.DeploymentUpdateStep,
-        storage.models.DeploymentModification,
+        storage.model.Blueprint,
+        storage.model.Deployment,
+        storage.model.DeploymentUpdate,
+        storage.model.DeploymentUpdateStep,
+        storage.model.DeploymentModification,
 
-        storage.models.Node,
-        storage.models.NodeInstance,
-        storage.models.Relationship,
-        storage.models.RelationshipInstance,
+        storage.model.Node,
+        storage.model.NodeInstance,
+        storage.model.Relationship,
+        storage.model.RelationshipInstance,
 
-        storage.models.Execution,
-        storage.models.Task,
+        storage.model.Execution,
+        storage.model.Task,
     ]
     # if api not in _model_storage:
     return storage.ModelStorage(api, items=models, api_kwargs=api_kwargs or {})
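
For reference, a minimal sketch of how application_model_storage() would be wired up against the SQLAlchemy model API after this change; the SQLAlchemyModelAPI class name and the engine/session keyword arguments are assumptions based on aria.storage.sql_mapi and are not part of this diff:

    from sqlalchemy import create_engine
    from sqlalchemy.orm import sessionmaker

    from aria import application_model_storage
    from aria.storage import sql_mapi

    engine = create_engine('sqlite:///:memory:')        # in-memory DB, for illustration only
    session = sessionmaker(bind=engine)()

    # Builds a ModelStorage that registers every class listed above
    # (storage.model.Plugin, Blueprint, Deployment, ...).
    model_storage = application_model_storage(
        sql_mapi.SQLAlchemyModelAPI,                     # assumed MAPI class name
        api_kwargs=dict(engine=engine, session=session)  # assumed kwargs
    )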

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/6e1f1260/aria/orchestrator/context/workflow.py
----------------------------------------------------------------------
diff --git a/aria/orchestrator/context/workflow.py b/aria/orchestrator/context/workflow.py
index e2e8e25..7f53a9c 100644
--- a/aria/orchestrator/context/workflow.py
+++ b/aria/orchestrator/context/workflow.py
@@ -46,8 +46,7 @@ class WorkflowContext(BaseContext):
         execution_cls = self.model.execution.model_cls
         now = datetime.utcnow()
         execution = self.model.execution.model_cls(
-            blueprint_id=self.blueprint.id,
-            deployment_id=self.deployment.id,
+            deployment=self.deployment,
             workflow_name=self._workflow_name,
             created_at=now,
             status=execution_cls.PENDING,
@@ -75,9 +74,11 @@ class WorkflowContext(BaseContext):
         """
         Iterator over nodes
         """
+        key = 'deployment_{0}'.format(self.model.node_instance.model_cls.name_column_name())
+
         return self.model.node.iter(
             filters={
-                'deployment_id': self.deployment.id
+                key: getattr(self.deployment, self.deployment.name_column_name())
             }
         )
 
@@ -86,9 +87,10 @@ class WorkflowContext(BaseContext):
         """
         Iterator over node instances
         """
+        key = 'deployment_{0}'.format(self.model.node_instance.model_cls.name_column_name())
         return self.model.node_instance.iter(
             filters={
-                'deployment_id': self.deployment.id
+                key: getattr(self.deployment, self.deployment.name_column_name())
             }
         )
 

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/6e1f1260/aria/orchestrator/workflows/api/task.py
----------------------------------------------------------------------
diff --git a/aria/orchestrator/workflows/api/task.py b/aria/orchestrator/workflows/api/task.py
index 4f025b6..70324a6 100644
--- a/aria/orchestrator/workflows/api/task.py
+++ b/aria/orchestrator/workflows/api/task.py
@@ -18,7 +18,7 @@ Provides the tasks to be entered into the task graph
 """
 from uuid import uuid4
 
-from aria.storage import models
+from aria.storage import model
 
 from ... import context
 from .. import exceptions
@@ -75,8 +75,8 @@ class OperationTask(BaseTask):
         :param actor: the operation host on which this operation is registered.
         :param inputs: operation inputs.
         """
-        assert isinstance(actor, (models.NodeInstance,
-                                  models.RelationshipInstance))
+        assert isinstance(actor, (model.NodeInstance,
+                                  model.RelationshipInstance))
         super(OperationTask, self).__init__()
         self.actor = actor
         self.name = '{name}.{actor.id}'.format(name=name, actor=actor)
@@ -98,7 +98,7 @@ class OperationTask(BaseTask):
         :param instance: the node of which this operation belongs to.
         :param name: the name of the operation.
         """
-        assert isinstance(instance, models.NodeInstance)
+        assert isinstance(instance, model.NodeInstance)
         return cls._instance(instance=instance,
                              name=name,
                              operation_details=instance.node.operations[name],
@@ -118,7 +118,7 @@ class OperationTask(BaseTask):
         with 'source_operations' and 'target_operations'
         :param inputs any additional inputs to the operation
         """
-        assert isinstance(instance, models.RelationshipInstance)
+        assert isinstance(instance, model.RelationshipInstance)
         if operation_end not in [cls.TARGET_OPERATION, cls.SOURCE_OPERATION]:
             raise exceptions.TaskException('The operation end should be {0} or {1}'.format(
                 cls.TARGET_OPERATION, cls.SOURCE_OPERATION

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/6e1f1260/aria/orchestrator/workflows/builtin/heal.py
----------------------------------------------------------------------
diff --git a/aria/orchestrator/workflows/builtin/heal.py b/aria/orchestrator/workflows/builtin/heal.py
index de07095..406a42a 100644
--- a/aria/orchestrator/workflows/builtin/heal.py
+++ b/aria/orchestrator/workflows/builtin/heal.py
@@ -34,7 +34,7 @@ def heal(ctx, graph, node_instance_id):
     :return:
     """
     failing_node = ctx.model.node_instance.get(node_instance_id)
-    host_node = ctx.model.node_instance.get(failing_node.host_id)
+    host_node = ctx.model.node_instance.get(failing_node.host.id)
     failed_node_instance_subgraph = _get_contained_subgraph(ctx, host_node)
     failed_node_instance_ids = list(n.id for n in failed_node_instance_subgraph)
 
@@ -163,10 +163,11 @@ def heal_install(ctx, graph, failing_node_instances, targeted_node_instances):
 
 
 def _get_contained_subgraph(context, host_node_instance):
-    contained_instances = [node_instance
-                           for node_instance in context.node_instances
-                           if node_instance.host_id == host_node_instance.id and
-                           node_instance.id != node_instance.host_id]
+    contained_instances = [
+        node_instance
+        for node_instance in context.node_instances
+        if node_instance == host_node_instance and node_instance != node_instance
+        ]
     result = [host_node_instance]
 
     if not contained_instances:

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/6e1f1260/aria/orchestrator/workflows/core/engine.py
----------------------------------------------------------------------
diff --git a/aria/orchestrator/workflows/core/engine.py b/aria/orchestrator/workflows/core/engine.py
index 7886b7a..47269a3 100644
--- a/aria/orchestrator/workflows/core/engine.py
+++ b/aria/orchestrator/workflows/core/engine.py
@@ -23,7 +23,7 @@ from datetime import datetime
 import networkx
 
 from aria import logger
-from aria.storage import models
+from aria.storage import model
 from aria.orchestrator import events
 
 from .. import exceptions
@@ -82,18 +82,18 @@ class Engine(logger.LoggerMixin):
         events.on_cancelling_workflow_signal.send(self._workflow_context)
 
     def _is_cancel(self):
-        return self._workflow_context.execution.status in [models.Execution.CANCELLING,
-                                                           models.Execution.CANCELLED]
+        return self._workflow_context.execution.status in [model.Execution.CANCELLING,
+                                                           model.Execution.CANCELLED]
 
     def _executable_tasks(self):
         now = datetime.utcnow()
         return (task for task in self._tasks_iter()
-                if task.status in models.Task.WAIT_STATES and
+                if task.status in model.Task.WAIT_STATES and
                 task.due_at <= now and
                 not self._task_has_dependencies(task))
 
     def _ended_tasks(self):
-        return (task for task in self._tasks_iter() if task.status in models.Task.END_STATES)
+        return (task for task in self._tasks_iter() if task.status in model.Task.END_STATES)
 
     def _task_has_dependencies(self, task):
         return len(self._execution_graph.pred.get(task.id, {})) > 0
@@ -105,18 +105,19 @@ class Engine(logger.LoggerMixin):
         for _, data in self._execution_graph.nodes_iter(data=True):
             task = data['task']
             if isinstance(task, engine_task.OperationTask):
-                self._workflow_context.model.task.refresh(task.model_task)
+                if task.model_task.status not in model.Task.END_STATES:
+                    self._workflow_context.model.task.refresh(task.model_task)
             yield task
 
     def _handle_executable_task(self, task):
         if isinstance(task, engine_task.StubTask):
-            task.status = models.Task.SUCCESS
+            task.status = model.Task.SUCCESS
         else:
             events.sent_task_signal.send(task)
             self._executor.execute(task)
 
     def _handle_ended_tasks(self, task):
-        if task.status == models.Task.FAILED and not task.ignore_failure:
+        if task.status == model.Task.FAILED and not task.ignore_failure:
             raise exceptions.ExecutorException('Workflow failed')
         else:
             self._execution_graph.remove_node(task.id)

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/6e1f1260/aria/orchestrator/workflows/core/task.py
----------------------------------------------------------------------
diff --git a/aria/orchestrator/workflows/core/task.py b/aria/orchestrator/workflows/core/task.py
index 08cf26e..08eebf0 100644
--- a/aria/orchestrator/workflows/core/task.py
+++ b/aria/orchestrator/workflows/core/task.py
@@ -24,7 +24,7 @@ from functools import (
 )
 
 from aria import logger
-from aria.storage import models
+from aria.storage import model
 from aria.orchestrator.context import operation as operation_context
 
 from .. import exceptions
@@ -66,7 +66,7 @@ class StubTask(BaseTask):
 
     def __init__(self, *args, **kwargs):
         super(StubTask, self).__init__(*args, **kwargs)
-        self.status = models.Task.PENDING
+        self.status = model.Task.PENDING
         self.due_at = datetime.utcnow()
 
 
@@ -106,35 +106,36 @@ class OperationTask(BaseTask):
     def __init__(self, api_task, *args, **kwargs):
         super(OperationTask, self).__init__(id=api_task.id, **kwargs)
         self._workflow_context = api_task._workflow_context
-        model = api_task._workflow_context.model
+        model_storage = api_task._workflow_context.model
 
-        base_task_model = model.task.model_cls
-        if isinstance(api_task.actor, models.NodeInstance):
+        base_task_model = model_storage.task.model_cls
+        if isinstance(api_task.actor, model.NodeInstance):
             context_class = operation_context.NodeOperationContext
             task_model_cls = base_task_model.as_node_instance
-        elif isinstance(api_task.actor, models.RelationshipInstance):
+        elif isinstance(api_task.actor, model.RelationshipInstance):
             context_class = operation_context.RelationshipOperationContext
             task_model_cls = base_task_model.as_relationship_instance
         else:
+            raise RuntimeError('No operation context could be created for {actor.model_cls}'
                                .format(actor=api_task.actor))
         plugin = api_task.plugin
-        plugins = model.plugin.list(filters={'package_name': plugin.get('package_name', ''),
-                                             'package_version': plugin.get('package_version', '')},
-                                    include=['id'])
+        plugins = model_storage.plugin.list(filters={
+            'package_name': plugin.get('package_name', ''),
+            'package_version': plugin.get('package_version', '')
+        }, include=['id'])
         # Validation during installation ensures that at most one plugin can exists with provided
         # package_name and package_version
         plugin_id = plugins[0].id if plugins else None
         operation_task = task_model_cls(
             name=api_task.name,
             operation_mapping=api_task.operation_mapping,
-            instance_id=api_task.actor.id,
+            instance_fk=api_task.actor.id,
             inputs=api_task.inputs,
             status=base_task_model.PENDING,
             max_attempts=api_task.max_attempts,
             retry_interval=api_task.retry_interval,
             ignore_failure=api_task.ignore_failure,
-            plugin_id=plugin_id
+            plugin_fk=plugin_id
         )
         self._workflow_context.model.task.put(operation_task)
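
The instance_fk and plugin_fk keyword arguments above map onto the new foreign-key columns declared in base_model.py further down. A minimal, hedged sketch of the as_node_instance factory this code relies on (the node instance and the model storage handle are assumed to exist already; the name and mapping strings are illustrative):

    from aria.storage import model

    # 'node_instance' and 'model_storage' are assumed to have been created elsewhere.
    operation_task = model.Task.as_node_instance(
        instance_fk=node_instance.id,
        name='create',                                     # illustrative task name
        operation_mapping='my_plugin.operations.create',   # illustrative mapping
        inputs={},
        status=model.Task.PENDING,
    )
    model_storage.task.put(operation_task)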
 

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/6e1f1260/aria/storage/__init__.py
----------------------------------------------------------------------
diff --git a/aria/storage/__init__.py b/aria/storage/__init__.py
index fd69d47..a1c07d7 100644
--- a/aria/storage/__init__.py
+++ b/aria/storage/__init__.py
@@ -45,19 +45,19 @@ from .core import (
 from . import (
     exceptions,
     api,
-    structures,
+    structure,
     core,
     filesystem_rapi,
     sql_mapi,
-    models
+    model
 )
 
 __all__ = (
     'exceptions',
-    'structures',
-    # 'Storage',
-    # 'ModelStorage',
-    # 'ResourceStorage',
+    'structure',
+    'Storage',
+    'ModelStorage',
+    'ResourceStorage',
     'filesystem_rapi',
     'sql_mapi',
     'api'

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/6e1f1260/aria/storage/base_model.py
----------------------------------------------------------------------
diff --git a/aria/storage/base_model.py b/aria/storage/base_model.py
new file mode 100644
index 0000000..d5adb4e
--- /dev/null
+++ b/aria/storage/base_model.py
@@ -0,0 +1,677 @@
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""
+Aria's storage.models module
+Path: aria.storage.models
+
+models module holds aria's models.
+
+classes:
+    * Field - represents a single field.
+    * IterField - represents an iterable field.
+    * Model - abstract model implementation.
+    * Snapshot - snapshots implementation model.
+    * Deployment - deployment implementation model.
+    * DeploymentUpdateStep - deployment update step implementation model.
+    * DeploymentUpdate - deployment update implementation model.
+    * DeploymentModification - deployment modification implementation model.
+    * Execution - execution implementation model.
+    * Node - node implementation model.
+    * Relationship - relationship implementation model.
+    * NodeInstance - node instance implementation model.
+    * RelationshipInstance - relationship instance implementation model.
+    * Plugin - plugin implementation model.
+"""
+from collections import namedtuple
+from datetime import datetime
+
+from sqlalchemy.ext.associationproxy import association_proxy
+from sqlalchemy.ext.declarative import declared_attr
+from sqlalchemy import (
+    Column,
+    Integer,
+    Text,
+    DateTime,
+    Boolean,
+    Enum,
+    String,
+    Float,
+    orm,
+)
+
+from .structure import ModelMixin
+
+from .type import (
+    List,
+    Dict
+)
+
+__all__ = (
+    'BlueprintBase',
+    'DeploymentBase',
+    'DeploymentUpdateStepBase',
+    'DeploymentUpdateBase',
+    'DeploymentModificationBase',
+    'ExecutionBase',
+    'NodeBase',
+    'RelationshipBase',
+    'NodeInstanceBase',
+    'RelationshipInstanceBase',
+    'PluginBase',
+    'TaskBase'
+)
+
+#pylint: disable=no-self-argument, abstract-method
+
+
+class BlueprintBase(ModelMixin):
+    """
+    Blueprint model representation.
+    """
+    __tablename__ = 'blueprints'
+
+    created_at = Column(DateTime, nullable=False, index=True)
+    main_file_name = Column(Text, nullable=False)
+    plan = Column(Dict, nullable=False)
+    updated_at = Column(DateTime)
+    description = Column(Text)
+
+
+class DeploymentBase(ModelMixin):
+    """
+    Deployment model representation.
+    """
+    __tablename__ = 'deployments'
+
+    _private_fields = ['blueprint_fk']
+
+    created_at = Column(DateTime, nullable=False, index=True)
+    description = Column(Text)
+    inputs = Column(Dict)
+    groups = Column(Dict)
+    permalink = Column(Text)
+    policy_triggers = Column(Dict)
+    policy_types = Column(Dict)
+    outputs = Column(Dict)
+    scaling_groups = Column(List)
+    updated_at = Column(DateTime)
+    workflows = Column(List)
+
+    @declared_attr
+    def blueprint_fk(cls):
+        return cls.foreign_key(BlueprintBase, nullable=False)
+
+    @declared_attr
+    def blueprint(cls):
+        return cls.one_to_many_relationship('blueprint_fk')
+
+    @declared_attr
+    def blueprint_name(cls):
+        return association_proxy('blueprint', cls.name_column_name())
+
+
+class ExecutionBase(ModelMixin):
+    """
+    Execution model representation.
+    """
+    # Needed only for pylint. the id will be populated by sqlalcehmy and the proper column.
+    __tablename__ = 'executions'
+    _private_fields = ['deployment_fk']
+
+    TERMINATED = 'terminated'
+    FAILED = 'failed'
+    CANCELLED = 'cancelled'
+    PENDING = 'pending'
+    STARTED = 'started'
+    CANCELLING = 'cancelling'
+    FORCE_CANCELLING = 'force_cancelling'
+
+    STATES = [TERMINATED, FAILED, CANCELLED, PENDING, STARTED, CANCELLING, FORCE_CANCELLING]
+    END_STATES = [TERMINATED, FAILED, CANCELLED]
+    ACTIVE_STATES = [state for state in STATES if state not in END_STATES]
+
+    VALID_TRANSITIONS = {
+        PENDING: [STARTED, CANCELLED],
+        STARTED: END_STATES + [CANCELLING],
+        CANCELLING: END_STATES
+    }
+
+    @orm.validates('status')
+    def validate_status(self, key, value):
+        """Validation function that verifies execution status transitions are OK"""
+        try:
+            current_status = getattr(self, key)
+        except AttributeError:
+            return
+        valid_transitions = ExecutionBase.VALID_TRANSITIONS.get(current_status, [])
+        if all([current_status is not None,
+                current_status != value,
+                value not in valid_transitions]):
+            raise ValueError('Cannot change execution status from {current} to {new}'.format(
+                current=current_status,
+                new=value))
+        return value
+
+    created_at = Column(DateTime, index=True)
+    started_at = Column(DateTime, nullable=True, index=True)
+    ended_at = Column(DateTime, nullable=True, index=True)
+    error = Column(Text, nullable=True)
+    is_system_workflow = Column(Boolean, nullable=False, default=False)
+    parameters = Column(Dict)
+    status = Column(Enum(*STATES, name='execution_status'), default=PENDING)
+    workflow_name = Column(Text)
+
+    @declared_attr
+    def blueprint(cls):
+        return association_proxy('deployment', 'blueprint')
+
+    @declared_attr
+    def deployment_fk(cls):
+        return cls.foreign_key(DeploymentBase, nullable=True)
+
+    @declared_attr
+    def deployment(cls):
+        return cls.one_to_many_relationship('deployment_fk')
+
+    @declared_attr
+    def deployment_name(cls):
+        return association_proxy('deployment', cls.name_column_name())
+
+    @declared_attr
+    def blueprint_name(cls):
+        return association_proxy('deployment', 'blueprint_name')
+
+    def __str__(self):
+        return '<{0} id=`{1}` (status={2})>'.format(
+            self.__class__.__name__,
+            getattr(self, self.name_column_name()),
+            self.status
+        )
+
+
+class DeploymentUpdateBase(ModelMixin):
+    """
+    Deployment update model representation.
+    """
+    # Needed only for pylint. the id will be populated by sqlalcehmy and the proper column.
+    steps = None
+
+    __tablename__ = 'deployment_updates'
+
+    _private_fields = ['execution_fk', 'deployment_fk']
+
+    created_at = Column(DateTime, nullable=False, index=True)
+    deployment_plan = Column(Dict, nullable=False)
+    deployment_update_node_instances = Column(List)
+    deployment_update_deployment = Column(Dict)
+    deployment_update_nodes = Column(List)
+    modified_entity_ids = Column(Dict)
+    state = Column(Text)
+
+    @declared_attr
+    def execution_fk(cls):
+        return cls.foreign_key(ExecutionBase, nullable=True)
+
+    @declared_attr
+    def execution(cls):
+        return cls.one_to_many_relationship('execution_fk')
+
+    @declared_attr
+    def execution_name(cls):
+        return association_proxy('execution', cls.name_column_name())
+
+    @declared_attr
+    def deployment_fk(cls):
+        return cls.foreign_key(DeploymentBase)
+
+    @declared_attr
+    def deployment(cls):
+        return cls.one_to_many_relationship('deployment_fk')
+
+    @declared_attr
+    def deployment_name(cls):
+        return association_proxy('deployment', cls.name_column_name())
+
+    def to_dict(self, suppress_error=False, **kwargs):
+        dep_update_dict = super(DeploymentUpdateBase, self).to_dict(suppress_error)     #pylint: disable=no-member
+        # Taking care of the fact the DeploymentSteps are _BaseModels
+        dep_update_dict['steps'] = [step.to_dict() for step in self.steps]
+        return dep_update_dict
+
+
+class DeploymentUpdateStepBase(ModelMixin):
+    """
+    Deployment update step model representation.
+    """
+    # Needed only for pylint. the id will be populated by sqlalcehmy and the proper column.
+    __tablename__ = 'deployment_update_steps'
+    _private_fields = ['deployment_update_fk']
+
+    _action_types = namedtuple('ACTION_TYPES', 'ADD, REMOVE, MODIFY')
+    ACTION_TYPES = _action_types(ADD='add', REMOVE='remove', MODIFY='modify')
+    _entity_types = namedtuple(
+        'ENTITY_TYPES',
+        'NODE, RELATIONSHIP, PROPERTY, OPERATION, WORKFLOW, OUTPUT, DESCRIPTION, GROUP, '
+        'POLICY_TYPE, POLICY_TRIGGER, PLUGIN')
+    ENTITY_TYPES = _entity_types(
+        NODE='node',
+        RELATIONSHIP='relationship',
+        PROPERTY='property',
+        OPERATION='operation',
+        WORKFLOW='workflow',
+        OUTPUT='output',
+        DESCRIPTION='description',
+        GROUP='group',
+        POLICY_TYPE='policy_type',
+        POLICY_TRIGGER='policy_trigger',
+        PLUGIN='plugin'
+    )
+
+    action = Column(Enum(*ACTION_TYPES, name='action_type'), nullable=False)
+    entity_id = Column(Text, nullable=False)
+    entity_type = Column(Enum(*ENTITY_TYPES, name='entity_type'), nullable=False)
+
+    @declared_attr
+    def deployment_update_fk(cls):
+        return cls.foreign_key(DeploymentUpdateBase)
+
+    @declared_attr
+    def deployment_update(cls):
+        return cls.one_to_many_relationship('deployment_update_fk', backreference='steps')
+
+    @declared_attr
+    def deployment_update_name(cls):
+        return association_proxy('deployment_update', cls.name_column_name())
+
+    def __hash__(self):
+        return hash((getattr(self, self.id_column_name()), self.entity_id))
+
+    def __lt__(self, other):
+        """
+        the order is 'remove' < 'modify' < 'add'
+        :param other:
+        :return:
+        """
+        if not isinstance(other, self.__class__):
+            return not self >= other
+
+        if self.action != other.action:
+            if self.action == 'remove':
+                return_value = True
+            elif self.action == 'add':
+                return_value = False
+            else:
+                return_value = other.action == 'add'
+            return return_value
+
+        if self.action == 'add':
+            return self.entity_type == 'node' and other.entity_type == 'relationship'
+        if self.action == 'remove':
+            return self.entity_type == 'relationship' and other.entity_type == 'node'
+        return False
+
+
+class DeploymentModificationBase(ModelMixin):
+    """
+    Deployment modification model representation.
+    """
+    __tablename__ = 'deployment_modifications'
+    _private_fields = ['deployment_fk']
+
+    STARTED = 'started'
+    FINISHED = 'finished'
+    ROLLEDBACK = 'rolledback'
+
+    STATES = [STARTED, FINISHED, ROLLEDBACK]
+    END_STATES = [FINISHED, ROLLEDBACK]
+
+    context = Column(Dict)
+    created_at = Column(DateTime, nullable=False, index=True)
+    ended_at = Column(DateTime, index=True)
+    modified_nodes = Column(Dict)
+    node_instances = Column(Dict)
+    status = Column(Enum(*STATES, name='deployment_modification_status'))
+
+    @declared_attr
+    def deployment_fk(cls):
+        return cls.foreign_key(DeploymentBase)
+
+    @declared_attr
+    def deployment(cls):
+        return cls.one_to_many_relationship('deployment_fk', backreference='modifications')
+
+    @declared_attr
+    def deployment_name(cls):
+        return association_proxy('deployment', cls.name_column_name())
+
+
+class NodeBase(ModelMixin):
+    """
+    Node model representation.
+    """
+    __tablename__ = 'nodes'
+
+    # See base class for an explanation on these properties
+    is_id_unique = False
+
+    _private_fields = ['blueprint_fk', 'host_fk']
+
+    @declared_attr
+    def host_fk(cls):
+        return cls.foreign_key(NodeBase, nullable=True)
+
+    @declared_attr
+    def host(cls):
+        return cls.relationship_to_self('host_fk')
+
+    @declared_attr
+    def host_name(cls):
+        return association_proxy('host', cls.name_column_name())
+
+    @declared_attr
+    def deployment_fk(cls):
+        return cls.foreign_key(DeploymentBase)
+
+    @declared_attr
+    def deployment(cls):
+        return cls.one_to_many_relationship('deployment_fk')
+
+    @declared_attr
+    def deployment_name(cls):
+        return association_proxy('deployment', cls.name_column_name())
+
+    @declared_attr
+    def blueprint_name(cls):
+        return association_proxy('deployment', 'blueprint_{0}'.format(cls.name_column_name()))
+
+    deploy_number_of_instances = Column(Integer, nullable=False)
+    max_number_of_instances = Column(Integer, nullable=False)
+    min_number_of_instances = Column(Integer, nullable=False)
+    number_of_instances = Column(Integer, nullable=False)
+    planned_number_of_instances = Column(Integer, nullable=False)
+    plugins = Column(List)
+    properties = Column(Dict)
+    operations = Column(Dict)
+    type = Column(Text, nullable=False, index=True)
+    type_hierarchy = Column(List)
+
+
+class RelationshipBase(ModelMixin):
+    """
+    Relationship model representation.
+    """
+    __tablename__ = 'relationships'
+
+    _private_fields = ['source_node_fk', 'target_node_fk']
+
+    @declared_attr
+    def source_node_fk(cls):
+        return cls.foreign_key(NodeBase)
+
+    @declared_attr
+    def source_node(cls):
+        return cls.one_to_many_relationship('source_node_fk',
+                                            backreference='outbound_relationships')
+
+    @declared_attr
+    def source_name(cls):
+        return association_proxy('source_node', cls.name_column_name())
+
+    @declared_attr
+    def target_node_fk(cls):
+        return cls.foreign_key(NodeBase)
+
+    @declared_attr
+    def target_node(cls):
+        return cls.one_to_many_relationship('target_node_fk', backreference='inbound_relationships')
+
+    @declared_attr
+    def target_name(cls):
+        return association_proxy('target_node', cls.name_column_name())
+
+    source_interfaces = Column(Dict)
+    source_operations = Column(Dict, nullable=False)
+    target_interfaces = Column(Dict)
+    target_operations = Column(Dict, nullable=False)
+    type = Column(String, nullable=False)
+    type_hierarchy = Column(List)
+    properties = Column(Dict)
+
+
+class NodeInstanceBase(ModelMixin):
+    """
+    Node instance model representation.
+    """
+    __tablename__ = 'node_instances'
+    _private_fields = ['node_fk', 'host_fk']
+
+    runtime_properties = Column(Dict)
+    scaling_groups = Column(List)
+    state = Column(Text, nullable=False)
+    version = Column(Integer, default=1)
+
+    @declared_attr
+    def host_fk(cls):
+        return cls.foreign_key(NodeInstanceBase, nullable=True)
+
+    @declared_attr
+    def host(cls):
+        return cls.relationship_to_self('host_fk')
+
+    @declared_attr
+    def host_name(cls):
+        return association_proxy('host', cls.name_column_name())
+
+    @declared_attr
+    def deployment(cls):
+        return association_proxy('node', 'deployment')
+
+    @declared_attr
+    def deployment_name(cls):
+        return association_proxy('node', 'deployment_name')
+
+    @declared_attr
+    def node_fk(cls):
+        return cls.foreign_key(NodeBase, nullable=True)
+
+    @declared_attr
+    def node(cls):
+        return cls.one_to_many_relationship('node_fk')
+
+    @declared_attr
+    def node_name(cls):
+        return association_proxy('node', cls.name_column_name())
+
+
+class RelationshipInstanceBase(ModelMixin):
+    """
+    Relationship instance model representation.
+    """
+    __tablename__ = 'relationship_instances'
+    _private_fields = ['relationship_storage_fk',
+                       'source_node_instance_fk',
+                       'target_node_instance_fk']
+
+    @declared_attr
+    def source_node_instance_fk(cls):
+        return cls.foreign_key(NodeInstanceBase)
+
+    @declared_attr
+    def source_node_instance(cls):
+        return cls.one_to_many_relationship('source_node_instance_fk',
+                                            backreference='outbound_relationship_instances')
+
+    @declared_attr
+    def source_node_instance_name(cls):
+        return association_proxy('source_node_instance', cls.name_column_name())
+
+    @declared_attr
+    def target_node_instance_fk(cls):
+        return cls.foreign_key(NodeInstanceBase)
+
+    @declared_attr
+    def target_node_instance(cls):
+        return cls.one_to_many_relationship('target_node_instance_fk',
+                                            backreference='inbound_relationship_instances')
+
+    @declared_attr
+    def target_node_instance_name(cls):
+        return association_proxy('target_node_instance', cls.name_column_name())
+
+    @declared_attr
+    def relationship_fk(cls):
+        return cls.foreign_key(RelationshipBase)
+
+    @declared_attr
+    def relationship(cls):
+        return cls.one_to_many_relationship('relationship_fk')
+
+    @declared_attr
+    def relationship_name(cls):
+        return association_proxy('relationship', cls.name_column_name())
+
+
+class PluginBase(ModelMixin):
+    """
+    Plugin model representation.
+    """
+    __tablename__ = 'plugins'
+
+    archive_name = Column(Text, nullable=False, index=True)
+    distribution = Column(Text)
+    distribution_release = Column(Text)
+    distribution_version = Column(Text)
+    package_name = Column(Text, nullable=False, index=True)
+    package_source = Column(Text)
+    package_version = Column(Text)
+    supported_platform = Column(Text)
+    supported_py_versions = Column(List)
+    uploaded_at = Column(DateTime, nullable=False, index=True)
+    wheels = Column(List, nullable=False)
+
+
+class TaskBase(ModelMixin):
+    """
+    A Model which represents an task
+    """
+    __tablename__ = 'tasks'
+    _private_fields = ['node_instance_fk', 'relationship_instance_fk', 'execution_fk']
+
+    @declared_attr
+    def node_instance_fk(cls):
+        return cls.foreign_key(NodeInstanceBase, nullable=True)
+
+    @declared_attr
+    def node_instance_name(cls):
+        return association_proxy('node_instance', cls.name_column_name())
+
+    @declared_attr
+    def node_instance(cls):
+        return cls.one_to_many_relationship('node_instance_fk')
+
+    @declared_attr
+    def relationship_instance_fk(cls):
+        return cls.foreign_key(RelationshipInstanceBase, nullable=True)
+
+    @declared_attr
+    def relationship_instance_name(cls):
+        return association_proxy('relationship_instance', cls.name_column_name())
+
+    @declared_attr
+    def relationship_instance(cls):
+        return cls.one_to_many_relationship('relationship_instance_fk')
+
+    @declared_attr
+    def plugin_fk(cls):
+        return cls.foreign_key(PluginBase, nullable=True)
+
+    @declared_attr
+    def plugin(cls):
+        return cls.one_to_many_relationship('plugin_fk')
+
+    @declared_attr
+    def plugin_name(cls):
+        return association_proxy('plugin', 'name')
+
+    @declared_attr
+    def execution_fk(cls):
+        return cls.foreign_key(ExecutionBase, nullable=True)
+
+    @declared_attr
+    def execution(cls):
+        return cls.one_to_many_relationship('execution_fk')
+
+    @declared_attr
+    def execution_name(cls):
+        return association_proxy('execution', cls.name_column_name())
+
+    PENDING = 'pending'
+    RETRYING = 'retrying'
+    SENT = 'sent'
+    STARTED = 'started'
+    SUCCESS = 'success'
+    FAILED = 'failed'
+    STATES = (
+        PENDING,
+        RETRYING,
+        SENT,
+        STARTED,
+        SUCCESS,
+        FAILED,
+    )
+
+    WAIT_STATES = [PENDING, RETRYING]
+    END_STATES = [SUCCESS, FAILED]
+
+    @orm.validates('max_attempts')
+    def validate_max_attempts(self, _, value):                                  # pylint: disable=no-self-use
+        """Validates that max attempts is either -1 or a positive number"""
+        if value < 1 and value != TaskBase.INFINITE_RETRIES:
+            raise ValueError('Max attempts can be either -1 (infinite) or any positive number. '
+                             'Got {value}'.format(value=value))
+        return value
+
+    INFINITE_RETRIES = -1
+
+    status = Column(Enum(*STATES), name='status', default=PENDING)
+
+    due_at = Column(DateTime, default=datetime.utcnow)
+    started_at = Column(DateTime, default=None)
+    ended_at = Column(DateTime, default=None)
+    max_attempts = Column(Integer, default=1)
+    retry_count = Column(Integer, default=0)
+    retry_interval = Column(Float, default=0)
+    ignore_failure = Column(Boolean, default=False)
+
+    # Operation specific fields
+    operation_mapping = Column(String)
+    inputs = Column(Dict)
+
+    @property
+    def actor(self):
+        """
+        Return the actor of the task
+        :return:
+        """
+        return self.node_instance or self.relationship_instance
+
+    @classmethod
+    def as_node_instance(cls, instance_fk, **kwargs):
+        return cls(node_instance_fk=instance_fk, **kwargs)
+
+    @classmethod
+    def as_relationship_instance(cls, instance_fk, **kwargs):
+        return cls(relationship_instance_fk=instance_fk, **kwargs)
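
The @orm.validates('status') hook on ExecutionBase enforces the VALID_TRANSITIONS table at attribute-assignment time. A small sketch of the intended behaviour, using the concrete Execution class defined in model.py below (required columns other than those shown are omitted or assumed):

    from datetime import datetime

    from aria.storage import model

    execution = model.Execution(created_at=datetime.utcnow(),
                                workflow_name='install',
                                status=model.Execution.PENDING)

    execution.status = model.Execution.STARTED     # PENDING -> STARTED: allowed
    execution.status = model.Execution.TERMINATED  # STARTED -> TERMINATED: allowed (end state)
    execution.status = model.Execution.STARTED     # TERMINATED -> STARTED: raises ValueError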

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/6e1f1260/aria/storage/core.py
----------------------------------------------------------------------
diff --git a/aria/storage/core.py b/aria/storage/core.py
index a5d3210..94b4fe0 100644
--- a/aria/storage/core.py
+++ b/aria/storage/core.py
@@ -53,9 +53,9 @@ class Storage(LoggerMixin):
     Represents the storage
     """
     def __init__(self, api_cls, api_kwargs=None, items=(), **kwargs):
-        self._api_kwargs = api_kwargs or {}
         super(Storage, self).__init__(**kwargs)
         self.api = api_cls
+        self._api_kwargs = api_kwargs or {}
         self.registered = {}
         for item in items:
             self.register(item)

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/6e1f1260/aria/storage/model.py
----------------------------------------------------------------------
diff --git a/aria/storage/model.py b/aria/storage/model.py
new file mode 100644
index 0000000..afca3e4
--- /dev/null
+++ b/aria/storage/model.py
@@ -0,0 +1,110 @@
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""
+Aria's storage.models module
+Path: aria.storage.models
+
+models module holds aria's models.
+
+classes:
+    * Field - represents a single field.
+    * IterField - represents an iterable field.
+    * Model - abstract model implementation.
+    * Snapshot - snapshots implementation model.
+    * Deployment - deployment implementation model.
+    * DeploymentUpdateStep - deployment update step implementation model.
+    * DeploymentUpdate - deployment update implementation model.
+    * DeploymentModification - deployment modification implementation model.
+    * Execution - execution implementation model.
+    * Node - node implementation model.
+    * Relationship - relationship implementation model.
+    * NodeInstance - node instance implementation model.
+    * RelationshipInstance - relationship instance implementation model.
+    * ProviderContext - provider context implementation model.
+    * Plugin - plugin implementation model.
+"""
+from sqlalchemy.ext.declarative import declarative_base
+
+from . import structure
+from . import base_model as base
+
+__all__ = (
+    'Blueprint',
+    'Deployment',
+    'DeploymentUpdateStep',
+    'DeploymentUpdate',
+    'DeploymentModification',
+    'Execution',
+    'Node',
+    'Relationship',
+    'NodeInstance',
+    'RelationshipInstance',
+    'Plugin',
+)
+
+
+#pylint: disable=abstract-method
+# The required abstract method implementation are implemented in the ModelIDMixin, which is used as
+# a base to the DeclerativeBase.
+DeclarativeBase = declarative_base(cls=structure.ModelIDMixin)
+
+
+class Blueprint(DeclarativeBase, base.BlueprintBase):
+    pass
+
+
+class Deployment(DeclarativeBase, base.DeploymentBase):
+    pass
+
+
+class Execution(DeclarativeBase, base.ExecutionBase):
+    pass
+
+
+class DeploymentUpdate(DeclarativeBase, base.DeploymentUpdateBase):
+    pass
+
+
+class DeploymentUpdateStep(DeclarativeBase, base.DeploymentUpdateStepBase):
+    pass
+
+
+class DeploymentModification(DeclarativeBase, base.DeploymentModificationBase):
+    pass
+
+
+class Node(DeclarativeBase, base.NodeBase):
+    pass
+
+
+class Relationship(DeclarativeBase, base.RelationshipBase):
+    pass
+
+
+class NodeInstance(DeclarativeBase, base.NodeInstanceBase):
+    pass
+
+
+class RelationshipInstance(DeclarativeBase, base.RelationshipInstanceBase):
+    pass
+
+
+class Plugin(DeclarativeBase, base.PluginBase):
+    pass
+
+
+class Task(DeclarativeBase, base.TaskBase):
+    pass
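
With this split, base_model.py carries the generic column and relationship definitions as mixins, and model.py merely binds them to a concrete declarative base. In principle another application could bind the same *Base mixins to its own declarative base in the same way; a hedged sketch (the class names below are invented, and whether this works end-to-end depends on structure.py, which is not reproduced in this mail):

    from sqlalchemy.ext.declarative import declarative_base

    from aria.storage import base_model, structure

    # A consumer-side declarative base, mirroring model.py above.
    AppBase = declarative_base(cls=structure.ModelIDMixin)


    class AppBlueprint(AppBase, base_model.BlueprintBase):
        pass


    class AppDeployment(AppBase, base_model.DeploymentBase):
        pass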

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/6e1f1260/aria/storage/models.py
----------------------------------------------------------------------
diff --git a/aria/storage/models.py b/aria/storage/models.py
deleted file mode 100644
index 0a1027b..0000000
--- a/aria/storage/models.py
+++ /dev/null
@@ -1,575 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements.  See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License.  You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-"""
-Aria's storage.models module
-Path: aria.storage.models
-
-models module holds aria's models.
-
-classes:
-    * Field - represents a single field.
-    * IterField - represents an iterable field.
-    * Model - abstract model implementation.
-    * Snapshot - snapshots implementation model.
-    * Deployment - deployment implementation model.
-    * DeploymentUpdateStep - deployment update step implementation model.
-    * DeploymentUpdate - deployment update implementation model.
-    * DeploymentModification - deployment modification implementation model.
-    * Execution - execution implementation model.
-    * Node - node implementation model.
-    * Relationship - relationship implementation model.
-    * NodeInstance - node instance implementation model.
-    * RelationshipInstance - relationship instance implementation model.
-    * ProviderContext - provider context implementation model.
-    * Plugin - plugin implementation model.
-"""
-from collections import namedtuple
-from datetime import datetime
-
-from sqlalchemy.ext.declarative.base import declared_attr
-
-from .structures import (
-    SQLModelBase,
-    Column,
-    Integer,
-    Text,
-    DateTime,
-    Boolean,
-    Enum,
-    String,
-    Float,
-    List,
-    Dict,
-    foreign_key,
-    one_to_many_relationship,
-    relationship_to_self,
-    orm)
-
-__all__ = (
-    'Blueprint',
-    'Deployment',
-    'DeploymentUpdateStep',
-    'DeploymentUpdate',
-    'DeploymentModification',
-    'Execution',
-    'Node',
-    'Relationship',
-    'NodeInstance',
-    'RelationshipInstance',
-    'ProviderContext',
-    'Plugin',
-)
-
-
-#pylint: disable=no-self-argument
-
-
-class Blueprint(SQLModelBase):
-    """
-    Blueprint model representation.
-    """
-    __tablename__ = 'blueprints'
-
-    name = Column(Text, index=True)
-    created_at = Column(DateTime, nullable=False, index=True)
-    main_file_name = Column(Text, nullable=False)
-    plan = Column(Dict, nullable=False)
-    updated_at = Column(DateTime)
-    description = Column(Text)
-
-
-class Deployment(SQLModelBase):
-    """
-    Deployment model representation.
-    """
-    __tablename__ = 'deployments'
-
-    _private_fields = ['blueprint_id']
-
-    blueprint_id = foreign_key(Blueprint.id)
-
-    name = Column(Text, index=True)
-    created_at = Column(DateTime, nullable=False, index=True)
-    description = Column(Text)
-    inputs = Column(Dict)
-    groups = Column(Dict)
-    permalink = Column(Text)
-    policy_triggers = Column(Dict)
-    policy_types = Column(Dict)
-    outputs = Column(Dict)
-    scaling_groups = Column(Dict)
-    updated_at = Column(DateTime)
-    workflows = Column(Dict)
-
-    @declared_attr
-    def blueprint(cls):
-        return one_to_many_relationship(cls, Blueprint, cls.blueprint_id)
-
-
-class Execution(SQLModelBase):
-    """
-    Execution model representation.
-    """
-    __tablename__ = 'executions'
-
-    TERMINATED = 'terminated'
-    FAILED = 'failed'
-    CANCELLED = 'cancelled'
-    PENDING = 'pending'
-    STARTED = 'started'
-    CANCELLING = 'cancelling'
-    FORCE_CANCELLING = 'force_cancelling'
-
-    STATES = [TERMINATED, FAILED, CANCELLED, PENDING, STARTED, CANCELLING, FORCE_CANCELLING]
-    END_STATES = [TERMINATED, FAILED, CANCELLED]
-    ACTIVE_STATES = [state for state in STATES if state not in END_STATES]
-
-    VALID_TRANSITIONS = {
-        PENDING: [STARTED, CANCELLED],
-        STARTED: END_STATES + [CANCELLING],
-        CANCELLING: END_STATES
-    }
-
-    @orm.validates('status')
-    def validate_status(self, key, value):
-        """Validation function that verifies execution status transitions are OK"""
-        try:
-            current_status = getattr(self, key)
-        except AttributeError:
-            return
-        valid_transitions = Execution.VALID_TRANSITIONS.get(current_status, [])
-        if all([current_status is not None,
-                current_status != value,
-                value not in valid_transitions]):
-            raise ValueError('Cannot change execution status from {current} to {new}'.format(
-                current=current_status,
-                new=value))
-        return value
-
-    deployment_id = foreign_key(Deployment.id)
-    blueprint_id = foreign_key(Blueprint.id)
-    _private_fields = ['deployment_id', 'blueprint_id']
-
-    created_at = Column(DateTime, index=True)
-    started_at = Column(DateTime, nullable=True, index=True)
-    ended_at = Column(DateTime, nullable=True, index=True)
-    error = Column(Text, nullable=True)
-    is_system_workflow = Column(Boolean, nullable=False, default=False)
-    parameters = Column(Dict)
-    status = Column(Enum(*STATES, name='execution_status'), default=PENDING)
-    workflow_name = Column(Text, nullable=False)
-
-    @declared_attr
-    def deployment(cls):
-        return one_to_many_relationship(cls, Deployment, cls.deployment_id)
-
-    @declared_attr
-    def blueprint(cls):
-        return one_to_many_relationship(cls, Blueprint, cls.blueprint_id)
-
-    def __str__(self):
-        return '<{0} id=`{1}` (status={2})>'.format(
-            self.__class__.__name__,
-            self.id,
-            self.status
-        )
-
-
-class DeploymentUpdate(SQLModelBase):
-    """
-    Deployment update model representation.
-    """
-    __tablename__ = 'deployment_updates'
-
-    deployment_id = foreign_key(Deployment.id)
-    execution_id = foreign_key(Execution.id, nullable=True)
-    _private_fields = ['execution_id', 'deployment_id']
-
-    created_at = Column(DateTime, nullable=False, index=True)
-    deployment_plan = Column(Dict, nullable=False)
-    deployment_update_node_instances = Column(Dict)
-    deployment_update_deployment = Column(Dict)
-    deployment_update_nodes = Column(Dict)
-    modified_entity_ids = Column(Dict)
-    state = Column(Text)
-
-    @declared_attr
-    def execution(cls):
-        return one_to_many_relationship(cls, Execution, cls.execution_id)
-
-    @declared_attr
-    def deployment(cls):
-        return one_to_many_relationship(cls, Deployment, cls.deployment_id)
-
-    def to_dict(self, suppress_error=False, **kwargs):
-        dep_update_dict = super(DeploymentUpdate, self).to_dict(suppress_error)
-        # Taking care of the fact the DeploymentSteps are objects
-        dep_update_dict['steps'] = [step.to_dict() for step in self.steps]
-        return dep_update_dict
-
-
-class DeploymentUpdateStep(SQLModelBase):
-    """
-    Deployment update step model representation.
-    """
-    __tablename__ = 'deployment_update_steps'
-    _action_types = namedtuple('ACTION_TYPES', 'ADD, REMOVE, MODIFY')
-    ACTION_TYPES = _action_types(ADD='add', REMOVE='remove', MODIFY='modify')
-    _entity_types = namedtuple(
-        'ENTITY_TYPES',
-        'NODE, RELATIONSHIP, PROPERTY, OPERATION, WORKFLOW, OUTPUT, DESCRIPTION, GROUP, '
-        'POLICY_TYPE, POLICY_TRIGGER, PLUGIN')
-    ENTITY_TYPES = _entity_types(
-        NODE='node',
-        RELATIONSHIP='relationship',
-        PROPERTY='property',
-        OPERATION='operation',
-        WORKFLOW='workflow',
-        OUTPUT='output',
-        DESCRIPTION='description',
-        GROUP='group',
-        POLICY_TYPE='policy_type',
-        POLICY_TRIGGER='policy_trigger',
-        PLUGIN='plugin'
-    )
-
-    deployment_update_id = foreign_key(DeploymentUpdate.id)
-    _private_fields = ['deployment_update_id']
-
-    action = Column(Enum(*ACTION_TYPES, name='action_type'), nullable=False)
-    entity_id = Column(Text, nullable=False)
-    entity_type = Column(Enum(*ENTITY_TYPES, name='entity_type'), nullable=False)
-
-    @declared_attr
-    def deployment_update(cls):
-        return one_to_many_relationship(cls,
-                                        DeploymentUpdate,
-                                        cls.deployment_update_id,
-                                        backreference='steps')
-
-    def __hash__(self):
-        return hash((self.id, self.entity_id))
-
-    def __lt__(self, other):
-        """
-        the order is 'remove' < 'modify' < 'add'
-        :param other:
-        :return:
-        """
-        if not isinstance(other, self.__class__):
-            return not self >= other
-
-        if self.action != other.action:
-            if self.action == 'remove':
-                return_value = True
-            elif self.action == 'add':
-                return_value = False
-            else:
-                return_value = other.action == 'add'
-            return return_value
-
-        if self.action == 'add':
-            return self.entity_type == 'node' and other.entity_type == 'relationship'
-        if self.action == 'remove':
-            return self.entity_type == 'relationship' and other.entity_type == 'node'
-        return False
-
-
-class DeploymentModification(SQLModelBase):
-    """
-    Deployment modification model representation.
-    """
-    __tablename__ = 'deployment_modifications'
-
-    STARTED = 'started'
-    FINISHED = 'finished'
-    ROLLEDBACK = 'rolledback'
-
-    STATES = [STARTED, FINISHED, ROLLEDBACK]
-    END_STATES = [FINISHED, ROLLEDBACK]
-
-    deployment_id = foreign_key(Deployment.id)
-    _private_fields = ['deployment_id']
-
-    context = Column(Dict)
-    created_at = Column(DateTime, nullable=False, index=True)
-    ended_at = Column(DateTime, index=True)
-    modified_nodes = Column(Dict)
-    node_instances = Column(Dict)
-    status = Column(Enum(*STATES, name='deployment_modification_status'))
-
-    @declared_attr
-    def deployment(cls):
-        return one_to_many_relationship(cls,
-                                        Deployment,
-                                        cls.deployment_id,
-                                        backreference='modifications')
-
-
-class Node(SQLModelBase):
-    """
-    Node model representation.
-    """
-    __tablename__ = 'nodes'
-
-    # See base class for an explanation on these properties
-    is_id_unique = False
-
-    name = Column(Text, index=True)
-    _private_fields = ['deployment_id', 'host_id']
-    deployment_id = foreign_key(Deployment.id)
-    host_id = foreign_key('nodes.id', nullable=True)
-
-    @declared_attr
-    def deployment(cls):
-        return one_to_many_relationship(cls, Deployment, cls.deployment_id)
-
-    deploy_number_of_instances = Column(Integer, nullable=False)
-    # TODO: This probably should be a foreign key, but there's no guarantee
-    # in the code, currently, that the host will be created beforehand
-    max_number_of_instances = Column(Integer, nullable=False)
-    min_number_of_instances = Column(Integer, nullable=False)
-    number_of_instances = Column(Integer, nullable=False)
-    planned_number_of_instances = Column(Integer, nullable=False)
-    plugins = Column(List)
-    properties = Column(Dict)
-    operations = Column(Dict)
-    type = Column(Text, nullable=False, index=True)
-    type_hierarchy = Column(List)
-
-    @declared_attr
-    def host(cls):
-        return relationship_to_self(cls, cls.host_id, cls.id)
-
-
-class Relationship(SQLModelBase):
-    """
-    Relationship model representation.
-    """
-    __tablename__ = 'relationships'
-
-    _private_fields = ['source_node_id', 'target_node_id']
-
-    source_node_id = foreign_key(Node.id)
-    target_node_id = foreign_key(Node.id)
-
-    @declared_attr
-    def source_node(cls):
-        return one_to_many_relationship(cls,
-                                        Node,
-                                        cls.source_node_id,
-                                        'outbound_relationships')
-
-    @declared_attr
-    def target_node(cls):
-        return one_to_many_relationship(cls,
-                                        Node,
-                                        cls.target_node_id,
-                                        'inbound_relationships')
-
-    source_interfaces = Column(Dict)
-    source_operations = Column(Dict, nullable=False)
-    target_interfaces = Column(Dict)
-    target_operations = Column(Dict, nullable=False)
-    type = Column(String, nullable=False)
-    type_hierarchy = Column(List)
-    properties = Column(Dict)
-
-
-class NodeInstance(SQLModelBase):
-    """
-    Node instance model representation.
-    """
-    __tablename__ = 'node_instances'
-
-    node_id = foreign_key(Node.id)
-    deployment_id = foreign_key(Deployment.id)
-    host_id = foreign_key('node_instances.id', nullable=True)
-
-    _private_fields = ['node_id', 'host_id']
-
-    name = Column(Text, index=True)
-    runtime_properties = Column(Dict)
-    scaling_groups = Column(Dict)
-    state = Column(Text, nullable=False)
-    version = Column(Integer, default=1)
-
-    @declared_attr
-    def deployment(cls):
-        return one_to_many_relationship(cls, Deployment, cls.deployment_id)
-
-    @declared_attr
-    def node(cls):
-        return one_to_many_relationship(cls, Node, cls.node_id)
-
-    @declared_attr
-    def host(cls):
-        return relationship_to_self(cls, cls.host_id, cls.id)
-
-
-class RelationshipInstance(SQLModelBase):
-    """
-    Relationship instance model representation.
-    """
-    __tablename__ = 'relationship_instances'
-
-    relationship_id = foreign_key(Relationship.id)
-    source_node_instance_id = foreign_key(NodeInstance.id)
-    target_node_instance_id = foreign_key(NodeInstance.id)
-
-    _private_fields = ['relationship_storage_id',
-                       'source_node_instance_id',
-                       'target_node_instance_id']
-
-    @declared_attr
-    def source_node_instance(cls):
-        return one_to_many_relationship(cls,
-                                        NodeInstance,
-                                        cls.source_node_instance_id,
-                                        'outbound_relationship_instances')
-
-    @declared_attr
-    def target_node_instance(cls):
-        return one_to_many_relationship(cls,
-                                        NodeInstance,
-                                        cls.target_node_instance_id,
-                                        'inbound_relationship_instances')
-
-    @declared_attr
-    def relationship(cls):
-        return one_to_many_relationship(cls, Relationship, cls.relationship_id)
-
-
-class ProviderContext(SQLModelBase):
-    """
-    Provider context model representation.
-    """
-    __tablename__ = 'provider_context'
-
-    name = Column(Text, nullable=False)
-    context = Column(Dict, nullable=False)
-
-
-class Plugin(SQLModelBase):
-    """
-    Plugin model representation.
-    """
-    __tablename__ = 'plugins'
-
-    archive_name = Column(Text, nullable=False, index=True)
-    distribution = Column(Text)
-    distribution_release = Column(Text)
-    distribution_version = Column(Text)
-    package_name = Column(Text, nullable=False, index=True)
-    package_source = Column(Text)
-    package_version = Column(Text)
-    supported_platform = Column(Text)
-    supported_py_versions = Column(List)
-    uploaded_at = Column(DateTime, nullable=False, index=True)
-    wheels = Column(List, nullable=False)
-
-
-class Task(SQLModelBase):
-    """
-    A Model which represents an task
-    """
-
-    __tablename__ = 'task'
-    node_instance_id = foreign_key(NodeInstance.id, nullable=True)
-    relationship_instance_id = foreign_key(RelationshipInstance.id, nullable=True)
-    execution_id = foreign_key(Execution.id, nullable=True)
-
-    _private_fields = ['node_instance_id',
-                       'relationship_instance_id',
-                       'execution_id']
-
-    @declared_attr
-    def node_instance(cls):
-        return one_to_many_relationship(cls, NodeInstance, cls.node_instance_id)
-
-    @declared_attr
-    def relationship_instance(cls):
-        return one_to_many_relationship(cls,
-                                        RelationshipInstance,
-                                        cls.relationship_instance_id)
-
-    PENDING = 'pending'
-    RETRYING = 'retrying'
-    SENT = 'sent'
-    STARTED = 'started'
-    SUCCESS = 'success'
-    FAILED = 'failed'
-    STATES = (
-        PENDING,
-        RETRYING,
-        SENT,
-        STARTED,
-        SUCCESS,
-        FAILED,
-    )
-
-    WAIT_STATES = [PENDING, RETRYING]
-    END_STATES = [SUCCESS, FAILED]
-
-    @orm.validates('max_attempts')
-    def validate_max_attempts(self, _, value):  # pylint: disable=no-self-use
-        """Validates that max attempts is either -1 or a positive number"""
-        if value < 1 and value != Task.INFINITE_RETRIES:
-            raise ValueError('Max attempts can be either -1 (infinite) or any positive number. '
-                             'Got {value}'.format(value=value))
-        return value
-
-    INFINITE_RETRIES = -1
-
-    status = Column(Enum(*STATES), name='status', default=PENDING)
-
-    due_at = Column(DateTime, default=datetime.utcnow)
-    started_at = Column(DateTime, default=None)
-    ended_at = Column(DateTime, default=None)
-    max_attempts = Column(Integer, default=1)
-    retry_count = Column(Integer, default=0)
-    retry_interval = Column(Float, default=0)
-    ignore_failure = Column(Boolean, default=False)
-
-    # Operation specific fields
-    name = Column(String)
-    operation_mapping = Column(String)
-    inputs = Column(Dict)
-    plugin_id = foreign_key(Plugin.id, nullable=True)
-
-    @declared_attr
-    def plugin(cls):
-        return one_to_many_relationship(cls, Plugin, cls.plugin_id)
-
-    @declared_attr
-    def execution(cls):
-        return one_to_many_relationship(cls, Execution, cls.execution_id)
-
-    @property
-    def actor(self):
-        """
-        Return the actor of the task
-        :return:
-        """
-        return self.node_instance or self.relationship_instance
-
-    @classmethod
-    def as_node_instance(cls, instance_id, **kwargs):
-        return cls(node_instance_id=instance_id, **kwargs)
-
-    @classmethod
-    def as_relationship_instance(cls, instance_id, **kwargs):
-        return cls(relationship_instance_id=instance_id, **kwargs)

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/6e1f1260/aria/storage/structure.py
----------------------------------------------------------------------
diff --git a/aria/storage/structure.py b/aria/storage/structure.py
new file mode 100644
index 0000000..fe6a144
--- /dev/null
+++ b/aria/storage/structure.py
@@ -0,0 +1,180 @@
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""
+Aria's storage.structures module
+Path: aria.storage.structures
+
+models module holds aria's models.
+
+classes:
+    * Field - represents a single field.
+    * IterField - represents an iterable field.
+    * PointerField - represents a single pointer field.
+    * IterPointerField - represents an iterable pointers field.
+    * Model - abstract model implementation.
+"""
+
+from sqlalchemy.orm import relationship, backref
+from sqlalchemy.ext import associationproxy
+from sqlalchemy import (
+    Column,
+    ForeignKey,
+    Integer,
+    Text
+)
+
+
+class ModelMixin(object):
+
+    @classmethod
+    def id_column_name(cls):
+        raise NotImplementedError
+
+    @classmethod
+    def name_column_name(cls):
+        raise NotImplementedError
+
+    @classmethod
+    def _get_cls_by_tablename(cls, tablename):
+        """Return class reference mapped to table.
+
+         :param tablename: String with name of table.
+         :return: Class reference or None.
+         """
+        if tablename in (cls.__name__, cls.__tablename__):
+            return cls
+
+        for table_cls in cls._decl_class_registry.values():
+            if tablename in (getattr(table_cls, '__name__', None),
+                             getattr(table_cls, '__tablename__', None)):
+                return table_cls
+
+    @classmethod
+    def foreign_key(cls, table, nullable=False):
+        """Return a ForeignKey object with the relevant
+
+        :param table: Unique id column in the parent table
+        :param nullable: Should the column be allowed to remain empty
+        """
+        table = cls._get_cls_by_tablename(table.__tablename__)
+        foreign_key_str = '{tablename}.{unique_id}'.format(tablename=table.__tablename__,
+                                                            unique_id=table.id_column_name())
+        column = Column(ForeignKey(foreign_key_str, ondelete='CASCADE'),
+                        nullable=nullable)
+        column.__remote_table_name = table.__name__
+        return column
+
+    @classmethod
+    def one_to_many_relationship(cls,
+                                 foreign_key_column,
+                                 backreference=None):
+        """Return a one-to-many SQL relationship object
+        Meant to be used from inside the *child* object
+
+        :param parent_class: Class of the parent table
+        :param cls: Class of the child table
+        :param foreign_key_column: The column of the foreign key (from the child table)
+        :param backreference: The name to give to the reference to the child (on the parent table)
+        """
+        parent_table = cls._get_cls_by_tablename(
+            getattr(cls, foreign_key_column).__remote_table_name)
+        primaryjoin_str = '{parent_class_name}.{parent_unique_id} == ' \
+                          '{child_class.__name__}.{foreign_key_column}'\
+            .format(
+                parent_class_name=parent_table.__name__,
+                parent_unique_id=parent_table.id_column_name(),
+                child_class=cls,
+                foreign_key_column=foreign_key_column
+            )
+        return relationship(
+            parent_table.__name__,
+            primaryjoin=primaryjoin_str,
+            foreign_keys=[getattr(cls, foreign_key_column)],
+            # The following line make sure that when the *parent* is
+            # deleted, all its connected children are deleted as well
+            backref=backref(backreference or cls.__tablename__, cascade='all'),
+        )
+
+    @classmethod
+    def relationship_to_self(cls, local_column):
+
+        remote_side_str = '{cls.__name__}.{remote_column}'.format(
+            cls=cls,
+            remote_column=cls.id_column_name()
+        )
+        primaryjoin_str = '{remote_side_str} == {cls.__name__}.{local_column}'.format(
+            remote_side_str=remote_side_str,
+            cls=cls,
+            local_column=local_column)
+        return relationship(cls.__name__,
+                            primaryjoin=primaryjoin_str,
+                            remote_side=remote_side_str,
+                            post_update=True)
+
+    def to_dict(self, suppress_error=False):
+        """Return a dict representation of the model
+
+        :param suppress_error: If set to True, sets `None` to attributes that
+        it's unable to retrieve (e.g., if a relationship wasn't established
+        yet, and so it's impossible to access a property through it)
+        """
+        if suppress_error:
+            res = dict()
+            for field in self.get_fields():
+                try:
+                    field_value = getattr(self, field)
+                except AttributeError:
+                    field_value = None
+                res[field] = field_value
+        else:
+            # Can't simply call here `self.to_response()` because inheriting
+            # class might override it, but we always need the same code here
+            res = dict((f, getattr(self, f)) for f in self.fields())
+        return res
+
+    @classmethod
+    def _association_proxies(cls):
+        for col, value in cls.__table__.columns.items():
+            if isinstance(value, associationproxy.AssociationProxy):
+                yield col
+
+    @classmethod
+    def fields(cls):
+        """Return the list of field names for this table
+
+        Mostly for backwards compatibility in the code (that uses `fields`)
+        """
+        fields = set(cls._association_proxies())
+        fields.update(cls.__table__.columns.keys())
+        return fields - set(cls._private_fields)
+
+    def __repr__(self):
+        return '<{__class__.__name__} id=`{id}`>'.format(
+            __class__=self.__class__,
+            id=getattr(self, self.name_column_name()))
+
+
+class ModelIDMixin(object):
+    id = Column(Integer, primary_key=True, autoincrement=True)
+    name = Column(Text, nullable=True, index=True)
+
+    @classmethod
+    def id_column_name(cls):
+        return 'id'
+
+    @classmethod
+    def name_column_name(cls):
+        return 'name'
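
For orientation, here is a rough sketch of how the two mixins are meant to be
combined by the concrete models. The real declarations live in
aria/storage/model.py; the Blueprint/Deployment shapes below are illustrative
assumptions, not the committed code:

    # Sketch only: column sets are abbreviated and the declarative-base wiring
    # is assumed rather than copied from aria/storage/model.py.
    from sqlalchemy import Column, Text
    from sqlalchemy.ext.declarative import declarative_base, declared_attr

    from aria.storage.structure import ModelMixin, ModelIDMixin
    from aria.storage.type import Dict

    DeclarativeBase = declarative_base(cls=ModelMixin)

    class Blueprint(DeclarativeBase, ModelIDMixin):
        __tablename__ = 'blueprints'
        plan = Column(Dict, nullable=False)
        main_file_name = Column(Text)

    class Deployment(DeclarativeBase, ModelIDMixin):
        __tablename__ = 'deployments'
        _private_fields = ['blueprint_fk']

        # foreign_key() resolves the parent's table name and id column, so the
        # child only has to name the parent class.
        @declared_attr
        def blueprint_fk(cls):
            return cls.foreign_key(Blueprint)

        # one_to_many_relationship() builds the join from the *_fk column and
        # adds a cascading backref ('deployments') on Blueprint.
        @declared_attr
        def blueprint(cls):
            return cls.one_to_many_relationship('blueprint_fk')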

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/6e1f1260/aria/storage/structures.py
----------------------------------------------------------------------
diff --git a/aria/storage/structures.py b/aria/storage/structures.py
deleted file mode 100644
index 8afa40c..0000000
--- a/aria/storage/structures.py
+++ /dev/null
@@ -1,244 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements.  See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License.  You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-"""
-Aria's storage.structures module
-Path: aria.storage.structures
-
-models module holds aria's models.
-
-classes:
-    * Field - represents a single field.
-    * IterField - represents an iterable field.
-    * PointerField - represents a single pointer field.
-    * IterPointerField - represents an iterable pointers field.
-    * Model - abstract model implementation.
-"""
-import json
-
-from sqlalchemy.ext.mutable import Mutable
-from sqlalchemy.orm import relationship, backref
-from sqlalchemy.ext.declarative import declarative_base
-# pylint: disable=unused-import
-from sqlalchemy.ext.associationproxy import association_proxy
-from sqlalchemy import (
-    schema,
-    VARCHAR,
-    ARRAY,
-    Column,
-    Integer,
-    Text,
-    DateTime,
-    Boolean,
-    Enum,
-    String,
-    PickleType,
-    Float,
-    TypeDecorator,
-    ForeignKey,
-    orm,
-)
-
-from aria.storage import exceptions
-
-Model = declarative_base()
-
-
-def foreign_key(foreign_key_column, nullable=False):
-    """Return a ForeignKey object with the relevant
-
-    :param foreign_key_column: Unique id column in the parent table
-    :param nullable: Should the column be allowed to remain empty
-    """
-    return Column(
-        ForeignKey(foreign_key_column, ondelete='CASCADE'),
-        nullable=nullable
-    )
-
-
-def one_to_many_relationship(child_class,
-                             parent_class,
-                             foreign_key_column,
-                             backreference=None):
-    """Return a one-to-many SQL relationship object
-    Meant to be used from inside the *child* object
-
-    :param parent_class: Class of the parent table
-    :param child_class: Class of the child table
-    :param foreign_key_column: The column of the foreign key
-    :param backreference: The name to give to the reference to the child
-    """
-    backreference = backreference or child_class.__tablename__
-    return relationship(
-        parent_class,
-        primaryjoin=lambda: parent_class.id == foreign_key_column,
-        # The following line make sure that when the *parent* is
-        # deleted, all its connected children are deleted as well
-        backref=backref(backreference, cascade='all')
-    )
-
-
-def relationship_to_self(self_cls, parent_key, self_key):
-    return relationship(
-        self_cls,
-        foreign_keys=parent_key,
-        remote_side=self_key
-    )
-
-
-class _MutableType(TypeDecorator):
-    """
-    Dict representation of type.
-    """
-    @property
-    def python_type(self):
-        raise NotImplementedError
-
-    impl = VARCHAR
-
-    def process_literal_param(self, value, dialect):
-        pass
-
-    def process_bind_param(self, value, dialect):
-        if value is not None:
-            value = json.dumps(value)
-        return value
-
-    def process_result_value(self, value, dialect):
-        if value is not None:
-            value = json.loads(value)
-        return value
-
-
-class _DictType(_MutableType):
-    @property
-    def python_type(self):
-        return dict
-
-
-class _ListType(_MutableType):
-    @property
-    def python_type(self):
-        return list
-
-
-class _MutableDict(Mutable, dict):
-    """
-    Enables tracking for dict values.
-    """
-    @classmethod
-    def coerce(cls, key, value):
-        "Convert plain dictionaries to MutableDict."
-
-        if not isinstance(value, _MutableDict):
-            if isinstance(value, dict):
-                return _MutableDict(value)
-
-            # this call will raise ValueError
-            try:
-                return Mutable.coerce(key, value)
-            except ValueError as e:
-                raise exceptions.StorageError('SQL Storage error: {0}'.format(str(e)))
-        else:
-            return value
-
-    def __setitem__(self, key, value):
-        "Detect dictionary set events and emit change events."
-
-        dict.__setitem__(self, key, value)
-        self.changed()
-
-    def __delitem__(self, key):
-        "Detect dictionary del events and emit change events."
-
-        dict.__delitem__(self, key)
-        self.changed()
-
-
-class _MutableList(Mutable, list):
-
-    @classmethod
-    def coerce(cls, key, value):
-        "Convert plain dictionaries to MutableDict."
-
-        if not isinstance(value, _MutableList):
-            if isinstance(value, list):
-                return _MutableList(value)
-
-            # this call will raise ValueError
-            try:
-                return Mutable.coerce(key, value)
-            except ValueError as e:
-                raise exceptions.StorageError('SQL Storage error: {0}'.format(str(e)))
-        else:
-            return value
-
-    def __setitem__(self, key, value):
-        list.__setitem__(self, key, value)
-        self.changed()
-
-    def __delitem__(self, key):
-        list.__delitem__(self, key)
-
-
-Dict = _MutableDict.as_mutable(_DictType)
-List = _MutableList.as_mutable(_ListType)
-
-
-class SQLModelBase(Model):
-    """
-    Abstract base class for all SQL models that allows [de]serialization
-    """
-    # SQLAlchemy syntax
-    __abstract__ = True
-
-    # This would be overridden once the models are created. Created for pylint.
-    __table__ = None
-
-    _private_fields = []
-
-    id = Column(Integer, primary_key=True, autoincrement=True)
-
-    def to_dict(self, suppress_error=False):
-        """Return a dict representation of the model
-
-        :param suppress_error: If set to True, sets `None` to attributes that
-        it's unable to retrieve (e.g., if a relationship wasn't established
-        yet, and so it's impossible to access a property through it)
-        """
-        if suppress_error:
-            res = dict()
-            for field in self.fields():
-                try:
-                    field_value = getattr(self, field)
-                except AttributeError:
-                    field_value = None
-                res[field] = field_value
-        else:
-            # Can't simply call here `self.to_response()` because inheriting
-            # class might override it, but we always need the same code here
-            res = dict((f, getattr(self, f)) for f in self.fields())
-        return res
-
-    @classmethod
-    def fields(cls):
-        """Return the list of field names for this table
-
-        Mostly for backwards compatibility in the code (that uses `fields`)
-        """
-        return set(cls.__table__.columns.keys()) - set(cls._private_fields)
-
-    def __repr__(self):
-        return '<{0} id=`{1}`>'.format(self.__class__.__name__, self.id)

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/6e1f1260/aria/storage/type.py
----------------------------------------------------------------------
diff --git a/aria/storage/type.py b/aria/storage/type.py
new file mode 100644
index 0000000..b168b7f
--- /dev/null
+++ b/aria/storage/type.py
@@ -0,0 +1,123 @@
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+import json
+
+from sqlalchemy import (
+    TypeDecorator,
+    VARCHAR
+)
+
+from sqlalchemy.ext import mutable
+
+from . import exceptions
+
+
+class _MutableType(TypeDecorator):
+    """
+    Dict representation of type.
+    """
+    @property
+    def python_type(self):
+        raise NotImplementedError
+
+    def process_literal_param(self, value, dialect):
+        pass
+
+    impl = VARCHAR
+
+    def process_bind_param(self, value, dialect):
+        if value is not None:
+            value = json.dumps(value)
+        return value
+
+    def process_result_value(self, value, dialect):
+        if value is not None:
+            value = json.loads(value)
+        return value
+
+
+class Dict(_MutableType):
+    @property
+    def python_type(self):
+        return dict
+
+
+class List(_MutableType):
+    @property
+    def python_type(self):
+        return list
+
+
+class _MutableDict(mutable.Mutable, dict):
+    """
+    Enables tracking for dict values.
+    """
+    @classmethod
+    def coerce(cls, key, value):
+        "Convert plain dictionaries to MutableDict."
+
+        if not isinstance(value, _MutableDict):
+            if isinstance(value, dict):
+                return _MutableDict(value)
+
+            # this call will raise ValueError
+            try:
+                return mutable.Mutable.coerce(key, value)
+            except ValueError as e:
+                raise exceptions.StorageError('SQL Storage error: {0}'.format(str(e)))
+        else:
+            return value
+
+    def __setitem__(self, key, value):
+        "Detect dictionary set events and emit change events."
+
+        dict.__setitem__(self, key, value)
+        self.changed()
+
+    def __delitem__(self, key):
+        "Detect dictionary del events and emit change events."
+
+        dict.__delitem__(self, key)
+        self.changed()
+
+
+class _MutableList(mutable.Mutable, list):
+
+    @classmethod
+    def coerce(cls, key, value):
+        "Convert plain dictionaries to MutableDict."
+
+        if not isinstance(value, _MutableList):
+            if isinstance(value, list):
+                return _MutableList(value)
+
+            # this call will raise ValueError
+            try:
+                return mutable.Mutable.coerce(key, value)
+            except ValueError as e:
+                raise exceptions.StorageError('SQL Storage error: {0}'.format(str(e)))
+        else:
+            return value
+
+    def __setitem__(self, key, value):
+        list.__setitem__(self, key, value)
+        self.changed()
+
+    def __delitem__(self, key):
+        list.__delitem__(self, key)
+
+
+_MutableDict.associate_with(Dict)
+_MutableList.as_mutable(List)
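
The practical effect of the new column types is that in-place mutation of a
stored dict is tracked and persisted. A minimal, self-contained sketch (the
Example model and the SQLite URL are assumptions, not part of the commit):

    # Toy model; only the Dict/List column types come from ARIA.
    from sqlalchemy import Column, Integer, create_engine
    from sqlalchemy.ext.declarative import declarative_base
    from sqlalchemy.orm import sessionmaker

    from aria.storage.type import Dict, List

    Base = declarative_base()

    class Example(Base):
        __tablename__ = 'examples'
        id = Column(Integer, primary_key=True)
        runtime_properties = Column(Dict)   # persisted as a JSON string (VARCHAR)
        tags = Column(List)

    engine = create_engine('sqlite://')
    Base.metadata.create_all(engine)
    session = sessionmaker(bind=engine)()

    row = Example(runtime_properties={'ip': '1.1.1.1'}, tags=['web'])
    session.add(row)
    session.commit()

    # Because _MutableDict.associate_with(Dict) was called above, this in-place
    # update marks the row dirty; the change is flushed on the next commit
    # without reassigning the whole dict.
    row.runtime_properties['port'] = 8080
    session.commit()

This mirrors how fields such as a node instance's runtime_properties are
expected to behave under the generified models.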

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/6e1f1260/tests/mock/models.py
----------------------------------------------------------------------
diff --git a/tests/mock/models.py b/tests/mock/models.py
index 26088e0..433bf4c 100644
--- a/tests/mock/models.py
+++ b/tests/mock/models.py
@@ -15,7 +15,7 @@
 
 from datetime import datetime
 
-from aria.storage import models
+from aria.storage import model
 
 from . import operations
 
@@ -35,7 +35,7 @@ RELATIONSHIP_INSTANCE_NAME = 'relationship_instance'
 
 
 def get_dependency_node(deployment):
-    return models.Node(
+    return model.Node(
         name=DEPENDENCY_NODE_NAME,
         type='test_node_type',
         type_hierarchy=[],
@@ -46,26 +46,25 @@ def get_dependency_node(deployment):
         operations=dict((key, {}) for key in operations.NODE_OPERATIONS),
         min_number_of_instances=1,
         max_number_of_instances=1,
-        deployment_id=deployment.id
+        deployment_fk=deployment.id
     )
 
 
 def get_dependency_node_instance(dependency_node):
-    return models.NodeInstance(
+    return model.NodeInstance(
         name=DEPENDENCY_NODE_INSTANCE_NAME,
         runtime_properties={'ip': '1.1.1.1'},
         version=None,
-        node_id=dependency_node.id,
-        deployment_id=dependency_node.deployment.id,
+        node_fk=dependency_node.id,
         state='',
         scaling_groups={}
     )
 
 
 def get_relationship(source=None, target=None):
-    return models.Relationship(
-        source_node_id=source.id,
-        target_node_id=target.id,
+    return model.Relationship(
+        source_node_fk=source.id,
+        target_node_fk=target.id,
         source_interfaces={},
         source_operations=dict((key, {}) for key in operations.RELATIONSHIP_OPERATIONS),
         target_interfaces={},
@@ -77,17 +76,17 @@ def get_relationship(source=None, target=None):
 
 
 def get_relationship_instance(source_instance, target_instance, relationship):
-    return models.RelationshipInstance(
-        relationship_id=relationship.id,
-        target_node_instance_id=target_instance.id,
-        source_node_instance_id=source_instance.id,
+    return model.RelationshipInstance(
+        relationship_fk=relationship.id,
+        target_node_instance_fk=target_instance.id,
+        source_node_instance_fk=source_instance.id,
     )
 
 
 def get_dependent_node(deployment):
-    return models.Node(
+    return model.Node(
         name=DEPENDENT_NODE_NAME,
-        deployment_id=deployment.id,
+        deployment_fk=deployment.id,
         type='test_node_type',
         type_hierarchy=[],
         number_of_instances=1,
@@ -101,12 +100,11 @@ def get_dependent_node(deployment):
 
 
 def get_dependent_node_instance(dependent_node):
-    return models.NodeInstance(
+    return model.NodeInstance(
         name=DEPENDENT_NODE_INSTANCE_NAME,
         runtime_properties={},
         version=None,
-        node_id=dependent_node.id,
-        deployment_id=dependent_node.deployment.id,
+        node_fk=dependent_node.id,
         state='',
         scaling_groups={}
     )
@@ -114,7 +112,7 @@ def get_dependent_node_instance(dependent_node):
 
 def get_blueprint():
     now = datetime.now()
-    return models.Blueprint(
+    return model.Blueprint(
         plan={},
         name=BLUEPRINT_NAME,
         description=None,
@@ -125,10 +123,9 @@ def get_blueprint():
 
 
 def get_execution(deployment):
-    return models.Execution(
-        deployment_id=deployment.id,
-        blueprint_id=deployment.blueprint.id,
-        status=models.Execution.STARTED,
+    return model.Execution(
+        deployment_fk=deployment.id,
+        status=model.Execution.STARTED,
         workflow_name=WORKFLOW_NAME,
         started_at=datetime.utcnow(),
         parameters=None
@@ -137,9 +134,9 @@ def get_execution(deployment):
 
 def get_deployment(blueprint):
     now = datetime.utcnow()
-    return models.Deployment(
+    return model.Deployment(
         name=DEPLOYMENT_NAME,
-        blueprint_id=blueprint.id,
+        blueprint_fk=blueprint.id,
         description='',
         created_at=now,
         updated_at=now,
@@ -155,7 +152,7 @@ def get_deployment(blueprint):
 
 
 def get_plugin(package_name='package', package_version='0.1'):
-    return models.Plugin(
+    return model.Plugin(
         archive_name='archive_name',
         distribution='distribution',
         distribution_release='dist_release',
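
Since the fixtures now wire parents to children through *_fk columns, each
object generally has to be stored (so it has an id) before the next fixture can
reference it. A hedged usage sketch; 'model_storage' is a hypothetical handle
of the kind returned by application_model_storage:

    # Assumed ordering when using the updated fixtures.
    from tests.mock import models

    blueprint = models.get_blueprint()
    model_storage.blueprint.put(blueprint)                       # blueprint.id now exists

    deployment = models.get_deployment(blueprint)                # blueprint_fk=blueprint.id
    model_storage.deployment.put(deployment)

    node = models.get_dependency_node(deployment)                # deployment_fk=deployment.id
    model_storage.node.put(node)

    node_instance = models.get_dependency_node_instance(node)    # node_fk=node.id
    model_storage.node_instance.put(node_instance)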

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/6e1f1260/tests/orchestrator/context/test_toolbelt.py
----------------------------------------------------------------------
diff --git a/tests/orchestrator/context/test_toolbelt.py b/tests/orchestrator/context/test_toolbelt.py
index da46696..b63811b 100644
--- a/tests/orchestrator/context/test_toolbelt.py
+++ b/tests/orchestrator/context/test_toolbelt.py
@@ -49,21 +49,21 @@ def executor():
 
 def _get_elements(workflow_context):
     dependency_node = workflow_context.model.node.get_by_name(mock.models.DEPENDENCY_NODE_NAME)
-    dependency_node.host_id = dependency_node.id
+    dependency_node.host = dependency_node
     workflow_context.model.node.update(dependency_node)
 
     dependency_node_instance = workflow_context.model.node_instance.get_by_name(
         mock.models.DEPENDENCY_NODE_INSTANCE_NAME)
-    dependency_node_instance.host_id = dependency_node_instance.id
+    dependency_node_instance.host_fk = dependency_node_instance.id
     workflow_context.model.node_instance.update(dependency_node_instance)
 
     dependent_node = workflow_context.model.node.get_by_name(mock.models.DEPENDENT_NODE_NAME)
-    dependent_node.host_id = dependency_node.id
+    dependent_node.host_fk = dependency_node.id
     workflow_context.model.node.update(dependent_node)
 
     dependent_node_instance = workflow_context.model.node_instance.get_by_name(
         mock.models.DEPENDENT_NODE_INSTANCE_NAME)
-    dependent_node_instance.host_id = dependent_node_instance.id
+    dependent_node_instance.host_fk = dependent_node_instance.id
     workflow_context.model.node_instance.update(dependent_node_instance)
 
     relationship = workflow_context.model.relationship.list()[0]
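
In other words, the test no longer fakes a host by copying values into the old
host_id column; it either assigns the self-referential relationship directly or
sets the new host_fk foreign key, as the hunks above show:

    # The two styles used above for making a node (instance) its own host.
    dependency_node.host = dependency_node            # via the relationship
    workflow_context.model.node.update(dependency_node)

    dependency_node_instance.host_fk = dependency_node_instance.id   # via the FK column
    workflow_context.model.node_instance.update(dependency_node_instance)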

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/6e1f1260/tests/orchestrator/workflows/builtin/test_heal.py
----------------------------------------------------------------------
diff --git a/tests/orchestrator/workflows/builtin/test_heal.py b/tests/orchestrator/workflows/builtin/test_heal.py
index 97121b9..ad281fd 100644
--- a/tests/orchestrator/workflows/builtin/test_heal.py
+++ b/tests/orchestrator/workflows/builtin/test_heal.py
@@ -34,7 +34,7 @@ def ctx(tmpdir):
 def test_heal_dependent_node(ctx):
     dependent_node_instance = \
         ctx.model.node_instance.get_by_name(mock.models.DEPENDENT_NODE_INSTANCE_NAME)
-    dependent_node_instance.host_id = dependent_node_instance.id
+    dependent_node_instance.host_fk = dependent_node_instance.id
     ctx.model.node_instance.update(dependent_node_instance)
     heal_graph = task.WorkflowTask(heal, ctx=ctx, node_instance_id=dependent_node_instance.id)
 
@@ -63,7 +63,7 @@ def test_heal_dependent_node(ctx):
 def test_heal_dependency_node(ctx):
     dependency_node_instance = \
         ctx.model.node_instance.get_by_name(mock.models.DEPENDENCY_NODE_INSTANCE_NAME)
-    dependency_node_instance.host_id = dependency_node_instance.id
+    dependency_node_instance.host_fk = dependency_node_instance.id
     ctx.model.node_instance.update(dependency_node_instance)
     heal_graph = task.WorkflowTask(heal, ctx=ctx, node_instance_id=dependency_node_instance.id)
     # both subgraphs should contain un\install for both the dependent and the dependency

