Repository: incubator-ariatosca
Updated Branches:
  refs/heads/SQLAlchemy-based-models 49401998c -> b8051d439


half-baked SQL integration


Project: http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/repo
Commit: 
http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/commit/b8051d43
Tree: http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/tree/b8051d43
Diff: http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/diff/b8051d43

Branch: refs/heads/SQLAlchemy-based-models
Commit: b8051d439b1d7794b0ec341a738ecb411180b5c3
Parents: 4940199
Author: mxmrlv <[email protected]>
Authored: Tue Nov 29 18:28:33 2016 +0200
Committer: mxmrlv <[email protected]>
Committed: Tue Nov 29 18:28:33 2016 +0200

----------------------------------------------------------------------
 aria/__init__.py                                |   2 +
 aria/orchestrator/context/workflow.py           |  18 +-
 aria/orchestrator/workflows/api/task.py         |   1 +
 aria/orchestrator/workflows/builtin/heal.py     |  19 +-
 aria/orchestrator/workflows/builtin/install.py  |   6 +-
 .../orchestrator/workflows/builtin/uninstall.py |   5 +-
 .../orchestrator/workflows/builtin/workflows.py |   4 +-
 aria/orchestrator/workflows/core/task.py        |  19 +-
 aria/storage/api.py                             |   2 +-
 aria/storage/api_driver/inmemory.py             |   9 +-
 aria/storage/api_driver/sql.py                  |  53 +++++-
 aria/storage/models.py                          | 189 +++++++++++--------
 aria/storage/states.py                          |  68 -------
 aria/storage/structures.py                      |  46 ++---
 tests/mock/context.py                           |  42 ++++-
 tests/mock/models.py                            |  56 +++---
 tests/orchestrator/workflows/api/test_task.py   |  72 +++----
 .../orchestrator/workflows/builtin/__init__.py  |  38 +---
 .../workflows/builtin/test_execute_operation.py |   4 +-
 .../orchestrator/workflows/builtin/test_heal.py |   9 +-
 .../workflows/builtin/test_install.py           |   9 +-
 .../workflows/builtin/test_uninstall.py         |   7 +-
 .../orchestrator/workflows/core/test_engine.py  |  22 +--
 tests/orchestrator/workflows/core/test_task.py  |  20 +-
 .../test_task_graph_into_exececution_graph.py   |  10 +-
 tests/storage/test_model_storage.py             |  14 +-
 26 files changed, 361 insertions(+), 383 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/b8051d43/aria/__init__.py
----------------------------------------------------------------------
diff --git a/aria/__init__.py b/aria/__init__.py
index 45e74df..64be82e 100644
--- a/aria/__init__.py
+++ b/aria/__init__.py
@@ -67,7 +67,9 @@ def application_model_storage(api, api_params=None):
         _model_storage[api] = storage.ModelStorage(
             api, api_params=api_params or {}, items=[
                 storage.models.Node,
+                storage.models.Relationship,
                 storage.models.NodeInstance,
+                storage.models.RelationshipInstance,
                 storage.models.Plugin,
                 storage.models.Blueprint,
                 storage.models.Snapshot,

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/b8051d43/aria/orchestrator/context/workflow.py
----------------------------------------------------------------------
diff --git a/aria/orchestrator/context/workflow.py 
b/aria/orchestrator/context/workflow.py
index 280390c..df72fa0 100644
--- a/aria/orchestrator/context/workflow.py
+++ b/aria/orchestrator/context/workflow.py
@@ -19,6 +19,7 @@ Workflow and operation contexts
 
 import threading
 from contextlib import contextmanager
+from datetime import datetime
 
 from aria import storage
 
@@ -49,13 +50,14 @@ class WorkflowContext(BaseContext):
 
     def _create_execution(self):
         execution_cls = self.model.execution.model_cls
+        now = datetime.utcnow()
         execution = self.model.execution.model_cls(
             id=self._execution_id,
-            deployment_id=self.deployment.id,
             workflow_id=self._workflow_id,
-            blueprint_id=self.blueprint.id,
+            created_at=now,
             status=execution_cls.PENDING,
             parameters=self.parameters,
+            deployment_storage_id=self.deployment.storage_id
         )
         self.model.execution.store(execution)
 
@@ -64,14 +66,22 @@ class WorkflowContext(BaseContext):
         """
         Iterator over nodes
         """
-        return self.model.node.iter(filters={'blueprint_id': 
self.blueprint.id})
+        return self.model.node.iter(
+            filters={
+                'deployment_storage_id': self.deployment.storage_id
+            }
+        )
 
     @property
     def node_instances(self):
         """
         Iterator over node instances
         """
-        return self.model.node_instance.iter(filters={'deployment_id': 
self.deployment.id})
+        return self.model.node_instance.iter(
+            filters={
+                'deployment_storage_id': self.deployment.storage_id
+            }
+        )
 
 
 class _CurrentContext(threading.local):

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/b8051d43/aria/orchestrator/workflows/api/task.py
----------------------------------------------------------------------
diff --git a/aria/orchestrator/workflows/api/task.py 
b/aria/orchestrator/workflows/api/task.py
index 4d36725..e95ff6d 100644
--- a/aria/orchestrator/workflows/api/task.py
+++ b/aria/orchestrator/workflows/api/task.py
@@ -19,6 +19,7 @@ Provides the tasks to be entered into the task graph
 from uuid import uuid4
 
 import aria
+from aria import storage
 
 from ... import context
 from .. import exceptions

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/b8051d43/aria/orchestrator/workflows/builtin/heal.py
----------------------------------------------------------------------
diff --git a/aria/orchestrator/workflows/builtin/heal.py 
b/aria/orchestrator/workflows/builtin/heal.py
index dbfc14e..5eb75db 100644
--- a/aria/orchestrator/workflows/builtin/heal.py
+++ b/aria/orchestrator/workflows/builtin/heal.py
@@ -84,16 +84,17 @@ def heal_uninstall(ctx, graph, failing_node_instances, 
targeted_node_instances):
     # create dependencies between the node instance sub workflow
     for node_instance in failing_node_instances:
         node_instance_sub_workflow = 
node_instance_sub_workflows[node_instance.id]
-        for relationship_instance in 
reversed(node_instance.relationship_instances):
-            
graph.add_dependency(node_instance_sub_workflows[relationship_instance.target_id],
+        for relationship_instance in 
reversed(node_instance.relationship_instance_source):
+            
graph.add_dependency(node_instance_sub_workflows[relationship_instance.target_node_instance.id],
                                  node_instance_sub_workflow)
 
     # Add operations for intact nodes depending on a node instance belonging 
to node_instances
     for node_instance in targeted_node_instances:
         node_instance_sub_workflow = 
node_instance_sub_workflows[node_instance.id]
 
-        for relationship_instance in 
reversed(node_instance.relationship_instances):
-            target_node_instance = 
ctx.model.node_instance.get(relationship_instance.target_id)
+        for relationship_instance in 
reversed(node_instance.relationship_instance_source):
+
+            target_node_instance = 
ctx.model.node_instance.get(relationship_instance.target_node_instance.id)
             target_node_instance_subgraph = 
node_instance_sub_workflows[target_node_instance.id]
             graph.add_dependency(target_node_instance_subgraph, 
node_instance_sub_workflow)
 
@@ -134,9 +135,9 @@ def heal_install(ctx, graph, failing_node_instances, 
targeted_node_instances):
     # create dependencies between the node instance sub workflow
     for node_instance in failing_node_instances:
         node_instance_sub_workflow = 
node_instance_sub_workflows[node_instance.id]
-        if node_instance.relationship_instances:
-            dependencies = 
[node_instance_sub_workflows[relationship_instance.target_id]
-                            for relationship_instance in 
node_instance.relationship_instances]
+        if node_instance.relationship_instance_source:
+            dependencies = 
[node_instance_sub_workflows[relationship_instance.target_node_instance.id]
+                            for relationship_instance in 
node_instance.relationship_instance_source]
             graph.add_dependency(node_instance_sub_workflow, dependencies)
 
     # Add operations for intact nodes depending on a node instance
@@ -144,8 +145,8 @@ def heal_install(ctx, graph, failing_node_instances, 
targeted_node_instances):
     for node_instance in targeted_node_instances:
         node_instance_sub_workflow = 
node_instance_sub_workflows[node_instance.id]
 
-        for relationship_instance in node_instance.relationship_instances:
-            target_node_instance = 
ctx.model.node_instance.get(relationship_instance.target_id)
+        for relationship_instance in 
node_instance.relationship_instance_source:
+            target_node_instance = 
ctx.model.node_instance.get(relationship_instance.target_node_instance.id)
             target_node_instance_subworkflow = 
node_instance_sub_workflows[target_node_instance.id]
             graph.add_dependency(node_instance_sub_workflow, 
target_node_instance_subworkflow)
 

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/b8051d43/aria/orchestrator/workflows/builtin/install.py
----------------------------------------------------------------------
diff --git a/aria/orchestrator/workflows/builtin/install.py 
b/aria/orchestrator/workflows/builtin/install.py
index 0ab3ad6..7291d52 100644
--- a/aria/orchestrator/workflows/builtin/install.py
+++ b/aria/orchestrator/workflows/builtin/install.py
@@ -47,7 +47,7 @@ def install(ctx, graph, node_instances=(), 
node_instance_sub_workflows=None):
     # create dependencies between the node instance sub workflow
     for node_instance in node_instances:
         node_instance_sub_workflow = 
node_instance_sub_workflows[node_instance.id]
-        if node_instance.relationship_instances:
-            dependencies = 
[node_instance_sub_workflows[relationship_instance.target_id]
-                            for relationship_instance in 
node_instance.relationship_instances]
+        if node_instance.relationship_instance_source:
+            dependencies = 
[node_instance_sub_workflows[relationship_instance.target_node_instance.id]
+                            for relationship_instance in 
node_instance.relationship_instance_source]
             graph.add_dependency(node_instance_sub_workflow, dependencies)

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/b8051d43/aria/orchestrator/workflows/builtin/uninstall.py
----------------------------------------------------------------------
diff --git a/aria/orchestrator/workflows/builtin/uninstall.py 
b/aria/orchestrator/workflows/builtin/uninstall.py
index f4e965c..80fdc4e 100644
--- a/aria/orchestrator/workflows/builtin/uninstall.py
+++ b/aria/orchestrator/workflows/builtin/uninstall.py
@@ -47,6 +47,7 @@ def uninstall(ctx, graph, node_instances=(), 
node_instance_sub_workflows=None):
     # create dependencies between the node instance sub workflow
     for node_instance in node_instances:
         node_instance_sub_workflow = 
node_instance_sub_workflows[node_instance.id]
-        for relationship_instance in 
reversed(node_instance.relationship_instances):
-            
graph.add_dependency(node_instance_sub_workflows[relationship_instance.target_id],
+        for relationship_instance in 
reversed(node_instance.relationship_instance_source):
+            target_id = relationship_instance.target_node_instance.id
+            graph.add_dependency(node_instance_sub_workflows[target_id],
                                  node_instance_sub_workflow)

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/b8051d43/aria/orchestrator/workflows/builtin/workflows.py
----------------------------------------------------------------------
diff --git a/aria/orchestrator/workflows/builtin/workflows.py 
b/aria/orchestrator/workflows/builtin/workflows.py
index 0eb8c34..02bfaf1 100644
--- a/aria/orchestrator/workflows/builtin/workflows.py
+++ b/aria/orchestrator/workflows/builtin/workflows.py
@@ -179,8 +179,8 @@ def relationships_tasks(graph, operation_name, 
node_instance):
     :return:
     """
     relationships_groups = groupby(
-        node_instance.relationship_instances,
-        key=lambda relationship_instance: 
relationship_instance.relationship.target_id)
+        node_instance.relationship_instance_source,
+        key=lambda relationship_instance: 
relationship_instance.target_node_instance.id)
 
     sub_tasks = []
     for _, (_, relationship_group) in enumerate(relationships_groups):

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/b8051d43/aria/orchestrator/workflows/core/task.py
----------------------------------------------------------------------
diff --git a/aria/orchestrator/workflows/core/task.py 
b/aria/orchestrator/workflows/core/task.py
index a583cfc..2f3a46a 100644
--- a/aria/orchestrator/workflows/core/task.py
+++ b/aria/orchestrator/workflows/core/task.py
@@ -107,6 +107,15 @@ class OperationTask(BaseTask):
         super(OperationTask, self).__init__(id=api_task.id, **kwargs)
         self._workflow_context = api_task._workflow_context
         task_model = api_task._workflow_context.model.task.model_cls
+
+        if isinstance(api_task.actor, models.NodeInstance):
+            context_class = operation_context.NodeOperationContext
+        elif isinstance(api_task.actor, models.RelationshipInstance):
+            context_class = operation_context.RelationshipOperationContext
+        else:
+            raise RuntimeError('No operation context could be created for {0}'
+                               .format(api_task.actor.model_cls))
+
         operation_task = task_model(
             id=api_task.id,
             name=api_task.name,
@@ -117,17 +126,9 @@ class OperationTask(BaseTask):
             execution_id=self._workflow_context._execution_id,
             max_attempts=api_task.max_attempts,
             retry_interval=api_task.retry_interval,
-            ignore_failure=api_task.ignore_failure
+            ignore_failure=api_task.ignore_failure,
         )
 
-        if isinstance(api_task.actor, models.NodeInstance):
-            context_class = operation_context.NodeOperationContext
-        elif isinstance(api_task.actor, models.RelationshipInstance):
-            context_class = operation_context.RelationshipOperationContext
-        else:
-            raise RuntimeError('No operation context could be created for {0}'
-                               .format(api_task.actor.model_cls))
-
         self._ctx = context_class(name=api_task.name,
                                   workflow_context=self._workflow_context,
                                   task=operation_task)

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/b8051d43/aria/storage/api.py
----------------------------------------------------------------------
diff --git a/aria/storage/api.py b/aria/storage/api.py
index 8fee34a..c313459 100644
--- a/aria/storage/api.py
+++ b/aria/storage/api.py
@@ -67,7 +67,7 @@ class ModelAPI(StorageAPI):
     def iter(self, **kwargs):
         raise NotImplementedError('Subclass must implement abstract iter 
method')
 
-    def update(self, entry_id, **kwargs):
+    def update(self, entry, **kwargs):
         raise NotImplementedError('Subclass must implement abstract update 
method')
 
 

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/b8051d43/aria/storage/api_driver/inmemory.py
----------------------------------------------------------------------
diff --git a/aria/storage/api_driver/inmemory.py 
b/aria/storage/api_driver/inmemory.py
index 1bd438d..fb27673 100644
--- a/aria/storage/api_driver/inmemory.py
+++ b/aria/storage/api_driver/inmemory.py
@@ -45,14 +45,15 @@ class InMemoryModelAPI(api.ModelAPI):
             storage[self.name] = {}
             self._setup_pointers_mapping()
 
-    def _setup_pointers_mapping(self,):
+    def _setup_pointers_mapping(self):
         for field_name, field_cls in vars(self.model_cls).items():
-            if not(isinstance(field_cls, db.RelationshipProperty) and 
field_cls.type):
+            if not (getattr(field_cls, 'impl', None) is not None and
+                    isinstance(field_cls.impl.parent_token, 
db.RelationshipProperty)):
                 continue
             pointer_key = _Pointer(field_name, is_iter=False)
             self.pointer_mapping[pointer_key] = self.__class__(
-                name=api.generate_lower_name(field_cls.type),
-                model_cls=field_cls.type)
+                name=api.generate_lower_name(field_cls.class_),
+                model_cls=field_cls.class_)
 
     def get(self, entry_id, **kwargs):
         """

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/b8051d43/aria/storage/api_driver/sql.py
----------------------------------------------------------------------
diff --git a/aria/storage/api_driver/sql.py b/aria/storage/api_driver/sql.py
index ef11eb1..4cec761 100644
--- a/aria/storage/api_driver/sql.py
+++ b/aria/storage/api_driver/sql.py
@@ -25,16 +25,34 @@ except ImportError:
     sql_errors = (SQLAlchemyError, SQLiteDBError)
     Psycopg2DBError = None
 
+DEFAULT_SQL_DIALECT = 'sqlite'
+
 from ... import storage
 
 
 class SQLAlchemyModelAPI(storage.api.ModelAPI):
 
+    def __init__(self,
+                 app,
+                 sql_dialect=DEFAULT_SQL_DIALECT,
+                 username='',
+                 password='',
+                 host='localhost',
+                 db_name='',
+                 **kwargs):
+        super(SQLAlchemyModelAPI, self).__init__(**kwargs)
+        self._app = app
+        self._sql_dialect = sql_dialect
+        self._username = username
+        self._password = password
+        self._host = host
+        self._db_name = db_name
+
     def get(self, entry_id, include=None, filters=None, locking=False):
         """Return a single result based on the model class and element ID
         """
         filters = filters or {'id': entry_id}
-        query = self._get_query(self.model_cls, include, filters)
+        query = self._get_query(include, filters)
         if locking:
             query = query.with_for_update()
         result = query.first()
@@ -46,7 +64,7 @@ class SQLAlchemyModelAPI(storage.api.ModelAPI):
             )
         return result
 
-    def list(self,
+    def iter(self,
              include=None,
              filters=None,
              pagination=None,
@@ -55,9 +73,10 @@ class SQLAlchemyModelAPI(storage.api.ModelAPI):
         """
         query = self._get_query(include, filters, sort)
 
-        results, total, size, offset = self._paginate(query, pagination)
+        results, _, _, _ = self._paginate(query, pagination)
 
-        return iter(results)
+        for result in results:
+            yield result
 
     def store(self, entry, **kwargs):
         """Create a `model_class` instance from a serializable `model` object
@@ -92,15 +111,15 @@ class SQLAlchemyModelAPI(storage.api.ModelAPI):
         return instance
 
     # TODO: this might need rework
-    def update(self, entry_id, **kwargs):
+    def update(self, entry, **kwargs):
         """Add `instance` to the DB session, and attempt to commit
 
         :param entry_id: Instance to be updated in the DB
         :return: The updated instance
         """
-        storage.db.session.add(entry_id)
+        storage.db.session.add(entry)
         self._safe_commit()
-        return entry_id
+        return entry
 
     def refresh(self, entry):
         """Reload the instance with fresh information from the DB
@@ -119,8 +138,20 @@ class SQLAlchemyModelAPI(storage.api.ModelAPI):
         pass
 
     def create(self):
-        # TODO: This should be reworked
-        storage.db.create_all()
+        if self._sql_dialect == DEFAULT_SQL_DIALECT:
+            sql_uri = 'sqlite://'
+        else:
+            sql_uri = '{0}://{1}:{2}@{3}/{4}'.format(self._sql_dialect,
+                                                     self._username,
+                                                     self._password,
+                                                     self._host,
+                                                     self._db_name)
+        if self._app.config.get('SQLALCHEMY_DATABASE_URI', None) is None:
+            self._app.config['SQLALCHEMY_DATABASE_URI'] = sql_uri
+        with self._app.app_context():
+            storage.structures.db.app = self._app
+            storage.structures.db.init_app(self._app)
+            storage.db.create_all()
 
     @staticmethod
     def _safe_commit():
@@ -294,6 +325,7 @@ class SQLAlchemyModelAPI(storage.api.ModelAPI):
         else:
             return getattr(self.model_cls, column_name)
 
+    # TODO is this really needed in aria?
     @staticmethod
     def _paginate(query, pagination):
         """Paginate the query by size and offset
@@ -325,6 +357,9 @@ class SQLAlchemyModelAPI(storage.api.ModelAPI):
             for prop in instance.join_properties:
                 getattr(instance, prop)
 
+    def clear(self):
+        storage.db.session.query(self.name).delete()
+        storage.db.session.commit()
 
 class ListResult(object):
     """

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/b8051d43/aria/storage/models.py
----------------------------------------------------------------------
diff --git a/aria/storage/models.py b/aria/storage/models.py
index e2cf317..8326190 100644
--- a/aria/storage/models.py
+++ b/aria/storage/models.py
@@ -39,10 +39,10 @@ classes:
 from datetime import datetime
 from uuid import uuid4
 
-from aria.storage import (
-    structures,
-    states,
-)
+from sqlalchemy.ext.declarative import declarative_base
+from sqlalchemy.ext.declarative import declared_attr
+
+from aria.storage import structures
 
 __all__ = (
     'Model',
@@ -80,21 +80,29 @@ class Blueprint(structures.SQLModelBase):
     storage_id = structures.db.Column(structures.db.Integer, primary_key=True, 
autoincrement=True)
     id = structures.db.Column(structures.db.Text, index=True)
 
-    created_at = structures.db.Column(structures.UTCDateTime, nullable=False, 
index=True)
+    created_at = structures.db.Column(structures.db.DateTime, nullable=False, 
index=True)
     main_file_name = structures.db.Column(structures.db.Text, nullable=False)
     plan = structures.db.Column(structures.db.PickleType, nullable=False)
-    updated_at = structures.db.Column(structures.UTCDateTime)
+    updated_at = structures.db.Column(structures.db.DateTime)
     description = structures.db.Column(structures.db.Text)
 
 
 class Snapshot(structures.SQLModelBase):
     __tablename__ = 'snapshots'
 
+    CREATED = 'created'
+    FAILED = 'failed'
+    CREATING = 'creating'
+    UPLOADED = 'uploaded'
+
+    STATES = [CREATED, FAILED, CREATING, UPLOADED]
+    END_STATES = [CREATED, FAILED, UPLOADED]
+
     storage_id = structures.db.Column(structures.db.Integer, primary_key=True, 
autoincrement=True)
     id = structures.db.Column(structures.db.Text, index=True)
 
-    created_at = structures.db.Column(structures.UTCDateTime, nullable=False, 
index=True)
-    status = 
structures.db.Column(structures.db.Enum(*states.SnapshotState.STATES, 
name='snapshot_status'))
+    created_at = structures.db.Column(structures.db.DateTime, nullable=False, 
index=True)
+    status = structures.db.Column(structures.db.Enum(*STATES, 
name='snapshot_status'))
     error = structures.db.Column(structures.db.Text)
 
 
@@ -116,7 +124,7 @@ class Deployment(structures.SQLModelBase):
     storage_id = structures.db.Column(structures.db.Integer, primary_key=True, 
autoincrement=True)
     id = structures.db.Column(structures.db.Text, index=True)
 
-    created_at = structures.db.Column(structures.UTCDateTime, nullable=False, 
index=True)
+    created_at = structures.db.Column(structures.db.DateTime, nullable=False, 
index=True)
     description = structures.db.Column(structures.db.Text)
     inputs = structures.db.Column(structures.db.PickleType)
     groups = structures.db.Column(structures.db.PickleType)
@@ -125,7 +133,7 @@ class Deployment(structures.SQLModelBase):
     policy_types = structures.db.Column(structures.db.PickleType)
     outputs = structures.db.Column(structures.db.PickleType(comparator=lambda 
*a: False))
     scaling_groups = structures.db.Column(structures.db.PickleType)
-    updated_at = structures.db.Column(structures.UTCDateTime)
+    updated_at = structures.db.Column(structures.db.DateTime)
     workflows = 
structures.db.Column(structures.db.PickleType(comparator=lambda *a: False))
 
     blueprint_storage_id = structures.foreign_key(Blueprint)
@@ -144,6 +152,19 @@ class Deployment(structures.SQLModelBase):
 class Execution(structures.SQLModelBase):
     __tablename__ = 'executions'
 
+    TERMINATED = 'terminated'
+    FAILED = 'failed'
+    CANCELLED = 'cancelled'
+    PENDING = 'pending'
+    STARTED = 'started'
+    CANCELLING = 'cancelling'
+    FORCE_CANCELLING = 'force_cancelling'
+
+    STATES = [TERMINATED, FAILED, CANCELLED, PENDING, STARTED,
+              CANCELLING, FORCE_CANCELLING]
+    END_STATES = [TERMINATED, FAILED, CANCELLED]
+    ACTIVE_STATES = [state for state in STATES if state not in END_STATES]
+
     # See base class for an explanation on these properties
     join_properties = {
         'blueprint_id': {
@@ -162,13 +183,13 @@ class Execution(structures.SQLModelBase):
     storage_id = structures.db.Column(structures.db.Integer, primary_key=True, 
autoincrement=True)
     id = structures.db.Column(structures.db.Text, index=True)
 
-    created_at = structures.db.Column(structures.UTCDateTime, nullable=False, 
index=True)
-    error = structures.db.Column(structures.db.Text)
-    is_system_workflow = structures.db.Column(structures.db.Boolean, 
nullable=False)
+    created_at = structures.db.Column(structures.db.DateTime, index=True)
+    started_at = structures.db.Column(structures.db.DateTime, nullable=True, 
index=True)
+    ended_at = structures.db.Column(structures.db.DateTime, nullable=True, 
index=True)
+    error = structures.db.Column(structures.db.Text, default='')
+    is_system_workflow = structures.db.Column(structures.db.Boolean, 
nullable=False, default=False)
     parameters = structures.db.Column(structures.db.PickleType)
-    status = structures.db.Column(
-        structures.db.Enum(*states.ExecutionState.STATES, 
name='execution_status')
-    )
+    status = structures.db.Column(structures.db.Enum(*STATES, 
name='execution_status'))
     workflow_id = structures.db.Column(structures.db.Text, nullable=False)
 
     deployment_storage_id = structures.foreign_key(Deployment, nullable=True)
@@ -218,7 +239,7 @@ class DeploymentUpdate(structures.SQLModelBase):
     storage_id = structures.db.Column(structures.db.Integer, primary_key=True, 
autoincrement=True)
     id = structures.db.Column(structures.db.Text, index=True)
 
-    created_at = structures.db.Column(structures.UTCDateTime, nullable=False, 
index=True)
+    created_at = structures.db.Column(structures.db.DateTime, nullable=False, 
index=True)
     deployment_plan = structures.db.Column(structures.db.PickleType)
     deployment_update_node_instances = 
structures.db.Column(structures.db.PickleType)
     deployment_update_deployment = 
structures.db.Column(structures.db.PickleType)
@@ -286,10 +307,6 @@ class DeploymentUpdateStep(structures.SQLModelBase):
     )
 
     @property
-    def tenant(self):
-        return self.deployment_update.tenant
-
-    @property
     def deployment_update_id(self):
         return self.deployment_update.id
 
@@ -297,6 +314,13 @@ class DeploymentUpdateStep(structures.SQLModelBase):
 class DeploymentModification(structures.SQLModelBase):
     __tablename__ = 'deployment_modifications'
 
+    STARTED = 'started'
+    FINISHED = 'finished'
+    ROLLEDBACK = 'rolledback'
+
+    STATES = [STARTED, FINISHED, ROLLEDBACK]
+    END_STATES = [FINISHED, ROLLEDBACK]
+
     # See base class for an explanation on these properties
     join_properties = {
         'deployment_id': {
@@ -312,14 +336,12 @@ class DeploymentModification(structures.SQLModelBase):
     id = structures.db.Column(structures.db.Text, index=True)
 
     context = structures.db.Column(structures.db.PickleType)
-    created_at = structures.db.Column(structures.UTCDateTime, nullable=False, 
index=True)
-    ended_at = structures.db.Column(structures.UTCDateTime, index=True)
+    created_at = structures.db.Column(structures.db.DateTime, nullable=False, 
index=True)
+    ended_at = structures.db.Column(structures.db.DateTime, index=True)
     modified_nodes = structures.db.Column(structures.db.PickleType)
     node_instances = structures.db.Column(structures.db.PickleType)
-    status = structures.db.Column(structures.db.Enum(
-        *states.DeploymentModificationState.STATES,
-        name='deployment_modification_status'
-    ))
+    status = structures.db.Column(
+        structures.db.Enum(*STATES, name='deployment_modification_status'))
 
     deployment_storage_id = structures.foreign_key(Deployment)
     deployment = structures.one_to_many_relationship(
@@ -330,10 +352,6 @@ class DeploymentModification(structures.SQLModelBase):
     )
 
     @property
-    def tenant(self):
-        return self.deployment.tenant
-
-    @property
     def deployment_id(self):
         return self.deployment.id
 
@@ -385,10 +403,6 @@ class Node(structures.SQLModelBase):
     )
 
     @property
-    def tenant(self):
-        return self.deployment.tenant
-
-    @property
     def deployment_id(self):
         return self.deployment.id
 
@@ -458,7 +472,7 @@ class NodeInstance(structures.SQLModelBase):
     }
     join_order = 4
 
-    _private_fields = ['node_storage_id']
+    _private_fields = ['node_storage_id', 'deployment_storage_id']
 
     storage_id = structures.db.Column(structures.db.Integer, primary_key=True, 
autoincrement=True)
     id = structures.db.Column(structures.db.Text, index=True)
@@ -480,16 +494,16 @@ class NodeInstance(structures.SQLModelBase):
     )
 
     @property
-    def tenant(self):
-        return self.deployment.tenant
-
-    @property
     def node_id(self):
         return self.node.id
 
-    @property
-    def deployment_id(self):
-        return self.node.deployment_id
+    deployment_storage_id = structures.foreign_key(Deployment)
+    deployment = structures.one_to_many_relationship(
+        child_class_name='NodeInstance',
+        column_name='deployment_storage_id',
+        parent_class_name='Deployment',
+        back_reference_name='node_instances'
+    )
 
 
 class RelationshipInstance(structures.SQLModelBase):
@@ -516,17 +530,17 @@ class RelationshipInstance(structures.SQLModelBase):
 
     type = structures.db.Column(structures.db.String)
 
-    source_node_instance_id = structures.foreign_key(NodeInstance)
+    source_node_instance_storage_id = structures.foreign_key(NodeInstance)
     source_node_instance = structures.one_to_many_relationship(
         child_class_name='RelationshipInstance',
-        column_name='source_node_instance_id',
+        column_name='source_node_instance_storage_id',
         parent_class_name='NodeInstance',
         back_reference_name='relationship_instance_source'
     )
-    target_node_instance_id = structures.foreign_key(NodeInstance)
+    target_node_instance_storage_id = structures.foreign_key(NodeInstance)
     target_node_instance = structures.one_to_many_relationship(
         child_class_name='RelationshipInstance',
-        column_name='target_node_instance_id',
+        column_name='target_node_instance_storage_id',
         parent_class_name='NodeInstance',
         back_reference_name='relationship_instance_target'
     )
@@ -563,7 +577,7 @@ class Plugin(structures.SQLModelBase):
     package_version = structures.db.Column(structures.db.Text)
     supported_platform = structures.db.Column(structures.db.PickleType)
     supported_py_versions = structures.db.Column(structures.db.PickleType)
-    uploaded_at = structures.db.Column(structures.UTCDateTime, nullable=False, 
index=True)
+    uploaded_at = structures.db.Column(structures.db.DateTime, nullable=False, 
index=True)
     wheels = structures.db.Column(structures.db.PickleType, nullable=False)
 
 
@@ -571,8 +585,31 @@ class Task(structures.SQLModelBase):
     """
     A Model which represents a task
     """
+
     __tablename__ = 'task'
 
+    _private_fields = ['node_instance_storage_id',
+                       'relationship_instance_storage_id']
+
+
+    PENDING = 'pending'
+    RETRYING = 'retrying'
+    SENT = 'sent'
+    STARTED = 'started'
+    SUCCESS = 'success'
+    FAILED = 'failed'
+    STATES = (
+        PENDING,
+        RETRYING,
+        SENT,
+        STARTED,
+        SUCCESS,
+        FAILED,
+    )
+
+    WAIT_STATES = [PENDING, RETRYING]
+    END_STATES = [SUCCESS, FAILED]
+
     class _Validation(object):
 
         @staticmethod
@@ -585,14 +622,14 @@ class Task(structures.SQLModelBase):
     INFINITE_RETRIES = -1
 
     id = structures.db.Column(structures.db.String, primary_key=True, 
default=uuid_generator)
-    status = structures.db.Column(structures.db.Enum(*states.TaskState.STATES),
-                                  name='task_status',
-                                  default=states.TaskState.PENDING)
+    status = structures.db.Column(structures.db.Enum(*STATES),
+                                  name='status',
+                                  default=PENDING)
 
     execution_id = structures.db.Column(structures.db.String)
-    due_at = structures.db.Column(structures.UTCDateTime, 
default=datetime.utcnow)
-    started_at = structures.db.Column(structures.UTCDateTime, default=None)
-    ended_at = structures.db.Column(structures.UTCDateTime, default=None)
+    due_at = structures.db.Column(structures.db.DateTime, 
default=datetime.utcnow, nullable=True)
+    started_at = structures.db.Column(structures.db.DateTime, default=None, 
nullable=True)
+    ended_at = structures.db.Column(structures.db.DateTime, default=None, 
nullable=True)
     max_attempts = structures.db.Column(structures.db.Integer, default=1) # , 
validation_func=_Validation.validate_max_attempts)
     retry_count = structures.db.Column(structures.db.Integer, default=0)
     retry_interval = structures.db.Column(structures.db.Float, default=0)
@@ -601,32 +638,36 @@ class Task(structures.SQLModelBase):
     # Operation specific fields
     name = structures.db.Column(structures.db.String)
     operation_mapping = structures.db.Column(structures.db.String)
-    inputs = structures.db.column(structures.db.PickleType(comparator=lambda 
*a: False))
+    inputs = structures.db.Column(structures.db.PickleType(comparator=lambda 
*a: False))
 
+    node_instance_storage_id = structures.foreign_key(NodeInstance, 
nullable=True)
+    relationship_instance_storage_id = 
structures.foreign_key(RelationshipInstance, nullable=True)
 
-class NodeInstanceTask(Task):
-    id = structures.db.Column(structures.db.Integer,
-                              structures.db.ForeignKey('task.id'),
-                              primary_key=True)
-
-    actor_storage_id = structures.foreign_key(NodeInstance)
-    actor = structures.one_to_many_relationship(
-        child_class_name='NodeInstanceTask',
-        column_name='actor_storage_id',
+    node_instance = structures.one_to_many_relationship(
+        child_class_name='Task',
+        column_name='node_instance_storage_id',
         parent_class_name='NodeInstance',
         back_reference_name='node_tasks',
     )
 
-
-class RelationshipInstanceTask(Task):
-    id = structures.db.Column(structures.db.Integer,
-                              structures.db.ForeignKey('task.id'),
-                              primary_key=True)
-
-    actor_storage_id = structures.foreign_key(RelationshipInstance)
-    actor = structures.one_to_many_relationship(
-        child_class_name='RelationshipInstanceTask',
-        column_name='actor_storage_id',
+    relationship_instance = structures.one_to_many_relationship(
+        child_class_name='Task',
+        column_name='relationship_instance_storage_id',
         parent_class_name='RelationshipInstance',
         back_reference_name='relationship_tasks',
     )
+
+    @property
+    def actor_storage_id(self):
+        return self.node_instance_storage_id or 
self.relationship_instance_storage_id
+
+    @property
+    def actor(self):
+        return self.node_instance or self.relationship_instance
+
+    def __init__(self, actor, **kwargs):
+        if isinstance(actor, RelationshipInstance):
+            kwargs['relationship_instance_storage_id'] = actor.storage_id
+        elif isinstance(actor, NodeInstance):
+            kwargs['node_instance_storage_id'] = actor.storage_id
+        super(Task, self).__init__(**kwargs)
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/b8051d43/aria/storage/states.py
----------------------------------------------------------------------
diff --git a/aria/storage/states.py b/aria/storage/states.py
deleted file mode 100644
index b3f6828..0000000
--- a/aria/storage/states.py
+++ /dev/null
@@ -1,68 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements.  See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License.  You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-
-class DeploymentModificationState(object):
-    STARTED = 'started'
-    FINISHED = 'finished'
-    ROLLEDBACK = 'rolledback'
-
-    STATES = [STARTED, FINISHED, ROLLEDBACK]
-    END_STATES = [FINISHED, ROLLEDBACK]
-
-
-class SnapshotState(object):
-    CREATED = 'created'
-    FAILED = 'failed'
-    CREATING = 'creating'
-    UPLOADED = 'uploaded'
-
-    STATES = [CREATED, FAILED, CREATING, UPLOADED]
-    END_STATES = [CREATED, FAILED, UPLOADED]
-
-
-class ExecutionState(object):
-    TERMINATED = 'terminated'
-    FAILED = 'failed'
-    CANCELLED = 'cancelled'
-    PENDING = 'pending'
-    STARTED = 'started'
-    CANCELLING = 'cancelling'
-    FORCE_CANCELLING = 'force_cancelling'
-
-    STATES = [TERMINATED, FAILED, CANCELLED, PENDING, STARTED,
-              CANCELLING, FORCE_CANCELLING]
-    END_STATES = [TERMINATED, FAILED, CANCELLED]
-    ACTIVE_STATES = [state for state in STATES if state not in END_STATES]
-
-
-class TaskState(object):
-    PENDING = 'pending'
-    RETRYING = 'retrying'
-    SENT = 'sent'
-    STARTED = 'started'
-    SUCCESS = 'success'
-    FAILED = 'failed'
-    STATES = (
-        PENDING,
-        RETRYING,
-        SENT,
-        STARTED,
-        SUCCESS,
-        FAILED,
-    )
-
-    WAIT_STATES = [PENDING, RETRYING]
-    END_STATES = [SUCCESS, FAILED]

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/b8051d43/aria/storage/structures.py
----------------------------------------------------------------------
diff --git a/aria/storage/structures.py b/aria/storage/structures.py
index 11df33f..2427721 100644
--- a/aria/storage/structures.py
+++ b/aria/storage/structures.py
@@ -129,27 +129,27 @@ def many_to_many_relationship(
     )
 
 
-class UTCDateTime(db.TypeDecorator):
-
-    impl = db.DateTime
-
-    def process_result_value(self, value, engine):
-        # Adhering to the same norms used in the rest of the code
-        if value is not None:
-            # When the date has a microsecond value equal to 0,
-            # isoformat returns the time as 17:22:11 instead of
-            # 17:22:11.000, so we need to adjust the returned value
-            if value.microsecond:
-                return '{0}Z'.format(value.isoformat()[:-3])
-            else:
-                return '{0}.000Z'.format(value.isoformat())
-
-    def process_bind_param(self, value, dialect):
-        if isinstance(value, basestring):
-            # SQLite only accepts datetime objects
-            return date_parser.parse(value)
-        else:
-            return value
+# class UTCDateTime(db.TypeDecorator):
+#
+#     impl = db.DateTime
+#
+#     def process_result_value(self, value, engine):
+#         # Adhering to the same norms used in the rest of the code
+#         if value is not None:
+#             # When the date has a microsecond value equal to 0,
+#             # isoformat returns the time as 17:22:11 instead of
+#             # 17:22:11.000, so we need to adjust the returned value
+#             if value.microsecond:
+#                 return '{0}Z'.format(value.isoformat()[:-3])
+#             else:
+#                 return '{0}.000Z'.format(value.isoformat())
+#
+#     def process_bind_param(self, value, dialect):
+#         if isinstance(value, basestring):
+#             # SQLite only accepts datetime objects
+#             return date_parser.parse(value)
+#         else:
+#             return value
 
 
 class SQLModelBase(db.Model):
@@ -204,5 +204,5 @@ class SQLModelBase(db.Model):
     def __unicode__(self):
         return str(self)
 
-    def __eq__(self, other):
-        return isinstance(other, self.__class__) and self.fields_dict == 
other.fields_dict
+    # def __eq__(self, other):
+    #     return isinstance(other, self.__class__) and self.fields_dict == 
other.fields_dict

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/b8051d43/tests/mock/context.py
----------------------------------------------------------------------
diff --git a/tests/mock/context.py b/tests/mock/context.py
index b74ad0c..edbe5f3 100644
--- a/tests/mock/context.py
+++ b/tests/mock/context.py
@@ -13,17 +13,53 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
+import pytest
+from flask import Flask
+
 from aria import application_model_storage
 from aria.orchestrator import context
+from aria.storage.api_driver import SQLAlchemyModelAPI
 
 from . import models
-from aria.storage.api_driver.inmemory import InMemoryModelAPI
 
 
[email protected]
 def simple(**kwargs):
-    storage = application_model_storage(InMemoryModelAPI)
+    storage = application_model_storage(SQLAlchemyModelAPI, 
api_params=dict(app=Flask('app')))
     storage.blueprint.store(models.get_blueprint())
-    storage.deployment.store(models.get_deployment())
+    blueprint = storage.blueprint.get(models.BLUEPRINT_ID)
+    deployment = models.get_deployment(blueprint)
+    storage.deployment.store(deployment)
+    
+    
#################################################################################
+    # Creating a simple deployment with node -> node as a graph
+
+    dependency_node = models.get_dependency_node(deployment)
+    storage.node.store(dependency_node)
+    storage_dependency_node = storage.node.get(dependency_node.id)
+
+    dependency_node_instance = 
models.get_dependency_node_instance(storage_dependency_node)
+    storage.node_instance.store(dependency_node_instance)
+    storage_dependency_node_instance = 
storage.node_instance.get(dependency_node_instance.id)
+
+    dependent_node = models.get_dependent_node(deployment)
+    storage.node.store(dependent_node)
+    storage_dependent_node = storage.node.get(dependent_node.id)
+
+    dependent_node_instance = 
models.get_dependent_node_instance(storage_dependent_node)
+    storage.node_instance.store(dependent_node_instance)
+    storage_dependent_node_instance = 
storage.node_instance.get(dependent_node_instance.id)
+
+    relationship = models.get_relationship(storage_dependency_node, 
storage_dependent_node)
+    storage.relationship.store(relationship)
+    storage_relationship = storage.relationship.get(relationship.id)
+    relationship_instance = models.get_relationship_instance(
+        relationship=storage_relationship,
+        target_instance=storage_dependency_node_instance,
+        source_instance=storage_dependent_node_instance
+    )
+    storage.relationship_instance.store(relationship_instance)
+
     final_kwargs = dict(
         name='simple_context',
         model_storage=storage,

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/b8051d43/tests/mock/models.py
----------------------------------------------------------------------
diff --git a/tests/mock/models.py b/tests/mock/models.py
index 585bffa..bdcbed9 100644
--- a/tests/mock/models.py
+++ b/tests/mock/models.py
@@ -30,13 +30,13 @@ DEPENDENCY_NODE_ID = 'dependency_node'
 DEPENDENCY_NODE_INSTANCE_ID = 'dependency_node_instance'
 DEPENDENT_NODE_ID = 'dependent_node'
 DEPENDENT_NODE_INSTANCE_ID = 'dependent_node_instance'
+RELATIONSHIP_ID = 'relationship'
+RELATIONSHIP_INSTANCE_ID = 'relationship_instance'
 
-
-def get_dependency_node():
+def get_dependency_node(deployment):
     return models.Node(
         id=DEPENDENCY_NODE_ID,
         host_id=DEPENDENCY_NODE_ID,
-        # blueprint_id=BLUEPRINT_ID,
         type='test_node_type',
         type_hierarchy=[],
         number_of_instances=1,
@@ -44,28 +44,30 @@ def get_dependency_node():
         deploy_number_of_instances=1,
         properties={},
         operations=dict((key, {}) for key in operations.NODE_OPERATIONS),
-        relationships=[],
         min_number_of_instances=1,
         max_number_of_instances=1,
+        deployment_storage_id=deployment.storage_id
     )
 
 
-def get_dependency_node_instance(dependency_node=None):
+def get_dependency_node_instance(dependency_node):
     return models.NodeInstance(
         id=DEPENDENCY_NODE_INSTANCE_ID,
         host_id=DEPENDENCY_NODE_INSTANCE_ID,
-        deployment_id=DEPLOYMENT_ID,
         runtime_properties={'ip': '1.1.1.1'},
         version=None,
-        # relationship_instances=[],
-        node_storage_id=dependency_node.id or DEPENDENCY_NODE_ID
+        node_storage_id=dependency_node.storage_id,
+        deployment_storage_id=dependency_node.deployment.storage_id,
+        state='',
+        scaling_groups={}
     )
 
 
 def get_relationship(source=None, target=None):
     return models.Relationship(
-        source_id=source.id if source is not None else DEPENDENT_NODE_ID,
-        target_id=target.id if target is not None else DEPENDENCY_NODE_ID,
+        id=RELATIONSHIP_ID,
+        source_node_storage_id=source.storage_id,
+        target_node_storage_id=target.storage_id,
         source_interfaces={},
         source_operations=dict((key, {}) for key in 
operations.RELATIONSHIP_OPERATIONS),
         target_interfaces={},
@@ -76,23 +78,21 @@ def get_relationship(source=None, target=None):
     )
 
 
-def get_relationship_instance(source_instance=None, target_instance=None, 
relationship=None):
+def get_relationship_instance(source_instance, target_instance, relationship):
     return models.RelationshipInstance(
-        target_id=target_instance.id if target_instance else 
DEPENDENCY_NODE_INSTANCE_ID,
-        target_name='test_target_name',
-        source_id=source_instance.id if source_instance else 
DEPENDENT_NODE_INSTANCE_ID,
-        source_name='test_source_name',
+        id=RELATIONSHIP_INSTANCE_ID,
         type='some_type',
-        relationship=relationship or get_relationship(target_instance.node
-                                                      if target_instance else 
None)
+        relationship_storage_id=relationship.storage_id,
+        target_node_instance_storage_id=target_instance.storage_id,
+        source_node_instance_storage_id=source_instance.storage_id,
     )
 
 
-def get_dependent_node(relationship=None):
+def get_dependent_node(deployment):
     return models.Node(
         id=DEPENDENT_NODE_ID,
+        deployment_storage_id=deployment.storage_id,
         host_id=DEPENDENT_NODE_ID,
-        blueprint_id=BLUEPRINT_ID,
         type='test_node_type',
         type_hierarchy=[],
         number_of_instances=1,
@@ -100,21 +100,21 @@ def get_dependent_node(relationship=None):
         deploy_number_of_instances=1,
         properties={},
         operations=dict((key, {}) for key in operations.NODE_OPERATIONS),
-        relationships=[relationship or get_relationship()],
         min_number_of_instances=1,
         max_number_of_instances=1,
     )
 
 
-def get_dependent_node_instance(relationship_instance=None, 
dependent_node=None):
+def get_dependent_node_instance(dependent_node):
     return models.NodeInstance(
         id=DEPENDENT_NODE_INSTANCE_ID,
         host_id=DEPENDENT_NODE_INSTANCE_ID,
-        deployment_id=DEPLOYMENT_ID,
         runtime_properties={},
         version=None,
-        relationship_instances=[relationship_instance or 
get_relationship_instance()],
-        node=dependent_node or get_dependency_node()
+        node_storage_id=dependent_node.storage_id,
+        deployment_storage_id=dependent_node.deployment.storage_id,
+        state='',
+        scaling_groups={}
     )
 
 
@@ -130,22 +130,22 @@ def get_blueprint():
     )
 
 
-def get_execution():
+def get_execution(deployment):
     return models.Execution(
         id=EXECUTION_ID,
+        deployment_storage_id=deployment.storage_id,
         status=models.Execution.STARTED,
-        deployment_id=DEPLOYMENT_ID,
         workflow_id=WORKFLOW_ID,
-        blueprint_id=BLUEPRINT_ID,
         started_at=datetime.utcnow(),
         parameters=None
     )
 
 
-def get_deployment():
+def get_deployment(blueprint):
     now = datetime.utcnow()
     return models.Deployment(
         id=DEPLOYMENT_ID,
+        blueprint_storage_id=blueprint.storage_id,
         description='',
         created_at=now,
         updated_at=now,

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/b8051d43/tests/orchestrator/workflows/api/test_task.py
----------------------------------------------------------------------
diff --git a/tests/orchestrator/workflows/api/test_task.py 
b/tests/orchestrator/workflows/api/test_task.py
index 8536902..6d9f28a 100644
--- a/tests/orchestrator/workflows/api/test_task.py
+++ b/tests/orchestrator/workflows/api/test_task.py
@@ -22,7 +22,7 @@ from aria.orchestrator.workflows import api
 from tests import mock
 
 
[email protected]()
[email protected]
 def ctx():
     """
     Create the following graph in storage:
@@ -30,50 +30,26 @@ def ctx():
     :return:
     """
     simple_context = mock.context.simple()
-    dependency_node = mock.models.get_dependency_node()
-    dependency_node_instance = mock.models.get_dependency_node_instance(
-        dependency_node=dependency_node)
-
-    relationship = mock.models.get_relationship(dependency_node)
-    relationship_instance = mock.models.get_relationship_instance(
-        relationship=relationship,
-        target_instance=dependency_node_instance
-    )
-
-    dependent_node = mock.models.get_dependent_node(relationship)
-    dependent_node_instance = mock.models.get_dependent_node_instance(
-        dependent_node=dependent_node,
-        relationship_instance=relationship_instance
-    )
-
-    simple_context.model.node.store(dependent_node)
-    simple_context.model.node.store(dependency_node)
-    simple_context.model.node_instance.store(dependent_node_instance)
-    simple_context.model.node_instance.store(dependency_node_instance)
-    simple_context.model.relationship.store(relationship)
-    simple_context.model.relationship_instance.store(relationship_instance)
-    simple_context.model.execution.store(mock.models.get_execution())
-    simple_context.model.deployment.store(mock.models.get_deployment())
+    
simple_context.model.execution.store(mock.models.get_execution(simple_context.deployment))
 
     return simple_context
 
 
 class TestOperationTask(object):
 
-    def test_node_operation_task_creation(self):
-        workflow_context = mock.context.simple()
-
+    def test_node_operation_task_creation(self, ctx):
         operation_name = 'aria.interfaces.lifecycle.create'
         op_details = {'operation': True}
-        node = mock.models.get_dependency_node()
+        node = ctx.model.node.get(mock.models.DEPENDENT_NODE_ID)
         node.operations[operation_name] = op_details
-        node_instance = 
mock.models.get_dependency_node_instance(dependency_node=node)
+        ctx.model.node.update(node)
+        node_instance = 
ctx.model.node_instance.get(mock.models.DEPENDENT_NODE_INSTANCE_ID)
         inputs = {'inputs': True}
         max_attempts = 10
         retry_interval = 10
         ignore_failure = True
 
-        with context.workflow.current.push(workflow_context):
+        with context.workflow.current.push(ctx):
             api_task = api.task.OperationTask.node_instance(
                 name=operation_name,
                 instance=node_instance,
@@ -90,19 +66,18 @@ class TestOperationTask(object):
         assert api_task.max_attempts == max_attempts
         assert api_task.ignore_failure == ignore_failure
 
-    def test_relationship_operation_task_creation(self):
-        workflow_context = mock.context.simple()
-
+    def test_relationship_operation_task_creation(self, ctx):
         operation_name = 'aria.interfaces.relationship_lifecycle.preconfigure'
         op_details = {'operation': True}
-        relationship = mock.models.get_relationship()
+        relationship = ctx.model.relationship.get(mock.models.RELATIONSHIP_ID)
         relationship.source_operations[operation_name] = op_details
-        relationship_instance = 
mock.models.get_relationship_instance(relationship=relationship)
+        ctx.model.relationship.update(relationship)
+        relationship_instance = 
ctx.model.relationship_instance.get(mock.models.RELATIONSHIP_INSTANCE_ID)
         inputs = {'inputs': True}
         max_attempts = 10
         retry_interval = 10
 
-        with context.workflow.current.push(workflow_context):
+        with context.workflow.current.push(ctx):
             api_task = api.task.OperationTask.relationship_instance(
                 name=operation_name,
                 instance=relationship_instance,
@@ -118,18 +93,21 @@ class TestOperationTask(object):
         assert api_task.retry_interval == retry_interval
         assert api_task.max_attempts == max_attempts
 
-    def test_operation_task_default_values(self):
-        workflow_context = mock.context.simple(task_ignore_failure=True)
-        with context.workflow.current.push(workflow_context):
-            model_task = api.task.OperationTask(
+    def test_operation_task_default_values(self, ctx):
+        dependency_node_instance = ctx.model.node_instance.get(
+            mock.models.DEPENDENCY_NODE_INSTANCE_ID)
+        with context.workflow.current.push(ctx):
+            task = api.task.OperationTask(
                 name='stub',
                 operation_mapping='',
-                actor=mock.models.get_dependency_node_instance())
-
-        assert model_task.inputs == {}
-        assert model_task.retry_interval == 
workflow_context._task_retry_interval
-        assert model_task.max_attempts == workflow_context._task_max_attempts
-        assert model_task.ignore_failure == 
workflow_context._task_ignore_failure
+                actor=dependency_node_instance,
+                model_task=None)
+
+        assert task.inputs == {}
+        assert task.retry_interval == ctx._task_retry_interval
+        assert task.max_attempts == ctx._task_max_attempts
+        assert task.ignore_failure == ctx._task_ignore_failure
+        assert task.model_task is None
 
 
 class TestWorkflowTask(object):

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/b8051d43/tests/orchestrator/workflows/builtin/__init__.py
----------------------------------------------------------------------
diff --git a/tests/orchestrator/workflows/builtin/__init__.py 
b/tests/orchestrator/workflows/builtin/__init__.py
index 5f2fa2e..4d465a0 100644
--- a/tests/orchestrator/workflows/builtin/__init__.py
+++ b/tests/orchestrator/workflows/builtin/__init__.py
@@ -13,9 +13,11 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
+import pytest
 
 from tests import mock
 
+
 def assert_node_install_operations(operations, with_relationships=False):
     if with_relationships:
         all_operations = [
@@ -53,36 +55,6 @@ def assert_node_uninstall_operations(operations, 
with_relationships=False):
             assert 
operation.name.startswith(mock.operations.NODE_OPERATIONS_UNINSTALL[i])
 
 
-def ctx_with_basic_graph():
-    """
-    Create the following graph in storage:
-    dependency_node <------ dependent_node
-    :return:
-    """
-    simple_context = mock.context.simple()
-    dependency_node = mock.models.get_dependency_node()
-    dependency_node_instance = mock.models.get_dependency_node_instance(
-        dependency_node=dependency_node)
-
-    relationship = mock.models.get_relationship(dependency_node)
-    relationship_instance = mock.models.get_relationship_instance(
-        relationship=relationship,
-        target_instance=dependency_node_instance
-    )
-
-    dependent_node = mock.models.get_dependent_node(relationship)
-    dependent_node_instance = mock.models.get_dependent_node_instance(
-        dependent_node=dependent_node,
-        relationship_instance=relationship_instance
-    )
-
-    simple_context.model.blueprint.store(blueprint)
-    simple_context.model.deployment.store(deployment)
-    simple_context.model.node.store(dependent_node)
-    simple_context.model.node.store(dependency_node)
-    simple_context.model.node_instance.store(dependent_node_instance)
-    simple_context.model.node_instance.store(dependency_node_instance)
-    simple_context.model.relationship.store(relationship)
-    simple_context.model.relationship_instance.store(relationship_instance)
-
-    return simple_context
[email protected](autouse=True)
+def teardown():
+    from aria import storage

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/b8051d43/tests/orchestrator/workflows/builtin/test_execute_operation.py
----------------------------------------------------------------------
diff --git a/tests/orchestrator/workflows/builtin/test_execute_operation.py 
b/tests/orchestrator/workflows/builtin/test_execute_operation.py
index 83e0d4d..c2639c9 100644
--- a/tests/orchestrator/workflows/builtin/test_execute_operation.py
+++ b/tests/orchestrator/workflows/builtin/test_execute_operation.py
@@ -12,19 +12,17 @@
 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 # See the License for the specific language governing permissions and
 # limitations under the License.
-
 import pytest
 
 from aria.orchestrator.workflows.api import task
 from aria.orchestrator.workflows.builtin.execute_operation import 
execute_operation
 
 from tests import mock
-from . import ctx_with_basic_graph
 
 
 @pytest.fixture
 def ctx():
-    return ctx_with_basic_graph()
+    return mock.context.simple()
 
 
 def test_execute_operation(ctx):

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/b8051d43/tests/orchestrator/workflows/builtin/test_heal.py
----------------------------------------------------------------------
diff --git a/tests/orchestrator/workflows/builtin/test_heal.py 
b/tests/orchestrator/workflows/builtin/test_heal.py
index 940194b..8478666 100644
--- a/tests/orchestrator/workflows/builtin/test_heal.py
+++ b/tests/orchestrator/workflows/builtin/test_heal.py
@@ -12,20 +12,20 @@
 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 # See the License for the specific language governing permissions and
 # limitations under the License.
-
 import pytest
 
 from aria.orchestrator.workflows.api import task
 from aria.orchestrator.workflows.builtin.heal import heal
 
+from tests import mock
+
 from . import (assert_node_install_operations,
-               assert_node_uninstall_operations,
-               ctx_with_basic_graph)
+               assert_node_uninstall_operations)
 
 
 @pytest.fixture
 def ctx():
-    return ctx_with_basic_graph()
+    return mock.context.simple()
 
 
 def test_heal_dependent_node(ctx):
@@ -54,6 +54,7 @@ def test_heal_dependent_node(ctx):
 
 
 def test_heal_dependency_node(ctx):
+
     heal_graph = task.WorkflowTask(heal, ctx=ctx, 
node_instance_id='dependency_node_instance')
     # both subgraphs should contain un\install for both the dependent and the 
dependency
     assert len(list(heal_graph.tasks)) == 2

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/b8051d43/tests/orchestrator/workflows/builtin/test_install.py
----------------------------------------------------------------------
diff --git a/tests/orchestrator/workflows/builtin/test_install.py 
b/tests/orchestrator/workflows/builtin/test_install.py
index 3b23c5a..ec305e5 100644
--- a/tests/orchestrator/workflows/builtin/test_install.py
+++ b/tests/orchestrator/workflows/builtin/test_install.py
@@ -15,16 +15,17 @@
 
 import pytest
 
-from aria.orchestrator.workflows.builtin.install import install
 from aria.orchestrator.workflows.api import task
+from aria.orchestrator.workflows.builtin.install import install
+
+from tests import mock
 
-from . import (assert_node_install_operations,
-               ctx_with_basic_graph)
+from . import assert_node_install_operations
 
 
 @pytest.fixture
 def ctx():
-    return ctx_with_basic_graph()
+    return mock.context.simple()
 
 
 def test_install(ctx):

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/b8051d43/tests/orchestrator/workflows/builtin/test_uninstall.py
----------------------------------------------------------------------
diff --git a/tests/orchestrator/workflows/builtin/test_uninstall.py 
b/tests/orchestrator/workflows/builtin/test_uninstall.py
index 889e1d2..60ca963 100644
--- a/tests/orchestrator/workflows/builtin/test_uninstall.py
+++ b/tests/orchestrator/workflows/builtin/test_uninstall.py
@@ -18,13 +18,14 @@ import pytest
 from aria.orchestrator.workflows.api import task
 from aria.orchestrator.workflows.builtin.uninstall import uninstall
 
-from . import (assert_node_uninstall_operations,
-               ctx_with_basic_graph)
+from tests import mock
+
+from . import assert_node_uninstall_operations
 
 
 @pytest.fixture
 def ctx():
-    return ctx_with_basic_graph()
+    return mock.context.simple()
 
 
 def test_uninstall(ctx):

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/b8051d43/tests/orchestrator/workflows/core/test_engine.py
----------------------------------------------------------------------
diff --git a/tests/orchestrator/workflows/core/test_engine.py 
b/tests/orchestrator/workflows/core/test_engine.py
index e543bcb..d27696b 100644
--- a/tests/orchestrator/workflows/core/test_engine.py
+++ b/tests/orchestrator/workflows/core/test_engine.py
@@ -19,7 +19,6 @@ from datetime import datetime
 
 import pytest
 
-import aria
 from aria.orchestrator import (
     events,
     workflow,
@@ -33,10 +32,7 @@ from aria.orchestrator.workflows import (
 )
 from aria.orchestrator.workflows.core import engine
 from aria.orchestrator.workflows.executor import thread
-from aria.storage.api_driver.inmemory import InMemoryModelAPI
 
-
-import tests.storage
 from tests import mock
 
 
@@ -66,11 +62,10 @@ class BaseTest(object):
             max_attempts=None,
             retry_interval=None,
             ignore_failure=None):
-        node_instance = ctx.model.node_instance.get('dependency_node_instance')
+        node_instance = 
ctx.model.node_instance.get(mock.models.DEPENDENCY_NODE_INSTANCE_ID)
         node_instance.node.operations['aria.interfaces.lifecycle.create'] = {
             'operation': '{name}.{func.__name__}'.format(name=__name__, 
func=func)
         }
-        ctx.model.node_instance.store(node_instance)
         return api.task.OperationTask.node_instance(
             instance=node_instance,
             name='aria.interfaces.lifecycle.create',
@@ -130,21 +125,12 @@ class BaseTest(object):
 
     @pytest.fixture(scope='function')
     def workflow_context(self):
-        model_storage = aria.application_model_storage(InMemoryModelAPI())
-        model_storage.setup()
-        blueprint = mock.models.get_blueprint()
-        deployment = mock.models.get_deployment()
-        model_storage.blueprint.store(blueprint)
-        model_storage.deployment.store(deployment)
-        node = mock.models.get_dependency_node()
-        node_instance = mock.models.get_dependency_node_instance(node)
-        model_storage.node.store(node)
-        model_storage.node_instance.store(node_instance)
+        workflow_context = mock.context.simple()
         result = context.workflow.WorkflowContext(
             name='test',
-            model_storage=model_storage,
+            model_storage=workflow_context.model,
             resource_storage=None,
-            deployment_id=deployment.id,
+            deployment_id=workflow_context.deployment.id,
             workflow_id='name')
         result.states = []
         result.exception = None

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/b8051d43/tests/orchestrator/workflows/core/test_task.py
----------------------------------------------------------------------
diff --git a/tests/orchestrator/workflows/core/test_task.py 
b/tests/orchestrator/workflows/core/test_task.py
index 6a4c8ac..6d790f0 100644
--- a/tests/orchestrator/workflows/core/test_task.py
+++ b/tests/orchestrator/workflows/core/test_task.py
@@ -31,21 +31,7 @@ from tests import mock
 
 @pytest.fixture
 def ctx():
-    simple_context = mock.context.simple()
-
-    blueprint = mock.models.get_blueprint()
-    deployment = mock.models.get_deployment()
-    node = mock.models.get_dependency_node()
-    node_instance = mock.models.get_dependency_node_instance(node)
-    execution = mock.models.get_execution()
-
-    simple_context.model.blueprint.store(blueprint)
-    simple_context.model.deployment.store(deployment)
-    simple_context.model.node.store(node)
-    simple_context.model.node_instance.store(node_instance)
-    simple_context.model.execution.store(execution)
-
-    return simple_context
+    return mock.context.simple()
 
 
 class TestOperationTask(object):
@@ -99,7 +85,7 @@ class TestOperationTask(object):
             core_task.started_at = future_time
             core_task.ended_at = future_time
             core_task.retry_count = 2
-            core_task.eta = future_time
+            core_task.due_at = future_time
             assert core_task.status != core_task.STARTED
             assert core_task.started_at != future_time
             assert core_task.ended_at != future_time
@@ -110,4 +96,4 @@ class TestOperationTask(object):
         assert core_task.started_at == future_time
         assert core_task.ended_at == future_time
         assert core_task.retry_count == 2
-        assert core_task.eta == future_time
+        assert core_task.due_at == future_time

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/b8051d43/tests/orchestrator/workflows/core/test_task_graph_into_exececution_graph.py
----------------------------------------------------------------------
diff --git 
a/tests/orchestrator/workflows/core/test_task_graph_into_exececution_graph.py 
b/tests/orchestrator/workflows/core/test_task_graph_into_exececution_graph.py
index a179e49..5506c40 100644
--- 
a/tests/orchestrator/workflows/core/test_task_graph_into_exececution_graph.py
+++ 
b/tests/orchestrator/workflows/core/test_task_graph_into_exececution_graph.py
@@ -24,15 +24,7 @@ from tests import mock
 def test_task_graph_into_execution_graph():
     operation_name = 'aria.interfaces.lifecycle.create'
     task_context = mock.context.simple()
-    node = mock.models.get_dependency_node()
-    node_instance = mock.models.get_dependency_node_instance()
-    deployment = mock.models.get_deployment()
-    execution = mock.models.get_execution()
-    task_context.model.node.store(node)
-    task_context.model.node_instance.store(node_instance)
-    task_context.model.deployment.store(deployment)
-    task_context.model.execution.store(execution)
-
+    node_instance = 
task_context.model.node_instance.get(mock.models.DEPENDENCY_NODE_INSTANCE_ID)
     def sub_workflow(name, **_):
         return api.task_graph.TaskGraph(name)
 

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/b8051d43/tests/storage/test_model_storage.py
----------------------------------------------------------------------
diff --git a/tests/storage/test_model_storage.py 
b/tests/storage/test_model_storage.py
index dbfbc58..99bda14 100644
--- a/tests/storage/test_model_storage.py
+++ b/tests/storage/test_model_storage.py
@@ -17,12 +17,12 @@ import tempfile
 import shutil
 
 import pytest
+from flask import Flask
 
 from aria import application_model_storage
 
 from aria.storage import (
     ModelStorage,
-    ResourceStorage,
     models,
     exceptions,
     api_driver as storage_api,
@@ -32,9 +32,9 @@ from aria.storage import (
 temp_dir = tempfile.mkdtemp()
 
 APIs = [
-    (storage_api.InMemoryModelAPI, dict()),
-    (storage_api.FileSystemModelAPI, dict(directory=temp_dir)),
-    (storage_api.SQLAlchemyModelAPI, dict())
+    # (storage_api.InMemoryModelAPI, dict()),
+    # (storage_api.FileSystemModelAPI, dict(directory=temp_dir)),
+    (storage_api.SQLAlchemyModelAPI, dict(app=Flask('app')))
 ]
 
 
@@ -70,8 +70,10 @@ def test_model_storage(api, api_params):
     assert [pc_from_storage.to_dict for pc_from_storage in 
storage.provider_context.iter()] == [pc.to_dict]
     assert [pc_from_storage.to_dict for pc_from_storage in 
storage.provider_context] == [pc.to_dict]
 
-    storage.provider_context.update('id1', context={'update_key': 0})
-    assert storage.provider_context.get('id1').context == {'update_key': 0}
+    new_context = {'update_key': 0}
+    pc.context = new_context
+    storage.provider_context.update(pc)
+    assert storage.provider_context.get('id1').context == new_context
 
     storage.provider_context.delete('id1')
     with pytest.raises(exceptions.StorageError):

Reply via email to