http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/401e4752/tests/mock/models.py
----------------------------------------------------------------------
diff --git a/tests/mock/models.py b/tests/mock/models.py
index 8229038..e735230 100644
--- a/tests/mock/models.py
+++ b/tests/mock/models.py
@@ -15,8 +15,7 @@
 
 from datetime import datetime
 
-from aria.storage import model
-
+from aria.storage.modeling import model
 from . import operations
 
 DEPLOYMENT_NAME = 'test_deployment_id'
@@ -35,84 +34,78 @@ RELATIONSHIP_INSTANCE_NAME = 'relationship_instance'
 
 
 def get_dependency_node(deployment):
-    return model.Node(
+    return model.NodeTemplate(
         name=DEPENDENCY_NODE_NAME,
-        type='test_node_type',
+        type_name='test_node_type',
         type_hierarchy=[],
-        number_of_instances=1,
-        planned_number_of_instances=1,
-        deploy_number_of_instances=1,
-        properties={},
-        operations=dict((key, {}) for key in operations.NODE_OPERATIONS),
-        min_number_of_instances=1,
-        max_number_of_instances=1,
-        deployment_fk=deployment.id
+        default_instances=1,
+        min_instances=1,
+        max_instances=1,
+        service_template=deployment.service_template,
     )
 
 
-def get_dependency_node_instance(dependency_node):
-    return model.NodeInstance(
+def get_dependency_node_instance(dependency_node, deployment):
+    return model.Node(
         name=DEPENDENCY_NODE_INSTANCE_NAME,
+        service_instance=deployment,
         runtime_properties={'ip': '1.1.1.1'},
         version=None,
-        node_fk=dependency_node.id,
+        node_template=dependency_node,
         state='',
         scaling_groups=[]
     )
 
 
-def get_relationship(source=None, target=None):
-    return model.Relationship(
-        source_node_fk=source.id,
-        target_node_fk=target.id,
-        source_interfaces={},
-        source_operations=dict((key, {}) for key in operations.RELATIONSHIP_OPERATIONS),
-        target_interfaces={},
-        target_operations=dict((key, {}) for key in operations.RELATIONSHIP_OPERATIONS),
-        type='rel_type',
-        type_hierarchy=[],
-        properties={},
-    )
+def get_relationship(target):
+    requirement_template = model.RequirementTemplate(target_node_template_name=target.name)
+    capability_template = model.CapabilityTemplate()
+
+    return requirement_template, capability_template
 
 
-def get_relationship_instance(source_instance, target_instance, relationship):
-    return model.RelationshipInstance(
-        relationship_fk=relationship.id,
-        target_node_instance_fk=target_instance.id,
-        source_node_instance_fk=source_instance.id,
+def get_relationship_instance(source_instance, target_instance):
+    return model.Relationship(
+        target_node=target_instance,
+        source_node=source_instance,
     )
 
 
-def get_dependent_node(deployment):
-    return model.Node(
+def get_dependent_node(deployment, requirement_template, capability_template):
+    operation_templates = [model.OperationTemplate(implementation=op,
+                                                   service_template=deployment.service_template)
+                           for op in operations.NODE_OPERATIONS]
+    interface_template = model.InterfaceTemplate(operation_templates=operation_templates)
+
+    return model.NodeTemplate(
         name=DEPENDENT_NODE_NAME,
-        deployment_fk=deployment.id,
-        type='test_node_type',
+        type_name='test_node_type',
         type_hierarchy=[],
-        number_of_instances=1,
-        planned_number_of_instances=1,
-        deploy_number_of_instances=1,
-        properties={},
-        operations=dict((key, {}) for key in operations.NODE_OPERATIONS),
-        min_number_of_instances=1,
-        max_number_of_instances=1,
+        default_instances=1,
+        min_instances=1,
+        max_instances=1,
+        service_template=deployment.service_template,
+        interface_templates=[interface_template],
+        requirement_templates=[requirement_template],
+        capability_templates=[capability_template],
     )
 
 
-def get_dependent_node_instance(dependent_node):
-    return model.NodeInstance(
+def get_dependent_node_instance(dependent_node, deployment):
+    return model.Node(
         name=DEPENDENT_NODE_INSTANCE_NAME,
+        service_instance=deployment,
         runtime_properties={},
         version=None,
-        node_fk=dependent_node.id,
+        node_template=dependent_node,
         state='',
-        scaling_groups=[]
+        scaling_groups=[],
     )
 
 
 def get_blueprint():
     now = datetime.now()
-    return model.Blueprint(
+    return model.ServiceTemplate(
         plan={},
         name=BLUEPRINT_NAME,
         description=None,
@@ -124,7 +117,7 @@ def get_blueprint():
 
 def get_execution(deployment):
     return model.Execution(
-        deployment_fk=deployment.id,
+        service_instance=deployment,
         status=model.Execution.STARTED,
         workflow_name=WORKFLOW_NAME,
         started_at=datetime.utcnow(),
@@ -134,19 +127,16 @@ def get_execution(deployment):
 
 def get_deployment(blueprint):
     now = datetime.utcnow()
-    return model.Deployment(
+    return model.ServiceInstance(
         name=DEPLOYMENT_NAME,
-        blueprint_fk=blueprint.id,
+        service_template=blueprint,
         description='',
         created_at=now,
         updated_at=now,
         workflows={},
-        inputs={},
-        groups={},
         permalink='',
         policy_triggers={},
         policy_types={},
-        outputs={},
         scaling_groups={},
     )
 
@@ -165,3 +155,24 @@ def get_plugin(package_name='package', package_version='0.1'):
         uploaded_at=datetime.now(),
         wheels=[],
     )
+
+
+def get_interface_template(operation_name, operation_kwargs=None, interface_kwargs=None):
+    operation_template = model.OperationTemplate(
+        name=operation_name,
+        **(operation_kwargs or {})
+
+    )
+    return model.InterfaceTemplate(
+        operation_templates=[operation_template],
+        name=operation_name.rsplit('.', 1)[0],
+        **(interface_kwargs or {})
+    )
+
+
+def get_interface(operation_name,
+                  operation_kwargs=None,
+                  interface_kwargs=None):
+    operation = model.Operation(name=operation_name, **(operation_kwargs or {}))
+    interface_name = operation_name.rsplit('.', 1)[0]
+    return model.Interface(operations=[operation], name=interface_name, **(interface_kwargs or {}))
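
The two new helpers above replace the old per-node 'operations' dicts with Interface/Operation
models from aria.storage.modeling.model. A minimal usage sketch, following the call sites
updated later in this commit (the implementation string below is a placeholder, not a real
operation):

    interface = models.get_interface(
        'aria.interfaces.lifecycle.create',
        operation_kwargs=dict(implementation='some.module.create_op'))
    node.interfaces = [interface]          # replaces node.operations[op] = {...}
    model_storage.node.update(node)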

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/401e4752/tests/mock/topology.py
----------------------------------------------------------------------
diff --git a/tests/mock/topology.py b/tests/mock/topology.py
index e219c33..811dfd9 100644
--- a/tests/mock/topology.py
+++ b/tests/mock/topology.py
@@ -60,37 +60,38 @@ def create_simple_topology_single_node(model_storage, deployment_id, create_oper
 
 def create_simple_topology_two_nodes(model_storage):
     blueprint = models.get_blueprint()
-    model_storage.blueprint.put(blueprint)
+    model_storage.service_template.put(blueprint)
     deployment = models.get_deployment(blueprint)
-    model_storage.deployment.put(deployment)
+    model_storage.service_instance.put(deployment)
 
     #################################################################################
     # Creating a simple deployment with node -> node as a graph
 
     dependency_node = models.get_dependency_node(deployment)
-    model_storage.node.put(dependency_node)
-    storage_dependency_node = model_storage.node.get(dependency_node.id)
+    model_storage.node_template.put(dependency_node)
+    storage_dependency_node = model_storage.node_template.get(dependency_node.id)
+
+    dependency_node_instance = models.get_dependency_node_instance(storage_dependency_node,
+                                                                   deployment)
+    model_storage.node.put(dependency_node_instance)
+    storage_dependency_node_instance = model_storage.node.get(dependency_node_instance.id)
 
-    dependency_node_instance = models.get_dependency_node_instance(storage_dependency_node)
-    model_storage.node_instance.put(dependency_node_instance)
-    storage_dependency_node_instance = model_storage.node_instance.get(dependency_node_instance.id)
+    req_template, cap_template = models.get_relationship(storage_dependency_node)
+    model_storage.requirement_template.put(req_template)
+    model_storage.capability_template.put(cap_template)
 
-    dependent_node = models.get_dependent_node(deployment)
-    model_storage.node.put(dependent_node)
-    storage_dependent_node = model_storage.node.get(dependent_node.id)
+    dependent_node = models.get_dependent_node(deployment, req_template, cap_template)
+    model_storage.node_template.put(dependent_node)
+    storage_dependent_node = model_storage.node_template.get(dependent_node.id)
 
-    dependent_node_instance = models.get_dependent_node_instance(storage_dependent_node)
-    model_storage.node_instance.put(dependent_node_instance)
-    storage_dependent_node_instance = model_storage.node_instance.get(dependent_node_instance.id)
+    dependent_node_instance = models.get_dependent_node_instance(storage_dependent_node, deployment)
+    model_storage.node.put(dependent_node_instance)
+    storage_dependent_node_instance = model_storage.node.get(dependent_node_instance.id)
 
-    relationship = models.get_relationship(storage_dependent_node, storage_dependency_node)
-    model_storage.relationship.put(relationship)
-    storage_relationship = model_storage.relationship.get(relationship.id)
     relationship_instance = models.get_relationship_instance(
-        relationship=storage_relationship,
         target_instance=storage_dependency_node_instance,
         source_instance=storage_dependent_node_instance
     )
-    model_storage.relationship_instance.put(relationship_instance)
+    model_storage.relationship.put(relationship_instance)
 
     return deployment.id
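
The old Relationship/RelationshipInstance pair is collapsed here into a single Relationship
model keyed by its source and target nodes and stored under model_storage.relationship. A
condensed sketch of the new wiring used above (names taken from this diff):

    relationship = models.get_relationship_instance(
        source_instance=storage_dependent_node_instance,
        target_instance=storage_dependency_node_instance)
    model_storage.relationship.put(relationship)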

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/401e4752/tests/orchestrator/context/test_operation.py
----------------------------------------------------------------------
diff --git a/tests/orchestrator/context/test_operation.py b/tests/orchestrator/context/test_operation.py
index b0918d1..15b056a 100644
--- a/tests/orchestrator/context/test_operation.py
+++ b/tests/orchestrator/context/test_operation.py
@@ -55,14 +55,13 @@ def executor():
 def test_node_operation_task_execution(ctx, executor):
     operation_name = 'aria.interfaces.lifecycle.create'
 
-    node = ctx.model.node.get_by_name(mock.models.DEPENDENCY_NODE_NAME)
-    node.operations[operation_name] = {
-        'operation': op_path(my_operation, module_path=__name__)
-
-    }
+    node = ctx.model.node.get_by_name(mock.models.DEPENDENCY_NODE_INSTANCE_NAME)
+    interface = mock.models.get_interface(
+        operation_name,
+        operation_kwargs=dict(implementation=op_path(my_operation, module_path=__name__))
+    )
+    node.interfaces = [interface]
     ctx.model.node.update(node)
-    node_instance = ctx.model.node_instance.get_by_name(mock.models.DEPENDENCY_NODE_INSTANCE_NAME)
-
     inputs = {'putput': True}
 
     @workflow
@@ -70,77 +69,78 @@ def test_node_operation_task_execution(ctx, executor):
         graph.add_tasks(
             api.task.OperationTask.node_instance(
                 name=operation_name,
-                instance=node_instance,
+                instance=node,
                 inputs=inputs
             )
         )
 
     execute(workflow_func=basic_workflow, workflow_context=ctx, executor=executor)
 
-    operation_context = global_test_holder[op_name(node_instance, operation_name)]
+    operation_context = global_test_holder[op_name(node, operation_name)]
 
     assert isinstance(operation_context, context.operation.NodeOperationContext)
 
     # Task bases assertions
-    assert operation_context.task.actor == node_instance
-    assert operation_context.task.name == op_name(node_instance, operation_name)
-    assert operation_context.task.operation_mapping == node.operations[operation_name]['operation']
+    assert operation_context.task.actor == node
+    assert operation_context.task.name == op_name(node, operation_name)
+    operations = interface.operations.filter_by(name=operation_name)
+    assert operations.count() == 1
+    assert operation_context.task.implementation == operations[0].implementation
     assert operation_context.task.inputs == inputs
 
     # Context based attributes (sugaring)
-    assert operation_context.node == node_instance.node
-    assert operation_context.node_instance == node_instance
+    assert operation_context.node_template == node.node_template
+    assert operation_context.node == node
 
 
 def test_relationship_operation_task_execution(ctx, executor):
-    operation_name = 'aria.interfaces.relationship_lifecycle.postconfigure'
+    operation_name = 'aria.interfaces.relationship_lifecycle.post_configure'
     relationship = ctx.model.relationship.list()[0]
-    relationship.source_operations[operation_name] = {
-        'operation': op_path(my_operation, module_path=__name__)
-    }
-    ctx.model.relationship.update(relationship)
-    relationship_instance = ctx.model.relationship_instance.list()[0]
 
-    dependency_node = ctx.model.node.get_by_name(mock.models.DEPENDENCY_NODE_NAME)
-    dependency_node_instance = \
-        ctx.model.node_instance.get_by_name(mock.models.DEPENDENCY_NODE_INSTANCE_NAME)
-    dependent_node = ctx.model.node.get_by_name(mock.models.DEPENDENT_NODE_NAME)
-    dependent_node_instance = \
-        ctx.model.node_instance.get_by_name(mock.models.DEPENDENT_NODE_INSTANCE_NAME)
+    interface = mock.models.get_interface(
+        operation_name=operation_name,
+        operation_kwargs=dict(implementation=op_path(my_operation, module_path=__name__)),
+    )
 
+    relationship.source_interfaces = [interface]
+    ctx.model.relationship.update(relationship)
     inputs = {'putput': True}
 
     @workflow
     def basic_workflow(graph, **_):
         graph.add_tasks(
             api.task.OperationTask.relationship_instance(
-                instance=relationship_instance,
-                name=operation_name,
-                operation_end=api.task.OperationTask.SOURCE_OPERATION,
+                instance=relationship,
+                name='{0}_source'.format(operation_name),
                 inputs=inputs
             )
         )
 
     execute(workflow_func=basic_workflow, workflow_context=ctx, executor=executor)
 
-    operation_context = global_test_holder[op_name(relationship_instance, operation_name)]
+    operation_context = global_test_holder[op_name(relationship,
+                                                   '{0}_source'.format(operation_name))]
 
     assert isinstance(operation_context, context.operation.RelationshipOperationContext)
 
     # Task bases assertions
-    assert operation_context.task.actor == relationship_instance
-    assert operation_context.task.name == op_name(relationship_instance, operation_name)
-    assert operation_context.task.operation_mapping == \
-           relationship.source_operations[operation_name]['operation']
+    assert operation_context.task.actor == relationship
+    assert operation_context.task.name.startswith(operation_name)
+    operation = interface.operations.filter_by(name=operation_name)
+    assert operation_context.task.implementation == operation.all()[0].implementation
     assert operation_context.task.inputs == inputs
 
     # Context based attributes (sugaring)
+    dependency_node_template = ctx.model.node_template.get_by_name(mock.models.DEPENDENCY_NODE_NAME)
+    dependency_node = ctx.model.node.get_by_name(mock.models.DEPENDENCY_NODE_INSTANCE_NAME)
+    dependent_node_template = ctx.model.node_template.get_by_name(mock.models.DEPENDENT_NODE_NAME)
+    dependent_node = ctx.model.node.get_by_name(mock.models.DEPENDENT_NODE_INSTANCE_NAME)
+
+    assert operation_context.target_node_template == dependency_node_template
     assert operation_context.target_node == dependency_node
-    assert operation_context.target_node_instance == dependency_node_instance
     assert operation_context.relationship == relationship
-    assert operation_context.relationship_instance == relationship_instance
+    assert operation_context.source_node_template == dependent_node_template
     assert operation_context.source_node == dependent_node
-    assert operation_context.source_node_instance == dependent_node_instance
 
 
 def test_invalid_task_operation_id(ctx, executor):
@@ -152,39 +152,42 @@ def test_invalid_task_operation_id(ctx, executor):
     :return:
     """
     operation_name = 'aria.interfaces.lifecycle.create'
-    other_node_instance, node_instance = ctx.model.node_instance.list()
-    assert other_node_instance.id == 1
-    assert node_instance.id == 2
-
-    node = node_instance.node
-    node.operations[operation_name] = {
-        'operation': op_path(get_node_instance_id, module_path=__name__)
-
-    }
+    other_node, node = ctx.model.node.list()
+    assert other_node.id == 1
+    assert node.id == 2
+
+    interface = mock.models.get_interface(
+        operation_name=operation_name,
+        operation_kwargs=dict(implementation=op_path(get_node_instance_id, module_path=__name__))
+    )
+    node.interfaces = [interface]
     ctx.model.node.update(node)
 
     @workflow
     def basic_workflow(graph, **_):
         graph.add_tasks(
-            api.task.OperationTask.node_instance(name=operation_name, instance=node_instance)
+            api.task.OperationTask.node_instance(name=operation_name, instance=node)
         )
 
     execute(workflow_func=basic_workflow, workflow_context=ctx, executor=executor)
 
-    op_node_instance_id = global_test_holder[op_name(node_instance, operation_name)]
-    assert op_node_instance_id == node_instance.id
-    assert op_node_instance_id != other_node_instance.id
+    op_node_instance_id = global_test_holder[op_name(node, operation_name)]
+    assert op_node_instance_id == node.id
+    assert op_node_instance_id != other_node.id
 
 
 def test_plugin_workdir(ctx, executor, tmpdir):
     op = 'test.op'
     plugin_name = 'mock_plugin'
-    node = ctx.model.node.get_by_name(mock.models.DEPENDENCY_NODE_NAME)
-    node.operations[op] = {'operation': '{0}.{1}'.format(__name__, _test_plugin_workdir.__name__),
-                           'plugin': plugin_name}
+    node = ctx.model.node.get_by_name(mock.models.DEPENDENCY_NODE_INSTANCE_NAME)
+    node.interfaces = [mock.models.get_interface(
+        op,
+        operation_kwargs=dict(
+            implementation='{0}.{1}'.format(__name__, _test_plugin_workdir.__name__),
+            plugin=plugin_name)
+    )]
     node.plugins = [{'name': plugin_name}]
     ctx.model.node.update(node)
-    node_instance = ctx.model.node_instance.get_by_name(mock.models.DEPENDENCY_NODE_INSTANCE_NAME)
 
     filename = 'test_file'
     content = 'file content'
@@ -193,10 +196,12 @@ def test_plugin_workdir(ctx, executor, tmpdir):
     @workflow
     def basic_workflow(graph, **_):
         graph.add_tasks(api.task.OperationTask.node_instance(
-            name=op, instance=node_instance, inputs=inputs))
+            name=op, instance=node, inputs=inputs))
 
     execute(workflow_func=basic_workflow, workflow_context=ctx, executor=executor)
-    expected_file = tmpdir.join('workdir', 'plugins', str(ctx.deployment.id), plugin_name, filename)
+    expected_file = tmpdir.join('workdir', 'plugins', str(ctx.service_instance.id),
+                                plugin_name,
+                                filename)
     assert expected_file.read() == content
 
 
@@ -207,7 +212,7 @@ def my_operation(ctx, **_):
 
 @operation
 def get_node_instance_id(ctx, **_):
-    global_test_holder[ctx.name] = ctx.node_instance.id
+    global_test_holder[ctx.name] = ctx.node.id
 
 
 @operation
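
The relationship tests above now encode the operation end in the task name instead of passing
an operation_end argument; a condensed sketch of the pattern (values are illustrative only):

    operation_name = 'aria.interfaces.relationship_lifecycle.post_configure'
    api.task.OperationTask.relationship_instance(
        instance=relationship,
        name='{0}_source'.format(operation_name),   # a '_target' suffix selects the other end
        inputs=inputs)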

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/401e4752/tests/orchestrator/context/test_resource_render.py
----------------------------------------------------------------------
diff --git a/tests/orchestrator/context/test_resource_render.py b/tests/orchestrator/context/test_resource_render.py
index ca2ef42..3ba0873 100644
--- a/tests/orchestrator/context/test_resource_render.py
+++ b/tests/orchestrator/context/test_resource_render.py
@@ -17,7 +17,7 @@ import pytest
 
 from tests import mock, storage
 
-_IMPLICIT_CTX_TEMPLATE = '{{ctx.deployment.name}}'
+_IMPLICIT_CTX_TEMPLATE = '{{ctx.service_instance.name}}'
 _IMPLICIT_CTX_TEMPLATE_PATH = 'implicit-ctx.template'
 _VARIABLES_TEMPLATE = '{{variable}}'
 _VARIABLES_TEMPLATE_PATH = 'variables.template'

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/401e4752/tests/orchestrator/context/test_serialize.py
----------------------------------------------------------------------
diff --git a/tests/orchestrator/context/test_serialize.py b/tests/orchestrator/context/test_serialize.py
index 76930b1..eed98a4 100644
--- a/tests/orchestrator/context/test_serialize.py
+++ b/tests/orchestrator/context/test_serialize.py
@@ -49,13 +49,14 @@ def test_illegal_serialize_of_memory_model_storage(memory_model_storage):
 
 @workflow
 def _mock_workflow(ctx, graph):
-    op = 'test.op'
+    node = ctx.model.node.get_by_name(mock.models.DEPENDENCY_NODE_INSTANCE_NAME)
     plugin_name = 'mock_plugin'
-    node_instance = ctx.model.node_instance.get_by_name(mock.models.DEPENDENCY_NODE_INSTANCE_NAME)
-    node = node_instance.node
-    node.operations[op] = {'operation': _operation_mapping(), 'plugin': plugin_name}
+    node.interfaces = [mock.models.get_interface(
+        'test.op',
+        operation_kwargs=dict(implementation=_operation_mapping(), plugin=plugin_name)
+    )]
     node.plugins = [{'name': plugin_name}]
-    task = api.task.OperationTask.node_instance(instance=node_instance, name=op)
+    task = api.task.OperationTask.node_instance(instance=node, name='test.op')
     graph.add_tasks(task)
     return graph
 
@@ -65,14 +66,14 @@ def _mock_operation(ctx):
     # We test several things in this operation
     # ctx.task, ctx.node, etc... tell us that the model storage was properly re-created
     # a correct ctx.task.operation_mapping tells us we kept the correct task_id
-    assert ctx.task.operation_mapping == _operation_mapping()
+    assert ctx.task.implementation == _operation_mapping()
     # a correct ctx.node.name tells us we kept the correct actor_id
-    assert ctx.node.name == mock.models.DEPENDENCY_NODE_NAME
+    assert ctx.node.name == mock.models.DEPENDENCY_NODE_INSTANCE_NAME
     # a correct ctx.name tells us we kept the correct name
     assert ctx.name is not None
     assert ctx.name == ctx.task.name
     # a correct ctx.deployment.name tells us we kept the correct deployment_id
-    assert ctx.deployment.name == mock.models.DEPLOYMENT_NAME
+    assert ctx.service_instance.name == mock.models.DEPLOYMENT_NAME
     # Here we test that the resource storage was properly re-created
     test_file_content = ctx.resource.blueprint.read(TEST_FILE_ENTRY_ID, TEST_FILE_NAME)
     assert test_file_content == TEST_FILE_CONTENT
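
The context attribute renames exercised above follow one pattern throughout this commit; a
condensed, purely illustrative sketch of an operation body using the new names:

    @operation
    def my_operation(ctx, **_):
        ctx.node.runtime_properties['key'] = 'value'    # was ctx.node_instance
        assert ctx.service_instance.name                # was ctx.deployment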

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/401e4752/tests/orchestrator/context/test_toolbelt.py
----------------------------------------------------------------------
diff --git a/tests/orchestrator/context/test_toolbelt.py b/tests/orchestrator/context/test_toolbelt.py
index b63811b..35306a6 100644
--- a/tests/orchestrator/context/test_toolbelt.py
+++ b/tests/orchestrator/context/test_toolbelt.py
@@ -48,45 +48,46 @@ def executor():
 
 
 def _get_elements(workflow_context):
-    dependency_node = workflow_context.model.node.get_by_name(mock.models.DEPENDENCY_NODE_NAME)
-    dependency_node.host = dependency_node
-    workflow_context.model.node.update(dependency_node)
+    dependency_node_template = workflow_context.model.node_template.get_by_name(
+        mock.models.DEPENDENCY_NODE_NAME)
+    dependency_node_template.host = dependency_node_template
+    workflow_context.model.node.update(dependency_node_template)
 
-    dependency_node_instance = workflow_context.model.node_instance.get_by_name(
+    dependency_node = workflow_context.model.node.get_by_name(
         mock.models.DEPENDENCY_NODE_INSTANCE_NAME)
-    dependency_node_instance.host_fk = dependency_node_instance.id
-    workflow_context.model.node_instance.update(dependency_node_instance)
+    dependency_node.host_fk = dependency_node.id
+    workflow_context.model.node.update(dependency_node)
 
-    dependent_node = workflow_context.model.node.get_by_name(mock.models.DEPENDENT_NODE_NAME)
-    dependent_node.host_fk = dependency_node.id
-    workflow_context.model.node.update(dependent_node)
+    dependent_node_template = workflow_context.model.node_template.get_by_name(
+        mock.models.DEPENDENT_NODE_NAME)
+    dependent_node_template.host = dependency_node_template
+    workflow_context.model.node_template.update(dependent_node_template)
 
-    dependent_node_instance = workflow_context.model.node_instance.get_by_name(
+    dependent_node = workflow_context.model.node.get_by_name(
         mock.models.DEPENDENT_NODE_INSTANCE_NAME)
-    dependent_node_instance.host_fk = dependent_node_instance.id
-    workflow_context.model.node_instance.update(dependent_node_instance)
+    dependent_node.host = dependent_node
+    workflow_context.model.node.update(dependent_node)
 
     relationship = workflow_context.model.relationship.list()[0]
-    relationship_instance = workflow_context.model.relationship_instance.list()[0]
-    return dependency_node, dependency_node_instance, dependent_node, dependent_node_instance, \
-        relationship, relationship_instance
+    return dependency_node_template, dependency_node, dependent_node_template, dependent_node, \
+        relationship
 
 
 def test_host_ip(workflow_context, executor):
     operation_name = 'aria.interfaces.lifecycle.create'
-    dependency_node, dependency_node_instance, _, _, _, _ = _get_elements(workflow_context)
-    dependency_node.operations[operation_name] = {
-        'operation': op_path(host_ip, module_path=__name__)
-
-    }
-    workflow_context.model.node.put(dependency_node)
+    _, dependency_node, _, _, _ = _get_elements(workflow_context)
+    dependency_node.interfaces = [mock.models.get_interface(
+        operation_name,
+        operation_kwargs=dict(implementation=op_path(host_ip, module_path=__name__))
+    )]
+    workflow_context.model.node.update(dependency_node)
     inputs = {'putput': True}
 
     @workflow
     def basic_workflow(graph, **_):
         graph.add_tasks(
             api.task.OperationTask.node_instance(
-                instance=dependency_node_instance,
+                instance=dependency_node,
                 name=operation_name,
                 inputs=inputs
             )
@@ -94,18 +95,19 @@ def test_host_ip(workflow_context, executor):
 
     execute(workflow_func=basic_workflow, workflow_context=workflow_context, executor=executor)
 
-    assert global_test_holder.get('host_ip') == \
-           dependency_node_instance.runtime_properties.get('ip')
+    assert global_test_holder.get('host_ip') == dependency_node.runtime_properties.get('ip')
 
 
 def test_relationship_tool_belt(workflow_context, executor):
-    operation_name = 'aria.interfaces.relationship_lifecycle.postconfigure'
-    _, _, _, _, relationship, relationship_instance = \
-        _get_elements(workflow_context)
-    relationship.source_operations[operation_name] = {
-        'operation': op_path(relationship_operation, module_path=__name__)
-    }
-    workflow_context.model.relationship.put(relationship)
+    operation_name = 'aria.interfaces.relationship_lifecycle.post_configure'
+    _, _, _, _, relationship = _get_elements(workflow_context)
+    relationship.source_interfaces = [
+        mock.models.get_interface(
+            operation_name,
+            operation_kwargs=dict(
+                implementation=op_path(relationship_operation, module_path=__name__)))
+    ]
+    workflow_context.model.relationship.update(relationship)
 
     inputs = {'putput': True}
 
@@ -113,16 +115,16 @@ def test_relationship_tool_belt(workflow_context, executor):
     def basic_workflow(graph, **_):
         graph.add_tasks(
             api.task.OperationTask.relationship_instance(
-                instance=relationship_instance,
-                name=operation_name,
-                operation_end=api.task.OperationTask.SOURCE_OPERATION,
+                instance=relationship,
+                name='{0}_source'.format(operation_name),
                 inputs=inputs
             )
         )
 
     execute(workflow_func=basic_workflow, workflow_context=workflow_context, executor=executor)
 
-    assert isinstance(global_test_holder.get(op_name(relationship_instance, operation_name)),
+    assert isinstance(global_test_holder.get(op_name(relationship,
+                                                     '{0}_source'.format(operation_name))),
                       RelationshipToolBelt)
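
In _get_elements above each node is made its own host so the toolbelt can resolve host_ip; the
wiring mixes the ORM relation and the raw foreign key, exactly as the diff shows. A condensed
sketch of that pattern:

    dependency_node.host_fk = dependency_node.id        # via the foreign key
    dependent_node.host = dependent_node                # via the relation
    workflow_context.model.node.update(dependent_node)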
 
 

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/401e4752/tests/orchestrator/context/test_workflow.py
----------------------------------------------------------------------
diff --git a/tests/orchestrator/context/test_workflow.py b/tests/orchestrator/context/test_workflow.py
index 496c1ff..89afb39 100644
--- a/tests/orchestrator/context/test_workflow.py
+++ b/tests/orchestrator/context/test_workflow.py
@@ -29,9 +29,11 @@ class TestWorkflowContext(object):
     def test_execution_creation_on_workflow_context_creation(self, storage):
         ctx = self._create_ctx(storage)
         execution = storage.execution.get(ctx.execution.id)             # pylint: disable=no-member
-        assert execution.deployment == storage.deployment.get_by_name(models.DEPLOYMENT_NAME)
+        assert execution.service_instance == storage.service_instance.get_by_name(
+            models.DEPLOYMENT_NAME)
         assert execution.workflow_name == models.WORKFLOW_NAME
-        assert execution.blueprint == storage.blueprint.get_by_name(models.BLUEPRINT_NAME)
+        assert execution.service_template == storage.service_template.get_by_name(
+            models.BLUEPRINT_NAME)
         assert execution.status == storage.execution.model_cls.PENDING
         assert execution.parameters == {}
         assert execution.created_at <= datetime.utcnow()
@@ -51,7 +53,7 @@ class TestWorkflowContext(object):
             name='simple_context',
             model_storage=storage,
             resource_storage=None,
-            deployment_id=storage.deployment.get_by_name(models.DEPLOYMENT_NAME).id,
+            service_instance_id=storage.service_instance.get_by_name(models.DEPLOYMENT_NAME).id,
             workflow_name=models.WORKFLOW_NAME,
             task_max_attempts=models.TASK_MAX_ATTEMPTS,
             task_retry_interval=models.TASK_RETRY_INTERVAL
@@ -62,8 +64,8 @@ class TestWorkflowContext(object):
 def storage():
     api_kwargs = test_storage.get_sqlite_api_kwargs()
     workflow_storage = application_model_storage(SQLAlchemyModelAPI, api_kwargs=api_kwargs)
-    workflow_storage.blueprint.put(models.get_blueprint())
-    blueprint = workflow_storage.blueprint.get_by_name(models.BLUEPRINT_NAME)
-    workflow_storage.deployment.put(models.get_deployment(blueprint))
+    workflow_storage.service_template.put(models.get_blueprint())
+    blueprint = workflow_storage.service_template.get_by_name(models.BLUEPRINT_NAME)
+    workflow_storage.service_instance.put(models.get_deployment(blueprint))
     yield workflow_storage
     test_storage.release_sqlite_storage(workflow_storage)

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/401e4752/tests/orchestrator/execution_plugin/test_local.py
----------------------------------------------------------------------
diff --git a/tests/orchestrator/execution_plugin/test_local.py b/tests/orchestrator/execution_plugin/test_local.py
index 497da48..f9d4485 100644
--- a/tests/orchestrator/execution_plugin/test_local.py
+++ b/tests/orchestrator/execution_plugin/test_local.py
@@ -42,10 +42,10 @@ class TestLocalRunScript(object):
         script_path = self._create_script(
             tmpdir,
             linux_script='''#! /bin/bash -e
-            ctx node-instance runtime-properties map.key value
+            ctx node runtime-properties map.key value
             ''',
             windows_script='''
-            ctx node-instance runtime-properties map.key value
+            ctx node runtime-properties map.key value
         ''')
         props = self._run(
             executor, workflow_context,
@@ -56,12 +56,12 @@ class TestLocalRunScript(object):
         script_path = self._create_script(
             tmpdir,
             linux_script='''#! /bin/bash -e
-            ctx node-instance runtime-properties map.key1 $key1
-            ctx node-instance runtime-properties map.key2 $key2
+            ctx node runtime-properties map.key1 $key1
+            ctx node runtime-properties map.key2 $key2
             ''',
             windows_script='''
-            ctx node-instance runtime-properties map.key1 %key1%
-            ctx node-instance runtime-properties map.key2 %key2%
+            ctx node runtime-properties map.key1 %key1%
+            ctx node runtime-properties map.key2 %key2%
         ''')
         props = self._run(
             executor, workflow_context,
@@ -80,10 +80,10 @@ class TestLocalRunScript(object):
         script_path = self._create_script(
             tmpdir,
             linux_script='''#! /bin/bash -e
-            ctx node-instance runtime-properties map.cwd $PWD
+            ctx node runtime-properties map.cwd $PWD
             ''',
             windows_script='''
-            ctx node-instance runtime-properties map.cwd %CD%
+            ctx node runtime-properties map.cwd %CD%
             ''')
         tmpdir = str(tmpdir)
         props = self._run(
@@ -96,7 +96,7 @@ class TestLocalRunScript(object):
         assert p_map['cwd'] == tmpdir
 
     def test_process_command_prefix(self, executor, workflow_context, tmpdir):
-        use_ctx = 'ctx node-instance runtime-properties map.key value'
+        use_ctx = 'ctx node runtime-properties map.key value'
         python_script = ['import subprocess',
                          'subprocess.Popen("{0}".split(' ')).communicate()[0]'.format(use_ctx)]
         python_script = '\n'.join(python_script)
@@ -120,12 +120,12 @@ class TestLocalRunScript(object):
         script_path = self._create_script(
             tmpdir,
             linux_script='''#! /bin/bash -e
-            ctx node-instance runtime-properties map.arg1 "$1"
-            ctx node-instance runtime-properties map.arg2 $2
+            ctx node runtime-properties map.arg1 "$1"
+            ctx node runtime-properties map.arg2 $2
             ''',
             windows_script='''
-            ctx node-instance runtime-properties map.arg1 %1
-            ctx node-instance runtime-properties map.arg2 %2
+            ctx node runtime-properties map.arg1 %1
+            ctx node runtime-properties map.arg2 %2
             ''')
         props = self._run(
             executor, workflow_context,
@@ -186,7 +186,7 @@ class TestLocalRunScript(object):
         script = '''
 from aria.orchestrator.execution_plugin import ctx, inputs
 if __name__ == '__main__':
-    ctx.node_instance.runtime_properties['key'] = inputs['key']
+    ctx.node.runtime_properties['key'] = inputs['key']
 '''
         suffix = '.py'
         script_path = self._create_script(
@@ -208,10 +208,10 @@ if __name__ == '__main__':
         script_path = self._create_script(
             tmpdir,
             linux_script='''#! /bin/bash -e
-            ctx node-instance runtime-properties key "${input_as_env_var}"
+            ctx node runtime-properties key "${input_as_env_var}"
             ''',
             windows_script='''
-            ctx node-instance runtime-properties key "%input_as_env_var%"
+            ctx node runtime-properties key "%input_as_env_var%"
         ''')
         props = self._run(
             executor, workflow_context,
@@ -226,10 +226,10 @@ if __name__ == '__main__':
         script_path = self._create_script(
             tmpdir,
             linux_script='''#! /bin/bash -e
-            ctx node-instance runtime-properties key "${input_as_env_var}"
+            ctx node runtime-properties key "${input_as_env_var}"
             ''',
             windows_script='''
-            ctx node-instance runtime-properties key "%input_as_env_var%"
+            ctx node runtime-properties key "%input_as_env_var%"
         ''')
 
         props = self._run(
@@ -248,10 +248,10 @@ if __name__ == '__main__':
         script_path = self._create_script(
             tmpdir,
             linux_script='''#! /bin/bash -e
-            ctx node-instance runtime-properties nonexistent
+            ctx node runtime-properties nonexistent
             ''',
             windows_script='''
-            ctx node-instance runtime-properties nonexistent
+            ctx node runtime-properties nonexistent
         ''')
         exception = self._run_and_get_task_exception(
             executor, workflow_context,
@@ -462,7 +462,7 @@ if __name__ == '__main__':
         script_path = os.path.basename(local_script_path) if local_script_path else None
         if script_path:
             workflow_context.resource.deployment.upload(
-                entry_id=str(workflow_context.deployment.id),
+                entry_id=str(workflow_context.service_instance.id),
                 source=local_script_path,
                 path=script_path)
 
@@ -476,13 +476,18 @@ if __name__ == '__main__':
         @workflow
         def mock_workflow(ctx, graph):
             op = 'test.op'
-            node_instance = ctx.model.node_instance.get_by_name(
-                mock.models.DEPENDENCY_NODE_INSTANCE_NAME)
-            node_instance.node.operations[op] = {
-                'operation': '{0}.{1}'.format(operations.__name__,
-                                              operations.run_script_locally.__name__)}
+            node = ctx.model.node.get_by_name(mock.models.DEPENDENCY_NODE_INSTANCE_NAME)
+            node.interfaces = [mock.models.get_interface(
+                op,
+                operation_kwargs=dict(implementation='{0}.{1}'.format(
+                    operations.__name__,
+                    operations.run_script_locally.__name__))
+            )]
+            # node.operations[op] = {
+            #     'operation': '{0}.{1}'.format(operations.__name__,
+            #                                   operations.run_script_locally.__name__)}
             graph.add_tasks(api.task.OperationTask.node_instance(
-                instance=node_instance,
+                instance=node,
                 name=op,
                 inputs=inputs))
             return graph
@@ -492,7 +497,7 @@ if __name__ == '__main__':
             workflow_context=workflow_context,
             tasks_graph=tasks_graph)
         eng.execute()
-        return workflow_context.model.node_instance.get_by_name(
+        return workflow_context.model.node.get_by_name(
             mock.models.DEPENDENCY_NODE_INSTANCE_NAME).runtime_properties
 
     @pytest.fixture

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/401e4752/tests/orchestrator/execution_plugin/test_ssh.py
----------------------------------------------------------------------
diff --git a/tests/orchestrator/execution_plugin/test_ssh.py b/tests/orchestrator/execution_plugin/test_ssh.py
index 6b5c783..1df1128 100644
--- a/tests/orchestrator/execution_plugin/test_ssh.py
+++ b/tests/orchestrator/execution_plugin/test_ssh.py
@@ -253,7 +253,7 @@ class TestWithActualSSHServer(object):
 
     def _upload(self, source, path):
         self._workflow_context.resource.deployment.upload(
-            entry_id=str(self._workflow_context.deployment.id),
+            entry_id=str(self._workflow_context.service_instance.id),
             source=source,
             path=path)
 

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/401e4752/tests/orchestrator/workflows/__init__.py
----------------------------------------------------------------------
diff --git a/tests/orchestrator/workflows/__init__.py b/tests/orchestrator/workflows/__init__.py
index fe04b2f..7f0fd56 100644
--- a/tests/orchestrator/workflows/__init__.py
+++ b/tests/orchestrator/workflows/__init__.py
@@ -13,4 +13,4 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
-from . import api, builtin, core
+from . import api, core

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/401e4752/tests/orchestrator/workflows/api/test_task.py
----------------------------------------------------------------------
diff --git a/tests/orchestrator/workflows/api/test_task.py b/tests/orchestrator/workflows/api/test_task.py
index 601c437..8873ffb 100644
--- a/tests/orchestrator/workflows/api/test_task.py
+++ b/tests/orchestrator/workflows/api/test_task.py
@@ -19,7 +19,7 @@ import pytest
 from aria.storage import model
 from aria.orchestrator import context
 from aria.orchestrator.workflows import api
-
+from aria.storage.modeling import model
 from tests import mock, storage
 
 
@@ -31,7 +31,7 @@ def ctx():
     :return:
     """
     simple_context = mock.context.simple(storage.get_sqlite_api_kwargs())
-    simple_context.model.execution.put(mock.models.get_execution(simple_context.deployment))
+    simple_context.model.execution.put(mock.models.get_execution(simple_context.service_instance))
     yield simple_context
     storage.release_sqlite_storage(simple_context.model)
 
@@ -40,16 +40,17 @@ class TestOperationTask(object):
 
     def test_node_operation_task_creation(self, ctx):
         operation_name = 'aria.interfaces.lifecycle.create'
-        op_details = {'operation': True, 'plugin': 'plugin'}
-        node = ctx.model.node.get_by_name(mock.models.DEPENDENT_NODE_NAME)
-        node.operations[operation_name] = op_details
+        interface = mock.models.get_interface(
+            operation_name,
+            operation_kwargs=dict(plugin='plugin', implementation='op_path'))
+
+        node = ctx.model.node.get_by_name(mock.models.DEPENDENT_NODE_INSTANCE_NAME)
+        node.interfaces = [interface]
         node.plugins = [{'name': 'plugin',
                          'package_name': 'package',
                          'package_version': '0.1'}]
-        ctx.model.node.update(node)
-        node_instance = \
-            ctx.model.node_instance.get_by_name(mock.models.DEPENDENT_NODE_INSTANCE_NAME)
-        inputs = {'inputs': True}
+        ctx.model.node_template.update(node)
+        inputs = [model.Parameter(type_name='Boolean', value=True)]
         max_attempts = 10
         retry_interval = 10
         ignore_failure = True
@@ -57,15 +58,15 @@ class TestOperationTask(object):
         with context.workflow.current.push(ctx):
             api_task = api.task.OperationTask.node_instance(
                 name=operation_name,
-                instance=node_instance,
+                instance=node,
                 inputs=inputs,
                 max_attempts=max_attempts,
                 retry_interval=retry_interval,
                 ignore_failure=ignore_failure)
 
-        assert api_task.name == '{0}.{1}'.format(operation_name, node_instance.id)
-        assert api_task.operation_mapping is True
-        assert api_task.actor == node_instance
+        assert api_task.name == '{0}.{1}'.format(operation_name, node.id)
+        assert api_task.implementation == 'op_path'
+        assert api_task.actor == node
         assert api_task.inputs == inputs
         assert api_task.retry_interval == retry_interval
         assert api_task.max_attempts == max_attempts
@@ -77,30 +78,33 @@ class TestOperationTask(object):
 
     def test_source_relationship_operation_task_creation(self, ctx):
         operation_name = 'aria.interfaces.relationship_lifecycle.preconfigure'
-        op_details = {'operation': True, 'plugin': 'plugin'}
+
+        interface = mock.models.get_interface(
+            operation_name,
+            operation_kwargs=dict(implementation='op_path', plugin='plugin'),
+        )
+
         relationship = ctx.model.relationship.list()[0]
-        relationship.source_operations[operation_name] = op_details
+        relationship.source_interfaces = [interface]
         relationship.source_node.plugins = [{'name': 'plugin',
                                              'package_name': 'package',
                                              'package_version': '0.1'}]
-        relationship_instance = ctx.model.relationship_instance.list()[0]
-        inputs = {'inputs': True}
+        input = model.Parameter(type_name='Boolean', value=True)
         max_attempts = 10
         retry_interval = 10
 
         with context.workflow.current.push(ctx):
             api_task = api.task.OperationTask.relationship_instance(
-                name=operation_name,
-                instance=relationship_instance,
-                operation_end=api.task.OperationTask.SOURCE_OPERATION,
-                inputs=inputs,
+                name='{0}_source'.format(operation_name),
+                instance=relationship,
+                inputs=[input],
                 max_attempts=max_attempts,
                 retry_interval=retry_interval)
 
-        assert api_task.name == '{0}.{1}'.format(operation_name, relationship_instance.id)
-        assert api_task.operation_mapping is True
-        assert api_task.actor == relationship_instance
-        assert api_task.inputs == inputs
+        assert api_task.name == '{0}_source.{1}'.format(operation_name, relationship.id)
+        assert api_task.implementation == 'op_path'
+        assert api_task.actor == relationship
+        assert api_task.inputs == [input]
         assert api_task.retry_interval == retry_interval
         assert api_task.max_attempts == max_attempts
         assert api_task.plugin == {'name': 'plugin',
@@ -110,30 +114,32 @@ class TestOperationTask(object):
 
     def test_target_relationship_operation_task_creation(self, ctx):
         operation_name = 'aria.interfaces.relationship_lifecycle.preconfigure'
-        op_details = {'operation': True, 'plugin': 'plugin'}
+        interface = mock.models.get_interface(
+            operation_name,
+            operation_kwargs=dict(implementation='op_path', plugin='plugin'),
+        )
+
         relationship = ctx.model.relationship.list()[0]
-        relationship.target_operations[operation_name] = op_details
+        relationship.target_interfaces = [interface]
         relationship.target_node.plugins = [{'name': 'plugin',
                                              'package_name': 'package',
                                              'package_version': '0.1'}]
-        relationship_instance = ctx.model.relationship_instance.list()[0]
-        inputs = {'inputs': True}
+        input = model.Parameter(type_name='Boolean', value=True)
         max_attempts = 10
         retry_interval = 10
 
         with context.workflow.current.push(ctx):
             api_task = api.task.OperationTask.relationship_instance(
-                name=operation_name,
-                instance=relationship_instance,
-                operation_end=api.task.OperationTask.TARGET_OPERATION,
-                inputs=inputs,
+                name='{0}_target'.format(operation_name),
+                instance=relationship,
+                inputs=[input],
                 max_attempts=max_attempts,
                 retry_interval=retry_interval)
 
-        assert api_task.name == '{0}.{1}'.format(operation_name, relationship_instance.id)
-        assert api_task.operation_mapping is True
-        assert api_task.actor == relationship_instance
-        assert api_task.inputs == inputs
+        assert api_task.name == '{0}_target.{1}'.format(operation_name, relationship.id)
+        assert api_task.implementation == 'op_path'
+        assert api_task.actor == relationship
+        assert api_task.inputs == [input]
         assert api_task.retry_interval == retry_interval
         assert api_task.max_attempts == max_attempts
         assert api_task.plugin == {'name': 'plugin',
@@ -142,12 +148,12 @@ class TestOperationTask(object):
         assert api_task.runs_on == model.Task.RUNS_ON_TARGET
 
     def test_operation_task_default_values(self, ctx):
-        dependency_node_instance = ctx.model.node_instance.get_by_name(
+        dependency_node_instance = ctx.model.node.get_by_name(
             mock.models.DEPENDENCY_NODE_INSTANCE_NAME)
         with context.workflow.current.push(ctx):
             task = api.task.OperationTask(
                 name='stub',
-                operation_mapping='',
+                implementation='',
                 actor=dependency_node_instance)
 
         assert task.inputs == {}
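
As the assertions above show, these API-level tests now pass task inputs as parameter models
rather than plain dicts; a minimal sketch (the type name and value are illustrative):

    from aria.storage.modeling import model

    inputs = [model.Parameter(type_name='Boolean', value=True)]
    api.task.OperationTask.node_instance(name=operation_name, instance=node, inputs=inputs)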

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/401e4752/tests/orchestrator/workflows/builtin/test_execute_operation.py
----------------------------------------------------------------------
diff --git a/tests/orchestrator/workflows/builtin/test_execute_operation.py b/tests/orchestrator/workflows/builtin/test_execute_operation.py
deleted file mode 100644
index b7e5678..0000000
--- a/tests/orchestrator/workflows/builtin/test_execute_operation.py
+++ /dev/null
@@ -1,56 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements.  See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License.  You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import pytest
-
-from aria.orchestrator.workflows.api import task
-from aria.orchestrator.workflows.builtin.execute_operation import execute_operation
-
-from tests import mock
-from tests import storage
-
-
-@pytest.fixture
-def ctx(tmpdir):
-    context = mock.context.simple(storage.get_sqlite_api_kwargs(str(tmpdir)))
-    yield context
-    storage.release_sqlite_storage(context.model)
-
-
-def test_execute_operation(ctx):
-    node_instance = ctx.model.node_instance.get_by_name(mock.models.DEPENDENCY_NODE_INSTANCE_NAME)
-
-    operation_name = mock.operations.NODE_OPERATIONS_INSTALL[0]
-
-    execute_tasks = list(
-        task.WorkflowTask(
-            execute_operation,
-            ctx=ctx,
-            operation=operation_name,
-            operation_kwargs={},
-            allow_kwargs_override=False,
-            run_by_dependency_order=False,
-            type_names=[],
-            node_ids=[],
-            node_instance_ids=[node_instance.id]
-        ).topological_order()
-    )
-
-    assert len(execute_tasks) == 1
-    assert execute_tasks[0].name == '{0}.{1}'.format(operation_name, node_instance.id)
-
-
-
-# TODO: add more scenarios

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/401e4752/tests/orchestrator/workflows/builtin/test_install.py
----------------------------------------------------------------------
diff --git a/tests/orchestrator/workflows/builtin/test_install.py b/tests/orchestrator/workflows/builtin/test_install.py
deleted file mode 100644
index 789a161..0000000
--- a/tests/orchestrator/workflows/builtin/test_install.py
+++ /dev/null
@@ -1,43 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements.  See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License.  You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-import pytest
-
-from aria.orchestrator.workflows.api import task
-from aria.orchestrator.workflows.builtin.install import install
-
-from tests import mock
-from tests import storage
-
-from . import assert_node_install_operations
-
-
-@pytest.fixture
-def ctx(tmpdir):
-    context = mock.context.simple(storage.get_sqlite_api_kwargs(str(tmpdir)))
-    yield context
-    storage.release_sqlite_storage(context.model)
-
-
-def test_install(ctx):
-
-    install_tasks = list(task.WorkflowTask(install, ctx=ctx).topological_order(True))
-
-    assert len(install_tasks) == 2
-    dependency_node_subgraph, dependent_node_subgraph = install_tasks
-    dependent_node_tasks = list(dependent_node_subgraph.topological_order(reverse=True))
-    dependency_node_tasks = list(dependency_node_subgraph.topological_order(reverse=True))
-
-    assert_node_install_operations(dependency_node_tasks)
-    assert_node_install_operations(dependent_node_tasks, with_relationships=True)

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/401e4752/tests/orchestrator/workflows/builtin/test_uninstall.py
----------------------------------------------------------------------
diff --git a/tests/orchestrator/workflows/builtin/test_uninstall.py b/tests/orchestrator/workflows/builtin/test_uninstall.py
deleted file mode 100644
index 126c4cf..0000000
--- a/tests/orchestrator/workflows/builtin/test_uninstall.py
+++ /dev/null
@@ -1,44 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements.  See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License.  You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import pytest
-
-from aria.orchestrator.workflows.api import task
-from aria.orchestrator.workflows.builtin.uninstall import uninstall
-
-from tests import mock
-from tests import storage
-
-from . import assert_node_uninstall_operations
-
-
-@pytest.fixture
-def ctx(tmpdir):
-    context = mock.context.simple(storage.get_sqlite_api_kwargs(str(tmpdir)))
-    yield context
-    storage.release_sqlite_storage(context.model)
-
-
-def test_uninstall(ctx):
-
-    uninstall_tasks = list(task.WorkflowTask(uninstall, ctx=ctx).topological_order(True))
-
-    assert len(uninstall_tasks) == 2
-    dependent_node_subgraph, dependency_node_subgraph = uninstall_tasks
-    dependent_node_tasks = list(dependent_node_subgraph.topological_order(reverse=True))
-    dependency_node_tasks = list(dependency_node_subgraph.topological_order(reverse=True))
-
-    assert_node_uninstall_operations(operations=dependency_node_tasks)
-    assert_node_uninstall_operations(operations=dependent_node_tasks, with_relationships=True)

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/401e4752/tests/orchestrator/workflows/core/test_engine.py
----------------------------------------------------------------------
diff --git a/tests/orchestrator/workflows/core/test_engine.py b/tests/orchestrator/workflows/core/test_engine.py
index d9b50a9..595ed4b 100644
--- a/tests/orchestrator/workflows/core/test_engine.py
+++ b/tests/orchestrator/workflows/core/test_engine.py
@@ -23,7 +23,7 @@ from aria.orchestrator import (
     workflow,
     operation,
 )
-from aria.storage import model
+from aria.storage.modeling import model
 from aria.orchestrator.workflows import (
     api,
     exceptions,
@@ -60,13 +60,14 @@ class BaseTest(object):
             max_attempts=None,
             retry_interval=None,
             ignore_failure=None):
-        node_instance = \
-            ctx.model.node_instance.get_by_name(mock.models.DEPENDENCY_NODE_INSTANCE_NAME)
-        node_instance.node.operations['aria.interfaces.lifecycle.create'] = {
-            'operation': '{name}.{func.__name__}'.format(name=__name__, func=func)
-        }
+        node = ctx.model.node.get_by_name(mock.models.DEPENDENCY_NODE_INSTANCE_NAME)
+        node.interfaces = [mock.models.get_interface(
+            'aria.interfaces.lifecycle.create',
+            operation_kwargs=dict(implementation='{name}.{func.__name__}'.format(name=__name__,
+                                                                                 func=func))
+        )]
         return api.task.OperationTask.node_instance(
-            instance=node_instance,
+            instance=node,
             name='aria.interfaces.lifecycle.create',
             inputs=inputs,
             max_attempts=max_attempts,
@@ -219,8 +220,12 @@ class TestCancel(BaseTest):
 
         @workflow
         def mock_workflow(ctx, graph):
-            return graph.sequence(*(self._op(mock_sleep_task, ctx, inputs={'seconds': 0.1})
-                                    for _ in range(number_of_tasks)))
+            operations = (
+                self._op(mock_sleep_task, ctx, inputs=dict(seconds=0.1))
+                for _ in range(number_of_tasks)
+            )
+            return graph.sequence(*operations)
+
         eng = self._engine(workflow_func=mock_workflow,
                            workflow_context=workflow_context,
                            executor=executor)
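
The recurring change across these test modules is how an operation gets attached to a node under test: instead of writing an entry into node_instance.node.operations, the test now assigns the node's interfaces list via the mock.models.get_interface helper and persists the node. A rough before/after sketch (the helper's signature is inferred from the hunks above, and 'some.module.create_handler' is a placeholder implementation path):

    # old style: a plain operations dict keyed by operation name
    node_instance.node.operations['aria.interfaces.lifecycle.create'] = {
        'operation': 'some.module.create_handler'
    }

    # new style: interface objects that carry an implementation string
    node.interfaces = [mock.models.get_interface(
        'aria.interfaces.lifecycle.create',
        operation_kwargs=dict(implementation='some.module.create_handler'),
    )]
    ctx.model.node.update(node)   # persist the change on the storage-backed model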

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/401e4752/tests/orchestrator/workflows/core/test_task.py
----------------------------------------------------------------------
diff --git a/tests/orchestrator/workflows/core/test_task.py b/tests/orchestrator/workflows/core/test_task.py
index 061a3f2..8d07b09 100644
--- a/tests/orchestrator/workflows/core/test_task.py
+++ b/tests/orchestrator/workflows/core/test_task.py
@@ -28,30 +28,41 @@ from aria.orchestrator.workflows import (
 
 from tests import mock, storage
 
+OP_NAME = 'tosca.interfaces.node.lifecycle.Standard.create'
+RELATIONSHIP_OP_NAME = 'tosca.interfaces.relationship.Configure.pre_configure'
+
 
 @pytest.fixture
 def ctx(tmpdir):
     context = mock.context.simple(storage.get_sqlite_api_kwargs(str(tmpdir)))
+
+    relationship = context.model.relationship.list()[0]
+    relationship.source_interfaces = [mock.models.get_interface(RELATIONSHIP_OP_NAME)]
+    relationship.target_interfaces = [mock.models.get_interface(RELATIONSHIP_OP_NAME)]
+    context.model.relationship.update(relationship)
+
+    dependent_node = context.model.node.get_by_name(mock.models.DEPENDENCY_NODE_INSTANCE_NAME)
+    dependent_node.interfaces = [mock.models.get_interface(OP_NAME)]
+    context.model.node.update(dependent_node)
+
     yield context
     storage.release_sqlite_storage(context.model)
 
 
 class TestOperationTask(object):
 
-    def _create_node_operation_task(self, ctx, node_instance):
+    def _create_node_operation_task(self, ctx, node):
         with workflow_context.current.push(ctx):
             api_task = api.task.OperationTask.node_instance(
-                instance=node_instance,
+                instance=node,
                 name='tosca.interfaces.node.lifecycle.Standard.create')
             core_task = core.task.OperationTask(api_task=api_task)
         return api_task, core_task
 
-    def _create_relationship_operation_task(self, ctx, relationship_instance, operation_end):
+    def _create_relationship_operation_task(self, ctx, relationship, operation_name):
         with workflow_context.current.push(ctx):
             api_task = api.task.OperationTask.relationship_instance(
-                instance=relationship_instance,
-                name='tosca.interfaces.relationship.Configure.pre_configure_source',
-                operation_end=operation_end)
+                instance=relationship, name=operation_name)
             core_task = core.task.OperationTask(api_task=api_task)
         return api_task, core_task
 
@@ -60,45 +71,47 @@ class TestOperationTask(object):
         storage_plugin_other = mock.models.get_plugin(package_name='p0', package_version='0.0')
         ctx.model.plugin.put(storage_plugin_other)
         ctx.model.plugin.put(storage_plugin)
-        node_instance = ctx.model.node_instance.get_by_name(
-            mock.models.DEPENDENCY_NODE_INSTANCE_NAME)
-        node = node_instance.node
+        node = ctx.model.node.get_by_name(mock.models.DEPENDENCY_NODE_INSTANCE_NAME)
+        node_template = node.node_template
         plugin_name = 'plugin1'
-        node.plugins = [{'name': plugin_name,
-                         'package_name': 'p1',
-                         'package_version': '0.1'}]
-        node.operations['tosca.interfaces.node.lifecycle.Standard.create'] = {'plugin': plugin_name}
-        api_task, core_task = self._create_node_operation_task(ctx, node_instance)
+        node_template.plugins = [{'name': 'plugin1',
+                                  'package_name': 'p1',
+                                  'package_version': '0.1'}]
+        node.interfaces = [mock.models.get_interface(
+            'tosca.interfaces.node.lifecycle.Standard.create',
+            operation_kwargs=dict(plugin='plugin1')
+        )]
+        ctx.model.node_template.update(node_template)
+        ctx.model.node.update(node)
+        api_task, core_task = self._create_node_operation_task(ctx, node)
         storage_task = ctx.model.task.get_by_name(core_task.name)
         assert storage_task.plugin_name == plugin_name
         assert storage_task.execution_name == ctx.execution.name
-        assert storage_task.runs_on.id == core_task.context.node_instance.id
+        assert storage_task.runs_on == core_task.context.node
         assert core_task.model_task == storage_task
         assert core_task.name == api_task.name
-        assert core_task.operation_mapping == api_task.operation_mapping
-        assert core_task.actor == api_task.actor == node_instance
+        assert core_task.implementation == api_task.implementation
+        assert core_task.actor == api_task.actor == node
         assert core_task.inputs == api_task.inputs == storage_task.inputs
         assert core_task.plugin == storage_plugin
 
     def test_source_relationship_operation_task_creation(self, ctx):
-        relationship_instance = ctx.model.relationship_instance.list()[0]
+        relationship = ctx.model.relationship.list()[0]
+        ctx.model.relationship.update(relationship)
         _, core_task = self._create_relationship_operation_task(
-            ctx, relationship_instance,
-            api.task.OperationTask.SOURCE_OPERATION)
-        assert core_task.model_task.runs_on.id == relationship_instance.source_node_instance.id
+            ctx, relationship, '{0}_source'.format(RELATIONSHIP_OP_NAME))
+        assert core_task.model_task.runs_on == relationship.source_node
 
     def test_target_relationship_operation_task_creation(self, ctx):
-        relationship_instance = ctx.model.relationship_instance.list()[0]
+        relationship = ctx.model.relationship.list()[0]
         _, core_task = self._create_relationship_operation_task(
-            ctx, relationship_instance,
-            api.task.OperationTask.TARGET_OPERATION)
-        assert core_task.model_task.runs_on.id == relationship_instance.target_node_instance.id
+            ctx, relationship, '{0}_target'.format(RELATIONSHIP_OP_NAME))
+        assert core_task.model_task.runs_on == relationship.target_node
 
     def test_operation_task_edit_locked_attribute(self, ctx):
-        node_instance = \
-            ctx.model.node_instance.get_by_name(mock.models.DEPENDENCY_NODE_INSTANCE_NAME)
+        node = ctx.model.node.get_by_name(mock.models.DEPENDENCY_NODE_INSTANCE_NAME)
 
-        _, core_task = self._create_node_operation_task(ctx, node_instance)
+        _, core_task = self._create_node_operation_task(ctx, node)
         now = datetime.utcnow()
         with pytest.raises(exceptions.TaskException):
             core_task.status = core_task.STARTED
@@ -112,10 +125,9 @@ class TestOperationTask(object):
             core_task.due_at = now
 
     def test_operation_task_edit_attributes(self, ctx):
-        node_instance = \
-            ctx.model.node_instance.get_by_name(mock.models.DEPENDENCY_NODE_INSTANCE_NAME)
+        node = ctx.model.node.get_by_name(mock.models.DEPENDENCY_NODE_INSTANCE_NAME)
 
-        _, core_task = self._create_node_operation_task(ctx, node_instance)
+        _, core_task = self._create_node_operation_task(ctx, node)
         future_time = datetime.utcnow() + timedelta(seconds=3)
 
         with core_task._update():
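
Relationship operations get the same treatment, with one extra twist: the operation_end argument (SOURCE_OPERATION / TARGET_OPERATION) disappears, and the side is now encoded in the operation name itself. The names used in this test are derived like so:

    RELATIONSHIP_OP_NAME = 'tosca.interfaces.relationship.Configure.pre_configure'

    source_op = '{0}_source'.format(RELATIONSHIP_OP_NAME)
    target_op = '{0}_target'.format(RELATIONSHIP_OP_NAME)

    assert source_op == 'tosca.interfaces.relationship.Configure.pre_configure_source'
    assert target_op == 'tosca.interfaces.relationship.Configure.pre_configure_target'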

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/401e4752/tests/orchestrator/workflows/core/test_task_graph_into_exececution_graph.py
----------------------------------------------------------------------
diff --git a/tests/orchestrator/workflows/core/test_task_graph_into_exececution_graph.py b/tests/orchestrator/workflows/core/test_task_graph_into_exececution_graph.py
index cd37bde..acdcb1d 100644
--- a/tests/orchestrator/workflows/core/test_task_graph_into_exececution_graph.py
+++ b/tests/orchestrator/workflows/core/test_task_graph_into_exececution_graph.py
@@ -25,20 +25,22 @@ from tests import storage
 def test_task_graph_into_execution_graph():
     operation_name = 'tosca.interfaces.node.lifecycle.Standard.create'
     task_context = mock.context.simple(storage.get_sqlite_api_kwargs())
-    node_instance = \
-        task_context.model.node_instance.get_by_name(mock.models.DEPENDENCY_NODE_INSTANCE_NAME)
+    node = task_context.model.node.get_by_name(mock.models.DEPENDENCY_NODE_INSTANCE_NAME)
+    node.interfaces = [mock.models.get_interface(operation_name)]
+    task_context.model.node.update(node)
+
     def sub_workflow(name, **_):
         return api.task_graph.TaskGraph(name)
 
     with context.workflow.current.push(task_context):
         test_task_graph = api.task.WorkflowTask(sub_workflow, name='test_task_graph')
-        simple_before_task = api.task.OperationTask.node_instance(instance=node_instance,
+        simple_before_task = api.task.OperationTask.node_instance(instance=node,
                                                                   name=operation_name)
-        simple_after_task = api.task.OperationTask.node_instance(instance=node_instance,
+        simple_after_task = api.task.OperationTask.node_instance(instance=node,
                                                                  name=operation_name)
 
         inner_task_graph = api.task.WorkflowTask(sub_workflow, name='test_inner_task_graph')
-        inner_task = api.task.OperationTask.node_instance(instance=node_instance,
+        inner_task = api.task.OperationTask.node_instance(instance=node,
                                                           name=operation_name)
         inner_task_graph.add_tasks(inner_task)
 
@@ -91,7 +93,7 @@ def test_task_graph_into_execution_graph():
 def _assert_execution_is_api_task(execution_task, api_task):
     assert execution_task.id == api_task.id
     assert execution_task.name == api_task.name
-    assert execution_task.operation_mapping == api_task.operation_mapping
+    assert execution_task.implementation == api_task.implementation
     assert execution_task.actor == api_task.actor
     assert execution_task.inputs == api_task.inputs
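
The graph construction in this test is untouched apart from the rename; for orientation, the nesting it exercises looks roughly like this (a sketch reusing node, operation_name and task_context from the test above):

    def sub_workflow(name, **_):
        # a sub-workflow is just a function returning a TaskGraph
        return api.task_graph.TaskGraph(name)

    with context.workflow.current.push(task_context):
        outer = api.task.WorkflowTask(sub_workflow, name='outer_graph')
        inner = api.task.WorkflowTask(sub_workflow, name='inner_graph')
        inner.add_tasks(api.task.OperationTask.node_instance(instance=node,
                                                             name=operation_name))
        outer.add_tasks(inner)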
 

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/401e4752/tests/orchestrator/workflows/executor/test_executor.py
----------------------------------------------------------------------
diff --git a/tests/orchestrator/workflows/executor/test_executor.py b/tests/orchestrator/workflows/executor/test_executor.py
index 2486a1e..97e1393 100644
--- a/tests/orchestrator/workflows/executor/test_executor.py
+++ b/tests/orchestrator/workflows/executor/test_executor.py
@@ -28,7 +28,7 @@ except ImportError:
     _celery = None
     app = None
 
-from aria.storage import model
+from aria.storage.modeling import model
 from aria.orchestrator import events
 from aria.orchestrator.workflows.executor import (
     thread,
@@ -43,7 +43,7 @@ def test_execute(executor):
     expected_value = 'value'
     successful_task = MockTask(mock_successful_task)
     failing_task = MockTask(mock_failing_task)
-    task_with_inputs = MockTask(mock_task_with_input, inputs={'input': expected_value})
+    task_with_inputs = MockTask(mock_task_with_input, inputs=dict(input='value'))
 
     for task in [successful_task, failing_task, task_with_inputs]:
         executor.execute(task)
@@ -98,8 +98,9 @@ class MockTask(object):
         self.exception = None
         self.id = str(uuid.uuid4())
         name = func.__name__
-        operation = 'tests.orchestrator.workflows.executor.test_executor.{name}'.format(name=name)
-        self.operation_mapping = operation
+        implementation = 'tests.orchestrator.workflows.executor.test_executor.{name}'.format(
+            name=name)
+        self.implementation = implementation
         self.logger = logging.getLogger()
         self.name = name
         self.inputs = inputs or {}
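
Executors treat tasks as duck-typed objects, so the rename here only changes the attribute name the executor reads. A minimal stand-alone stub in the spirit of MockTask (assuming the executor resolves implementation to an importable 'module.function' path):

    import logging
    import uuid

    class StubTask(object):
        """Bare-minimum attributes an executor reads off a task."""
        def __init__(self, func, inputs=None):
            self.id = str(uuid.uuid4())
            self.name = func.__name__
            self.implementation = '{0}.{1}'.format(func.__module__, func.__name__)
            self.inputs = inputs or {}
            self.logger = logging.getLogger(__name__)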

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/401e4752/tests/orchestrator/workflows/executor/test_process_executor.py
----------------------------------------------------------------------
diff --git a/tests/orchestrator/workflows/executor/test_process_executor.py b/tests/orchestrator/workflows/executor/test_process_executor.py
index 687e245..ec01d60 100644
--- a/tests/orchestrator/workflows/executor/test_process_executor.py
+++ b/tests/orchestrator/workflows/executor/test_process_executor.py
@@ -22,7 +22,7 @@ from contextlib import contextmanager
 import pytest
 
 from aria import application_model_storage
-from aria.storage import model as aria_model
+from aria.storage.modeling import model as aria_model
 from aria.utils.plugin import create as create_plugin
 from aria.storage.sql_mapi import SQLAlchemyModelAPI
 from aria.orchestrator import events
@@ -38,7 +38,7 @@ class TestProcessExecutor(object):
 
     def test_plugin_execution(self, executor, mock_plugin):
         task = MockTask(plugin=mock_plugin,
-                        operation='mock_plugin1.operation')
+                        implementation='mock_plugin1.operation')
 
         queue = Queue.Queue()
 
@@ -119,11 +119,11 @@ class MockTask(object):
 
     INFINITE_RETRIES = aria_model.Task.INFINITE_RETRIES
 
-    def __init__(self, plugin, operation):
+    def __init__(self, plugin, implementation):
         self.id = str(uuid.uuid4())
-        self.operation_mapping = operation
+        self.implementation = implementation
         self.logger = logging.getLogger()
-        self.name = operation
+        self.name = implementation
         self.inputs = {}
         self.context = MockContext()
         self.retry_count = 0

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/401e4752/tests/orchestrator/workflows/executor/test_process_executor_extension.py
----------------------------------------------------------------------
diff --git a/tests/orchestrator/workflows/executor/test_process_executor_extension.py b/tests/orchestrator/workflows/executor/test_process_executor_extension.py
index 4a8ef57..3c3ffb0 100644
--- a/tests/orchestrator/workflows/executor/test_process_executor_extension.py
+++ b/tests/orchestrator/workflows/executor/test_process_executor_extension.py
@@ -30,14 +30,17 @@ def test_decorate_extension(context, executor):
     inputs = {'input1': 1, 'input2': 2}
 
     def get_node_instance(ctx):
-        return ctx.model.node_instance.get_by_name(mock.models.DEPENDENCY_NODE_INSTANCE_NAME)
+        return ctx.model.node.get_by_name(mock.models.DEPENDENCY_NODE_INSTANCE_NAME)
 
     @workflow
     def mock_workflow(ctx, graph):
         node_instance = get_node_instance(ctx)
         op = 'test.op'
-        op_dict = {'operation': '{0}.{1}'.format(__name__, _mock_operation.__name__)}
-        node_instance.node.operations['test.op'] = op_dict
+        node_instance.interfaces = [mock.models.get_interface(
+            op,
+            operation_kwargs=dict(implementation='{0}.{1}'.format(__name__,
+                                                                  _mock_operation.__name__))
+        )]
+        task = api.task.OperationTask.node_instance(instance=node_instance, name=op, inputs=inputs)
         graph.add_tasks(task)
         return graph
@@ -55,7 +58,7 @@ class MockProcessExecutorExtension(object):
     def decorate(self):
         def decorator(function):
             def wrapper(ctx, **operation_inputs):
-                ctx.node_instance.runtime_properties['out'] = {'wrapper_inputs': operation_inputs}
+                ctx.node.runtime_properties['out'] = {'wrapper_inputs': operation_inputs}
                 function(ctx=ctx, **operation_inputs)
             return wrapper
         return decorator
@@ -63,7 +66,7 @@ class MockProcessExecutorExtension(object):
 
 @operation
 def _mock_operation(ctx, **operation_inputs):
-    ctx.node_instance.runtime_properties['out']['function_inputs'] = operation_inputs
+    ctx.node.runtime_properties['out']['function_inputs'] = operation_inputs
 
 
 @pytest.fixture
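
Inside an @operation function the same rename applies to the context proxy: runtime properties hang off ctx.node rather than ctx.node_instance. A tiny sketch of the pattern the wrapper and the mocked operation above both rely on:

    from aria.orchestrator import operation

    @operation
    def record_inputs(ctx, **operation_inputs):
        # runtime_properties is a dict-like store on the node the operation runs against
        ctx.node.runtime_properties['out'] = {'function_inputs': operation_inputs}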

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/401e4752/tests/orchestrator/workflows/executor/test_process_executor_tracked_changes.py
----------------------------------------------------------------------
diff --git a/tests/orchestrator/workflows/executor/test_process_executor_tracked_changes.py b/tests/orchestrator/workflows/executor/test_process_executor_tracked_changes.py
index bd1fa96..af318c3 100644
--- a/tests/orchestrator/workflows/executor/test_process_executor_tracked_changes.py
+++ b/tests/orchestrator/workflows/executor/test_process_executor_tracked_changes.py
@@ -45,13 +45,13 @@ def test_track_changes_of_failed_operation(context, executor):
 
 
 def _assert_tracked_changes_are_applied(context):
-    instance = context.model.node_instance.get_by_name(mock.models.DEPENDENCY_NODE_INSTANCE_NAME)
+    instance = context.model.node.get_by_name(mock.models.DEPENDENCY_NODE_INSTANCE_NAME)
     assert instance.runtime_properties == _TEST_RUNTIME_PROPERTIES
 
 
 def _update_runtime_properties(context):
-    context.node_instance.runtime_properties.clear()
-    context.node_instance.runtime_properties.update(_TEST_RUNTIME_PROPERTIES)
+    context.node.runtime_properties.clear()
+    context.node.runtime_properties.update(_TEST_RUNTIME_PROPERTIES)
 
 
 def test_refresh_state_of_tracked_attributes(context, executor):
@@ -66,7 +66,7 @@ def test_apply_tracked_changes_during_an_operation(context, executor):
         'changed_but_refreshed': {'some': 'newer', 'properties': 'right there'}
     }
 
-    expected_initial = context.model.node_instance.get_by_name(
+    expected_initial = context.model.node.get_by_name(
         mock.models.DEPENDENCY_NODE_INSTANCE_NAME).runtime_properties
 
     out = _run_workflow(context=context, executor=executor, op_func=_mock_updating_operation,
@@ -87,17 +87,18 @@ def test_apply_tracked_changes_during_an_operation(context, executor):
 def _run_workflow(context, executor, op_func, inputs=None):
     @workflow
     def mock_workflow(ctx, graph):
-        node_instance = ctx.model.node_instance.get_by_name(
-            mock.models.DEPENDENCY_NODE_INSTANCE_NAME)
-        node_instance.node.operations['test.op'] = {'operation': _operation_mapping(op_func)}
-        task = api.task.OperationTask.node_instance(instance=node_instance, name='test.op',
+        node = ctx.model.node.get_by_name(mock.models.DEPENDENCY_NODE_INSTANCE_NAME)
+        node.interfaces = [mock.models.get_interface(
+            'test.op', operation_kwargs=dict(implementation=_operation_mapping(op_func)))]
+        task = api.task.OperationTask.node_instance(instance=node,
+                                                    name='test.op',
                                                     inputs=inputs or {})
         graph.add_tasks(task)
         return graph
     graph = mock_workflow(ctx=context)  # pylint: disable=no-value-for-parameter
     eng = engine.Engine(executor=executor, workflow_context=context, tasks_graph=graph)
     eng.execute()
-    return context.model.node_instance.get_by_name(
+    return context.model.node.get_by_name(
         mock.models.DEPENDENCY_NODE_INSTANCE_NAME).runtime_properties.get('out')
 
 
@@ -114,25 +115,25 @@ def _mock_fail_operation(ctx):
 
 @operation
 def _mock_refreshing_operation(ctx):
-    out = {'initial': copy.deepcopy(ctx.node_instance.runtime_properties)}
-    ctx.node_instance.runtime_properties.update({'some': 'new', 'properties': 'right here'})
-    out['after_change'] = copy.deepcopy(ctx.node_instance.runtime_properties)
-    ctx.model.node_instance.refresh(ctx.node_instance)
-    out['after_refresh'] = copy.deepcopy(ctx.node_instance.runtime_properties)
-    ctx.node_instance.runtime_properties['out'] = out
+    out = {'initial': copy.deepcopy(ctx.node.runtime_properties)}
+    ctx.node.runtime_properties.update({'some': 'new', 'properties': 'right here'})
+    out['after_change'] = copy.deepcopy(ctx.node.runtime_properties)
+    ctx.model.node.refresh(ctx.node)
+    out['after_refresh'] = copy.deepcopy(ctx.node.runtime_properties)
+    ctx.node.runtime_properties['out'] = out
 
 
 @operation
 def _mock_updating_operation(ctx, committed, changed_but_refreshed):
-    out = {'initial': copy.deepcopy(ctx.node_instance.runtime_properties)}
-    ctx.node_instance.runtime_properties.update(committed)
-    ctx.model.node_instance.update(ctx.node_instance)
-    out['after_update'] = copy.deepcopy(ctx.node_instance.runtime_properties)
-    ctx.node_instance.runtime_properties.update(changed_but_refreshed)
-    out['after_change'] = copy.deepcopy(ctx.node_instance.runtime_properties)
-    ctx.model.node_instance.refresh(ctx.node_instance)
-    out['after_refresh'] = copy.deepcopy(ctx.node_instance.runtime_properties)
-    ctx.node_instance.runtime_properties['out'] = out
+    out = {'initial': copy.deepcopy(ctx.node.runtime_properties)}
+    ctx.node.runtime_properties.update(committed)
+    ctx.model.node.update(ctx.node)
+    out['after_update'] = copy.deepcopy(ctx.node.runtime_properties)
+    ctx.node.runtime_properties.update(changed_but_refreshed)
+    out['after_change'] = copy.deepcopy(ctx.node.runtime_properties)
+    ctx.model.node.refresh(ctx.node)
+    out['after_refresh'] = copy.deepcopy(ctx.node.runtime_properties)
+    ctx.node.runtime_properties['out'] = out
 
 
 def _operation_mapping(func):
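
The tracked-changes tests lean on two model-API calls with opposite effects: update pushes the in-memory runtime properties to storage, while refresh discards unsaved edits and reloads the stored state. Condensed from the operations above into one sketch:

    @operation
    def _sketch_update_vs_refresh(ctx):
        ctx.node.runtime_properties['committed'] = True
        ctx.model.node.update(ctx.node)      # persisted; survives a later refresh

        ctx.node.runtime_properties['scratch'] = True
        ctx.model.node.refresh(ctx.node)     # reload from storage; 'scratch' is gone
        assert 'scratch' not in ctx.node.runtime_properties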

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/401e4752/tests/resources/scripts/test_ssh.sh
----------------------------------------------------------------------
diff --git a/tests/resources/scripts/test_ssh.sh b/tests/resources/scripts/test_ssh.sh
index 6f18278..90202c7 100644
--- a/tests/resources/scripts/test_ssh.sh
+++ b/tests/resources/scripts/test_ssh.sh
@@ -4,7 +4,7 @@ set -u
 set -e
 
 test_run_script_basic() {
-    ctx node-instance runtime-properties test_value $test_value
+    ctx node runtime-properties test_value $test_value
 }
 
 test_run_script_as_sudo() {
@@ -12,7 +12,7 @@ test_run_script_as_sudo() {
 }
 
 test_run_script_default_base_dir() {
-    ctx node-instance runtime-properties work_dir $PWD
+    ctx node runtime-properties work_dir $PWD
 }
 
 test_run_script_with_hide() {
@@ -20,44 +20,44 @@ test_run_script_with_hide() {
 }
 
 test_run_script_process_config() {
-    ctx node-instance runtime-properties env_value $test_value_env
-    ctx node-instance runtime-properties bash_version $BASH_VERSION
-    ctx node-instance runtime-properties arg1_value $1
-    ctx node-instance runtime-properties arg2_value $2
-    ctx node-instance runtime-properties cwd $PWD
-    ctx node-instance runtime-properties ctx_path $(which ctx)
+    ctx node runtime-properties env_value $test_value_env
+    ctx node runtime-properties bash_version $BASH_VERSION
+    ctx node runtime-properties arg1_value $1
+    ctx node runtime-properties arg2_value $2
+    ctx node runtime-properties cwd $PWD
+    ctx node runtime-properties ctx_path $(which ctx)
 }
 
 test_run_script_command_prefix() {
-    ctx node-instance runtime-properties dollar_dash $-
+    ctx node runtime-properties dollar_dash $-
 }
 
 test_run_script_reuse_existing_ctx_1() {
-    ctx node-instance runtime-properties test_value1 $test_value1
+    ctx node runtime-properties test_value1 $test_value1
 }
 
 test_run_script_reuse_existing_ctx_2() {
-    ctx node-instance runtime-properties test_value2 $test_value2
+    ctx node runtime-properties test_value2 $test_value2
 }
 
 test_run_script_download_resource_plain() {
     local destination=$(mktemp)
     ctx download-resource ${destination} test_resource
-    ctx node-instance runtime-properties test_value "$(cat ${destination})"
+    ctx node runtime-properties test_value "$(cat ${destination})"
 }
 
 test_run_script_download_resource_and_render() {
     local destination=$(mktemp)
     ctx download-resource-and-render ${destination} test_resource
-    ctx node-instance runtime-properties test_value "$(cat ${destination})"
+    ctx node runtime-properties test_value "$(cat ${destination})"
 }
 
 test_run_script_inputs_as_env_variables_no_override() {
-    ctx node-instance runtime-properties test_value "$custom_env_var"
+    ctx node runtime-properties test_value "$custom_env_var"
 }
 
 test_run_script_inputs_as_env_variables_process_env_override() {
-    ctx node-instance runtime-properties test_value "$custom_env_var"
+    ctx node runtime-properties test_value "$custom_env_var"
 }
 
 test_run_script_error_in_script() {

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/401e4752/tests/storage/__init__.py
----------------------------------------------------------------------
diff --git a/tests/storage/__init__.py b/tests/storage/__init__.py
index 9101fd0..2363957 100644
--- a/tests/storage/__init__.py
+++ b/tests/storage/__init__.py
@@ -14,16 +14,17 @@
 # limitations under the License.
 import os
 import platform
-from tempfile import mkdtemp
 from shutil import rmtree
+from tempfile import mkdtemp
 
-from aria.storage import model
 from sqlalchemy import (
     create_engine,
     orm)
 from sqlalchemy.orm import scoped_session
 from sqlalchemy.pool import StaticPool
 
+from aria.storage import modeling
+
 
 class TestFileSystem(object):
 
@@ -34,7 +35,8 @@ class TestFileSystem(object):
         rmtree(self.path, ignore_errors=True)
 
 
-def get_sqlite_api_kwargs(base_dir=None, filename='db.sqlite'):
+def get_sqlite_api_kwargs(base_dir=None,
+                          filename='db.sqlite'):
     """
     Create sql params. works in in-memory and in filesystem mode.
     If base_dir is passed, the mode will be filesystem mode. while the default mode is in-memory.
@@ -59,7 +61,7 @@ def get_sqlite_api_kwargs(base_dir=None, filename='db.sqlite'):
     session_factory = orm.sessionmaker(bind=engine)
     session = scoped_session(session_factory=session_factory) if base_dir else session_factory()
 
-    model.DeclarativeBase.metadata.create_all(bind=engine)
+    modeling.declarative_base.metadata.create_all(bind=engine)
     return dict(engine=engine, session=session)
 
 
@@ -76,4 +78,4 @@ def release_sqlite_storage(storage):
             session.rollback()
             session.close()
         for engine in set(mapi._engine for mapi in mapis):
-            model.DeclarativeBase.metadata.drop_all(engine)
+            modeling.declarative_base.metadata.drop_all(engine)
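
get_sqlite_api_kwargs picks between a file-backed database (when base_dir is given) and a shared in-memory one, then creates all tables from the modeling package's declarative base. A stripped-down sketch of the in-memory branch (the exact engine arguments are an assumption; StaticPool is what lets every session share the single in-memory database):

    from sqlalchemy import create_engine, orm
    from sqlalchemy.pool import StaticPool

    from aria.storage import modeling

    def in_memory_session():
        engine = create_engine('sqlite:///:memory:',
                               connect_args={'check_same_thread': False},
                               poolclass=StaticPool)
        session = orm.sessionmaker(bind=engine)()
        modeling.declarative_base.metadata.create_all(bind=engine)
        return engine, session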

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/401e4752/tests/storage/test_instrumentation.py
----------------------------------------------------------------------
diff --git a/tests/storage/test_instrumentation.py b/tests/storage/test_instrumentation.py
index 8b826e9..16a655b 100644
--- a/tests/storage/test_instrumentation.py
+++ b/tests/storage/test_instrumentation.py
@@ -17,16 +17,15 @@ import pytest
 from sqlalchemy import Column, Text, Integer, event
 
 from aria.storage import (
-    model,
+    modeling,
     structure,
-    type as aria_type,
     ModelStorage,
     sql_mapi,
     instrumentation
 )
+from aria.storage.modeling import type as aria_type
 from ..storage import get_sqlite_api_kwargs, release_sqlite_storage
 
-
 STUB = instrumentation._STUB
 Value = instrumentation._Value
 instruments_holder = []
@@ -305,9 +304,9 @@ class _MockModel(structure.ModelMixin):
     string2 = Column(Text)
 
 
-class MockModel1(model.DeclarativeBase, _MockModel):
+class MockModel1(modeling.declarative_base, _MockModel):
     __tablename__ = 'mock_model1'
 
 
-class MockModel2(model.DeclarativeBase, _MockModel):
+class MockModel2(modeling.declarative_base, _MockModel):
     __tablename__ = 'mock_model2'
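
The MockModel classes above illustrate the extension point after the move to aria.storage.modeling: columns live on a plain mixin derived from structure.ModelMixin, and concrete tables are produced by pairing that mixin with modeling.declarative_base. The same pattern with made-up names:

    from sqlalchemy import Column, Text

    from aria.storage import modeling, structure

    class _ExampleMixin(structure.ModelMixin):
        value = Column(Text)

    class ExampleModel(modeling.declarative_base, _ExampleMixin):
        __tablename__ = 'example_model'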
