Repository: incubator-ariatosca
Updated Branches:
  refs/heads/generic_storage_update_serialization_logger_handler [created] 9dcb4af5a


changed the update mechanism in all events to be more generic
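
For context on the change below: the event handlers stop relying on the context's persist_changes context manager and instead mutate the model instance and persist it explicitly through the storage layer's update() call. A minimal, self-contained sketch of that pattern, where _Store and _Task are hypothetical stand-ins rather than ARIA's actual model API:

    class _Store(object):
        """Hypothetical stand-in for one model's storage API (e.g. ctx.model.task)."""

        def __init__(self):
            self._items = {}

        def update(self, item):
            # Persist the mutated instance explicitly, keyed by its id.
            self._items[item.id] = item


    class _Task(object):
        """Hypothetical stand-in for a task model instance."""

        SENT = 'sent'

        def __init__(self, id_):
            self.id = id_
            self.status = None


    store = _Store()
    task = _Task(1)
    task.status = _Task.SENT   # mutate the in-memory model first...
    store.update(task)         # ...then push it through the generic update() call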


Project: http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/commit/b12bfd21
Tree: http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/tree/b12bfd21
Diff: http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/diff/b12bfd21

Branch: refs/heads/generic_storage_update_serialization_logger_handler
Commit: b12bfd211aa242cc009b0d7b5b33e52fc1fa1896
Parents: e71ddc9
Author: max-orlov <ma...@gigaspaces.com>
Authored: Tue Nov 28 17:19:48 2017 +0200
Committer: max-orlov <ma...@gigaspaces.com>
Committed: Tue Nov 28 17:19:48 2017 +0200

----------------------------------------------------------------------
 .../workflows/core/events_handler.py            | 64 +++++++++++---------
 aria/orchestrator/workflows/executor/base.py    |  5 +-
 2 files changed, 40 insertions(+), 29 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/b12bfd21/aria/orchestrator/workflows/core/events_handler.py
----------------------------------------------------------------------
diff --git a/aria/orchestrator/workflows/core/events_handler.py b/aria/orchestrator/workflows/core/events_handler.py
index 473475e..6da59ba 100644
--- a/aria/orchestrator/workflows/core/events_handler.py
+++ b/aria/orchestrator/workflows/core/events_handler.py
@@ -28,16 +28,20 @@ from ... import exceptions
 
 @events.sent_task_signal.connect
 def _task_sent(ctx, *args, **kwargs):
-    with ctx.persist_changes:
-        ctx.task.status = ctx.task.SENT
+    # with ctx.persist_changes:
+    task = ctx.task
+    task.status = ctx.task.SENT
+    ctx.model.task.update(task)
 
 
 @events.start_task_signal.connect
 def _task_started(ctx, *args, **kwargs):
-    with ctx.persist_changes:
-        ctx.task.started_at = datetime.utcnow()
-        ctx.task.status = ctx.task.STARTED
-        _update_node_state_if_necessary(ctx, is_transitional=True)
+    # with ctx.persist_changes:
+    task = ctx.task
+    ctx.task.started_at = datetime.utcnow()
+    ctx.task.status = ctx.task.STARTED
+    _update_node_state_if_necessary(ctx, is_transitional=True)
+    ctx.model.task.update(task)
 
 
 @events.on_failure_task_signal.connect
@@ -68,40 +72,46 @@ def _task_failed(ctx, exception, *args, **kwargs):
 
 @events.on_success_task_signal.connect
 def _task_succeeded(ctx, *args, **kwargs):
-    with ctx.persist_changes:
-        ctx.task.ended_at = datetime.utcnow()
-        ctx.task.status = ctx.task.SUCCESS
-        ctx.task.attempts_count += 1
+    # with ctx.persist_changes:
 
-        _update_node_state_if_necessary(ctx)
+    task = ctx.task
+    ctx.task.ended_at = datetime.utcnow()
+    ctx.task.status = ctx.task.SUCCESS
+    ctx.task.attempts_count += 1
+
+    _update_node_state_if_necessary(ctx)
+    ctx.model.task.update(task)
 
 
 @events.start_workflow_signal.connect
 def _workflow_started(workflow_context, *args, **kwargs):
-    with workflow_context.persist_changes:
-        execution = workflow_context.execution
-        # the execution may already be in the process of cancelling
-        if execution.status in (execution.CANCELLING, execution.CANCELLED):
-            return
-        execution.status = execution.STARTED
-        execution.started_at = datetime.utcnow()
+    # with workflow_context.persist_changes:
+    execution = workflow_context.execution
+    # the execution may already be in the process of cancelling
+    if execution.status in (execution.CANCELLING, execution.CANCELLED):
+        return
+    execution.status = execution.STARTED
+    execution.started_at = datetime.utcnow()
+    workflow_context.model.execution.update(execution)
 
 
 @events.on_failure_workflow_signal.connect
 def _workflow_failed(workflow_context, exception, *args, **kwargs):
-    with workflow_context.persist_changes:
-        execution = workflow_context.execution
-        execution.error = str(exception)
-        execution.status = execution.FAILED
-        execution.ended_at = datetime.utcnow()
+    # with workflow_context.persist_changes:
+    execution = workflow_context.execution
+    execution.error = str(exception)
+    execution.status = execution.FAILED
+    execution.ended_at = datetime.utcnow()
+    workflow_context.model.execution.update(execution)
 
 
 @events.on_success_workflow_signal.connect
 def _workflow_succeeded(workflow_context, *args, **kwargs):
-    with workflow_context.persist_changes:
-        execution = workflow_context.execution
-        execution.status = execution.SUCCEEDED
-        execution.ended_at = datetime.utcnow()
+    # with workflow_context.persist_changes:
+    execution = workflow_context.execution
+    execution.status = execution.SUCCEEDED
+    execution.ended_at = datetime.utcnow()
+    workflow_context.model.execution.update(execution)
 
 
 @events.on_cancelled_workflow_signal.connect

http://git-wip-us.apache.org/repos/asf/incubator-ariatosca/blob/b12bfd21/aria/orchestrator/workflows/executor/base.py
----------------------------------------------------------------------
diff --git a/aria/orchestrator/workflows/executor/base.py b/aria/orchestrator/workflows/executor/base.py
index e7d03ea..d550b53 100644
--- a/aria/orchestrator/workflows/executor/base.py
+++ b/aria/orchestrator/workflows/executor/base.py
@@ -71,5 +71,6 @@ class BaseExecutor(logger.LoggerMixin):
 
 class StubTaskExecutor(BaseExecutor):  # pylint: disable=abstract-method
     def execute(self, ctx, *args, **kwargs):
-        with ctx.persist_changes:
-            ctx.task.status = ctx.task.SUCCESS
+        task = ctx.task
+        task.status = ctx.task.SUCCESS
+        ctx.model.task.update(task)
