diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/services/task_service.py b/spiffworkflow-backend/src/spiffworkflow_backend/services/task_service.py
index be6407b58..a67a7755d 100644
--- a/spiffworkflow-backend/src/spiffworkflow_backend/services/task_service.py
+++ b/spiffworkflow-backend/src/spiffworkflow_backend/services/task_service.py
@@ -59,6 +59,8 @@ class TaskService:
It also returns the relating json_data object so they can be imported later.
"""
new_properties_json = serializer.task_to_dict(spiff_task)
+ if new_properties_json["task_spec"] == "Start":
+ new_properties_json["parent"] = None
spiff_task_data = new_properties_json.pop("data")
python_env_data_dict = cls._get_python_env_data_dict_from_spiff_task(spiff_task, serializer)
task_model.properties_json = new_properties_json
@@ -251,11 +253,7 @@ class TaskService:
# bpmn process defintion so let's avoid using it.
if task_properties["task_spec"] == "Root":
continue
- if task_properties["task_spec"] == "Start":
- task_properties["parent"] = None
- task_data_dict = task_properties.pop("data")
- state_int = task_properties["state"]
spiff_task = spiff_workflow.get_task_from_id(UUID(task_id))
task_model = TaskModel.query.filter_by(guid=task_id).first()
@@ -266,23 +264,14 @@ class TaskService:
spiff_task,
bpmn_definition_to_task_definitions_mappings,
)
- task_model.state = TaskStateNames[state_int]
- task_model.properties_json = task_properties
- new_task_models[task_model.guid] = task_model
- json_data_dict = TaskService.update_task_data_on_task_model(
- task_model, task_data_dict, "json_data_hash"
- )
+ json_data_dict, python_env_dict = cls.update_task_model(task_model, spiff_task, serializer)
+
+ new_task_models[task_model.guid] = task_model
if json_data_dict is not None:
new_json_data_dicts[json_data_dict["hash"]] = json_data_dict
-
- python_env_data_dict = cls._get_python_env_data_dict_from_spiff_task(spiff_task, serializer)
- python_env_dict = TaskService.update_task_data_on_task_model(
- task_model, python_env_data_dict, "python_env_data_hash"
- )
if python_env_dict is not None:
new_json_data_dicts[python_env_dict["hash"]] = python_env_dict
-
return (bpmn_process, new_task_models, new_json_data_dicts)
@classmethod
diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/services/workflow_execution_service.py b/spiffworkflow-backend/src/spiffworkflow_backend/services/workflow_execution_service.py
index ac0f1a41d..cc6a3a02f 100644
--- a/spiffworkflow-backend/src/spiffworkflow_backend/services/workflow_execution_service.py
+++ b/spiffworkflow-backend/src/spiffworkflow_backend/services/workflow_execution_service.py
@@ -123,7 +127,7 @@ class TaskModelSavingDelegate(EngineStepDelegate):
def _process_spiff_task_parents(self, spiff_task: SpiffTask) -> None:
(parent_subprocess_guid, _parent_subprocess) = TaskService.task_subprocess(spiff_task)
if parent_subprocess_guid is not None:
- spiff_task_of_parent_subprocess = spiff_task.workflow._get_outermost_workflow().get_task(
+ spiff_task_of_parent_subprocess = spiff_task.workflow._get_outermost_workflow().get_task_from_id(
UUID(parent_subprocess_guid)
)
diff --git a/spiffworkflow-backend/tests/data/manual_task_with_subprocesses/manual_task_with_subprocesses.bpmn b/spiffworkflow-backend/tests/data/manual_task_with_subprocesses/manual_task_with_subprocesses.bpmn
index f49f99cd9..d2b1d94e5 100644
--- a/spiffworkflow-backend/tests/data/manual_task_with_subprocesses/manual_task_with_subprocesses.bpmn
+++ b/spiffworkflow-backend/tests/data/manual_task_with_subprocesses/manual_task_with_subprocesses.bpmn
@@ -7,12 +7,12 @@
Flow_1ygcsbt
-
+
## Hello
- Flow_1fktmf7
Flow_1t9ywmr
+ Flow_0q30935
Flow_09gjylo
@@ -21,8 +21,8 @@
Flow_1fktmf7
set_in_top_level_script = 1
-
-
+
+
Flow_09gjylo
Flow_0yxus36
@@ -65,34 +65,43 @@ except:
Flow_1ygcsbt
set_top_level_process_script_after_gate = 1
-
+
+
+
+ Flow_1fktmf7
+ Flow_0q30935
+
-
-
-
-
-
-
-
-
-
-
-
-
-
-
+
+
-
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
@@ -100,33 +109,37 @@ except:
-
+
-
-
+
+
-
-
+
+
-
-
+
+
-
-
+
+
-
-
-
-
+
+
+
+
-
-
+
+
+
+
+
+
diff --git a/spiffworkflow-backend/tests/spiffworkflow_backend/unit/test_process_instance_processor.py b/spiffworkflow-backend/tests/spiffworkflow_backend/unit/test_process_instance_processor.py
index b85bbec32..0675394b5 100644
--- a/spiffworkflow-backend/tests/spiffworkflow_backend/unit/test_process_instance_processor.py
+++ b/spiffworkflow-backend/tests/spiffworkflow_backend/unit/test_process_instance_processor.py
@@ -1,5 +1,6 @@
"""Test_process_instance_processor."""
from uuid import UUID
+import json
import pytest
from flask import g
@@ -305,7 +306,7 @@ class TestProcessInstanceProcessor(BaseTest):
processor.resume()
processor.do_engine_steps(save=True)
human_task_one = process_instance.active_human_tasks[0]
- spiff_manual_task = processor.bpmn_process_instance.get_task(UUID(human_task_one.task_id))
+ spiff_manual_task = processor.bpmn_process_instance.get_task_from_id(UUID(human_task_one.task_id))
ProcessInstanceService.complete_form_task(processor, spiff_manual_task, {}, initiator_user, human_task_one)
assert process_instance.status == "complete"
@@ -335,34 +336,26 @@
)
processor = ProcessInstanceProcessor(process_instance)
processor.do_engine_steps(save=True)
- import pdb; pdb.set_trace()
assert len(process_instance.active_human_tasks) == 1
initial_human_task_id = process_instance.active_human_tasks[0].id
-
- # save again to ensure we go attempt to process the human tasks again
- processor.save()
-
assert len(process_instance.active_human_tasks) == 1
assert initial_human_task_id == process_instance.active_human_tasks[0].id
- processor = ProcessInstanceProcessor(process_instance)
human_task_one = process_instance.active_human_tasks[0]
- spiff_manual_task = processor.__class__.get_task_by_bpmn_identifier(
- human_task_one.task_name, processor.bpmn_process_instance
- )
+ spiff_manual_task = processor.bpmn_process_instance.get_task_from_id(UUID(human_task_one.task_id))
+ ProcessInstanceService.complete_form_task(processor, spiff_manual_task, {}, initiator_user, human_task_one)
+
processor.suspend()
- ProcessInstanceProcessor.reset_process(process_instance, str(spiff_manual_task.parent.id), commit=True)
- import pdb; pdb.set_trace()
+ ProcessInstanceProcessor.reset_process(process_instance, str(spiff_manual_task.id), commit=True)
process_instance = ProcessInstanceModel.query.filter_by(id=process_instance.id).first()
processor = ProcessInstanceProcessor(process_instance)
processor.resume()
processor.do_engine_steps(save=True)
- import pdb; pdb.set_trace()
human_task_one = process_instance.active_human_tasks[0]
- spiff_manual_task = processor.bpmn_process_instance.get_task(UUID(human_task_one.task_id))
+ spiff_manual_task = processor.bpmn_process_instance.get_task_from_id(UUID(human_task_one.task_id))
ProcessInstanceService.complete_form_task(processor, spiff_manual_task, {}, initiator_user, human_task_one)
assert process_instance.status == "complete"
def test_properly_saves_tasks_when_running(