Merge pull request #200 from sartography/feature/fix_process_instance_rewind

Feature/fix process instance rewind
This commit is contained in:
Kevin Burnett 2023-03-31 07:57:45 -07:00 committed by GitHub
commit 457487ff63
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
9 changed files with 625 additions and 197 deletions

View File

@ -1344,7 +1344,14 @@ class ProcessInstanceProcessor:
# db.session.commit()
#
# for task_to_update in tasks_to_update:
# # print(f"task_to_update: {task_to_update}")
# print(f"task_to_update.state: {task_to_update.state}")
# TaskService.reset_task_model(task_to_update, state="FUTURE", commit=commit)
# # TaskService.reset_task_model(task_to_update, state=task_to_update.state, commit=commit)
# # if task_to_update.task_definition.bpmn_identifier != 'top_level_process_script_after_gate':
# # TaskService.reset_task_model(task_to_update, state='FUTURE', commit=commit)
# # else:
# # TaskService.reset_task_model(task_to_update, state=task_to_update.state, commit=commit)
#
# parent_task_model = TaskModel.query.filter_by(guid=to_task_model.properties_json["parent"]).first()
# if parent_task_model is None:
@ -1362,6 +1369,13 @@ class ProcessInstanceProcessor:
# for task_model in task_models_of_parent_bpmn_processes:
# TaskService.reset_task_model(task_model, state="WAITING", commit=commit)
#
# bpmn_process = to_task_model.bpmn_process
# properties_json = copy.copy(bpmn_process.properties_json)
# properties_json["last_task"] = parent_task_model.guid
# bpmn_process.properties_json = properties_json
# db.session.add(bpmn_process)
# db.session.commit()
#
# if commit:
# processor = ProcessInstanceProcessor(process_instance)
# processor.save()
@ -1802,6 +1816,13 @@ class ProcessInstanceProcessor:
user_id=user.id,
)
task_service = TaskService(
process_instance=self.process_instance_model,
serializer=self._serializer,
bpmn_definition_to_task_definitions_mappings=self.bpmn_definition_to_task_definitions_mappings,
)
task_service.process_parents_and_children_and_save_to_database(spiff_task)
# this is the thing that actually commits the db transaction (on behalf of the other updates above as well)
self.save()
@ -1873,6 +1894,9 @@ class ProcessInstanceProcessor:
all_tasks = self.bpmn_process_instance.get_tasks(TaskState.ANY_MASK)
return [t for t in all_tasks if t.state in [TaskState.WAITING, TaskState.READY]]
def get_task_by_guid(self, task_guid: str) -> Optional[SpiffTask]:
    """Return the SpiffWorkflow task with the given guid, or None if the workflow has no such task."""
    return self.bpmn_process_instance.get_task_from_id(UUID(task_guid))
@classmethod
def get_task_by_bpmn_identifier(
cls, bpmn_task_identifier: str, bpmn_process_instance: BpmnWorkflow

View File

@ -1,4 +1,6 @@
import copy
import json
import time
from hashlib import sha256
from typing import Optional
from typing import Tuple
@ -19,6 +21,8 @@ from spiffworkflow_backend.models.bpmn_process import BpmnProcessNotFoundError
from spiffworkflow_backend.models.db import db
from spiffworkflow_backend.models.json_data import JsonDataModel # noqa: F401
from spiffworkflow_backend.models.process_instance import ProcessInstanceModel
from spiffworkflow_backend.models.process_instance_event import ProcessInstanceEventModel
from spiffworkflow_backend.models.process_instance_event import ProcessInstanceEventType
from spiffworkflow_backend.models.task import TaskModel # noqa: F401
@ -30,6 +34,145 @@ class JsonDataDict(TypedDict):
class TaskService:
PYTHON_ENVIRONMENT_STATE_KEY = "spiff__python_env_state"
def __init__(
    self,
    process_instance: ProcessInstanceModel,
    serializer: BpmnWorkflowSerializer,
    bpmn_definition_to_task_definitions_mappings: dict,
) -> None:
    """Accumulate task/bpmn-process/json-data changes in memory for a single process instance.

    Nothing is written to the database here; collected objects are flushed
    later by save_objects_to_database.
    """
    self.process_instance = process_instance
    self.bpmn_definition_to_task_definitions_mappings = bpmn_definition_to_task_definitions_mappings
    self.serializer = serializer
    # Accumulators keyed by guid (or json-data hash); deduplicated by key and
    # flushed in bulk by save_objects_to_database.
    self.bpmn_processes: dict[str, BpmnProcessModel] = {}
    self.task_models: dict[str, TaskModel] = {}
    self.json_data_dicts: dict[str, JsonDataDict] = {}
    self.process_instance_events: dict[str, ProcessInstanceEventModel] = {}
def save_objects_to_database(self) -> None:
    """Flush every accumulated model to the db session in bulk.

    Does not commit -- the caller owns the transaction boundary.
    """
    db.session.bulk_save_objects(self.bpmn_processes.values())
    db.session.bulk_save_objects(self.task_models.values())
    db.session.bulk_save_objects(self.process_instance_events.values())
    self.__class__.insert_or_update_json_data_records(self.json_data_dicts)
def process_parents_and_children_and_save_to_database(
    self,
    spiff_task: SpiffTask,
) -> None:
    """Update task models for spiff_task's descendants and ancestor subprocesses, then flush everything to the db session."""
    self.process_spiff_task_children(spiff_task)
    self.process_spiff_task_parent_subprocess_tasks(spiff_task)
    self.save_objects_to_database()
def process_spiff_task_children(
    self,
    spiff_task: SpiffTask,
) -> None:
    """Recursively update the task model for every descendant of spiff_task."""
    for child in spiff_task.children:
        self.update_task_model_with_spiff_task(spiff_task=child)
        self.process_spiff_task_children(spiff_task=child)
def process_spiff_task_parent_subprocess_tasks(
    self,
    spiff_task: SpiffTask,
) -> None:
    """Find the parent subprocess of a given spiff_task and update its data.

    This will also process that subprocess task's children and will recurse upwards
    to process its parent subprocesses as well.
    """
    (parent_subprocess_guid, _parent_subprocess) = self.__class__.task_subprocess(spiff_task)
    if parent_subprocess_guid is None:
        return
    # Resolve the subprocess task from the outermost workflow so lookups work
    # regardless of how deeply spiff_task is nested.
    outermost_workflow = spiff_task.workflow._get_outermost_workflow()
    parent_subprocess_task = outermost_workflow.get_task_from_id(UUID(parent_subprocess_guid))
    if parent_subprocess_task is None:
        return
    self.update_task_model_with_spiff_task(spiff_task=parent_subprocess_task)
    self.process_spiff_task_children(spiff_task=parent_subprocess_task)
    self.process_spiff_task_parent_subprocess_tasks(spiff_task=parent_subprocess_task)
def update_task_model_with_spiff_task(
    self,
    spiff_task: SpiffTask,
    task_failed: bool = False,
) -> TaskModel:
    """Sync the TaskModel (and its bpmn process row) with the state of spiff_task.

    Accumulates the resulting models/json-data into this service's in-memory
    dicts; nothing is written to the database here. Returns the task model for
    spiff_task so callers can attach extra data (e.g. timestamps).
    """
    (
        new_bpmn_process,
        task_model,
        new_task_models,
        new_json_data_dicts,
    ) = self.__class__.find_or_create_task_model_from_spiff_task(
        spiff_task,
        self.process_instance,
        self.serializer,
        bpmn_definition_to_task_definitions_mappings=self.bpmn_definition_to_task_definitions_mappings,
    )
    # Prefer the freshly created bpmn process; fall back to the one already
    # linked to the task model.
    bpmn_process = new_bpmn_process or task_model.bpmn_process
    bpmn_process_json_data = self.__class__.update_task_data_on_bpmn_process(
        bpmn_process, spiff_task.workflow.data
    )
    self.task_models.update(new_task_models)
    self.json_data_dicts.update(new_json_data_dicts)
    json_data_dict_list = self.__class__.update_task_model(task_model, spiff_task, self.serializer)
    self.task_models[task_model.guid] = task_model
    if bpmn_process_json_data is not None:
        json_data_dict_list.append(bpmn_process_json_data)
    self._update_json_data_dicts_using_list(json_data_dict_list, self.json_data_dicts)
    # Record a process-instance event when the task finished (or failed).
    if task_model.state == "COMPLETED" or task_failed:
        event_type = ProcessInstanceEventType.task_completed.value
        if task_failed:
            event_type = ProcessInstanceEventType.task_failed.value
        # FIXME: some failed tasks will currently not have either timestamp since we only hook into spiff when tasks complete
        # which script tasks execute when READY.
        timestamp = task_model.end_in_seconds or task_model.start_in_seconds or time.time()
        process_instance_event = ProcessInstanceEventModel(
            task_guid=task_model.guid,
            process_instance_id=self.process_instance.id,
            event_type=event_type,
            timestamp=timestamp,
        )
        self.process_instance_events[task_model.guid] = process_instance_event
    self.update_bpmn_process(spiff_task.workflow, bpmn_process)
    return task_model
def update_bpmn_process(
    self,
    spiff_workflow: BpmnWorkflow,
    bpmn_process: BpmnProcessModel,
) -> None:
    """Sync bpmn_process's properties and data with spiff_workflow's state.

    Recurses through outer workflows so every ancestor bpmn process row is
    updated as well. Accumulates results in memory; no db writes here.
    """
    # Copy before mutating so the ORM sees a new dict and detects the change.
    new_properties_json = copy.copy(bpmn_process.properties_json)
    new_properties_json["last_task"] = str(spiff_workflow.last_task) if spiff_workflow.last_task else None
    new_properties_json["success"] = spiff_workflow.success
    bpmn_process.properties_json = new_properties_json
    bpmn_process_json_data = self.__class__.update_task_data_on_bpmn_process(bpmn_process, spiff_workflow.data)
    if bpmn_process_json_data is not None:
        self.json_data_dicts[bpmn_process_json_data["hash"]] = bpmn_process_json_data
    # The top-level process row has no guid; key it under a sentinel instead.
    self.bpmn_processes[bpmn_process.guid or "top_level"] = bpmn_process
    if spiff_workflow.outer_workflow != spiff_workflow:
        direct_parent_bpmn_process = BpmnProcessModel.query.filter_by(
            id=bpmn_process.direct_parent_process_id
        ).first()
        self.update_bpmn_process(spiff_workflow.outer_workflow, direct_parent_bpmn_process)
@classmethod
def insert_or_update_json_data_records(
cls, json_data_hash_to_json_data_dict_mapping: dict[str, JsonDataDict]
@ -58,6 +201,8 @@ class TaskService:
It also returns the relating json_data object so they can be imported later.
"""
new_properties_json = serializer.task_to_dict(spiff_task)
if new_properties_json["task_spec"] == "Start":
new_properties_json["parent"] = None
spiff_task_data = new_properties_json.pop("data")
python_env_data_dict = cls._get_python_env_data_dict_from_spiff_task(spiff_task, serializer)
task_model.properties_json = new_properties_json
@ -110,9 +255,9 @@ class TaskService:
for sp_id, sp in top_level_workflow.subprocesses.items():
if sp == my_wf:
my_sp = sp
my_sp_id = sp_id
my_sp_id = str(sp_id)
break
return (str(my_sp_id), my_sp)
return (my_sp_id, my_sp)
@classmethod
def task_bpmn_process(
@ -180,7 +325,7 @@ class TaskService:
if "subprocess_specs" in bpmn_process_dict:
bpmn_process_dict.pop("subprocess_specs")
new_task_models = {}
new_task_models: dict[str, TaskModel] = {}
new_json_data_dicts: dict[str, JsonDataDict] = {}
bpmn_process = None
@ -250,12 +395,13 @@ class TaskService:
# bpmn process defintion so let's avoid using it.
if task_properties["task_spec"] == "Root":
continue
if task_properties["task_spec"] == "Start":
task_properties["parent"] = None
task_data_dict = task_properties.pop("data")
state_int = task_properties["state"]
# we are going to avoid saving likely and maybe tasks to the db.
# that means we need to remove them from their parents' lists of children as well.
spiff_task = spiff_workflow.get_task_from_id(UUID(task_id))
# if spiff_task._has_state(TaskState.PREDICTED_MASK):
# cls.remove_spiff_task_from_parent(spiff_task, new_task_models)
# continue
task_model = TaskModel.query.filter_by(guid=task_id).first()
if task_model is None:
@ -265,25 +411,28 @@ class TaskService:
spiff_task,
bpmn_definition_to_task_definitions_mappings,
)
task_model.state = TaskStateNames[state_int]
task_model.properties_json = task_properties
new_task_models[task_model.guid] = task_model
json_data_dict = TaskService.update_task_data_on_task_model(
task_model, task_data_dict, "json_data_hash"
)
json_data_dict, python_env_dict = cls.update_task_model(task_model, spiff_task, serializer)
new_task_models[task_model.guid] = task_model
if json_data_dict is not None:
new_json_data_dicts[json_data_dict["hash"]] = json_data_dict
python_env_data_dict = cls._get_python_env_data_dict_from_spiff_task(spiff_task, serializer)
python_env_dict = TaskService.update_task_data_on_task_model(
task_model, python_env_data_dict, "python_env_data_hash"
)
if python_env_dict is not None:
new_json_data_dicts[python_env_dict["hash"]] = python_env_dict
return (bpmn_process, new_task_models, new_json_data_dicts)
@classmethod
def remove_spiff_task_from_parent(cls, spiff_task: SpiffTask, task_models: dict[str, TaskModel]) -> None:
    """Removes the given spiff task from its parent and then updates the task_models dict with the changes."""
    parent_guid = str(spiff_task.parent.id)
    if parent_guid not in task_models:
        return
    parent_task_model = task_models[parent_guid]
    # Mutate a copy so the ORM registers the properties_json change.
    updated_properties = copy.copy(parent_task_model.properties_json)
    updated_properties["children"].remove(str(spiff_task.id))
    parent_task_model.properties_json = updated_properties
    task_models[parent_guid] = parent_task_model
@classmethod
def update_task_data_on_bpmn_process(
cls, bpmn_process: BpmnProcessModel, bpmn_process_data_dict: dict
@ -360,7 +509,7 @@ class TaskService:
else:
task_model.python_env_data_hash = python_env_data_hash
new_properties_json = task_model.properties_json
new_properties_json = copy.copy(task_model.properties_json)
task_model.state = state
task_model.start_in_seconds = None
task_model.end_in_seconds = None
@ -405,3 +554,11 @@ class TaskService:
# this helps to convert items like datetime objects to be json serializable
converted_data: dict = serializer.data_converter.convert(user_defined_state)
return converted_data
@classmethod
def _update_json_data_dicts_using_list(
    cls, json_data_dict_list: list[Optional[JsonDataDict]], json_data_dicts: dict[str, JsonDataDict]
) -> None:
    """Merge every non-None entry of json_data_dict_list into json_data_dicts, keyed by its hash."""
    json_data_dicts.update({entry["hash"]: entry for entry in json_data_dict_list if entry is not None})

View File

@ -15,14 +15,12 @@ from spiffworkflow_backend.models.message_instance_correlation import (
MessageInstanceCorrelationRuleModel,
)
from spiffworkflow_backend.models.process_instance import ProcessInstanceModel
from spiffworkflow_backend.models.process_instance_event import ProcessInstanceEventModel
from spiffworkflow_backend.models.process_instance_event import ProcessInstanceEventType
from spiffworkflow_backend.models.task import TaskModel # noqa: F401
from spiffworkflow_backend.models.task import TaskModel
from spiffworkflow_backend.models.task_definition import TaskDefinitionModel # noqa: F401
from spiffworkflow_backend.services.assertion_service import safe_assertion
from spiffworkflow_backend.services.process_instance_lock_service import (
ProcessInstanceLockService,
)
from spiffworkflow_backend.services.task_service import JsonDataDict
from spiffworkflow_backend.services.task_service import TaskService
@ -63,23 +61,29 @@ class TaskModelSavingDelegate(EngineStepDelegate):
self.current_task_model: Optional[TaskModel] = None
self.current_task_start_in_seconds: Optional[float] = None
self.task_models: dict[str, TaskModel] = {}
self.json_data_dicts: dict[str, JsonDataDict] = {}
self.process_instance_events: dict[str, ProcessInstanceEventModel] = {}
self.last_completed_spiff_task: Optional[SpiffTask] = None
self.task_service = TaskService(
process_instance=self.process_instance,
serializer=self.serializer,
bpmn_definition_to_task_definitions_mappings=self.bpmn_definition_to_task_definitions_mappings,
)
def will_complete_task(self, spiff_task: SpiffTask) -> None:
    """Engine hook fired just before spiff_task completes.

    Caches the wall-clock start time so did_complete_task can record a
    duration, and re-predicts not-finished descendant task states.
    """
    if self._should_update_task_model():
        self.current_task_start_in_seconds = time.time()
        spiff_task.task_spec._predict(spiff_task, mask=TaskState.NOT_FINISHED_MASK)
    if self.secondary_engine_step_delegate:
        self.secondary_engine_step_delegate.will_complete_task(spiff_task)
def did_complete_task(self, spiff_task: SpiffTask) -> None:
    """Engine hook fired after spiff_task completes.

    Syncs the task model via the task service, stamps start/end times using
    the value cached by will_complete_task, and remembers the task so save()
    can later walk its parents and children.

    Raises:
        Exception: if will_complete_task did not cache a start time first.
    """
    if self._should_update_task_model():
        task_model = self.task_service.update_task_model_with_spiff_task(spiff_task)
        if self.current_task_start_in_seconds is None:
            # Fixed typo in the error message: "happend" -> "happened".
            raise Exception("Could not find cached current_task_start_in_seconds. This should never have happened")
        task_model.start_in_seconds = self.current_task_start_in_seconds
        task_model.end_in_seconds = time.time()
        self.last_completed_spiff_task = spiff_task
    if self.secondary_engine_step_delegate:
        self.secondary_engine_step_delegate.did_complete_task(spiff_task)
@ -87,12 +91,9 @@ class TaskModelSavingDelegate(EngineStepDelegate):
script_engine = bpmn_process_instance.script_engine
if hasattr(script_engine, "failing_spiff_task") and script_engine.failing_spiff_task is not None:
failing_spiff_task = script_engine.failing_spiff_task
self._update_task_model_with_spiff_task(failing_spiff_task, task_failed=True)
self.task_service.update_task_model_with_spiff_task(failing_spiff_task, task_failed=True)
db.session.bulk_save_objects(self.task_models.values())
db.session.bulk_save_objects(self.process_instance_events.values())
TaskService.insert_or_update_json_data_records(self.json_data_dicts)
self.task_service.save_objects_to_database()
if self.secondary_engine_step_delegate:
self.secondary_engine_step_delegate.save(bpmn_process_instance, commit=False)
@ -103,10 +104,13 @@ class TaskModelSavingDelegate(EngineStepDelegate):
# TODO: also include children of the last task processed. This may help with task resets
# if we have to set their states to FUTURE.
# excludes FUTURE and COMPLETED. the others were required to get PP1 to go to completion.
for waiting_spiff_task in bpmn_process_instance.get_tasks(
TaskState.WAITING | TaskState.CANCELLED | TaskState.READY | TaskState.MAYBE | TaskState.LIKELY
):
self._update_task_model_with_spiff_task(waiting_spiff_task)
# for waiting_spiff_task in bpmn_process_instance.get_tasks(
# TaskState.WAITING | TaskState.CANCELLED | TaskState.READY | TaskState.MAYBE | TaskState.LIKELY
# ):
# self._update_task_model_with_spiff_task(waiting_spiff_task)
if self.last_completed_spiff_task is not None:
self.task_service.process_spiff_task_parent_subprocess_tasks(self.last_completed_spiff_task)
self.task_service.process_spiff_task_children(self.last_completed_spiff_task)
def _should_update_task_model(self) -> bool:
"""We need to figure out if we have previously save task info on this process intance.
@ -116,52 +120,6 @@ class TaskModelSavingDelegate(EngineStepDelegate):
# return self.process_instance.bpmn_process_id is not None
return True
def _update_json_data_dicts_using_list(self, json_data_dict_list: list[Optional[JsonDataDict]]) -> None:
for json_data_dict in json_data_dict_list:
if json_data_dict is not None:
self.json_data_dicts[json_data_dict["hash"]] = json_data_dict
def _update_task_model_with_spiff_task(self, spiff_task: SpiffTask, task_failed: bool = False) -> TaskModel:
(
bpmn_process,
task_model,
new_task_models,
new_json_data_dicts,
) = TaskService.find_or_create_task_model_from_spiff_task(
spiff_task,
self.process_instance,
self.serializer,
bpmn_definition_to_task_definitions_mappings=self.bpmn_definition_to_task_definitions_mappings,
)
bpmn_process_json_data = TaskService.update_task_data_on_bpmn_process(
bpmn_process or task_model.bpmn_process, spiff_task.workflow.data
)
self.task_models.update(new_task_models)
self.json_data_dicts.update(new_json_data_dicts)
json_data_dict_list = TaskService.update_task_model(task_model, spiff_task, self.serializer)
self.task_models[task_model.guid] = task_model
if bpmn_process_json_data is not None:
json_data_dict_list.append(bpmn_process_json_data)
self._update_json_data_dicts_using_list(json_data_dict_list)
if task_model.state == "COMPLETED" or task_failed:
event_type = ProcessInstanceEventType.task_completed.value
if task_failed:
event_type = ProcessInstanceEventType.task_failed.value
# FIXME: some failed tasks will currently not have either timestamp since we only hook into spiff when tasks complete
# which script tasks execute when READY.
timestamp = task_model.end_in_seconds or task_model.start_in_seconds or time.time()
process_instance_event = ProcessInstanceEventModel(
task_guid=task_model.guid,
process_instance_id=self.process_instance.id,
event_type=event_type,
timestamp=timestamp,
)
self.process_instance_events[task_model.guid] = process_instance_event
return task_model
class ExecutionStrategy:
"""Interface of sorts for a concrete execution strategy."""
@ -169,13 +127,12 @@ class ExecutionStrategy:
def __init__(self, delegate: EngineStepDelegate):
"""__init__."""
self.delegate = delegate
self.bpmn_process_instance = None
def do_engine_steps(self, bpmn_process_instance: BpmnWorkflow, exit_at: None = None) -> None:
pass
def save(self) -> None:
self.delegate.save(self.bpmn_process_instance)
def save(self, bpmn_process_instance: BpmnWorkflow) -> None:
self.delegate.save(bpmn_process_instance)
class GreedyExecutionStrategy(ExecutionStrategy):
@ -281,7 +238,7 @@ class WorkflowExecutionService:
raise ApiError.from_workflow_exception("task_error", str(swe), swe) from swe
finally:
self.execution_strategy.save()
self.execution_strategy.save(self.bpmn_process_instance)
db.session.commit()
if save:

View File

@ -0,0 +1,116 @@
<?xml version="1.0" encoding="UTF-8"?>
<bpmn:definitions xmlns:bpmn="http://www.omg.org/spec/BPMN/20100524/MODEL" xmlns:bpmndi="http://www.omg.org/spec/BPMN/20100524/DI" xmlns:dc="http://www.omg.org/spec/DD/20100524/DC" xmlns:di="http://www.omg.org/spec/DD/20100524/DI" xmlns:spiffworkflow="http://spiffworkflow.org/bpmn/schema/1.0/core" id="Definitions_96f6665" targetNamespace="http://bpmn.io/schema/bpmn" exporter="Camunda Modeler" exporterVersion="3.0.0-dev">
<bpmn:process id="test_loopback_to_subprocess" isExecutable="true">
<bpmn:startEvent id="Event_17ujsfj">
<bpmn:outgoing>Flow_1dk6oyl</bpmn:outgoing>
</bpmn:startEvent>
<bpmn:exclusiveGateway id="gateway" name="Gateway" default="Flow_11uu31d">
<bpmn:incoming>Flow_0s9lss3</bpmn:incoming>
<bpmn:outgoing>Flow_02xy1ag</bpmn:outgoing>
<bpmn:outgoing>Flow_11uu31d</bpmn:outgoing>
</bpmn:exclusiveGateway>
<bpmn:sequenceFlow id="Flow_0s9lss3" sourceRef="script_task" targetRef="gateway" />
<bpmn:scriptTask id="script_task" name="Script Task">
<bpmn:incoming>Flow_0sw85uk</bpmn:incoming>
<bpmn:outgoing>Flow_0s9lss3</bpmn:outgoing>
<bpmn:script>x=1</bpmn:script>
</bpmn:scriptTask>
<bpmn:endEvent id="Event_0ryttlc">
<bpmn:incoming>Flow_02xy1ag</bpmn:incoming>
</bpmn:endEvent>
<bpmn:sequenceFlow id="Flow_02xy1ag" sourceRef="gateway" targetRef="Event_0ryttlc">
<bpmn:conditionExpression>x==2</bpmn:conditionExpression>
</bpmn:sequenceFlow>
<bpmn:sequenceFlow id="Flow_1dk6oyl" sourceRef="Event_17ujsfj" targetRef="subprocess" />
<bpmn:sequenceFlow id="Flow_0sw85uk" sourceRef="subprocess" targetRef="script_task" />
<bpmn:subProcess id="subprocess" name="Subprocess">
<bpmn:incoming>Flow_1dk6oyl</bpmn:incoming>
<bpmn:incoming>Flow_11uu31d</bpmn:incoming>
<bpmn:outgoing>Flow_0sw85uk</bpmn:outgoing>
<bpmn:startEvent id="Event_17df4es">
<bpmn:outgoing>Flow_0ih1i19</bpmn:outgoing>
</bpmn:startEvent>
<bpmn:sequenceFlow id="Flow_0ih1i19" sourceRef="Event_17df4es" targetRef="subprocess_manual_task" />
<bpmn:endEvent id="Event_1ehwj0c">
<bpmn:incoming>Flow_0dua5j8</bpmn:incoming>
</bpmn:endEvent>
<bpmn:sequenceFlow id="Flow_0dua5j8" sourceRef="subprocess_manual_task" targetRef="Event_1ehwj0c" />
<bpmn:manualTask id="subprocess_manual_task" name="Subprocess Manual Task">
<bpmn:extensionElements>
<spiffworkflow:instructionsForEndUser>HEY MANUAL</spiffworkflow:instructionsForEndUser>
</bpmn:extensionElements>
<bpmn:incoming>Flow_0ih1i19</bpmn:incoming>
<bpmn:outgoing>Flow_0dua5j8</bpmn:outgoing>
</bpmn:manualTask>
</bpmn:subProcess>
<bpmn:sequenceFlow id="Flow_11uu31d" sourceRef="gateway" targetRef="subprocess" />
</bpmn:process>
<bpmndi:BPMNDiagram id="BPMNDiagram_1">
<bpmndi:BPMNPlane id="BPMNPlane_1" bpmnElement="test_loopback_to_subprocess">
<bpmndi:BPMNShape id="Event_17ujsfj_di" bpmnElement="Event_17ujsfj">
<dc:Bounds x="122" y="-168" width="36" height="36" />
</bpmndi:BPMNShape>
<bpmndi:BPMNShape id="Gateway_16ouwyf_di" bpmnElement="gateway" isMarkerVisible="true">
<dc:Bounds x="565" y="-175" width="50" height="50" />
<bpmndi:BPMNLabel>
<dc:Bounds x="568" y="-118" width="44" height="14" />
</bpmndi:BPMNLabel>
</bpmndi:BPMNShape>
<bpmndi:BPMNShape id="Activity_1tvzm43_di" bpmnElement="script_task">
<dc:Bounds x="370" y="-190" width="100" height="80" />
<bpmndi:BPMNLabel />
</bpmndi:BPMNShape>
<bpmndi:BPMNShape id="Event_0ryttlc_di" bpmnElement="Event_0ryttlc">
<dc:Bounds x="712" y="-168" width="36" height="36" />
</bpmndi:BPMNShape>
<bpmndi:BPMNShape id="Activity_10og25a_di" bpmnElement="subprocess">
<dc:Bounds x="210" y="-190" width="100" height="80" />
<bpmndi:BPMNLabel />
</bpmndi:BPMNShape>
<bpmndi:BPMNEdge id="Flow_0s9lss3_di" bpmnElement="Flow_0s9lss3">
<di:waypoint x="470" y="-150" />
<di:waypoint x="565" y="-150" />
</bpmndi:BPMNEdge>
<bpmndi:BPMNEdge id="Flow_02xy1ag_di" bpmnElement="Flow_02xy1ag">
<di:waypoint x="615" y="-150" />
<di:waypoint x="712" y="-150" />
</bpmndi:BPMNEdge>
<bpmndi:BPMNEdge id="Flow_1dk6oyl_di" bpmnElement="Flow_1dk6oyl">
<di:waypoint x="158" y="-150" />
<di:waypoint x="210" y="-150" />
</bpmndi:BPMNEdge>
<bpmndi:BPMNEdge id="Flow_0sw85uk_di" bpmnElement="Flow_0sw85uk">
<di:waypoint x="310" y="-150" />
<di:waypoint x="370" y="-150" />
</bpmndi:BPMNEdge>
<bpmndi:BPMNEdge id="Flow_11uu31d_di" bpmnElement="Flow_11uu31d">
<di:waypoint x="590" y="-175" />
<di:waypoint x="590" y="-250" />
<di:waypoint x="438" y="-250" />
<di:waypoint x="303" y="-189" />
</bpmndi:BPMNEdge>
</bpmndi:BPMNPlane>
</bpmndi:BPMNDiagram>
<bpmndi:BPMNDiagram id="BPMNDiagram_0d2d8pf">
<bpmndi:BPMNPlane id="BPMNPlane_0ez33hq" bpmnElement="subprocess">
<bpmndi:BPMNShape id="Event_17df4es_di" bpmnElement="Event_17df4es">
<dc:Bounds x="212" y="172" width="36" height="36" />
</bpmndi:BPMNShape>
<bpmndi:BPMNShape id="Event_1ehwj0c_di" bpmnElement="Event_1ehwj0c">
<dc:Bounds x="452" y="172" width="36" height="36" />
</bpmndi:BPMNShape>
<bpmndi:BPMNShape id="Activity_0va03mf_di" bpmnElement="subprocess_manual_task">
<dc:Bounds x="300" y="150" width="100" height="80" />
<bpmndi:BPMNLabel />
</bpmndi:BPMNShape>
<bpmndi:BPMNEdge id="Flow_0ih1i19_di" bpmnElement="Flow_0ih1i19">
<di:waypoint x="248" y="190" />
<di:waypoint x="300" y="190" />
</bpmndi:BPMNEdge>
<bpmndi:BPMNEdge id="Flow_0dua5j8_di" bpmnElement="Flow_0dua5j8">
<di:waypoint x="400" y="190" />
<di:waypoint x="452" y="190" />
</bpmndi:BPMNEdge>
</bpmndi:BPMNPlane>
</bpmndi:BPMNDiagram>
</bpmn:definitions>

View File

@ -7,8 +7,8 @@
<bpmn:endEvent id="end_event_of_manual_task_model">
<bpmn:incoming>Flow_0nnh2x9</bpmn:incoming>
</bpmn:endEvent>
<bpmn:sequenceFlow id="Flow_0nnh2x9" sourceRef="Activity_Hello" targetRef="end_event_of_manual_task_model" />
<bpmn:manualTask id="Activity_Hello" name="Hello">
<bpmn:sequenceFlow id="Flow_0nnh2x9" sourceRef="manual_task_one" targetRef="end_event_of_manual_task_model" />
<bpmn:manualTask id="manual_task_one" name="Hello">
<bpmn:extensionElements>
<spiffworkflow:instructionsForEndUser>## Hello</spiffworkflow:instructionsForEndUser>
</bpmn:extensionElements>
@ -16,7 +16,7 @@
<bpmn:outgoing>Flow_0nnh2x9</bpmn:outgoing>
</bpmn:manualTask>
<bpmn:sequenceFlow id="Flow_0stlaxe" sourceRef="StartEvent_1" targetRef="the_script" />
<bpmn:sequenceFlow id="Flow_1pmem7s" sourceRef="the_script" targetRef="Activity_Hello" />
<bpmn:sequenceFlow id="Flow_1pmem7s" sourceRef="the_script" targetRef="manual_task_one" />
<bpmn:scriptTask id="the_script">
<bpmn:incoming>Flow_0stlaxe</bpmn:incoming>
<bpmn:outgoing>Flow_1pmem7s</bpmn:outgoing>
@ -31,7 +31,7 @@
<bpmndi:BPMNShape id="Event_0ia26nb_di" bpmnElement="end_event_of_manual_task_model">
<dc:Bounds x="592" y="159" width="36" height="36" />
</bpmndi:BPMNShape>
<bpmndi:BPMNShape id="Activity_1rcj16n_di" bpmnElement="Activity_Hello">
<bpmndi:BPMNShape id="Activity_1rcj16n_di" bpmnElement="manual_task_one">
<dc:Bounds x="420" y="137" width="100" height="80" />
</bpmndi:BPMNShape>
<bpmndi:BPMNShape id="Activity_1vokg57_di" bpmnElement="the_script">

View File

@ -4,15 +4,15 @@
<bpmn:startEvent id="StartEvent_1">
<bpmn:outgoing>Flow_0stlaxe</bpmn:outgoing>
</bpmn:startEvent>
<bpmn:endEvent id="end_event_of_manual_task_model">
<bpmn:endEvent id="end_event_of_manual_task_model" name="End Event Of Manual Task Model">
<bpmn:incoming>Flow_1ygcsbt</bpmn:incoming>
</bpmn:endEvent>
<bpmn:manualTask id="manual_task" name="Hello">
<bpmn:manualTask id="top_level_manual_task_two" name="Top Level Manual Task Two">
<bpmn:extensionElements>
<spiffworkflow:instructionsForEndUser>## Hello</spiffworkflow:instructionsForEndUser>
</bpmn:extensionElements>
<bpmn:incoming>Flow_1fktmf7</bpmn:incoming>
<bpmn:incoming>Flow_1t9ywmr</bpmn:incoming>
<bpmn:incoming>Flow_0q30935</bpmn:incoming>
<bpmn:outgoing>Flow_09gjylo</bpmn:outgoing>
</bpmn:manualTask>
<bpmn:sequenceFlow id="Flow_0stlaxe" sourceRef="StartEvent_1" targetRef="top_level_script" />
@ -21,9 +21,9 @@
<bpmn:outgoing>Flow_1fktmf7</bpmn:outgoing>
<bpmn:script>set_in_top_level_script = 1</bpmn:script>
</bpmn:scriptTask>
<bpmn:sequenceFlow id="Flow_1fktmf7" sourceRef="top_level_script" targetRef="manual_task" />
<bpmn:sequenceFlow id="Flow_09gjylo" sourceRef="manual_task" targetRef="top_level_subprocess" />
<bpmn:subProcess id="top_level_subprocess">
<bpmn:sequenceFlow id="Flow_1fktmf7" sourceRef="top_level_script" targetRef="top_level_manual_task_one" />
<bpmn:sequenceFlow id="Flow_09gjylo" sourceRef="top_level_manual_task_two" targetRef="top_level_subprocess" />
<bpmn:subProcess id="top_level_subprocess" name="Top Level Subprocess">
<bpmn:incoming>Flow_09gjylo</bpmn:incoming>
<bpmn:outgoing>Flow_0yxus36</bpmn:outgoing>
<bpmn:startEvent id="Event_0g7txdo">
@ -46,7 +46,7 @@ except:
we_move_on = False</bpmn:script>
</bpmn:scriptTask>
</bpmn:subProcess>
<bpmn:callActivity id="top_level_call_activity" calledElement="test_process_to_call">
<bpmn:callActivity id="top_level_call_activity" name="Top Level Call Activity" calledElement="test_process_to_call">
<bpmn:incoming>Flow_0yxus36</bpmn:incoming>
<bpmn:outgoing>Flow_187mcqe</bpmn:outgoing>
</bpmn:callActivity>
@ -60,13 +60,18 @@ except:
<bpmn:conditionExpression>we_move_on == True</bpmn:conditionExpression>
</bpmn:sequenceFlow>
<bpmn:sequenceFlow id="Flow_1ygcsbt" sourceRef="top_level_process_script_after_gate" targetRef="end_event_of_manual_task_model" />
<bpmn:scriptTask id="top_level_process_script_after_gate">
<bpmn:scriptTask id="top_level_process_script_after_gate" name="Top Level Process Script After Gate">
<bpmn:incoming>Flow_0lw7sda</bpmn:incoming>
<bpmn:outgoing>Flow_1ygcsbt</bpmn:outgoing>
<bpmn:script>set_top_level_process_script_after_gate = 1</bpmn:script>
</bpmn:scriptTask>
<bpmn:sequenceFlow id="Flow_1t9ywmr" sourceRef="Gateway_0p8naw0" targetRef="manual_task" />
<bpmn:sequenceFlow id="Flow_1t9ywmr" sourceRef="Gateway_0p8naw0" targetRef="top_level_manual_task_two" />
<bpmn:sequenceFlow id="Flow_0yxus36" sourceRef="top_level_subprocess" targetRef="top_level_call_activity" />
<bpmn:sequenceFlow id="Flow_0q30935" sourceRef="top_level_manual_task_one" targetRef="top_level_manual_task_two" />
<bpmn:manualTask id="top_level_manual_task_one" name="Top Level Manual Task One">
<bpmn:incoming>Flow_1fktmf7</bpmn:incoming>
<bpmn:outgoing>Flow_0q30935</bpmn:outgoing>
</bpmn:manualTask>
</bpmn:process>
<bpmndi:BPMNDiagram id="BPMNDiagram_1">
<bpmndi:BPMNPlane id="BPMNPlane_1" bpmnElement="top_level_process">
@ -74,25 +79,35 @@ except:
<dc:Bounds x="179" y="159" width="36" height="36" />
</bpmndi:BPMNShape>
<bpmndi:BPMNShape id="Event_0ia26nb_di" bpmnElement="end_event_of_manual_task_model">
<dc:Bounds x="1092" y="159" width="36" height="36" />
<dc:Bounds x="1212" y="159" width="36" height="36" />
<bpmndi:BPMNLabel>
<dc:Bounds x="1200" y="202" width="67" height="40" />
</bpmndi:BPMNLabel>
</bpmndi:BPMNShape>
<bpmndi:BPMNShape id="Activity_1rcj16n_di" bpmnElement="manual_task">
<dc:Bounds x="400" y="137" width="100" height="80" />
<bpmndi:BPMNShape id="Activity_1rcj16n_di" bpmnElement="top_level_manual_task_two">
<dc:Bounds x="610" y="137" width="100" height="80" />
<bpmndi:BPMNLabel />
</bpmndi:BPMNShape>
<bpmndi:BPMNShape id="Activity_1vokg57_di" bpmnElement="top_level_script">
<dc:Bounds x="270" y="137" width="100" height="80" />
</bpmndi:BPMNShape>
<bpmndi:BPMNShape id="Activity_19a46sv_di" bpmnElement="top_level_subprocess">
<dc:Bounds x="530" y="137" width="100" height="80" />
<dc:Bounds x="740" y="137" width="100" height="80" />
<bpmndi:BPMNLabel />
</bpmndi:BPMNShape>
<bpmndi:BPMNShape id="Activity_04hrmow_di" bpmnElement="top_level_call_activity">
<dc:Bounds x="680" y="137" width="100" height="80" />
<dc:Bounds x="870" y="137" width="100" height="80" />
<bpmndi:BPMNLabel />
</bpmndi:BPMNShape>
<bpmndi:BPMNShape id="Gateway_0p8naw0_di" bpmnElement="Gateway_0p8naw0" isMarkerVisible="true">
<dc:Bounds x="835" y="152" width="50" height="50" />
<dc:Bounds x="1005" y="152" width="50" height="50" />
</bpmndi:BPMNShape>
<bpmndi:BPMNShape id="Activity_1yhtryv_di" bpmnElement="top_level_process_script_after_gate">
<dc:Bounds x="940" y="137" width="100" height="80" />
<dc:Bounds x="1080" y="137" width="100" height="80" />
<bpmndi:BPMNLabel />
</bpmndi:BPMNShape>
<bpmndi:BPMNShape id="Activity_0ctgju0_di" bpmnElement="top_level_manual_task_one">
<dc:Bounds x="450" y="137" width="100" height="80" />
</bpmndi:BPMNShape>
<bpmndi:BPMNEdge id="Flow_0stlaxe_di" bpmnElement="Flow_0stlaxe">
<di:waypoint x="215" y="177" />
@ -100,33 +115,37 @@ except:
</bpmndi:BPMNEdge>
<bpmndi:BPMNEdge id="Flow_1fktmf7_di" bpmnElement="Flow_1fktmf7">
<di:waypoint x="370" y="177" />
<di:waypoint x="400" y="177" />
<di:waypoint x="450" y="177" />
</bpmndi:BPMNEdge>
<bpmndi:BPMNEdge id="Flow_09gjylo_di" bpmnElement="Flow_09gjylo">
<di:waypoint x="500" y="177" />
<di:waypoint x="530" y="177" />
<di:waypoint x="710" y="177" />
<di:waypoint x="740" y="177" />
</bpmndi:BPMNEdge>
<bpmndi:BPMNEdge id="Flow_187mcqe_di" bpmnElement="Flow_187mcqe">
<di:waypoint x="780" y="177" />
<di:waypoint x="835" y="177" />
<di:waypoint x="970" y="177" />
<di:waypoint x="1005" y="177" />
</bpmndi:BPMNEdge>
<bpmndi:BPMNEdge id="Flow_0lw7sda_di" bpmnElement="Flow_0lw7sda">
<di:waypoint x="885" y="177" />
<di:waypoint x="940" y="177" />
<di:waypoint x="1055" y="177" />
<di:waypoint x="1080" y="177" />
</bpmndi:BPMNEdge>
<bpmndi:BPMNEdge id="Flow_1ygcsbt_di" bpmnElement="Flow_1ygcsbt">
<di:waypoint x="1040" y="177" />
<di:waypoint x="1092" y="177" />
<di:waypoint x="1180" y="177" />
<di:waypoint x="1212" y="177" />
</bpmndi:BPMNEdge>
<bpmndi:BPMNEdge id="Flow_1t9ywmr_di" bpmnElement="Flow_1t9ywmr">
<di:waypoint x="860" y="152" />
<di:waypoint x="860" y="100" />
<di:waypoint x="450" y="100" />
<di:waypoint x="450" y="137" />
<di:waypoint x="1030" y="152" />
<di:waypoint x="1030" y="100" />
<di:waypoint x="660" y="100" />
<di:waypoint x="660" y="137" />
</bpmndi:BPMNEdge>
<bpmndi:BPMNEdge id="Flow_0yxus36_di" bpmnElement="Flow_0yxus36">
<di:waypoint x="630" y="177" />
<di:waypoint x="680" y="177" />
<di:waypoint x="840" y="177" />
<di:waypoint x="870" y="177" />
</bpmndi:BPMNEdge>
<bpmndi:BPMNEdge id="Flow_0q30935_di" bpmnElement="Flow_0q30935">
<di:waypoint x="550" y="177" />
<di:waypoint x="610" y="177" />
</bpmndi:BPMNEdge>
</bpmndi:BPMNPlane>
</bpmndi:BPMNDiagram>

View File

@ -2616,6 +2616,8 @@ class TestProcessApi(BaseTest):
content_type="application/json",
data=json.dumps(data),
)
assert response.status_code == 200
assert response.json is not None
assert response.json["status"] == "complete"
response = client.get(
@ -2641,9 +2643,9 @@ class TestProcessApi(BaseTest):
) -> None:
"""Test_script_unit_test_run."""
process_group_id = "test_group"
process_model_id = "process_navigation"
bpmn_file_name = "process_navigation.bpmn"
bpmn_file_location = "process_navigation"
process_model_id = "manual_task"
bpmn_file_name = "manual_task.bpmn"
bpmn_file_location = "manual_task"
process_model_identifier = self.create_group_and_model_with_bpmn(
client=client,
user=with_super_admin_user,
@ -2674,25 +2676,11 @@ class TestProcessApi(BaseTest):
headers=self.logged_in_headers(with_super_admin_user),
)
data = {
"dateTime": "PT1H",
"external": True,
"internal": True,
"label": "Event_0e4owa3",
"typename": "TimerEventDefinition",
}
response = client.post(
f"/v1.0/send-event/{self.modify_process_identifier_for_path_param(process_model_identifier)}/{process_instance_id}",
headers=self.logged_in_headers(with_super_admin_user),
content_type="application/json",
data=json.dumps(data),
)
response = client.get(
f"/v1.0/process-instances/{self.modify_process_identifier_for_path_param(process_model_identifier)}/{process_instance_id}/task-info",
headers=self.logged_in_headers(with_super_admin_user),
)
assert len(response.json) == 9
assert len(response.json) == 7
human_task = next(task for task in response.json if task["bpmn_identifier"] == "manual_task_one")
response = client.post(
@ -2711,7 +2699,7 @@ class TestProcessApi(BaseTest):
headers=self.logged_in_headers(with_super_admin_user),
)
assert response.status_code == 200
assert len(response.json) == 9
assert len(response.json) == 7
def setup_initial_groups_for_move_tests(self, client: FlaskClient, with_super_admin_user: UserModel) -> None:
"""Setup_initial_groups_for_move_tests."""

View File

@ -5,7 +5,8 @@ import pytest
from flask import g
from flask.app import Flask
from flask.testing import FlaskClient
from SpiffWorkflow.task import TaskState # type: ignore
from SpiffWorkflow.task import Task as SpiffTask # type: ignore
from SpiffWorkflow.task import TaskState
from tests.spiffworkflow_backend.helpers.base_test import BaseTest
from tests.spiffworkflow_backend.helpers.test_data import load_test_spec
@ -256,7 +257,6 @@ class TestProcessInstanceProcessor(BaseTest):
assert spiff_task is not None
assert spiff_task.state == TaskState.COMPLETED
# TODO: FIX resetting a process instance to a task
# def test_properly_resets_process_to_given_task(
# self,
# app: Flask,
@ -275,6 +275,60 @@ class TestProcessInstanceProcessor(BaseTest):
# assert finance_group is not None
#
# process_model = load_test_spec(
# process_model_id="test_group/manual_task",
# process_model_source_directory="manual_task",
# )
# process_instance = self.create_process_instance_from_process_model(
# process_model=process_model, user=initiator_user
# )
# processor = ProcessInstanceProcessor(process_instance)
# processor.do_engine_steps(save=True)
# assert len(process_instance.active_human_tasks) == 1
# initial_human_task_id = process_instance.active_human_tasks[0].id
#
# # save again to ensure we go attempt to process the human tasks again
# processor.save()
#
# assert len(process_instance.active_human_tasks) == 1
# assert initial_human_task_id == process_instance.active_human_tasks[0].id
#
# processor = ProcessInstanceProcessor(process_instance)
# human_task_one = process_instance.active_human_tasks[0]
# spiff_manual_task = processor.__class__.get_task_by_bpmn_identifier(
# human_task_one.task_name, processor.bpmn_process_instance
# )
# assert spiff_manual_task is not None
#
# processor.suspend()
# ProcessInstanceProcessor.reset_process(process_instance, str(spiff_manual_task.parent.id), commit=True)
#
# process_instance = ProcessInstanceModel.query.filter_by(id=process_instance.id).first()
# processor = ProcessInstanceProcessor(process_instance)
# processor.resume()
# processor.do_engine_steps(save=True)
# human_task_one = process_instance.active_human_tasks[0]
# spiff_manual_task = processor.bpmn_process_instance.get_task_from_id(UUID(human_task_one.task_id))
# ProcessInstanceService.complete_form_task(processor, spiff_manual_task, {}, initiator_user, human_task_one)
# assert process_instance.status == "complete"
#
# def test_properly_resets_process_to_given_task_with_call_activity(
# self,
# app: Flask,
# client: FlaskClient,
# with_db_and_bpmn_file_cleanup: None,
# with_super_admin_user: UserModel,
# ) -> None:
# self.create_process_group_with_api(client, with_super_admin_user, "test_group", "test_group")
# initiator_user = self.find_or_create_user("initiator_user")
# finance_user_three = self.find_or_create_user("testuser3")
# assert initiator_user.principal is not None
# assert finance_user_three.principal is not None
# AuthorizationService.import_permissions_from_yaml_file()
#
# finance_group = GroupModel.query.filter_by(identifier="Finance Team").first()
# assert finance_group is not None
#
# process_model = load_test_spec(
# process_model_id="test_group/manual_task_with_subprocesses",
# process_model_source_directory="manual_task_with_subprocesses",
# )
@ -283,33 +337,48 @@ class TestProcessInstanceProcessor(BaseTest):
# )
# processor = ProcessInstanceProcessor(process_instance)
# processor.do_engine_steps(save=True)
# # with open("before_reset.json", 'w') as f: f.write(json.dumps(processor.serialize(), indent=2))
# assert len(process_instance.active_human_tasks) == 1
# initial_human_task_id = process_instance.active_human_tasks[0].id
#
# # save again to ensure we go attempt to process the human tasks again
# processor.save()
#
# assert len(process_instance.active_human_tasks) == 1
# assert initial_human_task_id == process_instance.active_human_tasks[0].id
#
# processor = ProcessInstanceProcessor(process_instance)
# human_task_one = process_instance.active_human_tasks[0]
# spiff_manual_task = processor.__class__.get_task_by_bpmn_identifier(
# human_task_one.task_name, processor.bpmn_process_instance
# )
# spiff_manual_task = processor.bpmn_process_instance.get_task_from_id(UUID(human_task_one.task_id))
# ProcessInstanceService.complete_form_task(processor, spiff_manual_task, {}, initiator_user, human_task_one)
# human_task_one = process_instance.active_human_tasks[0]
# spiff_manual_task = processor.bpmn_process_instance.get_task_from_id(UUID(human_task_one.task_id))
# ProcessInstanceService.complete_form_task(processor, spiff_manual_task, {}, initiator_user, human_task_one)
#
# # NOTES:
# # somehow we are hosing the task state so that when completing tasks of a subprocess, the task AFTER the subprocess task
# # is not marked READY but instead stays as FUTURE. Running things like:
# # self.last_completed_spiff_task.task_spec._update(self.last_completed_spiff_task)
# # and
# # self.last_completed_spiff_task.task_spec._predict(self.last_completed_spiff_task, mask=TaskState.NOT_FINISHED_MASK)
# # did not help.
#
# processor.suspend()
# ProcessInstanceProcessor.reset_process(process_instance, str(spiff_manual_task.id), commit=True)
# task_model_to_reset_to = (
# TaskModel.query.join(TaskDefinitionModel)
# .filter(TaskDefinitionModel.bpmn_identifier == "top_level_subprocess_script")
# .order_by(TaskModel.id.desc()) # type: ignore
# .first()
# )
# assert task_model_to_reset_to is not None
# ProcessInstanceProcessor.reset_process(process_instance, task_model_to_reset_to.guid, commit=True)
#
# process_instance = ProcessInstanceModel.query.filter_by(id=process_instance.id).first()
# processor = ProcessInstanceProcessor(process_instance)
# processor.resume()
# processor.do_engine_steps(save=True)
#
# assert len(process_instance.active_human_tasks) == 1
# human_task_one = process_instance.active_human_tasks[0]
# spiff_manual_task = processor.bpmn_process_instance.get_task_from_id(UUID(human_task_one.task_id))
# ProcessInstanceService.complete_form_task(processor, spiff_manual_task, {}, initiator_user, human_task_one)
# human_task_one = process_instance.active_human_tasks[0]
# spiff_manual_task = processor.bpmn_process_instance.get_task_from_id(UUID(human_task_one.task_id))
# ProcessInstanceService.complete_form_task(processor, spiff_manual_task, {}, initiator_user, human_task_one)
#
# assert process_instance.status == "complete"
def test_properly_saves_tasks_when_running(
self,
@ -358,54 +427,104 @@ class TestProcessInstanceProcessor(BaseTest):
human_task_one = process_instance.active_human_tasks[0]
spiff_manual_task = processor.bpmn_process_instance.get_task_from_id(UUID(human_task_one.task_id))
ProcessInstanceService.complete_form_task(processor, spiff_manual_task, {}, initiator_user, human_task_one)
human_task_one = process_instance.active_human_tasks[0]
spiff_manual_task = processor.bpmn_process_instance.get_task_from_id(UUID(human_task_one.task_id))
ProcessInstanceService.complete_form_task(processor, spiff_manual_task, {}, initiator_user, human_task_one)
# recreate variables to ensure all bpmn json was recreated from scratch from the db
process_instance_relookup = ProcessInstanceModel.query.filter_by(id=process_instance.id).first()
processor_final = ProcessInstanceProcessor(process_instance_relookup)
assert process_instance_relookup.status == "complete"
first_data_set = {"set_in_top_level_script": 1}
second_data_set = {
**first_data_set,
data_set_1 = {"set_in_top_level_script": 1}
data_set_2 = {
**data_set_1,
**{"set_in_top_level_subprocess": 1, "we_move_on": False},
}
third_data_set = {
**second_data_set,
data_set_3 = {
**data_set_2,
**{
"set_in_test_process_to_call_script": 1,
"set_in_test_process_to_call_subprocess_subprocess_script": 1,
"set_in_test_process_to_call_subprocess_script": 1,
},
}
fourth_data_set = {**third_data_set, **{"a": 1, "we_move_on": True}}
fifth_data_set = {**fourth_data_set, **{"validate_only": False, "set_top_level_process_script_after_gate": 1}}
data_set_4 = {
**data_set_3,
**{
"set_in_test_process_to_call_script": 1,
},
}
data_set_5 = {**data_set_4, **{"a": 1, "we_move_on": True}}
data_set_6 = {**data_set_5, **{"set_top_level_process_script_after_gate": 1}}
data_set_7 = {**data_set_6, **{"validate_only": False, "set_top_level_process_script_after_gate": 1}}
expected_task_data = {
"top_level_script": first_data_set,
"manual_task": first_data_set,
"top_level_subprocess_script": second_data_set,
"top_level_subprocess": second_data_set,
"test_process_to_call_subprocess_script": third_data_set,
"top_level_call_activity": third_data_set,
"end_event_of_manual_task_model": third_data_set,
"top_level_subprocess_script_second": fourth_data_set,
"test_process_to_call_subprocess_script_second": fourth_data_set,
"top_level_script": {"data": data_set_1, "bpmn_process_identifier": "top_level_process"},
"top_level_manual_task_one": {"data": data_set_1, "bpmn_process_identifier": "top_level_process"},
"top_level_manual_task_two": {"data": data_set_1, "bpmn_process_identifier": "top_level_process"},
"top_level_subprocess_script": {
"data": data_set_2,
"bpmn_process_identifier": "top_level_subprocess",
},
"top_level_subprocess": {"data": data_set_2, "bpmn_process_identifier": "top_level_process"},
"test_process_to_call_subprocess_script": {
"data": data_set_3,
"bpmn_process_identifier": "test_process_to_call_subprocess",
},
"top_level_call_activity": {"data": data_set_4, "bpmn_process_identifier": "top_level_process"},
"top_level_manual_task_two_second": {
"data": data_set_4,
"bpmn_process_identifier": "top_level_process",
},
"top_level_subprocess_script_second": {
"data": data_set_5,
"bpmn_process_identifier": "top_level_subprocess",
},
"top_level_subprocess_second": {"data": data_set_5, "bpmn_process_identifier": "top_level_process"},
"test_process_to_call_subprocess_script_second": {
"data": data_set_5,
"bpmn_process_identifier": "test_process_to_call_subprocess",
},
"top_level_call_activity_second": {
"data": data_set_5,
"bpmn_process_identifier": "top_level_process",
},
"end_event_of_manual_task_model": {"data": data_set_6, "bpmn_process_identifier": "top_level_process"},
}
spiff_tasks_checked_once: list = []
spiff_tasks_checked: list[str] = []
# TODO: also check task data here from the spiff_task directly to ensure we hydrated spiff correctly
def assert_spiff_task_is_in_process(spiff_task_identifier: str, bpmn_process_identifier: str) -> None:
if spiff_task.task_spec.name == spiff_task_identifier:
expected_task_data_key = spiff_task.task_spec.name
if spiff_task.task_spec.name in spiff_tasks_checked_once:
def assert_spiff_task_is_in_process(spiff_task: SpiffTask) -> None:
spiff_task_identifier = spiff_task.task_spec.name
if spiff_task_identifier in expected_task_data:
bpmn_process_identifier = expected_task_data[spiff_task_identifier]["bpmn_process_identifier"]
expected_task_data_key = spiff_task_identifier
if spiff_task_identifier in spiff_tasks_checked:
expected_task_data_key = f"{spiff_task.task_spec.name}_second"
expected_python_env_data = expected_task_data[expected_task_data_key]
assert expected_task_data_key not in spiff_tasks_checked
spiff_tasks_checked.append(expected_task_data_key)
expected_python_env_data = expected_task_data[expected_task_data_key]["data"]
base_failure_message = (
f"Failed on {bpmn_process_identifier} - {spiff_task_identifier} - task data key"
f" {expected_task_data_key}."
)
# TODO: add back in when removing MAYBE and LIKELY tasks
# count_failure_message = (
# f"{base_failure_message} There are more than 2 entries of this task in the db."
# " There should only ever be max 2."
# )
# task_models_with_bpmn_identifier_count = (
# TaskModel.query.join(TaskDefinitionModel)
# .filter(TaskModel.process_instance_id == process_instance_relookup.id)
# .filter(TaskDefinitionModel.bpmn_identifier == spiff_task.task_spec.name)
# .count()
# )
# assert task_models_with_bpmn_identifier_count < 3, count_failure_message
task_model = TaskModel.query.filter_by(guid=str(spiff_task.id)).first()
assert task_model.start_in_seconds is not None
@ -415,7 +534,9 @@ class TestProcessInstanceProcessor(BaseTest):
task_definition = task_model.task_definition
assert task_definition.bpmn_identifier == spiff_task_identifier
assert task_definition.bpmn_name == spiff_task_identifier.replace("_", " ").title()
assert task_definition.bpmn_process_definition.bpmn_identifier == bpmn_process_identifier
assert (
task_definition.bpmn_process_definition.bpmn_identifier == bpmn_process_identifier
), base_failure_message
message = (
f"{base_failure_message} Expected: {sorted(expected_python_env_data)}. Received:"
@ -424,17 +545,12 @@ class TestProcessInstanceProcessor(BaseTest):
# TODO: if we split out env data again we will need to use it here instead of json_data
# assert task_model.python_env_data() == expected_python_env_data, message
assert task_model.json_data() == expected_python_env_data, message
spiff_tasks_checked_once.append(spiff_task.task_spec.name)
all_spiff_tasks = processor_final.bpmn_process_instance.get_tasks()
assert len(all_spiff_tasks) > 1
for spiff_task in all_spiff_tasks:
assert spiff_task.state == TaskState.COMPLETED
assert_spiff_task_is_in_process(
"test_process_to_call_subprocess_script", "test_process_to_call_subprocess"
)
assert_spiff_task_is_in_process("top_level_subprocess_script", "top_level_subprocess")
assert_spiff_task_is_in_process("top_level_script", "top_level_process")
assert_spiff_task_is_in_process(spiff_task)
if spiff_task.task_spec.name == "top_level_call_activity":
# the task id / guid of the call activity gets used as the guid of the bpmn process that it calls
@ -462,7 +578,22 @@ class TestProcessInstanceProcessor(BaseTest):
assert direct_parent_process is not None
assert direct_parent_process.bpmn_process_definition.bpmn_identifier == "test_process_to_call"
assert processor.get_data() == fifth_data_set
for task_bpmn_identifier in expected_task_data.keys():
message = (
f"Expected to have seen a task with a bpmn_identifier of {task_bpmn_identifier} but did not. "
f"Only saw {sorted(spiff_tasks_checked)}"
)
assert task_bpmn_identifier in spiff_tasks_checked, message
# TODO: add back in when removing MAYBE and LIKELY tasks
# task_models_that_are_predicted_count = (
# TaskModel.query.filter(TaskModel.process_instance_id == process_instance_relookup.id)
# .filter(TaskModel.state.in_(["LIKELY", "MAYBE"])) # type: ignore
# .count()
# )
# assert task_models_that_are_predicted_count == 0
assert processor.get_data() == data_set_7
def test_does_not_recreate_human_tasks_on_multiple_saves(
self,
@ -569,14 +700,47 @@ class TestProcessInstanceProcessor(BaseTest):
assert len(process_instance.human_tasks) == 2
human_task_two = process_instance.active_human_tasks[0]
# this is just asserting the way the functionality currently works in spiff.
# we would actually expect this to change one day if we stop reusing the same guid
# when we re-do a task.
# assert human_task_two.task_id == human_task_one.task_id
# EDIT: when using feature/remove-loop-reset branch of SpiffWorkflow, these should be different.
assert human_task_two.task_id != human_task_one.task_id
def test_it_can_loopback_to_previous_bpmn_subprocess_with_gateway(
self,
app: Flask,
client: FlaskClient,
with_db_and_bpmn_file_cleanup: None,
) -> None:
initiator_user = self.find_or_create_user("initiator_user")
process_model = load_test_spec(
process_model_id="test_group/loopback_to_subprocess",
process_model_source_directory="loopback_to_subprocess",
)
process_instance = self.create_process_instance_from_process_model(
process_model=process_model, user=initiator_user
)
processor = ProcessInstanceProcessor(process_instance)
processor.do_engine_steps(save=True)
assert len(process_instance.active_human_tasks) == 1
assert len(process_instance.human_tasks) == 1
human_task_one = process_instance.active_human_tasks[0]
spiff_task = processor.get_task_by_guid(human_task_one.task_id)
ProcessInstanceService.complete_form_task(processor, spiff_task, {}, initiator_user, human_task_one)
processor = ProcessInstanceProcessor(process_instance)
assert len(process_instance.active_human_tasks) == 1
assert len(process_instance.human_tasks) == 2
human_task_two = process_instance.active_human_tasks[0]
spiff_task = processor.get_task_by_guid(human_task_two.task_id)
ProcessInstanceService.complete_form_task(processor, spiff_task, {}, initiator_user, human_task_two)
# ensure this does not raise a KeyError
processor = ProcessInstanceProcessor(process_instance)
assert len(process_instance.active_human_tasks) == 1
assert len(process_instance.human_tasks) == 3
human_task_three = process_instance.active_human_tasks[0]
spiff_task = processor.get_task_by_guid(human_task_three.task_id)
ProcessInstanceService.complete_form_task(processor, spiff_task, {}, initiator_user, human_task_three)
def test_task_data_is_set_even_if_process_instance_errors(
self,
app: Flask,

View File

@ -260,7 +260,7 @@ export default function ProcessInstanceShow({ variant }: OwnProps) {
return !taskToTimeTravelTo;
};
const completionViewLink = (label: any, taskGuid: string) => {
const queryParams = () => {
const processIdentifier = searchParams.get('process_identifier');
const callActivityTaskId = searchParams.get('bpmn_process_guid');
const queryParamArray = [];
@ -270,16 +270,19 @@ export default function ProcessInstanceShow({ variant }: OwnProps) {
if (callActivityTaskId) {
queryParamArray.push(`bpmn_process_guid=${callActivityTaskId}`);
}
let queryParams = '';
let queryParamString = '';
if (queryParamArray.length > 0) {
queryParams = `?${queryParamArray.join('&')}`;
queryParamString = `?${queryParamArray.join('&')}`;
}
return queryParamString;
};
const completionViewLink = (label: any, taskGuid: string) => {
return (
<Link
reloadDocument
data-qa="process-instance-step-link"
to={`${processInstanceShowPageBaseUrl}/${taskGuid}${queryParams}`}
to={`${processInstanceShowPageBaseUrl}/${taskGuid}${queryParams()}`}
>
{label}
</Link>
@ -287,7 +290,7 @@ export default function ProcessInstanceShow({ variant }: OwnProps) {
};
const returnToProcessInstance = () => {
window.location.href = processInstanceShowPageBaseUrl;
window.location.href = `${processInstanceShowPageBaseUrl}${queryParams()}`;
};
const resetProcessInstance = () => {