Merge pull request #202 from sartography/feature/fix_process_instance_rewind_two
Feature/fix process instance rewind two
commit 90c1242032
@@ -1875,7 +1875,7 @@ test = ["pytest"]
[[package]]
name = "SpiffWorkflow"
version = "1.2.1"
description = "A workflow framework and BPMN/DMN Processor"
description = ""
category = "main"
optional = false
python-versions = "*"

@@ -1890,7 +1890,7 @@ lxml = "*"
type = "git"
url = "https://github.com/sartography/SpiffWorkflow"
reference = "main"
resolved_reference = "62454c99c3a711c38f4249a3b5e7215d42037d72"
resolved_reference = "e1add839ddf2512f27cd0afe681ff3e0460d6f7a"

[[package]]
name = "sqlalchemy"
@@ -669,6 +669,15 @@ def process_instance_task_list(
        task_model_query = task_model_query.filter(bpmn_process_alias.id.in_(bpmn_process_ids))

    task_models = task_model_query.all()
    task_model_list = {}
    if most_recent_tasks_only:
        for task_model in task_models:
            bpmn_process_guid = task_model.bpmn_process_direct_parent_guid or "TOP"
            row_key = f"{bpmn_process_guid}:::{task_model.bpmn_identifier}"
            if row_key not in task_model_list:
                task_model_list[row_key] = task_model
        task_models = list(task_model_list.values())

    if to_task_model is not None:
        task_models_dict = json.loads(current_app.json.dumps(task_models))
        for task_model in task_models_dict:
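
The new most_recent_tasks_only branch above keeps a single task model per "parent process guid + BPMN identifier" pair. A minimal, standalone sketch of the same dedup pattern follows (illustration only, not part of this diff; it uses plain dicts instead of TaskModel rows and assumes the incoming list is already ordered newest-first, so the first row seen for each key is the one kept):

# hypothetical rows standing in for TaskModel results, newest first
task_models = [
    {"bpmn_process_direct_parent_guid": None, "bpmn_identifier": "manual_task", "guid": "t3"},
    {"bpmn_process_direct_parent_guid": None, "bpmn_identifier": "manual_task", "guid": "t1"},
    {"bpmn_process_direct_parent_guid": "sub1", "bpmn_identifier": "script_task", "guid": "t2"},
]

task_model_list = {}
for task_model in task_models:
    bpmn_process_guid = task_model["bpmn_process_direct_parent_guid"] or "TOP"
    row_key = f"{bpmn_process_guid}:::{task_model['bpmn_identifier']}"
    if row_key not in task_model_list:
        task_model_list[row_key] = task_model  # first (newest) row per key wins

task_models = list(task_model_list.values())
print([t["guid"] for t in task_models])  # ['t3', 't2'] -- one entry per key
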
@@ -693,7 +702,7 @@ def process_instance_reset(
) -> flask.wrappers.Response:
    """Reset a process instance to a particular step."""
    process_instance = _find_process_instance_by_id_or_raise(process_instance_id)
    ProcessInstanceProcessor.reset_process(process_instance, to_task_guid, commit=True)
    ProcessInstanceProcessor.reset_process(process_instance, to_task_guid)
    return Response(json.dumps({"ok": True}), status=200, mimetype="application/json")
@@ -1,5 +1,6 @@
"""Process_instance_processor."""
import _strptime  # type: ignore
import copy
import decimal
import json
import logging
@@ -50,6 +51,8 @@ from SpiffWorkflow.spiff.serializer.config import SPIFF_SPEC_CONFIG  # type: ign
from SpiffWorkflow.task import Task as SpiffTask  # type: ignore
from SpiffWorkflow.task import TaskState
from SpiffWorkflow.util.deep_merge import DeepMerge  # type: ignore
from sqlalchemy import and_
from sqlalchemy import or_

from spiffworkflow_backend.exceptions.api_error import ApiError
from spiffworkflow_backend.models.bpmn_process import BpmnProcessModel
@@ -741,6 +744,9 @@ class ProcessInstanceProcessor:
            spec, subprocesses
        )
        bpmn_process_instance.data[ProcessInstanceProcessor.VALIDATION_PROCESS_KEY] = validate_only

        # run _predict to ensure tasks are predicted to add back in LIKELY and MAYBE tasks
        bpmn_process_instance._predict()
        return (
            bpmn_process_instance,
            full_bpmn_process_dict,
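
The comment above is the heart of this hunk: after rebuilding a workflow from serialized state, calling _predict() repopulates the LIKELY and MAYBE placeholder tasks that are no longer stored in the database. A hedged sketch of the kind of check that comment describes (not part of the diff; it assumes a BpmnWorkflow has already been deserialized elsewhere and that get_tasks accepts a TaskState mask):

from SpiffWorkflow.task import TaskState  # type: ignore

def count_predicted_tasks(bpmn_process_instance) -> int:
    # assumed API: get_tasks filters the task tree by a TaskState bitmask
    return len(bpmn_process_instance.get_tasks(TaskState.LIKELY | TaskState.MAYBE))

# usage sketch, mirroring the code above:
# before = count_predicted_tasks(bpmn_process_instance)
# bpmn_process_instance._predict()
# after = count_predicted_tasks(bpmn_process_instance)
# assert after >= before  # predicted placeholders exist again for unreached branches
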
@@ -1259,127 +1265,124 @@ class ProcessInstanceProcessor:
    # they never get picked up by spiff and processed. The process instance just stops after the to_task_guid
    # and marks itself complete without processing any of the children.
    @classmethod
    def reset_process(
        cls, process_instance: ProcessInstanceModel, to_task_guid: str, commit: Optional[bool] = False
    ) -> None:
    def reset_process(cls, process_instance: ProcessInstanceModel, to_task_guid: str) -> None:
        """Reset a process to an earlier state."""
        raise Exception("This feature to reset a process instance to a given task is currently unavaiable")
        # cls.add_event_to_process_instance(
        #     process_instance, ProcessInstanceEventType.process_instance_rewound_to_task.value, task_guid=to_task_guid
        # )
        #
        # to_task_model = TaskModel.query.filter_by(guid=to_task_guid, process_instance_id=process_instance.id).first()
        # if to_task_model is None:
        #     raise TaskNotFoundError(
        #         f"Cannot find a task with guid '{to_task_guid}' for process instance '{process_instance.id}'"
        #     )
        #
        # parent_bpmn_processes, task_models_of_parent_bpmn_processes = TaskService.task_models_of_parent_bpmn_processes(
        #     to_task_model
        # )
        # [p.guid for p in task_models_of_parent_bpmn_processes if p.guid]
        # [p.id for p in parent_bpmn_processes]
        # tasks_to_update_query = db.session.query(TaskModel).filter(
        #     and_(
        #         or_(
        #             TaskModel.end_in_seconds > to_task_model.end_in_seconds,
        #             TaskModel.end_in_seconds.is_(None),  # type: ignore
        #         ),
        #         TaskModel.process_instance_id == process_instance.id,
        #         # TaskModel.bpmn_process_id.in_(parent_bpmn_processes_ids),  # type: ignore
        #     )
        # )
        # tasks_to_update = tasks_to_update_query.all()
        #
        # # run all queries before making changes to task_model
        # if commit:
        #     # tasks_to_delete_query = db.session.query(TaskModel).filter(
        #     #     and_(
        #     #         or_(
        #     #             TaskModel.end_in_seconds > to_task_model.end_in_seconds,
        #     #             TaskModel.end_in_seconds.is_not(None),  # type: ignore
        #     #         ),
        #     #         TaskModel.process_instance_id == process_instance.id,
        #     #         TaskModel.guid.not_in(task_models_of_parent_bpmn_processes_guids),  # type: ignore
        #     #         TaskModel.bpmn_process_id.not_in(parent_bpmn_processes_ids),  # type: ignore
        #     #     )
        #     # )
        #     #
        #     # tasks_to_delete = tasks_to_delete_query.all()
        #     #
        #     # # delete any later tasks from to_task_model and delete bpmn processes that may be
        #     # # link directly to one of those tasks.
        #     # tasks_to_delete_guids = [t.guid for t in tasks_to_delete]
        #     # tasks_to_delete_ids = [t.id for t in tasks_to_delete]
        #     # bpmn_processes_to_delete = BpmnProcessModel.query.filter(
        #     #     BpmnProcessModel.guid.in_(tasks_to_delete_guids)  # type: ignore
        #     # ).order_by(BpmnProcessModel.id.desc()).all()
        #     # human_tasks_to_delete = HumanTaskModel.query.filter(
        #     #     HumanTaskModel.task_model_id.in_(tasks_to_delete_ids)  # type: ignore
        #     # ).all()
        #     #
        #     #
        #     # import pdb; pdb.set_trace()
        #     # # ensure the correct order for foreign keys
        #     # for human_task_to_delete in human_tasks_to_delete:
        #     #     db.session.delete(human_task_to_delete)
        #     # db.session.commit()
        #     # for task_to_delete in tasks_to_delete:
        #     #     db.session.delete(task_to_delete)
        #     # db.session.commit()
        #     # for bpmn_process_to_delete in bpmn_processes_to_delete:
        #     #     db.session.delete(bpmn_process_to_delete)
        #     # db.session.commit()
        #
        # related_human_task = HumanTaskModel.query.filter_by(task_model_id=to_task_model.id).first()
        # if related_human_task is not None:
        #     db.session.delete(related_human_task)
        #
        # tasks_to_update_ids = [t.id for t in tasks_to_update]
        # human_tasks_to_delete = HumanTaskModel.query.filter(
        #     HumanTaskModel.task_model_id.in_(tasks_to_update_ids)  # type: ignore
        # ).all()
        # for human_task_to_delete in human_tasks_to_delete:
        #     db.session.delete(human_task_to_delete)
        # db.session.commit()
        #
        # for task_to_update in tasks_to_update:
        #     # print(f"task_to_update: {task_to_update}")
        #     print(f"task_to_update.state: {task_to_update.state}")
        #     TaskService.reset_task_model(task_to_update, state="FUTURE", commit=commit)
        #     # TaskService.reset_task_model(task_to_update, state=task_to_update.state, commit=commit)
        #     # if task_to_update.task_definition.bpmn_identifier != 'top_level_process_script_after_gate':
        #     #     TaskService.reset_task_model(task_to_update, state='FUTURE', commit=commit)
        #     # else:
        #     #     TaskService.reset_task_model(task_to_update, state=task_to_update.state, commit=commit)
        #
        # parent_task_model = TaskModel.query.filter_by(guid=to_task_model.properties_json["parent"]).first()
        # if parent_task_model is None:
        #     raise TaskNotFoundError(
        #         f"Cannot find a task with guid '{to_task_guid}' for process instance '{process_instance.id}'"
        #     )
        #
        # TaskService.reset_task_model(
        #     to_task_model,
        #     state="READY",
        #     json_data_hash=parent_task_model.json_data_hash,
        #     python_env_data_hash=parent_task_model.python_env_data_hash,
        #     commit=commit,
        # )
        # for task_model in task_models_of_parent_bpmn_processes:
        #     TaskService.reset_task_model(task_model, state="WAITING", commit=commit)
        #
        # bpmn_process = to_task_model.bpmn_process
        # properties_json = copy.copy(bpmn_process.properties_json)
        # properties_json["last_task"] = parent_task_model.guid
        # bpmn_process.properties_json = properties_json
        # db.session.add(bpmn_process)
        # db.session.commit()
        #
        # if commit:
        #     processor = ProcessInstanceProcessor(process_instance)
        #     processor.save()
        #     processor.suspend()
        # raise Exception("This feature to reset a process instance to a given task is currently unavaiable")
        cls.add_event_to_process_instance(
            process_instance, ProcessInstanceEventType.process_instance_rewound_to_task.value, task_guid=to_task_guid
        )

        to_task_model = TaskModel.query.filter_by(guid=to_task_guid, process_instance_id=process_instance.id).first()
        if to_task_model is None:
            raise TaskNotFoundError(
                f"Cannot find a task with guid '{to_task_guid}' for process instance '{process_instance.id}'"
            )

        # NOTE: run ALL queries before making changes to ensure we get everything before anything changes
        parent_bpmn_processes, task_models_of_parent_bpmn_processes = TaskService.task_models_of_parent_bpmn_processes(
            to_task_model
        )
        task_models_of_parent_bpmn_processes_guids = [p.guid for p in task_models_of_parent_bpmn_processes if p.guid]
        parent_bpmn_processes_ids = [p.id for p in parent_bpmn_processes]

        tasks_to_update_query = db.session.query(TaskModel).filter(
            and_(
                or_(
                    TaskModel.end_in_seconds > to_task_model.end_in_seconds,
                    TaskModel.end_in_seconds.is_(None),  # type: ignore
                ),
                TaskModel.process_instance_id == process_instance.id,
                TaskModel.bpmn_process_id.in_(parent_bpmn_processes_ids),  # type: ignore
            )
        )
        tasks_to_update = tasks_to_update_query.all()
        tasks_to_update_guids = [t.guid for t in tasks_to_update]

        tasks_to_delete_query = db.session.query(TaskModel).filter(
            and_(
                or_(
                    TaskModel.end_in_seconds > to_task_model.end_in_seconds,
                    TaskModel.end_in_seconds.is_not(None),  # type: ignore
                ),
                TaskModel.process_instance_id == process_instance.id,
                TaskModel.guid.not_in(task_models_of_parent_bpmn_processes_guids),  # type: ignore
                TaskModel.bpmn_process_id.not_in(parent_bpmn_processes_ids),  # type: ignore
            )
        )
        tasks_to_delete = tasks_to_delete_query.all()
        tasks_to_delete_guids = [t.guid for t in tasks_to_delete]
        tasks_to_delete_ids = [t.id for t in tasks_to_delete]

        # delete bpmn processes that are also tasks that we either deleted or will update.
        # this is to force spiff to recreate those bpmn processes with the correct associated task guids.
        bpmn_processes_to_delete_query = db.session.query(BpmnProcessModel).filter(
            or_(
                BpmnProcessModel.guid.in_(tasks_to_delete_guids),  # type: ignore
                and_(
                    BpmnProcessModel.guid.in_(tasks_to_update_guids),  # type: ignore
                    BpmnProcessModel.id.not_in(parent_bpmn_processes_ids),  # type: ignore
                ),
            )
        )
        bpmn_processes_to_delete = bpmn_processes_to_delete_query.order_by(
            BpmnProcessModel.id.desc()  # type: ignore
        ).all()

        # delete any human task that was for a task that we deleted since they will get recreated later.
        human_tasks_to_delete = HumanTaskModel.query.filter(
            HumanTaskModel.task_model_id.in_(tasks_to_delete_ids)  # type: ignore
        ).all()

        # ensure the correct order for foreign keys
        for human_task_to_delete in human_tasks_to_delete:
            db.session.delete(human_task_to_delete)
        for task_to_delete in tasks_to_delete:
            db.session.delete(task_to_delete)
        for bpmn_process_to_delete in bpmn_processes_to_delete:
            db.session.delete(bpmn_process_to_delete)

        related_human_task = HumanTaskModel.query.filter_by(task_model_id=to_task_model.id).first()
        if related_human_task is not None:
            db.session.delete(related_human_task)

        tasks_to_update_ids = [t.id for t in tasks_to_update]
        human_tasks_to_delete = HumanTaskModel.query.filter(
            HumanTaskModel.task_model_id.in_(tasks_to_update_ids)  # type: ignore
        ).all()
        for human_task_to_delete in human_tasks_to_delete:
            db.session.delete(human_task_to_delete)

        for task_to_update in tasks_to_update:
            TaskService.reset_task_model(task_to_update, state="FUTURE")
        db.session.bulk_save_objects(tasks_to_update)

        parent_task_model = TaskModel.query.filter_by(guid=to_task_model.properties_json["parent"]).first()
        if parent_task_model is None:
            raise TaskNotFoundError(
                f"Cannot find a task with guid '{to_task_guid}' for process instance '{process_instance.id}'"
            )

        TaskService.reset_task_model(
            to_task_model,
            state="READY",
            json_data_hash=parent_task_model.json_data_hash,
            python_env_data_hash=parent_task_model.python_env_data_hash,
        )
        db.session.add(to_task_model)
        for task_model in task_models_of_parent_bpmn_processes:
            TaskService.reset_task_model(task_model, state="WAITING")
        db.session.bulk_save_objects(task_models_of_parent_bpmn_processes)

        bpmn_process = to_task_model.bpmn_process
        properties_json = copy.copy(bpmn_process.properties_json)
        properties_json["last_task"] = parent_task_model.guid
        bpmn_process.properties_json = properties_json
        db.session.add(bpmn_process)
        db.session.commit()

        processor = ProcessInstanceProcessor(process_instance)
        processor.save()
        processor.suspend()

    @staticmethod
    def get_parser() -> MyCustomParser:
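
One detail of the new reset_process worth calling out is the "ensure the correct order for foreign keys" step: rows that hold a foreign key (human tasks) are deleted before the rows they reference (tasks), which in turn go before their bpmn processes. A standalone sketch of that ordering with a hypothetical two-table schema (illustration only, not the real spiffworkflow-backend models):

from sqlalchemy import Column, ForeignKey, Integer, create_engine
from sqlalchemy.orm import Session, declarative_base

Base = declarative_base()

class Task(Base):  # hypothetical stand-in for TaskModel
    __tablename__ = "task"
    id = Column(Integer, primary_key=True)

class HumanTask(Base):  # hypothetical stand-in for HumanTaskModel
    __tablename__ = "human_task"
    id = Column(Integer, primary_key=True)
    task_id = Column(Integer, ForeignKey("task.id"), nullable=False)

engine = create_engine("sqlite://")
Base.metadata.create_all(engine)

with Session(engine) as session:
    session.add_all([Task(id=1), HumanTask(id=1, task_id=1)])
    session.commit()

    # delete the referencing rows first, then the referenced row, mirroring
    # reset_process: human tasks -> tasks -> bpmn processes
    for human_task in session.query(HumanTask).filter_by(task_id=1).all():
        session.delete(human_task)
    session.delete(session.get(Task, 1))
    session.commit()
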
@@ -68,9 +68,9 @@ class TaskService:
        spiff_task: SpiffTask,
    ) -> None:
        for child_spiff_task in spiff_task.children:
            # if child_spiff_task._has_state(TaskState.PREDICTED_MASK):
            #     self.__class__.remove_spiff_task_from_parent(child_spiff_task, self.task_models)
            #     continue
            if child_spiff_task._has_state(TaskState.PREDICTED_MASK):
                self.__class__.remove_spiff_task_from_parent(child_spiff_task, self.task_models)
                continue
            self.update_task_model_with_spiff_task(
                spiff_task=child_spiff_task,
            )
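
For context on the now-active check above: SpiffWorkflow task states are bit flags, and PREDICTED_MASK is understood to cover the LIKELY and MAYBE states, so _has_state(TaskState.PREDICTED_MASK) matches exactly the predicted placeholder tasks this code skips. A small hedged sketch of that assumption (not part of the diff):

from SpiffWorkflow.task import TaskState  # type: ignore

def is_predicted(state: int) -> bool:
    # mirrors what spiff_task._has_state(TaskState.PREDICTED_MASK) is expected to check
    return bool(state & TaskState.PREDICTED_MASK)

assert is_predicted(TaskState.LIKELY)
assert is_predicted(TaskState.MAYBE)
assert not is_predicted(TaskState.READY)
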
@@ -157,7 +157,7 @@ class TaskService:
        bpmn_process: BpmnProcessModel,
    ) -> None:
        new_properties_json = copy.copy(bpmn_process.properties_json)
        new_properties_json["last_task"] = str(spiff_workflow.last_task) if spiff_workflow.last_task else None
        new_properties_json["last_task"] = str(spiff_workflow.last_task.id) if spiff_workflow.last_task else None
        new_properties_json["success"] = spiff_workflow.success
        bpmn_process.properties_json = new_properties_json
@@ -403,9 +403,9 @@ class TaskService:
            # we are going to avoid saving likely and maybe tasks to the db.
            # that means we need to remove them from their parents' lists of children as well.
            spiff_task = spiff_workflow.get_task_from_id(UUID(task_id))
            # if spiff_task._has_state(TaskState.PREDICTED_MASK):
            #     cls.remove_spiff_task_from_parent(spiff_task, new_task_models)
            #     continue
            if spiff_task._has_state(TaskState.PREDICTED_MASK):
                cls.remove_spiff_task_from_parent(spiff_task, new_task_models)
                continue

            task_model = TaskModel.query.filter_by(guid=task_id).first()
            if task_model is None:
@@ -504,7 +504,6 @@ class TaskService:
        cls,
        task_model: TaskModel,
        state: str,
        commit: Optional[bool] = True,
        json_data_hash: Optional[str] = None,
        python_env_data_hash: Optional[str] = None,
    ) -> None:
@@ -517,24 +516,14 @@ class TaskService:
        else:
            task_model.python_env_data_hash = python_env_data_hash

        new_properties_json = copy.copy(task_model.properties_json)
        task_model.state = state
        task_model.start_in_seconds = None
        task_model.end_in_seconds = None

        if commit:
            db.session.add(task_model)
            db.session.commit()

        new_properties_json = copy.copy(task_model.properties_json)
        new_properties_json["state"] = getattr(TaskState, state)
        task_model.properties_json = new_properties_json

        if commit:
            # if we commit the properties json at the same time as the other items
            # the json gets reset for some reason.
            db.session.add(task_model)
            db.session.commit()

    @classmethod
    def _create_task(
        cls,
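
The copy-then-reassign dance around properties_json in reset_task_model is the standard way to make SQLAlchemy notice a change to a plain JSON column: mutating the loaded dict in place is not tracked, while assigning a new dict is. A self-contained sketch with a hypothetical table (illustration only, not part of this diff):

from sqlalchemy import JSON, Column, Integer, create_engine
from sqlalchemy.orm import Session, declarative_base

Base = declarative_base()

class Demo(Base):  # hypothetical table, not a spiffworkflow-backend model
    __tablename__ = "demo"
    id = Column(Integer, primary_key=True)
    properties_json = Column(JSON, nullable=False)

engine = create_engine("sqlite://")
Base.metadata.create_all(engine)

with Session(engine) as session:
    row = Demo(id=1, properties_json={"state": "READY"})
    session.add(row)
    session.commit()

    row.properties_json["state"] = "FUTURE"    # in-place edit: not detected
    print(session.is_modified(row))            # False

    new_properties_json = dict(row.properties_json)
    new_properties_json["state"] = "FUTURE"
    row.properties_json = new_properties_json  # reassignment: detected
    print(session.is_modified(row))            # True
    session.commit()
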
@@ -12,10 +12,12 @@ from tests.spiffworkflow_backend.helpers.test_data import load_test_spec

from spiffworkflow_backend.exceptions.api_error import ApiError
from spiffworkflow_backend.models.bpmn_process import BpmnProcessModel
from spiffworkflow_backend.models.db import db
from spiffworkflow_backend.models.group import GroupModel
from spiffworkflow_backend.models.process_instance import ProcessInstanceModel
from spiffworkflow_backend.models.process_instance import ProcessInstanceStatus
from spiffworkflow_backend.models.task import TaskModel  # noqa: F401
from spiffworkflow_backend.models.task_definition import TaskDefinitionModel
from spiffworkflow_backend.models.user import UserModel
from spiffworkflow_backend.services.authorization_service import AuthorizationService
from spiffworkflow_backend.services.authorization_service import (
@@ -254,128 +256,128 @@ class TestProcessInstanceProcessor(BaseTest):
        assert spiff_task is not None
        assert spiff_task.state == TaskState.COMPLETED

    # def test_properly_resets_process_to_given_task(
    #     self,
    #     app: Flask,
    #     client: FlaskClient,
    #     with_db_and_bpmn_file_cleanup: None,
    #     with_super_admin_user: UserModel,
    # ) -> None:
    #     self.create_process_group_with_api(client, with_super_admin_user, "test_group", "test_group")
    #     initiator_user = self.find_or_create_user("initiator_user")
    #     finance_user_three = self.find_or_create_user("testuser3")
    #     assert initiator_user.principal is not None
    #     assert finance_user_three.principal is not None
    #     AuthorizationService.import_permissions_from_yaml_file()
    #
    #     finance_group = GroupModel.query.filter_by(identifier="Finance Team").first()
    #     assert finance_group is not None
    #
    #     process_model = load_test_spec(
    #         process_model_id="test_group/manual_task",
    #         process_model_source_directory="manual_task",
    #     )
    #     process_instance = self.create_process_instance_from_process_model(
    #         process_model=process_model, user=initiator_user
    #     )
    #     processor = ProcessInstanceProcessor(process_instance)
    #     processor.do_engine_steps(save=True)
    #     assert len(process_instance.active_human_tasks) == 1
    #     initial_human_task_id = process_instance.active_human_tasks[0].id
    #
    #     # save again to ensure we go attempt to process the human tasks again
    #     processor.save()
    #
    #     assert len(process_instance.active_human_tasks) == 1
    #     assert initial_human_task_id == process_instance.active_human_tasks[0].id
    #
    #     processor = ProcessInstanceProcessor(process_instance)
    #     human_task_one = process_instance.active_human_tasks[0]
    #     spiff_manual_task = processor.__class__.get_task_by_bpmn_identifier(
    #         human_task_one.task_name, processor.bpmn_process_instance
    #     )
    #     assert spiff_manual_task is not None
    #
    #     processor.suspend()
    #     ProcessInstanceProcessor.reset_process(process_instance, str(spiff_manual_task.parent.id), commit=True)
    #
    #     process_instance = ProcessInstanceModel.query.filter_by(id=process_instance.id).first()
    #     processor = ProcessInstanceProcessor(process_instance)
    #     processor.resume()
    #     processor.do_engine_steps(save=True)
    #     human_task_one = process_instance.active_human_tasks[0]
    #     spiff_manual_task = processor.bpmn_process_instance.get_task_from_id(UUID(human_task_one.task_id))
    #     ProcessInstanceService.complete_form_task(processor, spiff_manual_task, {}, initiator_user, human_task_one)
    #     assert process_instance.status == "complete"
    #
    # def test_properly_resets_process_to_given_task_with_call_activity(
    #     self,
    #     app: Flask,
    #     client: FlaskClient,
    #     with_db_and_bpmn_file_cleanup: None,
    #     with_super_admin_user: UserModel,
    # ) -> None:
    #     self.create_process_group_with_api(client, with_super_admin_user, "test_group", "test_group")
    #     initiator_user = self.find_or_create_user("initiator_user")
    #     finance_user_three = self.find_or_create_user("testuser3")
    #     assert initiator_user.principal is not None
    #     assert finance_user_three.principal is not None
    #     AuthorizationService.import_permissions_from_yaml_file()
    #
    #     finance_group = GroupModel.query.filter_by(identifier="Finance Team").first()
    #     assert finance_group is not None
    #
    #     process_model = load_test_spec(
    #         process_model_id="test_group/manual_task_with_subprocesses",
    #         process_model_source_directory="manual_task_with_subprocesses",
    #     )
    #     process_instance = self.create_process_instance_from_process_model(
    #         process_model=process_model, user=initiator_user
    #     )
    #     processor = ProcessInstanceProcessor(process_instance)
    #     processor.do_engine_steps(save=True)
    #     # with open("before_reset.json", 'w') as f: f.write(json.dumps(processor.serialize(), indent=2))
    #     assert len(process_instance.active_human_tasks) == 1
    #     initial_human_task_id = process_instance.active_human_tasks[0].id
    #     assert len(process_instance.active_human_tasks) == 1
    #     assert initial_human_task_id == process_instance.active_human_tasks[0].id
    #
    #     human_task_one = process_instance.active_human_tasks[0]
    #     spiff_manual_task = processor.bpmn_process_instance.get_task_from_id(UUID(human_task_one.task_id))
    #     ProcessInstanceService.complete_form_task(processor, spiff_manual_task, {}, initiator_user, human_task_one)
    #     human_task_one = process_instance.active_human_tasks[0]
    #     spiff_manual_task = processor.bpmn_process_instance.get_task_from_id(UUID(human_task_one.task_id))
    #     ProcessInstanceService.complete_form_task(processor, spiff_manual_task, {}, initiator_user, human_task_one)
    #
    #     # NOTES:
    #     # somehow we are hosing the task state so that when completing tasks of a subprocess, the task AFTER the subprocess task
    #     # is not marked READY but instead stays as FUTURE. Running things like:
    #     # self.last_completed_spiff_task.task_spec._update(self.last_completed_spiff_task)
    #     # and
    #     # self.last_completed_spiff_task.task_spec._predict(self.last_completed_spiff_task, mask=TaskState.NOT_FINISHED_MASK)
    #     # did not help.
    #
    #     processor.suspend()
    #     task_model_to_reset_to = (
    #         TaskModel.query.join(TaskDefinitionModel)
    #         .filter(TaskDefinitionModel.bpmn_identifier == "top_level_subprocess_script")
    #         .order_by(TaskModel.id.desc())  # type: ignore
    #         .first()
    #     )
    #     assert task_model_to_reset_to is not None
    #     ProcessInstanceProcessor.reset_process(process_instance, task_model_to_reset_to.guid, commit=True)
    #
    #     process_instance = ProcessInstanceModel.query.filter_by(id=process_instance.id).first()
    #     processor = ProcessInstanceProcessor(process_instance)
    #     processor.resume()
    #     processor.do_engine_steps(save=True)
    #
    #     assert len(process_instance.active_human_tasks) == 1
    #     human_task_one = process_instance.active_human_tasks[0]
    #     spiff_manual_task = processor.bpmn_process_instance.get_task_from_id(UUID(human_task_one.task_id))
    #     ProcessInstanceService.complete_form_task(processor, spiff_manual_task, {}, initiator_user, human_task_one)
    #
    #     assert process_instance.status == "complete"
    def test_properly_resets_process_to_given_task(
        self,
        app: Flask,
        client: FlaskClient,
        with_db_and_bpmn_file_cleanup: None,
        with_super_admin_user: UserModel,
    ) -> None:
        self.create_process_group_with_api(client, with_super_admin_user, "test_group", "test_group")
        initiator_user = self.find_or_create_user("initiator_user")
        finance_user_three = self.find_or_create_user("testuser3")
        assert initiator_user.principal is not None
        assert finance_user_three.principal is not None
        AuthorizationService.import_permissions_from_yaml_file()

        finance_group = GroupModel.query.filter_by(identifier="Finance Team").first()
        assert finance_group is not None

        process_model = load_test_spec(
            process_model_id="test_group/manual_task",
            process_model_source_directory="manual_task",
        )
        process_instance = self.create_process_instance_from_process_model(
            process_model=process_model, user=initiator_user
        )
        processor = ProcessInstanceProcessor(process_instance)
        processor.do_engine_steps(save=True)
        assert len(process_instance.active_human_tasks) == 1
        initial_human_task_id = process_instance.active_human_tasks[0].id

        # save again to ensure we go attempt to process the human tasks again
        processor.save()

        assert len(process_instance.active_human_tasks) == 1
        assert initial_human_task_id == process_instance.active_human_tasks[0].id

        processor = ProcessInstanceProcessor(process_instance)
        human_task_one = process_instance.active_human_tasks[0]
        spiff_manual_task = processor.__class__.get_task_by_bpmn_identifier(
            human_task_one.task_name, processor.bpmn_process_instance
        )
        assert spiff_manual_task is not None

        processor.suspend()
        ProcessInstanceProcessor.reset_process(process_instance, str(spiff_manual_task.parent.id))

        process_instance = ProcessInstanceModel.query.filter_by(id=process_instance.id).first()
        processor = ProcessInstanceProcessor(process_instance)
        processor.resume()
        processor.do_engine_steps(save=True)
        human_task_one = process_instance.active_human_tasks[0]
        spiff_manual_task = processor.bpmn_process_instance.get_task_from_id(UUID(human_task_one.task_id))
        ProcessInstanceService.complete_form_task(processor, spiff_manual_task, {}, initiator_user, human_task_one)
        assert process_instance.status == "complete"

    def test_properly_resets_process_to_given_task_with_call_activity(
        self,
        app: Flask,
        client: FlaskClient,
        with_db_and_bpmn_file_cleanup: None,
        with_super_admin_user: UserModel,
    ) -> None:
        self.create_process_group_with_api(client, with_super_admin_user, "test_group", "test_group")
        initiator_user = self.find_or_create_user("initiator_user")
        finance_user_three = self.find_or_create_user("testuser3")
        assert initiator_user.principal is not None
        assert finance_user_three.principal is not None
        AuthorizationService.import_permissions_from_yaml_file()

        finance_group = GroupModel.query.filter_by(identifier="Finance Team").first()
        assert finance_group is not None

        process_model = load_test_spec(
            process_model_id="test_group/manual_task_with_subprocesses",
            process_model_source_directory="manual_task_with_subprocesses",
        )
        process_instance = self.create_process_instance_from_process_model(
            process_model=process_model, user=initiator_user
        )
        processor = ProcessInstanceProcessor(process_instance)
        processor.do_engine_steps(save=True)
        assert len(process_instance.active_human_tasks) == 1
        initial_human_task_id = process_instance.active_human_tasks[0].id
        assert len(process_instance.active_human_tasks) == 1
        assert initial_human_task_id == process_instance.active_human_tasks[0].id

        human_task_one = process_instance.active_human_tasks[0]
        spiff_manual_task = processor.bpmn_process_instance.get_task_from_id(UUID(human_task_one.task_id))
        ProcessInstanceService.complete_form_task(processor, spiff_manual_task, {}, initiator_user, human_task_one)
        human_task_one = process_instance.active_human_tasks[0]
        spiff_manual_task = processor.bpmn_process_instance.get_task_from_id(UUID(human_task_one.task_id))
        ProcessInstanceService.complete_form_task(processor, spiff_manual_task, {}, initiator_user, human_task_one)

        processor.suspend()
        task_model_to_reset_to = (
            TaskModel.query.join(TaskDefinitionModel)
            .filter(TaskDefinitionModel.bpmn_identifier == "top_level_subprocess_script")
            .order_by(TaskModel.id.desc())  # type: ignore
            .first()
        )
        assert task_model_to_reset_to is not None
        ProcessInstanceProcessor.reset_process(process_instance, task_model_to_reset_to.guid)

        # make sure sqlalchemy session matches current db state
        db.session.expire_all()
        process_instance = ProcessInstanceModel.query.filter_by(id=process_instance.id).first()
        processor = ProcessInstanceProcessor(process_instance)

        # make sure we reset to the task we expected
        ready_or_waiting_tasks = processor.get_all_ready_or_waiting_tasks()
        top_level_subprocess_script_spiff_task = next(
            task for task in ready_or_waiting_tasks if task.task_spec.name == "top_level_subprocess_script"
        )
        assert top_level_subprocess_script_spiff_task is not None
        processor.resume()
        processor.do_engine_steps(save=True)

        assert len(process_instance.active_human_tasks) == 1
        human_task_one = process_instance.active_human_tasks[0]
        spiff_manual_task = processor.bpmn_process_instance.get_task_from_id(UUID(human_task_one.task_id))
        ProcessInstanceService.complete_form_task(processor, spiff_manual_task, {}, initiator_user, human_task_one)

        assert process_instance.status == "complete"
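
The db.session.expire_all() call in the call-activity test matters because reset_process issues its own queries, deletes, and commit: ORM objects the test loaded earlier can go stale, and expiring them forces the next attribute access to re-read the database. A minimal sketch of that behaviour with a hypothetical model (illustration only, not part of this diff):

from sqlalchemy import Column, Integer, String, create_engine, text
from sqlalchemy.orm import Session, declarative_base

Base = declarative_base()

class Instance(Base):  # hypothetical stand-in for ProcessInstanceModel
    __tablename__ = "instance"
    id = Column(Integer, primary_key=True)
    status = Column(String(20))

engine = create_engine("sqlite://")
Base.metadata.create_all(engine)

with Session(engine) as session:
    session.add(Instance(id=1, status="suspended"))
    session.commit()

    instance = session.get(Instance, 1)
    # raw SQL update: the database row changes but the loaded object does not
    session.execute(text("UPDATE instance SET status = 'waiting' WHERE id = 1"))

    print(instance.status)  # 'suspended' -- stale, previously loaded value
    session.expire_all()
    print(instance.status)  # 'waiting' -- re-read from the database on next access
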
    def test_properly_saves_tasks_when_running(
        self,
@@ -510,18 +512,17 @@ class TestProcessInstanceProcessor(BaseTest):
                f" {expected_task_data_key}."
            )

            # TODO: add back in when removing MAYBE and LIKELY tasks
            # count_failure_message = (
            #     f"{base_failure_message} There are more than 2 entries of this task in the db."
            #     " There should only ever be max 2."
            # )
            # task_models_with_bpmn_identifier_count = (
            #     TaskModel.query.join(TaskDefinitionModel)
            #     .filter(TaskModel.process_instance_id == process_instance_relookup.id)
            #     .filter(TaskDefinitionModel.bpmn_identifier == spiff_task.task_spec.name)
            #     .count()
            # )
            # assert task_models_with_bpmn_identifier_count < 3, count_failure_message
            count_failure_message = (
                f"{base_failure_message} There are more than 2 entries of this task in the db."
                " There should only ever be max 2."
            )
            task_models_with_bpmn_identifier_count = (
                TaskModel.query.join(TaskDefinitionModel)
                .filter(TaskModel.process_instance_id == process_instance_relookup.id)
                .filter(TaskDefinitionModel.bpmn_identifier == spiff_task.task_spec.name)
                .count()
            )
            assert task_models_with_bpmn_identifier_count < 3, count_failure_message
            task_model = TaskModel.query.filter_by(guid=str(spiff_task.id)).first()

            assert task_model.start_in_seconds is not None
@@ -582,13 +583,12 @@ class TestProcessInstanceProcessor(BaseTest):
            )
            assert task_bpmn_identifier in spiff_tasks_checked, message

        # TODO: add back in when removing MAYBE and LIKELY tasks
        # task_models_that_are_predicted_count = (
        #     TaskModel.query.filter(TaskModel.process_instance_id == process_instance_relookup.id)
        #     .filter(TaskModel.state.in_(["LIKELY", "MAYBE"]))  # type: ignore
        #     .count()
        # )
        # assert task_models_that_are_predicted_count == 0
        task_models_that_are_predicted_count = (
            TaskModel.query.filter(TaskModel.process_instance_id == process_instance_relookup.id)
            .filter(TaskModel.state.in_(["LIKELY", "MAYBE"]))  # type: ignore
            .count()
        )
        assert task_models_that_are_predicted_count == 0

        assert processor.get_data() == data_set_7
@@ -236,8 +236,7 @@ export default function ProcessInstanceShow({ variant }: OwnProps) {
    tasks.forEach(function getUserTasksElement(task: Task) {
      if (task.state === 'COMPLETED') {
        taskIds.completed.push(task);
      }
      if (task.state === 'READY' || task.state === 'WAITING') {
      } else if (task.state === 'READY' || task.state === 'WAITING') {
        taskIds.readyOrWaiting.push(task);
      }
      return null;
@@ -674,16 +673,14 @@ export default function ProcessInstanceShow({ variant }: OwnProps) {
    );
  };

  const canResetProcess = (_task: Task) => {
    // disabling this feature for now
    return false;
    // return (
    //   ability.can('POST', targetUris.processInstanceResetPath) &&
    //   processInstance &&
    //   processInstance.status === 'suspended' &&
    //   task.state === 'READY' &&
    //   !showingActiveTask()
    // );
  const canResetProcess = (task: Task) => {
    return (
      ability.can('POST', targetUris.processInstanceResetPath) &&
      processInstance &&
      processInstance.status === 'suspended' &&
      task.state === 'READY' &&
      !showingActiveTask()
    );
  };

  const getEvents = (task: Task) => {