rewind test passed w/ burnettk

This commit is contained in:
jasquat 2023-03-31 15:14:25 -04:00
parent 099973c701
commit 343aae0628
5 changed files with 269 additions and 250 deletions

View File

@@ -1889,8 +1889,8 @@ lxml = "*"
 [package.source]
 type = "git"
 url = "https://github.com/sartography/SpiffWorkflow"
-reference = "main"
-resolved_reference = "62454c99c3a711c38f4249a3b5e7215d42037d72"
+reference = "bugfix/execute-event-gateways-on-ready"
+resolved_reference = "a1795209b415037630a44522fc7cc9d6e70e50d6"

 [[package]]
 name = "sqlalchemy"

@@ -2273,7 +2273,7 @@ testing = ["big-O", "flake8 (<5)", "jaraco.functools", "jaraco.itertools", "more
 [metadata]
 lock-version = "1.1"
 python-versions = ">=3.9,<3.12"
-content-hash = "9fea44386fbab29102a051a254058909568c4ee3dbd6a402fb91aacbcf1f7fd2"
+content-hash = "2dc5b510dcd40c461934921401b09ce6cf9f49ddb440192e819556fbbc6cdbfc"

 [metadata.files]
 alabaster = [

View File

@@ -27,7 +27,7 @@ flask-marshmallow = "*"
 flask-migrate = "*"
 flask-restful = "*"
 werkzeug = "*"
-SpiffWorkflow = {git = "https://github.com/sartography/SpiffWorkflow", rev = "main"}
+SpiffWorkflow = {git = "https://github.com/sartography/SpiffWorkflow", rev = "bugfix/execute-event-gateways-on-ready"}
 # SpiffWorkflow = {git = "https://github.com/sartography/SpiffWorkflow", rev = "6cad2981712bb61eca23af1adfafce02d3277cb9"}
 # SpiffWorkflow = {develop = true, path = "../SpiffWorkflow" }
 sentry-sdk = "^1.10"

View File

@@ -1,5 +1,8 @@
 """Process_instance_processor."""
 import _strptime  # type: ignore
+import copy
+from sqlalchemy import or_
+from sqlalchemy import and_
 import decimal
 import json
 import logging
@@ -741,6 +744,9 @@ class ProcessInstanceProcessor:
             spec, subprocesses
         )
         bpmn_process_instance.data[ProcessInstanceProcessor.VALIDATION_PROCESS_KEY] = validate_only
+        # run _predict to ensure tasks are predicted to add back in LIKELY and MAYBE tasks
+        bpmn_process_instance._predict()
         return (
             bpmn_process_instance,
             full_bpmn_process_dict,
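Note on the new _predict() call: because predicted LIKELY and MAYBE tasks are no longer persisted (see the TaskService changes further down), a workflow rebuilt from stored task models is missing the speculative part of its task tree, and re-running SpiffWorkflow's prediction pass recreates it in memory. The sketch below is illustrative only and assumes a freshly deserialized BpmnWorkflow; restore_predicted_tasks is a made-up helper name, while _predict(), _has_state(), and TaskState.PREDICTED_MASK are the internals this diff itself relies on.

from SpiffWorkflow.task import TaskState


def restore_predicted_tasks(bpmn_process_instance):
    """Re-run the prediction pass and report which speculative tasks it recreated."""
    bpmn_process_instance._predict()
    return [
        task
        for task in bpmn_process_instance.get_tasks()
        if task._has_state(TaskState.PREDICTED_MASK)
    ]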
@@ -1263,123 +1269,129 @@ class ProcessInstanceProcessor:
         cls, process_instance: ProcessInstanceModel, to_task_guid: str, commit: Optional[bool] = False
     ) -> None:
         """Reset a process to an earlier state."""
-        raise Exception("This feature to reset a process instance to a given task is currently unavaiable")
-        # cls.add_event_to_process_instance(
-        #     process_instance, ProcessInstanceEventType.process_instance_rewound_to_task.value, task_guid=to_task_guid
-        # )
-        #
-        # to_task_model = TaskModel.query.filter_by(guid=to_task_guid, process_instance_id=process_instance.id).first()
-        # if to_task_model is None:
-        #     raise TaskNotFoundError(
-        #         f"Cannot find a task with guid '{to_task_guid}' for process instance '{process_instance.id}'"
-        #     )
-        #
-        # parent_bpmn_processes, task_models_of_parent_bpmn_processes = TaskService.task_models_of_parent_bpmn_processes(
-        #     to_task_model
-        # )
-        # [p.guid for p in task_models_of_parent_bpmn_processes if p.guid]
-        # [p.id for p in parent_bpmn_processes]
-        # tasks_to_update_query = db.session.query(TaskModel).filter(
-        #     and_(
-        #         or_(
-        #             TaskModel.end_in_seconds > to_task_model.end_in_seconds,
-        #             TaskModel.end_in_seconds.is_(None),  # type: ignore
-        #         ),
-        #         TaskModel.process_instance_id == process_instance.id,
-        #         # TaskModel.bpmn_process_id.in_(parent_bpmn_processes_ids),  # type: ignore
-        #     )
-        # )
-        # tasks_to_update = tasks_to_update_query.all()
-        #
-        # # run all queries before making changes to task_model
-        # if commit:
-        #     # tasks_to_delete_query = db.session.query(TaskModel).filter(
-        #     #     and_(
-        #     #         or_(
-        #     #             TaskModel.end_in_seconds > to_task_model.end_in_seconds,
-        #     #             TaskModel.end_in_seconds.is_not(None),  # type: ignore
-        #     #         ),
-        #     #         TaskModel.process_instance_id == process_instance.id,
-        #     #         TaskModel.guid.not_in(task_models_of_parent_bpmn_processes_guids),  # type: ignore
-        #     #         TaskModel.bpmn_process_id.not_in(parent_bpmn_processes_ids),  # type: ignore
-        #     #     )
-        #     # )
-        #     #
-        #     # tasks_to_delete = tasks_to_delete_query.all()
-        #     #
-        #     # # delete any later tasks from to_task_model and delete bpmn processes that may be
-        #     # # link directly to one of those tasks.
-        #     # tasks_to_delete_guids = [t.guid for t in tasks_to_delete]
-        #     # tasks_to_delete_ids = [t.id for t in tasks_to_delete]
-        #     # bpmn_processes_to_delete = BpmnProcessModel.query.filter(
-        #     #     BpmnProcessModel.guid.in_(tasks_to_delete_guids)  # type: ignore
-        #     # ).order_by(BpmnProcessModel.id.desc()).all()
-        #     # human_tasks_to_delete = HumanTaskModel.query.filter(
-        #     #     HumanTaskModel.task_model_id.in_(tasks_to_delete_ids)  # type: ignore
-        #     # ).all()
-        #     #
-        #     #
-        #     # import pdb; pdb.set_trace()
-        #     # # ensure the correct order for foreign keys
-        #     # for human_task_to_delete in human_tasks_to_delete:
-        #     #     db.session.delete(human_task_to_delete)
-        #     # db.session.commit()
-        #     # for task_to_delete in tasks_to_delete:
-        #     #     db.session.delete(task_to_delete)
-        #     # db.session.commit()
-        #     # for bpmn_process_to_delete in bpmn_processes_to_delete:
-        #     #     db.session.delete(bpmn_process_to_delete)
-        #     # db.session.commit()
-        #
-        # related_human_task = HumanTaskModel.query.filter_by(task_model_id=to_task_model.id).first()
-        # if related_human_task is not None:
-        #     db.session.delete(related_human_task)
-        #
-        # tasks_to_update_ids = [t.id for t in tasks_to_update]
-        # human_tasks_to_delete = HumanTaskModel.query.filter(
-        #     HumanTaskModel.task_model_id.in_(tasks_to_update_ids)  # type: ignore
-        # ).all()
-        # for human_task_to_delete in human_tasks_to_delete:
-        #     db.session.delete(human_task_to_delete)
-        # db.session.commit()
-        #
-        # for task_to_update in tasks_to_update:
-        #     # print(f"task_to_update: {task_to_update}")
-        #     print(f"task_to_update.state: {task_to_update.state}")
-        #     TaskService.reset_task_model(task_to_update, state="FUTURE", commit=commit)
-        #     # TaskService.reset_task_model(task_to_update, state=task_to_update.state, commit=commit)
-        #     # if task_to_update.task_definition.bpmn_identifier != 'top_level_process_script_after_gate':
-        #     #     TaskService.reset_task_model(task_to_update, state='FUTURE', commit=commit)
-        #     # else:
-        #     #     TaskService.reset_task_model(task_to_update, state=task_to_update.state, commit=commit)
-        #
-        # parent_task_model = TaskModel.query.filter_by(guid=to_task_model.properties_json["parent"]).first()
-        # if parent_task_model is None:
-        #     raise TaskNotFoundError(
-        #         f"Cannot find a task with guid '{to_task_guid}' for process instance '{process_instance.id}'"
-        #     )
-        #
-        # TaskService.reset_task_model(
-        #     to_task_model,
-        #     state="READY",
-        #     json_data_hash=parent_task_model.json_data_hash,
-        #     python_env_data_hash=parent_task_model.python_env_data_hash,
-        #     commit=commit,
-        # )
-        # for task_model in task_models_of_parent_bpmn_processes:
-        #     TaskService.reset_task_model(task_model, state="WAITING", commit=commit)
-        #
-        # bpmn_process = to_task_model.bpmn_process
-        # properties_json = copy.copy(bpmn_process.properties_json)
-        # properties_json["last_task"] = parent_task_model.guid
-        # bpmn_process.properties_json = properties_json
-        # db.session.add(bpmn_process)
-        # db.session.commit()
-        #
-        # if commit:
-        #     processor = ProcessInstanceProcessor(process_instance)
-        #     processor.save()
-        #     processor.suspend()
+        # raise Exception("This feature to reset a process instance to a given task is currently unavaiable")
+        cls.add_event_to_process_instance(
+            process_instance, ProcessInstanceEventType.process_instance_rewound_to_task.value, task_guid=to_task_guid
+        )
+
+        to_task_model = TaskModel.query.filter_by(guid=to_task_guid, process_instance_id=process_instance.id).first()
+        if to_task_model is None:
+            raise TaskNotFoundError(
+                f"Cannot find a task with guid '{to_task_guid}' for process instance '{process_instance.id}'"
+            )
+
+        parent_bpmn_processes, task_models_of_parent_bpmn_processes = TaskService.task_models_of_parent_bpmn_processes(
+            to_task_model
+        )
+        task_models_of_parent_bpmn_processes_guids = [p.guid for p in task_models_of_parent_bpmn_processes if p.guid]
+        parent_bpmn_processes_ids = [p.id for p in parent_bpmn_processes]
+        tasks_to_update_query = db.session.query(TaskModel).filter(
+            and_(
+                or_(
+                    TaskModel.end_in_seconds > to_task_model.end_in_seconds,
+                    TaskModel.end_in_seconds.is_(None),  # type: ignore
+                ),
+                TaskModel.process_instance_id == process_instance.id,
+                TaskModel.bpmn_process_id.in_(parent_bpmn_processes_ids),  # type: ignore
+            )
+        )
+        tasks_to_update = tasks_to_update_query.all()
+        tasks_to_update_guids = [t.guid for t in tasks_to_update]
+
+        bpmn_processes_to_update_query = db.session.query(BpmnProcessModel).filter(
+            and_(
+                BpmnProcessModel.guid.in_(tasks_to_update_guids),  # type: ignore
+                BpmnProcessModel.id.not_in(parent_bpmn_processes_ids),  # type: ignore
+            )
+        )
+        bpmn_processes_to_update = bpmn_processes_to_update_query.all()
+
+        # run all queries before making changes to task_model
+        if commit:
+            tasks_to_delete_query = db.session.query(TaskModel).filter(
+                and_(
+                    or_(
+                        TaskModel.end_in_seconds > to_task_model.end_in_seconds,
+                        TaskModel.end_in_seconds.is_not(None),  # type: ignore
+                    ),
+                    TaskModel.process_instance_id == process_instance.id,
+                    TaskModel.guid.not_in(task_models_of_parent_bpmn_processes_guids),  # type: ignore
+                    TaskModel.bpmn_process_id.not_in(parent_bpmn_processes_ids),  # type: ignore
+                )
+            )
+
+            tasks_to_delete = tasks_to_delete_query.all()
+
+            # delete any later tasks from to_task_model and delete bpmn processes that may be
+            # link directly to one of those tasks.
+            tasks_to_delete_guids = [t.guid for t in tasks_to_delete]
+            tasks_to_delete_ids = [t.id for t in tasks_to_delete]
+            bpmn_processes_to_delete = BpmnProcessModel.query.filter(
+                BpmnProcessModel.guid.in_(tasks_to_delete_guids)  # type: ignore
+            ).order_by(BpmnProcessModel.id.desc()).all()
+            human_tasks_to_delete = HumanTaskModel.query.filter(
+                HumanTaskModel.task_model_id.in_(tasks_to_delete_ids)  # type: ignore
+            ).all()
+
+            # import pdb; pdb.set_trace()
+            # ensure the correct order for foreign keys
+            for human_task_to_delete in human_tasks_to_delete:
+                db.session.delete(human_task_to_delete)
+            db.session.commit()
+            for task_to_delete in tasks_to_delete:
+                db.session.delete(task_to_delete)
+            db.session.commit()
+            for bpmn_process_to_delete in bpmn_processes_to_delete:
+                db.session.delete(bpmn_process_to_delete)
+            db.session.commit()
+
+        related_human_task = HumanTaskModel.query.filter_by(task_model_id=to_task_model.id).first()
+        if related_human_task is not None:
+            db.session.delete(related_human_task)
+
+        tasks_to_update_ids = [t.id for t in tasks_to_update]
+        human_tasks_to_delete = HumanTaskModel.query.filter(
+            HumanTaskModel.task_model_id.in_(tasks_to_update_ids)  # type: ignore
+        ).all()
+        for human_task_to_delete in human_tasks_to_delete:
+            db.session.delete(human_task_to_delete)
+        db.session.commit()
+
+        for task_to_update in tasks_to_update:
+            print(f"task_to_update.state: {task_to_update.state}")
+            TaskService.reset_task_model(task_to_update, state="FUTURE", commit=commit)
+
+        for bpmn_process_to_update in bpmn_processes_to_update:
+            db.session.delete(bpmn_process_to_update)
+
+        parent_task_model = TaskModel.query.filter_by(guid=to_task_model.properties_json["parent"]).first()
+        if parent_task_model is None:
+            raise TaskNotFoundError(
+                f"Cannot find a task with guid '{to_task_guid}' for process instance '{process_instance.id}'"
+            )
+
+        TaskService.reset_task_model(
+            to_task_model,
+            state="READY",
+            json_data_hash=parent_task_model.json_data_hash,
+            python_env_data_hash=parent_task_model.python_env_data_hash,
+            commit=commit,
+        )
+        for task_model in task_models_of_parent_bpmn_processes:
+            TaskService.reset_task_model(task_model, state="WAITING", commit=commit)
+
+        bpmn_process = to_task_model.bpmn_process
+        properties_json = copy.copy(bpmn_process.properties_json)
+        properties_json["last_task"] = parent_task_model.guid
+        bpmn_process.properties_json = properties_json
+        db.session.add(bpmn_process)
+        db.session.commit()
+
+        import pdb; pdb.set_trace()
+        if commit:
+            processor = ProcessInstanceProcessor(process_instance)
+            processor.save()
+            processor.suspend()
 
     @staticmethod
     def get_parser() -> MyCustomParser:
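For context, the re-enabled rewind above is driven in the same sequence the tests later in this commit use: suspend the instance, call reset_process with the guid of the task to rewind to, then rebuild the processor and resume. A minimal sketch under those assumptions (the import path follows this repo's package layout; error handling and the re-query of the instance row are omitted):

from spiffworkflow_backend.services.process_instance_processor import ProcessInstanceProcessor


def rewind_to_task(process_instance, to_task_guid):
    # stop new work while task and bpmn process rows are deleted/reset
    processor = ProcessInstanceProcessor(process_instance)
    processor.suspend()
    # with commit=True, reset_process saves and re-suspends, so rebuild before resuming
    ProcessInstanceProcessor.reset_process(process_instance, to_task_guid, commit=True)
    processor = ProcessInstanceProcessor(process_instance)
    processor.resume()
    processor.do_engine_steps(save=True)
    return processor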

View File

@@ -68,9 +68,9 @@ class TaskService:
         spiff_task: SpiffTask,
     ) -> None:
         for child_spiff_task in spiff_task.children:
-            # if child_spiff_task._has_state(TaskState.PREDICTED_MASK):
-            #     self.__class__.remove_spiff_task_from_parent(child_spiff_task, self.task_models)
-            #     continue
+            if child_spiff_task._has_state(TaskState.PREDICTED_MASK):
+                self.__class__.remove_spiff_task_from_parent(child_spiff_task, self.task_models)
+                continue
             self.update_task_model_with_spiff_task(
                 spiff_task=child_spiff_task,
             )

@@ -157,7 +157,7 @@ class TaskService:
         bpmn_process: BpmnProcessModel,
     ) -> None:
         new_properties_json = copy.copy(bpmn_process.properties_json)
-        new_properties_json["last_task"] = str(spiff_workflow.last_task) if spiff_workflow.last_task else None
+        new_properties_json["last_task"] = str(spiff_workflow.last_task.id) if spiff_workflow.last_task else None
         new_properties_json["success"] = spiff_workflow.success
         bpmn_process.properties_json = new_properties_json

@@ -403,9 +403,9 @@ class TaskService:
             # we are going to avoid saving likely and maybe tasks to the db.
             # that means we need to remove them from their parents' lists of children as well.
             spiff_task = spiff_workflow.get_task_from_id(UUID(task_id))
-            # if spiff_task._has_state(TaskState.PREDICTED_MASK):
-            #     cls.remove_spiff_task_from_parent(spiff_task, new_task_models)
-            #     continue
+            if spiff_task._has_state(TaskState.PREDICTED_MASK):
+                cls.remove_spiff_task_from_parent(spiff_task, new_task_models)
+                continue
             task_model = TaskModel.query.filter_by(guid=task_id).first()
             if task_model is None:
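Both re-enabled branches in this file apply the same rule: tasks that SpiffWorkflow has merely predicted (the LIKELY and MAYBE states the diff's own comment refers to, matched via TaskState.PREDICTED_MASK) are pruned from their parents' children and never persisted. A rough, self-contained illustration of that filtering rule follows; only _has_state and TaskState.PREDICTED_MASK come from the diff, while the helper name and return shape are invented for the example.

from SpiffWorkflow.task import TaskState


def split_tasks_for_persistence(spiff_tasks):
    """Separate tasks worth saving from predicted ones that should be pruned."""
    to_persist = []
    to_prune = []
    for spiff_task in spiff_tasks:
        if spiff_task._has_state(TaskState.PREDICTED_MASK):
            to_prune.append(spiff_task)  # mirrors remove_spiff_task_from_parent + continue
        else:
            to_persist.append(spiff_task)
    return to_persist, to_prune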

View File

@@ -1,5 +1,6 @@
 """Test_process_instance_processor."""
 from uuid import UUID
+from spiffworkflow_backend.models.task_definition import TaskDefinitionModel

 import pytest
 from flask import g
@@ -254,128 +255,134 @@ class TestProcessInstanceProcessor(BaseTest):
         assert spiff_task is not None
         assert spiff_task.state == TaskState.COMPLETED
 
-    # def test_properly_resets_process_to_given_task(
-    #     self,
-    #     app: Flask,
-    #     client: FlaskClient,
-    #     with_db_and_bpmn_file_cleanup: None,
-    #     with_super_admin_user: UserModel,
-    # ) -> None:
-    #     self.create_process_group_with_api(client, with_super_admin_user, "test_group", "test_group")
-    #     initiator_user = self.find_or_create_user("initiator_user")
-    #     finance_user_three = self.find_or_create_user("testuser3")
-    #     assert initiator_user.principal is not None
-    #     assert finance_user_three.principal is not None
-    #     AuthorizationService.import_permissions_from_yaml_file()
-    #
-    #     finance_group = GroupModel.query.filter_by(identifier="Finance Team").first()
-    #     assert finance_group is not None
-    #
-    #     process_model = load_test_spec(
-    #         process_model_id="test_group/manual_task",
-    #         process_model_source_directory="manual_task",
-    #     )
-    #     process_instance = self.create_process_instance_from_process_model(
-    #         process_model=process_model, user=initiator_user
-    #     )
-    #     processor = ProcessInstanceProcessor(process_instance)
-    #     processor.do_engine_steps(save=True)
-    #     assert len(process_instance.active_human_tasks) == 1
-    #     initial_human_task_id = process_instance.active_human_tasks[0].id
-    #
-    #     # save again to ensure we go attempt to process the human tasks again
-    #     processor.save()
-    #
-    #     assert len(process_instance.active_human_tasks) == 1
-    #     assert initial_human_task_id == process_instance.active_human_tasks[0].id
-    #
-    #     processor = ProcessInstanceProcessor(process_instance)
-    #     human_task_one = process_instance.active_human_tasks[0]
-    #     spiff_manual_task = processor.__class__.get_task_by_bpmn_identifier(
-    #         human_task_one.task_name, processor.bpmn_process_instance
-    #     )
-    #     assert spiff_manual_task is not None
-    #
-    #     processor.suspend()
-    #     ProcessInstanceProcessor.reset_process(process_instance, str(spiff_manual_task.parent.id), commit=True)
-    #
-    #     process_instance = ProcessInstanceModel.query.filter_by(id=process_instance.id).first()
-    #     processor = ProcessInstanceProcessor(process_instance)
-    #     processor.resume()
-    #     processor.do_engine_steps(save=True)
-    #     human_task_one = process_instance.active_human_tasks[0]
-    #     spiff_manual_task = processor.bpmn_process_instance.get_task_from_id(UUID(human_task_one.task_id))
-    #     ProcessInstanceService.complete_form_task(processor, spiff_manual_task, {}, initiator_user, human_task_one)
-    #     assert process_instance.status == "complete"
-    #
-    # def test_properly_resets_process_to_given_task_with_call_activity(
-    #     self,
-    #     app: Flask,
-    #     client: FlaskClient,
-    #     with_db_and_bpmn_file_cleanup: None,
-    #     with_super_admin_user: UserModel,
-    # ) -> None:
-    #     self.create_process_group_with_api(client, with_super_admin_user, "test_group", "test_group")
-    #     initiator_user = self.find_or_create_user("initiator_user")
-    #     finance_user_three = self.find_or_create_user("testuser3")
-    #     assert initiator_user.principal is not None
-    #     assert finance_user_three.principal is not None
-    #     AuthorizationService.import_permissions_from_yaml_file()
-    #
-    #     finance_group = GroupModel.query.filter_by(identifier="Finance Team").first()
-    #     assert finance_group is not None
-    #
-    #     process_model = load_test_spec(
-    #         process_model_id="test_group/manual_task_with_subprocesses",
-    #         process_model_source_directory="manual_task_with_subprocesses",
-    #     )
-    #     process_instance = self.create_process_instance_from_process_model(
-    #         process_model=process_model, user=initiator_user
-    #     )
-    #     processor = ProcessInstanceProcessor(process_instance)
-    #     processor.do_engine_steps(save=True)
-    #     # with open("before_reset.json", 'w') as f: f.write(json.dumps(processor.serialize(), indent=2))
-    #     assert len(process_instance.active_human_tasks) == 1
-    #     initial_human_task_id = process_instance.active_human_tasks[0].id
-    #     assert len(process_instance.active_human_tasks) == 1
-    #     assert initial_human_task_id == process_instance.active_human_tasks[0].id
-    #
-    #     human_task_one = process_instance.active_human_tasks[0]
-    #     spiff_manual_task = processor.bpmn_process_instance.get_task_from_id(UUID(human_task_one.task_id))
-    #     ProcessInstanceService.complete_form_task(processor, spiff_manual_task, {}, initiator_user, human_task_one)
-    #     human_task_one = process_instance.active_human_tasks[0]
-    #     spiff_manual_task = processor.bpmn_process_instance.get_task_from_id(UUID(human_task_one.task_id))
-    #     ProcessInstanceService.complete_form_task(processor, spiff_manual_task, {}, initiator_user, human_task_one)
-    #
-    #     # NOTES:
-    #     # somehow we are hosing the task state so that when completing tasks of a subprocess, the task AFTER the subprocess task
-    #     # is not marked READY but instead stays as FUTURE. Running things like:
-    #     # self.last_completed_spiff_task.task_spec._update(self.last_completed_spiff_task)
-    #     # and
-    #     # self.last_completed_spiff_task.task_spec._predict(self.last_completed_spiff_task, mask=TaskState.NOT_FINISHED_MASK)
-    #     # did not help.
-    #
-    #     processor.suspend()
-    #     task_model_to_reset_to = (
-    #         TaskModel.query.join(TaskDefinitionModel)
-    #         .filter(TaskDefinitionModel.bpmn_identifier == "top_level_subprocess_script")
-    #         .order_by(TaskModel.id.desc())  # type: ignore
-    #         .first()
-    #     )
-    #     assert task_model_to_reset_to is not None
-    #     ProcessInstanceProcessor.reset_process(process_instance, task_model_to_reset_to.guid, commit=True)
-    #
-    #     process_instance = ProcessInstanceModel.query.filter_by(id=process_instance.id).first()
-    #     processor = ProcessInstanceProcessor(process_instance)
-    #     processor.resume()
-    #     processor.do_engine_steps(save=True)
-    #
-    #     assert len(process_instance.active_human_tasks) == 1
-    #     human_task_one = process_instance.active_human_tasks[0]
-    #     spiff_manual_task = processor.bpmn_process_instance.get_task_from_id(UUID(human_task_one.task_id))
-    #     ProcessInstanceService.complete_form_task(processor, spiff_manual_task, {}, initiator_user, human_task_one)
-    #
-    #     assert process_instance.status == "complete"
+    def test_properly_resets_process_to_given_task(
+        self,
+        app: Flask,
+        client: FlaskClient,
+        with_db_and_bpmn_file_cleanup: None,
+        with_super_admin_user: UserModel,
+    ) -> None:
+        self.create_process_group_with_api(client, with_super_admin_user, "test_group", "test_group")
+        initiator_user = self.find_or_create_user("initiator_user")
+        finance_user_three = self.find_or_create_user("testuser3")
+        assert initiator_user.principal is not None
+        assert finance_user_three.principal is not None
+        AuthorizationService.import_permissions_from_yaml_file()
+
+        finance_group = GroupModel.query.filter_by(identifier="Finance Team").first()
+        assert finance_group is not None
+
+        process_model = load_test_spec(
+            process_model_id="test_group/manual_task",
+            process_model_source_directory="manual_task",
+        )
+        process_instance = self.create_process_instance_from_process_model(
+            process_model=process_model, user=initiator_user
+        )
+        processor = ProcessInstanceProcessor(process_instance)
+        processor.do_engine_steps(save=True)
+        assert len(process_instance.active_human_tasks) == 1
+        initial_human_task_id = process_instance.active_human_tasks[0].id
+
+        # save again to ensure we go attempt to process the human tasks again
+        processor.save()
+
+        assert len(process_instance.active_human_tasks) == 1
+        assert initial_human_task_id == process_instance.active_human_tasks[0].id
+
+        processor = ProcessInstanceProcessor(process_instance)
+        human_task_one = process_instance.active_human_tasks[0]
+        spiff_manual_task = processor.__class__.get_task_by_bpmn_identifier(
+            human_task_one.task_name, processor.bpmn_process_instance
+        )
+        assert spiff_manual_task is not None
+
+        processor.suspend()
+        ProcessInstanceProcessor.reset_process(process_instance, str(spiff_manual_task.parent.id), commit=True)
+
+        process_instance = ProcessInstanceModel.query.filter_by(id=process_instance.id).first()
+        processor = ProcessInstanceProcessor(process_instance)
+        processor.resume()
+        processor.do_engine_steps(save=True)
+        human_task_one = process_instance.active_human_tasks[0]
+        spiff_manual_task = processor.bpmn_process_instance.get_task_from_id(UUID(human_task_one.task_id))
+        ProcessInstanceService.complete_form_task(processor, spiff_manual_task, {}, initiator_user, human_task_one)
+        assert process_instance.status == "complete"
+
+    def test_properly_resets_process_to_given_task_with_call_activity(
+        self,
+        app: Flask,
+        client: FlaskClient,
+        with_db_and_bpmn_file_cleanup: None,
+        with_super_admin_user: UserModel,
+    ) -> None:
+        self.create_process_group_with_api(client, with_super_admin_user, "test_group", "test_group")
+        initiator_user = self.find_or_create_user("initiator_user")
+        finance_user_three = self.find_or_create_user("testuser3")
+        assert initiator_user.principal is not None
+        assert finance_user_three.principal is not None
+        AuthorizationService.import_permissions_from_yaml_file()
+
+        finance_group = GroupModel.query.filter_by(identifier="Finance Team").first()
+        assert finance_group is not None
+
+        process_model = load_test_spec(
+            process_model_id="test_group/manual_task_with_subprocesses",
+            process_model_source_directory="manual_task_with_subprocesses",
+        )
+        process_instance = self.create_process_instance_from_process_model(
+            process_model=process_model, user=initiator_user
+        )
+        processor = ProcessInstanceProcessor(process_instance)
+        processor.do_engine_steps(save=True)
+        # with open("before_reset.json", 'w') as f: f.write(json.dumps(processor.serialize(), indent=2))
+        assert len(process_instance.active_human_tasks) == 1
+        initial_human_task_id = process_instance.active_human_tasks[0].id
+        assert len(process_instance.active_human_tasks) == 1
+        assert initial_human_task_id == process_instance.active_human_tasks[0].id
+
+        # import pdb; pdb.set_trace()
+        human_task_one = process_instance.active_human_tasks[0]
+        spiff_manual_task = processor.bpmn_process_instance.get_task_from_id(UUID(human_task_one.task_id))
+        ProcessInstanceService.complete_form_task(processor, spiff_manual_task, {}, initiator_user, human_task_one)
+        # import pdb; pdb.set_trace()
+        human_task_one = process_instance.active_human_tasks[0]
+        spiff_manual_task = processor.bpmn_process_instance.get_task_from_id(UUID(human_task_one.task_id))
+        ProcessInstanceService.complete_form_task(processor, spiff_manual_task, {}, initiator_user, human_task_one)
+        # import pdb; pdb.set_trace()
+
+        # NOTES:
+        # somehow we are hosing the task state so that when completing tasks of a subprocess, the task AFTER the subprocess task
+        # is not marked READY but instead stays as FUTURE. Running things like:
+        # self.last_completed_spiff_task.task_spec._update(self.last_completed_spiff_task)
+        # and
+        # self.last_completed_spiff_task.task_spec._predict(self.last_completed_spiff_task, mask=TaskState.NOT_FINISHED_MASK)
+        # did not help.
+
+        processor.suspend()
+        task_model_to_reset_to = (
+            TaskModel.query.join(TaskDefinitionModel)
+            .filter(TaskDefinitionModel.bpmn_identifier == "top_level_subprocess_script")
+            .order_by(TaskModel.id.desc())  # type: ignore
+            .first()
+        )
+        assert task_model_to_reset_to is not None
+        import pdb; pdb.set_trace()
+        ProcessInstanceProcessor.reset_process(process_instance, task_model_to_reset_to.guid, commit=True)
+        import pdb; pdb.set_trace()
+
+        process_instance = ProcessInstanceModel.query.filter_by(id=process_instance.id).first()
+        processor = ProcessInstanceProcessor(process_instance)
+        processor.resume()
+        processor.do_engine_steps(save=True)
+        import pdb; pdb.set_trace()
+
+        assert len(process_instance.active_human_tasks) == 1
+        human_task_one = process_instance.active_human_tasks[0]
+        spiff_manual_task = processor.bpmn_process_instance.get_task_from_id(UUID(human_task_one.task_id))
+        ProcessInstanceService.complete_form_task(processor, spiff_manual_task, {}, initiator_user, human_task_one)
+        assert process_instance.status == "complete"
 
     def test_properly_saves_tasks_when_running(
         self,