do not save predicted tasks to the db w/ burnettk

jasquat 2023-03-30 12:41:42 -04:00
parent 31cb1ab27c
commit b4b54f3fff
3 changed files with 64 additions and 35 deletions

View File

@@ -68,12 +68,15 @@ class TaskService:
         spiff_task: SpiffTask,
     ) -> None:
         for child_spiff_task in spiff_task.children:
-            self.update_task_model_with_spiff_task(
-                spiff_task=child_spiff_task,
-            )
-            self.process_spiff_task_children(
-                spiff_task=child_spiff_task,
-            )
+            if child_spiff_task._has_state(TaskState.PREDICTED_MASK):
+                self.__class__.remove_spiff_task_from_parent(child_spiff_task, self.task_models)
+            else:
+                self.update_task_model_with_spiff_task(
+                    spiff_task=child_spiff_task,
+                )
+                self.process_spiff_task_children(
+                    spiff_task=child_spiff_task,
+                )

     def process_spiff_task_parents(
         self,
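A note on the state check above: SpiffWorkflow represents task states as bit flags, and TaskState.PREDICTED_MASK covers the two predicted states, LIKELY and MAYBE, which the library assigns to tasks it has forecast but not actually run. A minimal sketch of the bitwise test that _has_state performs (flag values illustrative, not necessarily the library's real numbers):

    # Illustrative stand-ins for SpiffWorkflow's TaskState constants.
    MAYBE = 1          # might be reached, depending on gateway decisions
    LIKELY = 2         # expected to be reached, but not yet executed
    PREDICTED_MASK = MAYBE | LIKELY

    def has_state(state: int, mask: int) -> bool:
        # A task "has" a state when any bit of the mask is set on it.
        return (state & mask) != 0

So a predicted child is unlinked from its persisted parent instead of being saved, and the walk does not descend into its equally speculative subtree.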
@@ -137,7 +140,7 @@ class TaskService:
             )
             self.process_instance_events[task_model.guid] = process_instance_event

-        # self.update_bpmn_process(spiff_task.workflow, bpmn_process)
+        self.update_bpmn_process(spiff_task.workflow, bpmn_process)
         return task_model

     def update_bpmn_process(
@@ -315,7 +318,7 @@ class TaskService:
         if "subprocess_specs" in bpmn_process_dict:
             bpmn_process_dict.pop("subprocess_specs")

-        new_task_models = {}
+        new_task_models: dict[str, TaskModel] = {}
         new_json_data_dicts: dict[str, JsonDataDict] = {}
         bpmn_process = None
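The annotation added to new_task_models is for the type checker: mypy cannot infer a value type for an empty dict literal, so the bare assignment fails while the annotated form matches its sibling new_json_data_dicts. The error reads roughly:

    # Without the annotation, mypy reports something like:
    #   error: Need type annotation for "new_task_models"
    #          (hint: "new_task_models: dict[<type>, <type>] = ...")
    new_task_models: dict[str, TaskModel] = {}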
@@ -386,7 +389,12 @@ class TaskService:
            if task_properties["task_spec"] == "Root":
                continue

+           # we are going to avoid saving likely and maybe tasks to the db.
+           # that means we need to remove them from their parents' lists of children as well.
            spiff_task = spiff_workflow.get_task_from_id(UUID(task_id))
+           if spiff_task._has_state(TaskState.PREDICTED_MASK):
+               cls.remove_spiff_task_from_parent(spiff_task, new_task_models)
+               continue

            task_model = TaskModel.query.filter_by(guid=task_id).first()
            if task_model is None:
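This is the serialization-side counterpart of the change in process_spiff_task_children: predicted tasks are skipped rather than turned into TaskModel rows, and any parent already queued for saving must stop listing them as children so the stored tree stays self-consistent. A standalone sketch of that pruning pattern (names hypothetical):

    def prune_predicted_children(spiff_task, queued_models):
        # queued_models maps guid -> model about to be persisted
        for child in spiff_task.children:
            if child.is_predicted():  # stand-in for _has_state(TaskState.PREDICTED_MASK)
                parent = queued_models.get(str(spiff_task.id))
                if parent is not None and str(child.id) in parent.children:
                    # no row will exist for this child, so leave no dangling reference
                    parent.children.remove(str(child.id))
            else:
                prune_predicted_children(child, queued_models)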
@@ -406,6 +414,18 @@ class TaskService:
                 new_json_data_dicts[python_env_dict["hash"]] = python_env_dict
         return (bpmn_process, new_task_models, new_json_data_dicts)

+    @classmethod
+    def remove_spiff_task_from_parent(cls, spiff_task: SpiffTask, task_models: dict[str, TaskModel]) -> None:
+        """Removes the given spiff task from its parent and then updates the task_models dict with the changes."""
+        spiff_task_parent_guid = str(spiff_task.parent.id)
+        spiff_task_guid = str(spiff_task.id)
+        if spiff_task_parent_guid in task_models:
+            parent_task_model = task_models[spiff_task_parent_guid]
+            new_parent_properties_json = copy.copy(parent_task_model.properties_json)
+            new_parent_properties_json["children"].remove(spiff_task_guid)
+            parent_task_model.properties_json = new_parent_properties_json
+            task_models[spiff_task_parent_guid] = parent_task_model
+
     @classmethod
     def update_task_data_on_bpmn_process(
         cls, bpmn_process: BpmnProcessModel, bpmn_process_data_dict: dict
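The copy-then-reassign dance in remove_spiff_task_from_parent is the usual workaround for SQLAlchemy JSON columns that are not wrapped in MutableDict: mutating the stored dict in place fires no attribute event, so the session is never notified, whereas assigning a fresh object to the attribute registers the change. A sketch of the difference, assuming a plain JSON column and names shortened from the method above:

    import copy
    from sqlalchemy.orm.attributes import flag_modified

    # In-place mutation: no attribute assignment occurs, so the session is not notified.
    parent.properties_json["children"].remove(child_guid)

    # Copy, mutate, reassign: the assignment is what registers the change.
    new_props = copy.copy(parent.properties_json)
    new_props["children"].remove(child_guid)
    parent.properties_json = new_props

    # Belt-and-braces alternative: tell SQLAlchemy explicitly that the column changed.
    flag_modified(parent, "properties_json")

Note that copy.copy is shallow, so the nested "children" list is still shared with the original dict; a deep copy, or flag_modified as above, sidesteps that sharing entirely.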

View File

@@ -61,9 +61,6 @@ class TaskModelSavingDelegate(EngineStepDelegate):
         self.current_task_model: Optional[TaskModel] = None
         self.current_task_start_in_seconds: Optional[float] = None

-        # self.task_models: dict[str, TaskModel] = {}
-        # self.json_data_dicts: dict[str, JsonDataDict] = {}
-        # self.process_instance_events: dict[str, ProcessInstanceEventModel] = {}
         self.last_completed_spiff_task: Optional[SpiffTask] = None

         self.task_service = TaskService(

View File

@@ -418,54 +418,59 @@ class TestProcessInstanceProcessor(BaseTest):
         processor_final = ProcessInstanceProcessor(process_instance_relookup)
         assert process_instance_relookup.status == "complete"

-        first_data_set = {"set_in_top_level_script": 1}
-        second_data_set = {
-            **first_data_set,
+        data_set_1 = {"set_in_top_level_script": 1}
+        data_set_2 = {
+            **data_set_1,
             **{"set_in_top_level_subprocess": 1, "we_move_on": False},
         }
-        third_data_set = {
-            **second_data_set,
+        data_set_3 = {
+            **data_set_2,
             **{
                 "set_in_test_process_to_call_script": 1,
                 "set_in_test_process_to_call_subprocess_subprocess_script": 1,
                 "set_in_test_process_to_call_subprocess_script": 1,
             },
         }
-        fourth_data_set = {**third_data_set, **{"a": 1, "we_move_on": True}}
-        fifth_data_set = {**fourth_data_set, **{"set_top_level_process_script_after_gate": 1}}
-        sixth_data_set = {**fifth_data_set, **{"validate_only": False, "set_top_level_process_script_after_gate": 1}}
+        data_set_4 = {
+            **data_set_3,
+            **{
+                "set_in_test_process_to_call_script": 1,
+            },
+        }
+        data_set_5 = {**data_set_4, **{"a": 1, "we_move_on": True}}
+        data_set_6 = {**data_set_5, **{"set_top_level_process_script_after_gate": 1}}
+        data_set_7 = {**data_set_6, **{"validate_only": False, "set_top_level_process_script_after_gate": 1}}
         expected_task_data = {
-            "top_level_script": {"data": first_data_set, "bpmn_process_identifier": "top_level_process"},
-            "top_level_manual_task_one": {"data": first_data_set, "bpmn_process_identifier": "top_level_process"},
-            "top_level_manual_task_two": {"data": first_data_set, "bpmn_process_identifier": "top_level_process"},
+            "top_level_script": {"data": data_set_1, "bpmn_process_identifier": "top_level_process"},
+            "top_level_manual_task_one": {"data": data_set_1, "bpmn_process_identifier": "top_level_process"},
+            "top_level_manual_task_two": {"data": data_set_1, "bpmn_process_identifier": "top_level_process"},
             "top_level_subprocess_script": {
-                "data": second_data_set,
+                "data": data_set_2,
                 "bpmn_process_identifier": "top_level_subprocess",
             },
-            "top_level_subprocess": {"data": second_data_set, "bpmn_process_identifier": "top_level_process"},
+            "top_level_subprocess": {"data": data_set_2, "bpmn_process_identifier": "top_level_process"},
             "test_process_to_call_subprocess_script": {
-                "data": third_data_set,
+                "data": data_set_3,
                 "bpmn_process_identifier": "test_process_to_call_subprocess",
             },
-            "top_level_call_activity": {"data": third_data_set, "bpmn_process_identifier": "top_level_process"},
+            "top_level_call_activity": {"data": data_set_4, "bpmn_process_identifier": "top_level_process"},
             "top_level_manual_task_two_second": {
-                "data": third_data_set,
+                "data": data_set_4,
                 "bpmn_process_identifier": "top_level_process",
             },
             "top_level_subprocess_script_second": {
-                "data": fourth_data_set,
+                "data": data_set_5,
                 "bpmn_process_identifier": "top_level_subprocess",
             },
-            "top_level_subprocess_second": {"data": fourth_data_set, "bpmn_process_identifier": "top_level_process"},
+            "top_level_subprocess_second": {"data": data_set_5, "bpmn_process_identifier": "top_level_process"},
             "test_process_to_call_subprocess_script_second": {
-                "data": fourth_data_set,
+                "data": data_set_5,
                 "bpmn_process_identifier": "test_process_to_call_subprocess",
             },
             "top_level_call_activity_second": {
-                "data": fourth_data_set,
+                "data": data_set_5,
                 "bpmn_process_identifier": "top_level_process",
             },
-            "end_event_of_manual_task_model": {"data": fifth_data_set, "bpmn_process_identifier": "top_level_process"},
+            "end_event_of_manual_task_model": {"data": data_set_6, "bpmn_process_identifier": "top_level_process"},
         }

         spiff_tasks_checked: list[str] = []
@@ -496,6 +501,7 @@ class TestProcessInstanceProcessor(BaseTest):
             )
             task_models_with_bpmn_identifier_count = (
                 TaskModel.query.join(TaskDefinitionModel)
+                .filter(TaskModel.process_instance_id == process_instance_relookup.id)
                 .filter(TaskDefinitionModel.bpmn_identifier == spiff_task.task_spec.name)
                 .count()
             )
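The extra filter added here scopes the count to the process instance under test; without it, task rows left behind by other instances sharing the test database could satisfy or break the per-identifier count. Schematically:

    # Before: counts matching tasks from any process instance.
    TaskModel.query.join(TaskDefinitionModel).filter(
        TaskDefinitionModel.bpmn_identifier == spiff_task.task_spec.name
    ).count()

    # After: only this instance's tasks can satisfy the assertion.
    TaskModel.query.join(TaskDefinitionModel).filter(
        TaskModel.process_instance_id == process_instance_relookup.id,
        TaskDefinitionModel.bpmn_identifier == spiff_task.task_spec.name,
    ).count()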
@@ -519,7 +525,6 @@ class TestProcessInstanceProcessor(BaseTest):
             )
             # TODO: if we split out env data again we will need to use it here instead of json_data
             # assert task_model.python_env_data() == expected_python_env_data, message
-            # import pdb; pdb.set_trace()
             assert task_model.json_data() == expected_python_env_data, message

         all_spiff_tasks = processor_final.bpmn_process_instance.get_tasks()
@@ -561,7 +566,14 @@ class TestProcessInstanceProcessor(BaseTest):
             )
             assert task_bpmn_identifier in spiff_tasks_checked, message

-        assert processor.get_data() == sixth_data_set
+        task_models_that_are_predicted_count = (
+            TaskModel.query.filter(TaskModel.process_instance_id == process_instance_relookup.id)
+            .filter(TaskModel.state.in_(["LIKELY", "MAYBE"]))  # type: ignore
+            .count()
+        )
+        assert task_models_that_are_predicted_count == 0
+
+        assert processor.get_data() == data_set_7

     def test_does_not_recreate_human_tasks_on_multiple_saves(
         self,
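The new task_models_that_are_predicted_count assertion is the behavioral check for the whole commit: after a completed run, no persisted task row may be in a predicted state. The filter works on state names because TaskModel.state evidently stores the SpiffWorkflow state as a string, so LIKELY and MAYBE, the names behind TaskState.PREDICTED_MASK, are what a leak would look like in the database. The same invariant as a reusable helper (hypothetical, not part of the commit):

    def assert_no_predicted_rows(process_instance_id: int) -> None:
        # LIKELY and MAYBE are the two names covered by TaskState.PREDICTED_MASK.
        predicted_count = (
            TaskModel.query.filter(TaskModel.process_instance_id == process_instance_id)
            .filter(TaskModel.state.in_(["LIKELY", "MAYBE"]))
            .count()
        )
        assert predicted_count == 0, "predicted tasks leaked into the db"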