fixed up tests and ran pyl w/ burnettk

jasquat 2023-03-15 16:10:23 -04:00
parent bc619af0a0
commit 96989c676e
6 changed files with 50 additions and 43 deletions

View File

@@ -1,5 +1,3 @@
from __future__ import with_statement
import logging
from logging.config import fileConfig

View File

@@ -1,5 +1,4 @@
from __future__ import annotations
from typing import Optional
from spiffworkflow_backend.models.db import db
from spiffworkflow_backend.models.db import SpiffworkflowBaseDBModel
@@ -35,12 +34,15 @@ class JsonDataModel(SpiffworkflowBaseDBModel):
@classmethod
def find_object_by_hash(cls, hash: str) -> JsonDataModel:
json_data_model: Optional[JsonDataModel] = JsonDataModel.query.filter_by(hash=hash).first()
json_data_model: JsonDataModel | None = JsonDataModel.query.filter_by(
hash=hash
).first()
if json_data_model is None:
raise JsonDataModelNotFoundError(f"Could not find a json data model entry with hash: {hash}")
raise JsonDataModelNotFoundError(
f"Could not find a json data model entry with hash: {hash}"
)
return json_data_model
@classmethod
def find_data_dict_by_hash(cls, hash: str) -> dict:
return cls.find_object_by_hash(hash).data
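Note: a minimal usage sketch of the hash lookup above, assuming JsonDataModel and JsonDataModelNotFoundError are importable from this model module; the helper name is illustrative and not part of this commit.

def load_task_data_or_empty(data_hash: str) -> dict:
    # fall back to an empty dict when no json_data row exists for the hash
    try:
        return JsonDataModel.find_data_dict_by_hash(data_hash)
    except JsonDataModelNotFoundError:
        return {}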

View File

@@ -1184,6 +1184,7 @@ class ProcessInstanceProcessor:
process_instance=self.process_instance_model,
bpmn_definition_to_task_definitions_mappings=self.bpmn_definition_to_task_definitions_mappings,
spiff_workflow=self.bpmn_process_instance,
serializer=self._serializer,
)
)
for subprocess_task_id, subprocess_properties in subprocesses.items():
@@ -1198,6 +1199,7 @@ class ProcessInstanceProcessor:
bpmn_process_guid=subprocess_task_id,
bpmn_definition_to_task_definitions_mappings=self.bpmn_definition_to_task_definitions_mappings,
spiff_workflow=self.bpmn_process_instance,
serializer=self._serializer,
)
new_task_models.update(subprocess_new_task_models)
new_json_data_dicts.update(subprocess_new_json_data_models)
@@ -1812,8 +1814,7 @@ class ProcessInstanceProcessor:
"""Serialize."""
self.check_task_data_size()
self.preserve_script_engine_state()
# return self._serializer.workflow_to_dict(self.bpmn_process_instance) # type: ignore
return json.loads(self._serializer.serialize_json(self.bpmn_process_instance)) # type: ignore
return self._serializer.workflow_to_dict(self.bpmn_process_instance) # type: ignore
def next_user_tasks(self) -> list[SpiffTask]:
"""Next_user_tasks."""

View File

@@ -38,7 +38,7 @@ class TaskService:
if current_app.config["SPIFFWORKFLOW_BACKEND_DATABASE_TYPE"] == "mysql":
insert_stmt = mysql_insert(JsonDataModel).values(list_of_dicts)
on_duplicate_key_stmt = insert_stmt.on_duplicate_key_update(
data=insert_stmt.inserted.data, status="U"
data=insert_stmt.inserted.data
)
else:
insert_stmt = postgres_insert(JsonDataModel).values(list_of_dicts)
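Note: a hedged sketch of the MySQL upsert pattern in the hunk above after dropping status="U" (presumably because JsonDataModel has no such column); the rows and session call below are illustrative only, not part of this commit.

from sqlalchemy.dialects.mysql import insert as mysql_insert

rows = [{"hash": "abc123", "data": {"x": 1}}]  # hypothetical payload
insert_stmt = mysql_insert(JsonDataModel).values(rows)
# on a duplicate hash, overwrite the stored data with the incoming value
on_duplicate_key_stmt = insert_stmt.on_duplicate_key_update(data=insert_stmt.inserted.data)
db.session.execute(on_duplicate_key_stmt)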
@@ -61,7 +61,9 @@ class TaskService:
"""
new_properties_json = serializer.task_to_dict(spiff_task)
spiff_task_data = new_properties_json.pop("data")
python_env_data_dict = cls._get_python_env_data_dict_from_spiff_task(spiff_task)
python_env_data_dict = cls._get_python_env_data_dict_from_spiff_task(
spiff_task, serializer
)
task_model.properties_json = new_properties_json
task_model.state = TaskStateNames[new_properties_json["state"]]
json_data_dict = cls._update_task_data_on_task_model(
@@ -153,6 +155,7 @@ class TaskService:
process_instance=process_instance,
bpmn_definition_to_task_definitions_mappings=bpmn_definition_to_task_definitions_mappings,
spiff_workflow=spiff_workflow,
serializer=serializer,
)
)
else:
@@ -169,6 +172,7 @@ class TaskService:
bpmn_process_guid=subprocess_guid,
bpmn_definition_to_task_definitions_mappings=bpmn_definition_to_task_definitions_mappings,
spiff_workflow=spiff_workflow,
serializer=serializer,
)
)
return (bpmn_process, new_task_models, new_json_data_dicts)
@@ -180,6 +184,7 @@ class TaskService:
process_instance: ProcessInstanceModel,
bpmn_definition_to_task_definitions_mappings: dict,
spiff_workflow: BpmnWorkflow,
serializer: BpmnWorkflowSerializer,
bpmn_process_parent: Optional[BpmnProcessModel] = None,
bpmn_process_guid: Optional[str] = None,
) -> Tuple[BpmnProcessModel, dict[str, TaskModel], dict[str, JsonDataDict]]:
@@ -267,7 +272,9 @@ class TaskService:
if json_data_dict is not None:
new_json_data_dicts[json_data_dict["hash"]] = json_data_dict
python_env_data_dict = cls._get_python_env_data_dict_from_spiff_task(spiff_task)
python_env_data_dict = cls._get_python_env_data_dict_from_spiff_task(
spiff_task, serializer
)
python_env_dict = TaskService._update_task_data_on_task_model(
task_model, python_env_data_dict, "python_env_data_hash"
)
@@ -278,7 +285,7 @@ class TaskService:
@classmethod
def _update_task_data_on_task_model(
cls, task_model: TaskModel, task_data_dict: dict, task_model_data_column: str
cls, task_model: TaskModel, task_data_dict: dict, task_model_data_column: str
) -> Optional[JsonDataDict]:
task_data_json = json.dumps(task_data_dict, sort_keys=True)
task_data_hash: str = sha256(task_data_json.encode("utf8")).hexdigest()
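Note: a small self-contained illustration of the content hash used above; sorting keys before dumping makes the hash stable across dicts that differ only in insertion order.

import json
from hashlib import sha256

def task_data_hash(task_data: dict) -> str:
    # same scheme as _update_task_data_on_task_model: sorted JSON, then sha256
    return sha256(json.dumps(task_data, sort_keys=True).encode("utf8")).hexdigest()

assert task_data_hash({"a": 1, "b": 2}) == task_data_hash({"b": 2, "a": 1})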
@@ -308,5 +315,12 @@ class TaskService:
return task_model
@classmethod
def _get_python_env_data_dict_from_spiff_task(cls, spiff_task: SpiffTask) -> dict:
return spiff_task.workflow.script_engine.environment.user_defined_state()
def _get_python_env_data_dict_from_spiff_task(
cls, spiff_task: SpiffTask, serializer: BpmnWorkflowSerializer
) -> dict:
user_defined_state = (
spiff_task.workflow.script_engine.environment.user_defined_state()
)
# this converts items like datetime objects so they are json serializable
converted_data: dict = serializer.data_converter.convert(user_defined_state)
return converted_data
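Note: a hedged sketch of why the data_converter pass matters, assuming serializer is the BpmnWorkflowSerializer passed into the method above; the exact converted representation depends on which converters are registered, so the datetime below is only an example of a value that plain json.dumps would reject.

import json
from datetime import datetime

user_defined_state = {"started_at": datetime(2023, 3, 15, 16, 10)}
# json.dumps(user_defined_state) would raise TypeError: not JSON serializable
converted = serializer.data_converter.convert(user_defined_state)
json.dumps(converted)  # succeeds once registered types are converted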

View File

@@ -78,12 +78,13 @@ class TaskModelSavingDelegate(EngineStepDelegate):
"""
return self.process_instance.bpmn_process_id is not None
def _update_json_data_dicts_using_list(self, json_data_dict_list: list[Optional[JsonDataDict]]) -> None:
def _update_json_data_dicts_using_list(
self, json_data_dict_list: list[Optional[JsonDataDict]]
) -> None:
for json_data_dict in json_data_dict_list:
if json_data_dict is not None:
self.json_data_dicts[json_data_dict["hash"]] = json_data_dict
def will_complete_task(self, spiff_task: SpiffTask) -> None:
if self.should_update_task_model():
_bpmn_process, task_model, new_task_models, new_json_data_dicts = (

View File

@@ -359,15 +359,15 @@ class TestProcessInstanceProcessor(BaseTest):
assert process_instance_relookup.status == "complete"
first_data_set = {"set_in_top_level_script": 1}
second_data_set = {**first_data_set, **{"set_in_top_level_subprocess": 1, "we_move_on": False}}
second_data_set = {
**first_data_set,
**{"set_in_top_level_subprocess": 1, "we_move_on": False},
}
third_data_set = {
**second_data_set,
**{"set_in_test_process_to_call_script": 1},
}
fourth_data_set = {
**third_data_set,
**{'a': 1, 'we_move_on': True}
}
fourth_data_set = {**third_data_set, **{"a": 1, "we_move_on": True}}
expected_task_data = {
"top_level_script": first_data_set,
"manual_task": first_data_set,
@@ -382,11 +382,16 @@ class TestProcessInstanceProcessor(BaseTest):
spiff_tasks_checked_once: list = []
def assert_spiff_task_is_in_process(spiff_task_name: str, bpmn_process_identifier: str) -> None:
# TODO: also check task data here from the spiff_task directly to ensure we hydrated spiff correctly
def assert_spiff_task_is_in_process(
spiff_task_name: str, bpmn_process_identifier: str
) -> None:
if spiff_task.task_spec.name == spiff_task_name:
expected_python_env_data = expected_task_data[spiff_task.task_spec.name]
if spiff_task.task_spec.name in spiff_tasks_checked_once:
expected_python_env_data = expected_task_data[f"{spiff_task.task_spec.name}_second"]
expected_python_env_data = expected_task_data[
f"{spiff_task.task_spec.name}_second"
]
task = TaskModel.query.filter_by(guid=str(spiff_task.id)).first()
assert task.task_definition_id is not None
task_definition = task.task_definition
@@ -395,9 +400,6 @@
task_definition.bpmn_process_definition.bpmn_identifier
== bpmn_process_identifier
)
print(f"spiff_task_name: {spiff_task_name}")
print(f"task.json_data(): {task.json_data()}")
print(f"task.python_env_data(): {task.python_env_data()}")
assert task.python_env_data() == expected_python_env_data
spiff_tasks_checked_once.append(spiff_task.task_spec.name)
@@ -405,24 +407,13 @@
assert len(all_spiff_tasks) > 1
for spiff_task in all_spiff_tasks:
assert spiff_task.state == TaskState.COMPLETED
assert_spiff_task_is_in_process("test_process_to_call_script", "test_process_to_call")
assert_spiff_task_is_in_process("top_level_subprocess_script", "top_level_subprocess")
assert_spiff_task_is_in_process(
"test_process_to_call_script", "test_process_to_call"
)
assert_spiff_task_is_in_process(
"top_level_subprocess_script", "top_level_subprocess"
)
assert_spiff_task_is_in_process("top_level_script", "top_level_process")
# FIXME: Checking task data cannot work with the feature/remove-loop-reset branch
# of SpiffWorkflow. This is because it saves script data to the python_env and NOT
# to task.data. We may need to either create a new column on TaskModel to put the python_env
# data or we could just shove it back onto the task data when adding to the database.
# Right now everything works in practice because the python_env data is on the top level workflow
# and so is always there but is also always the most recent. If we want to replace spiff_step_details
# with TaskModel then we'll need some way to store python_env on each task.
# spiff_task_name = spiff_task.task_spec.name
# if spiff_task_name in expected_task_data:
# spiff_task_data = expected_task_data[spiff_task_name]
# failure_message = (
# f"Found unexpected task data on {spiff_task_name}. "
# f"Expected: {spiff_task_data}, Found: {spiff_task.data}"
# )
# assert spiff_task.data == spiff_task_data, failure_message
def test_does_not_recreate_human_tasks_on_multiple_saves(
self,