most unit tests are passing now and the use of bpmn_json is almost gone in src

jasquat 2023-03-01 09:22:38 -05:00
parent f74ce0f568
commit 950106fe21
3 changed files with 46 additions and 19 deletions
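
For context, the refactor this commit works toward splits the old ProcessInstanceModel.bpmn_json blob into two tables: a shared, hashable definition record and a per-instance runtime record. A minimal sketch of what those models might look like, assuming plain SQLAlchemy declarative models and using only the columns referenced in the diffs below (column types and constraints here are guesses, not the project's actual definitions):

# Sketch only: class and column names come from the diffs below; types are assumptions.
from sqlalchemy import Column, Integer, String, Text
from sqlalchemy.orm import declarative_base

Base = declarative_base()


class SerializedBpmnDefinitionModel(Base):
    # Static, shareable part of the serialized workflow (spec, subprocess_specs,
    # serializer_version), deduplicated by a hash of its JSON.
    __tablename__ = "serialized_bpmn_definition"
    id = Column(Integer, primary_key=True)
    hash = Column(String(255), unique=True, index=True)
    static_json = Column(Text)


class ProcessInstanceDataModel(Base):
    # Mutable, per-instance part: tasks, subprocesses, and other runtime state.
    __tablename__ = "process_instance_data"
    id = Column(Integer, primary_key=True)
    process_instance_id = Column(Integer, index=True)  # FK to process_instance.id in the real model
    runtime_json = Column(Text)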

View File

@@ -25,6 +25,7 @@ from spiffworkflow_backend.models.process_instance import ProcessInstanceModelSc
from spiffworkflow_backend.models.process_instance import (
ProcessInstanceTaskDataCannotBeUpdatedError,
)
from spiffworkflow_backend.models.process_instance_data import ProcessInstanceDataModel
from spiffworkflow_backend.models.process_model import ProcessModelInfo
from spiffworkflow_backend.models.spec_reference import SpecReferenceCache
from spiffworkflow_backend.models.spec_reference import SpecReferenceSchema
@@ -194,16 +195,25 @@ def task_data_update(
f" It is currently: {process_instance.status}"
)
process_instance_bpmn_json_dict = json.loads(process_instance.bpmn_json)
process_instance_data = ProcessInstanceDataModel.query.filter_by(process_instance_id=process_instance.id).first()
if process_instance_data is None:
raise ApiError(
error_code="process_instance_data_not_found",
message=(
f"Could not find task data related to process instance: {process_instance.id}"
),
)
process_instance_data_dict = json.loads(process_instance_data.runtime_json)
if "new_task_data" in body:
new_task_data_str: str = body["new_task_data"]
new_task_data_dict = json.loads(new_task_data_str)
if task_id in process_instance_bpmn_json_dict["tasks"]:
process_instance_bpmn_json_dict["tasks"][task_id][
if task_id in process_instance_data_dict["tasks"]:
process_instance_data_dict["tasks"][task_id][
"data"
] = new_task_data_dict
process_instance.bpmn_json = json.dumps(process_instance_bpmn_json_dict)
db.session.add(process_instance)
process_instance_data.runtime_json = json.dumps(process_instance_data_dict)
db.session.add(process_instance_data)
try:
db.session.commit()
except Exception as e:
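
The net effect of this hunk is that task data now lives in ProcessInstanceDataModel.runtime_json instead of ProcessInstanceModel.bpmn_json. A pure-data sketch of the JSON manipulation the endpoint performs between the query and the commit (the helper name and the sample ids are illustrative; only the JSON shape comes from the code above):

import json


def replace_task_data(runtime_json: str, task_id: str, new_task_data_str: str) -> str:
    # Decode the instance's runtime blob, swap one task's "data" dict,
    # and re-encode it for storage on ProcessInstanceDataModel.runtime_json.
    runtime_dict = json.loads(runtime_json)
    new_task_data_dict = json.loads(new_task_data_str)
    if task_id in runtime_dict["tasks"]:
        runtime_dict["tasks"][task_id]["data"] = new_task_data_dict
    return json.dumps(runtime_dict)


# Example with a made-up task id:
before = json.dumps({"tasks": {"abc-123": {"data": {"x": 1}}}, "subprocesses": {}})
after = replace_task_data(before, "abc-123", json.dumps({"x": 2}))
assert json.loads(after)["tasks"]["abc-123"]["data"] == {"x": 2}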

View File

@@ -1,5 +1,6 @@
"""APIs for dealing with process groups, process models, and process instances."""
import json
from spiffworkflow_backend.models.process_instance_data import ProcessInstanceDataModel
from typing import Any
from typing import Dict
from typing import Optional
@@ -550,9 +551,12 @@ def process_instance_task_list(
)
step_details = step_detail_query.all()
bpmn_json = json.loads(process_instance.bpmn_json or "{}")
tasks = bpmn_json["tasks"]
subprocesses = bpmn_json["subprocesses"]
process_instance_data = ProcessInstanceDataModel.query.filter_by(process_instance_id=process_instance.id).first()
process_instance_data_json = "{}" if process_instance_data is None else process_instance_data.runtime_json
process_instance_data_dict = json.loads(process_instance_data_json)
tasks = process_instance_data_dict["tasks"]
subprocesses = process_instance_data_dict["subprocesses"]
steps_by_id = {step_detail.task_id: step_detail for step_detail in step_details}
@@ -584,7 +588,7 @@ def process_instance_task_list(
spiff_task_id, TaskState.FUTURE
)
process_instance.bpmn_json = json.dumps(bpmn_json)
process_instance_data.runtime_json = json.dumps(process_instance_data_dict)
processor = ProcessInstanceProcessor(process_instance)
spiff_task = processor.__class__.get_task_by_bpmn_identifier(
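
The hunk above only shows the top of process_instance_task_list, but the shape it relies on is the serialized workflow's top-level "tasks" dict plus a "tasks" dict inside each serialized subprocess. A hedged helper (not the endpoint's actual code) showing how tasks from both levels could be collected by id out of runtime_json:

import json


def tasks_by_id(runtime_json: str) -> dict:
    # Top-level tasks plus tasks nested inside each serialized subprocess.
    runtime_dict = json.loads(runtime_json or "{}")
    combined = dict(runtime_dict.get("tasks", {}))
    for subprocess in runtime_dict.get("subprocesses", {}).values():
        combined.update(subprocess.get("tasks", {}))
    return combined


runtime = json.dumps({
    "tasks": {"t1": {"data": {}}},
    "subprocesses": {"sp1": {"tasks": {"t2": {"data": {}}}}},
})
assert set(tasks_by_id(runtime)) == {"t1", "t2"}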

View File

@@ -520,9 +520,9 @@ class ProcessInstanceProcessor:
)
@classmethod
def get_full_bpmn_json(self, process_instance_model: ProcessInstanceModel) -> Optional[dict]:
def get_full_bpmn_json(cls, process_instance_model: ProcessInstanceModel) -> dict:
if process_instance_model.serialized_bpmn_definition_id is None:
return None
return {}
serialized_bpmn_definition = process_instance_model.serialized_bpmn_definition
process_instance_data = ProcessInstanceDataModel.query.filter_by(process_instance_id=process_instance_model.id).first()
# if process_instance_data is not None:
@@ -848,19 +848,32 @@
)
return subprocesses_by_child_task_ids
def save(self) -> None:
"""Saves the current state of this processor to the database."""
# self.process_instance_model.bpmn_json = self.serialize()
bpmn_json = self.serialize()
def add_bpmn_json_records(self) -> None:
bpmn_dict = json.loads(self.serialize())
bpmn_dict_keys = ('spec', 'subprocess_specs', 'serializer_version')
bpmn_spec_dict = {}
process_instance_data_dict = {}
for bpmn_key in bpmn_dict.keys():
if bpmn_key in bpmn_dict_keys:
bpmn_spec_dict[bpmn_key] = bpmn_dict[bpmn_key]
else:
process_instance_data_dict[bpmn_key] = bpmn_dict[bpmn_key]
if self.process_instance_model.serialized_bpmn_definition_id is None:
new_hash = {k: bpmn_json[k] for k in ('spec', 'subprocess_spec', 'serializer_version')}
new_hash_digest = sha256(json.dumps(new_hash, sort_keys=True).encode('utf8')).hexdigest()
serialized_bpmn_definition = SerializedBpmnDefinitionModel(hash=new_hash_digest).first()
new_hash_digest = sha256(json.dumps(bpmn_spec_dict, sort_keys=True).encode('utf8')).hexdigest()
serialized_bpmn_definition = SerializedBpmnDefinitionModel.query.filter_by(hash=new_hash_digest).first()
if serialized_bpmn_definition is None:
serialized_bpmn_definition = SerializedBpmnDefinitionModel(hash=new_hash_digest, static_json=json.dumps(new_hash))
serialized_bpmn_definition = SerializedBpmnDefinitionModel(hash=new_hash_digest, static_json=json.dumps(bpmn_spec_dict))
db.session.add(serialized_bpmn_definition)
process_instance_data = ProcessInstanceDataModel.query.filter_by(process_instance_id=self.process_instance_model.id).first()
if process_instance_data is None:
process_instance_data = ProcessInstanceDataModel(process_instance_id=self.process_instance_model.id)
process_instance_data.runtime_json = json.dumps(process_instance_data_dict)
db.session.add(process_instance_data)
def save(self) -> None:
"""Saves the current state of this processor to the database."""
self.add_bpmn_json_records()
complete_states = [TaskState.CANCELLED, TaskState.COMPLETED]
user_tasks = list(self.get_all_user_tasks())
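
The new add_bpmn_json_records is the heart of the change: serialize the workflow once, route the spec-ish keys into the shared SerializedBpmnDefinitionModel (deduplicated by a sha256 over the sorted JSON), and route everything else into the instance's ProcessInstanceDataModel. A standalone restatement of that split, with the key tuple and hashing taken from the code above and the helper name plus toy payload invented for illustration:

import json
from hashlib import sha256

SPEC_KEYS = ("spec", "subprocess_specs", "serializer_version")


def split_serialized_workflow(serialized: str):
    # Partition top-level keys into the static spec dict and the runtime dict,
    # and hash the spec dict so identical definitions can share one row.
    bpmn_dict = json.loads(serialized)
    spec_dict = {k: v for k, v in bpmn_dict.items() if k in SPEC_KEYS}
    runtime_dict = {k: v for k, v in bpmn_dict.items() if k not in SPEC_KEYS}
    spec_hash = sha256(json.dumps(spec_dict, sort_keys=True).encode("utf8")).hexdigest()
    return spec_dict, runtime_dict, spec_hash


serialized = json.dumps({
    "serializer_version": "1.1",
    "spec": {"name": "top_level_process"},
    "subprocess_specs": {},
    "tasks": {"t1": {"data": {}}},
    "subprocesses": {},
})
spec, runtime, digest = split_serialized_workflow(serialized)
assert set(spec) == {"serializer_version", "spec", "subprocess_specs"}
assert set(runtime) == {"tasks", "subprocesses"}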