jasquat 2023-03-01 12:35:08 -05:00
parent 28fd2a3966
commit e9ce360a10
10 changed files with 64 additions and 37 deletions

@@ -17,10 +17,10 @@ per-file-ignores =
# THEN, test_hey.py will NOT be excluding D103
# asserts are ok in tests
spiffworkflow-backend/tests/*:S101,D102,D103,D101
spiffworkflow-backend/tests/*:S101,D100,D101,D102,D103
# prefer naming functions descriptively rather than forcing comments
spiffworkflow-backend/src/*:D102,D103,D101
spiffworkflow-backend/src/*:D100,D101,D102,D103
spiffworkflow-backend/bin/keycloak_test_server.py:B950,D
spiffworkflow-backend/conftest.py:S105

@@ -17,10 +17,10 @@ per-file-ignores =
# THEN, test_hey.py will NOT be excluding D103
# asserts are ok in tests
tests/*:S101,D101,D102,D103
tests/*:S101,D100,D101,D102,D103
# prefer naming functions descriptively rather than forcing comments
src/*:D101,D102,D103
src/*:D100,D101,D102,D103
bin/keycloak_test_server.py:B950,D
conftest.py:S105

@@ -50,10 +50,14 @@ from spiffworkflow_backend.models.group import GroupModel # noqa: F401
from spiffworkflow_backend.models.process_instance_metadata import (
ProcessInstanceMetadataModel,
) # noqa: F401
from spiffworkflow_backend.models.serialized_bpmn_definition import SerializedBpmnDefinitionModel # noqa: F401
# it was wrongly ProcessesInstanceData
from spiffworkflow_backend.models.process_instance_data import ProcessInstanceDataModel # noqa: F401
from spiffworkflow_backend.models.serialized_bpmn_definition import (
SerializedBpmnDefinitionModel,
) # noqa: F401
# it was wrongly ProcessesInstanceData
from spiffworkflow_backend.models.process_instance_data import (
ProcessInstanceDataModel,
) # noqa: F401
add_listeners()

@@ -1,8 +1,5 @@
"""Process_instance."""
from __future__ import annotations
from typing import Optional
from spiffworkflow_backend.models.process_instance_data import ProcessInstanceDataModel
from spiffworkflow_backend.models.serialized_bpmn_definition import SerializedBpmnDefinitionModel # noqa: F401
from typing import Any
from typing import cast
@@ -20,6 +17,10 @@ from sqlalchemy.orm import validates
from spiffworkflow_backend.helpers.spiff_enum import SpiffEnum
from spiffworkflow_backend.models.db import db
from spiffworkflow_backend.models.db import SpiffworkflowBaseDBModel
from spiffworkflow_backend.models.process_instance_data import ProcessInstanceDataModel
from spiffworkflow_backend.models.serialized_bpmn_definition import (
SerializedBpmnDefinitionModel,
) # noqa: F401
from spiffworkflow_backend.models.task import Task
from spiffworkflow_backend.models.task import TaskSchema
from spiffworkflow_backend.models.user import UserModel
@@ -63,11 +64,15 @@ class ProcessInstanceModel(SpiffworkflowBaseDBModel):
process_initiator_id: int = db.Column(ForeignKey(UserModel.id), nullable=False) # type: ignore
process_initiator = relationship("UserModel")
serialized_bpmn_definition_id: Optional[int] = db.Column(ForeignKey(SerializedBpmnDefinitionModel.id), nullable=True) # type: ignore
serialized_bpmn_definition_id: int | None = db.Column(
ForeignKey(SerializedBpmnDefinitionModel.id), nullable=True # type: ignore
)
serialized_bpmn_definition = relationship("SerializedBpmnDefinitionModel")
process_instance_data_id: Optional[int] = db.Column(ForeignKey(ProcessInstanceDataModel.id), nullable=True) # type: ignore
process_instance_data = relationship("ProcessInstanceDataModel", cascade="delete") # type: ignore
process_instance_data_id: int | None = db.Column(
ForeignKey(ProcessInstanceDataModel.id), nullable=True # type: ignore
)
process_instance_data = relationship("ProcessInstanceDataModel", cascade="delete")
active_human_tasks = relationship(
"HumanTaskModel",

@@ -1,7 +1,6 @@
"""Process_instance."""
from __future__ import annotations
from spiffworkflow_backend.models.db import db
from spiffworkflow_backend.models.db import SpiffworkflowBaseDBModel
@@ -17,7 +16,6 @@ from spiffworkflow_backend.models.db import SpiffworkflowBaseDBModel
# "root", # guid generated by spiff
# "success", # boolean
class ProcessInstanceDataModel(SpiffworkflowBaseDBModel):
__tablename__ = "process_instance_data"
id: int = db.Column(db.Integer, primary_key=True)
# this is not deferred because there is no reason to query this model if you do not want the runtime_json

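The "not deferred" comment above refers to SQLAlchemy's deferred column loading. Since process_instance_data exists only to carry runtime_json, deferring that column would just add a second SELECT on every access. For contrast, a minimal sketch of what a deferred column looks like in plain SQLAlchemy (illustrative names only, not part of this commit):

# plain SQLAlchemy, independent of the app's db object and base model
from sqlalchemy import Column, Integer, Text
from sqlalchemy.orm import declarative_base, deferred

Base = declarative_base()

class ExampleModel(Base):
    __tablename__ = "example"
    id = Column(Integer, primary_key=True)
    # deferred: loaded lazily via a separate SELECT on first attribute access,
    # which is exactly the extra round trip ProcessInstanceDataModel avoids
    huge_blob = deferred(Column(Text()))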

@@ -41,7 +41,6 @@ from spiffworkflow_backend.models.db import SpiffworkflowBaseDBModel
# added[added_key] = b[added_key]
# final_tuple = [added, removed, changed]
class SerializedBpmnDefinitionModel(SpiffworkflowBaseDBModel):
__tablename__ = "serialized_bpmn_definition"
id: int = db.Column(db.Integer, primary_key=True)
hash: str = db.Column(db.String(255), nullable=False, index=True, unique=True)

@@ -199,7 +199,8 @@ def task_data_update(
raise ApiError(
error_code="process_instance_data_not_found",
message=(
f"Could not find task data related to process instance: {process_instance.id}"
"Could not find task data related to process instance:"
f" {process_instance.id}"
),
)
process_instance_data_dict = json.loads(process_instance_data.runtime_json)
@@ -211,7 +212,9 @@ def task_data_update(
process_instance_data_dict["tasks"][task_id][
"data"
] = new_task_data_dict
process_instance_data.runtime_json = json.dumps(process_instance_data_dict)
process_instance_data.runtime_json = json.dumps(
process_instance_data_dict
)
db.session.add(process_instance_data)
try:
db.session.commit()

@@ -552,7 +552,9 @@ def process_instance_task_list(
step_details = step_detail_query.all()
process_instance_data = process_instance.process_instance_data
process_instance_data_json = "{}" if process_instance_data is None else process_instance_data.runtime_json
process_instance_data_json = (
"{}" if process_instance_data is None else process_instance_data.runtime_json
)
process_instance_data_dict = json.loads(process_instance_data_json)
tasks = process_instance_data_dict["tasks"]
subprocesses = process_instance_data_dict["subprocesses"]

@@ -1,9 +1,4 @@
"""Process_instance_processor."""
from hashlib import sha256
from spiffworkflow_backend.models import serialized_bpmn_definition
from spiffworkflow_backend.models import process_instance_data
from spiffworkflow_backend.models.process_instance_data import ProcessInstanceDataModel
from spiffworkflow_backend.models.serialized_bpmn_definition import SerializedBpmnDefinitionModel # noqa: F401
import _strptime # type: ignore
import decimal
import json
@@ -13,6 +8,7 @@ import re
import time
from datetime import datetime
from datetime import timedelta
from hashlib import sha256
from typing import Any
from typing import Callable
from typing import Dict
@@ -71,6 +67,7 @@ from spiffworkflow_backend.models.message_instance_correlation import (
)
from spiffworkflow_backend.models.process_instance import ProcessInstanceModel
from spiffworkflow_backend.models.process_instance import ProcessInstanceStatus
from spiffworkflow_backend.models.process_instance_data import ProcessInstanceDataModel
from spiffworkflow_backend.models.process_instance_metadata import (
ProcessInstanceMetadataModel,
)
@@ -78,6 +75,9 @@ from spiffworkflow_backend.models.process_model import ProcessModelInfo
from spiffworkflow_backend.models.script_attributes_context import (
ScriptAttributesContext,
)
from spiffworkflow_backend.models.serialized_bpmn_definition import (
SerializedBpmnDefinitionModel,
) # noqa: F401
from spiffworkflow_backend.models.spec_reference import SpecReferenceCache
from spiffworkflow_backend.models.spiff_step_details import SpiffStepDetailsModel
from spiffworkflow_backend.models.user import UserModel
@@ -526,7 +526,7 @@ class ProcessInstanceProcessor:
return {}
serialized_bpmn_definition = process_instance_model.serialized_bpmn_definition
process_instance_data = process_instance_model.process_instance_data
loaded_json: dict = json.loads(serialized_bpmn_definition.static_json or '{}')
loaded_json: dict = json.loads(serialized_bpmn_definition.static_json or "{}")
loaded_json.update(json.loads(process_instance_data.runtime_json))
return loaded_json
@@ -572,7 +572,9 @@ class ProcessInstanceProcessor:
spiff_logger.setLevel(logging.WARNING)
try:
full_bpmn_json = ProcessInstanceProcessor._get_full_bpmn_json(process_instance_model)
full_bpmn_json = ProcessInstanceProcessor._get_full_bpmn_json(
process_instance_model
)
bpmn_process_instance = (
ProcessInstanceProcessor._serializer.deserialize_json(
json.dumps(full_bpmn_json)
@@ -733,7 +735,9 @@ class ProcessInstanceProcessor:
Returns: {process_name: [task_1, task_2, ...], ...}
"""
bpmn_definition_dict = json.loads(self.process_instance_model.serialized_bpmn_definition.static_json or "{}")
bpmn_definition_dict = json.loads(
self.process_instance_model.serialized_bpmn_definition.static_json or "{}"
)
processes: dict[str, list[str]] = {bpmn_definition_dict["spec"]["name"]: []}
for task_name, _task_spec in bpmn_definition_dict["spec"]["task_specs"].items():
processes[bpmn_definition_dict["spec"]["name"]].append(task_name)
@@ -781,7 +785,9 @@ class ProcessInstanceProcessor:
NOTE: this may not fully work for tasks that are NOT call activities since their task_name may not be unique
but in our current use case we only care about the call activities here.
"""
bpmn_definition_dict = json.loads(self.process_instance_model.serialized_bpmn_definition.static_json or "{}")
bpmn_definition_dict = json.loads(
self.process_instance_model.serialized_bpmn_definition.static_json or "{}"
)
spiff_task_json = bpmn_definition_dict["spec"]["task_specs"] or {}
if "subprocess_specs" in bpmn_definition_dict:
for _subprocess_name, subprocess_details in bpmn_definition_dict[
@@ -806,13 +812,17 @@ class ProcessInstanceProcessor:
Also note that subprocess_task_id might in fact be a call activity, because spiff treats
call activities like subprocesses in terms of the serialization.
"""
process_instance_data_dict = json.loads(self.process_instance_model.process_instance_data.runtime_json or '{}')
process_instance_data_dict = json.loads(
self.process_instance_model.process_instance_data.runtime_json or "{}"
)
spiff_task_json = self.get_all_task_specs()
subprocesses_by_child_task_ids = {}
task_typename_by_task_id = {}
if "subprocesses" in process_instance_data_dict:
for subprocess_id, subprocess_details in process_instance_data_dict["subprocesses"].items():
for subprocess_id, subprocess_details in process_instance_data_dict[
"subprocesses"
].items():
for task_id, task_details in subprocess_details["tasks"].items():
subprocesses_by_child_task_ids[task_id] = subprocess_id
task_name = task_details["task_spec"]
@@ -855,7 +865,7 @@ class ProcessInstanceProcessor:
Expects the save method to commit it.
"""
bpmn_dict = json.loads(self.serialize())
bpmn_dict_keys = ('spec', 'subprocess_specs', 'serializer_version')
bpmn_dict_keys = ("spec", "subprocess_specs", "serializer_version")
bpmn_spec_dict = {}
process_instance_data_dict = {}
for bpmn_key in bpmn_dict.keys():
@@ -865,12 +875,20 @@ class ProcessInstanceProcessor:
process_instance_data_dict[bpmn_key] = bpmn_dict[bpmn_key]
if self.process_instance_model.serialized_bpmn_definition_id is None:
new_hash_digest = sha256(json.dumps(bpmn_spec_dict, sort_keys=True).encode('utf8')).hexdigest()
serialized_bpmn_definition = SerializedBpmnDefinitionModel.query.filter_by(hash=new_hash_digest).first()
new_hash_digest = sha256(
json.dumps(bpmn_spec_dict, sort_keys=True).encode("utf8")
).hexdigest()
serialized_bpmn_definition = SerializedBpmnDefinitionModel.query.filter_by(
hash=new_hash_digest
).first()
if serialized_bpmn_definition is None:
serialized_bpmn_definition = SerializedBpmnDefinitionModel(hash=new_hash_digest, static_json=json.dumps(bpmn_spec_dict))
serialized_bpmn_definition = SerializedBpmnDefinitionModel(
hash=new_hash_digest, static_json=json.dumps(bpmn_spec_dict)
)
db.session.add(serialized_bpmn_definition)
self.process_instance_model.serialized_bpmn_definition = serialized_bpmn_definition
self.process_instance_model.serialized_bpmn_definition = (
serialized_bpmn_definition
)
process_instance_data = None
if self.process_instance_model.process_instance_data_id is None:

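Taken together, the save path above splits the serializer output into a static spec part and a per-instance runtime part, stores the static part once per unique content keyed by its sha256 digest, and _get_full_bpmn_json later merges the two halves back into the document the serializer expects. A minimal standalone sketch of that round trip, with a plain dict standing in for the serialized_bpmn_definition table (function names and the store are illustrative, not part of this commit):

import hashlib
import json

# stands in for the serialized_bpmn_definition table: hash -> static_json
definition_store: dict[str, str] = {}

STATIC_KEYS = ("spec", "subprocess_specs", "serializer_version")

def split_and_store(bpmn_dict: dict) -> tuple[str, str]:
    """Split serializer output; store the static spec once per unique content."""
    static_part = {k: v for k, v in bpmn_dict.items() if k in STATIC_KEYS}
    runtime_part = {k: v for k, v in bpmn_dict.items() if k not in STATIC_KEYS}
    static_json = json.dumps(static_part, sort_keys=True)
    hash_digest = hashlib.sha256(static_json.encode("utf8")).hexdigest()
    # content-addressed: identical specs share one stored row
    definition_store.setdefault(hash_digest, static_json)
    return hash_digest, json.dumps(runtime_part)

def load_full_bpmn_json(hash_digest: str, runtime_json: str) -> dict:
    """Reassemble the full document by overlaying runtime data on the static spec."""
    full_bpmn: dict = json.loads(definition_store.get(hash_digest, "{}"))
    full_bpmn.update(json.loads(runtime_json))
    return full_bpmn

Starting two process instances from the same model would call split_and_store with identical spec content, so the second call reuses the stored static_json and only the runtime halves differ.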

@@ -1,10 +1,8 @@
"""Test Process Api Blueprint."""
import io
from typing import Set
import json
import os
import time
import json_delta
from typing import Any
from typing import Dict