pyl and cleaned up debug code w/ burnettk

This commit is contained in:
jasquat 2023-03-15 12:15:48 -04:00
parent 14fc7debc1
commit 2e9fa0e4b4
8 changed files with 158 additions and 96 deletions

View File

@ -1,5 +1,3 @@
from __future__ import with_statement
import logging import logging
from logging.config import fileConfig from logging.config import fileConfig

View File

@ -1,7 +1,7 @@
from __future__ import annotations from __future__ import annotations
from sqlalchemy.orm import relationship
from sqlalchemy import ForeignKey from sqlalchemy import ForeignKey
from sqlalchemy.orm import relationship
from spiffworkflow_backend.models.db import db from spiffworkflow_backend.models.db import db
from spiffworkflow_backend.models.db import SpiffworkflowBaseDBModel from spiffworkflow_backend.models.db import SpiffworkflowBaseDBModel

View File

@ -1,8 +1,5 @@
"""Task.""" """Task."""
import enum import enum
from sqlalchemy.orm import relationship
from spiffworkflow_backend.models.task_definition import TaskDefinitionModel
from dataclasses import dataclass from dataclasses import dataclass
from typing import Any from typing import Any
from typing import Optional from typing import Optional
@ -13,10 +10,12 @@ from marshmallow import Schema
from marshmallow_enum import EnumField # type: ignore from marshmallow_enum import EnumField # type: ignore
from SpiffWorkflow.task import TaskStateNames # type: ignore from SpiffWorkflow.task import TaskStateNames # type: ignore
from sqlalchemy import ForeignKey from sqlalchemy import ForeignKey
from sqlalchemy.orm import relationship
from spiffworkflow_backend.models.bpmn_process import BpmnProcessModel from spiffworkflow_backend.models.bpmn_process import BpmnProcessModel
from spiffworkflow_backend.models.db import db from spiffworkflow_backend.models.db import db
from spiffworkflow_backend.models.db import SpiffworkflowBaseDBModel from spiffworkflow_backend.models.db import SpiffworkflowBaseDBModel
from spiffworkflow_backend.models.task_definition import TaskDefinitionModel
class MultiInstanceType(enum.Enum): class MultiInstanceType(enum.Enum):

View File

@ -53,7 +53,6 @@ from SpiffWorkflow.task import TaskState
from SpiffWorkflow.util.deep_merge import DeepMerge # type: ignore from SpiffWorkflow.util.deep_merge import DeepMerge # type: ignore
from spiffworkflow_backend.exceptions.api_error import ApiError from spiffworkflow_backend.exceptions.api_error import ApiError
from spiffworkflow_backend.models import task_definition
from spiffworkflow_backend.models.bpmn_process import BpmnProcessModel from spiffworkflow_backend.models.bpmn_process import BpmnProcessModel
from spiffworkflow_backend.models.bpmn_process_definition import ( from spiffworkflow_backend.models.bpmn_process_definition import (
BpmnProcessDefinitionModel, BpmnProcessDefinitionModel,
@ -457,7 +456,7 @@ class ProcessInstanceProcessor:
self.process_instance_model = process_instance_model self.process_instance_model = process_instance_model
self.process_model_service = ProcessModelService() self.process_model_service = ProcessModelService()
bpmn_process_spec = None bpmn_process_spec = None
self.full_bpmn_process_dict = {} self.full_bpmn_process_dict: dict = {}
# this caches the bpmn_process_definition_identifier and task_identifier back to the bpmn_process_id # this caches the bpmn_process_definition_identifier and task_identifier back to the bpmn_process_id
# in the database. This is to cut down on database queries while adding new tasks to the database. # in the database. This is to cut down on database queries while adding new tasks to the database.
@ -465,7 +464,7 @@ class ProcessInstanceProcessor:
# { "bpmn_process_definition_identifier": { "task_identifier": task_definition } } # { "bpmn_process_definition_identifier": { "task_identifier": task_definition } }
# To use from a spiff_task: # To use from a spiff_task:
# [spiff_task.workflow.spec.name][spiff_task.task_spec.name] # [spiff_task.workflow.spec.name][spiff_task.task_spec.name]
self.bpmn_definition_to_task_definitions_mappings = {} self.bpmn_definition_to_task_definitions_mappings: dict = {}
subprocesses: Optional[IdToBpmnProcessSpecMapping] = None subprocesses: Optional[IdToBpmnProcessSpecMapping] = None
if process_instance_model.bpmn_process_definition_id is None: if process_instance_model.bpmn_process_definition_id is None:
@ -482,13 +481,15 @@ class ProcessInstanceProcessor:
) )
try: try:
(self.bpmn_process_instance, self.full_bpmn_process_dict, self.bpmn_definition_to_task_definitions_mappings) = ( (
self.__get_bpmn_process_instance( self.bpmn_process_instance,
process_instance_model, self.full_bpmn_process_dict,
bpmn_process_spec, self.bpmn_definition_to_task_definitions_mappings,
validate_only, ) = self.__get_bpmn_process_instance(
subprocesses=subprocesses, process_instance_model,
) bpmn_process_spec,
validate_only,
subprocesses=subprocesses,
) )
self.set_script_engine(self.bpmn_process_instance) self.set_script_engine(self.bpmn_process_instance)
@ -549,18 +550,27 @@ class ProcessInstanceProcessor:
@classmethod @classmethod
def _update_bpmn_definition_mappings( def _update_bpmn_definition_mappings(
cls, bpmn_definition_to_task_definitions_mappings: dict, bpmn_process_definition_identifier: str, task_definition: TaskDefinitionModel cls,
bpmn_definition_to_task_definitions_mappings: dict,
bpmn_process_definition_identifier: str,
task_definition: TaskDefinitionModel,
) -> None: ) -> None:
# import pdb; pdb.set_trace() if (
# if bpmn_process_definition_identifier == 'test_process_to_call' and task_definition.bpmn_identifier == "Root": bpmn_process_definition_identifier
# import pdb; pdb.set_trace() not in bpmn_definition_to_task_definitions_mappings
if bpmn_process_definition_identifier not in bpmn_definition_to_task_definitions_mappings: ):
bpmn_definition_to_task_definitions_mappings[bpmn_process_definition_identifier] = {} bpmn_definition_to_task_definitions_mappings[
bpmn_definition_to_task_definitions_mappings[bpmn_process_definition_identifier][task_definition.bpmn_identifier] = task_definition bpmn_process_definition_identifier
] = {}
bpmn_definition_to_task_definitions_mappings[
bpmn_process_definition_identifier
][task_definition.bpmn_identifier] = task_definition
@classmethod @classmethod
def _get_definition_dict_for_bpmn_process_definition( def _get_definition_dict_for_bpmn_process_definition(
cls, bpmn_process_definition: BpmnProcessDefinitionModel, bpmn_definition_to_task_definitions_mappings: dict cls,
bpmn_process_definition: BpmnProcessDefinitionModel,
bpmn_definition_to_task_definitions_mappings: dict,
) -> dict: ) -> dict:
task_definitions = TaskDefinitionModel.query.filter_by( task_definitions = TaskDefinitionModel.query.filter_by(
bpmn_process_definition_id=bpmn_process_definition.id bpmn_process_definition_id=bpmn_process_definition.id
@ -571,7 +581,11 @@ class ProcessInstanceProcessor:
bpmn_process_definition_dict["task_specs"][ bpmn_process_definition_dict["task_specs"][
task_definition.bpmn_identifier task_definition.bpmn_identifier
] = task_definition.properties_json ] = task_definition.properties_json
cls._update_bpmn_definition_mappings(bpmn_definition_to_task_definitions_mappings, bpmn_process_definition.bpmn_identifier, task_definition) cls._update_bpmn_definition_mappings(
bpmn_definition_to_task_definitions_mappings,
bpmn_process_definition.bpmn_identifier,
task_definition,
)
return bpmn_process_definition_dict return bpmn_process_definition_dict
@classmethod @classmethod
@ -618,7 +632,11 @@ class ProcessInstanceProcessor:
task_definition.bpmn_process_definition_id task_definition.bpmn_process_definition_id
] ]
) )
cls._update_bpmn_definition_mappings(bpmn_definition_to_task_definitions_mappings, bpmn_subprocess_definition_bpmn_identifier, task_definition) cls._update_bpmn_definition_mappings(
bpmn_definition_to_task_definitions_mappings,
bpmn_subprocess_definition_bpmn_identifier,
task_definition,
)
spiff_bpmn_process_dict["subprocess_specs"][ spiff_bpmn_process_dict["subprocess_specs"][
bpmn_subprocess_definition_bpmn_identifier bpmn_subprocess_definition_bpmn_identifier
]["task_specs"][ ]["task_specs"][
@ -667,7 +685,9 @@ class ProcessInstanceProcessor:
@classmethod @classmethod
def _get_full_bpmn_process_dict( def _get_full_bpmn_process_dict(
cls, process_instance_model: ProcessInstanceModel, bpmn_definition_to_task_definitions_mappings: dict cls,
process_instance_model: ProcessInstanceModel,
bpmn_definition_to_task_definitions_mappings: dict,
) -> dict: ) -> dict:
if process_instance_model.bpmn_process_definition_id is None: if process_instance_model.bpmn_process_definition_id is None:
return {} return {}
@ -682,11 +702,14 @@ class ProcessInstanceProcessor:
if bpmn_process_definition is not None: if bpmn_process_definition is not None:
spiff_bpmn_process_dict["spec"] = ( spiff_bpmn_process_dict["spec"] = (
cls._get_definition_dict_for_bpmn_process_definition( cls._get_definition_dict_for_bpmn_process_definition(
bpmn_process_definition, bpmn_definition_to_task_definitions_mappings bpmn_process_definition,
bpmn_definition_to_task_definitions_mappings,
) )
) )
cls._set_definition_dict_for_bpmn_subprocess_definitions( cls._set_definition_dict_for_bpmn_subprocess_definitions(
bpmn_process_definition, spiff_bpmn_process_dict, bpmn_definition_to_task_definitions_mappings bpmn_process_definition,
spiff_bpmn_process_dict,
bpmn_definition_to_task_definitions_mappings,
) )
bpmn_process = process_instance_model.bpmn_process bpmn_process = process_instance_model.bpmn_process
@ -755,8 +778,7 @@ class ProcessInstanceProcessor:
subprocesses: Optional[IdToBpmnProcessSpecMapping] = None, subprocesses: Optional[IdToBpmnProcessSpecMapping] = None,
) -> Tuple[BpmnWorkflow, dict, dict]: ) -> Tuple[BpmnWorkflow, dict, dict]:
full_bpmn_process_dict = {} full_bpmn_process_dict = {}
bpmn_definition_to_task_definitions_mappings = {} bpmn_definition_to_task_definitions_mappings: dict = {}
# print("GET BPMN PROCESS INSTANCE")
if process_instance_model.bpmn_process_definition_id is not None: if process_instance_model.bpmn_process_definition_id is not None:
# turn off logging to avoid duplicated spiff logs # turn off logging to avoid duplicated spiff logs
spiff_logger = logging.getLogger("spiff") spiff_logger = logging.getLogger("spiff")
@ -766,10 +788,10 @@ class ProcessInstanceProcessor:
try: try:
full_bpmn_process_dict = ( full_bpmn_process_dict = (
ProcessInstanceProcessor._get_full_bpmn_process_dict( ProcessInstanceProcessor._get_full_bpmn_process_dict(
process_instance_model, bpmn_definition_to_task_definitions_mappings process_instance_model,
bpmn_definition_to_task_definitions_mappings,
) )
) )
# print("WE GOT FULL BPMN PROCESS DICT")
bpmn_process_instance = ( bpmn_process_instance = (
ProcessInstanceProcessor._serializer.workflow_from_dict( ProcessInstanceProcessor._serializer.workflow_from_dict(
full_bpmn_process_dict full_bpmn_process_dict
@ -782,17 +804,19 @@ class ProcessInstanceProcessor:
ProcessInstanceProcessor.set_script_engine(bpmn_process_instance) ProcessInstanceProcessor.set_script_engine(bpmn_process_instance)
else: else:
# print("WE NO HAVE FULL BPMN YET")
bpmn_process_instance = ( bpmn_process_instance = (
ProcessInstanceProcessor.get_bpmn_process_instance_from_workflow_spec( ProcessInstanceProcessor.get_bpmn_process_instance_from_workflow_spec(
spec, subprocesses spec, subprocesses
) )
) )
# import pdb; pdb.set_trace()
bpmn_process_instance.data[ bpmn_process_instance.data[
ProcessInstanceProcessor.VALIDATION_PROCESS_KEY ProcessInstanceProcessor.VALIDATION_PROCESS_KEY
] = validate_only ] = validate_only
return (bpmn_process_instance, full_bpmn_process_dict, bpmn_definition_to_task_definitions_mappings) return (
bpmn_process_instance,
full_bpmn_process_dict,
bpmn_definition_to_task_definitions_mappings,
)
def slam_in_data(self, data: dict) -> None: def slam_in_data(self, data: dict) -> None:
"""Slam_in_data.""" """Slam_in_data."""
@ -1063,13 +1087,7 @@ class ProcessInstanceProcessor:
bpmn_process_definition: Optional[BpmnProcessDefinitionModel] = ( bpmn_process_definition: Optional[BpmnProcessDefinitionModel] = (
BpmnProcessDefinitionModel.query.filter_by(hash=new_hash_digest).first() BpmnProcessDefinitionModel.query.filter_by(hash=new_hash_digest).first()
) )
# print(f"process_bpmn_properties: {process_bpmn_properties}")
# import pdb; pdb.set_trace()
# if process_bpmn_identifier == "test_process_to_call":
# import pdb; pdb.set_trace()
# # print("HEY22")
# print(f"self.process_instance_model.id: {self.process_instance_model.id}")
if bpmn_process_definition is None: if bpmn_process_definition is None:
task_specs = process_bpmn_properties.pop("task_specs") task_specs = process_bpmn_properties.pop("task_specs")
bpmn_process_definition = BpmnProcessDefinitionModel( bpmn_process_definition = BpmnProcessDefinitionModel(
@ -1088,13 +1106,23 @@ class ProcessInstanceProcessor:
) )
db.session.add(task_definition) db.session.add(task_definition)
if store_bpmn_definition_mappings: if store_bpmn_definition_mappings:
self._update_bpmn_definition_mappings(self.bpmn_definition_to_task_definitions_mappings, process_bpmn_identifier, task_definition) self._update_bpmn_definition_mappings(
self.bpmn_definition_to_task_definitions_mappings,
process_bpmn_identifier,
task_definition,
)
elif store_bpmn_definition_mappings: elif store_bpmn_definition_mappings:
# this should only ever happen when new process instances use pre-existing bpmn process definitions # otherwise this should get populated on processor initialization
# otherwise this should get populated on processor initialization # otherwise this should get populated on processor initialization
task_definitions = TaskDefinitionModel.query.filter_by(bpmn_process_definition_id=bpmn_process_definition.id).all() task_definitions = TaskDefinitionModel.query.filter_by(
bpmn_process_definition_id=bpmn_process_definition.id
).all()
for task_definition in task_definitions: for task_definition in task_definitions:
self._update_bpmn_definition_mappings(self.bpmn_definition_to_task_definitions_mappings, process_bpmn_identifier, task_definition) self._update_bpmn_definition_mappings(
self.bpmn_definition_to_task_definitions_mappings,
process_bpmn_identifier,
task_definition,
)
if bpmn_process_definition_parent is not None: if bpmn_process_definition_parent is not None:
bpmn_process_definition_relationship = ( bpmn_process_definition_relationship = (
@ -1113,16 +1141,19 @@ class ProcessInstanceProcessor:
def _add_bpmn_process_definitions(self, bpmn_spec_dict: dict) -> None: def _add_bpmn_process_definitions(self, bpmn_spec_dict: dict) -> None:
# store only if mappings is currently empty. this also would mean this is a new instance that has never saved before # store only if mappings is currently empty. this also would mean this is a new instance that has never saved before
# print("WE STORE BPM PROCESS DEF") store_bpmn_definition_mappings = (
store_bpmn_definition_mappings = not self.bpmn_definition_to_task_definitions_mappings not self.bpmn_definition_to_task_definitions_mappings
)
bpmn_process_definition_parent = self._store_bpmn_process_definition( bpmn_process_definition_parent = self._store_bpmn_process_definition(
bpmn_spec_dict["spec"], store_bpmn_definition_mappings=store_bpmn_definition_mappings bpmn_spec_dict["spec"],
store_bpmn_definition_mappings=store_bpmn_definition_mappings,
) )
for process_bpmn_properties in bpmn_spec_dict["subprocess_specs"].values(): for process_bpmn_properties in bpmn_spec_dict["subprocess_specs"].values():
self._store_bpmn_process_definition( self._store_bpmn_process_definition(
process_bpmn_properties, bpmn_process_definition_parent, store_bpmn_definition_mappings=store_bpmn_definition_mappings process_bpmn_properties,
bpmn_process_definition_parent,
store_bpmn_definition_mappings=store_bpmn_definition_mappings,
) )
# import pdb; pdb.set_trace()
self.process_instance_model.bpmn_process_definition = ( self.process_instance_model.bpmn_process_definition = (
bpmn_process_definition_parent bpmn_process_definition_parent
) )
@ -1132,7 +1163,6 @@ class ProcessInstanceProcessor:
Expects the save method to commit it. Expects the save method to commit it.
""" """
# print("WE SAVE THINGS")
bpmn_dict = self.serialize() bpmn_dict = self.serialize()
bpmn_dict_keys = ("spec", "subprocess_specs", "serializer_version") bpmn_dict_keys = ("spec", "subprocess_specs", "serializer_version")
process_instance_data_dict = {} process_instance_data_dict = {}
@ -1143,12 +1173,9 @@ class ProcessInstanceProcessor:
else: else:
process_instance_data_dict[bpmn_key] = bpmn_dict[bpmn_key] process_instance_data_dict[bpmn_key] = bpmn_dict[bpmn_key]
# FIXME: always save new hash until we get updated Spiff without loopresettask # we may have to already process bpmn_definitions if we ever care about the Root task again
# if self.process_instance_model.bpmn_process_definition_id is None: if self.process_instance_model.bpmn_process_definition_id is None:
self._add_bpmn_process_definitions(bpmn_spec_dict) self._add_bpmn_process_definitions(bpmn_spec_dict)
# import pdb; pdb.set_trace()
# print("WE NOW STORE BPMN PROCESS STUFFS")
# print(f"bpmn_definition_to_task_definitions_mappings: {self.bpmn_definition_to_task_definitions_mappings}")
subprocesses = process_instance_data_dict.pop("subprocesses") subprocesses = process_instance_data_dict.pop("subprocesses")
bpmn_process_parent, new_task_models, new_json_data_dicts = ( bpmn_process_parent, new_task_models, new_json_data_dicts = (
@ -1156,7 +1183,7 @@ class ProcessInstanceProcessor:
bpmn_process_dict=process_instance_data_dict, bpmn_process_dict=process_instance_data_dict,
process_instance=self.process_instance_model, process_instance=self.process_instance_model,
bpmn_definition_to_task_definitions_mappings=self.bpmn_definition_to_task_definitions_mappings, bpmn_definition_to_task_definitions_mappings=self.bpmn_definition_to_task_definitions_mappings,
spiff_workflow=self.bpmn_process_instance spiff_workflow=self.bpmn_process_instance,
) )
) )
for subprocess_task_id, subprocess_properties in subprocesses.items(): for subprocess_task_id, subprocess_properties in subprocesses.items():
@ -1170,7 +1197,7 @@ class ProcessInstanceProcessor:
bpmn_process_parent=bpmn_process_parent, bpmn_process_parent=bpmn_process_parent,
bpmn_process_guid=subprocess_task_id, bpmn_process_guid=subprocess_task_id,
bpmn_definition_to_task_definitions_mappings=self.bpmn_definition_to_task_definitions_mappings, bpmn_definition_to_task_definitions_mappings=self.bpmn_definition_to_task_definitions_mappings,
spiff_workflow=self.bpmn_process_instance spiff_workflow=self.bpmn_process_instance,
) )
new_task_models.update(subprocess_new_task_models) new_task_models.update(subprocess_new_task_models)
new_json_data_dicts.update(subprocess_new_json_data_models) new_json_data_dicts.update(subprocess_new_json_data_models)
@ -1180,7 +1207,6 @@ class ProcessInstanceProcessor:
def save(self) -> None: def save(self) -> None:
"""Saves the current state of this processor to the database.""" """Saves the current state of this processor to the database."""
# print("WE IN SAVE")
self._add_bpmn_json_records() self._add_bpmn_json_records()
self.process_instance_model.spiff_serializer_version = self.SERIALIZER_VERSION self.process_instance_model.spiff_serializer_version = self.SERIALIZER_VERSION
@ -1308,7 +1334,7 @@ class ProcessInstanceProcessor:
try: try:
self.bpmn_process_instance.catch(event_definition) self.bpmn_process_instance.catch(event_definition)
except Exception as e: except Exception as e:
print(e) print(e)
# TODO: do_engine_steps without a lock # TODO: do_engine_steps without a lock
self.do_engine_steps(save=True) self.do_engine_steps(save=True)

View File

@ -3,6 +3,7 @@ from hashlib import sha256
from typing import Optional from typing import Optional
from typing import Tuple from typing import Tuple
from typing import TypedDict from typing import TypedDict
from uuid import UUID
from flask import current_app from flask import current_app
from SpiffWorkflow.bpmn.serializer.workflow import BpmnWorkflow # type: ignore from SpiffWorkflow.bpmn.serializer.workflow import BpmnWorkflow # type: ignore
@ -11,7 +12,6 @@ from SpiffWorkflow.task import Task as SpiffTask # type: ignore
from SpiffWorkflow.task import TaskStateNames from SpiffWorkflow.task import TaskStateNames
from sqlalchemy.dialects.mysql import insert as mysql_insert from sqlalchemy.dialects.mysql import insert as mysql_insert
from sqlalchemy.dialects.postgresql import insert as postgres_insert from sqlalchemy.dialects.postgresql import insert as postgres_insert
from uuid import UUID
from spiffworkflow_backend.models.bpmn_process import BpmnProcessModel from spiffworkflow_backend.models.bpmn_process import BpmnProcessModel
from spiffworkflow_backend.models.db import db from spiffworkflow_backend.models.db import db
@ -88,13 +88,21 @@ class TaskService:
new_json_data_dicts: dict[str, JsonDataDict] = {} new_json_data_dicts: dict[str, JsonDataDict] = {}
if task_model is None: if task_model is None:
bpmn_process, new_task_models, new_json_data_dicts = cls.task_bpmn_process( bpmn_process, new_task_models, new_json_data_dicts = cls.task_bpmn_process(
spiff_task, process_instance, serializer, bpmn_definition_to_task_definitions_mappings=bpmn_definition_to_task_definitions_mappings spiff_task,
process_instance,
serializer,
bpmn_definition_to_task_definitions_mappings=bpmn_definition_to_task_definitions_mappings,
) )
task_model = TaskModel.query.filter_by(guid=spiff_task_guid).first() task_model = TaskModel.query.filter_by(guid=spiff_task_guid).first()
if task_model is None: if task_model is None:
task_definition = bpmn_definition_to_task_definitions_mappings[spiff_task.workflow.spec.name][spiff_task.task_spec.name] task_definition = bpmn_definition_to_task_definitions_mappings[
spiff_task.workflow.spec.name
][spiff_task.task_spec.name]
task_model = TaskModel( task_model = TaskModel(
guid=spiff_task_guid, bpmn_process_id=bpmn_process.id, process_instance_id=process_instance.id, task_definition_id=task_definition.id guid=spiff_task_guid,
bpmn_process_id=bpmn_process.id,
process_instance_id=process_instance.id,
task_definition_id=task_definition.id,
) )
return (bpmn_process, task_model, new_task_models, new_json_data_dicts) return (bpmn_process, task_model, new_task_models, new_json_data_dicts)
@ -135,9 +143,7 @@ class TaskService:
spiff_workflow = spiff_task.workflow._get_outermost_workflow() spiff_workflow = spiff_task.workflow._get_outermost_workflow()
bpmn_process, new_task_models, new_json_data_dicts = ( bpmn_process, new_task_models, new_json_data_dicts = (
cls.add_bpmn_process( cls.add_bpmn_process(
bpmn_process_dict=serializer.workflow_to_dict( bpmn_process_dict=serializer.workflow_to_dict(spiff_workflow),
spiff_workflow
),
process_instance=process_instance, process_instance=process_instance,
bpmn_definition_to_task_definitions_mappings=bpmn_definition_to_task_definitions_mappings, bpmn_definition_to_task_definitions_mappings=bpmn_definition_to_task_definitions_mappings,
spiff_workflow=spiff_workflow, spiff_workflow=spiff_workflow,
@ -157,7 +163,6 @@ class TaskService:
bpmn_process_guid=subprocess_guid, bpmn_process_guid=subprocess_guid,
bpmn_definition_to_task_definitions_mappings=bpmn_definition_to_task_definitions_mappings, bpmn_definition_to_task_definitions_mappings=bpmn_definition_to_task_definitions_mappings,
spiff_workflow=spiff_workflow, spiff_workflow=spiff_workflow,
) )
) )
return (bpmn_process, new_task_models, new_json_data_dicts) return (bpmn_process, new_task_models, new_json_data_dicts)
@ -195,9 +200,12 @@ class TaskService:
if bpmn_process is None: if bpmn_process is None:
bpmn_process_is_new = True bpmn_process_is_new = True
bpmn_process = BpmnProcessModel(guid=bpmn_process_guid) bpmn_process = BpmnProcessModel(guid=bpmn_process_guid)
# Point the root id to the Start task instead of the Root task
# since we are ignoring the Root task.
for task_id, task_properties in tasks.items(): for task_id, task_properties in tasks.items():
if task_properties['task_spec'] == 'Start': if task_properties["task_spec"] == "Start":
bpmn_process_dict['root'] = task_id bpmn_process_dict["root"] = task_id
bpmn_process.properties_json = bpmn_process_dict bpmn_process.properties_json = bpmn_process_dict
@ -223,21 +231,26 @@ class TaskService:
if bpmn_process_is_new: if bpmn_process_is_new:
for task_id, task_properties in tasks.items(): for task_id, task_properties in tasks.items():
if task_properties['task_spec'] == 'Root': # The Root task is added to the spec by Spiff when the bpmn process is instantiated
# within Spiff. We do not actually need it and it's missing from our initial
# bpmn process definition so let's avoid using it.
if task_properties["task_spec"] == "Root":
continue continue
if task_properties['task_spec'] == 'Start': if task_properties["task_spec"] == "Start":
task_properties['parent'] = None task_properties["parent"] = None
process_dict = bpmn_process.properties_json
process_dict['root'] = task_id
bpmn_process.properties_json = process_dict
db.session.add(bpmn_process)
task_data_dict = task_properties.pop("data") task_data_dict = task_properties.pop("data")
state_int = task_properties["state"] state_int = task_properties["state"]
task_model = TaskModel.query.filter_by(guid=task_id).first() task_model = TaskModel.query.filter_by(guid=task_id).first()
if task_model is None: if task_model is None:
spiff_task = spiff_workflow.get_task(UUID(task_id)) spiff_task = spiff_workflow.get_task(UUID(task_id))
task_model = cls._create_task(bpmn_process, process_instance, spiff_task, bpmn_definition_to_task_definitions_mappings) task_model = cls._create_task(
bpmn_process,
process_instance,
spiff_task,
bpmn_definition_to_task_definitions_mappings,
)
task_model.state = TaskStateNames[state_int] task_model.state = TaskStateNames[state_int]
task_model.properties_json = task_properties task_model.properties_json = task_properties
@ -263,10 +276,20 @@ class TaskService:
return json_data_dict return json_data_dict
@classmethod @classmethod
def _create_task(cls, bpmn_process: BpmnProcessModel, process_instance: ProcessInstanceModel, spiff_task: SpiffTask, bpmn_definition_to_task_definitions_mappings: dict) -> TaskModel: def _create_task(
cls,
task_definition = bpmn_definition_to_task_definitions_mappings[spiff_task.workflow.spec.name][spiff_task.task_spec.name] bpmn_process: BpmnProcessModel,
process_instance: ProcessInstanceModel,
spiff_task: SpiffTask,
bpmn_definition_to_task_definitions_mappings: dict,
) -> TaskModel:
task_definition = bpmn_definition_to_task_definitions_mappings[
spiff_task.workflow.spec.name
][spiff_task.task_spec.name]
task_model = TaskModel( task_model = TaskModel(
guid=str(spiff_task.id), bpmn_process_id=bpmn_process.id, process_instance_id=process_instance.id, task_definition_id=task_definition.id guid=str(spiff_task.id),
bpmn_process_id=bpmn_process.id,
process_instance_id=process_instance.id,
task_definition_id=task_definition.id,
) )
return task_model return task_model

View File

@ -62,7 +62,9 @@ class TaskModelSavingDelegate(EngineStepDelegate):
) -> None: ) -> None:
self.secondary_engine_step_delegate = secondary_engine_step_delegate self.secondary_engine_step_delegate = secondary_engine_step_delegate
self.process_instance = process_instance self.process_instance = process_instance
self.bpmn_definition_to_task_definitions_mappings = bpmn_definition_to_task_definitions_mappings self.bpmn_definition_to_task_definitions_mappings = (
bpmn_definition_to_task_definitions_mappings
)
self.current_task_model: Optional[TaskModel] = None self.current_task_model: Optional[TaskModel] = None
self.task_models: dict[str, TaskModel] = {} self.task_models: dict[str, TaskModel] = {}
@ -80,7 +82,10 @@ class TaskModelSavingDelegate(EngineStepDelegate):
if self.should_update_task_model(): if self.should_update_task_model():
_bpmn_process, task_model, new_task_models, new_json_data_dicts = ( _bpmn_process, task_model, new_task_models, new_json_data_dicts = (
TaskService.find_or_create_task_model_from_spiff_task( TaskService.find_or_create_task_model_from_spiff_task(
spiff_task, self.process_instance, self.serializer, bpmn_definition_to_task_definitions_mappings=self.bpmn_definition_to_task_definitions_mappings spiff_task,
self.process_instance,
self.serializer,
bpmn_definition_to_task_definitions_mappings=self.bpmn_definition_to_task_definitions_mappings,
) )
) )
self.current_task_model = task_model self.current_task_model = task_model
@ -123,7 +128,10 @@ class TaskModelSavingDelegate(EngineStepDelegate):
): ):
_bpmn_process, task_model, new_task_models, new_json_data_dicts = ( _bpmn_process, task_model, new_task_models, new_json_data_dicts = (
TaskService.find_or_create_task_model_from_spiff_task( TaskService.find_or_create_task_model_from_spiff_task(
waiting_spiff_task, self.process_instance, self.serializer, bpmn_definition_to_task_definitions_mappings=self.bpmn_definition_to_task_definitions_mappings waiting_spiff_task,
self.process_instance,
self.serializer,
bpmn_definition_to_task_definitions_mappings=self.bpmn_definition_to_task_definitions_mappings,
) )
) )
self.task_models.update(new_task_models) self.task_models.update(new_task_models)

View File

@ -33,7 +33,6 @@ class TestErrorHandlingService(BaseTest):
process_instance = ProcessInstanceService.create_process_instance_from_process_model_identifier( process_instance = ProcessInstanceService.create_process_instance_from_process_model_identifier(
process_model.id, user process_model.id, user
) )
print(f"process_instance.id: {process_instance.id}")
pip = ProcessInstanceProcessor(process_instance) pip = ProcessInstanceProcessor(process_instance)
with pytest.raises(ApiError) as e: with pytest.raises(ApiError) as e:
pip.do_engine_steps(save=True) pip.do_engine_steps(save=True)

View File

@ -378,24 +378,33 @@ class TestProcessInstanceProcessor(BaseTest):
assert len(all_spiff_tasks) > 1 assert len(all_spiff_tasks) > 1
for spiff_task in all_spiff_tasks: for spiff_task in all_spiff_tasks:
assert spiff_task.state == TaskState.COMPLETED assert spiff_task.state == TaskState.COMPLETED
if spiff_task.task_spec.name == 'test_process_to_call_script': if spiff_task.task_spec.name == "test_process_to_call_script":
task = TaskModel.query.filter_by(guid=str(spiff_task.id)).first() task = TaskModel.query.filter_by(guid=str(spiff_task.id)).first()
assert task.task_definition_id is not None assert task.task_definition_id is not None
task_definition = task.task_definition task_definition = task.task_definition
assert task_definition.bpmn_identifier == 'test_process_to_call_script' assert task_definition.bpmn_identifier == "test_process_to_call_script"
assert task_definition.bpmn_process_definition.bpmn_identifier == 'test_process_to_call' assert (
elif spiff_task.task_spec.name == 'top_level_subprocess_script': task_definition.bpmn_process_definition.bpmn_identifier
== "test_process_to_call"
)
elif spiff_task.task_spec.name == "top_level_subprocess_script":
task = TaskModel.query.filter_by(guid=str(spiff_task.id)).first() task = TaskModel.query.filter_by(guid=str(spiff_task.id)).first()
assert task.task_definition_id is not None assert task.task_definition_id is not None
task_definition = task.task_definition task_definition = task.task_definition
assert task_definition.bpmn_identifier == 'top_level_subprocess_script' assert task_definition.bpmn_identifier == "top_level_subprocess_script"
assert task_definition.bpmn_process_definition.bpmn_identifier == 'top_level_subprocess' assert (
if spiff_task.task_spec.name == 'top_level_script': task_definition.bpmn_process_definition.bpmn_identifier
== "top_level_subprocess"
)
if spiff_task.task_spec.name == "top_level_script":
task = TaskModel.query.filter_by(guid=str(spiff_task.id)).first() task = TaskModel.query.filter_by(guid=str(spiff_task.id)).first()
assert task.task_definition_id is not None assert task.task_definition_id is not None
task_definition = task.task_definition task_definition = task.task_definition
assert task_definition.bpmn_identifier == 'top_level_script' assert task_definition.bpmn_identifier == "top_level_script"
assert task_definition.bpmn_process_definition.bpmn_identifier == 'top_level_process' assert (
task_definition.bpmn_process_definition.bpmn_identifier
== "top_level_process"
)
# FIXME: Checking task data cannot work with the feature/remove-loop-reset branch # of SpiffWorkflow. This is because it saves script data to the python_env and NOT
# of SpiffWorkflow. This is because it saves script data to the python_env and NOT # to task.data. We may need to either create a new column on TaskModel to put the python_env
# to task.data. We may need to either create a new column on TaskModel to put the python_env # to task.data. We may need to either create a new column on TaskModel to put the python_env