removed spiff step details w/ burnettk

jasquat 2023-03-23 16:33:30 -04:00
parent 94cd732ebd
commit 0ae74f8f35
17 changed files with 12 additions and 300 deletions

View File

@@ -1,8 +1,8 @@
"""empty message
Revision ID: 4255f548bfb4
Revision ID: 0b5dd14bfbac
Revises:
Create Date: 2023-03-20 13:00:28.655387
Create Date: 2023-03-23 16:25:33.288500
"""
from alembic import op
@@ -10,7 +10,7 @@ import sqlalchemy as sa
from sqlalchemy.dialects import mysql
# revision identifiers, used by Alembic.
revision = '4255f548bfb4'
revision = '0b5dd14bfbac'
down_revision = None
branch_labels = None
depends_on = None
@@ -251,7 +251,6 @@ def upgrade():
sa.Column('status', sa.String(length=50), nullable=True),
sa.Column('bpmn_version_control_type', sa.String(length=50), nullable=True),
sa.Column('bpmn_version_control_identifier', sa.String(length=255), nullable=True),
sa.Column('spiff_step', sa.Integer(), nullable=True),
sa.ForeignKeyConstraint(['bpmn_process_definition_id'], ['bpmn_process_definition.id'], ),
sa.ForeignKeyConstraint(['bpmn_process_id'], ['bpmn_process.id'], ),
sa.ForeignKeyConstraint(['process_initiator_id'], ['user.id'], ),
@@ -347,22 +346,6 @@ def upgrade():
op.create_index(op.f('ix_process_instance_queue_locked_at_in_seconds'), 'process_instance_queue', ['locked_at_in_seconds'], unique=False)
op.create_index(op.f('ix_process_instance_queue_locked_by'), 'process_instance_queue', ['locked_by'], unique=False)
op.create_index(op.f('ix_process_instance_queue_status'), 'process_instance_queue', ['status'], unique=False)
op.create_table('spiff_step_details',
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('process_instance_id', sa.Integer(), nullable=False),
sa.Column('spiff_step', sa.Integer(), nullable=False),
sa.Column('task_json', sa.JSON(), nullable=False),
sa.Column('task_id', sa.String(length=50), nullable=False),
sa.Column('task_state', sa.String(length=50), nullable=False),
sa.Column('bpmn_task_identifier', sa.String(length=255), nullable=False),
sa.Column('delta_json', sa.JSON(), nullable=True),
sa.Column('start_in_seconds', sa.DECIMAL(precision=17, scale=6), nullable=False),
sa.Column('end_in_seconds', sa.DECIMAL(precision=17, scale=6), nullable=True),
sa.ForeignKeyConstraint(['process_instance_id'], ['process_instance.id'], ),
sa.PrimaryKeyConstraint('id'),
sa.UniqueConstraint('process_instance_id', 'spiff_step', name='process_instance_id_spiff_step')
)
op.create_index(op.f('ix_spiff_step_details_process_instance_id'), 'spiff_step_details', ['process_instance_id'], unique=False)
op.create_table('task',
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('guid', sa.String(length=36), nullable=False),
@@ -468,8 +451,6 @@ def downgrade():
op.drop_index(op.f('ix_task_json_data_hash'), table_name='task')
op.drop_index(op.f('ix_task_bpmn_process_id'), table_name='task')
op.drop_table('task')
op.drop_index(op.f('ix_spiff_step_details_process_instance_id'), table_name='spiff_step_details')
op.drop_table('spiff_step_details')
op.drop_index(op.f('ix_process_instance_queue_status'), table_name='process_instance_queue')
op.drop_index(op.f('ix_process_instance_queue_locked_by'), table_name='process_instance_queue')
op.drop_index(op.f('ix_process_instance_queue_locked_at_in_seconds'), table_name='process_instance_queue')

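Note: this commit rewrites the initial migration in place (revision 4255f548bfb4 becomes 0b5dd14bfbac, both with down_revision = None), which only works for databases recreated from scratch. An already-migrated database would need its own cleanup migration; a minimal sketch, with placeholder revision identifiers and assuming the old schema is present:

"""Drop the spiff step artifacts (hypothetical follow-up migration)."""
from alembic import op
import sqlalchemy as sa

# placeholder identifiers, not from this commit
revision = "000000000000"
down_revision = "4255f548bfb4"
branch_labels = None
depends_on = None

def upgrade() -> None:
    op.drop_index(op.f("ix_spiff_step_details_process_instance_id"), table_name="spiff_step_details")
    op.drop_table("spiff_step_details")
    op.drop_column("process_instance", "spiff_step")

def downgrade() -> None:
    # recreating the dropped spiff_step_details table is omitted in this sketch
    op.add_column("process_instance", sa.Column("spiff_step", sa.Integer(), nullable=True))
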
View File

@@ -1595,7 +1595,7 @@ paths:
type: string
get:
operationId: spiffworkflow_backend.routes.tasks_controller.task_data_show
summary: Get task data for a single task in a spiff step.
summary: Get task data for a single task.
tags:
- Process Instances
responses:

View File

@@ -41,9 +41,6 @@ from spiffworkflow_backend.models.process_instance_report import (
) # noqa: F401
from spiffworkflow_backend.models.refresh_token import RefreshTokenModel # noqa: F401
from spiffworkflow_backend.models.secret_model import SecretModel # noqa: F401
from spiffworkflow_backend.models.spiff_step_details import (
SpiffStepDetailsModel,
) # noqa: F401
from spiffworkflow_backend.models.user import UserModel # noqa: F401
from spiffworkflow_backend.models.group import GroupModel # noqa: F401
from spiffworkflow_backend.models.process_instance_metadata import (

View File

@@ -87,6 +87,10 @@ class ProcessInstanceModel(SpiffworkflowBaseDBModel):
"ProcessInstanceMetadataModel",
cascade="delete",
) # type: ignore
process_instance_queue = relationship(
"ProcessInstanceQueueModel",
cascade="delete",
) # type: ignore
start_in_seconds: int | None = db.Column(db.Integer, index=True)
end_in_seconds: int | None = db.Column(db.Integer, index=True)
@@ -96,7 +100,6 @@ class ProcessInstanceModel(SpiffworkflowBaseDBModel):
bpmn_version_control_type: str = db.Column(db.String(50))
bpmn_version_control_identifier: str = db.Column(db.String(255))
spiff_step: int = db.Column(db.Integer)
bpmn_xml_file_contents: str | None = None
process_model_with_diagram_identifier: str | None = None
@@ -117,7 +120,6 @@ class ProcessInstanceModel(SpiffworkflowBaseDBModel):
"bpmn_xml_file_contents": self.bpmn_xml_file_contents,
"bpmn_version_control_identifier": self.bpmn_version_control_identifier,
"bpmn_version_control_type": self.bpmn_version_control_type,
"spiff_step": self.spiff_step,
"process_initiator_username": self.process_initiator.username,
}

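The process_instance_queue relationship added above uses cascade="delete", so queue rows disappear with their process instance. A stand-alone sketch of that SQLAlchemy behavior (simplified stand-in models, SQLAlchemy 1.4+ assumed, not the project's actual classes):

from sqlalchemy import Column, ForeignKey, Integer, create_engine
from sqlalchemy.orm import Session, declarative_base, relationship

Base = declarative_base()

class ProcessInstance(Base):
    __tablename__ = "process_instance"
    id = Column(Integer, primary_key=True)
    # mirrors the cascade added in this commit
    queue_entries = relationship("QueueEntry", cascade="delete")

class QueueEntry(Base):
    __tablename__ = "process_instance_queue"
    id = Column(Integer, primary_key=True)
    process_instance_id = Column(ForeignKey("process_instance.id"), nullable=False)

engine = create_engine("sqlite://")
Base.metadata.create_all(engine)
with Session(engine) as session:
    session.add_all([ProcessInstance(id=1), QueueEntry(id=1, process_instance_id=1)])
    session.commit()
    session.delete(session.get(ProcessInstance, 1))  # queue row is deleted too
    session.commit()
    assert session.query(QueueEntry).count() == 0
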
View File

@@ -1,37 +0,0 @@
"""Spiff_step_details."""
from dataclasses import dataclass
from typing import Union
from sqlalchemy import ForeignKey
from sqlalchemy import UniqueConstraint
from sqlalchemy.orm import deferred
from spiffworkflow_backend.models.db import db
from spiffworkflow_backend.models.db import SpiffworkflowBaseDBModel
from spiffworkflow_backend.models.process_instance import ProcessInstanceModel
@dataclass
class SpiffStepDetailsModel(SpiffworkflowBaseDBModel):
"""SpiffStepDetailsModel."""
__tablename__ = "spiff_step_details"
__table_args__ = (UniqueConstraint("process_instance_id", "spiff_step", name="process_instance_id_spiff_step"),)
id: int = db.Column(db.Integer, primary_key=True)
process_instance_id: int = db.Column(
ForeignKey(ProcessInstanceModel.id), nullable=False, index=True # type: ignore
)
spiff_step: int = db.Column(db.Integer, nullable=False)
task_json: dict = deferred(db.Column(db.JSON, nullable=False)) # type: ignore
task_id: str = db.Column(db.String(50), nullable=False)
task_state: str = db.Column(db.String(50), nullable=False)
bpmn_task_identifier: str = db.Column(db.String(255), nullable=False)
delta_json: list = deferred(db.Column(db.JSON)) # type: ignore
start_in_seconds: float = db.Column(db.DECIMAL(17, 6), nullable=False)
# to fix mypy in 3.9 - not sure why syntax like:
# float | None
# works in other dataclass db models
end_in_seconds: Union[float, None] = db.Column(db.DECIMAL(17, 6))

View File

@@ -108,7 +108,6 @@ class Task:
event_definition: Union[dict[str, Any], None] = None,
call_activity_process_identifier: Optional[str] = None,
calling_subprocess_task_id: Optional[str] = None,
task_spiff_step: Optional[int] = None,
):
"""__init__."""
self.id = id
@@ -123,7 +122,6 @@ class Task:
self.event_definition = event_definition
self.call_activity_process_identifier = call_activity_process_identifier
self.calling_subprocess_task_id = calling_subprocess_task_id
self.task_spiff_step = task_spiff_step
self.data = data
if self.data is None:
@@ -181,7 +179,6 @@ class Task:
"event_definition": self.event_definition,
"call_activity_process_identifier": self.call_activity_process_identifier,
"calling_subprocess_task_id": self.calling_subprocess_task_id,
"task_spiff_step": self.task_spiff_step,
}
@classmethod

View File

@@ -41,7 +41,6 @@ from spiffworkflow_backend.models.process_instance_report import (
from spiffworkflow_backend.models.process_model import ProcessModelInfo
from spiffworkflow_backend.models.spec_reference import SpecReferenceCache
from spiffworkflow_backend.models.spec_reference import SpecReferenceNotFoundError
from spiffworkflow_backend.models.spiff_step_details import SpiffStepDetailsModel
from spiffworkflow_backend.models.task import TaskModel
from spiffworkflow_backend.models.task_definition import TaskDefinitionModel
from spiffworkflow_backend.models.user import UserModel
@@ -448,7 +447,6 @@ def process_instance_delete(
# (Pdb) db.session.delete
# <bound method delete of <sqlalchemy.orm.scoping.scoped_session object at 0x103eaab30>>
db.session.query(SpiffStepDetailsModel).filter_by(process_instance_id=process_instance.id).delete()
db.session.query(ProcessInstanceQueueModel).filter_by(process_instance_id=process_instance.id).delete()
db.session.delete(process_instance)
db.session.commit()

View File

@@ -102,7 +102,6 @@ def script_unit_test_run(
"""Script_unit_test_run."""
# FIXME: We should probably clear this somewhere else but this works
current_app.config["THREAD_LOCAL_DATA"].process_instance_id = None
current_app.config["THREAD_LOCAL_DATA"].spiff_step = None
python_script = _get_required_parameter_or_raise("python_script", body)
input_json = _get_required_parameter_or_raise("input_json", body)

View File

@@ -33,7 +33,6 @@ from spiffworkflow_backend.models.db import db
from spiffworkflow_backend.models.group import GroupModel
from spiffworkflow_backend.models.human_task import HumanTaskModel
from spiffworkflow_backend.models.human_task_user import HumanTaskUserModel
from spiffworkflow_backend.models.json_data import JsonDataModel
from spiffworkflow_backend.models.process_instance import ProcessInstanceModel
from spiffworkflow_backend.models.process_instance import ProcessInstanceModelSchema
from spiffworkflow_backend.models.process_instance import ProcessInstanceStatus
@@ -218,7 +217,7 @@ def task_data_update(
task_model, new_task_data_dict, "json_data_hash"
)
if json_data_dict is not None:
TaskService.insert_or_update_json_data_records({json_data_dict['hash']: json_data_dict})
TaskService.insert_or_update_json_data_records({json_data_dict["hash"]: json_data_dict})
# json_data = JsonDataModel(**json_data_dict)
# db.session.add(json_data)
ProcessInstanceProcessor.add_event_to_process_instance(

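Aside: insert_or_update_json_data_records takes a mapping keyed by content hash, so identical task data is stored only once. A toy illustration of that keying idea (the hashing scheme here is an assumption, not taken from this commit):

import hashlib
import json

def build_json_data_dict(data: dict) -> dict:
    # assumed scheme: key a JSON blob by the sha256 of a canonical serialization
    serialized = json.dumps(data, sort_keys=True)
    return {"hash": hashlib.sha256(serialized.encode()).hexdigest(), "data": data}

records: dict = {}
json_data_dict = build_json_data_dict({"approved": True})
records[json_data_dict["hash"]] = json_data_dict  # mirrors the call shape above
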
View File

@@ -9,7 +9,6 @@ from spiffworkflow_backend.models.process_instance import ProcessInstanceModel
from spiffworkflow_backend.models.script_attributes_context import (
ScriptAttributesContext,
)
from spiffworkflow_backend.models.spiff_step_details import SpiffStepDetailsModel
from spiffworkflow_backend.scripts.script import Script
@@ -43,14 +42,6 @@ class DeleteProcessInstancesWithCriteria(Script):
rows_affected = len(results)
if rows_affected > 0:
ids_to_delete = list(map(lambda r: r.id, results)) # type: ignore
step_details = SpiffStepDetailsModel.query.filter(
SpiffStepDetailsModel.process_instance_id.in_(ids_to_delete) # type: ignore
).all()
for deletion in step_details:
db.session.delete(deletion)
for deletion in results:
db.session.delete(deletion)
db.session.commit()

View File

@@ -6,7 +6,6 @@ import sys
from typing import Any
from typing import Optional
from flask import g
from flask.app import Flask
@@ -88,28 +87,6 @@ class JsonFormatter(logging.Formatter):
return json.dumps(message_dict, default=str)
class SpiffFilter(logging.Filter):
"""SpiffFilter."""
def __init__(self, app: Flask):
"""__init__."""
self.app = app
super().__init__()
def filter(self, record: logging.LogRecord) -> bool:
"""Filter."""
tld = self.app.config["THREAD_LOCAL_DATA"]
process_instance_id = ""
if hasattr(tld, "process_instance_id"):
process_instance_id = tld.process_instance_id
setattr(record, "process_instance_id", process_instance_id) # noqa: B010
if hasattr(tld, "spiff_step"):
setattr(record, "spiff_step", tld.spiff_step) # noqa: 8010
if hasattr(g, "user") and g.user:
setattr(record, "current_user_id", g.user.id) # noqa: B010
return True
def setup_logger(app: Flask) -> None:
"""Setup_logger."""
upper_log_level_string = app.config["SPIFFWORKFLOW_BACKEND_LOG_LEVEL"].upper()

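For reference, the removed filter tagged every log record from thread-local state before handing it to the JSON formatter. A stand-alone toy of that pattern without the dropped spiff_step branch (the real class also read Flask's g for the current user id):

import logging
import threading

tld = threading.local()

class ProcessInstanceFilter(logging.Filter):
    def filter(self, record: logging.LogRecord) -> bool:
        # tag the record; blank when no process instance is being processed
        record.process_instance_id = getattr(tld, "process_instance_id", "")
        return True

handler = logging.StreamHandler()
handler.setFormatter(logging.Formatter("%(process_instance_id)s %(message)s"))
handler.addFilter(ProcessInstanceFilter())
logger = logging.getLogger("demo")
logger.addHandler(handler)

tld.process_instance_id = 42
logger.warning("doing engine steps")  # prints "42 doing engine steps"
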
View File

@@ -39,7 +39,6 @@ from SpiffWorkflow.bpmn.serializer.task_spec import ( # type: ignore
from SpiffWorkflow.bpmn.serializer.workflow import BpmnWorkflowSerializer # type: ignore
from SpiffWorkflow.bpmn.specs.BpmnProcessSpec import BpmnProcessSpec # type: ignore
from SpiffWorkflow.bpmn.specs.events.EndEvent import EndEvent # type: ignore
from SpiffWorkflow.bpmn.specs.events.event_definitions import CancelEventDefinition # type: ignore
from SpiffWorkflow.bpmn.specs.events.StartEvent import StartEvent # type: ignore
from SpiffWorkflow.bpmn.specs.SubWorkflowTask import SubWorkflowTask # type: ignore
from SpiffWorkflow.bpmn.workflow import BpmnWorkflow # type: ignore
@@ -84,7 +83,6 @@ from spiffworkflow_backend.models.script_attributes_context import (
ScriptAttributesContext,
)
from spiffworkflow_backend.models.spec_reference import SpecReferenceCache
from spiffworkflow_backend.models.spiff_step_details import SpiffStepDetailsModel
from spiffworkflow_backend.models.task import TaskModel
from spiffworkflow_backend.models.task import TaskNotFoundError
from spiffworkflow_backend.models.task_definition import TaskDefinitionModel
@@ -92,9 +90,6 @@ from spiffworkflow_backend.models.user import UserModel
from spiffworkflow_backend.scripts.script import Script
from spiffworkflow_backend.services.custom_parser import MyCustomParser
from spiffworkflow_backend.services.file_system_service import FileSystemService
from spiffworkflow_backend.services.process_instance_lock_service import (
ProcessInstanceLockService,
)
from spiffworkflow_backend.services.process_instance_queue_service import ProcessInstanceIsAlreadyLockedError
from spiffworkflow_backend.services.process_instance_queue_service import ProcessInstanceQueueService
from spiffworkflow_backend.services.process_model_service import ProcessModelService
@@ -105,9 +100,6 @@ from spiffworkflow_backend.services.user_service import UserService
from spiffworkflow_backend.services.workflow_execution_service import (
execution_strategy_named,
)
from spiffworkflow_backend.services.workflow_execution_service import (
StepDetailLoggingDelegate,
)
from spiffworkflow_backend.services.workflow_execution_service import (
TaskModelSavingDelegate,
)
@@ -151,10 +143,6 @@ class MissingProcessInfoError(Exception):
"""MissingProcessInfoError."""
class SpiffStepDetailIsMissingError(Exception):
pass
class BoxedTaskDataBasedScriptEngineEnvironment(BoxedTaskDataEnvironment): # type: ignore
def __init__(self, environment_globals: Dict[str, Any]):
"""BoxedTaskDataBasedScriptEngineEnvironment."""
@@ -433,7 +421,6 @@ class ProcessInstanceProcessor:
"""Create a Workflow Processor based on the serialized information available in the process_instance model."""
tld = current_app.config["THREAD_LOCAL_DATA"]
tld.process_instance_id = process_instance_model.id
tld.spiff_step = process_instance_model.spiff_step
# we want this to be the fully qualified path to the process model including all group subcomponents
current_app.config["THREAD_LOCAL_DATA"].process_model_identifier = (
@@ -814,37 +801,6 @@ class ProcessInstanceProcessor:
"lane_assignment_id": lane_assignment_id,
}
def spiff_step_details_mapping(
self,
spiff_task: Optional[SpiffTask] = None,
start_in_seconds: Optional[float] = None,
end_in_seconds: Optional[float] = None,
) -> dict:
"""SaveSpiffStepDetails."""
if spiff_task is None:
# TODO: safer to pass in task vs use last task?
spiff_task = self.bpmn_process_instance.last_task
if spiff_task is None:
return {}
# it's only None when we're starting a human task (it's not complete yet)
if start_in_seconds is None:
start_in_seconds = time.time()
task_json = self.get_task_dict_from_spiff_task(spiff_task)
return {
"process_instance_id": self.process_instance_model.id,
"spiff_step": self.process_instance_model.spiff_step or 1,
"task_json": task_json,
"task_id": str(spiff_task.id),
"task_state": spiff_task.get_state_name(),
"bpmn_task_identifier": spiff_task.task_spec.name,
"start_in_seconds": start_in_seconds,
"end_in_seconds": end_in_seconds,
}
def extract_metadata(self, process_model_info: ProcessModelInfo) -> None:
"""Extract_metadata."""
metadata_extraction_paths = process_model_info.metadata_extraction_paths
@@ -1182,14 +1138,7 @@ class ProcessInstanceProcessor:
human_task_user = HumanTaskUserModel(user_id=potential_owner_id, human_task=human_task)
db.session.add(human_task_user)
self.increment_spiff_step()
spiff_step_detail_mapping = self.spiff_step_details_mapping(
spiff_task=ready_or_waiting_task, start_in_seconds=time.time()
)
spiff_step_detail = SpiffStepDetailsModel(**spiff_step_detail_mapping)
db.session.add(spiff_step_detail)
db.session.commit()
# self.log_spiff_step_details(spiff_step_detail_mapping)
if len(human_tasks) > 0:
for at in human_tasks:
@@ -1220,15 +1169,6 @@ class ProcessInstanceProcessor:
# TODO: do_engine_steps without a lock
self.do_engine_steps(save=True)
def add_step(self, step: Union[dict, None] = None) -> None:
"""Add a spiff step."""
if step is None:
step = self.spiff_step_details_mapping()
spiff_step_detail = SpiffStepDetailsModel(**step)
db.session.add(spiff_step_detail)
db.session.commit()
# self.log_spiff_step_details(step)
def manual_complete_task(self, task_id: str, execute: bool) -> None:
"""Mark the task complete optionally executing it."""
spiff_tasks_updated = {}
@@ -1279,9 +1219,6 @@ class ProcessInstanceProcessor:
task.complete()
spiff_tasks_updated[task.id] = task
self.increment_spiff_step()
self.add_step()
for updated_spiff_task in spiff_tasks_updated.values():
bpmn_process, task_model, new_task_models, new_json_data_dicts = (
TaskService.find_or_create_task_model_from_spiff_task(
@@ -1666,31 +1603,15 @@ class ProcessInstanceProcessor:
db.session.add(message_instance)
db.session.commit()
def increment_spiff_step(self) -> None:
"""Spiff_step++."""
spiff_step = self.process_instance_model.spiff_step or 0
spiff_step += 1
self.process_instance_model.spiff_step = spiff_step
current_app.config["THREAD_LOCAL_DATA"].spiff_step = spiff_step
db.session.add(self.process_instance_model)
def do_engine_steps(
self,
exit_at: None = None,
save: bool = False,
execution_strategy_name: Optional[str] = None,
) -> None:
# NOTE: To avoid saving spiff step details, just comment out this function and the step_delegate and
# set the TaskModelSavingDelegate's secondary_engine_step_delegate to None.
def spiff_step_details_mapping_builder(task: SpiffTask, start: float, end: float) -> dict:
self._script_engine.environment.revise_state_with_task_data(task)
return self.spiff_step_details_mapping(task, start, end)
self._add_bpmn_process_definitions()
step_delegate = StepDetailLoggingDelegate(self.increment_spiff_step, spiff_step_details_mapping_builder)
task_model_delegate = TaskModelSavingDelegate(
secondary_engine_step_delegate=step_delegate,
serializer=self._serializer,
process_instance=self.process_instance_model,
bpmn_definition_to_task_definitions_mappings=self.bpmn_definition_to_task_definitions_mappings,
@@ -1718,31 +1639,6 @@ class ProcessInstanceProcessor:
):
self._script_engine.failing_spiff_task = None
# log the spiff step details so we know what is processing the process
# instance when a human task has a timer event.
def log_spiff_step_details(self, step_details: Any) -> None:
if ProcessInstanceLockService.has_lock(self.process_instance_model.id):
locked_by = ProcessInstanceLockService.locked_by()
message = f"ADDING SPIFF BULK STEP DETAILS: {locked_by}: {step_details}"
current_app.logger.debug(message)
def cancel_notify(self) -> None:
"""Cancel_notify."""
self.__cancel_notify(self.bpmn_process_instance)
@staticmethod
def __cancel_notify(bpmn_process_instance: BpmnWorkflow) -> None:
"""__cancel_notify."""
try:
# A little hacky, but make the bpmn_process_instance catch a cancel event.
bpmn_process_instance.signal("cancel") # generate a cancel signal.
bpmn_process_instance.catch(CancelEventDefinition())
# Due to this being static, can't save granular step details in this case
# TODO: do_engine_steps without a lock
bpmn_process_instance.do_engine_steps()
except WorkflowTaskException as we:
raise ApiError.from_workflow_exception("task_error", str(we), we) from we
@classmethod
def get_tasks_with_data(cls, bpmn_process_instance: BpmnWorkflow) -> List[SpiffTask]:
return [task for task in bpmn_process_instance.get_tasks(TaskState.FINISHED_MASK) if len(task.data) > 0]
@@ -1891,28 +1787,6 @@ class ProcessInstanceProcessor:
human_task.task_status = spiff_task.get_state_name()
db.session.add(human_task)
# FIXME: remove when we switch over to using tasks only
details_model = (
SpiffStepDetailsModel.query.filter_by(
process_instance_id=self.process_instance_model.id,
task_id=str(spiff_task.id),
task_state="READY",
)
.order_by(SpiffStepDetailsModel.id.desc()) # type: ignore
.first()
)
if details_model is None:
raise SpiffStepDetailIsMissingError(
"Cannot find a ready spiff_step_detail entry for process instance"
f" {self.process_instance_model.id} and task_id is {spiff_task.id}"
)
details_model.task_state = spiff_task.get_state_name()
details_model.end_in_seconds = time.time()
details_model.task_json = self.get_task_dict_from_spiff_task(spiff_task)
db.session.add(details_model)
# #######
json_data_dict_list = TaskService.update_task_model(task_model, spiff_task, self._serializer)
for json_data_dict in json_data_dict_list:
if json_data_dict is not None:

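For readers tracking what was dropped here: spiff_step_details_mapping built one record per executed task, shaped roughly like the dict below (keys taken from the removed code above; all values invented for illustration):

import time
import uuid

example_step_detail = {
    "process_instance_id": 1,
    "spiff_step": 1,
    "task_json": {"data": {}},  # snapshot from get_task_dict_from_spiff_task
    "task_id": str(uuid.uuid4()),
    "task_state": "COMPLETED",
    "bpmn_task_identifier": "my_script_task",
    "start_in_seconds": time.time(),
    "end_in_seconds": time.time(),
}
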
View File

@@ -404,7 +404,6 @@ class ProcessInstanceService:
spiff_task: SpiffTask,
add_docs_and_forms: bool = False,
calling_subprocess_task_id: Optional[str] = None,
task_spiff_step: Optional[int] = None,
) -> Task:
"""Spiff_task_to_api_task."""
task_type = spiff_task.task_spec.spec_type
@@ -443,7 +442,6 @@ class ProcessInstanceService:
event_definition=serialized_task_spec.get("event_definition"),
call_activity_process_identifier=call_activity_process_identifier,
calling_subprocess_task_id=calling_subprocess_task_id,
task_spiff_step=task_spiff_step,
)
return task

View File

@@ -1,7 +1,6 @@
import logging
import time
from typing import Callable
from typing import List
from typing import Optional
from SpiffWorkflow.bpmn.serializer.workflow import BpmnWorkflowSerializer # type: ignore
@@ -19,7 +18,6 @@ from spiffworkflow_backend.models.message_instance_correlation import (
from spiffworkflow_backend.models.process_instance import ProcessInstanceModel
from spiffworkflow_backend.models.process_instance_event import ProcessInstanceEventModel
from spiffworkflow_backend.models.process_instance_event import ProcessInstanceEventType
from spiffworkflow_backend.models.spiff_step_details import SpiffStepDetailsModel
from spiffworkflow_backend.models.task import TaskModel # noqa: F401
from spiffworkflow_backend.services.assertion_service import safe_assertion
from spiffworkflow_backend.services.process_instance_lock_service import (
@@ -45,10 +43,6 @@ class EngineStepDelegate:
pass
SpiffStepIncrementer = Callable[[], None]
SpiffStepDetailsMappingBuilder = Callable[[SpiffTask, float, float], dict]
class TaskModelSavingDelegate(EngineStepDelegate):
"""Engine step delegate that takes care of saving a task model to the database.
@@ -167,58 +161,6 @@ class TaskModelSavingDelegate(EngineStepDelegate):
return task_model
class StepDetailLoggingDelegate(EngineStepDelegate):
"""Engine step delegate that takes care of logging spiff step details.
This separates the concerns of step execution and step logging.
"""
def __init__(
self,
increment_spiff_step: SpiffStepIncrementer,
spiff_step_details_mapping: SpiffStepDetailsMappingBuilder,
):
"""__init__."""
self.increment_spiff_step = increment_spiff_step
self.spiff_step_details_mapping = spiff_step_details_mapping
self.step_details: List[dict] = []
self.current_task_start_in_seconds = 0.0
self.tasks_to_log = {
"BPMN Task",
"Script Task",
"Service Task",
"Default Start Event",
"Exclusive Gateway",
"Call Activity",
# "End Join",
"End Event",
"Default Throwing Event",
"Subprocess",
"Transactional Subprocess",
}
def should_log(self, spiff_task: SpiffTask) -> bool:
return spiff_task.task_spec.spec_type in self.tasks_to_log and not spiff_task.task_spec.name.endswith(
".EndJoin"
)
def will_complete_task(self, spiff_task: SpiffTask) -> None:
if self.should_log(spiff_task):
self.current_task_start_in_seconds = time.time()
self.increment_spiff_step()
def did_complete_task(self, spiff_task: SpiffTask) -> None:
if self.should_log(spiff_task):
self.step_details.append(
self.spiff_step_details_mapping(spiff_task, self.current_task_start_in_seconds, time.time())
)
def save(self, _bpmn_process_instance: BpmnWorkflow, commit: bool = True) -> None:
db.session.bulk_insert_mappings(SpiffStepDetailsModel, self.step_details)
if commit:
db.session.commit()
class ExecutionStrategy:
"""Interface of sorts for a concrete execution strategy."""

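The removed StepDetailLoggingDelegate hung off the will_complete_task/did_complete_task/save hooks that EngineStepDelegate still defines. A minimal stand-alone delegate using the same hooks (interface shape taken from this diff; the timing-only behavior is invented):

import time
from typing import Any, List

class TimingDelegate:
    """Times each engine step, loosely what the removed delegate did with step details."""

    def __init__(self) -> None:
        self.durations: List[float] = []
        self._start = 0.0

    def will_complete_task(self, spiff_task: Any) -> None:
        self._start = time.time()

    def did_complete_task(self, spiff_task: Any) -> None:
        self.durations.append(time.time() - self._start)

    def save(self, bpmn_process_instance: Any, commit: bool = True) -> None:
        print(f"timed {len(self.durations)} engine steps")
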
View File

@@ -1295,7 +1295,6 @@ export default function ProcessInstanceListTable({
end_in_seconds: 'End Time',
status: 'Status',
process_initiator_username: 'Started By',
spiff_step: 'SpiffWorkflow Step',
};
const getHeaderLabel = (header: string) => {
return headerLabels[header] ?? header;

View File

@@ -53,9 +53,6 @@ export interface Task {
task_definition_properties_json: TaskDefinitionPropertiesJson;
event_definition?: EventDefinition;
// TODO: DELETE THIS!
task_spiff_step?: number;
}
export interface TaskIds {
@@ -88,7 +85,6 @@ export interface ProcessInstanceTask {
type: string;
updated_at_in_seconds: number;
task_spiff_step?: number;
potential_owner_usernames?: string;
assigned_user_group_identifier?: string;
}
@@ -132,7 +128,6 @@ export interface ProcessInstance {
end_in_seconds: number | null;
process_initiator_username: string;
bpmn_xml_file_contents?: string;
spiff_step?: number;
created_at_in_seconds: number;
updated_at_in_seconds: number;
bpmn_version_control_identifier: string;

View File

@@ -286,14 +286,14 @@ export default function ProcessInstanceShow({ variant }: OwnProps) {
);
};
const returnToLastSpiffStep = () => {
const returnToProcessInstance = () => {
window.location.href = processInstanceShowPageBaseUrl;
};
const resetProcessInstance = () => {
HttpService.makeCallToBackend({
path: `${targetUris.processInstanceResetPath}/${currentToTaskGuid()}`,
successCallback: returnToLastSpiffStep,
successCallback: returnToProcessInstance,
httpMethod: 'POST',
});
};
@@ -763,7 +763,7 @@ export default function ProcessInstanceShow({ variant }: OwnProps) {
HttpService.makeCallToBackend({
path: `/task-complete/${modifiedProcessModelId}/${params.process_instance_id}/${taskToDisplay.guid}`,
httpMethod: 'POST',
successCallback: returnToLastSpiffStep,
successCallback: returnToProcessInstance,
postBody: { execute },
});
}