From 4c891903c4ce49cbcbbc9e787868101b57930e1d Mon Sep 17 00:00:00 2001 From: jasquat Date: Mon, 20 Mar 2023 09:49:07 -0400 Subject: [PATCH 01/20] some basic changes to test using tasks instead of spiff step details --- .../src/spiffworkflow_backend/models/task.py | 1 + .../routes/process_instances_controller.py | 223 +++++++++++------- 2 files changed, 136 insertions(+), 88 deletions(-) diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/models/task.py b/spiffworkflow-backend/src/spiffworkflow_backend/models/task.py index a7812ba03..70a60ae1b 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/models/task.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/models/task.py @@ -78,6 +78,7 @@ class Task: HUMAN_TASK_TYPES = ["User Task", "Manual Task"] + def __init__( self, id: str, diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/routes/process_instances_controller.py b/spiffworkflow-backend/src/spiffworkflow_backend/routes/process_instances_controller.py index 93e78389d..70987c7f1 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/routes/process_instances_controller.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/routes/process_instances_controller.py @@ -563,11 +563,11 @@ def process_instance_task_list_without_task_data_for_me( """Process_instance_task_list_without_task_data_for_me.""" process_instance = _find_process_instance_for_me_or_raise(process_instance_id) return process_instance_task_list( - modified_process_model_identifier, - process_instance, - all_tasks, - spiff_step, - most_recent_tasks_only, + _modified_process_model_identifier=modified_process_model_identifier, + process_instance=process_instance, + all_tasks=all_tasks, + spiff_step=spiff_step, + most_recent_tasks_only=most_recent_tasks_only, ) @@ -581,11 +581,11 @@ def process_instance_task_list_without_task_data( """Process_instance_task_list_without_task_data.""" process_instance = _find_process_instance_by_id_or_raise(process_instance_id) return process_instance_task_list( - modified_process_model_identifier, - process_instance, - all_tasks, - spiff_step, - most_recent_tasks_only, + _modified_process_model_identifier=modified_process_model_identifier, + process_instance=process_instance, + all_tasks=all_tasks, + spiff_step=spiff_step, + most_recent_tasks_only=most_recent_tasks_only, ) @@ -594,93 +594,140 @@ def process_instance_task_list( process_instance: ProcessInstanceModel, all_tasks: bool = False, spiff_step: int = 0, + to_task_guid: Optional[str] = None, most_recent_tasks_only: bool = False, ) -> flask.wrappers.Response: """Process_instance_task_list.""" - step_detail_query = db.session.query(SpiffStepDetailsModel).filter( - SpiffStepDetailsModel.process_instance_id == process_instance.id, + # step_detail_query = db.session.query(SpiffStepDetailsModel).filter( + # SpiffStepDetailsModel.process_instance_id == process_instance.id, + # ) + # + # if spiff_step > 0: + # step_detail_query = step_detail_query.filter(SpiffStepDetailsModel.spiff_step <= spiff_step) + # + # step_details = step_detail_query.all() + # + # processor = ProcessInstanceProcessor(process_instance) + # full_bpmn_process_dict = processor.full_bpmn_process_dict + # tasks = full_bpmn_process_dict["tasks"] + # subprocesses = full_bpmn_process_dict["subprocesses"] + # + # steps_by_id = {step_detail.task_id: step_detail for step_detail in step_details} + # + # def restore_task(spiff_task: dict[str, Any], step_ended: float) -> None: + # if spiff_task["last_state_change"] > step_ended: + # 
spiff_task["state"] = Task.task_state_name_to_int("FUTURE") + # spiff_task["data"] = {} + # + # if spiff_step > 0: + # last_change = step_details[-1].end_in_seconds or 0 + # for spiff_task in tasks.values(): + # restore_task(spiff_task, last_change) + # for subprocess in subprocesses.values(): + # for spiff_task in subprocess["tasks"].values(): + # restore_task(spiff_task, last_change) + # + # bpmn_process_instance = ProcessInstanceProcessor._serializer.workflow_from_dict(full_bpmn_process_dict) + # if spiff_step > 0: + # bpmn_process_instance.complete_task_from_id(UUID(step_details[-1].task_id)) + # for subprocess_id, subprocess in bpmn_process_instance.subprocesses.items(): + # if not subprocess.is_completed(): + # task = bpmn_process_instance.get_task(subprocess_id) + # task._set_state(TaskState.WAITING) + + task_model_query = db.session.query(TaskModel).filter( + TaskModel.process_instance_id == process_instance.id, ) - if spiff_step > 0: - step_detail_query = step_detail_query.filter(SpiffStepDetailsModel.spiff_step <= spiff_step) + if to_task_guid is not None: + to_task_model = TaskModel.query.filter_by(guid=to_task_guid, process_instance_id=process_instance.id).first() + if to_task_model is None: + raise ApiError( + error_code="task_not_found", + message=( + f"Cannot find a task with guid '{to_task_guid}' for process instance '{process_instance.id}'" + ), + status_code=400, + ) + task_model_query = task_model_query.filter(TaskModel.end_in_seconds <= to_task_model.end_in_seconds) - step_details = step_detail_query.all() + task_models = task_model_query.all() - processor = ProcessInstanceProcessor(process_instance) - full_bpmn_process_dict = processor.full_bpmn_process_dict - tasks = full_bpmn_process_dict["tasks"] - subprocesses = full_bpmn_process_dict["subprocesses"] + # processor = ProcessInstanceProcessor(process_instance) + # full_bpmn_process_dict = processor.full_bpmn_process_dict + # tasks = full_bpmn_process_dict["tasks"] + # subprocesses = full_bpmn_process_dict["subprocesses"] + # + # steps_by_id = {step_detail.task_id: step_detail for step_detail in step_details} + # + # def restore_task(spiff_task: dict[str, Any], step_ended: float) -> None: + # if spiff_task["last_state_change"] > step_ended: + # spiff_task["state"] = Task.task_state_name_to_int("FUTURE") + # spiff_task["data"] = {} + # + # if spiff_step > 0: + # last_change = step_details[-1].end_in_seconds or 0 + # for spiff_task in tasks.values(): + # restore_task(spiff_task, last_change) + # for subprocess in subprocesses.values(): + # for spiff_task in subprocess["tasks"].values(): + # restore_task(spiff_task, last_change) + # + # bpmn_process_instance = ProcessInstanceProcessor._serializer.workflow_from_dict(full_bpmn_process_dict) + # if spiff_step > 0: + # bpmn_process_instance.complete_task_from_id(UUID(step_details[-1].task_id)) + # for subprocess_id, subprocess in bpmn_process_instance.subprocesses.items(): + # if not subprocess.is_completed(): + # task = bpmn_process_instance.get_task(subprocess_id) + # task._set_state(TaskState.WAITING) - steps_by_id = {step_detail.task_id: step_detail for step_detail in step_details} + # spiff_tasks = None + # if all_tasks: + # spiff_tasks = bpmn_process_instance.get_tasks(TaskState.ANY_MASK) + # else: + # spiff_tasks = processor.get_all_user_tasks() + # + # ( + # subprocesses_by_child_task_ids, + # task_typename_by_task_id, + # ) = processor.get_subprocesses_by_child_task_ids() + # processor.get_highest_level_calling_subprocesses_by_child_task_ids( + # 
subprocesses_by_child_task_ids, task_typename_by_task_id + # ) + # + # spiff_tasks_to_process = spiff_tasks + # if most_recent_tasks_only: + # spiff_tasks_by_process_id_and_task_name: dict[str, SpiffTask] = {} + # current_tasks = {} + # for spiff_task in spiff_tasks_to_process: + # row_id = f"{spiff_task.task_spec._wf_spec.name}:{spiff_task.task_spec.name}" + # if spiff_task.state in [TaskState.READY, TaskState.WAITING]: + # current_tasks[row_id] = spiff_task + # if ( + # row_id not in spiff_tasks_by_process_id_and_task_name + # or spiff_task.state > spiff_tasks_by_process_id_and_task_name[row_id].state + # ): + # spiff_tasks_by_process_id_and_task_name[row_id] = spiff_task + # spiff_tasks_by_process_id_and_task_name.update(current_tasks) + # spiff_tasks_to_process = spiff_tasks_by_process_id_and_task_name.values() + # + # response = [] + # for spiff_task in spiff_tasks_to_process: + # task_spiff_step: Optional[int] = None + # if str(spiff_task.id) in steps_by_id: + # task_spiff_step = steps_by_id[str(spiff_task.id)].spiff_step + # calling_subprocess_task_id = subprocesses_by_child_task_ids.get(str(spiff_task.id), None) + # task = ProcessInstanceService.spiff_task_to_api_task( + # processor, + # spiff_task, + # calling_subprocess_task_id=calling_subprocess_task_id, + # task_spiff_step=task_spiff_step, + # ) + # if task.state in ["MAYBE", "LIKELY"]: + # task.state = "FUTURE" + # response.append(task) - def restore_task(spiff_task: dict[str, Any], step_ended: float) -> None: - if spiff_task["last_state_change"] > step_ended: - spiff_task["state"] = Task.task_state_name_to_int("FUTURE") - spiff_task["data"] = {} - - if spiff_step > 0: - last_change = step_details[-1].end_in_seconds or 0 - for spiff_task in tasks.values(): - restore_task(spiff_task, last_change) - for subprocess in subprocesses.values(): - for spiff_task in subprocess["tasks"].values(): - restore_task(spiff_task, last_change) - - bpmn_process_instance = ProcessInstanceProcessor._serializer.workflow_from_dict(full_bpmn_process_dict) - if spiff_step > 0: - bpmn_process_instance.complete_task_from_id(UUID(step_details[-1].task_id)) - for subprocess_id, subprocess in bpmn_process_instance.subprocesses.items(): - if not subprocess.is_completed(): - task = bpmn_process_instance.get_task(subprocess_id) - task._set_state(TaskState.WAITING) - - spiff_tasks = None - if all_tasks: - spiff_tasks = bpmn_process_instance.get_tasks(TaskState.ANY_MASK) - else: - spiff_tasks = processor.get_all_user_tasks() - - ( - subprocesses_by_child_task_ids, - task_typename_by_task_id, - ) = processor.get_subprocesses_by_child_task_ids() - processor.get_highest_level_calling_subprocesses_by_child_task_ids( - subprocesses_by_child_task_ids, task_typename_by_task_id - ) - - spiff_tasks_to_process = spiff_tasks - if most_recent_tasks_only: - spiff_tasks_by_process_id_and_task_name: dict[str, SpiffTask] = {} - current_tasks = {} - for spiff_task in spiff_tasks_to_process: - row_id = f"{spiff_task.task_spec._wf_spec.name}:{spiff_task.task_spec.name}" - if spiff_task.state in [TaskState.READY, TaskState.WAITING]: - current_tasks[row_id] = spiff_task - if ( - row_id not in spiff_tasks_by_process_id_and_task_name - or spiff_task.state > spiff_tasks_by_process_id_and_task_name[row_id].state - ): - spiff_tasks_by_process_id_and_task_name[row_id] = spiff_task - spiff_tasks_by_process_id_and_task_name.update(current_tasks) - spiff_tasks_to_process = spiff_tasks_by_process_id_and_task_name.values() - - response = [] - for spiff_task in spiff_tasks_to_process: - 
task_spiff_step: Optional[int] = None - if str(spiff_task.id) in steps_by_id: - task_spiff_step = steps_by_id[str(spiff_task.id)].spiff_step - calling_subprocess_task_id = subprocesses_by_child_task_ids.get(str(spiff_task.id), None) - task = ProcessInstanceService.spiff_task_to_api_task( - processor, - spiff_task, - calling_subprocess_task_id=calling_subprocess_task_id, - task_spiff_step=task_spiff_step, - ) - if task.state in ["MAYBE", "LIKELY"]: - task.state = "FUTURE" - response.append(task) - - return make_response(jsonify(response), 200) + return make_response(jsonify(task_models), 200) def process_instance_reset( From 9765f74bc0817828c818de850708363e13e2ce2e Mon Sep 17 00:00:00 2001 From: jasquat Date: Mon, 20 Mar 2023 12:04:22 -0400 Subject: [PATCH 02/20] some more updates w/ burnettk --- .../routes/process_instances_controller.py | 30 +++++++++++++++++++ spiffworkflow-frontend/src/interfaces.ts | 15 +++++++++- 2 files changed, 44 insertions(+), 1 deletion(-) diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/routes/process_instances_controller.py b/spiffworkflow-backend/src/spiffworkflow_backend/routes/process_instances_controller.py index 70987c7f1..59399f2f2 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/routes/process_instances_controller.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/routes/process_instances_controller.py @@ -1,5 +1,6 @@ """APIs for dealing with process groups, process models, and process instances.""" import base64 +from spiffworkflow_backend.models.bpmn_process import BpmnProcessModel import json from typing import Any from typing import Dict @@ -635,6 +636,18 @@ def process_instance_task_list( # task = bpmn_process_instance.get_task(subprocess_id) # task._set_state(TaskState.WAITING) + # guid: string; + # bpmn_identifier: string; + # + # bpmn_name?: string; + # + # state: string; + # typename: string; + + # calling_subprocess_task_guid: string; + # call_activity_process_bpmn_identifier?: string; + + task_model_query = db.session.query(TaskModel).filter( TaskModel.process_instance_id == process_instance.id, ) @@ -651,6 +664,23 @@ def process_instance_task_list( ) task_model_query = task_model_query.filter(TaskModel.end_in_seconds <= to_task_model.end_in_seconds) + task_model_query = ( + task_model_query.order_by( + ProcessInstanceEventModel.timestamp.desc(), ProcessInstanceEventModel.id.desc() # type: ignore + ) + .join(TaskDefinitionModel, TaskDefinitionModel.id == TaskModel.task_definition_id) + .join(BpmnProcessModel, BpmnProcessModel.id == TaskModel.bpmn_process_id) + .join( + BpmnProcessDefinitionModel, BpmnProcessDefinitionModel.id == TaskDefinitionModel.bpmn_process_definition_id + ) + .add_columns( + BpmnProcessDefinitionModel.bpmn_identifier.label("bpmn_process_definition_identifier"), # type: ignore + BpmnProcessDefinitionModel.bpmn_name.label("bpmn_process_definition_name"), # type: ignore + TaskDefinitionModel.bpmn_identifier.label("task_definition_identifier"), # type: ignore + TaskDefinitionModel.bpmn_name.label("task_definition_name"), # type: ignore + TaskDefinitionModel.typename.label("bpmn_task_type"), # type: ignore + ) + ) task_models = task_model_query.all() # processor = ProcessInstanceProcessor(process_instance) diff --git a/spiffworkflow-frontend/src/interfaces.ts b/spiffworkflow-frontend/src/interfaces.ts index a8d736903..aaf11ade6 100644 --- a/spiffworkflow-frontend/src/interfaces.ts +++ b/spiffworkflow-frontend/src/interfaces.ts @@ -21,8 +21,21 @@ export interface RecentProcessModel { 
processModelDisplayName: string; } +export interface Task { + guid: string; + bpmn_identifier: string; + + bpmn_name?: string; + + calling_subprocess_task_guid: string; + data: any; + state: string; + typename: string; + call_activity_process_bpmn_identifier?: string; +} + export interface ProcessInstanceTask { - id: number; + id: string; task_id: string; calling_subprocess_task_id: string; From bc58de809ea0d31d0d4d2cc129b81e6c460d22e0 Mon Sep 17 00:00:00 2001 From: jasquat Date: Mon, 20 Mar 2023 16:51:29 -0400 Subject: [PATCH 03/20] use task table for process instance show page. spiff steps are not working yet and neither is data w/ burnettk --- spiffworkflow-backend/conftest.py | 3 +- spiffworkflow-backend/migrations/env.py | 2 + .../{b652c232839f_.py => 4255f548bfb4_.py} | 18 +++-- .../src/spiffworkflow_backend/api.yml | 12 +++ .../models/bpmn_process.py | 7 +- .../routes/process_instances_controller.py | 42 ++++++++-- .../services/process_instance_processor.py | 2 +- .../services/task_service.py | 51 +++++++++--- .../manual_task_with_subprocesses.bpmn | 2 +- .../test_process_to_call.bpmn | 79 +++++++++++++------ .../unit/test_process_instance_processor.py | 21 ++++- .../src/components/ReactDiagramEditor.tsx | 14 ++-- spiffworkflow-frontend/src/interfaces.ts | 13 ++- .../src/routes/ProcessInstanceShow.tsx | 77 +++++++++--------- 14 files changed, 240 insertions(+), 103 deletions(-) rename spiffworkflow-backend/migrations/versions/{b652c232839f_.py => 4255f548bfb4_.py} (97%) diff --git a/spiffworkflow-backend/conftest.py b/spiffworkflow-backend/conftest.py index 304008d0b..9d05dfe5a 100644 --- a/spiffworkflow-backend/conftest.py +++ b/spiffworkflow-backend/conftest.py @@ -47,7 +47,8 @@ def app() -> Flask: def with_db_and_bpmn_file_cleanup() -> None: """Do it cleanly!""" meta = db.metadata - db.session.execute(db.update(BpmnProcessModel, values={"parent_process_id": None})) + db.session.execute(db.update(BpmnProcessModel, values={"top_level_process_id": None})) + db.session.execute(db.update(BpmnProcessModel, values={"direct_parent_process_id": None})) for table in reversed(meta.sorted_tables): db.session.execute(table.delete()) diff --git a/spiffworkflow-backend/migrations/env.py b/spiffworkflow-backend/migrations/env.py index 630e381ad..68feded2a 100644 --- a/spiffworkflow-backend/migrations/env.py +++ b/spiffworkflow-backend/migrations/env.py @@ -1,3 +1,5 @@ +from __future__ import with_statement + import logging from logging.config import fileConfig diff --git a/spiffworkflow-backend/migrations/versions/b652c232839f_.py b/spiffworkflow-backend/migrations/versions/4255f548bfb4_.py similarity index 97% rename from spiffworkflow-backend/migrations/versions/b652c232839f_.py rename to spiffworkflow-backend/migrations/versions/4255f548bfb4_.py index dbf5b2763..a66c074bb 100644 --- a/spiffworkflow-backend/migrations/versions/b652c232839f_.py +++ b/spiffworkflow-backend/migrations/versions/4255f548bfb4_.py @@ -1,8 +1,8 @@ """empty message -Revision ID: b652c232839f +Revision ID: 4255f548bfb4 Revises: -Create Date: 2023-03-17 16:50:32.774216 +Create Date: 2023-03-20 13:00:28.655387 """ from alembic import op @@ -10,7 +10,7 @@ import sqlalchemy as sa from sqlalchemy.dialects import mysql # revision identifiers, used by Alembic. 
-revision = 'b652c232839f' +revision = '4255f548bfb4' down_revision = None branch_labels = None depends_on = None @@ -115,19 +115,22 @@ def upgrade(): sa.Column('id', sa.Integer(), nullable=False), sa.Column('guid', sa.String(length=36), nullable=True), sa.Column('bpmn_process_definition_id', sa.Integer(), nullable=False), - sa.Column('parent_process_id', sa.Integer(), nullable=True), + sa.Column('top_level_process_id', sa.Integer(), nullable=True), + sa.Column('direct_parent_process_id', sa.Integer(), nullable=True), sa.Column('properties_json', sa.JSON(), nullable=False), sa.Column('json_data_hash', sa.String(length=255), nullable=False), sa.Column('start_in_seconds', sa.DECIMAL(precision=17, scale=6), nullable=True), sa.Column('end_in_seconds', sa.DECIMAL(precision=17, scale=6), nullable=True), sa.ForeignKeyConstraint(['bpmn_process_definition_id'], ['bpmn_process_definition.id'], ), - sa.ForeignKeyConstraint(['parent_process_id'], ['bpmn_process.id'], ), + sa.ForeignKeyConstraint(['direct_parent_process_id'], ['bpmn_process.id'], ), + sa.ForeignKeyConstraint(['top_level_process_id'], ['bpmn_process.id'], ), sa.PrimaryKeyConstraint('id'), sa.UniqueConstraint('guid') ) op.create_index(op.f('ix_bpmn_process_bpmn_process_definition_id'), 'bpmn_process', ['bpmn_process_definition_id'], unique=False) + op.create_index(op.f('ix_bpmn_process_direct_parent_process_id'), 'bpmn_process', ['direct_parent_process_id'], unique=False) op.create_index(op.f('ix_bpmn_process_json_data_hash'), 'bpmn_process', ['json_data_hash'], unique=False) - op.create_index(op.f('ix_bpmn_process_parent_process_id'), 'bpmn_process', ['parent_process_id'], unique=False) + op.create_index(op.f('ix_bpmn_process_top_level_process_id'), 'bpmn_process', ['top_level_process_id'], unique=False) op.create_table('bpmn_process_definition_relationship', sa.Column('id', sa.Integer(), nullable=False), sa.Column('bpmn_process_definition_parent_id', sa.Integer(), nullable=False), @@ -519,8 +522,9 @@ def downgrade(): op.drop_index(op.f('ix_bpmn_process_definition_relationship_bpmn_process_definition_child_id'), table_name='bpmn_process_definition_relationship') op.drop_index(op.f('ix_bpmn_process_definition_relationship_bpmn_process_definition_parent_id'), table_name='bpmn_process_definition_relationship') op.drop_table('bpmn_process_definition_relationship') - op.drop_index(op.f('ix_bpmn_process_parent_process_id'), table_name='bpmn_process') + op.drop_index(op.f('ix_bpmn_process_top_level_process_id'), table_name='bpmn_process') op.drop_index(op.f('ix_bpmn_process_json_data_hash'), table_name='bpmn_process') + op.drop_index(op.f('ix_bpmn_process_direct_parent_process_id'), table_name='bpmn_process') op.drop_index(op.f('ix_bpmn_process_bpmn_process_definition_id'), table_name='bpmn_process') op.drop_table('bpmn_process') op.drop_index(op.f('ix_user_service_id'), table_name='user') diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/api.yml b/spiffworkflow-backend/src/spiffworkflow_backend/api.yml index b71bed93d..7cffde1c3 100755 --- a/spiffworkflow-backend/src/spiffworkflow_backend/api.yml +++ b/spiffworkflow-backend/src/spiffworkflow_backend/api.yml @@ -919,6 +919,12 @@ paths: description: If true, this wil return only the most recent tasks. schema: type: boolean + - name: bpmn_process_guid + in: query + required: false + description: The guid of the bpmn process to get the tasks for. 
+ schema: + type: string get: tags: - Process Instances @@ -972,6 +978,12 @@ paths: description: If true, this wil return only the most recent tasks. schema: type: boolean + - name: bpmn_process_guid + in: query + required: false + description: The guid of the bpmn process to get the tasks for. + schema: + type: string get: tags: - Process Instances diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/models/bpmn_process.py b/spiffworkflow-backend/src/spiffworkflow_backend/models/bpmn_process.py index 22bdfa70a..c38fed7bb 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/models/bpmn_process.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/models/bpmn_process.py @@ -8,6 +8,10 @@ from spiffworkflow_backend.models.db import db from spiffworkflow_backend.models.db import SpiffworkflowBaseDBModel +class BpmnProcessNotFoundError(Exception): + pass + + # properties_json attributes: # "last_task", # guid generated by spiff # "root", # guid generated by spiff @@ -24,7 +28,8 @@ class BpmnProcessModel(SpiffworkflowBaseDBModel): ) bpmn_process_definition = relationship(BpmnProcessDefinitionModel) - parent_process_id: int | None = db.Column(ForeignKey("bpmn_process.id"), nullable=True, index=True) + top_level_process_id: int | None = db.Column(ForeignKey("bpmn_process.id"), nullable=True, index=True) + direct_parent_process_id: int | None = db.Column(ForeignKey("bpmn_process.id"), nullable=True, index=True) properties_json: dict = db.Column(db.JSON, nullable=False) json_data_hash: str = db.Column(db.String(255), nullable=False, index=True) diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/routes/process_instances_controller.py b/spiffworkflow-backend/src/spiffworkflow_backend/routes/process_instances_controller.py index 59399f2f2..f75df6c12 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/routes/process_instances_controller.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/routes/process_instances_controller.py @@ -1,5 +1,7 @@ """APIs for dealing with process groups, process models, and process instances.""" import base64 +from spiffworkflow_backend.services.task_service import TaskService +from sqlalchemy.orm import aliased from spiffworkflow_backend.models.bpmn_process import BpmnProcessModel import json from typing import Any @@ -560,6 +562,7 @@ def process_instance_task_list_without_task_data_for_me( all_tasks: bool = False, spiff_step: int = 0, most_recent_tasks_only: bool = False, + bpmn_process_guid: Optional[str] = None, ) -> flask.wrappers.Response: """Process_instance_task_list_without_task_data_for_me.""" process_instance = _find_process_instance_for_me_or_raise(process_instance_id) @@ -569,6 +572,7 @@ def process_instance_task_list_without_task_data_for_me( all_tasks=all_tasks, spiff_step=spiff_step, most_recent_tasks_only=most_recent_tasks_only, + bpmn_process_guid=bpmn_process_guid ) @@ -578,6 +582,7 @@ def process_instance_task_list_without_task_data( all_tasks: bool = False, spiff_step: int = 0, most_recent_tasks_only: bool = False, + bpmn_process_guid: Optional[str] = None, ) -> flask.wrappers.Response: """Process_instance_task_list_without_task_data.""" process_instance = _find_process_instance_by_id_or_raise(process_instance_id) @@ -587,12 +592,14 @@ def process_instance_task_list_without_task_data( all_tasks=all_tasks, spiff_step=spiff_step, most_recent_tasks_only=most_recent_tasks_only, + bpmn_process_guid=bpmn_process_guid ) def process_instance_task_list( _modified_process_model_identifier: str, process_instance: 
ProcessInstanceModel, + bpmn_process_guid: Optional[str] = None, all_tasks: bool = False, spiff_step: int = 0, to_task_guid: Optional[str] = None, @@ -644,9 +651,14 @@ def process_instance_task_list( # state: string; # typename: string; - # calling_subprocess_task_guid: string; - # call_activity_process_bpmn_identifier?: string; + # calling_subprocess_task_guid: string; -> bpmn_process_direct_parent_guid + # call_activity_process_bpmn_identifier?: string; -> bpmn_process_direct_parent_bpmn_identifier + bpmn_process_ids = [] + if bpmn_process_guid: + bpmn_process = BpmnProcessModel.query.filter_by(guid=bpmn_process_guid).first() + bpmn_processes = TaskService.bpmn_process_and_descendants([bpmn_process]) + bpmn_process_ids = [p.id for p in bpmn_processes] task_model_query = db.session.query(TaskModel).filter( TaskModel.process_instance_id == process_instance.id, @@ -664,23 +676,39 @@ def process_instance_task_list( ) task_model_query = task_model_query.filter(TaskModel.end_in_seconds <= to_task_model.end_in_seconds) + bpmn_process_alias = aliased(BpmnProcessModel) + direct_parent_bpmn_process_alias = aliased(BpmnProcessModel) + direct_parent_bpmn_process_definition_alias = aliased(BpmnProcessDefinitionModel) + task_model_query = ( task_model_query.order_by( - ProcessInstanceEventModel.timestamp.desc(), ProcessInstanceEventModel.id.desc() # type: ignore + TaskModel.id.desc() # type: ignore ) .join(TaskDefinitionModel, TaskDefinitionModel.id == TaskModel.task_definition_id) - .join(BpmnProcessModel, BpmnProcessModel.id == TaskModel.bpmn_process_id) + .join(bpmn_process_alias, bpmn_process_alias.id == TaskModel.bpmn_process_id) + .outerjoin(direct_parent_bpmn_process_alias, direct_parent_bpmn_process_alias.id == bpmn_process_alias.direct_parent_process_id) + .outerjoin(direct_parent_bpmn_process_definition_alias, direct_parent_bpmn_process_definition_alias.id == direct_parent_bpmn_process_alias.bpmn_process_definition_id) .join( BpmnProcessDefinitionModel, BpmnProcessDefinitionModel.id == TaskDefinitionModel.bpmn_process_definition_id ) .add_columns( BpmnProcessDefinitionModel.bpmn_identifier.label("bpmn_process_definition_identifier"), # type: ignore BpmnProcessDefinitionModel.bpmn_name.label("bpmn_process_definition_name"), # type: ignore - TaskDefinitionModel.bpmn_identifier.label("task_definition_identifier"), # type: ignore - TaskDefinitionModel.bpmn_name.label("task_definition_name"), # type: ignore - TaskDefinitionModel.typename.label("bpmn_task_type"), # type: ignore + direct_parent_bpmn_process_alias.guid.label("bpmn_process_direct_parent_guid"), + direct_parent_bpmn_process_definition_alias.bpmn_identifier.label("bpmn_process_direct_parent_bpmn_identifier"), + TaskDefinitionModel.bpmn_identifier, + TaskDefinitionModel.bpmn_name, + TaskDefinitionModel.typename, + TaskDefinitionModel.properties_json.label('task_definition_properties_json'), # type: ignore ) ) + + if len(bpmn_process_ids) > 0: + print(f"bpmn_process_ids: {bpmn_process_ids}") + task_model_query = ( + task_model_query.filter(bpmn_process_alias.id.in_(bpmn_process_ids)) + ) + task_models = task_model_query.all() # processor = ProcessInstanceProcessor(process_instance) diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/services/process_instance_processor.py b/spiffworkflow-backend/src/spiffworkflow_backend/services/process_instance_processor.py index ea59c4141..fdd42cb92 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/services/process_instance_processor.py +++ 
b/spiffworkflow-backend/src/spiffworkflow_backend/services/process_instance_processor.py @@ -687,7 +687,7 @@ class ProcessInstanceProcessor: single_bpmn_process_dict = cls._get_bpmn_process_dict(bpmn_process, get_tasks=True) spiff_bpmn_process_dict.update(single_bpmn_process_dict) - bpmn_subprocesses = BpmnProcessModel.query.filter_by(parent_process_id=bpmn_process.id).all() + bpmn_subprocesses = BpmnProcessModel.query.filter_by(top_level_process_id=bpmn_process.id).all() bpmn_subprocess_id_to_guid_mappings = {} for bpmn_subprocess in bpmn_subprocesses: bpmn_subprocess_id_to_guid_mappings[bpmn_subprocess.id] = bpmn_subprocess.guid diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/services/task_service.py b/spiffworkflow-backend/src/spiffworkflow_backend/services/task_service.py index 5a03f3877..fa9024061 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/services/task_service.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/services/task_service.py @@ -13,7 +13,7 @@ from SpiffWorkflow.task import TaskStateNames from sqlalchemy.dialects.mysql import insert as mysql_insert from sqlalchemy.dialects.postgresql import insert as postgres_insert -from spiffworkflow_backend.models.bpmn_process import BpmnProcessModel +from spiffworkflow_backend.models.bpmn_process import BpmnProcessModel, BpmnProcessNotFoundError from spiffworkflow_backend.models.db import db from spiffworkflow_backend.models.json_data import JsonDataModel # noqa: F401 from spiffworkflow_backend.models.process_instance import ProcessInstanceModel @@ -144,7 +144,7 @@ class TaskService: bpmn_process, new_task_models, new_json_data_dicts = cls.add_bpmn_process( bpmn_process_dict=serializer.workflow_to_dict(subprocess), process_instance=process_instance, - bpmn_process_parent=process_instance.bpmn_process, + top_level_process=process_instance.bpmn_process, bpmn_process_guid=subprocess_guid, bpmn_definition_to_task_definitions_mappings=bpmn_definition_to_task_definitions_mappings, spiff_workflow=spiff_workflow, @@ -160,7 +160,7 @@ class TaskService: bpmn_definition_to_task_definitions_mappings: dict, spiff_workflow: BpmnWorkflow, serializer: BpmnWorkflowSerializer, - bpmn_process_parent: Optional[BpmnProcessModel] = None, + top_level_process: Optional[BpmnProcessModel] = None, bpmn_process_guid: Optional[str] = None, ) -> Tuple[BpmnProcessModel, dict[str, TaskModel], dict[str, JsonDataDict]]: """This creates and adds a bpmn_process to the Db session. 
@@ -182,9 +182,9 @@ class TaskService: new_json_data_dicts: dict[str, JsonDataDict] = {} bpmn_process = None - if bpmn_process_parent is not None: + if top_level_process is not None: bpmn_process = BpmnProcessModel.query.filter_by( - parent_process_id=bpmn_process_parent.id, guid=bpmn_process_guid + top_level_process_id=top_level_process.id, guid=bpmn_process_guid ).first() elif process_instance.bpmn_process_id is not None: bpmn_process = process_instance.bpmn_process @@ -194,6 +194,28 @@ class TaskService: bpmn_process_is_new = True bpmn_process = BpmnProcessModel(guid=bpmn_process_guid) + bpmn_process_definition = bpmn_definition_to_task_definitions_mappings[spiff_workflow.spec.name][ + "bpmn_process_definition" + ] + bpmn_process.bpmn_process_definition = bpmn_process_definition + + if top_level_process is not None: + subprocesses = spiff_workflow._get_outermost_workflow().subprocesses + direct_bpmn_process_parent = top_level_process + for subprocess_guid, subprocess in subprocesses.items(): + if subprocess == spiff_workflow.outer_workflow: + direct_bpmn_process_parent = BpmnProcessModel.query.filter_by(guid=str(subprocess_guid)).first() + if direct_bpmn_process_parent is None: + raise BpmnProcessNotFoundError( + f"Could not find bpmn process with guid: {str(subprocess_guid)} " + f"while searching for direct parent process of {bpmn_process_guid}." + ) + + if direct_bpmn_process_parent is None: + raise BpmnProcessNotFoundError(f"Could not find a direct bpmn process parent for guid: {bpmn_process_guid}") + + bpmn_process.direct_parent_process_id = direct_bpmn_process_parent.id + # Point the root id to the Start task instead of the Root task # since we are ignoring the Root task. for task_id, task_properties in tasks.items(): @@ -206,15 +228,10 @@ class TaskService: if bpmn_process_json_data is not None: new_json_data_dicts[bpmn_process_json_data["hash"]] = bpmn_process_json_data - if bpmn_process_parent is None: + if top_level_process is None: process_instance.bpmn_process = bpmn_process - elif bpmn_process.parent_process_id is None: - bpmn_process.parent_process_id = bpmn_process_parent.id - - bpmn_process_definition = bpmn_definition_to_task_definitions_mappings[spiff_workflow.spec.name][ - "bpmn_process_definition" - ] - bpmn_process.bpmn_process_definition = bpmn_process_definition + elif bpmn_process.top_level_process_id is None: + bpmn_process.top_level_process_id = top_level_process.id # Since we bulk insert tasks later we need to add the bpmn_process to the session # to ensure we have an id. 
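The next hunk adds a recursive `bpmn_process_and_descendants` helper built on the new `direct_parent_process_id` column; the task-list endpoint reworked in patch 03 uses it to scope returned tasks to a bpmn process and its nested subprocesses. As a minimal usage sketch only (it reuses names visible in these patches, such as `TaskModel.bpmn_process_id` and the `db.session` query style, and is not part of the diff itself):

    # Illustrative only: collect a bpmn process plus all of its descendant
    # subprocesses, then restrict the task query to that subtree.
    bpmn_process = BpmnProcessModel.query.filter_by(guid=bpmn_process_guid).first()
    if bpmn_process is not None:
        bpmn_processes = TaskService.bpmn_process_and_descendants([bpmn_process])
        bpmn_process_ids = [p.id for p in bpmn_processes]
        task_models = (
            db.session.query(TaskModel)
            .filter(TaskModel.process_instance_id == process_instance.id)
            .filter(TaskModel.bpmn_process_id.in_(bpmn_process_ids))
            .all()
        )
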
@@ -285,6 +302,14 @@ class TaskService: setattr(task_model, task_model_data_column, task_data_hash) return json_data_dict + @classmethod + def bpmn_process_and_descendants(cls, bpmn_processes: list[BpmnProcessModel]) -> list[BpmnProcessModel]: + bpmn_process_ids = [p.id for p in bpmn_processes] + direct_children = BpmnProcessModel.query.filter(BpmnProcessModel.direct_parent_process_id.in_(bpmn_process_ids)).all() # type: ignore + if len(direct_children) > 0: + return bpmn_processes + cls.bpmn_process_and_descendants(direct_children) + return bpmn_processes + @classmethod def _create_task( cls, diff --git a/spiffworkflow-backend/tests/data/manual_task_with_subprocesses/manual_task_with_subprocesses.bpmn b/spiffworkflow-backend/tests/data/manual_task_with_subprocesses/manual_task_with_subprocesses.bpmn index 939c8c0be..680903f5b 100644 --- a/spiffworkflow-backend/tests/data/manual_task_with_subprocesses/manual_task_with_subprocesses.bpmn +++ b/spiffworkflow-backend/tests/data/manual_task_with_subprocesses/manual_task_with_subprocesses.bpmn @@ -151,4 +151,4 @@ except: - + \ No newline at end of file diff --git a/spiffworkflow-backend/tests/data/manual_task_with_subprocesses/test_process_to_call.bpmn b/spiffworkflow-backend/tests/data/manual_task_with_subprocesses/test_process_to_call.bpmn index 299f078e9..afda130a7 100644 --- a/spiffworkflow-backend/tests/data/manual_task_with_subprocesses/test_process_to_call.bpmn +++ b/spiffworkflow-backend/tests/data/manual_task_with_subprocesses/test_process_to_call.bpmn @@ -1,38 +1,71 @@ - - Flow_06g687y - - - - Flow_01e21r0 + + Flow_095sred - - - Flow_06g687y - Flow_01e21r0 - set_in_test_process_to_call_script = 1 - + + Flow_1qsx5et + + + Flow_1qsx5et + Flow_095sred + + Flow_12zb3j0 + + + Flow_12zb3j0 + Flow_0iu4d71 + set_in_test_process_to_call_script = 1 + + + Flow_0iu4d71 + + + + + + - - + + + + + + + + + + + + + + + + + + + + + + + - + - - + + - - - + + + - - - + + + diff --git a/spiffworkflow-backend/tests/spiffworkflow_backend/unit/test_process_instance_processor.py b/spiffworkflow-backend/tests/spiffworkflow_backend/unit/test_process_instance_processor.py index 70f97328b..0b80a46c9 100644 --- a/spiffworkflow-backend/tests/spiffworkflow_backend/unit/test_process_instance_processor.py +++ b/spiffworkflow-backend/tests/spiffworkflow_backend/unit/test_process_instance_processor.py @@ -326,11 +326,11 @@ class TestProcessInstanceProcessor(BaseTest): "manual_task": first_data_set, "top_level_subprocess_script": second_data_set, "top_level_subprocess": second_data_set, - "test_process_to_call_script": third_data_set, + "test_process_to_call_subprocess_script": third_data_set, "top_level_call_activity": third_data_set, "end_event_of_manual_task_model": third_data_set, "top_level_subprocess_script_second": fourth_data_set, - "test_process_to_call_script_second": fourth_data_set, + "test_process_to_call_subprocess_script_second": fourth_data_set, } spiff_tasks_checked_once: list = [] @@ -365,7 +365,7 @@ class TestProcessInstanceProcessor(BaseTest): assert len(all_spiff_tasks) > 1 for spiff_task in all_spiff_tasks: assert spiff_task.state == TaskState.COMPLETED - assert_spiff_task_is_in_process("test_process_to_call_script", "test_process_to_call") + assert_spiff_task_is_in_process("test_process_to_call_subprocess_script", "test_process_to_call_subprocess") assert_spiff_task_is_in_process("top_level_subprocess_script", "top_level_subprocess") assert_spiff_task_is_in_process("top_level_script", "top_level_process") @@ -378,6 +378,21 @@ class 
TestProcessInstanceProcessor(BaseTest): assert bpmn_process_definition.bpmn_identifier == "test_process_to_call" assert bpmn_process_definition.bpmn_name == "Test Process To Call" + # Check that the direct parent of the called activity subprocess task is the + # name of the process that was called from the activity. + if spiff_task.task_spec.name == "test_process_to_call_subprocess_script": + task_model = TaskModel.query.filter_by(guid=str(spiff_task.id)).first() + assert task_model is not None + bpmn_process = task_model.bpmn_process + assert bpmn_process is not None + bpmn_process_definition = bpmn_process.bpmn_process_definition + assert bpmn_process_definition is not None + assert bpmn_process_definition.bpmn_identifier == "test_process_to_call_subprocess" + assert bpmn_process.direct_parent_process_id is not None + direct_parent_process = BpmnProcessModel.query.filter_by(id=bpmn_process.direct_parent_process_id).first() + assert direct_parent_process is not None + assert direct_parent_process.bpmn_process_definition.bpmn_identifier == "test_process_to_call" + assert processor.get_data() == fifth_data_set def test_does_not_recreate_human_tasks_on_multiple_saves( diff --git a/spiffworkflow-frontend/src/components/ReactDiagramEditor.tsx b/spiffworkflow-frontend/src/components/ReactDiagramEditor.tsx index 126f0c4f2..e3989c63c 100644 --- a/spiffworkflow-frontend/src/components/ReactDiagramEditor.tsx +++ b/spiffworkflow-frontend/src/components/ReactDiagramEditor.tsx @@ -60,14 +60,14 @@ import HttpService from '../services/HttpService'; import ButtonWithConfirmation from './ButtonWithConfirmation'; import { getBpmnProcessIdentifiers, makeid } from '../helpers'; import { useUriListForPermissions } from '../hooks/UriListForPermissions'; -import { PermissionsToCheck, ProcessInstanceTask } from '../interfaces'; +import { PermissionsToCheck, Task } from '../interfaces'; import { usePermissionFetcher } from '../hooks/PermissionService'; type OwnProps = { processModelId: string; diagramType: string; - readyOrWaitingProcessInstanceTasks?: ProcessInstanceTask[] | null; - completedProcessInstanceTasks?: ProcessInstanceTask[] | null; + readyOrWaitingProcessInstanceTasks?: Task[] | null; + completedProcessInstanceTasks?: Task[] | null; saveDiagram?: (..._args: any[]) => any; onDeleteFile?: (..._args: any[]) => any; isPrimaryFile?: boolean; @@ -364,18 +364,18 @@ export default function ReactDiagramEditor({ function highlightBpmnIoElement( canvas: any, - processInstanceTask: ProcessInstanceTask, + task: Task, bpmnIoClassName: string, bpmnProcessIdentifiers: string[] ) { - if (checkTaskCanBeHighlighted(processInstanceTask.name)) { + if (checkTaskCanBeHighlighted(task.bpmn_identifier)) { try { if ( bpmnProcessIdentifiers.includes( - processInstanceTask.process_identifier + task.bpmn_process_definition_identifier ) ) { - canvas.addMarker(processInstanceTask.name, bpmnIoClassName); + canvas.addMarker(task.bpmn_identifier, bpmnIoClassName); } } catch (bpmnIoError: any) { // the task list also contains task for processes called from call activities which will diff --git a/spiffworkflow-frontend/src/interfaces.ts b/spiffworkflow-frontend/src/interfaces.ts index aaf11ade6..8b61f4745 100644 --- a/spiffworkflow-frontend/src/interfaces.ts +++ b/spiffworkflow-frontend/src/interfaces.ts @@ -21,17 +21,26 @@ export interface RecentProcessModel { processModelDisplayName: string; } +export interface TaskDefinitionPropertiesJson { + spec: string; +} + export interface Task { + id: number; guid: string; bpmn_identifier: 
string; bpmn_name?: string; - calling_subprocess_task_guid: string; + bpmn_process_direct_parent_guid: string; + bpmn_process_definition_identifier: string; data: any; state: string; typename: string; - call_activity_process_bpmn_identifier?: string; + task_definition_properties_json: TaskDefinitionPropertiesJson; + + // TOOD: DELETE THIS! + task_spiff_step?: number; } export interface ProcessInstanceTask { diff --git a/spiffworkflow-frontend/src/routes/ProcessInstanceShow.tsx b/spiffworkflow-frontend/src/routes/ProcessInstanceShow.tsx index 36c06d23b..1b555a04f 100644 --- a/spiffworkflow-frontend/src/routes/ProcessInstanceShow.tsx +++ b/spiffworkflow-frontend/src/routes/ProcessInstanceShow.tsx @@ -46,7 +46,8 @@ import { ProcessData, ProcessInstance, ProcessInstanceMetadata, - ProcessInstanceTask, + Task, + TaskDefinitionPropertiesJson, } from '../interfaces'; import { usePermissionFetcher } from '../hooks/PermissionService'; import ProcessInstanceClass from '../classes/ProcessInstanceClass'; @@ -64,10 +65,9 @@ export default function ProcessInstanceShow({ variant }: OwnProps) { const [processInstance, setProcessInstance] = useState(null); - const [tasks, setTasks] = useState(null); + const [tasks, setTasks] = useState(null); const [tasksCallHadError, setTasksCallHadError] = useState(false); - const [taskToDisplay, setTaskToDisplay] = - useState(null); + const [taskToDisplay, setTaskToDisplay] = useState(null); const [taskDataToDisplay, setTaskDataToDisplay] = useState(''); const [showTaskDataLoading, setShowTaskDataLoading] = useState(false); @@ -148,6 +148,10 @@ export default function ProcessInstanceShow({ variant }: OwnProps) { if (typeof params.spiff_step !== 'undefined') { taskParams = `${taskParams}&spiff_step=${params.spiff_step}`; } + const bpmnProcessGuid = searchParams.get('bpmn_process_guid'); + if (bpmnProcessGuid) { + taskParams = `${taskParams}&bpmn_process_guid=${bpmnProcessGuid}`; + } let taskPath = ''; if (ability.can('GET', taskListPath)) { taskPath = `${taskListPath}${taskParams}`; @@ -213,14 +217,7 @@ export default function ProcessInstanceShow({ variant }: OwnProps) { const getTaskIds = () => { const taskIds = { completed: [], readyOrWaiting: [] }; if (tasks) { - const callingSubprocessId = searchParams.get('call_activity_task_id'); - tasks.forEach(function getUserTasksElement(task: ProcessInstanceTask) { - if ( - callingSubprocessId && - callingSubprocessId !== task.calling_subprocess_task_id - ) { - return null; - } + tasks.forEach(function getUserTasksElement(task: Task) { if (task.state === 'COMPLETED') { (taskIds.completed as any).push(task); } @@ -251,13 +248,13 @@ export default function ProcessInstanceShow({ variant }: OwnProps) { const spiffStepLink = (label: any, spiffStep: number) => { const processIdentifier = searchParams.get('process_identifier'); - const callActivityTaskId = searchParams.get('call_activity_task_id'); + const callActivityTaskId = searchParams.get('bpmn_process_guid'); const queryParamArray = []; if (processIdentifier) { queryParamArray.push(`process_identifier=${processIdentifier}`); } if (callActivityTaskId) { - queryParamArray.push(`call_activity_task_id=${callActivityTaskId}`); + queryParamArray.push(`bpmn_process_guid=${callActivityTaskId}`); } let queryParams = ''; if (queryParamArray.length > 0) { @@ -509,7 +506,7 @@ export default function ProcessInstanceShow({ variant }: OwnProps) { return
; }; - const processTaskResult = (result: ProcessInstanceTask) => { + const processTaskResult = (result: Task) => { if (result == null) { setTaskDataToDisplay(''); } else { @@ -518,7 +515,7 @@ export default function ProcessInstanceShow({ variant }: OwnProps) { setShowTaskDataLoading(false); }; - const initializeTaskDataToDisplay = (task: ProcessInstanceTask | null) => { + const initializeTaskDataToDisplay = (task: Task | null) => { if ( task && task.state === 'COMPLETED' && @@ -526,7 +523,7 @@ export default function ProcessInstanceShow({ variant }: OwnProps) { ) { setShowTaskDataLoading(true); HttpService.makeCallToBackend({ - path: `${targetUris.processInstanceTaskDataPath}/${task.task_spiff_step}`, + path: `${targetUris.processInstanceTaskDataPath}/${task.id}`, httpMethod: 'GET', successCallback: processTaskResult, failureCallback: (error: any) => { @@ -577,13 +574,12 @@ export default function ProcessInstanceShow({ variant }: OwnProps) { successCallback: handleProcessDataShowResponse, }); } else if (tasks) { - const matchingTask: any = tasks.find((task: any) => { - const callingSubprocessId = searchParams.get('call_activity_task_id'); + const matchingTask: Task | undefined = tasks.find((task: Task) => { return ( - (!callingSubprocessId || - callingSubprocessId === task.calling_subprocess_task_id) && - task.name === shapeElement.id && - bpmnProcessIdentifiers.includes(task.process_identifier) + task.bpmn_identifier === shapeElement.id && + bpmnProcessIdentifiers.includes( + task.bpmn_process_definition_identifier + ) ); }); if (matchingTask) { @@ -618,7 +614,7 @@ export default function ProcessInstanceShow({ variant }: OwnProps) { httpMethod: 'POST', successCallback: processScriptUnitTestCreateResult, postBody: { - bpmn_task_identifier: taskToUse.name, + bpmn_task_identifier: taskToUse.bpmn_identifier, input_json: previousTask.data, expected_output_json: taskToUse.data, }, @@ -634,7 +630,7 @@ export default function ProcessInstanceShow({ variant }: OwnProps) { ]; return ( (task.state === 'WAITING' && - subprocessTypes.filter((t) => t === task.type).length > 0) || + subprocessTypes.filter((t) => t === task.typename).length > 0) || task.state === 'READY' ); }; @@ -656,7 +652,7 @@ export default function ProcessInstanceShow({ variant }: OwnProps) { processInstance && processInstance.status === 'waiting' && ability.can('POST', targetUris.processInstanceSendEventPath) && - taskTypes.filter((t) => t === task.type).length > 0 && + taskTypes.filter((t) => t === task.typename).length > 0 && task.state === 'WAITING' && showingLastSpiffStep() ); @@ -717,7 +713,7 @@ export default function ProcessInstanceShow({ variant }: OwnProps) { setEditingTaskData(false); const dataObject = taskDataStringToObject(taskDataToDisplay); if (taskToDisplay) { - const taskToDisplayCopy: ProcessInstanceTask = { + const taskToDisplayCopy: Task = { ...taskToDisplay, data: dataObject, }; // spread operator @@ -768,11 +764,11 @@ export default function ProcessInstanceShow({ variant }: OwnProps) { }); }; - const taskDisplayButtons = (task: any) => { + const taskDisplayButtons = (task: Task) => { const buttons = []; if ( - task.type === 'Script Task' && + task.typename === 'Script Task' && ability.can('PUT', targetUris.processModelShowPath) ) { buttons.push( @@ -785,11 +781,15 @@ export default function ProcessInstanceShow({ variant }: OwnProps) { ); } - if (task.type === 'Call Activity') { + if (task.typename === 'CallActivity') { + console.log('task', task) + const taskDefinitionPropertiesJson: 
TaskDefinitionPropertiesJson = + task.task_definition_properties_json; + console.log('taskDefinitionPropertiesJson', taskDefinitionPropertiesJson) buttons.push( View Call Activity Diagram @@ -971,12 +971,15 @@ export default function ProcessInstanceShow({ variant }: OwnProps) { }; const taskUpdateDisplayArea = () => { - const taskToUse: any = { ...taskToDisplay, data: taskDataToDisplay }; + if (!taskToDisplay) { + return null; + } + const taskToUse: Task = { ...taskToDisplay, data: taskDataToDisplay }; const candidateEvents: any = getEvents(taskToUse); if (taskToDisplay) { - let taskTitleText = taskToUse.id; - if (taskToUse.title) { - taskTitleText += ` (${taskToUse.title})`; + let taskTitleText = taskToUse.guid; + if (taskToUse.bpmn_name) { + taskTitleText += ` (${taskToUse.bpmn_name})`; } return ( - {taskToUse.name} ( - {taskToUse.type} + {taskToUse.bpmn_identifier} ( + {taskToUse.typename} ): {taskToUse.state} {taskDisplayButtons(taskToUse)} From b957954d30a1e18b8a959af6627dad69eddfc094 Mon Sep 17 00:00:00 2001 From: jasquat Date: Mon, 20 Mar 2023 17:05:15 -0400 Subject: [PATCH 04/20] get task data from json data table now w/ burnettk --- .../src/spiffworkflow_backend/api.yml | 18 ++++----- .../src/spiffworkflow_backend/models/task.py | 2 + .../routes/tasks_controller.py | 38 +++++-------------- .../src/routes/ProcessInstanceShow.tsx | 2 +- 4 files changed, 22 insertions(+), 38 deletions(-) diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/api.yml b/spiffworkflow-backend/src/spiffworkflow_backend/api.yml index 7cffde1c3..43d32c5ec 100755 --- a/spiffworkflow-backend/src/spiffworkflow_backend/api.yml +++ b/spiffworkflow-backend/src/spiffworkflow_backend/api.yml @@ -1585,7 +1585,7 @@ paths: items: $ref: "#/components/schemas/Task" - /task-data/{modified_process_model_identifier}/{process_instance_id}/{spiff_step}: + /task-data/{modified_process_model_identifier}/{process_instance_id}/{task_guid}: parameters: - name: modified_process_model_identifier in: path @@ -1599,12 +1599,12 @@ paths: description: The unique id of an existing process instance. schema: type: integer - - name: spiff_step + - name: task_guid in: path required: true - description: If set will return the tasks as they were during a specific step of execution. + description: The guid of the task to show. schema: - type: integer + type: string get: operationId: spiffworkflow_backend.routes.tasks_controller.task_data_show summary: Get task data for a single task in a spiff step. @@ -1638,12 +1638,12 @@ paths: description: The unique id of the task. schema: type: string - - name: spiff_step - in: query - required: false - description: If set will return the tasks as they were during a specific step of execution. + - name: task_guid + in: path + required: true + description: The guid of the task to show. 
schema: - type: integer + type: string put: operationId: spiffworkflow_backend.routes.process_api_blueprint.task_data_update summary: Update the task data for requested instance and task diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/models/task.py b/spiffworkflow-backend/src/spiffworkflow_backend/models/task.py index 70a60ae1b..a3e182c08 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/models/task.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/models/task.py @@ -66,6 +66,8 @@ class TaskModel(SpiffworkflowBaseDBModel): start_in_seconds: float = db.Column(db.DECIMAL(17, 6)) end_in_seconds: Union[float, None] = db.Column(db.DECIMAL(17, 6)) + data: Optional[dict] = None + def python_env_data(self) -> dict: return JsonDataModel.find_data_dict_by_hash(self.python_env_data_hash) diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/routes/tasks_controller.py b/spiffworkflow-backend/src/spiffworkflow_backend/routes/tasks_controller.py index ad9868e63..37c29575c 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/routes/tasks_controller.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/routes/tasks_controller.py @@ -1,5 +1,6 @@ """APIs for dealing with process groups, process models, and process instances.""" import json +from spiffworkflow_backend.models.task import TaskModel # noqa: F401 import os import uuid from sys import exc_info @@ -169,38 +170,19 @@ def task_list_for_my_groups( def task_data_show( modified_process_model_identifier: str, process_instance_id: int, - spiff_step: int = 0, + task_guid: int = 0, ) -> flask.wrappers.Response: - process_instance = _find_process_instance_by_id_or_raise(process_instance_id) - step_detail = ( - db.session.query(SpiffStepDetailsModel) - .filter( - SpiffStepDetailsModel.process_instance_id == process_instance.id, - SpiffStepDetailsModel.spiff_step == spiff_step, - ) - .first() - ) - - if step_detail is None: + task_model = TaskModel.query.filter_by(guid=task_guid, process_instance_id=process_instance_id).first() + if task_model is None: raise ApiError( - error_code="spiff_step_for_proces_instance_not_found", - message="The given spiff step for the given process instance could not be found.", + error_code="task_not_found", + message=( + f"Cannot find a task with guid '{task_guid}' for process instance '{process_instance_id}'" + ), status_code=400, ) - - processor = ProcessInstanceProcessor(process_instance) - spiff_task = processor.__class__.get_task_by_bpmn_identifier( - step_detail.bpmn_task_identifier, processor.bpmn_process_instance - ) - task_data = step_detail.task_json["task_data"] | step_detail.task_json["python_env"] - task = ProcessInstanceService.spiff_task_to_api_task( - processor, - spiff_task, - task_spiff_step=spiff_step, - ) - task.data = task_data - - return make_response(jsonify(task), 200) + task_model.data = task_model.json_data() + return make_response(jsonify(task_model), 200) def _munge_form_ui_schema_based_on_hidden_fields_in_task_data(task: Task) -> None: diff --git a/spiffworkflow-frontend/src/routes/ProcessInstanceShow.tsx b/spiffworkflow-frontend/src/routes/ProcessInstanceShow.tsx index 1b555a04f..33b29ae35 100644 --- a/spiffworkflow-frontend/src/routes/ProcessInstanceShow.tsx +++ b/spiffworkflow-frontend/src/routes/ProcessInstanceShow.tsx @@ -523,7 +523,7 @@ export default function ProcessInstanceShow({ variant }: OwnProps) { ) { setShowTaskDataLoading(true); HttpService.makeCallToBackend({ - path: `${targetUris.processInstanceTaskDataPath}/${task.id}`, + path: 
`${targetUris.processInstanceTaskDataPath}/${task.guid}`, httpMethod: 'GET', successCallback: processTaskResult, failureCallback: (error: any) => { From 94caddf9de12fdeaa9ef96407250f08bbceaae85 Mon Sep 17 00:00:00 2001 From: jasquat Date: Mon, 20 Mar 2023 17:29:53 -0400 Subject: [PATCH 05/20] send to task guid to signify how far to go with the tasks w/ burnettk --- .../src/spiffworkflow_backend/api.yml | 24 +++---- .../routes/process_instances_controller.py | 8 ++- .../src/routes/AdminRoutes.tsx | 4 +- .../src/routes/ProcessInstanceShow.tsx | 71 +++++-------------- 4 files changed, 37 insertions(+), 70 deletions(-) diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/api.yml b/spiffworkflow-backend/src/spiffworkflow_backend/api.yml index 43d32c5ec..1a21e6439 100755 --- a/spiffworkflow-backend/src/spiffworkflow_backend/api.yml +++ b/spiffworkflow-backend/src/spiffworkflow_backend/api.yml @@ -907,12 +907,6 @@ paths: description: If true, this wil return all tasks associated with the process instance and not just user tasks. schema: type: boolean - - name: spiff_step - in: query - required: false - description: If set will return the tasks as they were during a specific step of execution. - schema: - type: integer - name: most_recent_tasks_only in: query required: false @@ -925,6 +919,12 @@ paths: description: The guid of the bpmn process to get the tasks for. schema: type: string + - name: to_task_guid + in: query + required: false + description: Get the tasks only up to the given guid. + schema: + type: string get: tags: - Process Instances @@ -966,12 +966,6 @@ paths: description: If true, this wil return all tasks associated with the process instance and not just user tasks. schema: type: boolean - - name: spiff_step - in: query - required: false - description: If set will return the tasks as they were during a specific step of execution. - schema: - type: integer - name: most_recent_tasks_only in: query required: false @@ -984,6 +978,12 @@ paths: description: The guid of the bpmn process to get the tasks for. schema: type: string + - name: to_task_guid + in: query + required: false + description: Get the tasks only up to the given guid. 
+ schema: + type: string get: tags: - Process Instances diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/routes/process_instances_controller.py b/spiffworkflow-backend/src/spiffworkflow_backend/routes/process_instances_controller.py index f75df6c12..ccc463584 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/routes/process_instances_controller.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/routes/process_instances_controller.py @@ -563,6 +563,7 @@ def process_instance_task_list_without_task_data_for_me( spiff_step: int = 0, most_recent_tasks_only: bool = False, bpmn_process_guid: Optional[str] = None, + to_task_guid: Optional[str] = None, ) -> flask.wrappers.Response: """Process_instance_task_list_without_task_data_for_me.""" process_instance = _find_process_instance_for_me_or_raise(process_instance_id) @@ -572,7 +573,8 @@ def process_instance_task_list_without_task_data_for_me( all_tasks=all_tasks, spiff_step=spiff_step, most_recent_tasks_only=most_recent_tasks_only, - bpmn_process_guid=bpmn_process_guid + bpmn_process_guid=bpmn_process_guid, + to_task_guid=to_task_guid, ) @@ -583,6 +585,7 @@ def process_instance_task_list_without_task_data( spiff_step: int = 0, most_recent_tasks_only: bool = False, bpmn_process_guid: Optional[str] = None, + to_task_guid: Optional[str] = None, ) -> flask.wrappers.Response: """Process_instance_task_list_without_task_data.""" process_instance = _find_process_instance_by_id_or_raise(process_instance_id) @@ -592,7 +595,8 @@ def process_instance_task_list_without_task_data( all_tasks=all_tasks, spiff_step=spiff_step, most_recent_tasks_only=most_recent_tasks_only, - bpmn_process_guid=bpmn_process_guid + bpmn_process_guid=bpmn_process_guid, + to_task_guid=to_task_guid, ) diff --git a/spiffworkflow-frontend/src/routes/AdminRoutes.tsx b/spiffworkflow-frontend/src/routes/AdminRoutes.tsx index d183dc013..d04d50b1f 100644 --- a/spiffworkflow-frontend/src/routes/AdminRoutes.tsx +++ b/spiffworkflow-frontend/src/routes/AdminRoutes.tsx @@ -73,7 +73,7 @@ export default function AdminRoutes() { element={} /> } /> } /> } /> { - if (processInstance && typeof params.spiff_step === 'undefined') { + if (processInstance && typeof params.to_task_guid === 'undefined') { return processInstance.spiff_step || 0; } @@ -246,7 +246,7 @@ export default function ProcessInstanceShow({ variant }: OwnProps) { return processInstance && currentSpiffStep() === processInstance.spiff_step; }; - const spiffStepLink = (label: any, spiffStep: number) => { + const completionViewLink = (label: any, taskGuid: string) => { const processIdentifier = searchParams.get('process_identifier'); const callActivityTaskId = searchParams.get('bpmn_process_guid'); const queryParamArray = []; @@ -265,29 +265,13 @@ export default function ProcessInstanceShow({ variant }: OwnProps) { {label} ); }; - const previousStepLink = () => { - if (showingFirstSpiffStep()) { - return null; - } - - return spiffStepLink(, currentSpiffStep() - 1); - }; - - const nextStepLink = () => { - if (showingLastSpiffStep()) { - return null; - } - - return spiffStepLink(, currentSpiffStep() + 1); - }; - const returnToLastSpiffStep = () => { window.location.href = processInstanceShowPageBaseUrl; }; @@ -782,10 +766,10 @@ export default function ProcessInstanceShow({ variant }: OwnProps) { } if (task.typename === 'CallActivity') { - console.log('task', task) + console.log('task', task); const taskDefinitionPropertiesJson: TaskDefinitionPropertiesJson = task.task_definition_properties_json; - 
console.log('taskDefinitionPropertiesJson', taskDefinitionPropertiesJson) + console.log('taskDefinitionPropertiesJson', taskDefinitionPropertiesJson); buttons.push( - {taskToUse.task_spiff_step ? ( -
-              Task completed at step:{' '}
-              {spiffStepLink(
-                `${taskToUse.task_spiff_step}`,
-                taskToUse.task_spiff_step
-              )}
-            ) : null}
+              {completionViewLink(
+                'View state at task completion',
+                taskToUse.guid
+              )}
{selectingEvent ? eventSelector(candidateEvents) : taskDataContainer()} @@ -1015,23 +996,6 @@ export default function ProcessInstanceShow({ variant }: OwnProps) { return null; }; - const stepsElement = () => { - if (!processInstance) { - return null; - } - return ( - - - - {previousStepLink()} - Step {currentSpiffStep()} of {processInstance.spiff_step} - {nextStepLink()} - - - - ); - }; - const buttonIcons = () => { if (!processInstance) { return null; @@ -1119,7 +1083,6 @@ export default function ProcessInstanceShow({ variant }: OwnProps) { {taskUpdateDisplayArea()} {processDataDisplayArea()} {processInstanceMetadataArea()} - {stepsElement()}
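The api.yml and frontend changes in the patch above replace step-number navigation with a task guid: the task-list endpoints now accept a `to_task_guid` query parameter ("Get the tasks only up to the given guid") alongside `most_recent_tasks_only`, and the backend limits the result to tasks whose end_in_seconds is not later than that task's. A minimal client-side sketch of exercising that filter follows; the route prefix, port, "task-info" path segment, and auth header are assumptions for illustration, and only the query-parameter names and semantics come from the patch above.

# Minimal sketch (not part of the patch series): fetch the task rows for a
# process instance, cut off at a given task guid. BASE_URL, the version prefix,
# the "task-info" path segment, and the auth header are assumptions; the query
# parameters mirror the api.yml changes above.
import requests

BASE_URL = "http://localhost:7000/v1.0"  # assumed backend location
HEADERS = {"Authorization": "Bearer <access-token>"}  # assumed auth scheme


def task_list_up_to(modified_model_id: str, process_instance_id: int, to_task_guid: str) -> list:
    """Fetch the most recent task rows for an instance, stopping at to_task_guid."""
    url = f"{BASE_URL}/process-instances/{modified_model_id}/{process_instance_id}/task-info"
    params = {"most_recent_tasks_only": "true", "to_task_guid": to_task_guid}
    response = requests.get(url, params=params, headers=HEADERS, timeout=10)
    response.raise_for_status()
    return response.json()

A later patch in this series re-marks the matching task as READY in the returned list when it was COMPLETED, which is what lets the frontend render the "state at task completion" view without step numbers.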
Date: Tue, 21 Mar 2023 10:45:10 -0400 Subject: [PATCH 06/20] use consistent types for tasks in webui instance show page and mark the to task guid task as ready in backend --- .../routes/process_instances_controller.py | 132 ++---------------- spiffworkflow-frontend/src/interfaces.ts | 20 +++ .../src/routes/ProcessInstanceShow.tsx | 77 +++++----- 3 files changed, 68 insertions(+), 161 deletions(-) diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/routes/process_instances_controller.py b/spiffworkflow-backend/src/spiffworkflow_backend/routes/process_instances_controller.py index ccc463584..43ed6cef1 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/routes/process_instances_controller.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/routes/process_instances_controller.py @@ -610,54 +610,6 @@ def process_instance_task_list( most_recent_tasks_only: bool = False, ) -> flask.wrappers.Response: """Process_instance_task_list.""" - # step_detail_query = db.session.query(SpiffStepDetailsModel).filter( - # SpiffStepDetailsModel.process_instance_id == process_instance.id, - # ) - # - # if spiff_step > 0: - # step_detail_query = step_detail_query.filter(SpiffStepDetailsModel.spiff_step <= spiff_step) - # - # step_details = step_detail_query.all() - # - # processor = ProcessInstanceProcessor(process_instance) - # full_bpmn_process_dict = processor.full_bpmn_process_dict - # tasks = full_bpmn_process_dict["tasks"] - # subprocesses = full_bpmn_process_dict["subprocesses"] - # - # steps_by_id = {step_detail.task_id: step_detail for step_detail in step_details} - # - # def restore_task(spiff_task: dict[str, Any], step_ended: float) -> None: - # if spiff_task["last_state_change"] > step_ended: - # spiff_task["state"] = Task.task_state_name_to_int("FUTURE") - # spiff_task["data"] = {} - # - # if spiff_step > 0: - # last_change = step_details[-1].end_in_seconds or 0 - # for spiff_task in tasks.values(): - # restore_task(spiff_task, last_change) - # for subprocess in subprocesses.values(): - # for spiff_task in subprocess["tasks"].values(): - # restore_task(spiff_task, last_change) - # - # bpmn_process_instance = ProcessInstanceProcessor._serializer.workflow_from_dict(full_bpmn_process_dict) - # if spiff_step > 0: - # bpmn_process_instance.complete_task_from_id(UUID(step_details[-1].task_id)) - # for subprocess_id, subprocess in bpmn_process_instance.subprocesses.items(): - # if not subprocess.is_completed(): - # task = bpmn_process_instance.get_task(subprocess_id) - # task._set_state(TaskState.WAITING) - - # guid: string; - # bpmn_identifier: string; - # - # bpmn_name?: string; - # - # state: string; - # typename: string; - - # calling_subprocess_task_guid: string; -> bpmn_process_direct_parent_guid - # call_activity_process_bpmn_identifier?: string; -> bpmn_process_direct_parent_bpmn_identifier - bpmn_process_ids = [] if bpmn_process_guid: bpmn_process = BpmnProcessModel.query.filter_by(guid=bpmn_process_guid).first() @@ -704,90 +656,24 @@ def process_instance_task_list( TaskDefinitionModel.bpmn_name, TaskDefinitionModel.typename, TaskDefinitionModel.properties_json.label('task_definition_properties_json'), # type: ignore + TaskModel.guid, + TaskModel.state, ) ) if len(bpmn_process_ids) > 0: - print(f"bpmn_process_ids: {bpmn_process_ids}") task_model_query = ( task_model_query.filter(bpmn_process_alias.id.in_(bpmn_process_ids)) ) task_models = task_model_query.all() - - # processor = ProcessInstanceProcessor(process_instance) - # full_bpmn_process_dict = 
processor.full_bpmn_process_dict - # tasks = full_bpmn_process_dict["tasks"] - # subprocesses = full_bpmn_process_dict["subprocesses"] - # - # steps_by_id = {step_detail.task_id: step_detail for step_detail in step_details} - # - # def restore_task(spiff_task: dict[str, Any], step_ended: float) -> None: - # if spiff_task["last_state_change"] > step_ended: - # spiff_task["state"] = Task.task_state_name_to_int("FUTURE") - # spiff_task["data"] = {} - # - # if spiff_step > 0: - # last_change = step_details[-1].end_in_seconds or 0 - # for spiff_task in tasks.values(): - # restore_task(spiff_task, last_change) - # for subprocess in subprocesses.values(): - # for spiff_task in subprocess["tasks"].values(): - # restore_task(spiff_task, last_change) - # - # bpmn_process_instance = ProcessInstanceProcessor._serializer.workflow_from_dict(full_bpmn_process_dict) - # if spiff_step > 0: - # bpmn_process_instance.complete_task_from_id(UUID(step_details[-1].task_id)) - # for subprocess_id, subprocess in bpmn_process_instance.subprocesses.items(): - # if not subprocess.is_completed(): - # task = bpmn_process_instance.get_task(subprocess_id) - # task._set_state(TaskState.WAITING) - - # spiff_tasks = None - # if all_tasks: - # spiff_tasks = bpmn_process_instance.get_tasks(TaskState.ANY_MASK) - # else: - # spiff_tasks = processor.get_all_user_tasks() - # - # ( - # subprocesses_by_child_task_ids, - # task_typename_by_task_id, - # ) = processor.get_subprocesses_by_child_task_ids() - # processor.get_highest_level_calling_subprocesses_by_child_task_ids( - # subprocesses_by_child_task_ids, task_typename_by_task_id - # ) - # - # spiff_tasks_to_process = spiff_tasks - # if most_recent_tasks_only: - # spiff_tasks_by_process_id_and_task_name: dict[str, SpiffTask] = {} - # current_tasks = {} - # for spiff_task in spiff_tasks_to_process: - # row_id = f"{spiff_task.task_spec._wf_spec.name}:{spiff_task.task_spec.name}" - # if spiff_task.state in [TaskState.READY, TaskState.WAITING]: - # current_tasks[row_id] = spiff_task - # if ( - # row_id not in spiff_tasks_by_process_id_and_task_name - # or spiff_task.state > spiff_tasks_by_process_id_and_task_name[row_id].state - # ): - # spiff_tasks_by_process_id_and_task_name[row_id] = spiff_task - # spiff_tasks_by_process_id_and_task_name.update(current_tasks) - # spiff_tasks_to_process = spiff_tasks_by_process_id_and_task_name.values() - # - # response = [] - # for spiff_task in spiff_tasks_to_process: - # task_spiff_step: Optional[int] = None - # if str(spiff_task.id) in steps_by_id: - # task_spiff_step = steps_by_id[str(spiff_task.id)].spiff_step - # calling_subprocess_task_id = subprocesses_by_child_task_ids.get(str(spiff_task.id), None) - # task = ProcessInstanceService.spiff_task_to_api_task( - # processor, - # spiff_task, - # calling_subprocess_task_id=calling_subprocess_task_id, - # task_spiff_step=task_spiff_step, - # ) - # if task.state in ["MAYBE", "LIKELY"]: - # task.state = "FUTURE" - # response.append(task) + # import pdb; pdb.set_trace() + if to_task_guid is not None: + task_models_dict = json.loads(current_app.json.dumps(task_models)) + for task_model in task_models_dict: + if task_model['guid'] == to_task_guid and task_model['state'] == "COMPLETED": + task_model['state'] = "READY" + return make_response(jsonify(task_models_dict), 200) return make_response(jsonify(task_models), 200) diff --git a/spiffworkflow-frontend/src/interfaces.ts b/spiffworkflow-frontend/src/interfaces.ts index 8b61f4745..4e65bd020 100644 --- a/spiffworkflow-frontend/src/interfaces.ts +++ 
b/spiffworkflow-frontend/src/interfaces.ts @@ -21,10 +21,22 @@ export interface RecentProcessModel { processModelDisplayName: string; } +export interface TaskPropertiesJson { + parent: string; +} + export interface TaskDefinitionPropertiesJson { spec: string; } +export interface EventDefinition { + typename: string; + payload: any; + event_definitions: [EventDefinition]; + + message_var?: string; +} + export interface Task { id: number; guid: string; @@ -37,12 +49,20 @@ export interface Task { data: any; state: string; typename: string; + properties_json: TaskPropertiesJson; task_definition_properties_json: TaskDefinitionPropertiesJson; + event_definition?: EventDefinition; + // TOOD: DELETE THIS! task_spiff_step?: number; } +export interface TaskIds { + completed: Task[]; + readyOrWaiting: Task[]; +} + export interface ProcessInstanceTask { id: string; task_id: string; diff --git a/spiffworkflow-frontend/src/routes/ProcessInstanceShow.tsx b/spiffworkflow-frontend/src/routes/ProcessInstanceShow.tsx index 932314070..fb5b9b3bc 100644 --- a/spiffworkflow-frontend/src/routes/ProcessInstanceShow.tsx +++ b/spiffworkflow-frontend/src/routes/ProcessInstanceShow.tsx @@ -42,12 +42,14 @@ import { import ButtonWithConfirmation from '../components/ButtonWithConfirmation'; import { useUriListForPermissions } from '../hooks/UriListForPermissions'; import { + EventDefinition, PermissionsToCheck, ProcessData, ProcessInstance, ProcessInstanceMetadata, Task, TaskDefinitionPropertiesJson, + TaskIds, } from '../interfaces'; import { usePermissionFetcher } from '../hooks/PermissionService'; import ProcessInstanceClass from '../classes/ProcessInstanceClass'; @@ -215,14 +217,14 @@ export default function ProcessInstanceShow({ variant }: OwnProps) { }; const getTaskIds = () => { - const taskIds = { completed: [], readyOrWaiting: [] }; + const taskIds: TaskIds = { completed: [], readyOrWaiting: [] }; if (tasks) { tasks.forEach(function getUserTasksElement(task: Task) { if (task.state === 'COMPLETED') { - (taskIds.completed as any).push(task); + taskIds.completed.push(task); } if (task.state === 'READY' || task.state === 'WAITING') { - (taskIds.readyOrWaiting as any).push(task); + taskIds.readyOrWaiting.push(task); } return null; }); @@ -230,20 +232,14 @@ export default function ProcessInstanceShow({ variant }: OwnProps) { return taskIds; }; - const currentSpiffStep = () => { - if (processInstance && typeof params.to_task_guid === 'undefined') { - return processInstance.spiff_step || 0; - } - - return Number(params.spiff_step); - }; - - const showingFirstSpiffStep = () => { - return currentSpiffStep() === 1; + const currentToTaskGuid = () => { + return params.to_task_guid; }; const showingLastSpiffStep = () => { - return processInstance && currentSpiffStep() === processInstance.spiff_step; + return ( + processInstance && currentToTaskGuid() === processInstance.spiff_step + ); }; const completionViewLink = (label: any, taskGuid: string) => { @@ -278,7 +274,7 @@ export default function ProcessInstanceShow({ variant }: OwnProps) { const resetProcessInstance = () => { HttpService.makeCallToBackend({ - path: `${targetUris.processInstanceResetPath}/${currentSpiffStep()}`, + path: `${targetUris.processInstanceResetPath}/${currentToTaskGuid()}`, successCallback: returnToLastSpiffStep, httpMethod: 'POST', }); @@ -580,7 +576,7 @@ export default function ProcessInstanceShow({ variant }: OwnProps) { const getTaskById = (taskId: string) => { if (tasks !== null) { - return tasks.find((task: any) => task.id === taskId); + return 
tasks.find((task: Task) => task.guid === taskId) || null; } return null; }; @@ -589,24 +585,29 @@ export default function ProcessInstanceShow({ variant }: OwnProps) { console.log('result', result); }; + const getParentTaskFromTask = (task: Task) => { + return task.properties_json.parent; + }; + const createScriptUnitTest = () => { if (taskToDisplay) { - const taskToUse: any = taskToDisplay; - const previousTask: any = getTaskById(taskToUse.parent); + const previousTask: Task | null = getTaskById( + getParentTaskFromTask(taskToDisplay) + ); HttpService.makeCallToBackend({ path: `/process-models/${modifiedProcessModelId}/script-unit-tests`, httpMethod: 'POST', successCallback: processScriptUnitTestCreateResult, postBody: { - bpmn_task_identifier: taskToUse.bpmn_identifier, - input_json: previousTask.data, - expected_output_json: taskToUse.data, + bpmn_task_identifier: taskToDisplay.bpmn_identifier, + input_json: previousTask ? previousTask.data : '', + expected_output_json: taskToDisplay.data, }, }); } }; - const isCurrentTask = (task: any) => { + const isCurrentTask = (task: Task) => { const subprocessTypes = [ 'Subprocess', 'Call Activity', @@ -619,7 +620,7 @@ export default function ProcessInstanceShow({ variant }: OwnProps) { ); }; - const canEditTaskData = (task: any) => { + const canEditTaskData = (task: Task) => { return ( processInstance && ability.can('PUT', targetUris.processInstanceTaskDataPath) && @@ -629,7 +630,7 @@ export default function ProcessInstanceShow({ variant }: OwnProps) { ); }; - const canSendEvent = (task: any) => { + const canSendEvent = (task: Task) => { // We actually could allow this for any waiting events const taskTypes = ['Event Based Gateway']; return ( @@ -642,7 +643,7 @@ export default function ProcessInstanceShow({ variant }: OwnProps) { ); }; - const canCompleteTask = (task: any) => { + const canCompleteTask = (task: Task) => { return ( processInstance && processInstance.status === 'suspended' && @@ -652,7 +653,7 @@ export default function ProcessInstanceShow({ variant }: OwnProps) { ); }; - const canResetProcess = (task: any) => { + const canResetProcess = (task: Task) => { return ( ability.can('POST', targetUris.processInstanceResetPath) && processInstance && @@ -662,8 +663,8 @@ export default function ProcessInstanceShow({ variant }: OwnProps) { ); }; - const getEvents = (task: any) => { - const handleMessage = (eventDefinition: any) => { + const getEvents = (task: Task) => { + const handleMessage = (eventDefinition: EventDefinition) => { if (eventDefinition.typename === 'MessageEventDefinition') { const newEvent = eventDefinition; delete newEvent.message_var; @@ -673,7 +674,7 @@ export default function ProcessInstanceShow({ variant }: OwnProps) { return eventDefinition; }; if (task.event_definition && task.event_definition.event_definitions) - return task.event_definition.event_definitions.map((e: any) => + return task.event_definition.event_definitions.map((e: EventDefinition) => handleMessage(e) ); if (task.event_definition) return [handleMessage(task.event_definition)]; @@ -710,11 +711,10 @@ export default function ProcessInstanceShow({ variant }: OwnProps) { if (!taskToDisplay) { return; } - console.log('saveTaskData'); removeError(); // taskToUse is copy of taskToDisplay, with taskDataToDisplay in data attribute - const taskToUse: any = { ...taskToDisplay, data: taskDataToDisplay }; + const taskToUse: Task = { ...taskToDisplay, data: taskDataToDisplay }; HttpService.makeCallToBackend({ path: 
`${targetUris.processInstanceTaskDataPath}/${taskToUse.id}`, httpMethod: 'PUT', @@ -739,13 +739,14 @@ export default function ProcessInstanceShow({ variant }: OwnProps) { }; const completeTask = (execute: boolean) => { - const taskToUse: any = taskToDisplay; - HttpService.makeCallToBackend({ - path: `/task-complete/${modifiedProcessModelId}/${params.process_instance_id}/${taskToUse.id}`, - httpMethod: 'POST', - successCallback: returnToLastSpiffStep, - postBody: { execute }, - }); + if (taskToDisplay) { + HttpService.makeCallToBackend({ + path: `/task-complete/${modifiedProcessModelId}/${params.process_instance_id}/${taskToDisplay.guid}`, + httpMethod: 'POST', + successCallback: returnToLastSpiffStep, + postBody: { execute }, + }); + } }; const taskDisplayButtons = (task: Task) => { From 034201b01c087f7b19ece6731b7939c7fe33a1e5 Mon Sep 17 00:00:00 2001 From: jasquat Date: Tue, 21 Mar 2023 13:34:59 -0400 Subject: [PATCH 07/20] pyl --- spiffworkflow-backend/migrations/env.py | 2 - .../src/spiffworkflow_backend/models/task.py | 1 - .../routes/process_instances_controller.py | 44 +++++++++---------- .../routes/tasks_controller.py | 7 +-- .../services/task_service.py | 15 +++++-- .../manual_task_with_subprocesses.bpmn | 2 +- .../unit/test_process_instance_processor.py | 8 +++- .../src/routes/ProcessInstanceShow.tsx | 2 - 8 files changed, 41 insertions(+), 40 deletions(-) diff --git a/spiffworkflow-backend/migrations/env.py b/spiffworkflow-backend/migrations/env.py index 68feded2a..630e381ad 100644 --- a/spiffworkflow-backend/migrations/env.py +++ b/spiffworkflow-backend/migrations/env.py @@ -1,5 +1,3 @@ -from __future__ import with_statement - import logging from logging.config import fileConfig diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/models/task.py b/spiffworkflow-backend/src/spiffworkflow_backend/models/task.py index a3e182c08..c1e85c578 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/models/task.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/models/task.py @@ -80,7 +80,6 @@ class Task: HUMAN_TASK_TYPES = ["User Task", "Manual Task"] - def __init__( self, id: str, diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/routes/process_instances_controller.py b/spiffworkflow-backend/src/spiffworkflow_backend/routes/process_instances_controller.py index 43ed6cef1..5fa451268 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/routes/process_instances_controller.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/routes/process_instances_controller.py @@ -1,13 +1,9 @@ """APIs for dealing with process groups, process models, and process instances.""" import base64 -from spiffworkflow_backend.services.task_service import TaskService -from sqlalchemy.orm import aliased -from spiffworkflow_backend.models.bpmn_process import BpmnProcessModel import json from typing import Any from typing import Dict from typing import Optional -from uuid import UUID import flask.wrappers from flask import current_app @@ -16,12 +12,12 @@ from flask import jsonify from flask import make_response from flask import request from flask.wrappers import Response -from SpiffWorkflow.task import Task as SpiffTask # type: ignore -from SpiffWorkflow.task import TaskState from sqlalchemy import and_ from sqlalchemy import or_ +from sqlalchemy.orm import aliased from spiffworkflow_backend.exceptions.api_error import ApiError +from spiffworkflow_backend.models.bpmn_process import BpmnProcessModel from spiffworkflow_backend.models.bpmn_process_definition import 
BpmnProcessDefinitionModel from spiffworkflow_backend.models.db import db from spiffworkflow_backend.models.human_task import HumanTaskModel @@ -46,7 +42,6 @@ from spiffworkflow_backend.models.process_model import ProcessModelInfo from spiffworkflow_backend.models.spec_reference import SpecReferenceCache from spiffworkflow_backend.models.spec_reference import SpecReferenceNotFoundError from spiffworkflow_backend.models.spiff_step_details import SpiffStepDetailsModel -from spiffworkflow_backend.models.task import Task from spiffworkflow_backend.models.task import TaskModel from spiffworkflow_backend.models.task_definition import TaskDefinitionModel from spiffworkflow_backend.models.user import UserModel @@ -88,6 +83,7 @@ from spiffworkflow_backend.services.process_instance_service import ( ) from spiffworkflow_backend.services.process_model_service import ProcessModelService from spiffworkflow_backend.services.spec_file_service import SpecFileService +from spiffworkflow_backend.services.task_service import TaskService def process_instance_create( @@ -625,9 +621,7 @@ def process_instance_task_list( if to_task_model is None: raise ApiError( error_code="task_not_found", - message=( - f"Cannot find a task with guid '{to_task_guid}' for process instance '{process_instance.id}'" - ), + message=f"Cannot find a task with guid '{to_task_guid}' for process instance '{process_instance.id}'", status_code=400, ) task_model_query = task_model_query.filter(TaskModel.end_in_seconds <= to_task_model.end_in_seconds) @@ -637,13 +631,18 @@ def process_instance_task_list( direct_parent_bpmn_process_definition_alias = aliased(BpmnProcessDefinitionModel) task_model_query = ( - task_model_query.order_by( - TaskModel.id.desc() # type: ignore - ) + task_model_query.order_by(TaskModel.id.desc()) # type: ignore .join(TaskDefinitionModel, TaskDefinitionModel.id == TaskModel.task_definition_id) .join(bpmn_process_alias, bpmn_process_alias.id == TaskModel.bpmn_process_id) - .outerjoin(direct_parent_bpmn_process_alias, direct_parent_bpmn_process_alias.id == bpmn_process_alias.direct_parent_process_id) - .outerjoin(direct_parent_bpmn_process_definition_alias, direct_parent_bpmn_process_definition_alias.id == direct_parent_bpmn_process_alias.bpmn_process_definition_id) + .outerjoin( + direct_parent_bpmn_process_alias, + direct_parent_bpmn_process_alias.id == bpmn_process_alias.direct_parent_process_id, + ) + .outerjoin( + direct_parent_bpmn_process_definition_alias, + direct_parent_bpmn_process_definition_alias.id + == direct_parent_bpmn_process_alias.bpmn_process_definition_id, + ) .join( BpmnProcessDefinitionModel, BpmnProcessDefinitionModel.id == TaskDefinitionModel.bpmn_process_definition_id ) @@ -651,28 +650,27 @@ def process_instance_task_list( BpmnProcessDefinitionModel.bpmn_identifier.label("bpmn_process_definition_identifier"), # type: ignore BpmnProcessDefinitionModel.bpmn_name.label("bpmn_process_definition_name"), # type: ignore direct_parent_bpmn_process_alias.guid.label("bpmn_process_direct_parent_guid"), - direct_parent_bpmn_process_definition_alias.bpmn_identifier.label("bpmn_process_direct_parent_bpmn_identifier"), + direct_parent_bpmn_process_definition_alias.bpmn_identifier.label( + "bpmn_process_direct_parent_bpmn_identifier" + ), TaskDefinitionModel.bpmn_identifier, TaskDefinitionModel.bpmn_name, TaskDefinitionModel.typename, - TaskDefinitionModel.properties_json.label('task_definition_properties_json'), # type: ignore + TaskDefinitionModel.properties_json.label("task_definition_properties_json"), # 
type: ignore TaskModel.guid, TaskModel.state, ) ) if len(bpmn_process_ids) > 0: - task_model_query = ( - task_model_query.filter(bpmn_process_alias.id.in_(bpmn_process_ids)) - ) + task_model_query = task_model_query.filter(bpmn_process_alias.id.in_(bpmn_process_ids)) task_models = task_model_query.all() - # import pdb; pdb.set_trace() if to_task_guid is not None: task_models_dict = json.loads(current_app.json.dumps(task_models)) for task_model in task_models_dict: - if task_model['guid'] == to_task_guid and task_model['state'] == "COMPLETED": - task_model['state'] = "READY" + if task_model["guid"] == to_task_guid and task_model["state"] == "COMPLETED": + task_model["state"] = "READY" return make_response(jsonify(task_models_dict), 200) return make_response(jsonify(task_models), 200) diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/routes/tasks_controller.py b/spiffworkflow-backend/src/spiffworkflow_backend/routes/tasks_controller.py index 37c29575c..495d22dec 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/routes/tasks_controller.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/routes/tasks_controller.py @@ -1,6 +1,5 @@ """APIs for dealing with process groups, process models, and process instances.""" import json -from spiffworkflow_backend.models.task import TaskModel # noqa: F401 import os import uuid from sys import exc_info @@ -37,8 +36,8 @@ from spiffworkflow_backend.models.human_task_user import HumanTaskUserModel from spiffworkflow_backend.models.process_instance import ProcessInstanceModel from spiffworkflow_backend.models.process_instance import ProcessInstanceStatus from spiffworkflow_backend.models.process_model import ProcessModelInfo -from spiffworkflow_backend.models.spiff_step_details import SpiffStepDetailsModel from spiffworkflow_backend.models.task import Task +from spiffworkflow_backend.models.task import TaskModel # noqa: F401 from spiffworkflow_backend.models.user import UserModel from spiffworkflow_backend.routes.process_api_blueprint import ( _find_principal_or_raise, @@ -176,9 +175,7 @@ def task_data_show( if task_model is None: raise ApiError( error_code="task_not_found", - message=( - f"Cannot find a task with guid '{task_guid}' for process instance '{process_instance_id}'" - ), + message=f"Cannot find a task with guid '{task_guid}' for process instance '{process_instance_id}'", status_code=400, ) task_model.data = task_model.json_data() diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/services/task_service.py b/spiffworkflow-backend/src/spiffworkflow_backend/services/task_service.py index fa9024061..159a54d8b 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/services/task_service.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/services/task_service.py @@ -13,7 +13,8 @@ from SpiffWorkflow.task import TaskStateNames from sqlalchemy.dialects.mysql import insert as mysql_insert from sqlalchemy.dialects.postgresql import insert as postgres_insert -from spiffworkflow_backend.models.bpmn_process import BpmnProcessModel, BpmnProcessNotFoundError +from spiffworkflow_backend.models.bpmn_process import BpmnProcessModel +from spiffworkflow_backend.models.bpmn_process import BpmnProcessNotFoundError from spiffworkflow_backend.models.db import db from spiffworkflow_backend.models.json_data import JsonDataModel # noqa: F401 from spiffworkflow_backend.models.process_instance import ProcessInstanceModel @@ -204,7 +205,9 @@ class TaskService: direct_bpmn_process_parent = top_level_process for subprocess_guid, 
subprocess in subprocesses.items(): if subprocess == spiff_workflow.outer_workflow: - direct_bpmn_process_parent = BpmnProcessModel.query.filter_by(guid=str(subprocess_guid)).first() + direct_bpmn_process_parent = BpmnProcessModel.query.filter_by( + guid=str(subprocess_guid) + ).first() if direct_bpmn_process_parent is None: raise BpmnProcessNotFoundError( f"Could not find bpmn process with guid: {str(subprocess_guid)} " @@ -212,7 +215,9 @@ class TaskService: ) if direct_bpmn_process_parent is None: - raise BpmnProcessNotFoundError(f"Could not find a direct bpmn process parent for guid: {bpmn_process_guid}") + raise BpmnProcessNotFoundError( + f"Could not find a direct bpmn process parent for guid: {bpmn_process_guid}" + ) bpmn_process.direct_parent_process_id = direct_bpmn_process_parent.id @@ -305,7 +310,9 @@ class TaskService: @classmethod def bpmn_process_and_descendants(cls, bpmn_processes: list[BpmnProcessModel]) -> list[BpmnProcessModel]: bpmn_process_ids = [p.id for p in bpmn_processes] - direct_children = BpmnProcessModel.query.filter(BpmnProcessModel.direct_parent_process_id.in_(bpmn_process_ids)).all() # type: ignore + direct_children = BpmnProcessModel.query.filter( + BpmnProcessModel.direct_parent_process_id.in_(bpmn_process_ids) # type: ignore + ).all() if len(direct_children) > 0: return bpmn_processes + cls.bpmn_process_and_descendants(direct_children) return bpmn_processes diff --git a/spiffworkflow-backend/tests/data/manual_task_with_subprocesses/manual_task_with_subprocesses.bpmn b/spiffworkflow-backend/tests/data/manual_task_with_subprocesses/manual_task_with_subprocesses.bpmn index 680903f5b..939c8c0be 100644 --- a/spiffworkflow-backend/tests/data/manual_task_with_subprocesses/manual_task_with_subprocesses.bpmn +++ b/spiffworkflow-backend/tests/data/manual_task_with_subprocesses/manual_task_with_subprocesses.bpmn @@ -151,4 +151,4 @@ except: - \ No newline at end of file + diff --git a/spiffworkflow-backend/tests/spiffworkflow_backend/unit/test_process_instance_processor.py b/spiffworkflow-backend/tests/spiffworkflow_backend/unit/test_process_instance_processor.py index 0b80a46c9..9ca008ecf 100644 --- a/spiffworkflow-backend/tests/spiffworkflow_backend/unit/test_process_instance_processor.py +++ b/spiffworkflow-backend/tests/spiffworkflow_backend/unit/test_process_instance_processor.py @@ -365,7 +365,9 @@ class TestProcessInstanceProcessor(BaseTest): assert len(all_spiff_tasks) > 1 for spiff_task in all_spiff_tasks: assert spiff_task.state == TaskState.COMPLETED - assert_spiff_task_is_in_process("test_process_to_call_subprocess_script", "test_process_to_call_subprocess") + assert_spiff_task_is_in_process( + "test_process_to_call_subprocess_script", "test_process_to_call_subprocess" + ) assert_spiff_task_is_in_process("top_level_subprocess_script", "top_level_subprocess") assert_spiff_task_is_in_process("top_level_script", "top_level_process") @@ -389,7 +391,9 @@ class TestProcessInstanceProcessor(BaseTest): assert bpmn_process_definition is not None assert bpmn_process_definition.bpmn_identifier == "test_process_to_call_subprocess" assert bpmn_process.direct_parent_process_id is not None - direct_parent_process = BpmnProcessModel.query.filter_by(id=bpmn_process.direct_parent_process_id).first() + direct_parent_process = BpmnProcessModel.query.filter_by( + id=bpmn_process.direct_parent_process_id + ).first() assert direct_parent_process is not None assert direct_parent_process.bpmn_process_definition.bpmn_identifier == "test_process_to_call" diff --git 
a/spiffworkflow-frontend/src/routes/ProcessInstanceShow.tsx b/spiffworkflow-frontend/src/routes/ProcessInstanceShow.tsx index fb5b9b3bc..4fa70d6c5 100644 --- a/spiffworkflow-frontend/src/routes/ProcessInstanceShow.tsx +++ b/spiffworkflow-frontend/src/routes/ProcessInstanceShow.tsx @@ -7,12 +7,10 @@ import { useSearchParams, } from 'react-router-dom'; import { - CaretRight, TrashCan, StopOutline, PauseOutline, PlayOutline, - CaretLeft, InProgress, Checkmark, Warning, From 26af07befdfed959301d442ad573d081ecf82f75 Mon Sep 17 00:00:00 2001 From: jasquat Date: Wed, 22 Mar 2023 09:44:13 -0400 Subject: [PATCH 08/20] resetting tasks somewhat work --- .../src/spiffworkflow_backend/api.yml | 24 +--- .../src/spiffworkflow_backend/models/task.py | 2 +- .../routes/process_instances_controller.py | 15 +- .../services/process_instance_processor.py | 128 +++++++++++++----- .../services/task_service.py | 51 +++++++ .../src/routes/ProcessInstanceShow.tsx | 2 +- 6 files changed, 152 insertions(+), 70 deletions(-) diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/api.yml b/spiffworkflow-backend/src/spiffworkflow_backend/api.yml index 1a21e6439..f7fa3f036 100755 --- a/spiffworkflow-backend/src/spiffworkflow_backend/api.yml +++ b/spiffworkflow-backend/src/spiffworkflow_backend/api.yml @@ -901,12 +901,6 @@ paths: description: The identifier of the process to use for the diagram. Useful for displaying the diagram for a call activity. schema: type: string - - name: all_tasks - in: query - required: false - description: If true, this wil return all tasks associated with the process instance and not just user tasks. - schema: - type: boolean - name: most_recent_tasks_only in: query required: false @@ -960,12 +954,6 @@ paths: description: The identifier of the process to use for the diagram. Useful for displaying the diagram for a call activity. schema: type: string - - name: all_tasks - in: query - required: false - description: If true, this wil return all tasks associated with the process instance and not just user tasks. - schema: - type: boolean - name: most_recent_tasks_only in: query required: false @@ -1188,7 +1176,7 @@ paths: schema: $ref: "#/components/schemas/OkTrue" - /process-instance-reset/{modified_process_model_identifier}/{process_instance_id}/{spiff_step}: + /process-instance-reset/{modified_process_model_identifier}/{process_instance_id}/{to_task_guid}: parameters: - name: modified_process_model_identifier in: path @@ -1202,12 +1190,12 @@ paths: description: The unique id of an existing process instance. schema: type: integer - - name: spiff_step - in: query - required: false - description: Reset the process to this state + - name: to_task_guid + in: path + required: true + description: Get the tasks only up to the given guid. 
schema: - type: integer + type: string post: operationId: spiffworkflow_backend.routes.process_instances_controller.process_instance_reset summary: Reset a process instance to an earlier step diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/models/task.py b/spiffworkflow-backend/src/spiffworkflow_backend/models/task.py index c1e85c578..dbdd429e7 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/models/task.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/models/task.py @@ -63,7 +63,7 @@ class TaskModel(SpiffworkflowBaseDBModel): json_data_hash: str = db.Column(db.String(255), nullable=False, index=True) python_env_data_hash: str = db.Column(db.String(255), nullable=False, index=True) - start_in_seconds: float = db.Column(db.DECIMAL(17, 6)) + start_in_seconds: Union[float, None] = db.Column(db.DECIMAL(17, 6)) end_in_seconds: Union[float, None] = db.Column(db.DECIMAL(17, 6)) data: Optional[dict] = None diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/routes/process_instances_controller.py b/spiffworkflow-backend/src/spiffworkflow_backend/routes/process_instances_controller.py index 5fa451268..b0cde36f8 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/routes/process_instances_controller.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/routes/process_instances_controller.py @@ -555,8 +555,6 @@ def process_instance_report_show( def process_instance_task_list_without_task_data_for_me( modified_process_model_identifier: str, process_instance_id: int, - all_tasks: bool = False, - spiff_step: int = 0, most_recent_tasks_only: bool = False, bpmn_process_guid: Optional[str] = None, to_task_guid: Optional[str] = None, @@ -566,8 +564,6 @@ def process_instance_task_list_without_task_data_for_me( return process_instance_task_list( _modified_process_model_identifier=modified_process_model_identifier, process_instance=process_instance, - all_tasks=all_tasks, - spiff_step=spiff_step, most_recent_tasks_only=most_recent_tasks_only, bpmn_process_guid=bpmn_process_guid, to_task_guid=to_task_guid, @@ -577,8 +573,6 @@ def process_instance_task_list_without_task_data_for_me( def process_instance_task_list_without_task_data( modified_process_model_identifier: str, process_instance_id: int, - all_tasks: bool = False, - spiff_step: int = 0, most_recent_tasks_only: bool = False, bpmn_process_guid: Optional[str] = None, to_task_guid: Optional[str] = None, @@ -588,8 +582,6 @@ def process_instance_task_list_without_task_data( return process_instance_task_list( _modified_process_model_identifier=modified_process_model_identifier, process_instance=process_instance, - all_tasks=all_tasks, - spiff_step=spiff_step, most_recent_tasks_only=most_recent_tasks_only, bpmn_process_guid=bpmn_process_guid, to_task_guid=to_task_guid, @@ -600,8 +592,6 @@ def process_instance_task_list( _modified_process_model_identifier: str, process_instance: ProcessInstanceModel, bpmn_process_guid: Optional[str] = None, - all_tasks: bool = False, - spiff_step: int = 0, to_task_guid: Optional[str] = None, most_recent_tasks_only: bool = False, ) -> flask.wrappers.Response: @@ -679,12 +669,11 @@ def process_instance_task_list( def process_instance_reset( process_instance_id: int, modified_process_model_identifier: str, - spiff_step: int = 0, + to_task_guid: str, ) -> flask.wrappers.Response: """Reset a process instance to a particular step.""" process_instance = _find_process_instance_by_id_or_raise(process_instance_id) - processor = ProcessInstanceProcessor(process_instance) - 
processor.reset_process(spiff_step) + ProcessInstanceProcessor.reset_process(process_instance, to_task_guid, commit=True) return Response(json.dumps({"ok": True}), status=200, mimetype="application/json") diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/services/process_instance_processor.py b/spiffworkflow-backend/src/spiffworkflow_backend/services/process_instance_processor.py index fdd42cb92..ec741f32e 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/services/process_instance_processor.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/services/process_instance_processor.py @@ -52,6 +52,8 @@ from SpiffWorkflow.spiff.serializer.config import SPIFF_SPEC_CONFIG # type: ign from SpiffWorkflow.task import Task as SpiffTask # type: ignore from SpiffWorkflow.task import TaskState from SpiffWorkflow.util.deep_merge import DeepMerge # type: ignore +from sqlalchemy import and_ +from sqlalchemy import or_ from spiffworkflow_backend.exceptions.api_error import ApiError from spiffworkflow_backend.models.bpmn_process import BpmnProcessModel @@ -85,7 +87,8 @@ from spiffworkflow_backend.models.script_attributes_context import ( ) from spiffworkflow_backend.models.spec_reference import SpecReferenceCache from spiffworkflow_backend.models.spiff_step_details import SpiffStepDetailsModel -from spiffworkflow_backend.models.task import TaskModel # noqa: F401 +from spiffworkflow_backend.models.task import TaskModel +from spiffworkflow_backend.models.task import TaskNotFoundError from spiffworkflow_backend.models.task_definition import TaskDefinitionModel from spiffworkflow_backend.models.user import UserModel from spiffworkflow_backend.scripts.script import Script @@ -154,10 +157,6 @@ class SpiffStepDetailIsMissingError(Exception): pass -class TaskNotFoundError(Exception): - pass - - class BoxedTaskDataBasedScriptEngineEnvironment(BoxedTaskDataEnvironment): # type: ignore def __init__(self, environment_globals: Dict[str, Any]): """BoxedTaskDataBasedScriptEngineEnvironment.""" @@ -1312,48 +1311,103 @@ class ProcessInstanceProcessor: # Saving the workflow seems to reset the status self.suspend() - def reset_process(self, spiff_step: int) -> None: + @classmethod + def reset_process( + cls, process_instance: ProcessInstanceModel, to_task_guid: str, commit: Optional[bool] = False + ) -> None: """Reset a process to an earlier state.""" - spiff_logger = logging.getLogger("spiff") - spiff_logger.info( - f"Process reset from step {spiff_step}", - extra=self.bpmn_process_instance.log_info(), + cls.add_event_to_process_instance( + process_instance, ProcessInstanceEventType.process_instance_rewound_to_task.value, task_guid=to_task_guid ) - step_detail = ( - db.session.query(SpiffStepDetailsModel) - .filter( - SpiffStepDetailsModel.process_instance_id == self.process_instance_model.id, - SpiffStepDetailsModel.spiff_step == spiff_step, + to_task_model = TaskModel.query.filter_by(guid=to_task_guid, process_instance_id=process_instance.id).first() + if to_task_model is None: + raise TaskNotFoundError( + f"Cannot find a task with guid '{to_task_guid}' for process instance '{process_instance.id}'" ) - .first() + + parent_bpmn_processes, task_models_of_parent_bpmn_processes = TaskService.task_models_of_parent_bpmn_processes( + to_task_model ) - if step_detail is not None: - self.increment_spiff_step() - self.add_step( - { - "process_instance_id": self.process_instance_model.id, - "spiff_step": self.process_instance_model.spiff_step or 1, - "task_json": step_detail.task_json, - "timestamp": 
round(time.time()), - } + task_models_of_parent_bpmn_processes_guids = [p.guid for p in task_models_of_parent_bpmn_processes if p.guid] + parent_bpmn_processes_ids = [p.id for p in parent_bpmn_processes] + tasks_to_update_query = db.session.query(TaskModel).filter( + and_( + or_( + TaskModel.end_in_seconds > to_task_model.end_in_seconds, + TaskModel.end_in_seconds.is_not(None), # type: ignore + ), + TaskModel.process_instance_id == process_instance.id, + TaskModel.bpmn_process_id.in_(parent_bpmn_processes_ids), # type: ignore + ) + ) + tasks_to_update = tasks_to_update_query.all() + + # run all queries before making changes to task_model + if commit: + tasks_to_delete_query = db.session.query(TaskModel).filter( + and_( + or_( + TaskModel.end_in_seconds > to_task_model.end_in_seconds, + TaskModel.end_in_seconds.is_not(None), # type: ignore + ), + TaskModel.process_instance_id == process_instance.id, + TaskModel.guid.not_in(task_models_of_parent_bpmn_processes_guids), # type: ignore + TaskModel.bpmn_process_id.not_in(parent_bpmn_processes_ids), # type: ignore + ) ) - dct = self._serializer.workflow_to_dict(self.bpmn_process_instance) - dct["tasks"] = step_detail.task_json["tasks"] - dct["subprocesses"] = step_detail.task_json["subprocesses"] - self.bpmn_process_instance = self._serializer.workflow_from_dict(dct) + tasks_to_delete = tasks_to_delete_query.all() - # Cascade does not seems to work on filters, only directly through the session - tasks = self.bpmn_process_instance.get_tasks(TaskState.NOT_FINISHED_MASK) - rows = HumanTaskModel.query.filter( - HumanTaskModel.task_id.in_(str(t.id) for t in tasks) # type: ignore + # delete any later tasks from to_task_model and delete bpmn processes that may be + # link directly to one of those tasks. + tasks_to_delete_guids = [t.guid for t in tasks_to_delete] + tasks_to_delete_ids = [t.id for t in tasks_to_delete] + bpmn_processes_to_delete = BpmnProcessModel.query.filter( + BpmnProcessModel.guid.in_(tasks_to_delete_guids) # type: ignore + ).all() + human_tasks_to_delete = HumanTaskModel.query.filter( + HumanTaskModel.task_model_id.in_(tasks_to_delete_ids) # type: ignore ).all() - for row in rows: - db.session.delete(row) - self.save() - self.suspend() + # ensure the correct order for foreign keys + for human_task_to_delete in human_tasks_to_delete: + db.session.delete(human_task_to_delete) + db.session.commit() + for task_to_delete in tasks_to_delete: + db.session.delete(task_to_delete) + db.session.commit() + for bpmn_process_to_delete in bpmn_processes_to_delete: + db.session.delete(bpmn_process_to_delete) + db.session.commit() + + related_human_task = HumanTaskModel.query.filter_by(task_model_id=to_task_model.id).first() + if related_human_task is not None: + db.session.delete(related_human_task) + + for task_to_update in tasks_to_update: + TaskService.reset_task_model(task_to_update, state="FUTURE", commit=commit) + + parent_task_model = TaskModel.query.filter_by(guid=to_task_model.properties_json["parent"]).first() + if parent_task_model is None: + raise TaskNotFoundError( + f"Cannot find a task with guid '{to_task_guid}' for process instance '{process_instance.id}'" + ) + + TaskService.reset_task_model( + to_task_model, + state="READY", + json_data_hash=parent_task_model.json_data_hash, + python_env_data_hash=parent_task_model.python_env_data_hash, + commit=commit, + ) + for task_model in task_models_of_parent_bpmn_processes: + TaskService.reset_task_model(task_model, state="WAITING", commit=commit) + + if commit: + processor = 
ProcessInstanceProcessor(process_instance) + processor.save() + processor.suspend() @staticmethod def get_parser() -> MyCustomParser: diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/services/task_service.py b/spiffworkflow-backend/src/spiffworkflow_backend/services/task_service.py index 159a54d8b..918de4d8e 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/services/task_service.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/services/task_service.py @@ -9,6 +9,7 @@ from flask import current_app from SpiffWorkflow.bpmn.serializer.workflow import BpmnWorkflow # type: ignore from SpiffWorkflow.bpmn.serializer.workflow import BpmnWorkflowSerializer from SpiffWorkflow.task import Task as SpiffTask # type: ignore +from SpiffWorkflow.task import TaskState from SpiffWorkflow.task import TaskStateNames from sqlalchemy.dialects.mysql import insert as mysql_insert from sqlalchemy.dialects.postgresql import insert as postgres_insert @@ -317,6 +318,56 @@ class TaskService: return bpmn_processes + cls.bpmn_process_and_descendants(direct_children) return bpmn_processes + @classmethod + def task_models_of_parent_bpmn_processes( + cls, task_model: TaskModel + ) -> Tuple[list[BpmnProcessModel], list[TaskModel]]: + bpmn_process = task_model.bpmn_process + task_models: list[TaskModel] = [] + bpmn_processes: list[BpmnProcessModel] = [bpmn_process] + if bpmn_process.guid is not None: + parent_task_model = TaskModel.query.filter_by(guid=bpmn_process.guid).first() + if parent_task_model is not None: + b, t = cls.task_models_of_parent_bpmn_processes(parent_task_model) + return (bpmn_processes + b, [parent_task_model] + t) + return (bpmn_processes, task_models) + + @classmethod + def reset_task_model( + cls, + task_model: TaskModel, + state: str, + commit: Optional[bool] = True, + json_data_hash: Optional[str] = None, + python_env_data_hash: Optional[str] = None, + ) -> None: + if json_data_hash is None: + TaskService.update_task_data_on_task_model(task_model, {}, "json_data_hash") + else: + task_model.json_data_hash = json_data_hash + if python_env_data_hash is None: + TaskService.update_task_data_on_task_model(task_model, {}, "python_env_data") + else: + task_model.python_env_data_hash = python_env_data_hash + + new_properties_json = task_model.properties_json + task_model.state = state + task_model.start_in_seconds = None + task_model.end_in_seconds = None + + if commit: + db.session.add(task_model) + db.session.commit() + + new_properties_json["state"] = getattr(TaskState, state) + task_model.properties_json = new_properties_json + + if commit: + # if we commit the properties json at the same time as the other items + # the json gets reset for some reason. 
+ db.session.add(task_model) + db.session.commit() + @classmethod def _create_task( cls, diff --git a/spiffworkflow-frontend/src/routes/ProcessInstanceShow.tsx b/spiffworkflow-frontend/src/routes/ProcessInstanceShow.tsx index 4fa70d6c5..74e6e1a8d 100644 --- a/spiffworkflow-frontend/src/routes/ProcessInstanceShow.tsx +++ b/spiffworkflow-frontend/src/routes/ProcessInstanceShow.tsx @@ -144,7 +144,7 @@ export default function ProcessInstanceShow({ variant }: OwnProps) { path: `${apiPath}/${modifiedProcessModelId}/${params.process_instance_id}${queryParams}`, successCallback: setProcessInstance, }); - let taskParams = '?all_tasks=true&most_recent_tasks_only=true'; + let taskParams = '?most_recent_tasks_only=true'; if (typeof params.to_task_guid !== 'undefined') { taskParams = `${taskParams}&to_task_guid=${params.to_task_guid}`; } From 61da3d6b6f419cb14857aa1589b3412243987003 Mon Sep 17 00:00:00 2001 From: jasquat Date: Wed, 22 Mar 2023 10:45:40 -0400 Subject: [PATCH 09/20] updated manual task with subprocess bpmn w/ burnettk --- .../services/process_instance_processor.py | 2 +- .../services/task_service.py | 4 +- .../manual_task_with_subprocesses.bpmn | 14 +-- .../test_process_to_call.bpmn | 115 ++++++++++++++---- .../unit/test_process_instance_processor.py | 62 +++++++++- 5 files changed, 154 insertions(+), 43 deletions(-) diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/services/process_instance_processor.py b/spiffworkflow-backend/src/spiffworkflow_backend/services/process_instance_processor.py index ec741f32e..c97fa7337 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/services/process_instance_processor.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/services/process_instance_processor.py @@ -1335,7 +1335,7 @@ class ProcessInstanceProcessor: and_( or_( TaskModel.end_in_seconds > to_task_model.end_in_seconds, - TaskModel.end_in_seconds.is_not(None), # type: ignore + TaskModel.end_in_seconds.is_(None), # type: ignore ), TaskModel.process_instance_id == process_instance.id, TaskModel.bpmn_process_id.in_(parent_bpmn_processes_ids), # type: ignore diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/services/task_service.py b/spiffworkflow-backend/src/spiffworkflow_backend/services/task_service.py index 918de4d8e..f75d955c9 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/services/task_service.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/services/task_service.py @@ -342,11 +342,11 @@ class TaskService: python_env_data_hash: Optional[str] = None, ) -> None: if json_data_hash is None: - TaskService.update_task_data_on_task_model(task_model, {}, "json_data_hash") + cls.update_task_data_on_task_model(task_model, {}, "json_data_hash") else: task_model.json_data_hash = json_data_hash if python_env_data_hash is None: - TaskService.update_task_data_on_task_model(task_model, {}, "python_env_data") + cls.update_task_data_on_task_model(task_model, {}, "python_env_data") else: task_model.python_env_data_hash = python_env_data_hash diff --git a/spiffworkflow-backend/tests/data/manual_task_with_subprocesses/manual_task_with_subprocesses.bpmn b/spiffworkflow-backend/tests/data/manual_task_with_subprocesses/manual_task_with_subprocesses.bpmn index 939c8c0be..f49f99cd9 100644 --- a/spiffworkflow-backend/tests/data/manual_task_with_subprocesses/manual_task_with_subprocesses.bpmn +++ b/spiffworkflow-backend/tests/data/manual_task_with_subprocesses/manual_task_with_subprocesses.bpmn @@ -22,11 +22,10 @@ set_in_top_level_script = 1 - Flow_09gjylo - 
Flow_1i7syph + Flow_0yxus36 Flow_00k1tii @@ -48,7 +47,7 @@ except: - Flow_1i7syph + Flow_0yxus36 Flow_187mcqe @@ -67,6 +66,7 @@ except: set_top_level_process_script_after_gate = 1 + @@ -102,10 +102,6 @@ except: - - - - @@ -128,6 +124,10 @@ except: + + + + diff --git a/spiffworkflow-backend/tests/data/manual_task_with_subprocesses/test_process_to_call.bpmn b/spiffworkflow-backend/tests/data/manual_task_with_subprocesses/test_process_to_call.bpmn index afda130a7..2bdce678a 100644 --- a/spiffworkflow-backend/tests/data/manual_task_with_subprocesses/test_process_to_call.bpmn +++ b/spiffworkflow-backend/tests/data/manual_task_with_subprocesses/test_process_to_call.bpmn @@ -2,70 +2,131 @@ - Flow_095sred + Flow_089aeua Flow_1qsx5et Flow_1qsx5et - Flow_095sred + Flow_0zedtvv - Flow_12zb3j0 + Flow_0bkk554 - Flow_12zb3j0 - Flow_0iu4d71 - set_in_test_process_to_call_script = 1 + Flow_1cnuh2a + Flow_17hgw9g + set_in_test_process_to_call_subprocess_script = 1 - Flow_0iu4d71 + Flow_17hgw9g - - + + + + + Flow_0bkk554 + Flow_1cnuh2a + + Flow_1nri60d + + + + Flow_1bfzrzu + + + + Flow_1nri60d + Flow_1bfzrzu + set_in_test_process_to_call_subprocess_subprocess_script = 1 + + - + + + + Flow_0zedtvv + Flow_089aeua + set_in_test_process_to_call_script = 1 + + + + - - - + + + + - + - + + + + + - - - - - - - - - + + + + + + + + + + + + - - - + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/spiffworkflow-backend/tests/spiffworkflow_backend/unit/test_process_instance_processor.py b/spiffworkflow-backend/tests/spiffworkflow_backend/unit/test_process_instance_processor.py index 9ca008ecf..d1f5da242 100644 --- a/spiffworkflow-backend/tests/spiffworkflow_backend/unit/test_process_instance_processor.py +++ b/spiffworkflow-backend/tests/spiffworkflow_backend/unit/test_process_instance_processor.py @@ -256,6 +256,54 @@ class TestProcessInstanceProcessor(BaseTest): assert spiff_task is not None assert spiff_task.state == TaskState.COMPLETED + def test_properly_resets_process_to_given_task( + self, + app: Flask, + client: FlaskClient, + with_db_and_bpmn_file_cleanup: None, + with_super_admin_user: UserModel, + ) -> None: + self.create_process_group(client, with_super_admin_user, "test_group", "test_group") + initiator_user = self.find_or_create_user("initiator_user") + finance_user_three = self.find_or_create_user("testuser3") + assert initiator_user.principal is not None + assert finance_user_three.principal is not None + AuthorizationService.import_permissions_from_yaml_file() + + finance_group = GroupModel.query.filter_by(identifier="Finance Team").first() + assert finance_group is not None + + process_model = load_test_spec( + process_model_id="test_group/manual_task_with_subprocesses", + process_model_source_directory="manual_task_with_subprocesses", + ) + process_instance = self.create_process_instance_from_process_model( + process_model=process_model, user=initiator_user + ) + processor = ProcessInstanceProcessor(process_instance) + processor.do_engine_steps(save=True) + assert len(process_instance.active_human_tasks) == 1 + initial_human_task_id = process_instance.active_human_tasks[0].id + + # save again to ensure we go attempt to process the human tasks again + processor.save() + + assert len(process_instance.active_human_tasks) == 1 + assert initial_human_task_id == process_instance.active_human_tasks[0].id + + processor = ProcessInstanceProcessor(process_instance) + human_task_one = process_instance.active_human_tasks[0] + spiff_manual_task = processor.__class__.get_task_by_bpmn_identifier( 
+ human_task_one.task_name, processor.bpmn_process_instance + ) + ProcessInstanceService.complete_form_task(processor, spiff_manual_task, {}, initiator_user, human_task_one) + + # process_instance = ProcessInstanceModel.query.filter_by(id=process_instance.id).first() + # processor = ProcessInstanceProcessor(process_instance) + # human_task_one = process_instance.active_human_tasks[0] + # spiff_manual_task = processor.bpmn_process_instance.get_task(UUID(human_task_one.task_id)) + # ProcessInstanceService.complete_form_task(processor, spiff_manual_task, {}, initiator_user, human_task_one) + def test_properly_saves_tasks_when_running( self, app: Flask, @@ -263,7 +311,6 @@ class TestProcessInstanceProcessor(BaseTest): with_db_and_bpmn_file_cleanup: None, with_super_admin_user: UserModel, ) -> None: - """Test_does_not_recreate_human_tasks_on_multiple_saves.""" self.create_process_group(client, with_super_admin_user, "test_group", "test_group") initiator_user = self.find_or_create_user("initiator_user") finance_user_three = self.find_or_create_user("testuser3") @@ -317,7 +364,7 @@ class TestProcessInstanceProcessor(BaseTest): } third_data_set = { **second_data_set, - **{"set_in_test_process_to_call_script": 1}, + **{"set_in_test_process_to_call_script": 1, "set_in_test_process_to_call_subprocess_subprocess_script": 1, "set_in_test_process_to_call_subprocess_script": 1}, } fourth_data_set = {**third_data_set, **{"a": 1, "we_move_on": True}} fifth_data_set = {**fourth_data_set, **{"validate_only": False, "set_top_level_process_script_after_gate": 1}} @@ -338,10 +385,13 @@ class TestProcessInstanceProcessor(BaseTest): # TODO: also check task data here from the spiff_task directly to ensure we hydrated spiff correctly def assert_spiff_task_is_in_process(spiff_task_identifier: str, bpmn_process_identifier: str) -> None: if spiff_task.task_spec.name == spiff_task_identifier: - base_failure_message = f"Failed on {bpmn_process_identifier} - {spiff_task_identifier}." - expected_python_env_data = expected_task_data[spiff_task.task_spec.name] + expected_task_data_key = spiff_task.task_spec.name if spiff_task.task_spec.name in spiff_tasks_checked_once: - expected_python_env_data = expected_task_data[f"{spiff_task.task_spec.name}_second"] + expected_task_data_key = f"{spiff_task.task_spec.name}_second" + + expected_python_env_data = expected_task_data[expected_task_data_key] + + base_failure_message = f"Failed on {bpmn_process_identifier} - {spiff_task_identifier} - task data key {expected_task_data_key}." task_model = TaskModel.query.filter_by(guid=str(spiff_task.id)).first() assert task_model.start_in_seconds is not None @@ -354,7 +404,7 @@ class TestProcessInstanceProcessor(BaseTest): assert task_definition.bpmn_process_definition.bpmn_identifier == bpmn_process_identifier message = ( - f"{base_failure_message} Expected: {expected_python_env_data}. Received: {task_model.json_data()}" + f"{base_failure_message} Expected: {sorted(expected_python_env_data)}. 
Received: {sorted(task_model.json_data())}" ) # TODO: if we split out env data again we will need to use it here instead of json_data # assert task_model.python_env_data() == expected_python_env_data, message From 722680a5ac5b31a6eb87f906a69595d209227227 Mon Sep 17 00:00:00 2001 From: jasquat Date: Wed, 22 Mar 2023 14:39:04 -0400 Subject: [PATCH 10/20] some more debugging for resetting a process to specific task w/ burnettk --- .../services/process_instance_processor.py | 81 +++++++++++-------- .../unit/test_process_instance_processor.py | 20 +++-- 2 files changed, 61 insertions(+), 40 deletions(-) diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/services/process_instance_processor.py b/spiffworkflow-backend/src/spiffworkflow_backend/services/process_instance_processor.py index c97fa7337..849a0ee54 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/services/process_instance_processor.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/services/process_instance_processor.py @@ -1338,53 +1338,64 @@ class ProcessInstanceProcessor: TaskModel.end_in_seconds.is_(None), # type: ignore ), TaskModel.process_instance_id == process_instance.id, - TaskModel.bpmn_process_id.in_(parent_bpmn_processes_ids), # type: ignore + # TaskModel.bpmn_process_id.in_(parent_bpmn_processes_ids), # type: ignore ) ) tasks_to_update = tasks_to_update_query.all() # run all queries before making changes to task_model if commit: - tasks_to_delete_query = db.session.query(TaskModel).filter( - and_( - or_( - TaskModel.end_in_seconds > to_task_model.end_in_seconds, - TaskModel.end_in_seconds.is_not(None), # type: ignore - ), - TaskModel.process_instance_id == process_instance.id, - TaskModel.guid.not_in(task_models_of_parent_bpmn_processes_guids), # type: ignore - TaskModel.bpmn_process_id.not_in(parent_bpmn_processes_ids), # type: ignore - ) - ) + # tasks_to_delete_query = db.session.query(TaskModel).filter( + # and_( + # or_( + # TaskModel.end_in_seconds > to_task_model.end_in_seconds, + # TaskModel.end_in_seconds.is_not(None), # type: ignore + # ), + # TaskModel.process_instance_id == process_instance.id, + # TaskModel.guid.not_in(task_models_of_parent_bpmn_processes_guids), # type: ignore + # TaskModel.bpmn_process_id.not_in(parent_bpmn_processes_ids), # type: ignore + # ) + # ) + # + # tasks_to_delete = tasks_to_delete_query.all() + # + # # delete any later tasks from to_task_model and delete bpmn processes that may be + # # link directly to one of those tasks. 
+ # tasks_to_delete_guids = [t.guid for t in tasks_to_delete] + # tasks_to_delete_ids = [t.id for t in tasks_to_delete] + # bpmn_processes_to_delete = BpmnProcessModel.query.filter( + # BpmnProcessModel.guid.in_(tasks_to_delete_guids) # type: ignore + # ).order_by(BpmnProcessModel.id.desc()).all() + # human_tasks_to_delete = HumanTaskModel.query.filter( + # HumanTaskModel.task_model_id.in_(tasks_to_delete_ids) # type: ignore + # ).all() + # + # + # import pdb; pdb.set_trace() + # # ensure the correct order for foreign keys + # for human_task_to_delete in human_tasks_to_delete: + # db.session.delete(human_task_to_delete) + # db.session.commit() + # for task_to_delete in tasks_to_delete: + # db.session.delete(task_to_delete) + # db.session.commit() + # for bpmn_process_to_delete in bpmn_processes_to_delete: + # db.session.delete(bpmn_process_to_delete) + # db.session.commit() - tasks_to_delete = tasks_to_delete_query.all() - - # delete any later tasks from to_task_model and delete bpmn processes that may be - # link directly to one of those tasks. - tasks_to_delete_guids = [t.guid for t in tasks_to_delete] - tasks_to_delete_ids = [t.id for t in tasks_to_delete] - bpmn_processes_to_delete = BpmnProcessModel.query.filter( - BpmnProcessModel.guid.in_(tasks_to_delete_guids) # type: ignore - ).all() - human_tasks_to_delete = HumanTaskModel.query.filter( - HumanTaskModel.task_model_id.in_(tasks_to_delete_ids) # type: ignore - ).all() - - # ensure the correct order for foreign keys - for human_task_to_delete in human_tasks_to_delete: - db.session.delete(human_task_to_delete) - db.session.commit() - for task_to_delete in tasks_to_delete: - db.session.delete(task_to_delete) - db.session.commit() - for bpmn_process_to_delete in bpmn_processes_to_delete: - db.session.delete(bpmn_process_to_delete) - db.session.commit() related_human_task = HumanTaskModel.query.filter_by(task_model_id=to_task_model.id).first() if related_human_task is not None: db.session.delete(related_human_task) + tasks_to_update_ids = [t.id for t in tasks_to_update] + human_tasks_to_delete = HumanTaskModel.query.filter( + HumanTaskModel.task_model_id.in_(tasks_to_update_ids) # type: ignore + ).all() + for human_task_to_delete in human_tasks_to_delete: + db.session.delete(human_task_to_delete) + db.session.commit() + for task_to_update in tasks_to_update: TaskService.reset_task_model(task_to_update, state="FUTURE", commit=commit) diff --git a/spiffworkflow-backend/tests/spiffworkflow_backend/unit/test_process_instance_processor.py b/spiffworkflow-backend/tests/spiffworkflow_backend/unit/test_process_instance_processor.py index d1f5da242..9b447f342 100644 --- a/spiffworkflow-backend/tests/spiffworkflow_backend/unit/test_process_instance_processor.py +++ b/spiffworkflow-backend/tests/spiffworkflow_backend/unit/test_process_instance_processor.py @@ -298,11 +298,21 @@ class TestProcessInstanceProcessor(BaseTest): ) ProcessInstanceService.complete_form_task(processor, spiff_manual_task, {}, initiator_user, human_task_one) - # process_instance = ProcessInstanceModel.query.filter_by(id=process_instance.id).first() - # processor = ProcessInstanceProcessor(process_instance) - # human_task_one = process_instance.active_human_tasks[0] - # spiff_manual_task = processor.bpmn_process_instance.get_task(UUID(human_task_one.task_id)) - # ProcessInstanceService.complete_form_task(processor, spiff_manual_task, {}, initiator_user, human_task_one) + processor.suspend() + ProcessInstanceProcessor.reset_process(process_instance, str(spiff_manual_task.id), 
commit=True) + + process_instance = ProcessInstanceModel.query.filter_by(id=process_instance.id).first() + processor = ProcessInstanceProcessor(process_instance) + human_task_one = process_instance.active_human_tasks[0] + spiff_manual_task = processor.bpmn_process_instance.get_task(UUID(human_task_one.task_id)) + import pdb; pdb.set_trace() + ProcessInstanceService.complete_form_task(processor, spiff_manual_task, {}, initiator_user, human_task_one) + import pdb; pdb.set_trace() + human_task_one = process_instance.active_human_tasks[0] + spiff_manual_task = processor.bpmn_process_instance.get_task(UUID(human_task_one.task_id)) + ProcessInstanceService.complete_form_task(processor, spiff_manual_task, {}, initiator_user, human_task_one) + + import pdb; pdb.set_trace() def test_properly_saves_tasks_when_running( self, From 7a14a58518167c94aaddf729e86f493673c55ec6 Mon Sep 17 00:00:00 2001 From: jasquat Date: Thu, 23 Mar 2023 10:44:09 -0400 Subject: [PATCH 11/20] commented out reset process code and added comment and raise until we get it actually working and fixed issue with viewing at completed task where it was not including the tasks for the parent bpmn processes --- .../models/bpmn_process.py | 3 + .../models/bpmn_process_definition.py | 3 + .../bpmn_process_definition_relationship.py | 3 + .../models/task_definition.py | 3 + .../routes/process_instances_controller.py | 41 +++- .../services/process_instance_processor.py | 212 +++++++++--------- .../services/task_service.py | 11 + .../services/workflow_execution_service.py | 2 + .../unit/test_process_instance_processor.py | 125 ++++++----- 9 files changed, 234 insertions(+), 169 deletions(-) diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/models/bpmn_process.py b/spiffworkflow-backend/src/spiffworkflow_backend/models/bpmn_process.py index c38fed7bb..d5ba53dfc 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/models/bpmn_process.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/models/bpmn_process.py @@ -1,5 +1,7 @@ from __future__ import annotations +from dataclasses import dataclass + from sqlalchemy import ForeignKey from sqlalchemy.orm import relationship @@ -18,6 +20,7 @@ class BpmnProcessNotFoundError(Exception): # "success", # boolean # "bpmn_messages", # if top-level process # "correlations", # if top-level process +@dataclass class BpmnProcessModel(SpiffworkflowBaseDBModel): __tablename__ = "bpmn_process" id: int = db.Column(db.Integer, primary_key=True) diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/models/bpmn_process_definition.py b/spiffworkflow-backend/src/spiffworkflow_backend/models/bpmn_process_definition.py index 7f60d7511..902062357 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/models/bpmn_process_definition.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/models/bpmn_process_definition.py @@ -1,5 +1,7 @@ from __future__ import annotations +from dataclasses import dataclass + from spiffworkflow_backend.models.db import db from spiffworkflow_backend.models.db import SpiffworkflowBaseDBModel @@ -10,6 +12,7 @@ from spiffworkflow_backend.models.db import SpiffworkflowBaseDBModel # # each subprocess will have its own row in this table. 
# there is a join table to link them together: bpmn_process_definition_relationship +@dataclass class BpmnProcessDefinitionModel(SpiffworkflowBaseDBModel): __tablename__ = "bpmn_process_definition" id: int = db.Column(db.Integer, primary_key=True) diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/models/bpmn_process_definition_relationship.py b/spiffworkflow-backend/src/spiffworkflow_backend/models/bpmn_process_definition_relationship.py index 096570d84..51126503a 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/models/bpmn_process_definition_relationship.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/models/bpmn_process_definition_relationship.py @@ -1,5 +1,7 @@ from __future__ import annotations +from dataclasses import dataclass + from sqlalchemy import ForeignKey from sqlalchemy import UniqueConstraint @@ -10,6 +12,7 @@ from spiffworkflow_backend.models.db import db from spiffworkflow_backend.models.db import SpiffworkflowBaseDBModel +@dataclass class BpmnProcessDefinitionRelationshipModel(SpiffworkflowBaseDBModel): __tablename__ = "bpmn_process_definition_relationship" __table_args__ = ( diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/models/task_definition.py b/spiffworkflow-backend/src/spiffworkflow_backend/models/task_definition.py index 791e1deab..ec2436493 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/models/task_definition.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/models/task_definition.py @@ -1,5 +1,7 @@ from __future__ import annotations +from dataclasses import dataclass + from sqlalchemy import ForeignKey from sqlalchemy import UniqueConstraint from sqlalchemy.orm import relationship @@ -11,6 +13,7 @@ from spiffworkflow_backend.models.db import db from spiffworkflow_backend.models.db import SpiffworkflowBaseDBModel +@dataclass class TaskDefinitionModel(SpiffworkflowBaseDBModel): __tablename__ = "task_definition" __table_args__ = ( diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/routes/process_instances_controller.py b/spiffworkflow-backend/src/spiffworkflow_backend/routes/process_instances_controller.py index b0cde36f8..75e2a23cd 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/routes/process_instances_controller.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/routes/process_instances_controller.py @@ -606,6 +606,8 @@ def process_instance_task_list( TaskModel.process_instance_id == process_instance.id, ) + to_task_model: Optional[TaskModel] = None + task_models_of_parent_bpmn_processes_guids: list[str] = [] if to_task_guid is not None: to_task_model = TaskModel.query.filter_by(guid=to_task_guid, process_instance_id=process_instance.id).first() if to_task_model is None: @@ -614,7 +616,28 @@ def process_instance_task_list( message=f"Cannot find a task with guid '{to_task_guid}' for process instance '{process_instance.id}'", status_code=400, ) - task_model_query = task_model_query.filter(TaskModel.end_in_seconds <= to_task_model.end_in_seconds) + + if to_task_model.state != "COMPLETED": + # TODO: find a better term for viewing at task state + raise ApiError( + error_code="task_cannot_be_viewed_at", + message=( + f"Desired task with guid '{to_task_guid}' for process instance '{process_instance.id}' was never" + " completed and therefore cannot be viewed at." 
+ ), + status_code=400, + ) + + _parent_bpmn_processes, task_models_of_parent_bpmn_processes = ( + TaskService.task_models_of_parent_bpmn_processes(to_task_model) + ) + task_models_of_parent_bpmn_processes_guids = [p.guid for p in task_models_of_parent_bpmn_processes if p.guid] + task_model_query = task_model_query.filter( + or_( + TaskModel.end_in_seconds <= to_task_model.end_in_seconds, # type: ignore + TaskModel.guid.in_(task_models_of_parent_bpmn_processes_guids), # type: ignore + ) + ) bpmn_process_alias = aliased(BpmnProcessModel) direct_parent_bpmn_process_alias = aliased(BpmnProcessModel) @@ -649,6 +672,9 @@ def process_instance_task_list( TaskDefinitionModel.properties_json.label("task_definition_properties_json"), # type: ignore TaskModel.guid, TaskModel.state, + TaskModel.properties_json, + TaskModel.end_in_seconds, + TaskModel.start_in_seconds, ) ) @@ -656,11 +682,18 @@ def process_instance_task_list( task_model_query = task_model_query.filter(bpmn_process_alias.id.in_(bpmn_process_ids)) task_models = task_model_query.all() - if to_task_guid is not None: + if to_task_model is not None: task_models_dict = json.loads(current_app.json.dumps(task_models)) for task_model in task_models_dict: - if task_model["guid"] == to_task_guid and task_model["state"] == "COMPLETED": - task_model["state"] = "READY" + end_in_seconds = float(task_model["end_in_seconds"]) + if to_task_model.guid == task_model["guid"] and task_model["state"] == "COMPLETED": + TaskService.reset_task_model_dict(task_model, state="READY") + elif ( + end_in_seconds is None + or to_task_model.end_in_seconds is None + or to_task_model.end_in_seconds < end_in_seconds + ) and task_model["guid"] in task_models_of_parent_bpmn_processes_guids: + TaskService.reset_task_model_dict(task_model, state="WAITING") return make_response(jsonify(task_models_dict), 200) return make_response(jsonify(task_models), 200) diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/services/process_instance_processor.py b/spiffworkflow-backend/src/spiffworkflow_backend/services/process_instance_processor.py index 849a0ee54..535a2be41 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/services/process_instance_processor.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/services/process_instance_processor.py @@ -52,8 +52,6 @@ from SpiffWorkflow.spiff.serializer.config import SPIFF_SPEC_CONFIG # type: ign from SpiffWorkflow.task import Task as SpiffTask # type: ignore from SpiffWorkflow.task import TaskState from SpiffWorkflow.util.deep_merge import DeepMerge # type: ignore -from sqlalchemy import and_ -from sqlalchemy import or_ from spiffworkflow_backend.exceptions.api_error import ApiError from spiffworkflow_backend.models.bpmn_process import BpmnProcessModel @@ -1311,114 +1309,118 @@ class ProcessInstanceProcessor: # Saving the workflow seems to reset the status self.suspend() + # FIXME: this currently cannot work for multi-instance tasks and loopback. It can somewhat for not those + # if we can properly handling resetting children tasks. Right now if we set them all to FUTURE then + # they never get picked up by spiff and processed. The process instance just stops after the to_task_guid + # and marks itself complete without processing any of the children. 
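
# A condensed sketch of the reset sequence that the disabled code below attempts,
# pieced together from its commented-out calls. It assumes the TaskService and
# TaskModel helpers used elsewhere in this patch and is not a working
# implementation; as the FIXME above says, the children of multi-instance and
# loopback tasks are not handled correctly yet.
from spiffworkflow_backend.models.task import TaskModel
from spiffworkflow_backend.services.task_service import TaskService


def sketch_reset_to_task(to_task_model, later_task_models, ancestor_task_models):
    # 1. Everything that ran after the target task goes back to FUTURE.
    for task_model in later_task_models:
        TaskService.reset_task_model(task_model, state="FUTURE", commit=True)

    # 2. The target task becomes READY again, with its data rolled back to its
    #    parent task's json/python-env hashes so it can re-execute cleanly.
    parent_task_model = TaskModel.query.filter_by(
        guid=to_task_model.properties_json["parent"]
    ).first()
    TaskService.reset_task_model(
        to_task_model,
        state="READY",
        json_data_hash=parent_task_model.json_data_hash,
        python_env_data_hash=parent_task_model.python_env_data_hash,
        commit=True,
    )

    # 3. Tasks of the enclosing (sub)processes go to WAITING so the engine will
    #    descend back into them when the instance resumes.
    for task_model in ancestor_task_models:
        TaskService.reset_task_model(task_model, state="WAITING", commit=True)
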
@classmethod def reset_process( cls, process_instance: ProcessInstanceModel, to_task_guid: str, commit: Optional[bool] = False ) -> None: """Reset a process to an earlier state.""" - cls.add_event_to_process_instance( - process_instance, ProcessInstanceEventType.process_instance_rewound_to_task.value, task_guid=to_task_guid - ) - - to_task_model = TaskModel.query.filter_by(guid=to_task_guid, process_instance_id=process_instance.id).first() - if to_task_model is None: - raise TaskNotFoundError( - f"Cannot find a task with guid '{to_task_guid}' for process instance '{process_instance.id}'" - ) - - parent_bpmn_processes, task_models_of_parent_bpmn_processes = TaskService.task_models_of_parent_bpmn_processes( - to_task_model - ) - task_models_of_parent_bpmn_processes_guids = [p.guid for p in task_models_of_parent_bpmn_processes if p.guid] - parent_bpmn_processes_ids = [p.id for p in parent_bpmn_processes] - tasks_to_update_query = db.session.query(TaskModel).filter( - and_( - or_( - TaskModel.end_in_seconds > to_task_model.end_in_seconds, - TaskModel.end_in_seconds.is_(None), # type: ignore - ), - TaskModel.process_instance_id == process_instance.id, - # TaskModel.bpmn_process_id.in_(parent_bpmn_processes_ids), # type: ignore - ) - ) - tasks_to_update = tasks_to_update_query.all() - - # run all queries before making changes to task_model - if commit: - # tasks_to_delete_query = db.session.query(TaskModel).filter( - # and_( - # or_( - # TaskModel.end_in_seconds > to_task_model.end_in_seconds, - # TaskModel.end_in_seconds.is_not(None), # type: ignore - # ), - # TaskModel.process_instance_id == process_instance.id, - # TaskModel.guid.not_in(task_models_of_parent_bpmn_processes_guids), # type: ignore - # TaskModel.bpmn_process_id.not_in(parent_bpmn_processes_ids), # type: ignore - # ) - # ) - # - # tasks_to_delete = tasks_to_delete_query.all() - # - # # delete any later tasks from to_task_model and delete bpmn processes that may be - # # link directly to one of those tasks. 
- # tasks_to_delete_guids = [t.guid for t in tasks_to_delete] - # tasks_to_delete_ids = [t.id for t in tasks_to_delete] - # bpmn_processes_to_delete = BpmnProcessModel.query.filter( - # BpmnProcessModel.guid.in_(tasks_to_delete_guids) # type: ignore - # ).order_by(BpmnProcessModel.id.desc()).all() - # human_tasks_to_delete = HumanTaskModel.query.filter( - # HumanTaskModel.task_model_id.in_(tasks_to_delete_ids) # type: ignore - # ).all() - # - # - # import pdb; pdb.set_trace() - # # ensure the correct order for foreign keys - # for human_task_to_delete in human_tasks_to_delete: - # db.session.delete(human_task_to_delete) - # db.session.commit() - # for task_to_delete in tasks_to_delete: - # db.session.delete(task_to_delete) - # db.session.commit() - # for bpmn_process_to_delete in bpmn_processes_to_delete: - # db.session.delete(bpmn_process_to_delete) - # db.session.commit() - - - related_human_task = HumanTaskModel.query.filter_by(task_model_id=to_task_model.id).first() - if related_human_task is not None: - db.session.delete(related_human_task) - - tasks_to_update_ids = [t.id for t in tasks_to_update] - human_tasks_to_delete = HumanTaskModel.query.filter( - HumanTaskModel.task_model_id.in_(tasks_to_update_ids) # type: ignore - ).all() - for human_task_to_delete in human_tasks_to_delete: - db.session.delete(human_task_to_delete) - db.session.commit() - - for task_to_update in tasks_to_update: - TaskService.reset_task_model(task_to_update, state="FUTURE", commit=commit) - - parent_task_model = TaskModel.query.filter_by(guid=to_task_model.properties_json["parent"]).first() - if parent_task_model is None: - raise TaskNotFoundError( - f"Cannot find a task with guid '{to_task_guid}' for process instance '{process_instance.id}'" - ) - - TaskService.reset_task_model( - to_task_model, - state="READY", - json_data_hash=parent_task_model.json_data_hash, - python_env_data_hash=parent_task_model.python_env_data_hash, - commit=commit, - ) - for task_model in task_models_of_parent_bpmn_processes: - TaskService.reset_task_model(task_model, state="WAITING", commit=commit) - - if commit: - processor = ProcessInstanceProcessor(process_instance) - processor.save() - processor.suspend() + raise Exception("This feature to reset a process instance to a given task is currently unavaiable") + # cls.add_event_to_process_instance( + # process_instance, ProcessInstanceEventType.process_instance_rewound_to_task.value, task_guid=to_task_guid + # ) + # + # to_task_model = TaskModel.query.filter_by(guid=to_task_guid, process_instance_id=process_instance.id).first() + # if to_task_model is None: + # raise TaskNotFoundError( + # f"Cannot find a task with guid '{to_task_guid}' for process instance '{process_instance.id}'" + # ) + # + # parent_bpmn_processes, task_models_of_parent_bpmn_processes = TaskService.task_models_of_parent_bpmn_processes( + # to_task_model + # ) + # [p.guid for p in task_models_of_parent_bpmn_processes if p.guid] + # [p.id for p in parent_bpmn_processes] + # tasks_to_update_query = db.session.query(TaskModel).filter( + # and_( + # or_( + # TaskModel.end_in_seconds > to_task_model.end_in_seconds, + # TaskModel.end_in_seconds.is_(None), # type: ignore + # ), + # TaskModel.process_instance_id == process_instance.id, + # # TaskModel.bpmn_process_id.in_(parent_bpmn_processes_ids), # type: ignore + # ) + # ) + # tasks_to_update = tasks_to_update_query.all() + # + # # run all queries before making changes to task_model + # if commit: + # # tasks_to_delete_query = db.session.query(TaskModel).filter( + # # 
and_( + # # or_( + # # TaskModel.end_in_seconds > to_task_model.end_in_seconds, + # # TaskModel.end_in_seconds.is_not(None), # type: ignore + # # ), + # # TaskModel.process_instance_id == process_instance.id, + # # TaskModel.guid.not_in(task_models_of_parent_bpmn_processes_guids), # type: ignore + # # TaskModel.bpmn_process_id.not_in(parent_bpmn_processes_ids), # type: ignore + # # ) + # # ) + # # + # # tasks_to_delete = tasks_to_delete_query.all() + # # + # # # delete any later tasks from to_task_model and delete bpmn processes that may be + # # # link directly to one of those tasks. + # # tasks_to_delete_guids = [t.guid for t in tasks_to_delete] + # # tasks_to_delete_ids = [t.id for t in tasks_to_delete] + # # bpmn_processes_to_delete = BpmnProcessModel.query.filter( + # # BpmnProcessModel.guid.in_(tasks_to_delete_guids) # type: ignore + # # ).order_by(BpmnProcessModel.id.desc()).all() + # # human_tasks_to_delete = HumanTaskModel.query.filter( + # # HumanTaskModel.task_model_id.in_(tasks_to_delete_ids) # type: ignore + # # ).all() + # # + # # + # # import pdb; pdb.set_trace() + # # # ensure the correct order for foreign keys + # # for human_task_to_delete in human_tasks_to_delete: + # # db.session.delete(human_task_to_delete) + # # db.session.commit() + # # for task_to_delete in tasks_to_delete: + # # db.session.delete(task_to_delete) + # # db.session.commit() + # # for bpmn_process_to_delete in bpmn_processes_to_delete: + # # db.session.delete(bpmn_process_to_delete) + # # db.session.commit() + # + # related_human_task = HumanTaskModel.query.filter_by(task_model_id=to_task_model.id).first() + # if related_human_task is not None: + # db.session.delete(related_human_task) + # + # tasks_to_update_ids = [t.id for t in tasks_to_update] + # human_tasks_to_delete = HumanTaskModel.query.filter( + # HumanTaskModel.task_model_id.in_(tasks_to_update_ids) # type: ignore + # ).all() + # for human_task_to_delete in human_tasks_to_delete: + # db.session.delete(human_task_to_delete) + # db.session.commit() + # + # for task_to_update in tasks_to_update: + # TaskService.reset_task_model(task_to_update, state="FUTURE", commit=commit) + # + # parent_task_model = TaskModel.query.filter_by(guid=to_task_model.properties_json["parent"]).first() + # if parent_task_model is None: + # raise TaskNotFoundError( + # f"Cannot find a task with guid '{to_task_guid}' for process instance '{process_instance.id}'" + # ) + # + # TaskService.reset_task_model( + # to_task_model, + # state="READY", + # json_data_hash=parent_task_model.json_data_hash, + # python_env_data_hash=parent_task_model.python_env_data_hash, + # commit=commit, + # ) + # for task_model in task_models_of_parent_bpmn_processes: + # TaskService.reset_task_model(task_model, state="WAITING", commit=commit) + # + # if commit: + # processor = ProcessInstanceProcessor(process_instance) + # processor.save() + # processor.suspend() @staticmethod def get_parser() -> MyCustomParser: diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/services/task_service.py b/spiffworkflow-backend/src/spiffworkflow_backend/services/task_service.py index f75d955c9..29a456772 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/services/task_service.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/services/task_service.py @@ -332,6 +332,17 @@ class TaskService: return (bpmn_processes + b, [parent_task_model] + t) return (bpmn_processes, task_models) + @classmethod + def reset_task_model_dict( + cls, + task_model: dict, + state: str, + ) -> None: + 
task_model["state"] = state + task_model["start_in_seconds"] = None + task_model["end_in_seconds"] = None + task_model["properties_json"]["state"] = getattr(TaskState, state) + @classmethod def reset_task_model( cls, diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/services/workflow_execution_service.py b/spiffworkflow-backend/src/spiffworkflow_backend/services/workflow_execution_service.py index 4d44308b3..b8983f1df 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/services/workflow_execution_service.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/services/workflow_execution_service.py @@ -107,6 +107,8 @@ class TaskModelSavingDelegate(EngineStepDelegate): def after_engine_steps(self, bpmn_process_instance: BpmnWorkflow) -> None: if self._should_update_task_model(): + # TODO: also include children of the last task processed. This may help with task resets + # if we have to set their states to FUTURE. # excludes FUTURE and COMPLETED. the others were required to get PP1 to go to completion. for waiting_spiff_task in bpmn_process_instance.get_tasks( TaskState.WAITING | TaskState.CANCELLED | TaskState.READY | TaskState.MAYBE | TaskState.LIKELY diff --git a/spiffworkflow-backend/tests/spiffworkflow_backend/unit/test_process_instance_processor.py b/spiffworkflow-backend/tests/spiffworkflow_backend/unit/test_process_instance_processor.py index 9b447f342..9ccda1cbe 100644 --- a/spiffworkflow-backend/tests/spiffworkflow_backend/unit/test_process_instance_processor.py +++ b/spiffworkflow-backend/tests/spiffworkflow_backend/unit/test_process_instance_processor.py @@ -256,63 +256,60 @@ class TestProcessInstanceProcessor(BaseTest): assert spiff_task is not None assert spiff_task.state == TaskState.COMPLETED - def test_properly_resets_process_to_given_task( - self, - app: Flask, - client: FlaskClient, - with_db_and_bpmn_file_cleanup: None, - with_super_admin_user: UserModel, - ) -> None: - self.create_process_group(client, with_super_admin_user, "test_group", "test_group") - initiator_user = self.find_or_create_user("initiator_user") - finance_user_three = self.find_or_create_user("testuser3") - assert initiator_user.principal is not None - assert finance_user_three.principal is not None - AuthorizationService.import_permissions_from_yaml_file() - - finance_group = GroupModel.query.filter_by(identifier="Finance Team").first() - assert finance_group is not None - - process_model = load_test_spec( - process_model_id="test_group/manual_task_with_subprocesses", - process_model_source_directory="manual_task_with_subprocesses", - ) - process_instance = self.create_process_instance_from_process_model( - process_model=process_model, user=initiator_user - ) - processor = ProcessInstanceProcessor(process_instance) - processor.do_engine_steps(save=True) - assert len(process_instance.active_human_tasks) == 1 - initial_human_task_id = process_instance.active_human_tasks[0].id - - # save again to ensure we go attempt to process the human tasks again - processor.save() - - assert len(process_instance.active_human_tasks) == 1 - assert initial_human_task_id == process_instance.active_human_tasks[0].id - - processor = ProcessInstanceProcessor(process_instance) - human_task_one = process_instance.active_human_tasks[0] - spiff_manual_task = processor.__class__.get_task_by_bpmn_identifier( - human_task_one.task_name, processor.bpmn_process_instance - ) - ProcessInstanceService.complete_form_task(processor, spiff_manual_task, {}, initiator_user, human_task_one) - - processor.suspend() - 
ProcessInstanceProcessor.reset_process(process_instance, str(spiff_manual_task.id), commit=True) - - process_instance = ProcessInstanceModel.query.filter_by(id=process_instance.id).first() - processor = ProcessInstanceProcessor(process_instance) - human_task_one = process_instance.active_human_tasks[0] - spiff_manual_task = processor.bpmn_process_instance.get_task(UUID(human_task_one.task_id)) - import pdb; pdb.set_trace() - ProcessInstanceService.complete_form_task(processor, spiff_manual_task, {}, initiator_user, human_task_one) - import pdb; pdb.set_trace() - human_task_one = process_instance.active_human_tasks[0] - spiff_manual_task = processor.bpmn_process_instance.get_task(UUID(human_task_one.task_id)) - ProcessInstanceService.complete_form_task(processor, spiff_manual_task, {}, initiator_user, human_task_one) - - import pdb; pdb.set_trace() + # TODO: FIX resetting a process instance to a task + # def test_properly_resets_process_to_given_task( + # self, + # app: Flask, + # client: FlaskClient, + # with_db_and_bpmn_file_cleanup: None, + # with_super_admin_user: UserModel, + # ) -> None: + # self.create_process_group(client, with_super_admin_user, "test_group", "test_group") + # initiator_user = self.find_or_create_user("initiator_user") + # finance_user_three = self.find_or_create_user("testuser3") + # assert initiator_user.principal is not None + # assert finance_user_three.principal is not None + # AuthorizationService.import_permissions_from_yaml_file() + # + # finance_group = GroupModel.query.filter_by(identifier="Finance Team").first() + # assert finance_group is not None + # + # process_model = load_test_spec( + # process_model_id="test_group/manual_task_with_subprocesses", + # process_model_source_directory="manual_task_with_subprocesses", + # ) + # process_instance = self.create_process_instance_from_process_model( + # process_model=process_model, user=initiator_user + # ) + # processor = ProcessInstanceProcessor(process_instance) + # processor.do_engine_steps(save=True) + # assert len(process_instance.active_human_tasks) == 1 + # initial_human_task_id = process_instance.active_human_tasks[0].id + # + # # save again to ensure we go attempt to process the human tasks again + # processor.save() + # + # assert len(process_instance.active_human_tasks) == 1 + # assert initial_human_task_id == process_instance.active_human_tasks[0].id + # + # processor = ProcessInstanceProcessor(process_instance) + # human_task_one = process_instance.active_human_tasks[0] + # spiff_manual_task = processor.__class__.get_task_by_bpmn_identifier( + # human_task_one.task_name, processor.bpmn_process_instance + # ) + # ProcessInstanceService.complete_form_task(processor, spiff_manual_task, {}, initiator_user, human_task_one) + # + # processor.suspend() + # ProcessInstanceProcessor.reset_process(process_instance, str(spiff_manual_task.id), commit=True) + # + # process_instance = ProcessInstanceModel.query.filter_by(id=process_instance.id).first() + # processor = ProcessInstanceProcessor(process_instance) + # human_task_one = process_instance.active_human_tasks[0] + # spiff_manual_task = processor.bpmn_process_instance.get_task(UUID(human_task_one.task_id)) + # ProcessInstanceService.complete_form_task(processor, spiff_manual_task, {}, initiator_user, human_task_one) + # human_task_one = process_instance.active_human_tasks[0] + # spiff_manual_task = processor.bpmn_process_instance.get_task(UUID(human_task_one.task_id)) + # ProcessInstanceService.complete_form_task(processor, spiff_manual_task, {}, 
initiator_user, human_task_one) def test_properly_saves_tasks_when_running( self, @@ -374,7 +371,11 @@ class TestProcessInstanceProcessor(BaseTest): } third_data_set = { **second_data_set, - **{"set_in_test_process_to_call_script": 1, "set_in_test_process_to_call_subprocess_subprocess_script": 1, "set_in_test_process_to_call_subprocess_script": 1}, + **{ + "set_in_test_process_to_call_script": 1, + "set_in_test_process_to_call_subprocess_subprocess_script": 1, + "set_in_test_process_to_call_subprocess_script": 1, + }, } fourth_data_set = {**third_data_set, **{"a": 1, "we_move_on": True}} fifth_data_set = {**fourth_data_set, **{"validate_only": False, "set_top_level_process_script_after_gate": 1}} @@ -401,7 +402,10 @@ class TestProcessInstanceProcessor(BaseTest): expected_python_env_data = expected_task_data[expected_task_data_key] - base_failure_message = f"Failed on {bpmn_process_identifier} - {spiff_task_identifier} - task data key {expected_task_data_key}." + base_failure_message = ( + f"Failed on {bpmn_process_identifier} - {spiff_task_identifier} - task data key" + f" {expected_task_data_key}." + ) task_model = TaskModel.query.filter_by(guid=str(spiff_task.id)).first() assert task_model.start_in_seconds is not None @@ -414,7 +418,8 @@ class TestProcessInstanceProcessor(BaseTest): assert task_definition.bpmn_process_definition.bpmn_identifier == bpmn_process_identifier message = ( - f"{base_failure_message} Expected: {sorted(expected_python_env_data)}. Received: {sorted(task_model.json_data())}" + f"{base_failure_message} Expected: {sorted(expected_python_env_data)}. Received:" + f" {sorted(task_model.json_data())}" ) # TODO: if we split out env data again we will need to use it here instead of json_data # assert task_model.python_env_data() == expected_python_env_data, message From 250298e9ad580981b6580b4b047d3c56424ea50d Mon Sep 17 00:00:00 2001 From: jasquat Date: Thu, 23 Mar 2023 11:08:23 -0400 Subject: [PATCH 12/20] some updates to add the log link back into the log list w/ burnettk --- spiffworkflow-frontend/src/interfaces.ts | 17 +++++++ .../src/routes/ProcessInstanceLogList.tsx | 47 ++++++++++++------- .../src/routes/ProcessInstanceShow.tsx | 2 +- 3 files changed, 49 insertions(+), 17 deletions(-) diff --git a/spiffworkflow-frontend/src/interfaces.ts b/spiffworkflow-frontend/src/interfaces.ts index 4e65bd020..2b1a457dc 100644 --- a/spiffworkflow-frontend/src/interfaces.ts +++ b/spiffworkflow-frontend/src/interfaces.ts @@ -300,3 +300,20 @@ export interface JsonSchemaForm { process_model_id: string; required: string[]; } + +export interface ProcessInstanceLogEntry { + bpmn_process_definition_identifier: string; + bpmn_process_definition_name: string; + bpmn_task_type: string; + event_type: string; + spiff_task_guid: string; + task_definition_identifier: string; + task_guid: string; + timestamp: number; + id: number; + process_instance_id: number; + + task_definition_name?: string; + user_id?: number; + username?: string; +} diff --git a/spiffworkflow-frontend/src/routes/ProcessInstanceLogList.tsx b/spiffworkflow-frontend/src/routes/ProcessInstanceLogList.tsx index 4f1d39bee..797ba2543 100644 --- a/spiffworkflow-frontend/src/routes/ProcessInstanceLogList.tsx +++ b/spiffworkflow-frontend/src/routes/ProcessInstanceLogList.tsx @@ -1,7 +1,7 @@ import { useEffect, useState } from 'react'; // @ts-ignore import { Table, Tabs, TabList, Tab } from '@carbon/react'; -import { useParams, useSearchParams } from 'react-router-dom'; +import { Link, useParams, useSearchParams } from 
'react-router-dom'; import PaginationForTable from '../components/PaginationForTable'; import ProcessBreadcrumb from '../components/ProcessBreadcrumb'; import { @@ -10,6 +10,7 @@ import { } from '../helpers'; import HttpService from '../services/HttpService'; import { useUriListForPermissions } from '../hooks/UriListForPermissions'; +import { ProcessInstanceLogEntry } from '../interfaces'; type OwnProps = { variant: string; @@ -50,25 +51,26 @@ export default function ProcessInstanceLogList({ variant }: OwnProps) { isDetailedView, ]); - const getTableRow = (row: any) => { + const getTableRow = (logEntry: ProcessInstanceLogEntry) => { const tableRow = []; const taskNameCell = ( - {row.task_definition_name || - (row.bpmn_task_type === 'StartEvent' ? 'Process Started' : '') || - (row.bpmn_task_type === 'EndEvent' ? 'Process Ended' : '')} + {logEntry.spiff_task_guid || + logEntry.task_definition_name || + (logEntry.bpmn_task_type === 'StartEvent' ? 'Process Started' : '') || + (logEntry.bpmn_task_type === 'EndEvent' ? 'Process Ended' : '')} ); const bpmnProcessCell = ( - {row.bpmn_process_definition_name || - row.bpmn_process_definition_identifier} + {logEntry.bpmn_process_definition_name || + logEntry.bpmn_process_definition_identifier} ); if (isDetailedView) { tableRow.push( <> - {row.id} + {logEntry.id} {bpmnProcessCell} {taskNameCell} @@ -84,24 +86,37 @@ export default function ProcessInstanceLogList({ variant }: OwnProps) { if (isDetailedView) { tableRow.push( <> - {row.bpmn_task_type} - {row.event_type} + {logEntry.bpmn_task_type} + {logEntry.event_type} - {row.username || ( + {logEntry.username || ( system )} ); } - tableRow.push({convertSecondsToFormattedDateTime(row.timestamp)}); - return {tableRow}; + // tableRow.push({convertSecondsToFormattedDateTime(logEntry.timestamp)}); + tableRow.push( + + + {convertSecondsToFormattedDateTime(logEntry.timestamp)} + + + ); + + return {tableRow}; }; const buildTable = () => { - const rows = processInstanceLogs.map((row) => { - return getTableRow(row); - }); + const rows = processInstanceLogs.map( + (logEntry: ProcessInstanceLogEntry) => { + return getTableRow(logEntry); + } + ); const tableHeaders = []; if (isDetailedView) { diff --git a/spiffworkflow-frontend/src/routes/ProcessInstanceShow.tsx b/spiffworkflow-frontend/src/routes/ProcessInstanceShow.tsx index 74e6e1a8d..1cfc36c21 100644 --- a/spiffworkflow-frontend/src/routes/ProcessInstanceShow.tsx +++ b/spiffworkflow-frontend/src/routes/ProcessInstanceShow.tsx @@ -608,7 +608,7 @@ export default function ProcessInstanceShow({ variant }: OwnProps) { const isCurrentTask = (task: Task) => { const subprocessTypes = [ 'Subprocess', - 'Call Activity', + 'CallActivity', 'Transactional Subprocess', ]; return ( From f45f2c1cc6b85e0a8434f3ff768128a064f989e6 Mon Sep 17 00:00:00 2001 From: jasquat Date: Thu, 23 Mar 2023 13:55:16 -0400 Subject: [PATCH 13/20] added link to go back to most recent --- .../routes/process_instances_controller.py | 2 +- .../src/routes/ProcessInstanceLogList.tsx | 30 ++++++---- .../src/routes/ProcessInstanceShow.tsx | 59 +++++++++++++++---- 3 files changed, 65 insertions(+), 26 deletions(-) diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/routes/process_instances_controller.py b/spiffworkflow-backend/src/spiffworkflow_backend/routes/process_instances_controller.py index 75e2a23cd..bb5cef6c2 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/routes/process_instances_controller.py +++ 
b/spiffworkflow-backend/src/spiffworkflow_backend/routes/process_instances_controller.py @@ -685,7 +685,7 @@ def process_instance_task_list( if to_task_model is not None: task_models_dict = json.loads(current_app.json.dumps(task_models)) for task_model in task_models_dict: - end_in_seconds = float(task_model["end_in_seconds"]) + end_in_seconds = float(task_model["end_in_seconds"]) if task_model["end_in_seconds"] is not None else None if to_task_model.guid == task_model["guid"] and task_model["state"] == "COMPLETED": TaskService.reset_task_model_dict(task_model, state="READY") elif ( diff --git a/spiffworkflow-frontend/src/routes/ProcessInstanceLogList.tsx b/spiffworkflow-frontend/src/routes/ProcessInstanceLogList.tsx index 797ba2543..a59b2fab4 100644 --- a/spiffworkflow-frontend/src/routes/ProcessInstanceLogList.tsx +++ b/spiffworkflow-frontend/src/routes/ProcessInstanceLogList.tsx @@ -55,8 +55,7 @@ export default function ProcessInstanceLogList({ variant }: OwnProps) { const tableRow = []; const taskNameCell = ( - {logEntry.spiff_task_guid || - logEntry.task_definition_name || + {logEntry.task_definition_name || (logEntry.bpmn_task_type === 'StartEvent' ? 'Process Started' : '') || (logEntry.bpmn_task_type === 'EndEvent' ? 'Process Ended' : '')} @@ -96,17 +95,24 @@ export default function ProcessInstanceLogList({ variant }: OwnProps) { ); } - // tableRow.push({convertSecondsToFormattedDateTime(logEntry.timestamp)}); - tableRow.push( - - - {convertSecondsToFormattedDateTime(logEntry.timestamp)} - - + + let timestampComponent = ( + {convertSecondsToFormattedDateTime(logEntry.timestamp)} ); + if (logEntry.spiff_task_guid) { + timestampComponent = ( + + + {convertSecondsToFormattedDateTime(logEntry.timestamp)} + + + ); + } + tableRow.push(timestampComponent); return {tableRow}; }; diff --git a/spiffworkflow-frontend/src/routes/ProcessInstanceShow.tsx b/spiffworkflow-frontend/src/routes/ProcessInstanceShow.tsx index 1cfc36c21..07cb45fdf 100644 --- a/spiffworkflow-frontend/src/routes/ProcessInstanceShow.tsx +++ b/spiffworkflow-frontend/src/routes/ProcessInstanceShow.tsx @@ -605,7 +605,7 @@ export default function ProcessInstanceShow({ variant }: OwnProps) { } }; - const isCurrentTask = (task: Task) => { + const isActiveTask = (task: Task) => { const subprocessTypes = [ 'Subprocess', 'CallActivity', @@ -622,7 +622,7 @@ export default function ProcessInstanceShow({ variant }: OwnProps) { return ( processInstance && ability.can('PUT', targetUris.processInstanceTaskDataPath) && - isCurrentTask(task) && + isActiveTask(task) && processInstance.status === 'suspended' && showingLastSpiffStep() ); @@ -646,7 +646,7 @@ export default function ProcessInstanceShow({ variant }: OwnProps) { processInstance && processInstance.status === 'suspended' && ability.can('POST', targetUris.processInstanceCompleteTaskPath) && - isCurrentTask(task) && + isActiveTask(task) && showingLastSpiffStep() ); }; @@ -976,16 +976,18 @@ export default function ProcessInstanceShow({ variant }: OwnProps) { ): {taskToUse.state} {taskDisplayButtons(taskToUse)} -
- - {completionViewLink( - 'View state at task completion', - taskToUse.guid - )} - -
-
-
+ {taskToUse.state == 'COMPLETED' ? ( +
+ + {completionViewLink( + 'View state at task completion', + taskToUse.guid + )} + +
+
+
+ ) : null} {selectingEvent ? eventSelector(candidateEvents) : taskDataContainer()} @@ -1029,6 +1031,36 @@ export default function ProcessInstanceShow({ variant }: OwnProps) { return elements; }; + // right now this just assume if to_task_guid was passed in then + // this cannot be the active task. + // we may need a better way to figure this out. + const showingActiveTask = () => { + return !!params.to_task_guid; + }; + + const viewMostRecentStateComponent = () => { + if (!showingActiveTask()) { + return null; + } + + return ( + <> + + + + View at most recent state + + + +
+ + ); + }; + if (processInstance && (tasks || tasksCallHadError)) { const taskIds = getTaskIds(); const processModelId = unModifyProcessIdentifierForPathParam( @@ -1083,6 +1115,7 @@ export default function ProcessInstanceShow({ variant }: OwnProps) { {processDataDisplayArea()} {processInstanceMetadataArea()}
+ {viewMostRecentStateComponent()} Date: Thu, 23 Mar 2023 14:28:20 -0400 Subject: [PATCH 14/20] fixed editing task data and skipping tasks, moved task and task data methods from process_api_blueprint to tasks_controller, and updated to favor task_guid over task_id in some places --- .../src/spiffworkflow_backend/api.yml | 41 +---- .../routes/process_api_blueprint.py | 85 ----------- .../routes/process_instances_controller.py | 1 + .../routes/tasks_controller.py | 144 ++++++++++++++---- .../src/routes/ProcessInstanceShow.tsx | 48 +++--- 5 files changed, 145 insertions(+), 174 deletions(-) diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/api.yml b/spiffworkflow-backend/src/spiffworkflow_backend/api.yml index f7fa3f036..6fa28040e 100755 --- a/spiffworkflow-backend/src/spiffworkflow_backend/api.yml +++ b/spiffworkflow-backend/src/spiffworkflow_backend/api.yml @@ -1590,7 +1590,7 @@ paths: - name: task_guid in: path required: true - description: The guid of the task to show. + description: The unique id of the task. schema: type: string get: @@ -1605,35 +1605,8 @@ paths: application/json: schema: $ref: "#/components/schemas/Task" - - /task-data/{modified_process_model_identifier}/{process_instance_id}/{task_id}: - parameters: - - name: modified_process_model_identifier - in: path - required: true - description: The modified id of an existing process model - schema: - type: string - - name: process_instance_id - in: path - required: true - description: The unique id of an existing process instance. - schema: - type: integer - - name: task_id - in: path - required: true - description: The unique id of the task. - schema: - type: string - - name: task_guid - in: path - required: true - description: The guid of the task to show. - schema: - type: string put: - operationId: spiffworkflow_backend.routes.process_api_blueprint.task_data_update + operationId: spiffworkflow_backend.routes.tasks_controller.task_data_update summary: Update the task data for requested instance and task tags: - Process Instances @@ -1738,7 +1711,7 @@ paths: schema: $ref: "#/components/schemas/Workflow" - /task-complete/{modified_process_model_identifier}/{process_instance_id}/{task_id}: + /task-complete/{modified_process_model_identifier}/{process_instance_id}/{task_guid}: parameters: - name: modified_process_model_identifier in: path @@ -1752,14 +1725,14 @@ paths: description: The unique id of the process instance schema: type: string - - name: task_id + - name: task_guid in: path required: true description: The unique id of the task. schema: type: string post: - operationId: spiffworkflow_backend.routes.process_api_blueprint.manual_complete_task + operationId: spiffworkflow_backend.routes.tasks_controller.manual_complete_task summary: Mark a task complete without executing it tags: - Process Instances @@ -1838,9 +1811,9 @@ paths: schema: $ref: "#/components/schemas/ServiceTask" - /tasks/{process_instance_id}/{task_id}: + /tasks/{process_instance_id}/{task_guid}: parameters: - - name: task_id + - name: task_guid in: path required: true description: The unique id of an existing process group. 
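
For reference, a rough sketch of how a client might call the two endpoints touched by this rename once the patch is applied. The request bodies mirror the task_data_update and manual_complete_task handlers added to tasks_controller.py below; the host, the /v1.0 prefix, and the auth header are assumptions rather than part of this change.

import json

import requests

BASE_URL = "http://localhost:7000/v1.0"  # assumed host and API prefix
HEADERS = {"Authorization": "Bearer <access_token>"}  # assumed auth scheme


def update_task_data(modified_model_id: str, instance_id: int, task_guid: str, new_data: dict) -> None:
    # The backend requires the process instance to be suspended and expects
    # new_task_data as a JSON-encoded string.
    response = requests.put(
        f"{BASE_URL}/task-data/{modified_model_id}/{instance_id}/{task_guid}",
        headers=HEADERS,
        json={"new_task_data": json.dumps(new_data)},
    )
    response.raise_for_status()


def skip_task(modified_model_id: str, instance_id: int, task_guid: str) -> None:
    # execute=False marks the task complete without running it.
    response = requests.post(
        f"{BASE_URL}/task-complete/{modified_model_id}/{instance_id}/{task_guid}",
        headers=HEADERS,
        json={"execute": False},
    )
    response.raise_for_status()
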
diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/routes/process_api_blueprint.py b/spiffworkflow-backend/src/spiffworkflow_backend/routes/process_api_blueprint.py index a07f5f494..ac38eff02 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/routes/process_api_blueprint.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/routes/process_api_blueprint.py @@ -16,15 +16,9 @@ from spiffworkflow_backend.exceptions.api_error import ApiError from spiffworkflow_backend.exceptions.process_entity_not_found_error import ( ProcessEntityNotFoundError, ) -from spiffworkflow_backend.models.db import db -from spiffworkflow_backend.models.json_data import JsonDataModel from spiffworkflow_backend.models.principal import PrincipalModel from spiffworkflow_backend.models.process_instance import ProcessInstanceModel from spiffworkflow_backend.models.process_instance import ProcessInstanceModelSchema -from spiffworkflow_backend.models.process_instance import ( - ProcessInstanceTaskDataCannotBeUpdatedError, -) -from spiffworkflow_backend.models.process_instance_event import ProcessInstanceEventType from spiffworkflow_backend.models.process_instance_file_data import ( ProcessInstanceFileDataModel, ) @@ -38,7 +32,6 @@ from spiffworkflow_backend.services.process_instance_processor import ( ProcessInstanceProcessor, ) from spiffworkflow_backend.services.process_model_service import ProcessModelService -from spiffworkflow_backend.services.task_service import TaskService process_api_blueprint = Blueprint("process_api", __name__) @@ -169,60 +162,6 @@ def github_webhook_receive(body: Dict) -> Response: return Response(json.dumps({"git_pull": result}), status=200, mimetype="application/json") -def task_data_update( - process_instance_id: str, - modified_process_model_identifier: str, - task_id: str, - body: Dict, -) -> Response: - """Update task data.""" - process_instance = ProcessInstanceModel.query.filter(ProcessInstanceModel.id == int(process_instance_id)).first() - if process_instance: - if process_instance.status != "suspended": - raise ProcessInstanceTaskDataCannotBeUpdatedError( - "The process instance needs to be suspended to update the task-data." - f" It is currently: {process_instance.status}" - ) - - task_model = TaskModel.query.filter_by(guid=task_id).first() - if task_model is None: - raise ApiError( - error_code="update_task_data_error", - message=f"Could not find Task: {task_id} in Instance: {process_instance_id}.", - ) - - if "new_task_data" in body: - new_task_data_str: str = body["new_task_data"] - new_task_data_dict = json.loads(new_task_data_str) - json_data_dict = TaskService.update_task_data_on_task_model( - task_model, new_task_data_dict, "json_data_hash" - ) - if json_data_dict is not None: - json_data = JsonDataModel(**json_data_dict) - db.session.add(json_data) - ProcessInstanceProcessor.add_event_to_process_instance( - process_instance, ProcessInstanceEventType.task_data_edited.value, task_guid=task_id - ) - try: - db.session.commit() - except Exception as e: - db.session.rollback() - raise ApiError( - error_code="update_task_data_error", - message=f"Could not update the Instance. 
Original error is {e}", - ) from e - else: - raise ApiError( - error_code="update_task_data_error", - message=f"Could not update task data for Instance: {process_instance_id}, and Task: {task_id}.", - ) - return Response( - json.dumps(ProcessInstanceModelSchema().dump(process_instance)), - status=200, - mimetype="application/json", - ) - - def _get_required_parameter_or_raise(parameter: str, post_body: dict[str, Any]) -> Any: """Get_required_parameter_or_raise.""" return_value = None @@ -263,30 +202,6 @@ def send_bpmn_event( ) -def manual_complete_task( - modified_process_model_identifier: str, - process_instance_id: str, - task_id: str, - body: Dict, -) -> Response: - """Mark a task complete without executing it.""" - execute = body.get("execute", True) - process_instance = ProcessInstanceModel.query.filter(ProcessInstanceModel.id == int(process_instance_id)).first() - if process_instance: - processor = ProcessInstanceProcessor(process_instance) - processor.manual_complete_task(task_id, execute) - else: - raise ApiError( - error_code="complete_task", - message=f"Could not complete Task {task_id} in Instance {process_instance_id}", - ) - return Response( - json.dumps(ProcessInstanceModelSchema().dump(process_instance)), - status=200, - mimetype="application/json", - ) - - def _commit_and_push_to_git(message: str) -> None: """Commit_and_push_to_git.""" if current_app.config["SPIFFWORKFLOW_BACKEND_GIT_COMMIT_ON_SAVE"]: diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/routes/process_instances_controller.py b/spiffworkflow-backend/src/spiffworkflow_backend/routes/process_instances_controller.py index bb5cef6c2..48a931c21 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/routes/process_instances_controller.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/routes/process_instances_controller.py @@ -218,6 +218,7 @@ def process_instance_resume( try: processor.lock_process_instance("Web") processor.resume() + processor.do_engine_steps(save=True) except (ProcessInstanceIsNotEnqueuedError, ProcessInstanceIsAlreadyLockedError) as e: ErrorHandlingService().handle_error(processor, e) raise e diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/routes/tasks_controller.py b/spiffworkflow-backend/src/spiffworkflow_backend/routes/tasks_controller.py index 495d22dec..c49eda582 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/routes/tasks_controller.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/routes/tasks_controller.py @@ -33,8 +33,14 @@ from spiffworkflow_backend.models.db import db from spiffworkflow_backend.models.group import GroupModel from spiffworkflow_backend.models.human_task import HumanTaskModel from spiffworkflow_backend.models.human_task_user import HumanTaskUserModel +from spiffworkflow_backend.models.json_data import JsonDataModel from spiffworkflow_backend.models.process_instance import ProcessInstanceModel +from spiffworkflow_backend.models.process_instance import ProcessInstanceModelSchema from spiffworkflow_backend.models.process_instance import ProcessInstanceStatus +from spiffworkflow_backend.models.process_instance import ( + ProcessInstanceTaskDataCannotBeUpdatedError, +) +from spiffworkflow_backend.models.process_instance_event import ProcessInstanceEventType from spiffworkflow_backend.models.process_model import ProcessModelInfo from spiffworkflow_backend.models.task import Task from spiffworkflow_backend.models.task import TaskModel # noqa: F401 @@ -56,6 +62,7 @@ from 
spiffworkflow_backend.services.process_instance_service import ( ) from spiffworkflow_backend.services.process_model_service import ProcessModelService from spiffworkflow_backend.services.spec_file_service import SpecFileService +from spiffworkflow_backend.services.task_service import TaskService class TaskDataSelectOption(TypedDict): @@ -182,24 +189,85 @@ def task_data_show( return make_response(jsonify(task_model), 200) -def _munge_form_ui_schema_based_on_hidden_fields_in_task_data(task: Task) -> None: - if task.form_ui_schema is None: - task.form_ui_schema = {} +def task_data_update( + process_instance_id: str, + modified_process_model_identifier: str, + task_guid: str, + body: Dict, +) -> Response: + """Update task data.""" + process_instance = ProcessInstanceModel.query.filter(ProcessInstanceModel.id == int(process_instance_id)).first() + if process_instance: + if process_instance.status != "suspended": + raise ProcessInstanceTaskDataCannotBeUpdatedError( + "The process instance needs to be suspended to update the task-data." + f" It is currently: {process_instance.status}" + ) - if task.data and "form_ui_hidden_fields" in task.data: - hidden_fields = task.data["form_ui_hidden_fields"] - for hidden_field in hidden_fields: - hidden_field_parts = hidden_field.split(".") - relevant_depth_of_ui_schema = task.form_ui_schema - for ii, hidden_field_part in enumerate(hidden_field_parts): - if hidden_field_part not in relevant_depth_of_ui_schema: - relevant_depth_of_ui_schema[hidden_field_part] = {} - relevant_depth_of_ui_schema = relevant_depth_of_ui_schema[hidden_field_part] - if len(hidden_field_parts) == ii + 1: - relevant_depth_of_ui_schema["ui:widget"] = "hidden" + task_model = TaskModel.query.filter_by(guid=task_guid).first() + if task_model is None: + raise ApiError( + error_code="update_task_data_error", + message=f"Could not find Task: {task_guid} in Instance: {process_instance_id}.", + ) + + if "new_task_data" in body: + new_task_data_str: str = body["new_task_data"] + new_task_data_dict = json.loads(new_task_data_str) + json_data_dict = TaskService.update_task_data_on_task_model( + task_model, new_task_data_dict, "json_data_hash" + ) + if json_data_dict is not None: + json_data = JsonDataModel(**json_data_dict) + db.session.add(json_data) + ProcessInstanceProcessor.add_event_to_process_instance( + process_instance, ProcessInstanceEventType.task_data_edited.value, task_guid=task_guid + ) + try: + db.session.commit() + except Exception as e: + db.session.rollback() + raise ApiError( + error_code="update_task_data_error", + message=f"Could not update the Instance. 
Original error is {e}", + ) from e + else: + raise ApiError( + error_code="update_task_data_error", + message=f"Could not update task data for Instance: {process_instance_id}, and Task: {task_guid}.", + ) + return Response( + json.dumps(ProcessInstanceModelSchema().dump(process_instance)), + status=200, + mimetype="application/json", + ) -def task_show(process_instance_id: int, task_id: str) -> flask.wrappers.Response: +def manual_complete_task( + modified_process_model_identifier: str, + process_instance_id: str, + task_guid: str, + body: Dict, +) -> Response: + """Mark a task complete without executing it.""" + execute = body.get("execute", True) + process_instance = ProcessInstanceModel.query.filter(ProcessInstanceModel.id == int(process_instance_id)).first() + if process_instance: + processor = ProcessInstanceProcessor(process_instance) + processor.manual_complete_task(task_guid, execute) + else: + raise ApiError( + error_code="complete_task", + message=f"Could not complete Task {task_guid} in Instance {process_instance_id}", + ) + return Response( + json.dumps(ProcessInstanceModelSchema().dump(process_instance)), + status=200, + mimetype="application/json", + ) + + +def task_show(process_instance_id: int, task_guid: str) -> flask.wrappers.Response: """Task_show.""" process_instance = _find_process_instance_by_id_or_raise(process_instance_id) @@ -214,12 +282,12 @@ def task_show(process_instance_id: int, task_id: str) -> flask.wrappers.Response process_instance.process_model_identifier, ) - _find_human_task_or_raise(process_instance_id, task_id) + _find_human_task_or_raise(process_instance_id, task_guid) form_schema_file_name = "" form_ui_schema_file_name = "" processor = ProcessInstanceProcessor(process_instance) - spiff_task = _get_spiff_task_from_process_instance(task_id, process_instance, processor=processor) + spiff_task = _get_spiff_task_from_process_instance(task_guid, process_instance, processor=processor) extensions = spiff_task.task_spec.extensions if "properties" in extensions: @@ -252,7 +320,8 @@ def task_show(process_instance_id: int, task_id: str) -> flask.wrappers.Response ApiError( error_code="missing_form_file", message=( - f"Cannot find a form file for process_instance_id: {process_instance_id}, task_id: {task_id}" + f"Cannot find a form file for process_instance_id: {process_instance_id}, task_guid:" + f" {task_guid}" ), status_code=400, ) @@ -319,7 +388,7 @@ def process_data_show( def task_submit_shared( process_instance_id: int, - task_id: str, + task_guid: str, body: Dict[str, Any], terminate_loop: bool = False, ) -> flask.wrappers.Response: @@ -336,7 +405,7 @@ def task_submit_shared( ) processor = ProcessInstanceProcessor(process_instance) - spiff_task = _get_spiff_task_from_process_instance(task_id, process_instance, processor=processor) + spiff_task = _get_spiff_task_from_process_instance(task_guid, process_instance, processor=processor) AuthorizationService.assert_user_can_complete_spiff_task(process_instance.id, spiff_task, principal.user) if spiff_task.state != TaskState.READY: @@ -353,7 +422,7 @@ def task_submit_shared( human_task = _find_human_task_or_raise( process_instance_id=process_instance_id, - task_id=task_id, + task_guid=task_guid, only_tasks_that_can_be_completed=True, ) @@ -398,13 +467,13 @@ def task_submit_shared( def task_submit( process_instance_id: int, - task_id: str, + task_guid: str, body: Dict[str, Any], terminate_loop: bool = False, ) -> flask.wrappers.Response: """Task_submit_user_data.""" with 
sentry_sdk.start_span(op="controller_action", description="tasks_controller.task_submit"): - return task_submit_shared(process_instance_id, task_id, body, terminate_loop) + return task_submit_shared(process_instance_id, task_guid, body, terminate_loop) def _get_tasks( @@ -559,14 +628,14 @@ def _render_jinja_template(unprocessed_template: str, spiff_task: SpiffTask) -> def _get_spiff_task_from_process_instance( - task_id: str, + task_guid: str, process_instance: ProcessInstanceModel, processor: Union[ProcessInstanceProcessor, None] = None, ) -> SpiffTask: """Get_spiff_task_from_process_instance.""" if processor is None: processor = ProcessInstanceProcessor(process_instance) - task_uuid = uuid.UUID(task_id) + task_uuid = uuid.UUID(task_guid) spiff_task = processor.bpmn_process_instance.get_task(task_uuid) if spiff_task is None: @@ -658,15 +727,15 @@ def _get_potential_owner_usernames(assigned_user: AliasedClass) -> Any: def _find_human_task_or_raise( process_instance_id: int, - task_id: str, + task_guid: str, only_tasks_that_can_be_completed: bool = False, ) -> HumanTaskModel: if only_tasks_that_can_be_completed: human_task_query = HumanTaskModel.query.filter_by( - process_instance_id=process_instance_id, task_id=task_id, completed=False + process_instance_id=process_instance_id, task_id=task_guid, completed=False ) else: - human_task_query = HumanTaskModel.query.filter_by(process_instance_id=process_instance_id, task_id=task_id) + human_task_query = HumanTaskModel.query.filter_by(process_instance_id=process_instance_id, task_id=task_guid) human_task: HumanTaskModel = human_task_query.first() if human_task is None: @@ -674,10 +743,27 @@ def _find_human_task_or_raise( ApiError( error_code="no_human_task", message=( - f"Cannot find a task to complete for task id '{task_id}' and" + f"Cannot find a task to complete for task id '{task_guid}' and" f" process instance {process_instance_id}." ), status_code=500, ) ) return human_task + + +def _munge_form_ui_schema_based_on_hidden_fields_in_task_data(task: Task) -> None: + if task.form_ui_schema is None: + task.form_ui_schema = {} + + if task.data and "form_ui_hidden_fields" in task.data: + hidden_fields = task.data["form_ui_hidden_fields"] + for hidden_field in hidden_fields: + hidden_field_parts = hidden_field.split(".") + relevant_depth_of_ui_schema = task.form_ui_schema + for ii, hidden_field_part in enumerate(hidden_field_parts): + if hidden_field_part not in relevant_depth_of_ui_schema: + relevant_depth_of_ui_schema[hidden_field_part] = {} + relevant_depth_of_ui_schema = relevant_depth_of_ui_schema[hidden_field_part] + if len(hidden_field_parts) == ii + 1: + relevant_depth_of_ui_schema["ui:widget"] = "hidden" diff --git a/spiffworkflow-frontend/src/routes/ProcessInstanceShow.tsx b/spiffworkflow-frontend/src/routes/ProcessInstanceShow.tsx index 07cb45fdf..308f4bd1e 100644 --- a/spiffworkflow-frontend/src/routes/ProcessInstanceShow.tsx +++ b/spiffworkflow-frontend/src/routes/ProcessInstanceShow.tsx @@ -234,10 +234,11 @@ export default function ProcessInstanceShow({ variant }: OwnProps) { return params.to_task_guid; }; - const showingLastSpiffStep = () => { - return ( - processInstance && currentToTaskGuid() === processInstance.spiff_step - ); + // right now this just assume if to_task_guid was passed in then + // this cannot be the active task. + // we may need a better way to figure this out. 
+ const showingActiveTask = () => { + return !params.to_task_guid; }; const completionViewLink = (label: any, taskGuid: string) => { @@ -496,7 +497,7 @@ export default function ProcessInstanceShow({ variant }: OwnProps) { const initializeTaskDataToDisplay = (task: Task | null) => { if ( task && - task.state === 'COMPLETED' && + (task.state === 'COMPLETED' || task.state === 'READY') && ability.can('GET', targetUris.processInstanceTaskDataPath) ) { setShowTaskDataLoading(true); @@ -624,7 +625,7 @@ export default function ProcessInstanceShow({ variant }: OwnProps) { ability.can('PUT', targetUris.processInstanceTaskDataPath) && isActiveTask(task) && processInstance.status === 'suspended' && - showingLastSpiffStep() + showingActiveTask() ); }; @@ -637,7 +638,7 @@ export default function ProcessInstanceShow({ variant }: OwnProps) { ability.can('POST', targetUris.processInstanceSendEventPath) && taskTypes.filter((t) => t === task.typename).length > 0 && task.state === 'WAITING' && - showingLastSpiffStep() + showingActiveTask() ); }; @@ -647,18 +648,20 @@ export default function ProcessInstanceShow({ variant }: OwnProps) { processInstance.status === 'suspended' && ability.can('POST', targetUris.processInstanceCompleteTaskPath) && isActiveTask(task) && - showingLastSpiffStep() + showingActiveTask() ); }; - const canResetProcess = (task: Task) => { - return ( - ability.can('POST', targetUris.processInstanceResetPath) && - processInstance && - processInstance.status === 'suspended' && - task.state === 'READY' && - !showingLastSpiffStep() - ); + const canResetProcess = (_task: Task) => { + // disabling this feature for now + return false; + // return ( + // ability.can('POST', targetUris.processInstanceResetPath) && + // processInstance && + // processInstance.status === 'suspended' && + // task.state === 'READY' && + // !showingActiveTask() + // ); }; const getEvents = (task: Task) => { @@ -714,7 +717,7 @@ export default function ProcessInstanceShow({ variant }: OwnProps) { // taskToUse is copy of taskToDisplay, with taskDataToDisplay in data attribute const taskToUse: Task = { ...taskToDisplay, data: taskDataToDisplay }; HttpService.makeCallToBackend({ - path: `${targetUris.processInstanceTaskDataPath}/${taskToUse.id}`, + path: `${targetUris.processInstanceTaskDataPath}/${taskToUse.guid}`, httpMethod: 'PUT', successCallback: saveTaskDataResult, failureCallback: addError, @@ -976,7 +979,7 @@ export default function ProcessInstanceShow({ variant }: OwnProps) { ): {taskToUse.state} {taskDisplayButtons(taskToUse)} - {taskToUse.state == 'COMPLETED' ? ( + {taskToUse.state === 'COMPLETED' ? (
{completionViewLink( @@ -1031,15 +1034,8 @@ export default function ProcessInstanceShow({ variant }: OwnProps) { return elements; }; - // right now this just assume if to_task_guid was passed in then - // this cannot be the active task. - // we may need a better way to figure this out. - const showingActiveTask = () => { - return !!params.to_task_guid; - }; - const viewMostRecentStateComponent = () => { - if (!showingActiveTask()) { + if (showingActiveTask()) { return null; } From 3f49f912ea6eed319695e64de19ca70e35772125 Mon Sep 17 00:00:00 2001 From: jasquat Date: Thu, 23 Mar 2023 15:16:39 -0400 Subject: [PATCH 15/20] pyl passes w/ burnettk --- .../routes/process_instances_controller.py | 2 -- .../spiffworkflow_backend/services/task_service.py | 1 - .../process_navigation/process_navigation.bpmn | 8 ++++---- .../integration/test_process_api.py | 14 +++++++------- 4 files changed, 11 insertions(+), 14 deletions(-) diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/routes/process_instances_controller.py b/spiffworkflow-backend/src/spiffworkflow_backend/routes/process_instances_controller.py index 48a931c21..619aaae10 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/routes/process_instances_controller.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/routes/process_instances_controller.py @@ -218,7 +218,6 @@ def process_instance_resume( try: processor.lock_process_instance("Web") processor.resume() - processor.do_engine_steps(save=True) except (ProcessInstanceIsNotEnqueuedError, ProcessInstanceIsAlreadyLockedError) as e: ErrorHandlingService().handle_error(processor, e) raise e @@ -673,7 +672,6 @@ def process_instance_task_list( TaskDefinitionModel.properties_json.label("task_definition_properties_json"), # type: ignore TaskModel.guid, TaskModel.state, - TaskModel.properties_json, TaskModel.end_in_seconds, TaskModel.start_in_seconds, ) diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/services/task_service.py b/spiffworkflow-backend/src/spiffworkflow_backend/services/task_service.py index 29a456772..d3cf545c5 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/services/task_service.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/services/task_service.py @@ -341,7 +341,6 @@ class TaskService: task_model["state"] = state task_model["start_in_seconds"] = None task_model["end_in_seconds"] = None - task_model["properties_json"]["state"] = getattr(TaskState, state) @classmethod def reset_task_model( diff --git a/spiffworkflow-backend/tests/data/process_navigation/process_navigation.bpmn b/spiffworkflow-backend/tests/data/process_navigation/process_navigation.bpmn index 540a0e12c..d53c8184f 100644 --- a/spiffworkflow-backend/tests/data/process_navigation/process_navigation.bpmn +++ b/spiffworkflow-backend/tests/data/process_navigation/process_navigation.bpmn @@ -27,7 +27,7 @@ - + Flow_1q47ol8 @@ -36,7 +36,7 @@ Flow_1w3n49n - + Flow_1vld4r2 Flow_13ai5vv @@ -44,7 +44,7 @@ "PT1H" - + Click the button. 
@@ -91,7 +91,7 @@ - + diff --git a/spiffworkflow-backend/tests/spiffworkflow_backend/integration/test_process_api.py b/spiffworkflow-backend/tests/spiffworkflow_backend/integration/test_process_api.py index 600bcb666..b0f355c8a 100644 --- a/spiffworkflow-backend/tests/spiffworkflow_backend/integration/test_process_api.py +++ b/spiffworkflow-backend/tests/spiffworkflow_backend/integration/test_process_api.py @@ -2619,9 +2619,9 @@ class TestProcessApi(BaseTest): headers=self.logged_in_headers(with_super_admin_user), ) assert response.status_code == 200 - end_task = next(task for task in response.json if task["type"] == "End Event") + end_task = next(task for task in response.json if task["bpmn_identifier"] == "Event_174a838") response = client.get( - f"/v1.0/task-data/{self.modify_process_identifier_for_path_param(process_model_identifier)}/{process_instance_id}/{end_task['task_spiff_step']}", + f"/v1.0/task-data/{self.modify_process_identifier_for_path_param(process_model_identifier)}/{process_instance_id}/{end_task['guid']}", headers=self.logged_in_headers(with_super_admin_user), ) assert response.status_code == 200 @@ -2688,17 +2688,17 @@ class TestProcessApi(BaseTest): f"/v1.0/process-instances/{self.modify_process_identifier_for_path_param(process_model_identifier)}/{process_instance_id}/task-info", headers=self.logged_in_headers(with_super_admin_user), ) - assert len(response.json) == 1 - task = response.json[0] + assert len(response.json) == 9 + human_task = next(task for task in response.json if task["bpmn_identifier"] == "manual_task_one") response = client.post( - f"/v1.0/task-complete/{self.modify_process_identifier_for_path_param(process_model_identifier)}/{process_instance_id}/{task['id']}", + f"/v1.0/task-complete/{self.modify_process_identifier_for_path_param(process_model_identifier)}/{process_instance_id}/{human_task['guid']}", headers=self.logged_in_headers(with_super_admin_user), content_type="application/json", data=json.dumps({"execute": False}), ) assert response.json["status"] == "suspended" - task_model = TaskModel.query.filter_by(guid=task["id"]).first() + task_model = TaskModel.query.filter_by(guid=human_task["guid"]).first() assert task_model is not None assert task_model.state == "COMPLETED" @@ -2707,7 +2707,7 @@ class TestProcessApi(BaseTest): headers=self.logged_in_headers(with_super_admin_user), ) assert response.status_code == 200 - assert len(response.json) == 1 + assert len(response.json) == 9 def setup_initial_groups_for_move_tests(self, client: FlaskClient, with_super_admin_user: UserModel) -> None: """Setup_initial_groups_for_move_tests.""" From 94cd732ebd2282b1cf98ea1a7cb9771c995da754 Mon Sep 17 00:00:00 2001 From: jasquat Date: Thu, 23 Mar 2023 16:07:25 -0400 Subject: [PATCH 16/20] save timestamps when manually completing a task and some cleanup w/ burnettk --- .../routes/tasks_controller.py | 5 +- .../services/process_instance_processor.py | 8 ++ .../src/routes/ProcessInstanceShow.tsx | 133 +++++++++++------- 3 files changed, 92 insertions(+), 54 deletions(-) diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/routes/tasks_controller.py b/spiffworkflow-backend/src/spiffworkflow_backend/routes/tasks_controller.py index c49eda582..0aa0fa7bb 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/routes/tasks_controller.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/routes/tasks_controller.py @@ -218,8 +218,9 @@ def task_data_update( task_model, new_task_data_dict, "json_data_hash" ) if json_data_dict is not None: - json_data = 
JsonDataModel(**json_data_dict) - db.session.add(json_data) + TaskService.insert_or_update_json_data_records({json_data_dict['hash']: json_data_dict}) + # json_data = JsonDataModel(**json_data_dict) + # db.session.add(json_data) ProcessInstanceProcessor.add_event_to_process_instance( process_instance, ProcessInstanceEventType.task_data_edited.value, task_guid=task_guid ) diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/services/process_instance_processor.py b/spiffworkflow-backend/src/spiffworkflow_backend/services/process_instance_processor.py index 535a2be41..5791be7ee 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/services/process_instance_processor.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/services/process_instance_processor.py @@ -1232,6 +1232,7 @@ class ProcessInstanceProcessor: def manual_complete_task(self, task_id: str, execute: bool) -> None: """Mark the task complete optionally executing it.""" spiff_tasks_updated = {} + start_in_seconds = time.time() spiff_task = self.bpmn_process_instance.get_task(UUID(task_id)) event_type = ProcessInstanceEventType.task_skipped.value if execute: @@ -1264,6 +1265,8 @@ class ProcessInstanceProcessor: spiff_task.workflow.last_task = spiff_task spiff_tasks_updated[spiff_task.id] = spiff_task + end_in_seconds = time.time() + if isinstance(spiff_task.task_spec, EndEvent): for task in self.bpmn_process_instance.get_tasks(TaskState.DEFINITE_MASK, workflow=spiff_task.workflow): task.complete() @@ -1300,6 +1303,11 @@ class ProcessInstanceProcessor: if bpmn_process_json_data is not None: new_json_data_dicts[bpmn_process_json_data["hash"]] = bpmn_process_json_data + # spiff_task should be the main task we are completing and only it should get the timestamps + if task_model.guid == str(spiff_task.id): + task_model.start_in_seconds = start_in_seconds + task_model.end_in_seconds = end_in_seconds + new_task_models[task_model.guid] = task_model db.session.bulk_save_objects(new_task_models.values()) TaskService.insert_or_update_json_data_records(new_json_data_dicts) diff --git a/spiffworkflow-frontend/src/routes/ProcessInstanceShow.tsx b/spiffworkflow-frontend/src/routes/ProcessInstanceShow.tsx index 308f4bd1e..26231282f 100644 --- a/spiffworkflow-frontend/src/routes/ProcessInstanceShow.tsx +++ b/spiffworkflow-frontend/src/routes/ProcessInstanceShow.tsx @@ -68,6 +68,9 @@ export default function ProcessInstanceShow({ variant }: OwnProps) { const [tasks, setTasks] = useState(null); const [tasksCallHadError, setTasksCallHadError] = useState(false); const [taskToDisplay, setTaskToDisplay] = useState(null); + const [taskToTimeTravelTo, setTaskToTimeTravelTo] = useState( + null + ); const [taskDataToDisplay, setTaskDataToDisplay] = useState(''); const [showTaskDataLoading, setShowTaskDataLoading] = useState(false); @@ -127,45 +130,58 @@ export default function ProcessInstanceShow({ variant }: OwnProps) { } useEffect(() => { - if (permissionsLoaded) { - const processTaskFailure = () => { - setTasksCallHadError(true); - }; - let queryParams = ''; - const processIdentifier = searchParams.get('process_identifier'); - if (processIdentifier) { - queryParams = `?process_identifier=${processIdentifier}`; - } - let apiPath = '/process-instances/for-me'; - if (variant === 'all') { - apiPath = '/process-instances'; - } - HttpService.makeCallToBackend({ - path: `${apiPath}/${modifiedProcessModelId}/${params.process_instance_id}${queryParams}`, - successCallback: setProcessInstance, - }); - let taskParams = '?most_recent_tasks_only=true'; 
- if (typeof params.to_task_guid !== 'undefined') { - taskParams = `${taskParams}&to_task_guid=${params.to_task_guid}`; - } - const bpmnProcessGuid = searchParams.get('bpmn_process_guid'); - if (bpmnProcessGuid) { - taskParams = `${taskParams}&bpmn_process_guid=${bpmnProcessGuid}`; - } - let taskPath = ''; - if (ability.can('GET', taskListPath)) { - taskPath = `${taskListPath}${taskParams}`; - } - if (taskPath) { - HttpService.makeCallToBackend({ - path: taskPath, - successCallback: setTasks, - failureCallback: processTaskFailure, - }); - } else { - setTasksCallHadError(true); - } + if (!permissionsLoaded) { + return undefined; } + const processTaskFailure = () => { + setTasksCallHadError(true); + }; + const processTasksSuccess = (results: Task[]) => { + if (params.to_task_guid) { + const matchingTask = results.find( + (task: Task) => task.guid === params.to_task_guid + ); + if (matchingTask) { + setTaskToTimeTravelTo(matchingTask); + } + } + setTasks(results); + }; + let queryParams = ''; + const processIdentifier = searchParams.get('process_identifier'); + if (processIdentifier) { + queryParams = `?process_identifier=${processIdentifier}`; + } + let apiPath = '/process-instances/for-me'; + if (variant === 'all') { + apiPath = '/process-instances'; + } + HttpService.makeCallToBackend({ + path: `${apiPath}/${modifiedProcessModelId}/${params.process_instance_id}${queryParams}`, + successCallback: setProcessInstance, + }); + let taskParams = '?most_recent_tasks_only=true'; + if (typeof params.to_task_guid !== 'undefined') { + taskParams = `${taskParams}&to_task_guid=${params.to_task_guid}`; + } + const bpmnProcessGuid = searchParams.get('bpmn_process_guid'); + if (bpmnProcessGuid) { + taskParams = `${taskParams}&bpmn_process_guid=${bpmnProcessGuid}`; + } + let taskPath = ''; + if (ability.can('GET', taskListPath)) { + taskPath = `${taskListPath}${taskParams}`; + } + if (taskPath) { + HttpService.makeCallToBackend({ + path: taskPath, + successCallback: processTasksSuccess, + failureCallback: processTaskFailure, + }); + } else { + setTasksCallHadError(true); + } + return undefined; }, [ targetUris, params, @@ -231,14 +247,17 @@ export default function ProcessInstanceShow({ variant }: OwnProps) { }; const currentToTaskGuid = () => { - return params.to_task_guid; + if (taskToTimeTravelTo) { + return taskToTimeTravelTo.guid; + } + return null; }; - // right now this just assume if to_task_guid was passed in then + // right now this just assume if taskToTimeTravelTo was passed in then // this cannot be the active task. // we may need a better way to figure this out. const showingActiveTask = () => { - return !params.to_task_guid; + return !taskToTimeTravelTo; }; const completionViewLink = (label: any, taskGuid: string) => { @@ -983,7 +1002,7 @@ export default function ProcessInstanceShow({ variant }: OwnProps) {
{completionViewLink( - 'View state at task completion', + 'View process instance at the time when this task was active.', taskToUse.guid )} @@ -1035,21 +1054,31 @@ export default function ProcessInstanceShow({ variant }: OwnProps) { }; const viewMostRecentStateComponent = () => { - if (showingActiveTask()) { + if (!taskToTimeTravelTo) { return null; } - + const title = `${taskToTimeTravelTo.id}: ${taskToTimeTravelTo.guid}: ${taskToTimeTravelTo.bpmn_identifier}`; return ( <> - - - View at most recent state - + +
+ Viewing process instance at the time when{' '} + + + {taskToTimeTravelTo.bpmn_name || + taskToTimeTravelTo.bpmn_identifier} + + {' '} + was active.{' '} + + View current process instance state. + +
From 0ae74f8f35239882b66afb0073a0f6dade364d3e Mon Sep 17 00:00:00 2001 From: jasquat Date: Thu, 23 Mar 2023 16:33:30 -0400 Subject: [PATCH 17/20] removed spiff step details w/ burnettk --- .../{4255f548bfb4_.py => 0b5dd14bfbac_.py} | 25 +--- .../src/spiffworkflow_backend/api.yml | 2 +- .../load_database_models.py | 3 - .../models/process_instance.py | 6 +- .../models/spiff_step_details.py | 37 ----- .../src/spiffworkflow_backend/models/task.py | 3 - .../routes/process_instances_controller.py | 2 - .../routes/script_unit_tests_controller.py | 1 - .../routes/tasks_controller.py | 3 +- .../delete_process_instances_with_criteria.py | 9 -- .../services/logging_service.py | 23 ---- .../services/process_instance_processor.py | 126 ------------------ .../services/process_instance_service.py | 2 - .../services/workflow_execution_service.py | 58 -------- .../components/ProcessInstanceListTable.tsx | 1 - spiffworkflow-frontend/src/interfaces.ts | 5 - .../src/routes/ProcessInstanceShow.tsx | 6 +- 17 files changed, 12 insertions(+), 300 deletions(-) rename spiffworkflow-backend/migrations/versions/{4255f548bfb4_.py => 0b5dd14bfbac_.py} (96%) delete mode 100644 spiffworkflow-backend/src/spiffworkflow_backend/models/spiff_step_details.py diff --git a/spiffworkflow-backend/migrations/versions/4255f548bfb4_.py b/spiffworkflow-backend/migrations/versions/0b5dd14bfbac_.py similarity index 96% rename from spiffworkflow-backend/migrations/versions/4255f548bfb4_.py rename to spiffworkflow-backend/migrations/versions/0b5dd14bfbac_.py index a66c074bb..d2ef7c103 100644 --- a/spiffworkflow-backend/migrations/versions/4255f548bfb4_.py +++ b/spiffworkflow-backend/migrations/versions/0b5dd14bfbac_.py @@ -1,8 +1,8 @@ """empty message -Revision ID: 4255f548bfb4 +Revision ID: 0b5dd14bfbac Revises: -Create Date: 2023-03-20 13:00:28.655387 +Create Date: 2023-03-23 16:25:33.288500 """ from alembic import op @@ -10,7 +10,7 @@ import sqlalchemy as sa from sqlalchemy.dialects import mysql # revision identifiers, used by Alembic. 
-revision = '4255f548bfb4' +revision = '0b5dd14bfbac' down_revision = None branch_labels = None depends_on = None @@ -251,7 +251,6 @@ def upgrade(): sa.Column('status', sa.String(length=50), nullable=True), sa.Column('bpmn_version_control_type', sa.String(length=50), nullable=True), sa.Column('bpmn_version_control_identifier', sa.String(length=255), nullable=True), - sa.Column('spiff_step', sa.Integer(), nullable=True), sa.ForeignKeyConstraint(['bpmn_process_definition_id'], ['bpmn_process_definition.id'], ), sa.ForeignKeyConstraint(['bpmn_process_id'], ['bpmn_process.id'], ), sa.ForeignKeyConstraint(['process_initiator_id'], ['user.id'], ), @@ -347,22 +346,6 @@ def upgrade(): op.create_index(op.f('ix_process_instance_queue_locked_at_in_seconds'), 'process_instance_queue', ['locked_at_in_seconds'], unique=False) op.create_index(op.f('ix_process_instance_queue_locked_by'), 'process_instance_queue', ['locked_by'], unique=False) op.create_index(op.f('ix_process_instance_queue_status'), 'process_instance_queue', ['status'], unique=False) - op.create_table('spiff_step_details', - sa.Column('id', sa.Integer(), nullable=False), - sa.Column('process_instance_id', sa.Integer(), nullable=False), - sa.Column('spiff_step', sa.Integer(), nullable=False), - sa.Column('task_json', sa.JSON(), nullable=False), - sa.Column('task_id', sa.String(length=50), nullable=False), - sa.Column('task_state', sa.String(length=50), nullable=False), - sa.Column('bpmn_task_identifier', sa.String(length=255), nullable=False), - sa.Column('delta_json', sa.JSON(), nullable=True), - sa.Column('start_in_seconds', sa.DECIMAL(precision=17, scale=6), nullable=False), - sa.Column('end_in_seconds', sa.DECIMAL(precision=17, scale=6), nullable=True), - sa.ForeignKeyConstraint(['process_instance_id'], ['process_instance.id'], ), - sa.PrimaryKeyConstraint('id'), - sa.UniqueConstraint('process_instance_id', 'spiff_step', name='process_instance_id_spiff_step') - ) - op.create_index(op.f('ix_spiff_step_details_process_instance_id'), 'spiff_step_details', ['process_instance_id'], unique=False) op.create_table('task', sa.Column('id', sa.Integer(), nullable=False), sa.Column('guid', sa.String(length=36), nullable=False), @@ -468,8 +451,6 @@ def downgrade(): op.drop_index(op.f('ix_task_json_data_hash'), table_name='task') op.drop_index(op.f('ix_task_bpmn_process_id'), table_name='task') op.drop_table('task') - op.drop_index(op.f('ix_spiff_step_details_process_instance_id'), table_name='spiff_step_details') - op.drop_table('spiff_step_details') op.drop_index(op.f('ix_process_instance_queue_status'), table_name='process_instance_queue') op.drop_index(op.f('ix_process_instance_queue_locked_by'), table_name='process_instance_queue') op.drop_index(op.f('ix_process_instance_queue_locked_at_in_seconds'), table_name='process_instance_queue') diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/api.yml b/spiffworkflow-backend/src/spiffworkflow_backend/api.yml index 6fa28040e..a4d8156c1 100755 --- a/spiffworkflow-backend/src/spiffworkflow_backend/api.yml +++ b/spiffworkflow-backend/src/spiffworkflow_backend/api.yml @@ -1595,7 +1595,7 @@ paths: type: string get: operationId: spiffworkflow_backend.routes.tasks_controller.task_data_show - summary: Get task data for a single task in a spiff step. + summary: Get task data for a single task. 
tags: - Process Instances responses: diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/load_database_models.py b/spiffworkflow-backend/src/spiffworkflow_backend/load_database_models.py index 52e0c5733..5e78b4d38 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/load_database_models.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/load_database_models.py @@ -41,9 +41,6 @@ from spiffworkflow_backend.models.process_instance_report import ( ) # noqa: F401 from spiffworkflow_backend.models.refresh_token import RefreshTokenModel # noqa: F401 from spiffworkflow_backend.models.secret_model import SecretModel # noqa: F401 -from spiffworkflow_backend.models.spiff_step_details import ( - SpiffStepDetailsModel, -) # noqa: F401 from spiffworkflow_backend.models.user import UserModel # noqa: F401 from spiffworkflow_backend.models.group import GroupModel # noqa: F401 from spiffworkflow_backend.models.process_instance_metadata import ( diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/models/process_instance.py b/spiffworkflow-backend/src/spiffworkflow_backend/models/process_instance.py index f9824f02e..3fb8b439f 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/models/process_instance.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/models/process_instance.py @@ -87,6 +87,10 @@ class ProcessInstanceModel(SpiffworkflowBaseDBModel): "ProcessInstanceMetadataModel", cascade="delete", ) # type: ignore + process_instance_queue = relationship( + "ProcessInstanceQueueModel", + cascade="delete", + ) # type: ignore start_in_seconds: int | None = db.Column(db.Integer, index=True) end_in_seconds: int | None = db.Column(db.Integer, index=True) @@ -96,7 +100,6 @@ class ProcessInstanceModel(SpiffworkflowBaseDBModel): bpmn_version_control_type: str = db.Column(db.String(50)) bpmn_version_control_identifier: str = db.Column(db.String(255)) - spiff_step: int = db.Column(db.Integer) bpmn_xml_file_contents: str | None = None process_model_with_diagram_identifier: str | None = None @@ -117,7 +120,6 @@ class ProcessInstanceModel(SpiffworkflowBaseDBModel): "bpmn_xml_file_contents": self.bpmn_xml_file_contents, "bpmn_version_control_identifier": self.bpmn_version_control_identifier, "bpmn_version_control_type": self.bpmn_version_control_type, - "spiff_step": self.spiff_step, "process_initiator_username": self.process_initiator.username, } diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/models/spiff_step_details.py b/spiffworkflow-backend/src/spiffworkflow_backend/models/spiff_step_details.py deleted file mode 100644 index 58d340950..000000000 --- a/spiffworkflow-backend/src/spiffworkflow_backend/models/spiff_step_details.py +++ /dev/null @@ -1,37 +0,0 @@ -"""Spiff_step_details.""" -from dataclasses import dataclass -from typing import Union - -from sqlalchemy import ForeignKey -from sqlalchemy import UniqueConstraint -from sqlalchemy.orm import deferred - -from spiffworkflow_backend.models.db import db -from spiffworkflow_backend.models.db import SpiffworkflowBaseDBModel -from spiffworkflow_backend.models.process_instance import ProcessInstanceModel - - -@dataclass -class SpiffStepDetailsModel(SpiffworkflowBaseDBModel): - """SpiffStepDetailsModel.""" - - __tablename__ = "spiff_step_details" - __table_args__ = (UniqueConstraint("process_instance_id", "spiff_step", name="process_instance_id_spiff_step"),) - - id: int = db.Column(db.Integer, primary_key=True) - process_instance_id: int = db.Column( - ForeignKey(ProcessInstanceModel.id), nullable=False, 
index=True # type: ignore - ) - spiff_step: int = db.Column(db.Integer, nullable=False) - task_json: dict = deferred(db.Column(db.JSON, nullable=False)) # type: ignore - task_id: str = db.Column(db.String(50), nullable=False) - task_state: str = db.Column(db.String(50), nullable=False) - bpmn_task_identifier: str = db.Column(db.String(255), nullable=False) - delta_json: list = deferred(db.Column(db.JSON)) # type: ignore - - start_in_seconds: float = db.Column(db.DECIMAL(17, 6), nullable=False) - - # to fix mypy in 3.9 - not sure why syntax like: - # float | None - # works in other dataclass db models - end_in_seconds: Union[float, None] = db.Column(db.DECIMAL(17, 6)) diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/models/task.py b/spiffworkflow-backend/src/spiffworkflow_backend/models/task.py index dbdd429e7..a1edd259e 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/models/task.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/models/task.py @@ -108,7 +108,6 @@ class Task: event_definition: Union[dict[str, Any], None] = None, call_activity_process_identifier: Optional[str] = None, calling_subprocess_task_id: Optional[str] = None, - task_spiff_step: Optional[int] = None, ): """__init__.""" self.id = id @@ -123,7 +122,6 @@ class Task: self.event_definition = event_definition self.call_activity_process_identifier = call_activity_process_identifier self.calling_subprocess_task_id = calling_subprocess_task_id - self.task_spiff_step = task_spiff_step self.data = data if self.data is None: @@ -181,7 +179,6 @@ class Task: "event_definition": self.event_definition, "call_activity_process_identifier": self.call_activity_process_identifier, "calling_subprocess_task_id": self.calling_subprocess_task_id, - "task_spiff_step": self.task_spiff_step, } @classmethod diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/routes/process_instances_controller.py b/spiffworkflow-backend/src/spiffworkflow_backend/routes/process_instances_controller.py index 619aaae10..758a48d9d 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/routes/process_instances_controller.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/routes/process_instances_controller.py @@ -41,7 +41,6 @@ from spiffworkflow_backend.models.process_instance_report import ( from spiffworkflow_backend.models.process_model import ProcessModelInfo from spiffworkflow_backend.models.spec_reference import SpecReferenceCache from spiffworkflow_backend.models.spec_reference import SpecReferenceNotFoundError -from spiffworkflow_backend.models.spiff_step_details import SpiffStepDetailsModel from spiffworkflow_backend.models.task import TaskModel from spiffworkflow_backend.models.task_definition import TaskDefinitionModel from spiffworkflow_backend.models.user import UserModel @@ -448,7 +447,6 @@ def process_instance_delete( # (Pdb) db.session.delete # > - db.session.query(SpiffStepDetailsModel).filter_by(process_instance_id=process_instance.id).delete() db.session.query(ProcessInstanceQueueModel).filter_by(process_instance_id=process_instance.id).delete() db.session.delete(process_instance) db.session.commit() diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/routes/script_unit_tests_controller.py b/spiffworkflow-backend/src/spiffworkflow_backend/routes/script_unit_tests_controller.py index 303dd94a8..3d7ab5afa 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/routes/script_unit_tests_controller.py +++ 
b/spiffworkflow-backend/src/spiffworkflow_backend/routes/script_unit_tests_controller.py @@ -102,7 +102,6 @@ def script_unit_test_run( """Script_unit_test_run.""" # FIXME: We should probably clear this somewhere else but this works current_app.config["THREAD_LOCAL_DATA"].process_instance_id = None - current_app.config["THREAD_LOCAL_DATA"].spiff_step = None python_script = _get_required_parameter_or_raise("python_script", body) input_json = _get_required_parameter_or_raise("input_json", body) diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/routes/tasks_controller.py b/spiffworkflow-backend/src/spiffworkflow_backend/routes/tasks_controller.py index 0aa0fa7bb..50a4402a9 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/routes/tasks_controller.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/routes/tasks_controller.py @@ -33,7 +33,6 @@ from spiffworkflow_backend.models.db import db from spiffworkflow_backend.models.group import GroupModel from spiffworkflow_backend.models.human_task import HumanTaskModel from spiffworkflow_backend.models.human_task_user import HumanTaskUserModel -from spiffworkflow_backend.models.json_data import JsonDataModel from spiffworkflow_backend.models.process_instance import ProcessInstanceModel from spiffworkflow_backend.models.process_instance import ProcessInstanceModelSchema from spiffworkflow_backend.models.process_instance import ProcessInstanceStatus @@ -218,7 +217,7 @@ def task_data_update( task_model, new_task_data_dict, "json_data_hash" ) if json_data_dict is not None: - TaskService.insert_or_update_json_data_records({json_data_dict['hash']: json_data_dict}) + TaskService.insert_or_update_json_data_records({json_data_dict["hash"]: json_data_dict}) # json_data = JsonDataModel(**json_data_dict) # db.session.add(json_data) ProcessInstanceProcessor.add_event_to_process_instance( diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/scripts/delete_process_instances_with_criteria.py b/spiffworkflow-backend/src/spiffworkflow_backend/scripts/delete_process_instances_with_criteria.py index a650cb483..f599d799a 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/scripts/delete_process_instances_with_criteria.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/scripts/delete_process_instances_with_criteria.py @@ -9,7 +9,6 @@ from spiffworkflow_backend.models.process_instance import ProcessInstanceModel from spiffworkflow_backend.models.script_attributes_context import ( ScriptAttributesContext, ) -from spiffworkflow_backend.models.spiff_step_details import SpiffStepDetailsModel from spiffworkflow_backend.scripts.script import Script @@ -43,14 +42,6 @@ class DeleteProcessInstancesWithCriteria(Script): rows_affected = len(results) if rows_affected > 0: - ids_to_delete = list(map(lambda r: r.id, results)) # type: ignore - - step_details = SpiffStepDetailsModel.query.filter( - SpiffStepDetailsModel.process_instance_id.in_(ids_to_delete) # type: ignore - ).all() - - for deletion in step_details: - db.session.delete(deletion) for deletion in results: db.session.delete(deletion) db.session.commit() diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/services/logging_service.py b/spiffworkflow-backend/src/spiffworkflow_backend/services/logging_service.py index 94f3a67f4..b96f98e51 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/services/logging_service.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/services/logging_service.py @@ -6,7 +6,6 @@ import sys from typing import Any from typing import 
Optional -from flask import g from flask.app import Flask @@ -88,28 +87,6 @@ class JsonFormatter(logging.Formatter): return json.dumps(message_dict, default=str) -class SpiffFilter(logging.Filter): - """SpiffFilter.""" - - def __init__(self, app: Flask): - """__init__.""" - self.app = app - super().__init__() - - def filter(self, record: logging.LogRecord) -> bool: - """Filter.""" - tld = self.app.config["THREAD_LOCAL_DATA"] - process_instance_id = "" - if hasattr(tld, "process_instance_id"): - process_instance_id = tld.process_instance_id - setattr(record, "process_instance_id", process_instance_id) # noqa: B010 - if hasattr(tld, "spiff_step"): - setattr(record, "spiff_step", tld.spiff_step) # noqa: 8010 - if hasattr(g, "user") and g.user: - setattr(record, "current_user_id", g.user.id) # noqa: B010 - return True - - def setup_logger(app: Flask) -> None: """Setup_logger.""" upper_log_level_string = app.config["SPIFFWORKFLOW_BACKEND_LOG_LEVEL"].upper() diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/services/process_instance_processor.py b/spiffworkflow-backend/src/spiffworkflow_backend/services/process_instance_processor.py index 5791be7ee..722baa0d0 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/services/process_instance_processor.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/services/process_instance_processor.py @@ -39,7 +39,6 @@ from SpiffWorkflow.bpmn.serializer.task_spec import ( # type: ignore from SpiffWorkflow.bpmn.serializer.workflow import BpmnWorkflowSerializer # type: ignore from SpiffWorkflow.bpmn.specs.BpmnProcessSpec import BpmnProcessSpec # type: ignore from SpiffWorkflow.bpmn.specs.events.EndEvent import EndEvent # type: ignore -from SpiffWorkflow.bpmn.specs.events.event_definitions import CancelEventDefinition # type: ignore from SpiffWorkflow.bpmn.specs.events.StartEvent import StartEvent # type: ignore from SpiffWorkflow.bpmn.specs.SubWorkflowTask import SubWorkflowTask # type: ignore from SpiffWorkflow.bpmn.workflow import BpmnWorkflow # type: ignore @@ -84,7 +83,6 @@ from spiffworkflow_backend.models.script_attributes_context import ( ScriptAttributesContext, ) from spiffworkflow_backend.models.spec_reference import SpecReferenceCache -from spiffworkflow_backend.models.spiff_step_details import SpiffStepDetailsModel from spiffworkflow_backend.models.task import TaskModel from spiffworkflow_backend.models.task import TaskNotFoundError from spiffworkflow_backend.models.task_definition import TaskDefinitionModel @@ -92,9 +90,6 @@ from spiffworkflow_backend.models.user import UserModel from spiffworkflow_backend.scripts.script import Script from spiffworkflow_backend.services.custom_parser import MyCustomParser from spiffworkflow_backend.services.file_system_service import FileSystemService -from spiffworkflow_backend.services.process_instance_lock_service import ( - ProcessInstanceLockService, -) from spiffworkflow_backend.services.process_instance_queue_service import ProcessInstanceIsAlreadyLockedError from spiffworkflow_backend.services.process_instance_queue_service import ProcessInstanceQueueService from spiffworkflow_backend.services.process_model_service import ProcessModelService @@ -105,9 +100,6 @@ from spiffworkflow_backend.services.user_service import UserService from spiffworkflow_backend.services.workflow_execution_service import ( execution_strategy_named, ) -from spiffworkflow_backend.services.workflow_execution_service import ( - StepDetailLoggingDelegate, -) from 
spiffworkflow_backend.services.workflow_execution_service import ( TaskModelSavingDelegate, ) @@ -151,10 +143,6 @@ class MissingProcessInfoError(Exception): """MissingProcessInfoError.""" -class SpiffStepDetailIsMissingError(Exception): - pass - - class BoxedTaskDataBasedScriptEngineEnvironment(BoxedTaskDataEnvironment): # type: ignore def __init__(self, environment_globals: Dict[str, Any]): """BoxedTaskDataBasedScriptEngineEnvironment.""" @@ -433,7 +421,6 @@ class ProcessInstanceProcessor: """Create a Workflow Processor based on the serialized information available in the process_instance model.""" tld = current_app.config["THREAD_LOCAL_DATA"] tld.process_instance_id = process_instance_model.id - tld.spiff_step = process_instance_model.spiff_step # we want this to be the fully qualified path to the process model including all group subcomponents current_app.config["THREAD_LOCAL_DATA"].process_model_identifier = ( @@ -814,37 +801,6 @@ class ProcessInstanceProcessor: "lane_assignment_id": lane_assignment_id, } - def spiff_step_details_mapping( - self, - spiff_task: Optional[SpiffTask] = None, - start_in_seconds: Optional[float] = None, - end_in_seconds: Optional[float] = None, - ) -> dict: - """SaveSpiffStepDetails.""" - if spiff_task is None: - # TODO: safer to pass in task vs use last task? - spiff_task = self.bpmn_process_instance.last_task - - if spiff_task is None: - return {} - - # it's only None when we're starting a human task (it's not complete yet) - if start_in_seconds is None: - start_in_seconds = time.time() - - task_json = self.get_task_dict_from_spiff_task(spiff_task) - - return { - "process_instance_id": self.process_instance_model.id, - "spiff_step": self.process_instance_model.spiff_step or 1, - "task_json": task_json, - "task_id": str(spiff_task.id), - "task_state": spiff_task.get_state_name(), - "bpmn_task_identifier": spiff_task.task_spec.name, - "start_in_seconds": start_in_seconds, - "end_in_seconds": end_in_seconds, - } - def extract_metadata(self, process_model_info: ProcessModelInfo) -> None: """Extract_metadata.""" metadata_extraction_paths = process_model_info.metadata_extraction_paths @@ -1182,14 +1138,7 @@ class ProcessInstanceProcessor: human_task_user = HumanTaskUserModel(user_id=potential_owner_id, human_task=human_task) db.session.add(human_task_user) - self.increment_spiff_step() - spiff_step_detail_mapping = self.spiff_step_details_mapping( - spiff_task=ready_or_waiting_task, start_in_seconds=time.time() - ) - spiff_step_detail = SpiffStepDetailsModel(**spiff_step_detail_mapping) - db.session.add(spiff_step_detail) db.session.commit() - # self.log_spiff_step_details(spiff_step_detail_mapping) if len(human_tasks) > 0: for at in human_tasks: @@ -1220,15 +1169,6 @@ class ProcessInstanceProcessor: # TODO: do_engine_steps without a lock self.do_engine_steps(save=True) - def add_step(self, step: Union[dict, None] = None) -> None: - """Add a spiff step.""" - if step is None: - step = self.spiff_step_details_mapping() - spiff_step_detail = SpiffStepDetailsModel(**step) - db.session.add(spiff_step_detail) - db.session.commit() - # self.log_spiff_step_details(step) - def manual_complete_task(self, task_id: str, execute: bool) -> None: """Mark the task complete optionally executing it.""" spiff_tasks_updated = {} @@ -1279,9 +1219,6 @@ class ProcessInstanceProcessor: task.complete() spiff_tasks_updated[task.id] = task - self.increment_spiff_step() - self.add_step() - for updated_spiff_task in spiff_tasks_updated.values(): bpmn_process, task_model, new_task_models, 
new_json_data_dicts = ( TaskService.find_or_create_task_model_from_spiff_task( @@ -1666,31 +1603,15 @@ class ProcessInstanceProcessor: db.session.add(message_instance) db.session.commit() - def increment_spiff_step(self) -> None: - """Spiff_step++.""" - spiff_step = self.process_instance_model.spiff_step or 0 - spiff_step += 1 - self.process_instance_model.spiff_step = spiff_step - current_app.config["THREAD_LOCAL_DATA"].spiff_step = spiff_step - db.session.add(self.process_instance_model) - def do_engine_steps( self, exit_at: None = None, save: bool = False, execution_strategy_name: Optional[str] = None, ) -> None: - # NOTE: To avoid saving spiff step details, just comment out this function and the step_delegate and - # set the TaskModelSavingDelegate's secondary_engine_step_delegate to None. - def spiff_step_details_mapping_builder(task: SpiffTask, start: float, end: float) -> dict: - self._script_engine.environment.revise_state_with_task_data(task) - return self.spiff_step_details_mapping(task, start, end) - self._add_bpmn_process_definitions() - step_delegate = StepDetailLoggingDelegate(self.increment_spiff_step, spiff_step_details_mapping_builder) task_model_delegate = TaskModelSavingDelegate( - secondary_engine_step_delegate=step_delegate, serializer=self._serializer, process_instance=self.process_instance_model, bpmn_definition_to_task_definitions_mappings=self.bpmn_definition_to_task_definitions_mappings, @@ -1718,31 +1639,6 @@ class ProcessInstanceProcessor: ): self._script_engine.failing_spiff_task = None - # log the spiff step details so we know what is processing the process - # instance when a human task has a timer event. - def log_spiff_step_details(self, step_details: Any) -> None: - if ProcessInstanceLockService.has_lock(self.process_instance_model.id): - locked_by = ProcessInstanceLockService.locked_by() - message = f"ADDING SPIFF BULK STEP DETAILS: {locked_by}: {step_details}" - current_app.logger.debug(message) - - def cancel_notify(self) -> None: - """Cancel_notify.""" - self.__cancel_notify(self.bpmn_process_instance) - - @staticmethod - def __cancel_notify(bpmn_process_instance: BpmnWorkflow) -> None: - """__cancel_notify.""" - try: - # A little hackly, but make the bpmn_process_instance catch a cancel event. - bpmn_process_instance.signal("cancel") # generate a cancel signal. 
- bpmn_process_instance.catch(CancelEventDefinition()) - # Due to this being static, can't save granular step details in this case - # TODO: do_engine_steps without a lock - bpmn_process_instance.do_engine_steps() - except WorkflowTaskException as we: - raise ApiError.from_workflow_exception("task_error", str(we), we) from we - @classmethod def get_tasks_with_data(cls, bpmn_process_instance: BpmnWorkflow) -> List[SpiffTask]: return [task for task in bpmn_process_instance.get_tasks(TaskState.FINISHED_MASK) if len(task.data) > 0] @@ -1891,28 +1787,6 @@ class ProcessInstanceProcessor: human_task.task_status = spiff_task.get_state_name() db.session.add(human_task) - # FIXME: remove when we switch over to using tasks only - details_model = ( - SpiffStepDetailsModel.query.filter_by( - process_instance_id=self.process_instance_model.id, - task_id=str(spiff_task.id), - task_state="READY", - ) - .order_by(SpiffStepDetailsModel.id.desc()) # type: ignore - .first() - ) - if details_model is None: - raise SpiffStepDetailIsMissingError( - "Cannot find a ready spiff_step_detail entry for process instance" - f" {self.process_instance_model.id} and task_id is {spiff_task.id}" - ) - - details_model.task_state = spiff_task.get_state_name() - details_model.end_in_seconds = time.time() - details_model.task_json = self.get_task_dict_from_spiff_task(spiff_task) - db.session.add(details_model) - # ####### - json_data_dict_list = TaskService.update_task_model(task_model, spiff_task, self._serializer) for json_data_dict in json_data_dict_list: if json_data_dict is not None: diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/services/process_instance_service.py b/spiffworkflow-backend/src/spiffworkflow_backend/services/process_instance_service.py index 4daabd588..ed2ea918f 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/services/process_instance_service.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/services/process_instance_service.py @@ -404,7 +404,6 @@ class ProcessInstanceService: spiff_task: SpiffTask, add_docs_and_forms: bool = False, calling_subprocess_task_id: Optional[str] = None, - task_spiff_step: Optional[int] = None, ) -> Task: """Spiff_task_to_api_task.""" task_type = spiff_task.task_spec.spec_type @@ -443,7 +442,6 @@ class ProcessInstanceService: event_definition=serialized_task_spec.get("event_definition"), call_activity_process_identifier=call_activity_process_identifier, calling_subprocess_task_id=calling_subprocess_task_id, - task_spiff_step=task_spiff_step, ) return task diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/services/workflow_execution_service.py b/spiffworkflow-backend/src/spiffworkflow_backend/services/workflow_execution_service.py index b8983f1df..4d9334183 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/services/workflow_execution_service.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/services/workflow_execution_service.py @@ -1,7 +1,6 @@ import logging import time from typing import Callable -from typing import List from typing import Optional from SpiffWorkflow.bpmn.serializer.workflow import BpmnWorkflowSerializer # type: ignore @@ -19,7 +18,6 @@ from spiffworkflow_backend.models.message_instance_correlation import ( from spiffworkflow_backend.models.process_instance import ProcessInstanceModel from spiffworkflow_backend.models.process_instance_event import ProcessInstanceEventModel from spiffworkflow_backend.models.process_instance_event import ProcessInstanceEventType -from 
spiffworkflow_backend.models.spiff_step_details import SpiffStepDetailsModel from spiffworkflow_backend.models.task import TaskModel # noqa: F401 from spiffworkflow_backend.services.assertion_service import safe_assertion from spiffworkflow_backend.services.process_instance_lock_service import ( @@ -45,10 +43,6 @@ class EngineStepDelegate: pass -SpiffStepIncrementer = Callable[[], None] -SpiffStepDetailsMappingBuilder = Callable[[SpiffTask, float, float], dict] - - class TaskModelSavingDelegate(EngineStepDelegate): """Engine step delegate that takes care of saving a task model to the database. @@ -167,58 +161,6 @@ class TaskModelSavingDelegate(EngineStepDelegate): return task_model -class StepDetailLoggingDelegate(EngineStepDelegate): - """Engine step delegate that takes care of logging spiff step details. - - This separates the concerns of step execution and step logging. - """ - - def __init__( - self, - increment_spiff_step: SpiffStepIncrementer, - spiff_step_details_mapping: SpiffStepDetailsMappingBuilder, - ): - """__init__.""" - self.increment_spiff_step = increment_spiff_step - self.spiff_step_details_mapping = spiff_step_details_mapping - self.step_details: List[dict] = [] - self.current_task_start_in_seconds = 0.0 - self.tasks_to_log = { - "BPMN Task", - "Script Task", - "Service Task", - "Default Start Event", - "Exclusive Gateway", - "Call Activity", - # "End Join", - "End Event", - "Default Throwing Event", - "Subprocess", - "Transactional Subprocess", - } - - def should_log(self, spiff_task: SpiffTask) -> bool: - return spiff_task.task_spec.spec_type in self.tasks_to_log and not spiff_task.task_spec.name.endswith( - ".EndJoin" - ) - - def will_complete_task(self, spiff_task: SpiffTask) -> None: - if self.should_log(spiff_task): - self.current_task_start_in_seconds = time.time() - self.increment_spiff_step() - - def did_complete_task(self, spiff_task: SpiffTask) -> None: - if self.should_log(spiff_task): - self.step_details.append( - self.spiff_step_details_mapping(spiff_task, self.current_task_start_in_seconds, time.time()) - ) - - def save(self, _bpmn_process_instance: BpmnWorkflow, commit: bool = True) -> None: - db.session.bulk_insert_mappings(SpiffStepDetailsModel, self.step_details) - if commit: - db.session.commit() - - class ExecutionStrategy: """Interface of sorts for a concrete execution strategy.""" diff --git a/spiffworkflow-frontend/src/components/ProcessInstanceListTable.tsx b/spiffworkflow-frontend/src/components/ProcessInstanceListTable.tsx index eb4f17bf0..335e6a898 100644 --- a/spiffworkflow-frontend/src/components/ProcessInstanceListTable.tsx +++ b/spiffworkflow-frontend/src/components/ProcessInstanceListTable.tsx @@ -1295,7 +1295,6 @@ export default function ProcessInstanceListTable({ end_in_seconds: 'End Time', status: 'Status', process_initiator_username: 'Started By', - spiff_step: 'SpiffWorkflow Step', }; const getHeaderLabel = (header: string) => { return headerLabels[header] ?? header; diff --git a/spiffworkflow-frontend/src/interfaces.ts b/spiffworkflow-frontend/src/interfaces.ts index 2b1a457dc..1d34054db 100644 --- a/spiffworkflow-frontend/src/interfaces.ts +++ b/spiffworkflow-frontend/src/interfaces.ts @@ -53,9 +53,6 @@ export interface Task { task_definition_properties_json: TaskDefinitionPropertiesJson; event_definition?: EventDefinition; - - // TOOD: DELETE THIS! 
- task_spiff_step?: number; } export interface TaskIds { @@ -88,7 +85,6 @@ export interface ProcessInstanceTask { type: string; updated_at_in_seconds: number; - task_spiff_step?: number; potential_owner_usernames?: string; assigned_user_group_identifier?: string; } @@ -132,7 +128,6 @@ export interface ProcessInstance { end_in_seconds: number | null; process_initiator_username: string; bpmn_xml_file_contents?: string; - spiff_step?: number; created_at_in_seconds: number; updated_at_in_seconds: number; bpmn_version_control_identifier: string; diff --git a/spiffworkflow-frontend/src/routes/ProcessInstanceShow.tsx b/spiffworkflow-frontend/src/routes/ProcessInstanceShow.tsx index 26231282f..feaa41737 100644 --- a/spiffworkflow-frontend/src/routes/ProcessInstanceShow.tsx +++ b/spiffworkflow-frontend/src/routes/ProcessInstanceShow.tsx @@ -286,14 +286,14 @@ export default function ProcessInstanceShow({ variant }: OwnProps) { ); }; - const returnToLastSpiffStep = () => { + const returnToProcessInstance = () => { window.location.href = processInstanceShowPageBaseUrl; }; const resetProcessInstance = () => { HttpService.makeCallToBackend({ path: `${targetUris.processInstanceResetPath}/${currentToTaskGuid()}`, - successCallback: returnToLastSpiffStep, + successCallback: returnToProcessInstance, httpMethod: 'POST', }); }; @@ -763,7 +763,7 @@ export default function ProcessInstanceShow({ variant }: OwnProps) { HttpService.makeCallToBackend({ path: `/task-complete/${modifiedProcessModelId}/${params.process_instance_id}/${taskToDisplay.guid}`, httpMethod: 'POST', - successCallback: returnToLastSpiffStep, + successCallback: returnToProcessInstance, postBody: { execute }, }); } From d2d0e7e9920595b00876de1b76706ea46d6eb601 Mon Sep 17 00:00:00 2001 From: jasquat Date: Thu, 23 Mar 2023 17:34:08 -0400 Subject: [PATCH 18/20] attempted to fix some cypress tests --- spiffworkflow-frontend/cypress/e2e/process_instances.cy.js | 5 ++++- spiffworkflow-frontend/cypress/support/commands.js | 4 ++++ 2 files changed, 8 insertions(+), 1 deletion(-) diff --git a/spiffworkflow-frontend/cypress/e2e/process_instances.cy.js b/spiffworkflow-frontend/cypress/e2e/process_instances.cy.js index 64e0418a3..b1b87c46b 100644 --- a/spiffworkflow-frontend/cypress/e2e/process_instances.cy.js +++ b/spiffworkflow-frontend/cypress/e2e/process_instances.cy.js @@ -162,7 +162,7 @@ describe('process-instances', () => { cy.getBySel('process-instance-log-list-link').click(); cy.getBySel('process-instance-log-detailed').click(); cy.contains('process_model_one'); - cy.contains('State change to COMPLETED'); + cy.contains('task_completed'); cy.basicPaginationTest(); }); @@ -184,9 +184,12 @@ describe('process-instances', () => { cy.getBySel(`process-instance-status-${processStatus}`); // there should really only be one, but in CI there are sometimes more cy.get('div[aria-label="Clear all selected items"]:first').click(); + cy.wait(1000); cy.get('div[aria-label="Clear all selected items"]').should( 'not.exist' ); + // it seems like the state isn't clearing as quickly as the clear label so let's wait + cy.wait(1000); } }); diff --git a/spiffworkflow-frontend/cypress/support/commands.js b/spiffworkflow-frontend/cypress/support/commands.js index 404c9af75..f2d969398 100644 --- a/spiffworkflow-frontend/cypress/support/commands.js +++ b/spiffworkflow-frontend/cypress/support/commands.js @@ -154,6 +154,10 @@ Cypress.Commands.add( .then(($element) => { const oldId = $element.text().trim(); cy.get('.cds--pagination__button--forward').click(); + cy.contains( + 
`[data-qa=${dataQaTagToUseToEnsureTableHasLoaded}]`, + oldId + ).should('not.exist'); cy.contains(/\b3–4 of \d+/); cy.get('.cds--pagination__button--backward').click(); cy.contains(/\b1–2 of \d+/); From 5dc8c257a968f7aad66f2ac693a48d74aaefaec3 Mon Sep 17 00:00:00 2001 From: jasquat Date: Fri, 24 Mar 2023 11:10:04 -0400 Subject: [PATCH 19/20] fixed cypress process instance test w/ burnettk --- spiffworkflow-frontend/cypress/e2e/process_instances.cy.js | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/spiffworkflow-frontend/cypress/e2e/process_instances.cy.js b/spiffworkflow-frontend/cypress/e2e/process_instances.cy.js index b1b87c46b..aa0c66266 100644 --- a/spiffworkflow-frontend/cypress/e2e/process_instances.cy.js +++ b/spiffworkflow-frontend/cypress/e2e/process_instances.cy.js @@ -182,14 +182,14 @@ describe('process-instances', () => { cy.url().should('include', `status=${processStatus}`); cy.assertAtLeastOneItemInPaginatedResults(); cy.getBySel(`process-instance-status-${processStatus}`); + + // maybe waiting a bit before trying to click makes this work consistently? + cy.wait(1000); // there should really only be one, but in CI there are sometimes more cy.get('div[aria-label="Clear all selected items"]:first').click(); - cy.wait(1000); cy.get('div[aria-label="Clear all selected items"]').should( 'not.exist' ); - // it seems like the state isn't clearing as quickly as the clear label so let's wait - cy.wait(1000); } }); From 29b33fe2f1111dfb9efb3d88c1b7d72e9037b1ca Mon Sep 17 00:00:00 2001 From: jasquat Date: Fri, 24 Mar 2023 11:11:44 -0400 Subject: [PATCH 20/20] removed debug comment w/ burnettk --- .../spiffworkflow_backend/unit/test_error_handling_service.py | 1 - 1 file changed, 1 deletion(-) diff --git a/spiffworkflow-backend/tests/spiffworkflow_backend/unit/test_error_handling_service.py b/spiffworkflow-backend/tests/spiffworkflow_backend/unit/test_error_handling_service.py index d41ae3e95..adbd22408 100644 --- a/spiffworkflow-backend/tests/spiffworkflow_backend/unit/test_error_handling_service.py +++ b/spiffworkflow-backend/tests/spiffworkflow_backend/unit/test_error_handling_service.py @@ -91,7 +91,6 @@ class TestErrorHandlingService(BaseTest): # Both send and receive messages should be generated, matched # and considered complete. messages = db.session.query(MessageInstanceModel).all() - # import pdb; pdb.set_trace() assert 2 == len(messages) assert "completed" == messages[0].status assert "completed" == messages[1].status