From 234c5338168039035829b0e167f3db65b4582547 Mon Sep 17 00:00:00 2001 From: Elizabeth Esswein Date: Wed, 12 Apr 2023 13:22:10 -0400 Subject: [PATCH 1/4] add coloring for cancelled tasks --- .../src/components/ReactDiagramEditor.tsx | 19 ++++++++++++++++++- spiffworkflow-frontend/src/index.css | 4 ++++ spiffworkflow-frontend/src/interfaces.ts | 1 + .../src/routes/ProcessInstanceShow.tsx | 9 ++++++++- 4 files changed, 31 insertions(+), 2 deletions(-) diff --git a/spiffworkflow-frontend/src/components/ReactDiagramEditor.tsx b/spiffworkflow-frontend/src/components/ReactDiagramEditor.tsx index eefaff82..82dddd4a 100644 --- a/spiffworkflow-frontend/src/components/ReactDiagramEditor.tsx +++ b/spiffworkflow-frontend/src/components/ReactDiagramEditor.tsx @@ -68,6 +68,7 @@ type OwnProps = { diagramType: string; readyOrWaitingProcessInstanceTasks?: Task[] | null; completedProcessInstanceTasks?: Task[] | null; + cancelledProcessInstanceTasks?: Task[] | null; saveDiagram?: (..._args: any[]) => any; onDeleteFile?: (..._args: any[]) => any; isPrimaryFile?: boolean; @@ -94,6 +95,7 @@ export default function ReactDiagramEditor({ diagramType, readyOrWaitingProcessInstanceTasks, completedProcessInstanceTasks, + cancelledProcessInstanceTasks, saveDiagram, onDeleteFile, isPrimaryFile, @@ -358,7 +360,8 @@ export default function ReactDiagramEditor({ function checkTaskCanBeHighlighted(taskBpmnId: string) { return ( !taskSpecsThatCannotBeHighlighted.includes(taskBpmnId) && - !taskBpmnId.match(/EndJoin/) + !taskBpmnId.match(/EndJoin/) && + !taskBpmnId.match(/BoundaryEventParent/) ); } @@ -441,6 +444,19 @@ export default function ReactDiagramEditor({ ); }); } + if (cancelledProcessInstanceTasks) { + const bpmnProcessIdentifiers = getBpmnProcessIdentifiers( + canvas.getRootElement() + ); + cancelledProcessInstanceTasks.forEach((cancelledTask) => { + highlightBpmnIoElement( + canvas, + cancelledTask, + 'cancelled-task-highlight', + bpmnProcessIdentifiers + ); + }); + } } function displayDiagram( @@ -518,6 +534,7 @@ export default function ReactDiagramEditor({ diagramXMLString, readyOrWaitingProcessInstanceTasks, completedProcessInstanceTasks, + cancelledProcessInstanceTasks, fileName, performingXmlUpdates, processModelId, diff --git a/spiffworkflow-frontend/src/index.css b/spiffworkflow-frontend/src/index.css index 07704036..e0b4336c 100644 --- a/spiffworkflow-frontend/src/index.css +++ b/spiffworkflow-frontend/src/index.css @@ -142,6 +142,10 @@ code { fill: grey !important; opacity: .4; } +.cancelled-task-highlight:not(.djs-connection) .djs-visual > :nth-child(1) { + fill: blue !important; + opacity: .2; +} .accordion-item-label { vertical-align: middle; diff --git a/spiffworkflow-frontend/src/interfaces.ts b/spiffworkflow-frontend/src/interfaces.ts index 1d34054d..802c48c7 100644 --- a/spiffworkflow-frontend/src/interfaces.ts +++ b/spiffworkflow-frontend/src/interfaces.ts @@ -58,6 +58,7 @@ export interface Task { export interface TaskIds { completed: Task[]; readyOrWaiting: Task[]; + cancelled: Task[]; } export interface ProcessInstanceTask { diff --git a/spiffworkflow-frontend/src/routes/ProcessInstanceShow.tsx b/spiffworkflow-frontend/src/routes/ProcessInstanceShow.tsx index 29d4bedc..c294ae48 100644 --- a/spiffworkflow-frontend/src/routes/ProcessInstanceShow.tsx +++ b/spiffworkflow-frontend/src/routes/ProcessInstanceShow.tsx @@ -231,13 +231,19 @@ export default function ProcessInstanceShow({ variant }: OwnProps) { }; const getTaskIds = () => { - const taskIds: TaskIds = { completed: [], readyOrWaiting: [] }; + 
const taskIds: TaskIds = { + completed: [], + readyOrWaiting: [], + cancelled: [], + }; if (tasks) { tasks.forEach(function getUserTasksElement(task: Task) { if (task.state === 'COMPLETED') { taskIds.completed.push(task); } else if (task.state === 'READY' || task.state === 'WAITING') { taskIds.readyOrWaiting.push(task); + } else if (task.state === 'CANCELLED') { + taskIds.cancelled.push(task); } return null; }); @@ -1152,6 +1158,7 @@ export default function ProcessInstanceShow({ variant }: OwnProps) { fileName={processInstance.bpmn_xml_file_contents || ''} readyOrWaitingProcessInstanceTasks={taskIds.readyOrWaiting} completedProcessInstanceTasks={taskIds.completed} + cancelledProcessInstanceTasks={taskIds.cancelled} diagramType="readonly" onElementClick={handleClickedDiagramTask} /> From 8129fd4a59070711e72982508fe076e672b5be85 Mon Sep 17 00:00:00 2001 From: Elizabeth Esswein Date: Wed, 12 Apr 2023 17:05:41 -0400 Subject: [PATCH 2/4] exclude tasks from out of date subprocesses --- .../routes/process_instances_controller.py | 186 +++++++++++++----- 1 file changed, 134 insertions(+), 52 deletions(-) diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/routes/process_instances_controller.py b/spiffworkflow-backend/src/spiffworkflow_backend/routes/process_instances_controller.py index c6a8ddcd..1e88e475 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/routes/process_instances_controller.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/routes/process_instances_controller.py @@ -18,7 +18,9 @@ from sqlalchemy.orm import aliased from spiffworkflow_backend.exceptions.api_error import ApiError from spiffworkflow_backend.models.bpmn_process import BpmnProcessModel -from spiffworkflow_backend.models.bpmn_process_definition import BpmnProcessDefinitionModel +from spiffworkflow_backend.models.bpmn_process_definition import ( + BpmnProcessDefinitionModel, +) from spiffworkflow_backend.models.db import db from spiffworkflow_backend.models.human_task import HumanTaskModel from spiffworkflow_backend.models.human_task_user import HumanTaskUserModel @@ -28,7 +30,9 @@ from spiffworkflow_backend.models.process_instance import ( ) from spiffworkflow_backend.models.process_instance import ProcessInstanceModel from spiffworkflow_backend.models.process_instance import ProcessInstanceModelSchema -from spiffworkflow_backend.models.process_instance_event import ProcessInstanceEventModel +from spiffworkflow_backend.models.process_instance_event import ( + ProcessInstanceEventModel, +) from spiffworkflow_backend.models.process_instance_metadata import ( ProcessInstanceMetadataModel, ) @@ -86,7 +90,9 @@ def process_instance_create( modified_process_model_identifier: str, ) -> flask.wrappers.Response: """Create_process_instance.""" - process_model_identifier = _un_modify_modified_process_model_id(modified_process_model_identifier) + process_model_identifier = _un_modify_modified_process_model_id( + modified_process_model_identifier + ) process_model = _get_process_model(process_model_identifier) if process_model.primary_file_name is None: @@ -99,8 +105,10 @@ def process_instance_create( status_code=400, ) - process_instance = ProcessInstanceService.create_process_instance_from_process_model_identifier( - process_model_identifier, g.user + process_instance = ( + ProcessInstanceService.create_process_instance_from_process_model_identifier( + process_model_identifier, g.user + ) ) return Response( json.dumps(ProcessInstanceModelSchema().dump(process_instance)), @@ -150,11 +158,15 @@ def 
process_instance_run( if not current_app.config["SPIFFWORKFLOW_BACKEND_RUN_BACKGROUND_SCHEDULER"]: MessageService.correlate_all_message_instances() - process_instance_api = ProcessInstanceService.processor_to_process_instance_api(processor) + process_instance_api = ProcessInstanceService.processor_to_process_instance_api( + processor + ) process_instance_data = processor.get_data() process_instance_metadata = ProcessInstanceApiSchema().dump(process_instance_api) process_instance_metadata["data"] = process_instance_data - return Response(json.dumps(process_instance_metadata), status=200, mimetype="application/json") + return Response( + json.dumps(process_instance_metadata), status=200, mimetype="application/json" + ) def process_instance_terminate( @@ -168,7 +180,10 @@ def process_instance_terminate( try: with ProcessInstanceQueueService.dequeued(process_instance): processor.terminate() - except (ProcessInstanceIsNotEnqueuedError, ProcessInstanceIsAlreadyLockedError) as e: + except ( + ProcessInstanceIsNotEnqueuedError, + ProcessInstanceIsAlreadyLockedError, + ) as e: ErrorHandlingService().handle_error(processor, e) raise e @@ -186,7 +201,10 @@ def process_instance_suspend( try: with ProcessInstanceQueueService.dequeued(process_instance): processor.suspend() - except (ProcessInstanceIsNotEnqueuedError, ProcessInstanceIsAlreadyLockedError) as e: + except ( + ProcessInstanceIsNotEnqueuedError, + ProcessInstanceIsAlreadyLockedError, + ) as e: ErrorHandlingService().handle_error(processor, e) raise e @@ -204,7 +222,10 @@ def process_instance_resume( try: with ProcessInstanceQueueService.dequeued(process_instance): processor.resume() - except (ProcessInstanceIsNotEnqueuedError, ProcessInstanceIsAlreadyLockedError) as e: + except ( + ProcessInstanceIsNotEnqueuedError, + ProcessInstanceIsAlreadyLockedError, + ) as e: ErrorHandlingService().handle_error(processor, e) raise e @@ -223,11 +244,17 @@ def process_instance_log_list( process_instance = _find_process_instance_by_id_or_raise(process_instance_id) log_query = ( - ProcessInstanceEventModel.query.filter_by(process_instance_id=process_instance.id) + ProcessInstanceEventModel.query.filter_by( + process_instance_id=process_instance.id + ) .outerjoin(TaskModel, TaskModel.guid == ProcessInstanceEventModel.task_guid) - .outerjoin(TaskDefinitionModel, TaskDefinitionModel.id == TaskModel.task_definition_id) .outerjoin( - BpmnProcessDefinitionModel, BpmnProcessDefinitionModel.id == TaskDefinitionModel.bpmn_process_definition_id + TaskDefinitionModel, TaskDefinitionModel.id == TaskModel.task_definition_id + ) + .outerjoin( + BpmnProcessDefinitionModel, + BpmnProcessDefinitionModel.id + == TaskDefinitionModel.bpmn_process_definition_id, ) ) if not detailed: @@ -324,7 +351,9 @@ def process_instance_list( report_filter_by: Optional[str] = None, ) -> flask.wrappers.Response: """Process_instance_list.""" - process_instance_report = ProcessInstanceReportService.report_with_identifier(g.user, report_id, report_identifier) + process_instance_report = ProcessInstanceReportService.report_with_identifier( + g.user, report_id, report_identifier + ) report_column_list = None if report_columns: @@ -348,19 +377,21 @@ def process_instance_list( report_filter_by_list=report_filter_by_list, ) else: - report_filter = ProcessInstanceReportService.filter_from_metadata_with_overrides( - process_instance_report=process_instance_report, - process_model_identifier=process_model_identifier, - user_group_identifier=user_group_identifier, - start_from=start_from, - 
start_to=start_to, - end_from=end_from, - end_to=end_to, - process_status=process_status, - with_relation_to_me=with_relation_to_me, - process_initiator_username=process_initiator_username, - report_column_list=report_column_list, - report_filter_by_list=report_filter_by_list, + report_filter = ( + ProcessInstanceReportService.filter_from_metadata_with_overrides( + process_instance_report=process_instance_report, + process_model_identifier=process_model_identifier, + user_group_identifier=user_group_identifier, + start_from=start_from, + start_to=start_to, + end_from=end_from, + end_to=end_to, + process_status=process_status, + with_relation_to_me=with_relation_to_me, + process_initiator_username=process_initiator_username, + report_column_list=report_column_list, + report_filter_by_list=report_filter_by_list, + ) ) response_json = ProcessInstanceReportService.run_process_instance_report( @@ -374,7 +405,9 @@ def process_instance_list( return make_response(jsonify(response_json), 200) -def process_instance_report_column_list(process_model_identifier: Optional[str] = None) -> flask.wrappers.Response: +def process_instance_report_column_list( + process_model_identifier: Optional[str] = None, +) -> flask.wrappers.Response: """Process_instance_report_column_list.""" table_columns = ProcessInstanceReportService.builtin_column_options() columns_for_metadata_query = ( @@ -383,14 +416,17 @@ def process_instance_report_column_list(process_model_identifier: Optional[str] .distinct() # type: ignore ) if process_model_identifier: - columns_for_metadata_query = columns_for_metadata_query.join(ProcessInstanceModel) + columns_for_metadata_query = columns_for_metadata_query.join( + ProcessInstanceModel + ) columns_for_metadata_query = columns_for_metadata_query.filter( ProcessInstanceModel.process_model_identifier == process_model_identifier ) columns_for_metadata = columns_for_metadata_query.all() columns_for_metadata_strings = [ - {"Header": i[0], "accessor": i[0], "filterable": True} for i in columns_for_metadata + {"Header": i[0], "accessor": i[0], "filterable": True} + for i in columns_for_metadata ] return make_response(jsonify(table_columns + columns_for_metadata_strings), 200) @@ -437,13 +473,17 @@ def process_instance_delete( # (Pdb) db.session.delete # > - db.session.query(ProcessInstanceQueueModel).filter_by(process_instance_id=process_instance.id).delete() + db.session.query(ProcessInstanceQueueModel).filter_by( + process_instance_id=process_instance.id + ).delete() db.session.delete(process_instance) db.session.commit() return Response(json.dumps({"ok": True}), status=200, mimetype="application/json") -def process_instance_report_list(page: int = 1, per_page: int = 100) -> flask.wrappers.Response: +def process_instance_report_list( + page: int = 1, per_page: int = 100 +) -> flask.wrappers.Response: """Process_instance_report_list.""" process_instance_reports = ProcessInstanceReportModel.query.filter_by( created_by_id=g.user.id, @@ -528,7 +568,9 @@ def process_instance_report_show( ) substitution_variables = request.args.to_dict() - result_dict = process_instance_report.generate_report(process_instances.items, substitution_variables) + result_dict = process_instance_report.generate_report( + process_instances.items, substitution_variables + ) # update this if we go back to a database query instead of filtering in memory result_dict["pagination"] = { @@ -597,7 +639,9 @@ def process_instance_task_list( to_task_model: Optional[TaskModel] = None task_models_of_parent_bpmn_processes_guids: list[str] 
= [] if to_task_guid is not None: - to_task_model = TaskModel.query.filter_by(guid=to_task_guid, process_instance_id=process_instance.id).first() + to_task_model = TaskModel.query.filter_by( + guid=to_task_guid, process_instance_id=process_instance.id + ).first() if to_task_model is None: raise ApiError( error_code="task_not_found", @@ -620,7 +664,9 @@ def process_instance_task_list( _parent_bpmn_processes, task_models_of_parent_bpmn_processes, ) = TaskService.task_models_of_parent_bpmn_processes(to_task_model) - task_models_of_parent_bpmn_processes_guids = [p.guid for p in task_models_of_parent_bpmn_processes if p.guid] + task_models_of_parent_bpmn_processes_guids = [ + p.guid for p in task_models_of_parent_bpmn_processes if p.guid + ] task_model_query = task_model_query.filter( or_( TaskModel.end_in_seconds <= to_task_model.end_in_seconds, # type: ignore @@ -634,11 +680,14 @@ def process_instance_task_list( task_model_query = ( task_model_query.order_by(TaskModel.id.desc()) # type: ignore - .join(TaskDefinitionModel, TaskDefinitionModel.id == TaskModel.task_definition_id) + .join( + TaskDefinitionModel, TaskDefinitionModel.id == TaskModel.task_definition_id + ) .join(bpmn_process_alias, bpmn_process_alias.id == TaskModel.bpmn_process_id) .outerjoin( direct_parent_bpmn_process_alias, - direct_parent_bpmn_process_alias.id == bpmn_process_alias.direct_parent_process_id, + direct_parent_bpmn_process_alias.id + == bpmn_process_alias.direct_parent_process_id, ) .outerjoin( direct_parent_bpmn_process_definition_alias, @@ -646,7 +695,9 @@ def process_instance_task_list( == direct_parent_bpmn_process_alias.bpmn_process_definition_id, ) .join( - BpmnProcessDefinitionModel, BpmnProcessDefinitionModel.id == TaskDefinitionModel.bpmn_process_definition_id + BpmnProcessDefinitionModel, + BpmnProcessDefinitionModel.id + == TaskDefinitionModel.bpmn_process_definition_id, ) .add_columns( BpmnProcessDefinitionModel.bpmn_identifier.label("bpmn_process_definition_identifier"), # type: ignore @@ -669,23 +720,41 @@ def process_instance_task_list( ) if len(bpmn_process_ids) > 0: - task_model_query = task_model_query.filter(bpmn_process_alias.id.in_(bpmn_process_ids)) + task_model_query = task_model_query.filter( + bpmn_process_alias.id.in_(bpmn_process_ids) + ) task_models = task_model_query.all() - task_model_list = {} if most_recent_tasks_only: + most_recent_tasks = {} + most_recent_subprocesses = set() for task_model in task_models: bpmn_process_guid = task_model.bpmn_process_guid or "TOP" row_key = f"{bpmn_process_guid}:::{task_model.bpmn_identifier}" - if row_key not in task_model_list: - task_model_list[row_key] = task_model - task_models = list(task_model_list.values()) + if row_key not in most_recent_tasks: + most_recent_tasks[row_key] = task_model + if task_model.typename in ["SubWorkflowTask", "CallActivity"]: + most_recent_subprocesses.add(task_model.guid) + + task_models = [ + task_model + for task_model in most_recent_tasks.values() + if task_model.bpmn_process_guid in most_recent_subprocesses + or task_model.bpmn_process_guid is None + ] if to_task_model is not None: task_models_dict = json.loads(current_app.json.dumps(task_models)) for task_model in task_models_dict: - end_in_seconds = float(task_model["end_in_seconds"]) if task_model["end_in_seconds"] is not None else None - if to_task_model.guid == task_model["guid"] and task_model["state"] == "COMPLETED": + end_in_seconds = ( + float(task_model["end_in_seconds"]) + if task_model["end_in_seconds"] is not None + else None + ) + if ( + 
to_task_model.guid == task_model["guid"] + and task_model["state"] == "COMPLETED" + ): TaskService.reset_task_model_dict(task_model, state="READY") elif ( end_in_seconds is None @@ -714,10 +783,14 @@ def process_instance_find_by_id( ) -> flask.wrappers.Response: """Process_instance_find_by_id.""" process_instance = _find_process_instance_by_id_or_raise(process_instance_id) - modified_process_model_identifier = ProcessModelInfo.modify_process_identifier_for_path_param( - process_instance.process_model_identifier + modified_process_model_identifier = ( + ProcessModelInfo.modify_process_identifier_for_path_param( + process_instance.process_model_identifier + ) + ) + process_instance_uri = ( + f"/process-instances/{modified_process_model_identifier}/{process_instance.id}" ) - process_instance_uri = f"/process-instances/{modified_process_model_identifier}/{process_instance.id}" has_permission = AuthorizationService.user_has_permission( user=g.user, permission="read", @@ -751,22 +824,31 @@ def _get_process_instance( process_model_with_diagram = None name_of_file_with_diagram = None if process_identifier: - spec_reference = SpecReferenceCache.query.filter_by(identifier=process_identifier, type="process").first() + spec_reference = SpecReferenceCache.query.filter_by( + identifier=process_identifier, type="process" + ).first() if spec_reference is None: raise SpecReferenceNotFoundError( f"Could not find given process identifier in the cache: {process_identifier}" ) - process_model_with_diagram = ProcessModelService.get_process_model(spec_reference.process_model_id) + process_model_with_diagram = ProcessModelService.get_process_model( + spec_reference.process_model_id + ) name_of_file_with_diagram = spec_reference.file_name - process_instance.process_model_with_diagram_identifier = process_model_with_diagram.id + process_instance.process_model_with_diagram_identifier = ( + process_model_with_diagram.id + ) else: process_model_with_diagram = _get_process_model(process_model_identifier) if process_model_with_diagram.primary_file_name: name_of_file_with_diagram = process_model_with_diagram.primary_file_name if process_model_with_diagram and name_of_file_with_diagram: - if process_instance.bpmn_version_control_identifier == current_version_control_revision: + if ( + process_instance.bpmn_version_control_identifier + == current_version_control_revision + ): bpmn_xml_file_contents = SpecFileService.get_data( process_model_with_diagram, name_of_file_with_diagram ).decode("utf-8") From a6ce8bfdc71e31e34ee2142fcbaa5b5c4de00057 Mon Sep 17 00:00:00 2001 From: burnettk Date: Thu, 13 Apr 2023 12:38:31 -0400 Subject: [PATCH 3/4] lint --- .../routes/process_instances_controller.py | 143 ++++++------------ 1 file changed, 43 insertions(+), 100 deletions(-) diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/routes/process_instances_controller.py b/spiffworkflow-backend/src/spiffworkflow_backend/routes/process_instances_controller.py index 1e88e475..37fa778b 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/routes/process_instances_controller.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/routes/process_instances_controller.py @@ -90,9 +90,7 @@ def process_instance_create( modified_process_model_identifier: str, ) -> flask.wrappers.Response: """Create_process_instance.""" - process_model_identifier = _un_modify_modified_process_model_id( - modified_process_model_identifier - ) + process_model_identifier = _un_modify_modified_process_model_id(modified_process_model_identifier) 
process_model = _get_process_model(process_model_identifier) if process_model.primary_file_name is None: @@ -105,10 +103,8 @@ def process_instance_create( status_code=400, ) - process_instance = ( - ProcessInstanceService.create_process_instance_from_process_model_identifier( - process_model_identifier, g.user - ) + process_instance = ProcessInstanceService.create_process_instance_from_process_model_identifier( + process_model_identifier, g.user ) return Response( json.dumps(ProcessInstanceModelSchema().dump(process_instance)), @@ -158,15 +154,11 @@ def process_instance_run( if not current_app.config["SPIFFWORKFLOW_BACKEND_RUN_BACKGROUND_SCHEDULER"]: MessageService.correlate_all_message_instances() - process_instance_api = ProcessInstanceService.processor_to_process_instance_api( - processor - ) + process_instance_api = ProcessInstanceService.processor_to_process_instance_api(processor) process_instance_data = processor.get_data() process_instance_metadata = ProcessInstanceApiSchema().dump(process_instance_api) process_instance_metadata["data"] = process_instance_data - return Response( - json.dumps(process_instance_metadata), status=200, mimetype="application/json" - ) + return Response(json.dumps(process_instance_metadata), status=200, mimetype="application/json") def process_instance_terminate( @@ -244,17 +236,12 @@ def process_instance_log_list( process_instance = _find_process_instance_by_id_or_raise(process_instance_id) log_query = ( - ProcessInstanceEventModel.query.filter_by( - process_instance_id=process_instance.id - ) + ProcessInstanceEventModel.query.filter_by(process_instance_id=process_instance.id) .outerjoin(TaskModel, TaskModel.guid == ProcessInstanceEventModel.task_guid) - .outerjoin( - TaskDefinitionModel, TaskDefinitionModel.id == TaskModel.task_definition_id - ) + .outerjoin(TaskDefinitionModel, TaskDefinitionModel.id == TaskModel.task_definition_id) .outerjoin( BpmnProcessDefinitionModel, - BpmnProcessDefinitionModel.id - == TaskDefinitionModel.bpmn_process_definition_id, + BpmnProcessDefinitionModel.id == TaskDefinitionModel.bpmn_process_definition_id, ) ) if not detailed: @@ -351,9 +338,7 @@ def process_instance_list( report_filter_by: Optional[str] = None, ) -> flask.wrappers.Response: """Process_instance_list.""" - process_instance_report = ProcessInstanceReportService.report_with_identifier( - g.user, report_id, report_identifier - ) + process_instance_report = ProcessInstanceReportService.report_with_identifier(g.user, report_id, report_identifier) report_column_list = None if report_columns: @@ -377,21 +362,19 @@ def process_instance_list( report_filter_by_list=report_filter_by_list, ) else: - report_filter = ( - ProcessInstanceReportService.filter_from_metadata_with_overrides( - process_instance_report=process_instance_report, - process_model_identifier=process_model_identifier, - user_group_identifier=user_group_identifier, - start_from=start_from, - start_to=start_to, - end_from=end_from, - end_to=end_to, - process_status=process_status, - with_relation_to_me=with_relation_to_me, - process_initiator_username=process_initiator_username, - report_column_list=report_column_list, - report_filter_by_list=report_filter_by_list, - ) + report_filter = ProcessInstanceReportService.filter_from_metadata_with_overrides( + process_instance_report=process_instance_report, + process_model_identifier=process_model_identifier, + user_group_identifier=user_group_identifier, + start_from=start_from, + start_to=start_to, + end_from=end_from, + end_to=end_to, + 
process_status=process_status, + with_relation_to_me=with_relation_to_me, + process_initiator_username=process_initiator_username, + report_column_list=report_column_list, + report_filter_by_list=report_filter_by_list, ) response_json = ProcessInstanceReportService.run_process_instance_report( @@ -416,17 +399,14 @@ def process_instance_report_column_list( .distinct() # type: ignore ) if process_model_identifier: - columns_for_metadata_query = columns_for_metadata_query.join( - ProcessInstanceModel - ) + columns_for_metadata_query = columns_for_metadata_query.join(ProcessInstanceModel) columns_for_metadata_query = columns_for_metadata_query.filter( ProcessInstanceModel.process_model_identifier == process_model_identifier ) columns_for_metadata = columns_for_metadata_query.all() columns_for_metadata_strings = [ - {"Header": i[0], "accessor": i[0], "filterable": True} - for i in columns_for_metadata + {"Header": i[0], "accessor": i[0], "filterable": True} for i in columns_for_metadata ] return make_response(jsonify(table_columns + columns_for_metadata_strings), 200) @@ -473,17 +453,13 @@ def process_instance_delete( # (Pdb) db.session.delete # > - db.session.query(ProcessInstanceQueueModel).filter_by( - process_instance_id=process_instance.id - ).delete() + db.session.query(ProcessInstanceQueueModel).filter_by(process_instance_id=process_instance.id).delete() db.session.delete(process_instance) db.session.commit() return Response(json.dumps({"ok": True}), status=200, mimetype="application/json") -def process_instance_report_list( - page: int = 1, per_page: int = 100 -) -> flask.wrappers.Response: +def process_instance_report_list(page: int = 1, per_page: int = 100) -> flask.wrappers.Response: """Process_instance_report_list.""" process_instance_reports = ProcessInstanceReportModel.query.filter_by( created_by_id=g.user.id, @@ -568,9 +544,7 @@ def process_instance_report_show( ) substitution_variables = request.args.to_dict() - result_dict = process_instance_report.generate_report( - process_instances.items, substitution_variables - ) + result_dict = process_instance_report.generate_report(process_instances.items, substitution_variables) # update this if we go back to a database query instead of filtering in memory result_dict["pagination"] = { @@ -639,9 +613,7 @@ def process_instance_task_list( to_task_model: Optional[TaskModel] = None task_models_of_parent_bpmn_processes_guids: list[str] = [] if to_task_guid is not None: - to_task_model = TaskModel.query.filter_by( - guid=to_task_guid, process_instance_id=process_instance.id - ).first() + to_task_model = TaskModel.query.filter_by(guid=to_task_guid, process_instance_id=process_instance.id).first() if to_task_model is None: raise ApiError( error_code="task_not_found", @@ -664,9 +636,7 @@ def process_instance_task_list( _parent_bpmn_processes, task_models_of_parent_bpmn_processes, ) = TaskService.task_models_of_parent_bpmn_processes(to_task_model) - task_models_of_parent_bpmn_processes_guids = [ - p.guid for p in task_models_of_parent_bpmn_processes if p.guid - ] + task_models_of_parent_bpmn_processes_guids = [p.guid for p in task_models_of_parent_bpmn_processes if p.guid] task_model_query = task_model_query.filter( or_( TaskModel.end_in_seconds <= to_task_model.end_in_seconds, # type: ignore @@ -680,14 +650,11 @@ def process_instance_task_list( task_model_query = ( task_model_query.order_by(TaskModel.id.desc()) # type: ignore - .join( - TaskDefinitionModel, TaskDefinitionModel.id == TaskModel.task_definition_id - ) + .join(TaskDefinitionModel, 
TaskDefinitionModel.id == TaskModel.task_definition_id) .join(bpmn_process_alias, bpmn_process_alias.id == TaskModel.bpmn_process_id) .outerjoin( direct_parent_bpmn_process_alias, - direct_parent_bpmn_process_alias.id - == bpmn_process_alias.direct_parent_process_id, + direct_parent_bpmn_process_alias.id == bpmn_process_alias.direct_parent_process_id, ) .outerjoin( direct_parent_bpmn_process_definition_alias, @@ -696,8 +663,7 @@ def process_instance_task_list( ) .join( BpmnProcessDefinitionModel, - BpmnProcessDefinitionModel.id - == TaskDefinitionModel.bpmn_process_definition_id, + BpmnProcessDefinitionModel.id == TaskDefinitionModel.bpmn_process_definition_id, ) .add_columns( BpmnProcessDefinitionModel.bpmn_identifier.label("bpmn_process_definition_identifier"), # type: ignore @@ -720,9 +686,7 @@ def process_instance_task_list( ) if len(bpmn_process_ids) > 0: - task_model_query = task_model_query.filter( - bpmn_process_alias.id.in_(bpmn_process_ids) - ) + task_model_query = task_model_query.filter(bpmn_process_alias.id.in_(bpmn_process_ids)) task_models = task_model_query.all() if most_recent_tasks_only: @@ -739,22 +703,14 @@ def process_instance_task_list( task_models = [ task_model for task_model in most_recent_tasks.values() - if task_model.bpmn_process_guid in most_recent_subprocesses - or task_model.bpmn_process_guid is None + if task_model.bpmn_process_guid in most_recent_subprocesses or task_model.bpmn_process_guid is None ] if to_task_model is not None: task_models_dict = json.loads(current_app.json.dumps(task_models)) for task_model in task_models_dict: - end_in_seconds = ( - float(task_model["end_in_seconds"]) - if task_model["end_in_seconds"] is not None - else None - ) - if ( - to_task_model.guid == task_model["guid"] - and task_model["state"] == "COMPLETED" - ): + end_in_seconds = float(task_model["end_in_seconds"]) if task_model["end_in_seconds"] is not None else None + if to_task_model.guid == task_model["guid"] and task_model["state"] == "COMPLETED": TaskService.reset_task_model_dict(task_model, state="READY") elif ( end_in_seconds is None @@ -783,14 +739,10 @@ def process_instance_find_by_id( ) -> flask.wrappers.Response: """Process_instance_find_by_id.""" process_instance = _find_process_instance_by_id_or_raise(process_instance_id) - modified_process_model_identifier = ( - ProcessModelInfo.modify_process_identifier_for_path_param( - process_instance.process_model_identifier - ) - ) - process_instance_uri = ( - f"/process-instances/{modified_process_model_identifier}/{process_instance.id}" + modified_process_model_identifier = ProcessModelInfo.modify_process_identifier_for_path_param( + process_instance.process_model_identifier ) + process_instance_uri = f"/process-instances/{modified_process_model_identifier}/{process_instance.id}" has_permission = AuthorizationService.user_has_permission( user=g.user, permission="read", @@ -824,31 +776,22 @@ def _get_process_instance( process_model_with_diagram = None name_of_file_with_diagram = None if process_identifier: - spec_reference = SpecReferenceCache.query.filter_by( - identifier=process_identifier, type="process" - ).first() + spec_reference = SpecReferenceCache.query.filter_by(identifier=process_identifier, type="process").first() if spec_reference is None: raise SpecReferenceNotFoundError( f"Could not find given process identifier in the cache: {process_identifier}" ) - process_model_with_diagram = ProcessModelService.get_process_model( - spec_reference.process_model_id - ) + process_model_with_diagram = 
ProcessModelService.get_process_model(spec_reference.process_model_id) name_of_file_with_diagram = spec_reference.file_name - process_instance.process_model_with_diagram_identifier = ( - process_model_with_diagram.id - ) + process_instance.process_model_with_diagram_identifier = process_model_with_diagram.id else: process_model_with_diagram = _get_process_model(process_model_identifier) if process_model_with_diagram.primary_file_name: name_of_file_with_diagram = process_model_with_diagram.primary_file_name if process_model_with_diagram and name_of_file_with_diagram: - if ( - process_instance.bpmn_version_control_identifier - == current_version_control_revision - ): + if process_instance.bpmn_version_control_identifier == current_version_control_revision: bpmn_xml_file_contents = SpecFileService.get_data( process_model_with_diagram, name_of_file_with_diagram ).decode("utf-8") From 6fc1e2be9667579a8283148fa226d4f51de8af34 Mon Sep 17 00:00:00 2001 From: burnettk Date: Thu, 13 Apr 2023 15:31:40 -0400 Subject: [PATCH 4/4] bump spiff --- spiffworkflow-backend/poetry.lock | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/spiffworkflow-backend/poetry.lock b/spiffworkflow-backend/poetry.lock index b2db7e5f..303fcc2f 100644 --- a/spiffworkflow-backend/poetry.lock +++ b/spiffworkflow-backend/poetry.lock @@ -1916,7 +1916,7 @@ lxml = "*" type = "git" url = "https://github.com/sartography/SpiffWorkflow" reference = "main" -resolved_reference = "98a1b37e01a00faea60025f517a89867b7261432" +resolved_reference = "162a1c5f56cf12fc589a1e368704c0819bfcc0cd" [[package]] name = "sqlalchemy"
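
Illustration of PATCH 2/4 ("exclude tasks from out of date subprocesses"): the controller change keeps only the most recent run of each task and drops tasks whose containing subprocess instance is itself stale. Below is a minimal, self-contained sketch of that filtering, assuming the tasks arrive ordered newest-first (matching the TaskModel.id.desc() ordering in the query). FakeTask and most_recent_tasks_only are illustrative stand-ins, not names from the patches; the real code operates on SQLAlchemy TaskModel rows.

from dataclasses import dataclass
from typing import Optional


@dataclass
class FakeTask:
    # Simplified stand-in for the backend TaskModel; field names mirror the patch.
    guid: str
    bpmn_identifier: str
    typename: str
    bpmn_process_guid: Optional[str] = None  # None means the task sits in the top-level process


def most_recent_tasks_only(task_models: list[FakeTask]) -> list[FakeTask]:
    """Keep one task per (process guid, task identifier) pair and drop tasks
    whose containing subprocess is not itself the most recent instance."""
    most_recent_tasks: dict[str, FakeTask] = {}
    most_recent_subprocesses: set[str] = set()
    for task_model in task_models:  # assumed ordered newest-first
        bpmn_process_guid = task_model.bpmn_process_guid or "TOP"
        row_key = f"{bpmn_process_guid}:::{task_model.bpmn_identifier}"
        if row_key not in most_recent_tasks:
            most_recent_tasks[row_key] = task_model
            if task_model.typename in ["SubWorkflowTask", "CallActivity"]:
                most_recent_subprocesses.add(task_model.guid)
    return [
        task_model
        for task_model in most_recent_tasks.values()
        if task_model.bpmn_process_guid in most_recent_subprocesses
        or task_model.bpmn_process_guid is None
    ]


if __name__ == "__main__":
    tasks = [
        FakeTask("call-2", "call_activity", "CallActivity"),            # newest run of a call activity
        FakeTask("sub-task-2", "manual_task", "ManualTask", "call-2"),  # task inside the newest run
        FakeTask("call-1", "call_activity", "CallActivity"),            # older run, same identifier: dropped
        FakeTask("sub-task-1", "manual_task", "ManualTask", "call-1"),  # task inside the stale run: dropped
    ]
    for task in most_recent_tasks_only(tasks):
        print(task.guid)  # prints "call-2" then "sub-task-2"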