From e5c1ccb60da8603364d98ac81c2b348d5fbb0755 Mon Sep 17 00:00:00 2001 From: Jon Herron Date: Mon, 28 Nov 2022 13:47:41 -0500 Subject: [PATCH 01/40] Save as report component --- .../ProcessInstanceListSaveAsReport.tsx | 86 +++++++++++++++++++ .../components/ProcessInstanceListTable.tsx | 14 +++ 2 files changed, 100 insertions(+) create mode 100644 spiffworkflow-frontend/src/components/ProcessInstanceListSaveAsReport.tsx diff --git a/spiffworkflow-frontend/src/components/ProcessInstanceListSaveAsReport.tsx b/spiffworkflow-frontend/src/components/ProcessInstanceListSaveAsReport.tsx new file mode 100644 index 00000000..a642d4f9 --- /dev/null +++ b/spiffworkflow-frontend/src/components/ProcessInstanceListSaveAsReport.tsx @@ -0,0 +1,86 @@ +import { useState } from 'react'; +// TODO: carbon controls +/* +import { + Button, + Textbox, + // @ts-ignore +} from '@carbon/react'; +*/ +import HttpService from '../services/HttpService'; + +type OwnProps = { + onSuccess: (..._args: any[]) => any; + columns: string; + orderBy: string; + filterBy: string; + buttonText?: string; +}; + +export default function ProcessInstanceListSaveAsReport({ + onSuccess, + columns, + orderBy, + filterBy, + buttonText = 'Save as New Perspective', +}: OwnProps) { + const [identifier, setIdentifier] = useState(''); + + const hasIdentifier = () => { + return identifier?.length > 0; + }; + + const addProcessInstanceReport = (event: any) => { + event.preventDefault(); + + const columnArray = columns.split(',').map((column) => { + return { Header: column, accessor: column }; + }); + const orderByArray = orderBy.split(',').filter((n) => n); + + const filterByArray = filterBy + .split(',') + .map((filterByItem) => { + const [fieldName, fieldValue] = filterByItem.split('='); + if (fieldValue) { + return { + field_name: fieldName, + operator: 'equals', + field_value: fieldValue, + }; + } + return null; + }) + .filter((n) => n); + + HttpService.makeCallToBackend({ + path: `/process-instances/reports`, + successCallback: onSuccess, + httpMethod: 'POST', + postBody: { + identifier, + report_metadata: { + columns: columnArray, + order_by: orderByArray, + filter_by: filterByArray, + }, + }, + }); + }; + + return ( +
+      {/* text input bound to identifier (via setIdentifier) and a submit Button labeled with buttonText */}
+ ); +} diff --git a/spiffworkflow-frontend/src/components/ProcessInstanceListTable.tsx b/spiffworkflow-frontend/src/components/ProcessInstanceListTable.tsx index bd060af6..769bfea7 100644 --- a/spiffworkflow-frontend/src/components/ProcessInstanceListTable.tsx +++ b/spiffworkflow-frontend/src/components/ProcessInstanceListTable.tsx @@ -52,6 +52,7 @@ import { } from '../interfaces'; import ProcessModelSearch from './ProcessModelSearch'; import ProcessInstanceReportSearch from './ProcessInstanceReportSearch'; +import ProcessInstanceListSaveAsReport from './ProcessInstanceListSaveAsReport'; const REFRESH_INTERVAL = 5; const REFRESH_TIMEOUT = 600; @@ -764,6 +765,18 @@ export default function ProcessInstanceListTable({ return null; }; + const saveAsReportComponent = () => { + const callback = (_: any) => {}; + return ( + + ); + }; + const filterComponent = () => { if (!filtersEnabled) { return null; @@ -788,6 +801,7 @@ export default function ProcessInstanceListTable({ {filterOptions()} + {saveAsReportComponent()} ); }; From 6ec3f775af18dc253df6488ab7a7015580936176 Mon Sep 17 00:00:00 2001 From: Jon Herron Date: Mon, 28 Nov 2022 13:56:45 -0500 Subject: [PATCH 02/40] WIP --- .../components/ProcessInstanceListSaveAsReport.tsx | 11 +++++------ .../src/components/ProcessInstanceListTable.tsx | 11 ++++++++--- 2 files changed, 13 insertions(+), 9 deletions(-) diff --git a/spiffworkflow-frontend/src/components/ProcessInstanceListSaveAsReport.tsx b/spiffworkflow-frontend/src/components/ProcessInstanceListSaveAsReport.tsx index a642d4f9..e225d2ff 100644 --- a/spiffworkflow-frontend/src/components/ProcessInstanceListSaveAsReport.tsx +++ b/spiffworkflow-frontend/src/components/ProcessInstanceListSaveAsReport.tsx @@ -11,7 +11,7 @@ import HttpService from '../services/HttpService'; type OwnProps = { onSuccess: (..._args: any[]) => any; - columns: string; + columnArray: { Header: string; accessor: string}; orderBy: string; filterBy: string; buttonText?: string; @@ -19,10 +19,10 @@ type OwnProps = { export default function ProcessInstanceListSaveAsReport({ onSuccess, - columns, + columnArray, orderBy, filterBy, - buttonText = 'Save as New Perspective', + buttonText = 'Save as Perspective', }: OwnProps) { const [identifier, setIdentifier] = useState(''); @@ -33,9 +33,8 @@ export default function ProcessInstanceListSaveAsReport({ const addProcessInstanceReport = (event: any) => { event.preventDefault(); - const columnArray = columns.split(',').map((column) => { - return { Header: column, accessor: column }; - }); + console.log(columnArray); + const orderByArray = orderBy.split(',').filter((n) => n); const filterByArray = filterBy diff --git a/spiffworkflow-frontend/src/components/ProcessInstanceListTable.tsx b/spiffworkflow-frontend/src/components/ProcessInstanceListTable.tsx index 769bfea7..fcbe45ad 100644 --- a/spiffworkflow-frontend/src/components/ProcessInstanceListTable.tsx +++ b/spiffworkflow-frontend/src/components/ProcessInstanceListTable.tsx @@ -622,6 +622,10 @@ export default function ProcessInstanceListTable({ ); }; + const reportColumns = () => { + return (reportMetadata as any).columns; + }; + const buildTable = () => { const headerLabels: Record = { id: 'Id', @@ -636,7 +640,7 @@ export default function ProcessInstanceListTable({ const getHeaderLabel = (header: string) => { return headerLabels[header] ?? 
header; }; - const headers = (reportMetadata as any).columns.map((column: any) => { + const headers = reportColumns().map((column: any) => { // return {getHeaderLabel((column as any).Header)}; return getHeaderLabel((column as any).Header); }); @@ -710,7 +714,7 @@ export default function ProcessInstanceListTable({ }; const rows = processInstances.map((row: any) => { - const currentRow = (reportMetadata as any).columns.map((column: any) => { + const currentRow = reportColumns().map((column: any) => { return formattedColumn(row, column); }); return {currentRow}; @@ -766,11 +770,12 @@ export default function ProcessInstanceListTable({ }; const saveAsReportComponent = () => { + // TODO onSuccess reload/select the new report const callback = (_: any) => {}; return ( From 02c113fb54adbbf503a63af1db5fb98d70624ddc Mon Sep 17 00:00:00 2001 From: Jon Herron Date: Mon, 28 Nov 2022 13:59:38 -0500 Subject: [PATCH 03/40] Use current columns --- .../src/components/ProcessInstanceListSaveAsReport.tsx | 6 ++++-- .../src/components/ProcessInstanceListTable.tsx | 2 +- 2 files changed, 5 insertions(+), 3 deletions(-) diff --git a/spiffworkflow-frontend/src/components/ProcessInstanceListSaveAsReport.tsx b/spiffworkflow-frontend/src/components/ProcessInstanceListSaveAsReport.tsx index e225d2ff..b1187e9e 100644 --- a/spiffworkflow-frontend/src/components/ProcessInstanceListSaveAsReport.tsx +++ b/spiffworkflow-frontend/src/components/ProcessInstanceListSaveAsReport.tsx @@ -11,7 +11,7 @@ import HttpService from '../services/HttpService'; type OwnProps = { onSuccess: (..._args: any[]) => any; - columnArray: { Header: string; accessor: string}; + columnArray: { Header: string; accessor: string }; orderBy: string; filterBy: string; buttonText?: string; @@ -79,7 +79,9 @@ export default function ProcessInstanceListSaveAsReport({ onChange={(e) => setIdentifier(e.target.value)} /> - + ); } diff --git a/spiffworkflow-frontend/src/components/ProcessInstanceListTable.tsx b/spiffworkflow-frontend/src/components/ProcessInstanceListTable.tsx index fcbe45ad..e98b5978 100644 --- a/spiffworkflow-frontend/src/components/ProcessInstanceListTable.tsx +++ b/spiffworkflow-frontend/src/components/ProcessInstanceListTable.tsx @@ -770,7 +770,7 @@ export default function ProcessInstanceListTable({ }; const saveAsReportComponent = () => { - // TODO onSuccess reload/select the new report + // TODO onSuccess reload/select the new report in the report search const callback = (_: any) => {}; return ( Date: Mon, 28 Nov 2022 14:14:35 -0500 Subject: [PATCH 04/40] Save selected process model --- .../ProcessInstanceListSaveAsReport.tsx | 29 +++++++------------ .../components/ProcessInstanceListTable.tsx | 2 +- 2 files changed, 12 insertions(+), 19 deletions(-) diff --git a/spiffworkflow-frontend/src/components/ProcessInstanceListSaveAsReport.tsx b/spiffworkflow-frontend/src/components/ProcessInstanceListSaveAsReport.tsx index b1187e9e..35cb9223 100644 --- a/spiffworkflow-frontend/src/components/ProcessInstanceListSaveAsReport.tsx +++ b/spiffworkflow-frontend/src/components/ProcessInstanceListSaveAsReport.tsx @@ -7,13 +7,14 @@ import { // @ts-ignore } from '@carbon/react'; */ +import { ProcessModel } from '../interfaces'; import HttpService from '../services/HttpService'; type OwnProps = { onSuccess: (..._args: any[]) => any; columnArray: { Header: string; accessor: string }; orderBy: string; - filterBy: string; + processModelSelection: ProcessModel | null; buttonText?: string; }; @@ -21,7 +22,7 @@ export default function 
ProcessInstanceListSaveAsReport({ onSuccess, columnArray, orderBy, - filterBy, + processModelSelection, buttonText = 'Save as Perspective', }: OwnProps) { const [identifier, setIdentifier] = useState(''); @@ -33,24 +34,16 @@ export default function ProcessInstanceListSaveAsReport({ const addProcessInstanceReport = (event: any) => { event.preventDefault(); - console.log(columnArray); - const orderByArray = orderBy.split(',').filter((n) => n); - const filterByArray = filterBy - .split(',') - .map((filterByItem) => { - const [fieldName, fieldValue] = filterByItem.split('='); - if (fieldValue) { - return { - field_name: fieldName, - operator: 'equals', - field_value: fieldValue, - }; - } - return null; - }) - .filter((n) => n); + const filterByArray: any = []; + + if (processModelSelection) { + filterByArray.push({ + field_name: 'process_model_identifier', + field_value: processModelSelection.id, + }); + } HttpService.makeCallToBackend({ path: `/process-instances/reports`, diff --git a/spiffworkflow-frontend/src/components/ProcessInstanceListTable.tsx b/spiffworkflow-frontend/src/components/ProcessInstanceListTable.tsx index e98b5978..46bdae47 100644 --- a/spiffworkflow-frontend/src/components/ProcessInstanceListTable.tsx +++ b/spiffworkflow-frontend/src/components/ProcessInstanceListTable.tsx @@ -777,7 +777,7 @@ export default function ProcessInstanceListTable({ onSuccess={callback} columnArray={reportColumns()} orderBy="" - filterBy="" + processModelSelection={processModelSelection} /> ); }; From c21022bf05d910c3b43da05381d379a200ddf96b Mon Sep 17 00:00:00 2001 From: Jon Herron Date: Mon, 28 Nov 2022 16:46:19 -0500 Subject: [PATCH 05/40] Save first status --- .../src/components/ProcessInstanceListSaveAsReport.tsx | 9 +++++++++ .../src/components/ProcessInstanceListTable.tsx | 1 + 2 files changed, 10 insertions(+) diff --git a/spiffworkflow-frontend/src/components/ProcessInstanceListSaveAsReport.tsx b/spiffworkflow-frontend/src/components/ProcessInstanceListSaveAsReport.tsx index 35cb9223..77e2ac37 100644 --- a/spiffworkflow-frontend/src/components/ProcessInstanceListSaveAsReport.tsx +++ b/spiffworkflow-frontend/src/components/ProcessInstanceListSaveAsReport.tsx @@ -15,6 +15,7 @@ type OwnProps = { columnArray: { Header: string; accessor: string }; orderBy: string; processModelSelection: ProcessModel | null; + processStatusSelection: string[]; buttonText?: string; }; @@ -23,6 +24,7 @@ export default function ProcessInstanceListSaveAsReport({ columnArray, orderBy, processModelSelection, + processStatusSelection, buttonText = 'Save as Perspective', }: OwnProps) { const [identifier, setIdentifier] = useState(''); @@ -45,6 +47,13 @@ export default function ProcessInstanceListSaveAsReport({ }); } + if (processStatusSelection.length > 0) { + filterByArray.push({ + field_name: 'process_status', + field_value: processStatusSelection[0], // TODO: support more than one status + }); + } + HttpService.makeCallToBackend({ path: `/process-instances/reports`, successCallback: onSuccess, diff --git a/spiffworkflow-frontend/src/components/ProcessInstanceListTable.tsx b/spiffworkflow-frontend/src/components/ProcessInstanceListTable.tsx index 46bdae47..9ad23ac3 100644 --- a/spiffworkflow-frontend/src/components/ProcessInstanceListTable.tsx +++ b/spiffworkflow-frontend/src/components/ProcessInstanceListTable.tsx @@ -778,6 +778,7 @@ export default function ProcessInstanceListTable({ columnArray={reportColumns()} orderBy="" processModelSelection={processModelSelection} + 
processStatusSelection={processStatusSelection} /> ); }; From ddadefee6079ef864611206a22c5bec056c819e2 Mon Sep 17 00:00:00 2001 From: jasquat Date: Mon, 28 Nov 2022 17:03:00 -0500 Subject: [PATCH 06/40] some basics to add metadata to reports w/ burnettk cullerton --- .../models/process_instance_report.py | 10 ---- .../process_instance_metadata.bpmn | 40 ++++++++++++++ .../integration/test_process_api.py | 53 +++++++++++++++++++ 3 files changed, 93 insertions(+), 10 deletions(-) create mode 100644 spiffworkflow-backend/tests/data/test-process-instance-metadata-report/process_instance_metadata.bpmn diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/models/process_instance_report.py b/spiffworkflow-backend/src/spiffworkflow_backend/models/process_instance_report.py index 5cccf4a5..4f0b0f46 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/models/process_instance_report.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/models/process_instance_report.py @@ -204,18 +204,8 @@ class ProcessInstanceReportModel(SpiffworkflowBaseDBModel): user: UserModel, ) -> ProcessInstanceReportModel: """Create_with_attributes.""" - # <<<<<<< HEAD - # process_model = ProcessModelService.get_process_model( - # process_model_id=f"{process_model_identifier}" - # ) - # process_instance_report = cls( - # identifier=identifier, - # process_group_identifier="process_model.process_group_id", - # process_model_identifier=process_model.id, - # ======= process_instance_report = cls( identifier=identifier, - # >>>>>>> main created_by_id=user.id, report_metadata=report_metadata, ) diff --git a/spiffworkflow-backend/tests/data/test-process-instance-metadata-report/process_instance_metadata.bpmn b/spiffworkflow-backend/tests/data/test-process-instance-metadata-report/process_instance_metadata.bpmn new file mode 100644 index 00000000..f371a350 --- /dev/null +++ b/spiffworkflow-backend/tests/data/test-process-instance-metadata-report/process_instance_metadata.bpmn @@ -0,0 +1,40 @@ + + + + + Flow_0fmt4q1 + + + + Flow_0fmt4q1 + Flow_0hhrkce + save_process_instance_metadata({"key1": "value1", "key2": "value2"}) + + + Flow_0hhrkce + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/spiffworkflow-backend/tests/spiffworkflow_backend/integration/test_process_api.py b/spiffworkflow-backend/tests/spiffworkflow_backend/integration/test_process_api.py index 5ee5ae9f..4c60cb8c 100644 --- a/spiffworkflow-backend/tests/spiffworkflow_backend/integration/test_process_api.py +++ b/spiffworkflow-backend/tests/spiffworkflow_backend/integration/test_process_api.py @@ -4,11 +4,13 @@ import json import os import time from typing import Any +from conftest import with_super_admin_user import pytest from flask.app import Flask from flask.testing import FlaskClient from flask_bpmn.models.db import db +from spiffworkflow_backend.models.process_instance_metadata import ProcessInstanceMetadataModel from tests.spiffworkflow_backend.helpers.base_test import BaseTest from tests.spiffworkflow_backend.helpers.test_data import load_test_spec @@ -2544,3 +2546,54 @@ class TestProcessApi(BaseTest): # make sure the new subgroup does exist new_process_group = ProcessModelService.get_process_group(new_sub_path) assert new_process_group.id == new_sub_path + + def test_can_get_process_instance_list_with_report_metadata( + self, + app: Flask, + client: FlaskClient, + with_db_and_bpmn_file_cleanup: None, + with_super_admin_user: UserModel, + ) -> None: + process_model = load_test_spec( + 
process_model_id='test-process-instance-metadata-report', + bpmn_file_name='process_instance_metadata.bpmn', + process_model_source_directory='test-process-instance-metadata-report', + ) + process_instance = self.create_process_instance_from_process_model( + process_model=process_model, user=with_super_admin_user + ) + + processor = ProcessInstanceProcessor(process_instance) + processor.do_engine_steps(save=True) + process_instance_metadata = ProcessInstanceMetadataModel.query.filter_by( + process_instance_id=process_instance.id + ).all() + assert len(process_instance_metadata) == 2 + + + report_metadata = { + "columns": [ + {"Header": "ID", "accessor": "id"}, + {"Header": "Status", "accessor": "status"}, + {"Header": "Key One", "accessor": "key1"}, + ], + "order_by": ["status"], + "filter_by": [], + } + process_instance_report = ProcessInstanceReportModel.create_with_attributes( + identifier="sure", + report_metadata=report_metadata, + user=with_super_admin_user, + ) + + response = client.get( + f"/v1.0/process-instances?report_identifier={process_instance_report.identifier}", + headers=self.logged_in_headers(with_super_admin_user), + ) + print(f"response.json: {response.json}") + assert response.status_code == 200 + assert response.json is not None + assert len(response.json["results"]) == 1 + assert response.json["pagination"]["count"] == 1 + assert response.json["pagination"]["pages"] == 1 + assert response.json["pagination"]["total"] == 1 From 49e4db6ae2535009a110f8a98311fa9e627ba4fd Mon Sep 17 00:00:00 2001 From: Jon Herron Date: Mon, 28 Nov 2022 17:07:28 -0500 Subject: [PATCH 07/40] Save dates --- .../ProcessInstanceListSaveAsReport.tsx | 36 +++++++++++ .../components/ProcessInstanceListTable.tsx | 63 +++++++++++++++---- 2 files changed, 86 insertions(+), 13 deletions(-) diff --git a/spiffworkflow-frontend/src/components/ProcessInstanceListSaveAsReport.tsx b/spiffworkflow-frontend/src/components/ProcessInstanceListSaveAsReport.tsx index 77e2ac37..d23daed0 100644 --- a/spiffworkflow-frontend/src/components/ProcessInstanceListSaveAsReport.tsx +++ b/spiffworkflow-frontend/src/components/ProcessInstanceListSaveAsReport.tsx @@ -16,6 +16,10 @@ type OwnProps = { orderBy: string; processModelSelection: ProcessModel | null; processStatusSelection: string[]; + startFromSeconds: string | null; + startToSeconds: string | null; + endFromSeconds: string | null; + endToSeconds: string | null; buttonText?: string; }; @@ -25,6 +29,10 @@ export default function ProcessInstanceListSaveAsReport({ orderBy, processModelSelection, processStatusSelection, + startFromSeconds, + startToSeconds, + endFromSeconds, + endToSeconds, buttonText = 'Save as Perspective', }: OwnProps) { const [identifier, setIdentifier] = useState(''); @@ -54,6 +62,34 @@ export default function ProcessInstanceListSaveAsReport({ }); } + if (startFromSeconds) { + filterByArray.push({ + field_name: 'start_from', + field_value: startFromSeconds, + }); + } + + if (startToSeconds) { + filterByArray.push({ + field_name: 'start_to', + field_value: startToSeconds, + }); + } + + if (endFromSeconds) { + filterByArray.push({ + field_name: 'end_from', + field_value: endFromSeconds, + }); + } + + if (endToSeconds) { + filterByArray.push({ + field_name: 'end_to', + field_value: endToSeconds, + }); + } + HttpService.makeCallToBackend({ path: `/process-instances/reports`, successCallback: onSuccess, diff --git a/spiffworkflow-frontend/src/components/ProcessInstanceListTable.tsx b/spiffworkflow-frontend/src/components/ProcessInstanceListTable.tsx 
index 9ad23ac3..32674a05 100644 --- a/spiffworkflow-frontend/src/components/ProcessInstanceListTable.tsx +++ b/spiffworkflow-frontend/src/components/ProcessInstanceListTable.tsx @@ -367,16 +367,7 @@ export default function ProcessInstanceListTable({ } }; - const applyFilter = (event: any) => { - event.preventDefault(); - const { page, perPage } = getPageInfoFromSearchParams( - searchParams, - undefined, - undefined, - paginationQueryParamPrefix - ); - let queryParamString = `per_page=${perPage}&page=${page}&user_filter=true`; - + const calculateStartAndEndSeconds = () => { const startFromSeconds = convertDateAndTimeStringsToSeconds( startFromDate, startFromTime || '00:00:00' @@ -393,28 +384,59 @@ export default function ProcessInstanceListTable({ endToDate, endToTime || '00:00:00' ); + let valid = true; if (isTrueComparison(startFromSeconds, '>', startToSeconds)) { setErrorMessage({ message: '"Start date from" cannot be after "start date to"', }); - return; + valid = false; } if (isTrueComparison(endFromSeconds, '>', endToSeconds)) { setErrorMessage({ message: '"End date from" cannot be after "end date to"', }); - return; + valid = false; } if (isTrueComparison(startFromSeconds, '>', endFromSeconds)) { setErrorMessage({ message: '"Start date from" cannot be after "end date from"', }); - return; + valid = false; } if (isTrueComparison(startToSeconds, '>', endToSeconds)) { setErrorMessage({ message: '"Start date to" cannot be after "end date to"', }); + valid = false; + } + + return { + valid, + startFromSeconds, + startToSeconds, + endFromSeconds, + endToSeconds, + }; + }; + + const applyFilter = (event: any) => { + event.preventDefault(); + const { page, perPage } = getPageInfoFromSearchParams( + searchParams, + undefined, + undefined, + paginationQueryParamPrefix + ); + let queryParamString = `per_page=${perPage}&page=${page}&user_filter=true`; + const { + valid, + startFromSeconds, + startToSeconds, + endFromSeconds, + endToSeconds, + } = calculateStartAndEndSeconds(); + + if (!valid) { return; } @@ -772,6 +794,17 @@ export default function ProcessInstanceListTable({ const saveAsReportComponent = () => { // TODO onSuccess reload/select the new report in the report search const callback = (_: any) => {}; + const { + valid, + startFromSeconds, + startToSeconds, + endFromSeconds, + endToSeconds, + } = calculateStartAndEndSeconds(); + + if (!valid) { + return null; + } return ( ); }; From 589361b7fcf8afd87c5030ff05da9bca42259907 Mon Sep 17 00:00:00 2001 From: Jon Herron Date: Mon, 28 Nov 2022 17:26:26 -0500 Subject: [PATCH 08/40] Choose new report --- .../src/components/ProcessInstanceListSaveAsReport.tsx | 8 +++++++- .../src/components/ProcessInstanceListTable.tsx | 7 ++++++- 2 files changed, 13 insertions(+), 2 deletions(-) diff --git a/spiffworkflow-frontend/src/components/ProcessInstanceListSaveAsReport.tsx b/spiffworkflow-frontend/src/components/ProcessInstanceListSaveAsReport.tsx index d23daed0..6c8f5fb9 100644 --- a/spiffworkflow-frontend/src/components/ProcessInstanceListSaveAsReport.tsx +++ b/spiffworkflow-frontend/src/components/ProcessInstanceListSaveAsReport.tsx @@ -41,6 +41,12 @@ export default function ProcessInstanceListSaveAsReport({ return identifier?.length > 0; }; + const responseHandler = (result: any) => { + if (result.ok === true) { + onSuccess(identifier); + } + }; + const addProcessInstanceReport = (event: any) => { event.preventDefault(); @@ -92,7 +98,7 @@ export default function ProcessInstanceListSaveAsReport({ HttpService.makeCallToBackend({ path: 
`/process-instances/reports`, - successCallback: onSuccess, + successCallback: responseHandler, httpMethod: 'POST', postBody: { identifier, diff --git a/spiffworkflow-frontend/src/components/ProcessInstanceListTable.tsx b/spiffworkflow-frontend/src/components/ProcessInstanceListTable.tsx index 32674a05..9b239502 100644 --- a/spiffworkflow-frontend/src/components/ProcessInstanceListTable.tsx +++ b/spiffworkflow-frontend/src/components/ProcessInstanceListTable.tsx @@ -367,6 +367,7 @@ export default function ProcessInstanceListTable({ } }; + // TODO: after factoring this out page hangs when invalid date ranges and applying the filter const calculateStartAndEndSeconds = () => { const startFromSeconds = convertDateAndTimeStringsToSeconds( startFromDate, @@ -793,7 +794,11 @@ export default function ProcessInstanceListTable({ const saveAsReportComponent = () => { // TODO onSuccess reload/select the new report in the report search - const callback = (_: any) => {}; + const callback = (identifier: string) => { + processInstanceReportDidChange({ + selectedItem: { id: identifier, display_name: identifier }, + }); + }; const { valid, startFromSeconds, From e5f04d10a9937d17d2e99cf37c8add52416f1b1c Mon Sep 17 00:00:00 2001 From: jasquat Date: Tue, 29 Nov 2022 15:59:46 -0500 Subject: [PATCH 09/40] WIP more metadata reporting w/ burnettk --- spiffworkflow-backend/: | 1908 +++++++++++++++++ .../routes/process_api_blueprint.py | 49 +- .../services/authentication_service.py | 5 +- .../process_instance_report_service.py | 19 + .../integration/test_process_api.py | 7 +- 5 files changed, 1977 insertions(+), 11 deletions(-) create mode 100644 spiffworkflow-backend/: diff --git a/spiffworkflow-backend/: b/spiffworkflow-backend/: new file mode 100644 index 00000000..5516fdae --- /dev/null +++ b/spiffworkflow-backend/: @@ -0,0 +1,1908 @@ +"""APIs for dealing with process groups, process models, and process instances.""" +import json +import random +import string +import uuid +from typing import Any +from typing import Dict +from typing import Optional +from typing import TypedDict +from typing import Union + +import connexion # type: ignore +import flask.wrappers +import jinja2 +from spiffworkflow_backend.models.process_instance_metadata import ProcessInstanceMetadataModel +import werkzeug +from flask import Blueprint +from flask import current_app +from flask import g +from flask import jsonify +from flask import make_response +from flask import redirect +from flask import request +from flask.wrappers import Response +from flask_bpmn.api.api_error import ApiError +from flask_bpmn.models.db import db +from lxml import etree # type: ignore +from lxml.builder import ElementMaker # type: ignore +from SpiffWorkflow.task import Task as SpiffTask # type: ignore +from SpiffWorkflow.task import TaskState +from sqlalchemy import and_ +from sqlalchemy import asc +from sqlalchemy import desc +from sqlalchemy.orm import aliased, joinedload + +from spiffworkflow_backend.exceptions.process_entity_not_found_error import ( + ProcessEntityNotFoundError, +) +from spiffworkflow_backend.models.active_task import ActiveTaskModel +from spiffworkflow_backend.models.active_task_user import ActiveTaskUserModel +from spiffworkflow_backend.models.file import FileSchema +from spiffworkflow_backend.models.group import GroupModel +from spiffworkflow_backend.models.message_correlation import MessageCorrelationModel +from spiffworkflow_backend.models.message_instance import MessageInstanceModel +from spiffworkflow_backend.models.message_model 
import MessageModel +from spiffworkflow_backend.models.message_triggerable_process_model import ( + MessageTriggerableProcessModel, +) +from spiffworkflow_backend.models.principal import PrincipalModel +from spiffworkflow_backend.models.process_group import ProcessGroup +from spiffworkflow_backend.models.process_group import ProcessGroupSchema +from spiffworkflow_backend.models.process_instance import ProcessInstanceApiSchema +from spiffworkflow_backend.models.process_instance import ProcessInstanceModel +from spiffworkflow_backend.models.process_instance import ProcessInstanceModelSchema +from spiffworkflow_backend.models.process_instance import ProcessInstanceStatus +from spiffworkflow_backend.models.process_instance_report import ( + ProcessInstanceReportModel, +) +from spiffworkflow_backend.models.process_model import ProcessModelInfo +from spiffworkflow_backend.models.process_model import ProcessModelInfoSchema +from spiffworkflow_backend.models.secret_model import SecretModel +from spiffworkflow_backend.models.secret_model import SecretModelSchema +from spiffworkflow_backend.models.spec_reference import SpecReferenceCache +from spiffworkflow_backend.models.spec_reference import SpecReferenceSchema +from spiffworkflow_backend.models.spiff_logging import SpiffLoggingModel +from spiffworkflow_backend.models.spiff_step_details import SpiffStepDetailsModel +from spiffworkflow_backend.models.user import UserModel +from spiffworkflow_backend.models.user_group_assignment import UserGroupAssignmentModel +from spiffworkflow_backend.routes.user import verify_token +from spiffworkflow_backend.services.authorization_service import AuthorizationService +from spiffworkflow_backend.services.error_handling_service import ErrorHandlingService +from spiffworkflow_backend.services.git_service import GitService +from spiffworkflow_backend.services.message_service import MessageService +from spiffworkflow_backend.services.process_instance_processor import ( + ProcessInstanceProcessor, +) +from spiffworkflow_backend.services.process_instance_report_service import ( + ProcessInstanceReportFilter, +) +from spiffworkflow_backend.services.process_instance_report_service import ( + ProcessInstanceReportService, +) +from spiffworkflow_backend.services.process_instance_service import ( + ProcessInstanceService, +) +from spiffworkflow_backend.services.process_model_service import ProcessModelService +from spiffworkflow_backend.services.script_unit_test_runner import ScriptUnitTestRunner +from spiffworkflow_backend.services.secret_service import SecretService +from spiffworkflow_backend.services.service_task_service import ServiceTaskService +from spiffworkflow_backend.services.spec_file_service import SpecFileService +from spiffworkflow_backend.services.user_service import UserService + + +class TaskDataSelectOption(TypedDict): + """TaskDataSelectOption.""" + + value: str + label: str + + +class ReactJsonSchemaSelectOption(TypedDict): + """ReactJsonSchemaSelectOption.""" + + type: str + title: str + enum: list[str] + + +process_api_blueprint = Blueprint("process_api", __name__) + + +def status() -> flask.wrappers.Response: + """Status.""" + ProcessInstanceModel.query.filter().first() + return Response(json.dumps({"ok": True}), status=200, mimetype="application/json") + + +def permissions_check(body: Dict[str, Dict[str, list[str]]]) -> flask.wrappers.Response: + """Permissions_check.""" + if "requests_to_check" not in body: + raise ( + ApiError( + error_code="could_not_requests_to_check", + message="The key 
'requests_to_check' not found at root of request body.", + status_code=400, + ) + ) + + response_dict: dict[str, dict[str, bool]] = {} + requests_to_check = body["requests_to_check"] + + for target_uri, http_methods in requests_to_check.items(): + if target_uri not in response_dict: + response_dict[target_uri] = {} + + for http_method in http_methods: + permission_string = AuthorizationService.get_permission_from_http_method( + http_method + ) + if permission_string: + has_permission = AuthorizationService.user_has_permission( + user=g.user, + permission=permission_string, + target_uri=target_uri, + ) + response_dict[target_uri][http_method] = has_permission + + return make_response(jsonify({"results": response_dict}), 200) + + +def modify_process_model_id(process_model_id: str) -> str: + """Modify_process_model_id.""" + return process_model_id.replace("/", ":") + + +def un_modify_modified_process_model_id(modified_process_model_id: str) -> str: + """Un_modify_modified_process_model_id.""" + return modified_process_model_id.replace(":", "/") + + +def process_group_add(body: dict) -> flask.wrappers.Response: + """Add_process_group.""" + process_group = ProcessGroup(**body) + ProcessModelService.add_process_group(process_group) + return make_response(jsonify(process_group), 201) + + +def process_group_delete(modified_process_group_id: str) -> flask.wrappers.Response: + """Process_group_delete.""" + process_group_id = un_modify_modified_process_model_id(modified_process_group_id) + ProcessModelService().process_group_delete(process_group_id) + return Response(json.dumps({"ok": True}), status=200, mimetype="application/json") + + +def process_group_update( + modified_process_group_id: str, body: dict +) -> flask.wrappers.Response: + """Process Group Update.""" + body_include_list = ["display_name", "description"] + body_filtered = { + include_item: body[include_item] + for include_item in body_include_list + if include_item in body + } + + process_group_id = un_modify_modified_process_model_id(modified_process_group_id) + process_group = ProcessGroup(id=process_group_id, **body_filtered) + ProcessModelService.update_process_group(process_group) + return make_response(jsonify(process_group), 200) + + +def process_group_list( + process_group_identifier: Optional[str] = None, page: int = 1, per_page: int = 100 +) -> flask.wrappers.Response: + """Process_group_list.""" + if process_group_identifier is not None: + process_groups = ProcessModelService.get_process_groups( + process_group_identifier + ) + else: + process_groups = ProcessModelService.get_process_groups() + batch = ProcessModelService().get_batch( + items=process_groups, page=page, per_page=per_page + ) + pages = len(process_groups) // per_page + remainder = len(process_groups) % per_page + if remainder > 0: + pages += 1 + + response_json = { + "results": ProcessGroupSchema(many=True).dump(batch), + "pagination": { + "count": len(batch), + "total": len(process_groups), + "pages": pages, + }, + } + return Response(json.dumps(response_json), status=200, mimetype="application/json") + + +def process_group_show( + modified_process_group_id: str, +) -> Any: + """Process_group_show.""" + process_group_id = un_modify_modified_process_model_id(modified_process_group_id) + try: + process_group = ProcessModelService.get_process_group(process_group_id) + except ProcessEntityNotFoundError as exception: + raise ( + ApiError( + error_code="process_group_cannot_be_found", + message=f"Process group cannot be found: {process_group_id}", + 
status_code=400, + ) + ) from exception + + process_group.parent_groups = ProcessModelService.get_parent_group_array( + process_group.id + ) + return make_response(jsonify(process_group), 200) + + +def process_group_move( + modified_process_group_identifier: str, new_location: str +) -> flask.wrappers.Response: + """Process_group_move.""" + original_process_group_id = un_modify_modified_process_model_id( + modified_process_group_identifier + ) + new_process_group = ProcessModelService().process_group_move( + original_process_group_id, new_location + ) + return make_response(jsonify(new_process_group), 201) + + +def process_model_create( + modified_process_group_id: str, body: Dict[str, Union[str, bool, int]] +) -> flask.wrappers.Response: + """Process_model_create.""" + process_model_info = ProcessModelInfoSchema().load(body) + if modified_process_group_id is None: + raise ApiError( + error_code="process_group_id_not_specified", + message="Process Model could not be created when process_group_id path param is unspecified", + status_code=400, + ) + if process_model_info is None: + raise ApiError( + error_code="process_model_could_not_be_created", + message=f"Process Model could not be created from given body: {body}", + status_code=400, + ) + + unmodified_process_group_id = un_modify_modified_process_model_id( + modified_process_group_id + ) + process_group = ProcessModelService.get_process_group(unmodified_process_group_id) + if process_group is None: + raise ApiError( + error_code="process_model_could_not_be_created", + message=f"Process Model could not be created from given body because Process Group could not be found: {body}", + status_code=400, + ) + + ProcessModelService.add_process_model(process_model_info) + return Response( + json.dumps(ProcessModelInfoSchema().dump(process_model_info)), + status=201, + mimetype="application/json", + ) + + +def process_model_delete( + modified_process_model_identifier: str, +) -> flask.wrappers.Response: + """Process_model_delete.""" + process_model_identifier = modified_process_model_identifier.replace(":", "/") + # process_model_identifier = f"{process_group_id}/{process_model_id}" + ProcessModelService().process_model_delete(process_model_identifier) + return Response(json.dumps({"ok": True}), status=200, mimetype="application/json") + + +def process_model_update( + modified_process_model_identifier: str, body: Dict[str, Union[str, bool, int]] +) -> Any: + """Process_model_update.""" + process_model_identifier = modified_process_model_identifier.replace(":", "/") + body_include_list = [ + "display_name", + "primary_file_name", + "primary_process_id", + "description", + ] + body_filtered = { + include_item: body[include_item] + for include_item in body_include_list + if include_item in body + } + + # process_model_identifier = f"{process_group_id}/{process_model_id}" + process_model = get_process_model(process_model_identifier) + ProcessModelService.update_process_model(process_model, body_filtered) + return ProcessModelInfoSchema().dump(process_model) + + +def process_model_show(modified_process_model_identifier: str) -> Any: + """Process_model_show.""" + process_model_identifier = modified_process_model_identifier.replace(":", "/") + # process_model_identifier = f"{process_group_id}/{process_model_id}" + process_model = get_process_model(process_model_identifier) + # TODO: Temporary. 
Should not need the next line once models have correct ids + # process_model.id = process_model_identifier + files = sorted(SpecFileService.get_files(process_model)) + process_model.files = files + for file in process_model.files: + file.references = SpecFileService.get_references_for_file(file, process_model) + + process_model.parent_groups = ProcessModelService.get_parent_group_array( + process_model.id + ) + return make_response(jsonify(process_model), 200) + + +def process_model_move( + modified_process_model_identifier: str, new_location: str +) -> flask.wrappers.Response: + """Process_model_move.""" + original_process_model_id = un_modify_modified_process_model_id( + modified_process_model_identifier + ) + new_process_model = ProcessModelService().process_model_move( + original_process_model_id, new_location + ) + return make_response(jsonify(new_process_model), 201) + + +def process_model_list( + process_group_identifier: Optional[str] = None, + recursive: Optional[bool] = False, + filter_runnable_by_user: Optional[bool] = False, + page: int = 1, + per_page: int = 100, +) -> flask.wrappers.Response: + """Process model list!""" + process_models = ProcessModelService.get_process_models( + process_group_id=process_group_identifier, + recursive=recursive, + filter_runnable_by_user=filter_runnable_by_user, + ) + batch = ProcessModelService().get_batch( + process_models, page=page, per_page=per_page + ) + pages = len(process_models) // per_page + remainder = len(process_models) % per_page + if remainder > 0: + pages += 1 + response_json = { + "results": ProcessModelInfoSchema(many=True).dump(batch), + "pagination": { + "count": len(batch), + "total": len(process_models), + "pages": pages, + }, + } + return Response(json.dumps(response_json), status=200, mimetype="application/json") + + +def process_list() -> Any: + """Returns a list of all known processes. + + This includes processes that are not the + primary process - helpful for finding possible call activities. 
+ """ + references = SpecReferenceCache.query.filter_by(type="process").all() + return SpecReferenceSchema(many=True).dump(references) + + +def get_file(modified_process_model_id: str, file_name: str) -> Any: + """Get_file.""" + process_model_identifier = modified_process_model_id.replace(":", "/") + process_model = get_process_model(process_model_identifier) + files = SpecFileService.get_files(process_model, file_name) + if len(files) == 0: + raise ApiError( + error_code="unknown file", + message=f"No information exists for file {file_name}" + f" it does not exist in workflow {process_model_identifier}.", + status_code=404, + ) + + file = files[0] + file_contents = SpecFileService.get_data(process_model, file.name) + file.file_contents = file_contents + file.process_model_id = process_model.id + # file.process_group_id = process_model.process_group_id + return FileSchema().dump(file) + + +def process_model_file_update( + modified_process_model_id: str, file_name: str +) -> flask.wrappers.Response: + """Process_model_file_update.""" + process_model_identifier = modified_process_model_id.replace(":", "/") + # process_model_identifier = f"{process_group_id}/{process_model_id}" + process_model = get_process_model(process_model_identifier) + + request_file = get_file_from_request() + request_file_contents = request_file.stream.read() + if not request_file_contents: + raise ApiError( + error_code="file_contents_empty", + message="Given request file does not have any content", + status_code=400, + ) + + SpecFileService.update_file(process_model, file_name, request_file_contents) + + if current_app.config["GIT_COMMIT_ON_SAVE"]: + git_output = GitService.commit( + message=f"User: {g.user.username} clicked save for {process_model_identifier}/{file_name}" + ) + current_app.logger.info(f"git output: {git_output}") + else: + current_app.logger.info("Git commit on save is disabled") + + return Response(json.dumps({"ok": True}), status=200, mimetype="application/json") + + +def process_model_file_delete( + modified_process_model_id: str, file_name: str +) -> flask.wrappers.Response: + """Process_model_file_delete.""" + process_model_identifier = modified_process_model_id.replace(":", "/") + process_model = get_process_model(process_model_identifier) + try: + SpecFileService.delete_file(process_model, file_name) + except FileNotFoundError as exception: + raise ( + ApiError( + error_code="process_model_file_cannot_be_found", + message=f"Process model file cannot be found: {file_name}", + status_code=400, + ) + ) from exception + + return Response(json.dumps({"ok": True}), status=200, mimetype="application/json") + + +def add_file(modified_process_model_id: str) -> flask.wrappers.Response: + """Add_file.""" + process_model_identifier = modified_process_model_id.replace(":", "/") + process_model = get_process_model(process_model_identifier) + request_file = get_file_from_request() + if not request_file.filename: + raise ApiError( + error_code="could_not_get_filename", + message="Could not get filename from request", + status_code=400, + ) + + file = SpecFileService.add_file( + process_model, request_file.filename, request_file.stream.read() + ) + file_contents = SpecFileService.get_data(process_model, file.name) + file.file_contents = file_contents + file.process_model_id = process_model.id + return Response( + json.dumps(FileSchema().dump(file)), status=201, mimetype="application/json" + ) + + +def process_instance_create(modified_process_model_id: str) -> flask.wrappers.Response: + 
"""Create_process_instance.""" + process_model_identifier = un_modify_modified_process_model_id( + modified_process_model_id + ) + process_instance = ( + ProcessInstanceService.create_process_instance_from_process_model_identifier( + process_model_identifier, g.user + ) + ) + return Response( + json.dumps(ProcessInstanceModelSchema().dump(process_instance)), + status=201, + mimetype="application/json", + ) + + +def process_instance_run( + modified_process_model_identifier: str, + process_instance_id: int, + do_engine_steps: bool = True, +) -> flask.wrappers.Response: + """Process_instance_run.""" + process_instance = ProcessInstanceService().get_process_instance( + process_instance_id + ) + processor = ProcessInstanceProcessor(process_instance) + + if do_engine_steps: + try: + processor.do_engine_steps() + except ApiError as e: + ErrorHandlingService().handle_error(processor, e) + raise e + except Exception as e: + ErrorHandlingService().handle_error(processor, e) + task = processor.bpmn_process_instance.last_task + raise ApiError.from_task( + error_code="unknown_exception", + message=f"An unknown error occurred. Original error: {e}", + status_code=400, + task=task, + ) from e + processor.save() + + if not current_app.config["RUN_BACKGROUND_SCHEDULER"]: + MessageService.process_message_instances() + + process_instance_api = ProcessInstanceService.processor_to_process_instance_api( + processor + ) + process_instance_data = processor.get_data() + process_instance_metadata = ProcessInstanceApiSchema().dump(process_instance_api) + process_instance_metadata["data"] = process_instance_data + return Response( + json.dumps(process_instance_metadata), status=200, mimetype="application/json" + ) + + +def process_instance_terminate( + process_instance_id: int, +) -> flask.wrappers.Response: + """Process_instance_run.""" + process_instance = ProcessInstanceService().get_process_instance( + process_instance_id + ) + processor = ProcessInstanceProcessor(process_instance) + processor.terminate() + return Response(json.dumps({"ok": True}), status=200, mimetype="application/json") + + +def process_instance_suspend( + process_instance_id: int, +) -> flask.wrappers.Response: + """Process_instance_suspend.""" + process_instance = ProcessInstanceService().get_process_instance( + process_instance_id + ) + processor = ProcessInstanceProcessor(process_instance) + processor.suspend() + return Response(json.dumps({"ok": True}), status=200, mimetype="application/json") + + +def process_instance_resume( + process_instance_id: int, +) -> flask.wrappers.Response: + """Process_instance_resume.""" + process_instance = ProcessInstanceService().get_process_instance( + process_instance_id + ) + processor = ProcessInstanceProcessor(process_instance) + processor.resume() + return Response(json.dumps({"ok": True}), status=200, mimetype="application/json") + + +def process_instance_log_list( + process_instance_id: int, + page: int = 1, + per_page: int = 100, +) -> flask.wrappers.Response: + """Process_instance_log_list.""" + # to make sure the process instance exists + process_instance = find_process_instance_by_id_or_raise(process_instance_id) + + logs = ( + SpiffLoggingModel.query.filter( + SpiffLoggingModel.process_instance_id == process_instance.id + ) + .order_by(SpiffLoggingModel.timestamp.desc()) # type: ignore + .join( + UserModel, UserModel.id == SpiffLoggingModel.current_user_id, isouter=True + ) # isouter since if we don't have a user, we still want the log + .add_columns( + UserModel.username, + ) + 
.paginate(page=page, per_page=per_page, error_out=False) + ) + + response_json = { + "results": logs.items, + "pagination": { + "count": len(logs.items), + "total": logs.total, + "pages": logs.pages, + }, + } + + return make_response(jsonify(response_json), 200) + + +def message_instance_list( + process_instance_id: Optional[int] = None, + page: int = 1, + per_page: int = 100, +) -> flask.wrappers.Response: + """Message_instance_list.""" + # to make sure the process instance exists + message_instances_query = MessageInstanceModel.query + + if process_instance_id: + message_instances_query = message_instances_query.filter_by( + process_instance_id=process_instance_id + ) + + message_instances = ( + message_instances_query.order_by( + MessageInstanceModel.created_at_in_seconds.desc(), # type: ignore + MessageInstanceModel.id.desc(), # type: ignore + ) + .join(MessageModel, MessageModel.id == MessageInstanceModel.message_model_id) + .join(ProcessInstanceModel) + .add_columns( + MessageModel.identifier.label("message_identifier"), + ProcessInstanceModel.process_model_identifier, + ) + .paginate(page=page, per_page=per_page, error_out=False) + ) + + for message_instance in message_instances: + message_correlations: dict = {} + for ( + mcmi + ) in ( + message_instance.MessageInstanceModel.message_correlations_message_instances + ): + mc = MessageCorrelationModel.query.filter_by( + id=mcmi.message_correlation_id + ).all() + for m in mc: + if m.name not in message_correlations: + message_correlations[m.name] = {} + message_correlations[m.name][ + m.message_correlation_property.identifier + ] = m.value + message_instance.MessageInstanceModel.message_correlations = ( + message_correlations + ) + + response_json = { + "results": message_instances.items, + "pagination": { + "count": len(message_instances.items), + "total": message_instances.total, + "pages": message_instances.pages, + }, + } + + return make_response(jsonify(response_json), 200) + + +# body: { +# payload: dict, +# process_instance_id: Optional[int], +# } +def message_start( + message_identifier: str, + body: Dict[str, Any], +) -> flask.wrappers.Response: + """Message_start.""" + message_model = MessageModel.query.filter_by(identifier=message_identifier).first() + if message_model is None: + raise ( + ApiError( + error_code="unknown_message", + message=f"Could not find message with identifier: {message_identifier}", + status_code=404, + ) + ) + + if "payload" not in body: + raise ( + ApiError( + error_code="missing_payload", + message="Body is missing payload.", + status_code=400, + ) + ) + + process_instance = None + if "process_instance_id" in body: + # to make sure we have a valid process_instance_id + process_instance = find_process_instance_by_id_or_raise( + body["process_instance_id"] + ) + + message_instance = MessageInstanceModel.query.filter_by( + process_instance_id=process_instance.id, + message_model_id=message_model.id, + message_type="receive", + status="ready", + ).first() + if message_instance is None: + raise ( + ApiError( + error_code="cannot_find_waiting_message", + message=f"Could not find waiting message for identifier {message_identifier} " + f"and process instance {process_instance.id}", + status_code=400, + ) + ) + MessageService.process_message_receive( + message_instance, message_model.name, body["payload"] + ) + + else: + message_triggerable_process_model = ( + MessageTriggerableProcessModel.query.filter_by( + message_model_id=message_model.id + ).first() + ) + + if message_triggerable_process_model is None: + 
raise ( + ApiError( + error_code="cannot_start_message", + message=f"Message with identifier cannot be start with message: {message_identifier}", + status_code=400, + ) + ) + + process_instance = MessageService.process_message_triggerable_process_model( + message_triggerable_process_model, + message_model.name, + body["payload"], + g.user, + ) + + return Response( + json.dumps(ProcessInstanceModelSchema().dump(process_instance)), + status=200, + mimetype="application/json", + ) + + +def process_instance_list( + process_model_identifier: Optional[str] = None, + page: int = 1, + per_page: int = 100, + start_from: Optional[int] = None, + start_to: Optional[int] = None, + end_from: Optional[int] = None, + end_to: Optional[int] = None, + process_status: Optional[str] = None, + initiated_by_me: Optional[bool] = None, + with_tasks_completed_by_me: Optional[bool] = None, + with_tasks_completed_by_my_group: Optional[bool] = None, + user_filter: Optional[bool] = False, + report_identifier: Optional[str] = None, +) -> flask.wrappers.Response: + """Process_instance_list.""" + process_instance_report = ProcessInstanceReportService.report_with_identifier( + g.user, report_identifier + ) + + if user_filter: + report_filter = ProcessInstanceReportFilter( + process_model_identifier, + start_from, + start_to, + end_from, + end_to, + process_status.split(",") if process_status else None, + initiated_by_me, + with_tasks_completed_by_me, + with_tasks_completed_by_my_group, + ) + else: + report_filter = ( + ProcessInstanceReportService.filter_from_metadata_with_overrides( + process_instance_report, + process_model_identifier, + start_from, + start_to, + end_from, + end_to, + process_status, + initiated_by_me, + with_tasks_completed_by_me, + with_tasks_completed_by_my_group, + ) + ) + + # process_model_identifier = un_modify_modified_process_model_id(modified_process_model_identifier) + process_instance_query = ProcessInstanceModel.query + # Always join that hot user table for good performance at serialization time. + process_instance_query = process_instance_query.options( + joinedload(ProcessInstanceModel.process_initiator) + ) + + if report_filter.process_model_identifier is not None: + process_model = get_process_model( + f"{report_filter.process_model_identifier}", + ) + + process_instance_query = process_instance_query.filter_by( + process_model_identifier=process_model.id + ) + + # this can never happen. obviously the class has the columns it defines. this is just to appease mypy. 
+ if ( + ProcessInstanceModel.start_in_seconds is None + or ProcessInstanceModel.end_in_seconds is None + ): + raise ( + ApiError( + error_code="unexpected_condition", + message="Something went very wrong", + status_code=500, + ) + ) + + if report_filter.start_from is not None: + process_instance_query = process_instance_query.filter( + ProcessInstanceModel.start_in_seconds >= report_filter.start_from + ) + if report_filter.start_to is not None: + process_instance_query = process_instance_query.filter( + ProcessInstanceModel.start_in_seconds <= report_filter.start_to + ) + if report_filter.end_from is not None: + process_instance_query = process_instance_query.filter( + ProcessInstanceModel.end_in_seconds >= report_filter.end_from + ) + if report_filter.end_to is not None: + process_instance_query = process_instance_query.filter( + ProcessInstanceModel.end_in_seconds <= report_filter.end_to + ) + if report_filter.process_status is not None: + process_instance_query = process_instance_query.filter( + ProcessInstanceModel.status.in_(report_filter.process_status) # type: ignore + ) + + if report_filter.initiated_by_me is True: + process_instance_query = process_instance_query.filter( + ProcessInstanceModel.status.in_(["complete", "error", "terminated"]) # type: ignore + ) + process_instance_query = process_instance_query.filter_by( + process_initiator=g.user + ) + + # TODO: not sure if this is exactly what is wanted + if report_filter.with_tasks_completed_by_me is True: + process_instance_query = process_instance_query.filter( + ProcessInstanceModel.status.in_(["complete", "error", "terminated"]) # type: ignore + ) + # process_instance_query = process_instance_query.join(UserModel, UserModel.id == ProcessInstanceModel.process_initiator_id) + # process_instance_query = process_instance_query.add_columns(UserModel.username) + # search for process_instance.UserModel.username in this file for more details about why adding columns is annoying. 
+ + process_instance_query = process_instance_query.filter( + ProcessInstanceModel.process_initiator_id != g.user.id + ) + process_instance_query = process_instance_query.join( + SpiffStepDetailsModel, + ProcessInstanceModel.id == SpiffStepDetailsModel.process_instance_id, + ) + process_instance_query = process_instance_query.join( + SpiffLoggingModel, + ProcessInstanceModel.id == SpiffLoggingModel.process_instance_id, + ) + process_instance_query = process_instance_query.filter( + SpiffLoggingModel.message.contains("COMPLETED") # type: ignore + ) + process_instance_query = process_instance_query.filter( + SpiffLoggingModel.spiff_step == SpiffStepDetailsModel.spiff_step + ) + process_instance_query = process_instance_query.filter( + SpiffStepDetailsModel.completed_by_user_id == g.user.id + ) + + if report_filter.with_tasks_completed_by_my_group is True: + process_instance_query = process_instance_query.filter( + ProcessInstanceModel.status.in_(["complete", "error", "terminated"]) # type: ignore + ) + process_instance_query = process_instance_query.join( + SpiffStepDetailsModel, + ProcessInstanceModel.id == SpiffStepDetailsModel.process_instance_id, + ) + process_instance_query = process_instance_query.join( + SpiffLoggingModel, + ProcessInstanceModel.id == SpiffLoggingModel.process_instance_id, + ) + process_instance_query = process_instance_query.filter( + SpiffLoggingModel.message.contains("COMPLETED") # type: ignore + ) + process_instance_query = process_instance_query.filter( + SpiffLoggingModel.spiff_step == SpiffStepDetailsModel.spiff_step + ) + process_instance_query = process_instance_query.join( + GroupModel, + GroupModel.id == SpiffStepDetailsModel.lane_assignment_id, + ) + process_instance_query = process_instance_query.join( + UserGroupAssignmentModel, + UserGroupAssignmentModel.group_id == GroupModel.id, + ) + process_instance_query = process_instance_query.filter( + UserGroupAssignmentModel.user_id == g.user.id + ) + + # userSkillF = aliased(UserSkill) + # userSkillI = aliased(UserSkill) + + import pdb; pdb.set_trace() + for column in process_instance_report.report_metadata['columns']: + print(f"column: {column['accessor']}") + # process_instance_query = process_instance_query.outerjoin(ProcessInstanceMetadataModel, ProcessInstanceModel.id == ProcessInstanceMetadataModel.process_instance_id, ProcessInstanceMetadataModel.key == column['accessor']) + instance_metadata_alias = alias(ProcessInstanceMetadataModel) + process_instance_query = ( + process_instance_query.outerjoin(instance_metadata_alias, ProcessInstanceModel.id == instance_metadata_alias.process_instance_id) + .add_column(ProcessInstanceMetadataModel.value.label(column['accessor'])) + ) + import pdb; pdb.set_trace() + + process_instances = ( + process_instance_query.group_by(ProcessInstanceModel.id) + .order_by( + ProcessInstanceModel.start_in_seconds.desc(), ProcessInstanceModel.id.desc() # type: ignore + ) + .paginate(page=page, per_page=per_page, error_out=False) + ) + import pdb; pdb.set_trace() + + # def awesome_serialize(process_instance) + # dict_thing = process_instance.serialize + # + # # add columns since we have access to columns here + # dict_thing['awesome'] = 'awesome' + # + # return dict_thing + + results = list( + map( + ProcessInstanceService.serialize_flat_with_task_data, + process_instances.items, + ) + ) + report_metadata = process_instance_report.report_metadata + + response_json = { + "report_identifier": process_instance_report.identifier, + "report_metadata": report_metadata, + "results": 
results, + "filters": report_filter.to_dict(), + "pagination": { + "count": len(results), + "total": process_instances.total, + "pages": process_instances.pages, + }, + } + + return make_response(jsonify(response_json), 200) + + +def process_instance_show( + modified_process_model_identifier: str, process_instance_id: int +) -> flask.wrappers.Response: + """Create_process_instance.""" + process_model_identifier = modified_process_model_identifier.replace(":", "/") + process_instance = find_process_instance_by_id_or_raise(process_instance_id) + current_version_control_revision = GitService.get_current_revision() + process_model = get_process_model(process_model_identifier) + + if process_model.primary_file_name: + if ( + process_instance.bpmn_version_control_identifier + == current_version_control_revision + ): + bpmn_xml_file_contents = SpecFileService.get_data( + process_model, process_model.primary_file_name + ) + else: + bpmn_xml_file_contents = GitService.get_instance_file_contents_for_revision( + process_model, process_instance.bpmn_version_control_identifier + ) + process_instance.bpmn_xml_file_contents = bpmn_xml_file_contents + + return make_response(jsonify(process_instance), 200) + + +def process_instance_delete(process_instance_id: int) -> flask.wrappers.Response: + """Create_process_instance.""" + process_instance = find_process_instance_by_id_or_raise(process_instance_id) + + # (Pdb) db.session.delete + # > + db.session.query(SpiffLoggingModel).filter_by( + process_instance_id=process_instance.id + ).delete() + db.session.query(SpiffStepDetailsModel).filter_by( + process_instance_id=process_instance.id + ).delete() + db.session.delete(process_instance) + db.session.commit() + return Response(json.dumps({"ok": True}), status=200, mimetype="application/json") + + +def process_instance_report_list( + page: int = 1, per_page: int = 100 +) -> flask.wrappers.Response: + """Process_instance_report_list.""" + process_instance_reports = ProcessInstanceReportModel.query.filter_by( + created_by_id=g.user.id, + ).all() + + return make_response(jsonify(process_instance_reports), 200) + + +def process_instance_report_create(body: Dict[str, Any]) -> flask.wrappers.Response: + """Process_instance_report_create.""" + ProcessInstanceReportModel.create_report( + identifier=body["identifier"], + user=g.user, + report_metadata=body["report_metadata"], + ) + + return Response(json.dumps({"ok": True}), status=200, mimetype="application/json") + + +def process_instance_report_update( + report_identifier: str, + body: Dict[str, Any], +) -> flask.wrappers.Response: + """Process_instance_report_create.""" + process_instance_report = ProcessInstanceReportModel.query.filter_by( + identifier=report_identifier, + created_by_id=g.user.id, + ).first() + if process_instance_report is None: + raise ApiError( + error_code="unknown_process_instance_report", + message="Unknown process instance report", + status_code=404, + ) + + process_instance_report.report_metadata = body["report_metadata"] + db.session.commit() + + return Response(json.dumps({"ok": True}), status=200, mimetype="application/json") + + +def process_instance_report_delete( + report_identifier: str, +) -> flask.wrappers.Response: + """Process_instance_report_create.""" + process_instance_report = ProcessInstanceReportModel.query.filter_by( + identifier=report_identifier, + created_by_id=g.user.id, + ).first() + if process_instance_report is None: + raise ApiError( + error_code="unknown_process_instance_report", + message="Unknown process instance 
report", + status_code=404, + ) + + db.session.delete(process_instance_report) + db.session.commit() + + return Response(json.dumps({"ok": True}), status=200, mimetype="application/json") + + +def service_tasks_show() -> flask.wrappers.Response: + """Service_tasks_show.""" + available_connectors = ServiceTaskService.available_connectors() + print(available_connectors) + + return Response( + json.dumps(available_connectors), status=200, mimetype="application/json" + ) + + +def authentication_list() -> flask.wrappers.Response: + """Authentication_list.""" + available_authentications = ServiceTaskService.authentication_list() + response_json = { + "results": available_authentications, + "connector_proxy_base_url": current_app.config["CONNECTOR_PROXY_URL"], + "redirect_url": f"{current_app.config['SPIFFWORKFLOW_BACKEND_URL']}/v1.0/authentication_callback", + } + + return Response(json.dumps(response_json), status=200, mimetype="application/json") + + +def authentication_callback( + service: str, + auth_method: str, +) -> werkzeug.wrappers.Response: + """Authentication_callback.""" + verify_token(request.args.get("token"), force_run=True) + response = request.args["response"] + SecretService().update_secret( + f"{service}/{auth_method}", response, g.user.id, create_if_not_exists=True + ) + return redirect( + f"{current_app.config['SPIFFWORKFLOW_FRONTEND_URL']}/admin/configuration" + ) + + +def process_instance_report_show( + report_identifier: str, + page: int = 1, + per_page: int = 100, +) -> flask.wrappers.Response: + """Process_instance_list.""" + process_instances = ProcessInstanceModel.query.order_by( # .filter_by(process_model_identifier=process_model.id) + ProcessInstanceModel.start_in_seconds.desc(), ProcessInstanceModel.id.desc() # type: ignore + ).paginate( + page=page, per_page=per_page, error_out=False + ) + + process_instance_report = ProcessInstanceReportModel.query.filter_by( + identifier=report_identifier, + created_by_id=g.user.id, + ).first() + if process_instance_report is None: + raise ApiError( + error_code="unknown_process_instance_report", + message="Unknown process instance report", + status_code=404, + ) + + substitution_variables = request.args.to_dict() + result_dict = process_instance_report.generate_report( + process_instances.items, substitution_variables + ) + + # update this if we go back to a database query instead of filtering in memory + result_dict["pagination"] = { + "count": len(result_dict["results"]), + "total": len(result_dict["results"]), + "pages": 1, + } + + return Response(json.dumps(result_dict), status=200, mimetype="application/json") + + +# TODO: see comment for before_request +# @process_api_blueprint.route("/v1.0/tasks", methods=["GET"]) +def task_list_my_tasks(page: int = 1, per_page: int = 100) -> flask.wrappers.Response: + """Task_list_my_tasks.""" + principal = find_principal_or_raise() + active_tasks = ( + ActiveTaskModel.query.order_by(desc(ActiveTaskModel.id)) # type: ignore + .join(ProcessInstanceModel) + .join(ActiveTaskUserModel) + .filter_by(user_id=principal.user_id) + # just need this add_columns to add the process_model_identifier. Then add everything back that was removed. 
+ .add_columns( + ProcessInstanceModel.process_model_identifier, + ProcessInstanceModel.process_model_display_name, + ProcessInstanceModel.status, + ActiveTaskModel.task_name, + ActiveTaskModel.task_title, + ActiveTaskModel.task_type, + ActiveTaskModel.task_status, + ActiveTaskModel.task_id, + ActiveTaskModel.id, + ActiveTaskModel.process_model_display_name, + ActiveTaskModel.process_instance_id, + ) + .paginate(page=page, per_page=per_page, error_out=False) + ) + tasks = [ActiveTaskModel.to_task(active_task) for active_task in active_tasks.items] + + response_json = { + "results": tasks, + "pagination": { + "count": len(active_tasks.items), + "total": active_tasks.total, + "pages": active_tasks.pages, + }, + } + + return make_response(jsonify(response_json), 200) + + +def task_list_for_my_open_processes( + page: int = 1, per_page: int = 100 +) -> flask.wrappers.Response: + """Task_list_for_my_open_processes.""" + return get_tasks(page=page, per_page=per_page) + + +def task_list_for_me(page: int = 1, per_page: int = 100) -> flask.wrappers.Response: + """Task_list_for_processes_started_by_others.""" + return get_tasks( + processes_started_by_user=False, + has_lane_assignment_id=False, + page=page, + per_page=per_page, + ) + + +def task_list_for_my_groups( + page: int = 1, per_page: int = 100 +) -> flask.wrappers.Response: + """Task_list_for_processes_started_by_others.""" + return get_tasks(processes_started_by_user=False, page=page, per_page=per_page) + + +def get_tasks( + processes_started_by_user: bool = True, + has_lane_assignment_id: bool = True, + page: int = 1, + per_page: int = 100, +) -> flask.wrappers.Response: + """Get_tasks.""" + user_id = g.user.id + + # use distinct to ensure we only get one row per active task otherwise + # we can get back multiple for the same active task row which throws off + # pagination later on + # https://stackoverflow.com/q/34582014/6090676 + active_tasks_query = ( + ActiveTaskModel.query.distinct() + .outerjoin(GroupModel, GroupModel.id == ActiveTaskModel.lane_assignment_id) + .join(ProcessInstanceModel) + .join(UserModel, UserModel.id == ProcessInstanceModel.process_initiator_id) + ) + + if processes_started_by_user: + active_tasks_query = active_tasks_query.filter( + ProcessInstanceModel.process_initiator_id == user_id + ).outerjoin( + ActiveTaskUserModel, + and_( + ActiveTaskUserModel.user_id == user_id, + ActiveTaskModel.id == ActiveTaskUserModel.active_task_id, + ), + ) + else: + active_tasks_query = active_tasks_query.filter( + ProcessInstanceModel.process_initiator_id != user_id + ).join( + ActiveTaskUserModel, + and_( + ActiveTaskUserModel.user_id == user_id, + ActiveTaskModel.id == ActiveTaskUserModel.active_task_id, + ), + ) + if has_lane_assignment_id: + active_tasks_query = active_tasks_query.filter( + ActiveTaskModel.lane_assignment_id.is_not(None) # type: ignore + ) + else: + active_tasks_query = active_tasks_query.filter(ActiveTaskModel.lane_assignment_id.is_(None)) # type: ignore + + active_tasks = active_tasks_query.add_columns( + ProcessInstanceModel.process_model_identifier, + ProcessInstanceModel.status.label("process_instance_status"), # type: ignore + ProcessInstanceModel.updated_at_in_seconds, + ProcessInstanceModel.created_at_in_seconds, + UserModel.username, + GroupModel.identifier.label("group_identifier"), + ActiveTaskModel.task_name, + ActiveTaskModel.task_title, + ActiveTaskModel.process_model_display_name, + ActiveTaskModel.process_instance_id, + ActiveTaskUserModel.user_id.label("current_user_is_potential_owner"), + 
).paginate(page=page, per_page=per_page, error_out=False) + + response_json = { + "results": active_tasks.items, + "pagination": { + "count": len(active_tasks.items), + "total": active_tasks.total, + "pages": active_tasks.pages, + }, + } + return make_response(jsonify(response_json), 200) + + +def process_instance_task_list( + modified_process_model_id: str, + process_instance_id: int, + all_tasks: bool = False, + spiff_step: int = 0, +) -> flask.wrappers.Response: + """Process_instance_task_list.""" + process_instance = find_process_instance_by_id_or_raise(process_instance_id) + + if spiff_step > 0: + step_detail = ( + db.session.query(SpiffStepDetailsModel) + .filter( + SpiffStepDetailsModel.process_instance_id == process_instance.id, + SpiffStepDetailsModel.spiff_step == spiff_step, + ) + .first() + ) + if step_detail is not None and process_instance.bpmn_json is not None: + bpmn_json = json.loads(process_instance.bpmn_json) + bpmn_json["tasks"] = step_detail.task_json + process_instance.bpmn_json = json.dumps(bpmn_json) + + processor = ProcessInstanceProcessor(process_instance) + + spiff_tasks = None + if all_tasks: + spiff_tasks = processor.bpmn_process_instance.get_tasks(TaskState.ANY_MASK) + else: + spiff_tasks = processor.get_all_user_tasks() + + tasks = [] + for spiff_task in spiff_tasks: + task = ProcessInstanceService.spiff_task_to_api_task(spiff_task) + task.data = spiff_task.data + tasks.append(task) + + return make_response(jsonify(tasks), 200) + + +def task_show(process_instance_id: int, task_id: str) -> flask.wrappers.Response: + """Task_show.""" + process_instance = find_process_instance_by_id_or_raise(process_instance_id) + + if process_instance.status == ProcessInstanceStatus.suspended.value: + raise ApiError( + error_code="error_suspended", + message="The process instance is suspended", + status_code=400, + ) + + process_model = get_process_model( + process_instance.process_model_identifier, + ) + + form_schema_file_name = "" + form_ui_schema_file_name = "" + spiff_task = get_spiff_task_from_process_instance(task_id, process_instance) + extensions = spiff_task.task_spec.extensions + + if "properties" in extensions: + properties = extensions["properties"] + if "formJsonSchemaFilename" in properties: + form_schema_file_name = properties["formJsonSchemaFilename"] + if "formUiSchemaFilename" in properties: + form_ui_schema_file_name = properties["formUiSchemaFilename"] + task = ProcessInstanceService.spiff_task_to_api_task(spiff_task) + task.data = spiff_task.data + task.process_model_display_name = process_model.display_name + task.process_model_identifier = process_model.id + process_model_with_form = process_model + + if task.type == "User Task": + if not form_schema_file_name: + raise ( + ApiError( + error_code="missing_form_file", + message=f"Cannot find a form file for process_instance_id: {process_instance_id}, task_id: {task_id}", + status_code=400, + ) + ) + + form_contents = prepare_form_data( + form_schema_file_name, + task.data, + process_model_with_form, + ) + + try: + # form_contents is a str + form_dict = json.loads(form_contents) + except Exception as exception: + raise ( + ApiError( + error_code="error_loading_form", + message=f"Could not load form schema from: {form_schema_file_name}. 
Error was: {str(exception)}", + status_code=400, + ) + ) from exception + + if task.data: + _update_form_schema_with_task_data_as_needed(form_dict, task.data) + + if form_contents: + task.form_schema = form_dict + + if form_ui_schema_file_name: + ui_form_contents = prepare_form_data( + form_ui_schema_file_name, + task.data, + process_model_with_form, + ) + if ui_form_contents: + task.form_ui_schema = ui_form_contents + + if task.properties and task.data and "instructionsForEndUser" in task.properties: + print( + f"task.properties['instructionsForEndUser']: {task.properties['instructionsForEndUser']}" + ) + if task.properties["instructionsForEndUser"]: + task.properties["instructionsForEndUser"] = render_jinja_template( + task.properties["instructionsForEndUser"], task.data + ) + return make_response(jsonify(task), 200) + + +def task_submit( + process_instance_id: int, + task_id: str, + body: Dict[str, Any], + terminate_loop: bool = False, +) -> flask.wrappers.Response: + """Task_submit_user_data.""" + principal = find_principal_or_raise() + process_instance = find_process_instance_by_id_or_raise(process_instance_id) + + processor = ProcessInstanceProcessor(process_instance) + spiff_task = get_spiff_task_from_process_instance( + task_id, process_instance, processor=processor + ) + AuthorizationService.assert_user_can_complete_spiff_task( + process_instance.id, spiff_task, principal.user + ) + + if spiff_task.state != TaskState.READY: + raise ( + ApiError( + error_code="invalid_state", + message="You may not update a task unless it is in the READY state.", + status_code=400, + ) + ) + + if terminate_loop and spiff_task.is_looping(): + spiff_task.terminate_loop() + + active_task = ActiveTaskModel.query.filter_by( + process_instance_id=process_instance_id, task_id=task_id + ).first() + if active_task is None: + raise ( + ApiError( + error_code="no_active_task", + message="Cannot find an active task with task id '{task_id}' for process instance {process_instance_id}.", + status_code=500, + ) + ) + + ProcessInstanceService.complete_form_task( + processor=processor, + spiff_task=spiff_task, + data=body, + user=g.user, + active_task=active_task, + ) + + # If we need to update all tasks, then get the next ready task and if it a multi-instance with the same + # task spec, complete that form as well. 
+ # if update_all: + # last_index = spiff_task.task_info()["mi_index"] + # next_task = processor.next_task() + # while next_task and next_task.task_info()["mi_index"] > last_index: + # __update_task(processor, next_task, form_data, user) + # last_index = next_task.task_info()["mi_index"] + # next_task = processor.next_task() + + next_active_task_assigned_to_me = ( + ActiveTaskModel.query.filter_by(process_instance_id=process_instance_id) + .order_by(asc(ActiveTaskModel.id)) # type: ignore + .join(ActiveTaskUserModel) + .filter_by(user_id=principal.user_id) + .first() + ) + if next_active_task_assigned_to_me: + return make_response( + jsonify(ActiveTaskModel.to_task(next_active_task_assigned_to_me)), 200 + ) + + return Response(json.dumps({"ok": True}), status=202, mimetype="application/json") + + +def script_unit_test_create( + process_group_id: str, process_model_id: str, body: Dict[str, Union[str, bool, int]] +) -> flask.wrappers.Response: + """Script_unit_test_create.""" + bpmn_task_identifier = _get_required_parameter_or_raise( + "bpmn_task_identifier", body + ) + input_json = _get_required_parameter_or_raise("input_json", body) + expected_output_json = _get_required_parameter_or_raise( + "expected_output_json", body + ) + + process_model_identifier = f"{process_group_id}/{process_model_id}" + process_model = get_process_model(process_model_identifier) + file = SpecFileService.get_files(process_model, process_model.primary_file_name)[0] + if file is None: + raise ApiError( + error_code="cannot_find_file", + message=f"Could not find the primary bpmn file for process_model: {process_model.id}", + status_code=404, + ) + + # TODO: move this to an xml service or something + file_contents = SpecFileService.get_data(process_model, file.name) + bpmn_etree_element = etree.fromstring(file_contents) + + nsmap = bpmn_etree_element.nsmap + spiff_element_maker = ElementMaker( + namespace="http://spiffworkflow.org/bpmn/schema/1.0/core", nsmap=nsmap + ) + + script_task_elements = bpmn_etree_element.xpath( + f"//bpmn:scriptTask[@id='{bpmn_task_identifier}']", + namespaces={"bpmn": "http://www.omg.org/spec/BPMN/20100524/MODEL"}, + ) + if len(script_task_elements) == 0: + raise ApiError( + error_code="missing_script_task", + message=f"Cannot find a script task with id: {bpmn_task_identifier}", + status_code=404, + ) + script_task_element = script_task_elements[0] + + extension_elements = None + extension_elements_array = script_task_element.xpath( + "//bpmn:extensionElements", + namespaces={"bpmn": "http://www.omg.org/spec/BPMN/20100524/MODEL"}, + ) + if len(extension_elements_array) == 0: + bpmn_element_maker = ElementMaker( + namespace="http://www.omg.org/spec/BPMN/20100524/MODEL", nsmap=nsmap + ) + extension_elements = bpmn_element_maker("extensionElements") + script_task_element.append(extension_elements) + else: + extension_elements = extension_elements_array[0] + + unit_test_elements = None + unit_test_elements_array = extension_elements.xpath( + "//spiffworkflow:unitTests", + namespaces={"spiffworkflow": "http://spiffworkflow.org/bpmn/schema/1.0/core"}, + ) + if len(unit_test_elements_array) == 0: + unit_test_elements = spiff_element_maker("unitTests") + extension_elements.append(unit_test_elements) + else: + unit_test_elements = unit_test_elements_array[0] + + fuzz = "".join( + random.choice(string.ascii_uppercase + string.digits) # noqa: S311 + for _ in range(7) + ) + unit_test_id = f"unit_test_{fuzz}" + + input_json_element = spiff_element_maker("inputJson", json.dumps(input_json)) + 
expected_output_json_element = spiff_element_maker( + "expectedOutputJson", json.dumps(expected_output_json) + ) + unit_test_element = spiff_element_maker("unitTest", id=unit_test_id) + unit_test_element.append(input_json_element) + unit_test_element.append(expected_output_json_element) + unit_test_elements.append(unit_test_element) + SpecFileService.update_file( + process_model, file.name, etree.tostring(bpmn_etree_element) + ) + + return Response(json.dumps({"ok": True}), status=202, mimetype="application/json") + + +def script_unit_test_run( + process_group_id: str, process_model_id: str, body: Dict[str, Union[str, bool, int]] +) -> flask.wrappers.Response: + """Script_unit_test_run.""" + # FIXME: We should probably clear this somewhere else but this works + current_app.config["THREAD_LOCAL_DATA"].process_instance_id = None + current_app.config["THREAD_LOCAL_DATA"].spiff_step = None + + python_script = _get_required_parameter_or_raise("python_script", body) + input_json = _get_required_parameter_or_raise("input_json", body) + expected_output_json = _get_required_parameter_or_raise( + "expected_output_json", body + ) + + result = ScriptUnitTestRunner.run_with_script_and_pre_post_contexts( + python_script, input_json, expected_output_json + ) + return make_response(jsonify(result), 200) + + +def get_file_from_request() -> Any: + """Get_file_from_request.""" + request_file = connexion.request.files.get("file") + if not request_file: + raise ApiError( + error_code="no_file_given", + message="Given request does not contain a file", + status_code=400, + ) + return request_file + + +def get_process_model(process_model_id: str) -> ProcessModelInfo: + """Get_process_model.""" + process_model = None + try: + process_model = ProcessModelService.get_process_model(process_model_id) + except ProcessEntityNotFoundError as exception: + raise ( + ApiError( + error_code="process_model_cannot_be_found", + message=f"Process model cannot be found: {process_model_id}", + status_code=400, + ) + ) from exception + + return process_model + + +def find_principal_or_raise() -> PrincipalModel: + """Find_principal_or_raise.""" + principal = PrincipalModel.query.filter_by(user_id=g.user.id).first() + if principal is None: + raise ( + ApiError( + error_code="principal_not_found", + message=f"Principal not found from user id: {g.user.id}", + status_code=400, + ) + ) + return principal # type: ignore + + +def find_process_instance_by_id_or_raise( + process_instance_id: int, +) -> ProcessInstanceModel: + """Find_process_instance_by_id_or_raise.""" + process_instance_query = ProcessInstanceModel.query.filter_by( + id=process_instance_id + ) + + # we had a frustrating session trying to do joins and access columns from two tables. 
here's some notes for our future selves: + # this returns an object that allows you to do: process_instance.UserModel.username + # process_instance = db.session.query(ProcessInstanceModel, UserModel).filter_by(id=process_instance_id).first() + # you can also use splat with add_columns, but it still didn't ultimately give us access to the process instance + # attributes or username like we wanted: + # process_instance_query.join(UserModel).add_columns(*ProcessInstanceModel.__table__.columns, UserModel.username) + + process_instance = process_instance_query.first() + if process_instance is None: + raise ( + ApiError( + error_code="process_instance_cannot_be_found", + message=f"Process instance cannot be found: {process_instance_id}", + status_code=400, + ) + ) + return process_instance # type: ignore + + +def get_value_from_array_with_index(array: list, index: int) -> Any: + """Get_value_from_array_with_index.""" + if index < 0: + return None + + if index >= len(array): + return None + + return array[index] + + +def prepare_form_data( + form_file: str, task_data: Union[dict, None], process_model: ProcessModelInfo +) -> str: + """Prepare_form_data.""" + if task_data is None: + return "" + + file_contents = SpecFileService.get_data(process_model, form_file).decode("utf-8") + return render_jinja_template(file_contents, task_data) + + +def render_jinja_template(unprocessed_template: str, data: dict[str, Any]) -> str: + """Render_jinja_template.""" + jinja_environment = jinja2.Environment( + autoescape=True, lstrip_blocks=True, trim_blocks=True + ) + template = jinja_environment.from_string(unprocessed_template) + return template.render(**data) + + +def get_spiff_task_from_process_instance( + task_id: str, + process_instance: ProcessInstanceModel, + processor: Union[ProcessInstanceProcessor, None] = None, +) -> SpiffTask: + """Get_spiff_task_from_process_instance.""" + if processor is None: + processor = ProcessInstanceProcessor(process_instance) + task_uuid = uuid.UUID(task_id) + spiff_task = processor.bpmn_process_instance.get_task(task_uuid) + + if spiff_task is None: + raise ( + ApiError( + error_code="empty_task", + message="Processor failed to obtain task.", + status_code=500, + ) + ) + return spiff_task + + +# +# Methods for secrets CRUD - maybe move somewhere else: +# +def get_secret(key: str) -> Optional[str]: + """Get_secret.""" + return SecretService.get_secret(key) + + +def secret_list( + page: int = 1, + per_page: int = 100, +) -> Response: + """Secret_list.""" + secrets = ( + SecretModel.query.order_by(SecretModel.key) + .join(UserModel) + .add_columns( + UserModel.username, + ) + .paginate(page=page, per_page=per_page, error_out=False) + ) + response_json = { + "results": secrets.items, + "pagination": { + "count": len(secrets.items), + "total": secrets.total, + "pages": secrets.pages, + }, + } + return make_response(jsonify(response_json), 200) + + +def add_secret(body: Dict) -> Response: + """Add secret.""" + secret_model = SecretService().add_secret(body["key"], body["value"], g.user.id) + assert secret_model # noqa: S101 + return Response( + json.dumps(SecretModelSchema().dump(secret_model)), + status=201, + mimetype="application/json", + ) + + +def update_secret(key: str, body: dict) -> Response: + """Update secret.""" + SecretService().update_secret(key, body["value"], g.user.id) + return Response(json.dumps({"ok": True}), status=200, mimetype="application/json") + + +def delete_secret(key: str) -> Response: + """Delete secret.""" + current_user = UserService.current_user() + 
SecretService.delete_secret(key, current_user.id) + return Response(json.dumps({"ok": True}), status=200, mimetype="application/json") + + +def _get_required_parameter_or_raise(parameter: str, post_body: dict[str, Any]) -> Any: + """Get_required_parameter_or_raise.""" + return_value = None + if parameter in post_body: + return_value = post_body[parameter] + + if return_value is None or return_value == "": + raise ( + ApiError( + error_code="missing_required_parameter", + message=f"Parameter is missing from json request body: {parameter}", + status_code=400, + ) + ) + + return return_value + + +# originally from: https://bitcoden.com/answers/python-nested-dictionary-update-value-where-any-nested-key-matches +def _update_form_schema_with_task_data_as_needed( + in_dict: dict, task_data: dict +) -> None: + """Update_nested.""" + for k, value in in_dict.items(): + if "anyOf" == k: + # value will look like the array on the right of "anyOf": ["options_from_task_data_var:awesome_options"] + if isinstance(value, list): + if len(value) == 1: + first_element_in_value_list = value[0] + if isinstance(first_element_in_value_list, str): + if first_element_in_value_list.startswith( + "options_from_task_data_var:" + ): + task_data_var = first_element_in_value_list.replace( + "options_from_task_data_var:", "" + ) + + if task_data_var not in task_data: + raise ( + ApiError( + error_code="missing_task_data_var", + message=f"Task data is missing variable: {task_data_var}", + status_code=500, + ) + ) + + select_options_from_task_data = task_data.get(task_data_var) + if isinstance(select_options_from_task_data, list): + if all( + "value" in d and "label" in d + for d in select_options_from_task_data + ): + + def map_function( + task_data_select_option: TaskDataSelectOption, + ) -> ReactJsonSchemaSelectOption: + """Map_function.""" + return { + "type": "string", + "enum": [task_data_select_option["value"]], + "title": task_data_select_option["label"], + } + + options_for_react_json_schema_form = list( + map(map_function, select_options_from_task_data) + ) + + in_dict[k] = options_for_react_json_schema_form + elif isinstance(value, dict): + _update_form_schema_with_task_data_as_needed(value, task_data) + elif isinstance(value, list): + for o in value: + if isinstance(o, dict): + _update_form_schema_with_task_data_as_needed(o, task_data) + + +def update_task_data(process_instance_id: str, task_id: str, body: Dict) -> Response: + """Update task data.""" + process_instance = ProcessInstanceModel.query.filter( + ProcessInstanceModel.id == int(process_instance_id) + ).first() + if process_instance: + process_instance_bpmn_json_dict = json.loads(process_instance.bpmn_json) + if "new_task_data" in body: + new_task_data_str: str = body["new_task_data"] + new_task_data_dict = json.loads(new_task_data_str) + if task_id in process_instance_bpmn_json_dict["tasks"]: + process_instance_bpmn_json_dict["tasks"][task_id][ + "data" + ] = new_task_data_dict + process_instance.bpmn_json = json.dumps(process_instance_bpmn_json_dict) + db.session.add(process_instance) + try: + db.session.commit() + except Exception as e: + db.session.rollback() + raise ApiError( + error_code="update_task_data_error", + message=f"Could not update the Instance. 
Original error is {e}", + ) from e + else: + raise ApiError( + error_code="update_task_data_error", + message=f"Could not find Task: {task_id} in Instance: {process_instance_id}.", + ) + else: + raise ApiError( + error_code="update_task_data_error", + message=f"Could not update task data for Instance: {process_instance_id}, and Task: {task_id}.", + ) + return Response( + json.dumps(ProcessInstanceModelSchema().dump(process_instance)), + status=200, + mimetype="application/json", + ) diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/routes/process_api_blueprint.py b/spiffworkflow-backend/src/spiffworkflow_backend/routes/process_api_blueprint.py index 739e689d..9e4c54be 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/routes/process_api_blueprint.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/routes/process_api_blueprint.py @@ -12,6 +12,7 @@ from typing import Union import connexion # type: ignore import flask.wrappers import jinja2 +from spiffworkflow_backend.models.process_instance_metadata import ProcessInstanceMetadataModel import werkzeug from flask import Blueprint from flask import current_app @@ -27,10 +28,10 @@ from lxml import etree # type: ignore from lxml.builder import ElementMaker # type: ignore from SpiffWorkflow.task import Task as SpiffTask # type: ignore from SpiffWorkflow.task import TaskState -from sqlalchemy import and_ +from sqlalchemy import and_, func from sqlalchemy import asc from sqlalchemy import desc -from sqlalchemy.orm import joinedload +from sqlalchemy.orm import aliased, joinedload from spiffworkflow_backend.exceptions.process_entity_not_found_error import ( ProcessEntityNotFoundError, @@ -928,6 +929,26 @@ def process_instance_list( UserGroupAssignmentModel.user_id == g.user.id ) + # userSkillF = aliased(UserSkill) + # userSkillI = aliased(UserSkill) + + # import pdb; pdb.set_trace() + stock_columns = ProcessInstanceReportService.get_column_names_for_model(ProcessInstanceModel) + # print(f"stock_columns: {stock_columns}") + # import pdb; pdb.set_trace() + # for column in process_instance_report.report_metadata['columns']: + # if column not in stock_columns: + # # continue + for column in [{'accessor': 'key1'}]: + # print(f"column: {column['accessor']}") + # process_instance_query = process_instance_query.outerjoin(ProcessInstanceMetadataModel, ProcessInstanceModel.id == ProcessInstanceMetadataModel.process_instance_id, ProcessInstanceMetadataModel.key == column['accessor']) + instance_metadata_alias = aliased(ProcessInstanceMetadataModel) + process_instance_query = ( + process_instance_query.options(joinedload(instance_metadata_alias, ProcessInstanceModel.id == instance_metadata_alias.process_instance_id, innerjoin=False)).filter(instance_metadata_alias.key == column['accessor']) + .add_column(func.max(instance_metadata_alias.value).label(column['accessor'])) + ) + # import pdb; pdb.set_trace() + process_instances = ( process_instance_query.group_by(ProcessInstanceModel.id) .order_by( @@ -935,14 +956,26 @@ def process_instance_list( ) .paginate(page=page, per_page=per_page, error_out=False) ) + import pdb; pdb.set_trace() - results = list( - map( - ProcessInstanceService.serialize_flat_with_task_data, - process_instances.items, - ) - ) + # def awesome_serialize(process_instance) + # dict_thing = process_instance.serialize + # + # # add columns since we have access to columns here + # dict_thing['awesome'] = 'awesome' + # + # return dict_thing + + # results = list( + # map( + # 
ProcessInstanceService.serialize_flat_with_task_data, + # process_instances.items, + # ) + # ) + results = ProcessInstanceReportService.add_metadata_columns_to_process_instance(process_instances.items, process_instance_report.report_metadata['columns']) report_metadata = process_instance_report.report_metadata + print(f"results: {results}") + import pdb; pdb.set_trace() response_json = { "report_identifier": process_instance_report.identifier, diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/services/authentication_service.py b/spiffworkflow-backend/src/spiffworkflow_backend/services/authentication_service.py index 18f08d0f..3868adf6 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/services/authentication_service.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/services/authentication_service.py @@ -235,8 +235,9 @@ class AuthenticationService: refresh_token_object: RefreshTokenModel = RefreshTokenModel.query.filter( RefreshTokenModel.user_id == user_id ).first() - assert refresh_token_object # noqa: S101 - return refresh_token_object.token + if refresh_token_object: + return refresh_token_object.token + return None @classmethod def get_auth_token_from_refresh_token(cls, refresh_token: str) -> dict: diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/services/process_instance_report_service.py b/spiffworkflow-backend/src/spiffworkflow_backend/services/process_instance_report_service.py index fc5a93da..6c579826 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/services/process_instance_report_service.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/services/process_instance_report_service.py @@ -1,6 +1,8 @@ """Process_instance_report_service.""" from dataclasses import dataclass +from flask_bpmn.models.db import db from typing import Optional +from spiffworkflow_backend.models.process_instance import ProcessInstanceModel from spiffworkflow_backend.models.process_instance_report import ( ProcessInstanceReportModel, @@ -241,3 +243,20 @@ class ProcessInstanceReportService: ) return report_filter + + @classmethod + def add_metadata_columns_to_process_instance(cls, process_instance_sqlalchemy_rows, metadata_columns: list[dict]) -> list[dict]: + stock_columns = cls.get_column_names_for_model(ProcessInstanceModel) + results = [] + for process_instance in process_instance_sqlalchemy_rows: + process_instance_dict = process_instance['ProcessInstanceModel'].serialized + for metadata_column in metadata_columns: + if metadata_column['accessor'] not in stock_columns: + process_instance_dict[metadata_column['accessor']] = process_instance[metadata_column['accessor']] + + results.append(process_instance_dict) + return results + + @classmethod + def get_column_names_for_model(cls, model: db.Model) -> list[str]: + return [i.name for i in model.__table__.columns] diff --git a/spiffworkflow-backend/tests/spiffworkflow_backend/integration/test_process_api.py b/spiffworkflow-backend/tests/spiffworkflow_backend/integration/test_process_api.py index 4c60cb8c..b7fc0479 100644 --- a/spiffworkflow-backend/tests/spiffworkflow_backend/integration/test_process_api.py +++ b/spiffworkflow-backend/tests/spiffworkflow_backend/integration/test_process_api.py @@ -2588,12 +2588,17 @@ class TestProcessApi(BaseTest): response = client.get( f"/v1.0/process-instances?report_identifier={process_instance_report.identifier}", + # f"/v1.0/process-instances?report_identifier=demo1", headers=self.logged_in_headers(with_super_admin_user), ) print(f"response.json: 
{response.json}") - assert response.status_code == 200 assert response.json is not None + assert response.status_code == 200 + assert len(response.json["results"]) == 1 + assert response.json["results"][0]["status"] == "complete" + assert response.json["results"][0]["id"] == process_instance.id + # assert response.json["results"][0]["key1"] == "value1" assert response.json["pagination"]["count"] == 1 assert response.json["pagination"]["pages"] == 1 assert response.json["pagination"]["total"] == 1 From 860b83ad36de24270cc80e3c1d0578b27717409e Mon Sep 17 00:00:00 2001 From: jasquat Date: Tue, 29 Nov 2022 16:00:19 -0500 Subject: [PATCH 10/40] WIP more metadata reporting w/ burnettk --- .../spiffworkflow_backend/routes/process_api_blueprint.py | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/routes/process_api_blueprint.py b/spiffworkflow-backend/src/spiffworkflow_backend/routes/process_api_blueprint.py index 9e4c54be..57890fb2 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/routes/process_api_blueprint.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/routes/process_api_blueprint.py @@ -936,10 +936,10 @@ def process_instance_list( stock_columns = ProcessInstanceReportService.get_column_names_for_model(ProcessInstanceModel) # print(f"stock_columns: {stock_columns}") # import pdb; pdb.set_trace() - # for column in process_instance_report.report_metadata['columns']: - # if column not in stock_columns: - # # continue - for column in [{'accessor': 'key1'}]: + for column in process_instance_report.report_metadata['columns']: + if column['accessor'] in stock_columns: + continue + # for column in [{'accessor': 'key1'}]: # print(f"column: {column['accessor']}") # process_instance_query = process_instance_query.outerjoin(ProcessInstanceMetadataModel, ProcessInstanceModel.id == ProcessInstanceMetadataModel.process_instance_id, ProcessInstanceMetadataModel.key == column['accessor']) instance_metadata_alias = aliased(ProcessInstanceMetadataModel) From 46179746369f698fdd85182bbcac80a6cbadca55 Mon Sep 17 00:00:00 2001 From: jasquat Date: Tue, 29 Nov 2022 16:09:55 -0500 Subject: [PATCH 11/40] metadat reports work w/ burnettk --- .../routes/process_api_blueprint.py | 35 +++---------------- .../integration/test_process_api.py | 9 ++--- 2 files changed, 9 insertions(+), 35 deletions(-) diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/routes/process_api_blueprint.py b/spiffworkflow-backend/src/spiffworkflow_backend/routes/process_api_blueprint.py index 57890fb2..b2b07ae5 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/routes/process_api_blueprint.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/routes/process_api_blueprint.py @@ -814,9 +814,9 @@ def process_instance_list( # process_model_identifier = un_modify_modified_process_model_id(modified_process_model_identifier) process_instance_query = ProcessInstanceModel.query # Always join that hot user table for good performance at serialization time. 
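For orientation before the next hunks: a condensed, standalone sketch of the metadata-column join that the following commits converge on. The helper name add_metadata_columns and the metadata_accessors parameter are illustrative only; the models, the aliased/outerjoin/func.max pieces, and the group_by on the instance id are the ones used in the hunks below.

    from sqlalchemy import and_, func
    from sqlalchemy.orm import aliased

    from spiffworkflow_backend.models.process_instance import ProcessInstanceModel
    from spiffworkflow_backend.models.process_instance_metadata import (
        ProcessInstanceMetadataModel,
    )


    def add_metadata_columns(query, metadata_accessors):
        """Illustrative sketch: outer-join one aliased metadata row per requested accessor."""
        for accessor in metadata_accessors:
            instance_metadata_alias = aliased(ProcessInstanceMetadataModel)
            query = query.outerjoin(
                instance_metadata_alias,
                and_(
                    ProcessInstanceModel.id == instance_metadata_alias.process_instance_id,
                    instance_metadata_alias.key == accessor,
                ),
            ).add_columns(func.max(instance_metadata_alias.value).label(accessor))
        # group by the instance id so each process instance still yields a single row
        return query.group_by(ProcessInstanceModel.id)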
- process_instance_query = process_instance_query.options( - joinedload(ProcessInstanceModel.process_initiator) - ) + # process_instance_query = process_instance_query.options( + # joinedload(ProcessInstanceModel.process_initiator, ProcessInstanceModel.process_initiator_id == UserModel.id) + # ) if report_filter.process_model_identifier is not None: process_model = get_process_model( @@ -929,25 +929,15 @@ def process_instance_list( UserGroupAssignmentModel.user_id == g.user.id ) - # userSkillF = aliased(UserSkill) - # userSkillI = aliased(UserSkill) - - # import pdb; pdb.set_trace() stock_columns = ProcessInstanceReportService.get_column_names_for_model(ProcessInstanceModel) - # print(f"stock_columns: {stock_columns}") - # import pdb; pdb.set_trace() for column in process_instance_report.report_metadata['columns']: if column['accessor'] in stock_columns: continue - # for column in [{'accessor': 'key1'}]: - # print(f"column: {column['accessor']}") - # process_instance_query = process_instance_query.outerjoin(ProcessInstanceMetadataModel, ProcessInstanceModel.id == ProcessInstanceMetadataModel.process_instance_id, ProcessInstanceMetadataModel.key == column['accessor']) instance_metadata_alias = aliased(ProcessInstanceMetadataModel) process_instance_query = ( process_instance_query.options(joinedload(instance_metadata_alias, ProcessInstanceModel.id == instance_metadata_alias.process_instance_id, innerjoin=False)).filter(instance_metadata_alias.key == column['accessor']) - .add_column(func.max(instance_metadata_alias.value).label(column['accessor'])) + .add_columns(func.max(instance_metadata_alias.value).label(column['accessor'])) ) - # import pdb; pdb.set_trace() process_instances = ( process_instance_query.group_by(ProcessInstanceModel.id) @@ -956,26 +946,9 @@ def process_instance_list( ) .paginate(page=page, per_page=per_page, error_out=False) ) - import pdb; pdb.set_trace() - # def awesome_serialize(process_instance) - # dict_thing = process_instance.serialize - # - # # add columns since we have access to columns here - # dict_thing['awesome'] = 'awesome' - # - # return dict_thing - - # results = list( - # map( - # ProcessInstanceService.serialize_flat_with_task_data, - # process_instances.items, - # ) - # ) results = ProcessInstanceReportService.add_metadata_columns_to_process_instance(process_instances.items, process_instance_report.report_metadata['columns']) report_metadata = process_instance_report.report_metadata - print(f"results: {results}") - import pdb; pdb.set_trace() response_json = { "report_identifier": process_instance_report.identifier, diff --git a/spiffworkflow-backend/tests/spiffworkflow_backend/integration/test_process_api.py b/spiffworkflow-backend/tests/spiffworkflow_backend/integration/test_process_api.py index b7fc0479..fb33d246 100644 --- a/spiffworkflow-backend/tests/spiffworkflow_backend/integration/test_process_api.py +++ b/spiffworkflow-backend/tests/spiffworkflow_backend/integration/test_process_api.py @@ -2562,7 +2562,7 @@ class TestProcessApi(BaseTest): process_instance = self.create_process_instance_from_process_model( process_model=process_model, user=with_super_admin_user ) - + processor = ProcessInstanceProcessor(process_instance) processor.do_engine_steps(save=True) process_instance_metadata = ProcessInstanceMetadataModel.query.filter_by( @@ -2576,6 +2576,7 @@ class TestProcessApi(BaseTest): {"Header": "ID", "accessor": "id"}, {"Header": "Status", "accessor": "status"}, {"Header": "Key One", "accessor": "key1"}, + # {"Header": "Key Two", "accessor": 
"key2"}, ], "order_by": ["status"], "filter_by": [], @@ -2588,17 +2589,17 @@ class TestProcessApi(BaseTest): response = client.get( f"/v1.0/process-instances?report_identifier={process_instance_report.identifier}", - # f"/v1.0/process-instances?report_identifier=demo1", headers=self.logged_in_headers(with_super_admin_user), ) - print(f"response.json: {response.json}") + assert response.json is not None assert response.status_code == 200 assert len(response.json["results"]) == 1 assert response.json["results"][0]["status"] == "complete" assert response.json["results"][0]["id"] == process_instance.id - # assert response.json["results"][0]["key1"] == "value1" + assert response.json["results"][0]["key1"] == "value1" + # assert response.json["results"][0]["key2"] == "value2" assert response.json["pagination"]["count"] == 1 assert response.json["pagination"]["pages"] == 1 assert response.json["pagination"]["total"] == 1 From 45eafc6060617cf945165be0c18a0afde5394caf Mon Sep 17 00:00:00 2001 From: jasquat Date: Tue, 29 Nov 2022 16:19:55 -0500 Subject: [PATCH 12/40] some cleanup for metadata w/ burnettk --- .../routes/process_api_blueprint.py | 38 +++++++++++++------ .../process_instance_report_service.py | 24 ++++++++---- .../integration/test_process_api.py | 13 ++++--- 3 files changed, 50 insertions(+), 25 deletions(-) diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/routes/process_api_blueprint.py b/spiffworkflow-backend/src/spiffworkflow_backend/routes/process_api_blueprint.py index b2b07ae5..753b6c3c 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/routes/process_api_blueprint.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/routes/process_api_blueprint.py @@ -12,7 +12,6 @@ from typing import Union import connexion # type: ignore import flask.wrappers import jinja2 -from spiffworkflow_backend.models.process_instance_metadata import ProcessInstanceMetadataModel import werkzeug from flask import Blueprint from flask import current_app @@ -28,10 +27,12 @@ from lxml import etree # type: ignore from lxml.builder import ElementMaker # type: ignore from SpiffWorkflow.task import Task as SpiffTask # type: ignore from SpiffWorkflow.task import TaskState -from sqlalchemy import and_, func +from sqlalchemy import and_ from sqlalchemy import asc from sqlalchemy import desc -from sqlalchemy.orm import aliased, joinedload +from sqlalchemy import func +from sqlalchemy.orm import aliased +from sqlalchemy.orm import joinedload from spiffworkflow_backend.exceptions.process_entity_not_found_error import ( ProcessEntityNotFoundError, @@ -53,6 +54,9 @@ from spiffworkflow_backend.models.process_instance import ProcessInstanceApiSche from spiffworkflow_backend.models.process_instance import ProcessInstanceModel from spiffworkflow_backend.models.process_instance import ProcessInstanceModelSchema from spiffworkflow_backend.models.process_instance import ProcessInstanceStatus +from spiffworkflow_backend.models.process_instance_metadata import ( + ProcessInstanceMetadataModel, +) from spiffworkflow_backend.models.process_instance_report import ( ProcessInstanceReportModel, ) @@ -814,9 +818,9 @@ def process_instance_list( # process_model_identifier = un_modify_modified_process_model_id(modified_process_model_identifier) process_instance_query = ProcessInstanceModel.query # Always join that hot user table for good performance at serialization time. 
- # process_instance_query = process_instance_query.options( - # joinedload(ProcessInstanceModel.process_initiator, ProcessInstanceModel.process_initiator_id == UserModel.id) - # ) + process_instance_query = process_instance_query.options( + joinedload(ProcessInstanceModel.process_initiator) + ) if report_filter.process_model_identifier is not None: process_model = get_process_model( @@ -929,14 +933,22 @@ def process_instance_list( UserGroupAssignmentModel.user_id == g.user.id ) - stock_columns = ProcessInstanceReportService.get_column_names_for_model(ProcessInstanceModel) - for column in process_instance_report.report_metadata['columns']: - if column['accessor'] in stock_columns: + stock_columns = ProcessInstanceReportService.get_column_names_for_model( + ProcessInstanceModel + ) + for column in process_instance_report.report_metadata["columns"]: + if column["accessor"] in stock_columns: continue instance_metadata_alias = aliased(ProcessInstanceMetadataModel) process_instance_query = ( - process_instance_query.options(joinedload(instance_metadata_alias, ProcessInstanceModel.id == instance_metadata_alias.process_instance_id, innerjoin=False)).filter(instance_metadata_alias.key == column['accessor']) - .add_columns(func.max(instance_metadata_alias.value).label(column['accessor'])) + process_instance_query.outerjoin( + instance_metadata_alias, + ProcessInstanceModel.id == instance_metadata_alias.process_instance_id, + ) + .filter(instance_metadata_alias.key == column["accessor"]) + .add_columns( + func.max(instance_metadata_alias.value).label(column["accessor"]) + ) ) process_instances = ( @@ -947,7 +959,9 @@ def process_instance_list( .paginate(page=page, per_page=per_page, error_out=False) ) - results = ProcessInstanceReportService.add_metadata_columns_to_process_instance(process_instances.items, process_instance_report.report_metadata['columns']) + results = ProcessInstanceReportService.add_metadata_columns_to_process_instance( + process_instances.items, process_instance_report.report_metadata["columns"] + ) report_metadata = process_instance_report.report_metadata response_json = { diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/services/process_instance_report_service.py b/spiffworkflow-backend/src/spiffworkflow_backend/services/process_instance_report_service.py index 6c579826..20563be3 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/services/process_instance_report_service.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/services/process_instance_report_service.py @@ -1,9 +1,11 @@ """Process_instance_report_service.""" from dataclasses import dataclass -from flask_bpmn.models.db import db from typing import Optional -from spiffworkflow_backend.models.process_instance import ProcessInstanceModel +import sqlalchemy +from flask_bpmn.models.db import db + +from spiffworkflow_backend.models.process_instance import ProcessInstanceModel from spiffworkflow_backend.models.process_instance_report import ( ProcessInstanceReportModel, ) @@ -245,18 +247,26 @@ class ProcessInstanceReportService: return report_filter @classmethod - def add_metadata_columns_to_process_instance(cls, process_instance_sqlalchemy_rows, metadata_columns: list[dict]) -> list[dict]: + def add_metadata_columns_to_process_instance( + cls, + process_instance_sqlalchemy_rows: list[sqlalchemy.engine.row.Row], # type: ignore + metadata_columns: list[dict], + ) -> list[dict]: + """Add_metadata_columns_to_process_instance.""" stock_columns = cls.get_column_names_for_model(ProcessInstanceModel) 
results = [] for process_instance in process_instance_sqlalchemy_rows: - process_instance_dict = process_instance['ProcessInstanceModel'].serialized + process_instance_dict = process_instance["ProcessInstanceModel"].serialized for metadata_column in metadata_columns: - if metadata_column['accessor'] not in stock_columns: - process_instance_dict[metadata_column['accessor']] = process_instance[metadata_column['accessor']] + if metadata_column["accessor"] not in stock_columns: + process_instance_dict[ + metadata_column["accessor"] + ] = process_instance[metadata_column["accessor"]] results.append(process_instance_dict) return results @classmethod - def get_column_names_for_model(cls, model: db.Model) -> list[str]: + def get_column_names_for_model(cls, model: db.Model) -> list[str]: # type: ignore + """Get_column_names_for_model.""" return [i.name for i in model.__table__.columns] diff --git a/spiffworkflow-backend/tests/spiffworkflow_backend/integration/test_process_api.py b/spiffworkflow-backend/tests/spiffworkflow_backend/integration/test_process_api.py index fb33d246..e22ec77b 100644 --- a/spiffworkflow-backend/tests/spiffworkflow_backend/integration/test_process_api.py +++ b/spiffworkflow-backend/tests/spiffworkflow_backend/integration/test_process_api.py @@ -4,13 +4,11 @@ import json import os import time from typing import Any -from conftest import with_super_admin_user import pytest from flask.app import Flask from flask.testing import FlaskClient from flask_bpmn.models.db import db -from spiffworkflow_backend.models.process_instance_metadata import ProcessInstanceMetadataModel from tests.spiffworkflow_backend.helpers.base_test import BaseTest from tests.spiffworkflow_backend.helpers.test_data import load_test_spec @@ -22,6 +20,9 @@ from spiffworkflow_backend.models.group import GroupModel from spiffworkflow_backend.models.process_group import ProcessGroup from spiffworkflow_backend.models.process_instance import ProcessInstanceModel from spiffworkflow_backend.models.process_instance import ProcessInstanceStatus +from spiffworkflow_backend.models.process_instance_metadata import ( + ProcessInstanceMetadataModel, +) from spiffworkflow_backend.models.process_instance_report import ( ProcessInstanceReportModel, ) @@ -2554,10 +2555,11 @@ class TestProcessApi(BaseTest): with_db_and_bpmn_file_cleanup: None, with_super_admin_user: UserModel, ) -> None: + """Test_can_get_process_instance_list_with_report_metadata.""" process_model = load_test_spec( - process_model_id='test-process-instance-metadata-report', - bpmn_file_name='process_instance_metadata.bpmn', - process_model_source_directory='test-process-instance-metadata-report', + process_model_id="test-process-instance-metadata-report", + bpmn_file_name="process_instance_metadata.bpmn", + process_model_source_directory="test-process-instance-metadata-report", ) process_instance = self.create_process_instance_from_process_model( process_model=process_model, user=with_super_admin_user @@ -2570,7 +2572,6 @@ class TestProcessApi(BaseTest): ).all() assert len(process_instance_metadata) == 2 - report_metadata = { "columns": [ {"Header": "ID", "accessor": "id"}, From 4d048d6e117f68fc41050599de664fa44d954e56 Mon Sep 17 00:00:00 2001 From: jasquat Date: Tue, 29 Nov 2022 16:37:19 -0500 Subject: [PATCH 13/40] finished base for metadata reporting w/ burnettk --- .../routes/process_api_blueprint.py | 16 +++++++--------- .../services/process_instance_report_service.py | 4 +--- 2 files changed, 8 insertions(+), 12 deletions(-) diff --git 
a/spiffworkflow-backend/src/spiffworkflow_backend/routes/process_api_blueprint.py b/spiffworkflow-backend/src/spiffworkflow_backend/routes/process_api_blueprint.py index 753b6c3c..b3bc1a22 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/routes/process_api_blueprint.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/routes/process_api_blueprint.py @@ -940,19 +940,17 @@ def process_instance_list( if column["accessor"] in stock_columns: continue instance_metadata_alias = aliased(ProcessInstanceMetadataModel) - process_instance_query = ( - process_instance_query.outerjoin( - instance_metadata_alias, + process_instance_query = process_instance_query.outerjoin( + instance_metadata_alias, + and_( ProcessInstanceModel.id == instance_metadata_alias.process_instance_id, - ) - .filter(instance_metadata_alias.key == column["accessor"]) - .add_columns( - func.max(instance_metadata_alias.value).label(column["accessor"]) - ) - ) + instance_metadata_alias.key == column["accessor"], + ), + ).add_columns(func.max(instance_metadata_alias.value).label(column["accessor"])) process_instances = ( process_instance_query.group_by(ProcessInstanceModel.id) + .add_columns(ProcessInstanceModel.id) .order_by( ProcessInstanceModel.start_in_seconds.desc(), ProcessInstanceModel.id.desc() # type: ignore ) diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/services/process_instance_report_service.py b/spiffworkflow-backend/src/spiffworkflow_backend/services/process_instance_report_service.py index 20563be3..ad9dec0a 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/services/process_instance_report_service.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/services/process_instance_report_service.py @@ -5,7 +5,6 @@ from typing import Optional import sqlalchemy from flask_bpmn.models.db import db -from spiffworkflow_backend.models.process_instance import ProcessInstanceModel from spiffworkflow_backend.models.process_instance_report import ( ProcessInstanceReportModel, ) @@ -253,12 +252,11 @@ class ProcessInstanceReportService: metadata_columns: list[dict], ) -> list[dict]: """Add_metadata_columns_to_process_instance.""" - stock_columns = cls.get_column_names_for_model(ProcessInstanceModel) results = [] for process_instance in process_instance_sqlalchemy_rows: process_instance_dict = process_instance["ProcessInstanceModel"].serialized for metadata_column in metadata_columns: - if metadata_column["accessor"] not in stock_columns: + if metadata_column["accessor"] not in process_instance_dict: process_instance_dict[ metadata_column["accessor"] ] = process_instance[metadata_column["accessor"]] From 2fc44907e80a6f41a1c057dc98e79b37af2628d8 Mon Sep 17 00:00:00 2001 From: jasquat Date: Tue, 29 Nov 2022 17:32:29 -0500 Subject: [PATCH 14/40] added api to get list of process report columns --- spiffworkflow-backend/migrations/env.py | 2 + .../{ff1c1628337c_.py => 40a2ed63cc5a_.py} | 8 ++-- .../src/spiffworkflow_backend/api.yml | 16 ++++++++ .../models/process_instance_metadata.py | 2 +- .../routes/process_api_blueprint.py | 9 +++++ .../process_instance_report_service.py | 14 +++++++ .../integration/test_process_api.py | 38 ++++++++++++++++++- 7 files changed, 83 insertions(+), 6 deletions(-) rename spiffworkflow-backend/migrations/versions/{ff1c1628337c_.py => 40a2ed63cc5a_.py} (98%) diff --git a/spiffworkflow-backend/migrations/env.py b/spiffworkflow-backend/migrations/env.py index 630e381a..68feded2 100644 --- a/spiffworkflow-backend/migrations/env.py +++ 
b/spiffworkflow-backend/migrations/env.py @@ -1,3 +1,5 @@ +from __future__ import with_statement + import logging from logging.config import fileConfig diff --git a/spiffworkflow-backend/migrations/versions/ff1c1628337c_.py b/spiffworkflow-backend/migrations/versions/40a2ed63cc5a_.py similarity index 98% rename from spiffworkflow-backend/migrations/versions/ff1c1628337c_.py rename to spiffworkflow-backend/migrations/versions/40a2ed63cc5a_.py index d8da6d3c..6abd6b4a 100644 --- a/spiffworkflow-backend/migrations/versions/ff1c1628337c_.py +++ b/spiffworkflow-backend/migrations/versions/40a2ed63cc5a_.py @@ -1,8 +1,8 @@ """empty message -Revision ID: ff1c1628337c +Revision ID: 40a2ed63cc5a Revises: -Create Date: 2022-11-28 15:08:52.014254 +Create Date: 2022-11-29 16:59:02.980181 """ from alembic import op @@ -10,7 +10,7 @@ import sqlalchemy as sa # revision identifiers, used by Alembic. -revision = 'ff1c1628337c' +revision = '40a2ed63cc5a' down_revision = None branch_labels = None depends_on = None @@ -249,6 +249,7 @@ def upgrade(): sa.PrimaryKeyConstraint('id'), sa.UniqueConstraint('process_instance_id', 'key', name='process_instance_metadata_unique') ) + op.create_index(op.f('ix_process_instance_metadata_key'), 'process_instance_metadata', ['key'], unique=False) op.create_table('spiff_step_details', sa.Column('id', sa.Integer(), nullable=False), sa.Column('process_instance_id', sa.Integer(), nullable=False), @@ -295,6 +296,7 @@ def downgrade(): op.drop_index(op.f('ix_active_task_user_active_task_id'), table_name='active_task_user') op.drop_table('active_task_user') op.drop_table('spiff_step_details') + op.drop_index(op.f('ix_process_instance_metadata_key'), table_name='process_instance_metadata') op.drop_table('process_instance_metadata') op.drop_table('permission_assignment') op.drop_table('message_instance') diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/api.yml b/spiffworkflow-backend/src/spiffworkflow_backend/api.yml index e7dc00fe..81fa92bd 100755 --- a/spiffworkflow-backend/src/spiffworkflow_backend/api.yml +++ b/spiffworkflow-backend/src/spiffworkflow_backend/api.yml @@ -841,6 +841,22 @@ paths: schema: $ref: "#/components/schemas/OkTrue" + /process-instances/reports/columns: + get: + operationId: spiffworkflow_backend.routes.process_api_blueprint.process_instance_report_column_list + summary: Returns all available columns for a process instance report. + tags: + - Process Instances + responses: + "200": + description: Workflow. 
+          content:
+            application/json:
+              schema:
+                type: array
+                items:
+                  $ref: "#/components/schemas/Workflow"
+
   /process-instances/reports/{report_identifier}:
     parameters:
       - name: report_identifier
diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/models/process_instance_metadata.py b/spiffworkflow-backend/src/spiffworkflow_backend/models/process_instance_metadata.py
index 5a4d4ca5..c9003594 100644
--- a/spiffworkflow-backend/src/spiffworkflow_backend/models/process_instance_metadata.py
+++ b/spiffworkflow-backend/src/spiffworkflow_backend/models/process_instance_metadata.py
@@ -23,7 +23,7 @@ class ProcessInstanceMetadataModel(SpiffworkflowBaseDBModel):
     process_instance_id: int = db.Column(
         ForeignKey(ProcessInstanceModel.id), nullable=False  # type: ignore
     )
-    key: str = db.Column(db.String(255), nullable=False)
+    key: str = db.Column(db.String(255), nullable=False, index=True)
     value: str = db.Column(db.String(255), nullable=False)

     updated_at_in_seconds: int = db.Column(db.Integer, nullable=False)
diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/routes/process_api_blueprint.py b/spiffworkflow-backend/src/spiffworkflow_backend/routes/process_api_blueprint.py
index b3bc1a22..b96cc262 100644
--- a/spiffworkflow-backend/src/spiffworkflow_backend/routes/process_api_blueprint.py
+++ b/spiffworkflow-backend/src/spiffworkflow_backend/routes/process_api_blueprint.py
@@ -977,6 +977,15 @@ def process_instance_list(
     return make_response(jsonify(response_json), 200)


+def process_instance_report_column_list() -> flask.wrappers.Response:
+    """Process_instance_report_column_list."""
+    table_columns = ProcessInstanceReportService.builtin_column_options()
+    columns_for_metadata = db.session.query(ProcessInstanceMetadataModel.key).distinct().all()  # type: ignore
+    # expose the stock report columns plus any distinct metadata keys recorded for process instances
+    columns_for_metadata_strings = [{"Header": i[0], "accessor": i[0]} for i in columns_for_metadata]
+    return make_response(jsonify(table_columns + columns_for_metadata_strings), 200)
+
+
 def process_instance_show(
     modified_process_model_identifier: str, process_instance_id: int
 ) -> flask.wrappers.Response:
diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/services/process_instance_report_service.py b/spiffworkflow-backend/src/spiffworkflow_backend/services/process_instance_report_service.py
index ad9dec0a..da70f0c0 100644
--- a/spiffworkflow-backend/src/spiffworkflow_backend/services/process_instance_report_service.py
+++ b/spiffworkflow-backend/src/spiffworkflow_backend/services/process_instance_report_service.py
@@ -268,3 +268,17 @@ class ProcessInstanceReportService:
     def get_column_names_for_model(cls, model: db.Model) -> list[str]:  # type: ignore
         """Get_column_names_for_model."""
         return [i.name for i in model.__table__.columns]
+
+    @classmethod
+    def builtin_column_options(cls) -> list[dict]:
+        return [
+            {"Header": "id", "accessor": "id"},
+            {
+                "Header": "process_model_display_name",
+                "accessor": "process_model_display_name",
+            },
+            {"Header": "start_in_seconds", "accessor": "start_in_seconds"},
+            {"Header": "end_in_seconds", "accessor": "end_in_seconds"},
+            {"Header": "username", "accessor": "username"},
+            {"Header": "status", "accessor": "status"},
+        ]
diff --git a/spiffworkflow-backend/tests/spiffworkflow_backend/integration/test_process_api.py b/spiffworkflow-backend/tests/spiffworkflow_backend/integration/test_process_api.py
index e22ec77b..beef3b74 100644
--- a/spiffworkflow-backend/tests/spiffworkflow_backend/integration/test_process_api.py
+++ 
b/spiffworkflow-backend/tests/spiffworkflow_backend/integration/test_process_api.py
@@ -2577,7 +2577,7 @@
                 {"Header": "ID", "accessor": "id"},
                 {"Header": "Status", "accessor": "status"},
                 {"Header": "Key One", "accessor": "key1"},
-                # {"Header": "Key Two", "accessor": "key2"},
+                {"Header": "Key Two", "accessor": "key2"},
             ],
             "order_by": ["status"],
             "filter_by": [],
@@ -2600,7 +2600,41 @@
         assert response.json["results"][0]["status"] == "complete"
         assert response.json["results"][0]["id"] == process_instance.id
         assert response.json["results"][0]["key1"] == "value1"
-        # assert response.json["results"][0]["key2"] == "value2"
+        assert response.json["results"][0]["key2"] == "value2"
         assert response.json["pagination"]["count"] == 1
         assert response.json["pagination"]["pages"] == 1
         assert response.json["pagination"]["total"] == 1
+
+    def test_can_get_process_instance_report_column_list(
+        self,
+        app: Flask,
+        client: FlaskClient,
+        with_db_and_bpmn_file_cleanup: None,
+        with_super_admin_user: UserModel,
+    ) -> None:
+        """Test_can_get_process_instance_report_column_list."""
+        process_model = load_test_spec(
+            process_model_id="test-process-instance-metadata-report",
+            bpmn_file_name="process_instance_metadata.bpmn",
+            process_model_source_directory="test-process-instance-metadata-report",
+        )
+        process_instance = self.create_process_instance_from_process_model(
+            process_model=process_model, user=with_super_admin_user
+        )
+
+        processor = ProcessInstanceProcessor(process_instance)
+        processor.do_engine_steps(save=True)
+        process_instance_metadata = ProcessInstanceMetadataModel.query.filter_by(
+            process_instance_id=process_instance.id
+        ).all()
+        assert len(process_instance_metadata) == 2
+
+        response = client.get(
+            "/v1.0/process-instances/reports/columns",
+            headers=self.logged_in_headers(with_super_admin_user),
+        )
+
+        assert response.json is not None
+        assert response.status_code == 200
+        assert response.json == [{'Header': 'id', 'accessor': 'id'}, {'Header': 'process_model_display_name', 'accessor': 'process_model_display_name'}, {'Header': 'start_in_seconds', 'accessor': 'start_in_seconds'}, {'Header': 'end_in_seconds', 'accessor': 'end_in_seconds'}, {'Header': 'username', 'accessor': 'username'}, {'Header': 'status', 'accessor': 'status'}, {'Header': 'key1', 'accessor': 'key1'}, {'Header': 'key2', 'accessor': 'key2'}]
+
From 83bbae7533deed0a45940159dae3d664fcbcb190 Mon Sep 17 00:00:00 2001
From: jasquat
Date: Wed, 30 Nov 2022 07:24:24 -0500
Subject: [PATCH 15/40] removed file named ':'

---
 spiffworkflow-backend/: | 1908 ---------------------------------------
 1 file changed, 1908 deletions(-)
 delete mode 100644 spiffworkflow-backend/:

diff --git a/spiffworkflow-backend/: b/spiffworkflow-backend/:
deleted file mode 100644
index 5516fdae..00000000
--- a/spiffworkflow-backend/:
+++ /dev/null
@@ -1,1908 +0,0 @@
-"""APIs for dealing with process groups, process models, and process instances."""
-import json
-import random
-import string
-import uuid
-from typing import Any
-from typing import Dict
-from typing import Optional
-from typing import TypedDict
-from typing import Union
-
-import connexion  # type: ignore
-import flask.wrappers
-import jinja2
-from spiffworkflow_backend.models.process_instance_metadata import ProcessInstanceMetadataModel
-import werkzeug
-from flask import Blueprint
-from flask import current_app
-from flask import g
-from flask import jsonify
-from flask import make_response
-from flask import redirect
-from flask import request -from flask.wrappers import Response -from flask_bpmn.api.api_error import ApiError -from flask_bpmn.models.db import db -from lxml import etree # type: ignore -from lxml.builder import ElementMaker # type: ignore -from SpiffWorkflow.task import Task as SpiffTask # type: ignore -from SpiffWorkflow.task import TaskState -from sqlalchemy import and_ -from sqlalchemy import asc -from sqlalchemy import desc -from sqlalchemy.orm import aliased, joinedload - -from spiffworkflow_backend.exceptions.process_entity_not_found_error import ( - ProcessEntityNotFoundError, -) -from spiffworkflow_backend.models.active_task import ActiveTaskModel -from spiffworkflow_backend.models.active_task_user import ActiveTaskUserModel -from spiffworkflow_backend.models.file import FileSchema -from spiffworkflow_backend.models.group import GroupModel -from spiffworkflow_backend.models.message_correlation import MessageCorrelationModel -from spiffworkflow_backend.models.message_instance import MessageInstanceModel -from spiffworkflow_backend.models.message_model import MessageModel -from spiffworkflow_backend.models.message_triggerable_process_model import ( - MessageTriggerableProcessModel, -) -from spiffworkflow_backend.models.principal import PrincipalModel -from spiffworkflow_backend.models.process_group import ProcessGroup -from spiffworkflow_backend.models.process_group import ProcessGroupSchema -from spiffworkflow_backend.models.process_instance import ProcessInstanceApiSchema -from spiffworkflow_backend.models.process_instance import ProcessInstanceModel -from spiffworkflow_backend.models.process_instance import ProcessInstanceModelSchema -from spiffworkflow_backend.models.process_instance import ProcessInstanceStatus -from spiffworkflow_backend.models.process_instance_report import ( - ProcessInstanceReportModel, -) -from spiffworkflow_backend.models.process_model import ProcessModelInfo -from spiffworkflow_backend.models.process_model import ProcessModelInfoSchema -from spiffworkflow_backend.models.secret_model import SecretModel -from spiffworkflow_backend.models.secret_model import SecretModelSchema -from spiffworkflow_backend.models.spec_reference import SpecReferenceCache -from spiffworkflow_backend.models.spec_reference import SpecReferenceSchema -from spiffworkflow_backend.models.spiff_logging import SpiffLoggingModel -from spiffworkflow_backend.models.spiff_step_details import SpiffStepDetailsModel -from spiffworkflow_backend.models.user import UserModel -from spiffworkflow_backend.models.user_group_assignment import UserGroupAssignmentModel -from spiffworkflow_backend.routes.user import verify_token -from spiffworkflow_backend.services.authorization_service import AuthorizationService -from spiffworkflow_backend.services.error_handling_service import ErrorHandlingService -from spiffworkflow_backend.services.git_service import GitService -from spiffworkflow_backend.services.message_service import MessageService -from spiffworkflow_backend.services.process_instance_processor import ( - ProcessInstanceProcessor, -) -from spiffworkflow_backend.services.process_instance_report_service import ( - ProcessInstanceReportFilter, -) -from spiffworkflow_backend.services.process_instance_report_service import ( - ProcessInstanceReportService, -) -from spiffworkflow_backend.services.process_instance_service import ( - ProcessInstanceService, -) -from spiffworkflow_backend.services.process_model_service import ProcessModelService -from 
spiffworkflow_backend.services.script_unit_test_runner import ScriptUnitTestRunner -from spiffworkflow_backend.services.secret_service import SecretService -from spiffworkflow_backend.services.service_task_service import ServiceTaskService -from spiffworkflow_backend.services.spec_file_service import SpecFileService -from spiffworkflow_backend.services.user_service import UserService - - -class TaskDataSelectOption(TypedDict): - """TaskDataSelectOption.""" - - value: str - label: str - - -class ReactJsonSchemaSelectOption(TypedDict): - """ReactJsonSchemaSelectOption.""" - - type: str - title: str - enum: list[str] - - -process_api_blueprint = Blueprint("process_api", __name__) - - -def status() -> flask.wrappers.Response: - """Status.""" - ProcessInstanceModel.query.filter().first() - return Response(json.dumps({"ok": True}), status=200, mimetype="application/json") - - -def permissions_check(body: Dict[str, Dict[str, list[str]]]) -> flask.wrappers.Response: - """Permissions_check.""" - if "requests_to_check" not in body: - raise ( - ApiError( - error_code="could_not_requests_to_check", - message="The key 'requests_to_check' not found at root of request body.", - status_code=400, - ) - ) - - response_dict: dict[str, dict[str, bool]] = {} - requests_to_check = body["requests_to_check"] - - for target_uri, http_methods in requests_to_check.items(): - if target_uri not in response_dict: - response_dict[target_uri] = {} - - for http_method in http_methods: - permission_string = AuthorizationService.get_permission_from_http_method( - http_method - ) - if permission_string: - has_permission = AuthorizationService.user_has_permission( - user=g.user, - permission=permission_string, - target_uri=target_uri, - ) - response_dict[target_uri][http_method] = has_permission - - return make_response(jsonify({"results": response_dict}), 200) - - -def modify_process_model_id(process_model_id: str) -> str: - """Modify_process_model_id.""" - return process_model_id.replace("/", ":") - - -def un_modify_modified_process_model_id(modified_process_model_id: str) -> str: - """Un_modify_modified_process_model_id.""" - return modified_process_model_id.replace(":", "/") - - -def process_group_add(body: dict) -> flask.wrappers.Response: - """Add_process_group.""" - process_group = ProcessGroup(**body) - ProcessModelService.add_process_group(process_group) - return make_response(jsonify(process_group), 201) - - -def process_group_delete(modified_process_group_id: str) -> flask.wrappers.Response: - """Process_group_delete.""" - process_group_id = un_modify_modified_process_model_id(modified_process_group_id) - ProcessModelService().process_group_delete(process_group_id) - return Response(json.dumps({"ok": True}), status=200, mimetype="application/json") - - -def process_group_update( - modified_process_group_id: str, body: dict -) -> flask.wrappers.Response: - """Process Group Update.""" - body_include_list = ["display_name", "description"] - body_filtered = { - include_item: body[include_item] - for include_item in body_include_list - if include_item in body - } - - process_group_id = un_modify_modified_process_model_id(modified_process_group_id) - process_group = ProcessGroup(id=process_group_id, **body_filtered) - ProcessModelService.update_process_group(process_group) - return make_response(jsonify(process_group), 200) - - -def process_group_list( - process_group_identifier: Optional[str] = None, page: int = 1, per_page: int = 100 -) -> flask.wrappers.Response: - """Process_group_list.""" - if 
process_group_identifier is not None: - process_groups = ProcessModelService.get_process_groups( - process_group_identifier - ) - else: - process_groups = ProcessModelService.get_process_groups() - batch = ProcessModelService().get_batch( - items=process_groups, page=page, per_page=per_page - ) - pages = len(process_groups) // per_page - remainder = len(process_groups) % per_page - if remainder > 0: - pages += 1 - - response_json = { - "results": ProcessGroupSchema(many=True).dump(batch), - "pagination": { - "count": len(batch), - "total": len(process_groups), - "pages": pages, - }, - } - return Response(json.dumps(response_json), status=200, mimetype="application/json") - - -def process_group_show( - modified_process_group_id: str, -) -> Any: - """Process_group_show.""" - process_group_id = un_modify_modified_process_model_id(modified_process_group_id) - try: - process_group = ProcessModelService.get_process_group(process_group_id) - except ProcessEntityNotFoundError as exception: - raise ( - ApiError( - error_code="process_group_cannot_be_found", - message=f"Process group cannot be found: {process_group_id}", - status_code=400, - ) - ) from exception - - process_group.parent_groups = ProcessModelService.get_parent_group_array( - process_group.id - ) - return make_response(jsonify(process_group), 200) - - -def process_group_move( - modified_process_group_identifier: str, new_location: str -) -> flask.wrappers.Response: - """Process_group_move.""" - original_process_group_id = un_modify_modified_process_model_id( - modified_process_group_identifier - ) - new_process_group = ProcessModelService().process_group_move( - original_process_group_id, new_location - ) - return make_response(jsonify(new_process_group), 201) - - -def process_model_create( - modified_process_group_id: str, body: Dict[str, Union[str, bool, int]] -) -> flask.wrappers.Response: - """Process_model_create.""" - process_model_info = ProcessModelInfoSchema().load(body) - if modified_process_group_id is None: - raise ApiError( - error_code="process_group_id_not_specified", - message="Process Model could not be created when process_group_id path param is unspecified", - status_code=400, - ) - if process_model_info is None: - raise ApiError( - error_code="process_model_could_not_be_created", - message=f"Process Model could not be created from given body: {body}", - status_code=400, - ) - - unmodified_process_group_id = un_modify_modified_process_model_id( - modified_process_group_id - ) - process_group = ProcessModelService.get_process_group(unmodified_process_group_id) - if process_group is None: - raise ApiError( - error_code="process_model_could_not_be_created", - message=f"Process Model could not be created from given body because Process Group could not be found: {body}", - status_code=400, - ) - - ProcessModelService.add_process_model(process_model_info) - return Response( - json.dumps(ProcessModelInfoSchema().dump(process_model_info)), - status=201, - mimetype="application/json", - ) - - -def process_model_delete( - modified_process_model_identifier: str, -) -> flask.wrappers.Response: - """Process_model_delete.""" - process_model_identifier = modified_process_model_identifier.replace(":", "/") - # process_model_identifier = f"{process_group_id}/{process_model_id}" - ProcessModelService().process_model_delete(process_model_identifier) - return Response(json.dumps({"ok": True}), status=200, mimetype="application/json") - - -def process_model_update( - modified_process_model_identifier: str, body: Dict[str, Union[str, 
bool, int]] -) -> Any: - """Process_model_update.""" - process_model_identifier = modified_process_model_identifier.replace(":", "/") - body_include_list = [ - "display_name", - "primary_file_name", - "primary_process_id", - "description", - ] - body_filtered = { - include_item: body[include_item] - for include_item in body_include_list - if include_item in body - } - - # process_model_identifier = f"{process_group_id}/{process_model_id}" - process_model = get_process_model(process_model_identifier) - ProcessModelService.update_process_model(process_model, body_filtered) - return ProcessModelInfoSchema().dump(process_model) - - -def process_model_show(modified_process_model_identifier: str) -> Any: - """Process_model_show.""" - process_model_identifier = modified_process_model_identifier.replace(":", "/") - # process_model_identifier = f"{process_group_id}/{process_model_id}" - process_model = get_process_model(process_model_identifier) - # TODO: Temporary. Should not need the next line once models have correct ids - # process_model.id = process_model_identifier - files = sorted(SpecFileService.get_files(process_model)) - process_model.files = files - for file in process_model.files: - file.references = SpecFileService.get_references_for_file(file, process_model) - - process_model.parent_groups = ProcessModelService.get_parent_group_array( - process_model.id - ) - return make_response(jsonify(process_model), 200) - - -def process_model_move( - modified_process_model_identifier: str, new_location: str -) -> flask.wrappers.Response: - """Process_model_move.""" - original_process_model_id = un_modify_modified_process_model_id( - modified_process_model_identifier - ) - new_process_model = ProcessModelService().process_model_move( - original_process_model_id, new_location - ) - return make_response(jsonify(new_process_model), 201) - - -def process_model_list( - process_group_identifier: Optional[str] = None, - recursive: Optional[bool] = False, - filter_runnable_by_user: Optional[bool] = False, - page: int = 1, - per_page: int = 100, -) -> flask.wrappers.Response: - """Process model list!""" - process_models = ProcessModelService.get_process_models( - process_group_id=process_group_identifier, - recursive=recursive, - filter_runnable_by_user=filter_runnable_by_user, - ) - batch = ProcessModelService().get_batch( - process_models, page=page, per_page=per_page - ) - pages = len(process_models) // per_page - remainder = len(process_models) % per_page - if remainder > 0: - pages += 1 - response_json = { - "results": ProcessModelInfoSchema(many=True).dump(batch), - "pagination": { - "count": len(batch), - "total": len(process_models), - "pages": pages, - }, - } - return Response(json.dumps(response_json), status=200, mimetype="application/json") - - -def process_list() -> Any: - """Returns a list of all known processes. - - This includes processes that are not the - primary process - helpful for finding possible call activities. 
- """ - references = SpecReferenceCache.query.filter_by(type="process").all() - return SpecReferenceSchema(many=True).dump(references) - - -def get_file(modified_process_model_id: str, file_name: str) -> Any: - """Get_file.""" - process_model_identifier = modified_process_model_id.replace(":", "/") - process_model = get_process_model(process_model_identifier) - files = SpecFileService.get_files(process_model, file_name) - if len(files) == 0: - raise ApiError( - error_code="unknown file", - message=f"No information exists for file {file_name}" - f" it does not exist in workflow {process_model_identifier}.", - status_code=404, - ) - - file = files[0] - file_contents = SpecFileService.get_data(process_model, file.name) - file.file_contents = file_contents - file.process_model_id = process_model.id - # file.process_group_id = process_model.process_group_id - return FileSchema().dump(file) - - -def process_model_file_update( - modified_process_model_id: str, file_name: str -) -> flask.wrappers.Response: - """Process_model_file_update.""" - process_model_identifier = modified_process_model_id.replace(":", "/") - # process_model_identifier = f"{process_group_id}/{process_model_id}" - process_model = get_process_model(process_model_identifier) - - request_file = get_file_from_request() - request_file_contents = request_file.stream.read() - if not request_file_contents: - raise ApiError( - error_code="file_contents_empty", - message="Given request file does not have any content", - status_code=400, - ) - - SpecFileService.update_file(process_model, file_name, request_file_contents) - - if current_app.config["GIT_COMMIT_ON_SAVE"]: - git_output = GitService.commit( - message=f"User: {g.user.username} clicked save for {process_model_identifier}/{file_name}" - ) - current_app.logger.info(f"git output: {git_output}") - else: - current_app.logger.info("Git commit on save is disabled") - - return Response(json.dumps({"ok": True}), status=200, mimetype="application/json") - - -def process_model_file_delete( - modified_process_model_id: str, file_name: str -) -> flask.wrappers.Response: - """Process_model_file_delete.""" - process_model_identifier = modified_process_model_id.replace(":", "/") - process_model = get_process_model(process_model_identifier) - try: - SpecFileService.delete_file(process_model, file_name) - except FileNotFoundError as exception: - raise ( - ApiError( - error_code="process_model_file_cannot_be_found", - message=f"Process model file cannot be found: {file_name}", - status_code=400, - ) - ) from exception - - return Response(json.dumps({"ok": True}), status=200, mimetype="application/json") - - -def add_file(modified_process_model_id: str) -> flask.wrappers.Response: - """Add_file.""" - process_model_identifier = modified_process_model_id.replace(":", "/") - process_model = get_process_model(process_model_identifier) - request_file = get_file_from_request() - if not request_file.filename: - raise ApiError( - error_code="could_not_get_filename", - message="Could not get filename from request", - status_code=400, - ) - - file = SpecFileService.add_file( - process_model, request_file.filename, request_file.stream.read() - ) - file_contents = SpecFileService.get_data(process_model, file.name) - file.file_contents = file_contents - file.process_model_id = process_model.id - return Response( - json.dumps(FileSchema().dump(file)), status=201, mimetype="application/json" - ) - - -def process_instance_create(modified_process_model_id: str) -> flask.wrappers.Response: - 
"""Create_process_instance.""" - process_model_identifier = un_modify_modified_process_model_id( - modified_process_model_id - ) - process_instance = ( - ProcessInstanceService.create_process_instance_from_process_model_identifier( - process_model_identifier, g.user - ) - ) - return Response( - json.dumps(ProcessInstanceModelSchema().dump(process_instance)), - status=201, - mimetype="application/json", - ) - - -def process_instance_run( - modified_process_model_identifier: str, - process_instance_id: int, - do_engine_steps: bool = True, -) -> flask.wrappers.Response: - """Process_instance_run.""" - process_instance = ProcessInstanceService().get_process_instance( - process_instance_id - ) - processor = ProcessInstanceProcessor(process_instance) - - if do_engine_steps: - try: - processor.do_engine_steps() - except ApiError as e: - ErrorHandlingService().handle_error(processor, e) - raise e - except Exception as e: - ErrorHandlingService().handle_error(processor, e) - task = processor.bpmn_process_instance.last_task - raise ApiError.from_task( - error_code="unknown_exception", - message=f"An unknown error occurred. Original error: {e}", - status_code=400, - task=task, - ) from e - processor.save() - - if not current_app.config["RUN_BACKGROUND_SCHEDULER"]: - MessageService.process_message_instances() - - process_instance_api = ProcessInstanceService.processor_to_process_instance_api( - processor - ) - process_instance_data = processor.get_data() - process_instance_metadata = ProcessInstanceApiSchema().dump(process_instance_api) - process_instance_metadata["data"] = process_instance_data - return Response( - json.dumps(process_instance_metadata), status=200, mimetype="application/json" - ) - - -def process_instance_terminate( - process_instance_id: int, -) -> flask.wrappers.Response: - """Process_instance_run.""" - process_instance = ProcessInstanceService().get_process_instance( - process_instance_id - ) - processor = ProcessInstanceProcessor(process_instance) - processor.terminate() - return Response(json.dumps({"ok": True}), status=200, mimetype="application/json") - - -def process_instance_suspend( - process_instance_id: int, -) -> flask.wrappers.Response: - """Process_instance_suspend.""" - process_instance = ProcessInstanceService().get_process_instance( - process_instance_id - ) - processor = ProcessInstanceProcessor(process_instance) - processor.suspend() - return Response(json.dumps({"ok": True}), status=200, mimetype="application/json") - - -def process_instance_resume( - process_instance_id: int, -) -> flask.wrappers.Response: - """Process_instance_resume.""" - process_instance = ProcessInstanceService().get_process_instance( - process_instance_id - ) - processor = ProcessInstanceProcessor(process_instance) - processor.resume() - return Response(json.dumps({"ok": True}), status=200, mimetype="application/json") - - -def process_instance_log_list( - process_instance_id: int, - page: int = 1, - per_page: int = 100, -) -> flask.wrappers.Response: - """Process_instance_log_list.""" - # to make sure the process instance exists - process_instance = find_process_instance_by_id_or_raise(process_instance_id) - - logs = ( - SpiffLoggingModel.query.filter( - SpiffLoggingModel.process_instance_id == process_instance.id - ) - .order_by(SpiffLoggingModel.timestamp.desc()) # type: ignore - .join( - UserModel, UserModel.id == SpiffLoggingModel.current_user_id, isouter=True - ) # isouter since if we don't have a user, we still want the log - .add_columns( - UserModel.username, - ) - 
.paginate(page=page, per_page=per_page, error_out=False) - ) - - response_json = { - "results": logs.items, - "pagination": { - "count": len(logs.items), - "total": logs.total, - "pages": logs.pages, - }, - } - - return make_response(jsonify(response_json), 200) - - -def message_instance_list( - process_instance_id: Optional[int] = None, - page: int = 1, - per_page: int = 100, -) -> flask.wrappers.Response: - """Message_instance_list.""" - # to make sure the process instance exists - message_instances_query = MessageInstanceModel.query - - if process_instance_id: - message_instances_query = message_instances_query.filter_by( - process_instance_id=process_instance_id - ) - - message_instances = ( - message_instances_query.order_by( - MessageInstanceModel.created_at_in_seconds.desc(), # type: ignore - MessageInstanceModel.id.desc(), # type: ignore - ) - .join(MessageModel, MessageModel.id == MessageInstanceModel.message_model_id) - .join(ProcessInstanceModel) - .add_columns( - MessageModel.identifier.label("message_identifier"), - ProcessInstanceModel.process_model_identifier, - ) - .paginate(page=page, per_page=per_page, error_out=False) - ) - - for message_instance in message_instances: - message_correlations: dict = {} - for ( - mcmi - ) in ( - message_instance.MessageInstanceModel.message_correlations_message_instances - ): - mc = MessageCorrelationModel.query.filter_by( - id=mcmi.message_correlation_id - ).all() - for m in mc: - if m.name not in message_correlations: - message_correlations[m.name] = {} - message_correlations[m.name][ - m.message_correlation_property.identifier - ] = m.value - message_instance.MessageInstanceModel.message_correlations = ( - message_correlations - ) - - response_json = { - "results": message_instances.items, - "pagination": { - "count": len(message_instances.items), - "total": message_instances.total, - "pages": message_instances.pages, - }, - } - - return make_response(jsonify(response_json), 200) - - -# body: { -# payload: dict, -# process_instance_id: Optional[int], -# } -def message_start( - message_identifier: str, - body: Dict[str, Any], -) -> flask.wrappers.Response: - """Message_start.""" - message_model = MessageModel.query.filter_by(identifier=message_identifier).first() - if message_model is None: - raise ( - ApiError( - error_code="unknown_message", - message=f"Could not find message with identifier: {message_identifier}", - status_code=404, - ) - ) - - if "payload" not in body: - raise ( - ApiError( - error_code="missing_payload", - message="Body is missing payload.", - status_code=400, - ) - ) - - process_instance = None - if "process_instance_id" in body: - # to make sure we have a valid process_instance_id - process_instance = find_process_instance_by_id_or_raise( - body["process_instance_id"] - ) - - message_instance = MessageInstanceModel.query.filter_by( - process_instance_id=process_instance.id, - message_model_id=message_model.id, - message_type="receive", - status="ready", - ).first() - if message_instance is None: - raise ( - ApiError( - error_code="cannot_find_waiting_message", - message=f"Could not find waiting message for identifier {message_identifier} " - f"and process instance {process_instance.id}", - status_code=400, - ) - ) - MessageService.process_message_receive( - message_instance, message_model.name, body["payload"] - ) - - else: - message_triggerable_process_model = ( - MessageTriggerableProcessModel.query.filter_by( - message_model_id=message_model.id - ).first() - ) - - if message_triggerable_process_model is None: - 
raise ( - ApiError( - error_code="cannot_start_message", - message=f"Message with identifier cannot be start with message: {message_identifier}", - status_code=400, - ) - ) - - process_instance = MessageService.process_message_triggerable_process_model( - message_triggerable_process_model, - message_model.name, - body["payload"], - g.user, - ) - - return Response( - json.dumps(ProcessInstanceModelSchema().dump(process_instance)), - status=200, - mimetype="application/json", - ) - - -def process_instance_list( - process_model_identifier: Optional[str] = None, - page: int = 1, - per_page: int = 100, - start_from: Optional[int] = None, - start_to: Optional[int] = None, - end_from: Optional[int] = None, - end_to: Optional[int] = None, - process_status: Optional[str] = None, - initiated_by_me: Optional[bool] = None, - with_tasks_completed_by_me: Optional[bool] = None, - with_tasks_completed_by_my_group: Optional[bool] = None, - user_filter: Optional[bool] = False, - report_identifier: Optional[str] = None, -) -> flask.wrappers.Response: - """Process_instance_list.""" - process_instance_report = ProcessInstanceReportService.report_with_identifier( - g.user, report_identifier - ) - - if user_filter: - report_filter = ProcessInstanceReportFilter( - process_model_identifier, - start_from, - start_to, - end_from, - end_to, - process_status.split(",") if process_status else None, - initiated_by_me, - with_tasks_completed_by_me, - with_tasks_completed_by_my_group, - ) - else: - report_filter = ( - ProcessInstanceReportService.filter_from_metadata_with_overrides( - process_instance_report, - process_model_identifier, - start_from, - start_to, - end_from, - end_to, - process_status, - initiated_by_me, - with_tasks_completed_by_me, - with_tasks_completed_by_my_group, - ) - ) - - # process_model_identifier = un_modify_modified_process_model_id(modified_process_model_identifier) - process_instance_query = ProcessInstanceModel.query - # Always join that hot user table for good performance at serialization time. - process_instance_query = process_instance_query.options( - joinedload(ProcessInstanceModel.process_initiator) - ) - - if report_filter.process_model_identifier is not None: - process_model = get_process_model( - f"{report_filter.process_model_identifier}", - ) - - process_instance_query = process_instance_query.filter_by( - process_model_identifier=process_model.id - ) - - # this can never happen. obviously the class has the columns it defines. this is just to appease mypy. 
- if ( - ProcessInstanceModel.start_in_seconds is None - or ProcessInstanceModel.end_in_seconds is None - ): - raise ( - ApiError( - error_code="unexpected_condition", - message="Something went very wrong", - status_code=500, - ) - ) - - if report_filter.start_from is not None: - process_instance_query = process_instance_query.filter( - ProcessInstanceModel.start_in_seconds >= report_filter.start_from - ) - if report_filter.start_to is not None: - process_instance_query = process_instance_query.filter( - ProcessInstanceModel.start_in_seconds <= report_filter.start_to - ) - if report_filter.end_from is not None: - process_instance_query = process_instance_query.filter( - ProcessInstanceModel.end_in_seconds >= report_filter.end_from - ) - if report_filter.end_to is not None: - process_instance_query = process_instance_query.filter( - ProcessInstanceModel.end_in_seconds <= report_filter.end_to - ) - if report_filter.process_status is not None: - process_instance_query = process_instance_query.filter( - ProcessInstanceModel.status.in_(report_filter.process_status) # type: ignore - ) - - if report_filter.initiated_by_me is True: - process_instance_query = process_instance_query.filter( - ProcessInstanceModel.status.in_(["complete", "error", "terminated"]) # type: ignore - ) - process_instance_query = process_instance_query.filter_by( - process_initiator=g.user - ) - - # TODO: not sure if this is exactly what is wanted - if report_filter.with_tasks_completed_by_me is True: - process_instance_query = process_instance_query.filter( - ProcessInstanceModel.status.in_(["complete", "error", "terminated"]) # type: ignore - ) - # process_instance_query = process_instance_query.join(UserModel, UserModel.id == ProcessInstanceModel.process_initiator_id) - # process_instance_query = process_instance_query.add_columns(UserModel.username) - # search for process_instance.UserModel.username in this file for more details about why adding columns is annoying. 
- - process_instance_query = process_instance_query.filter( - ProcessInstanceModel.process_initiator_id != g.user.id - ) - process_instance_query = process_instance_query.join( - SpiffStepDetailsModel, - ProcessInstanceModel.id == SpiffStepDetailsModel.process_instance_id, - ) - process_instance_query = process_instance_query.join( - SpiffLoggingModel, - ProcessInstanceModel.id == SpiffLoggingModel.process_instance_id, - ) - process_instance_query = process_instance_query.filter( - SpiffLoggingModel.message.contains("COMPLETED") # type: ignore - ) - process_instance_query = process_instance_query.filter( - SpiffLoggingModel.spiff_step == SpiffStepDetailsModel.spiff_step - ) - process_instance_query = process_instance_query.filter( - SpiffStepDetailsModel.completed_by_user_id == g.user.id - ) - - if report_filter.with_tasks_completed_by_my_group is True: - process_instance_query = process_instance_query.filter( - ProcessInstanceModel.status.in_(["complete", "error", "terminated"]) # type: ignore - ) - process_instance_query = process_instance_query.join( - SpiffStepDetailsModel, - ProcessInstanceModel.id == SpiffStepDetailsModel.process_instance_id, - ) - process_instance_query = process_instance_query.join( - SpiffLoggingModel, - ProcessInstanceModel.id == SpiffLoggingModel.process_instance_id, - ) - process_instance_query = process_instance_query.filter( - SpiffLoggingModel.message.contains("COMPLETED") # type: ignore - ) - process_instance_query = process_instance_query.filter( - SpiffLoggingModel.spiff_step == SpiffStepDetailsModel.spiff_step - ) - process_instance_query = process_instance_query.join( - GroupModel, - GroupModel.id == SpiffStepDetailsModel.lane_assignment_id, - ) - process_instance_query = process_instance_query.join( - UserGroupAssignmentModel, - UserGroupAssignmentModel.group_id == GroupModel.id, - ) - process_instance_query = process_instance_query.filter( - UserGroupAssignmentModel.user_id == g.user.id - ) - - # userSkillF = aliased(UserSkill) - # userSkillI = aliased(UserSkill) - - import pdb; pdb.set_trace() - for column in process_instance_report.report_metadata['columns']: - print(f"column: {column['accessor']}") - # process_instance_query = process_instance_query.outerjoin(ProcessInstanceMetadataModel, ProcessInstanceModel.id == ProcessInstanceMetadataModel.process_instance_id, ProcessInstanceMetadataModel.key == column['accessor']) - instance_metadata_alias = alias(ProcessInstanceMetadataModel) - process_instance_query = ( - process_instance_query.outerjoin(instance_metadata_alias, ProcessInstanceModel.id == instance_metadata_alias.process_instance_id) - .add_column(ProcessInstanceMetadataModel.value.label(column['accessor'])) - ) - import pdb; pdb.set_trace() - - process_instances = ( - process_instance_query.group_by(ProcessInstanceModel.id) - .order_by( - ProcessInstanceModel.start_in_seconds.desc(), ProcessInstanceModel.id.desc() # type: ignore - ) - .paginate(page=page, per_page=per_page, error_out=False) - ) - import pdb; pdb.set_trace() - - # def awesome_serialize(process_instance) - # dict_thing = process_instance.serialize - # - # # add columns since we have access to columns here - # dict_thing['awesome'] = 'awesome' - # - # return dict_thing - - results = list( - map( - ProcessInstanceService.serialize_flat_with_task_data, - process_instances.items, - ) - ) - report_metadata = process_instance_report.report_metadata - - response_json = { - "report_identifier": process_instance_report.identifier, - "report_metadata": report_metadata, - "results": 
results, - "filters": report_filter.to_dict(), - "pagination": { - "count": len(results), - "total": process_instances.total, - "pages": process_instances.pages, - }, - } - - return make_response(jsonify(response_json), 200) - - -def process_instance_show( - modified_process_model_identifier: str, process_instance_id: int -) -> flask.wrappers.Response: - """Create_process_instance.""" - process_model_identifier = modified_process_model_identifier.replace(":", "/") - process_instance = find_process_instance_by_id_or_raise(process_instance_id) - current_version_control_revision = GitService.get_current_revision() - process_model = get_process_model(process_model_identifier) - - if process_model.primary_file_name: - if ( - process_instance.bpmn_version_control_identifier - == current_version_control_revision - ): - bpmn_xml_file_contents = SpecFileService.get_data( - process_model, process_model.primary_file_name - ) - else: - bpmn_xml_file_contents = GitService.get_instance_file_contents_for_revision( - process_model, process_instance.bpmn_version_control_identifier - ) - process_instance.bpmn_xml_file_contents = bpmn_xml_file_contents - - return make_response(jsonify(process_instance), 200) - - -def process_instance_delete(process_instance_id: int) -> flask.wrappers.Response: - """Create_process_instance.""" - process_instance = find_process_instance_by_id_or_raise(process_instance_id) - - # (Pdb) db.session.delete - # > - db.session.query(SpiffLoggingModel).filter_by( - process_instance_id=process_instance.id - ).delete() - db.session.query(SpiffStepDetailsModel).filter_by( - process_instance_id=process_instance.id - ).delete() - db.session.delete(process_instance) - db.session.commit() - return Response(json.dumps({"ok": True}), status=200, mimetype="application/json") - - -def process_instance_report_list( - page: int = 1, per_page: int = 100 -) -> flask.wrappers.Response: - """Process_instance_report_list.""" - process_instance_reports = ProcessInstanceReportModel.query.filter_by( - created_by_id=g.user.id, - ).all() - - return make_response(jsonify(process_instance_reports), 200) - - -def process_instance_report_create(body: Dict[str, Any]) -> flask.wrappers.Response: - """Process_instance_report_create.""" - ProcessInstanceReportModel.create_report( - identifier=body["identifier"], - user=g.user, - report_metadata=body["report_metadata"], - ) - - return Response(json.dumps({"ok": True}), status=200, mimetype="application/json") - - -def process_instance_report_update( - report_identifier: str, - body: Dict[str, Any], -) -> flask.wrappers.Response: - """Process_instance_report_create.""" - process_instance_report = ProcessInstanceReportModel.query.filter_by( - identifier=report_identifier, - created_by_id=g.user.id, - ).first() - if process_instance_report is None: - raise ApiError( - error_code="unknown_process_instance_report", - message="Unknown process instance report", - status_code=404, - ) - - process_instance_report.report_metadata = body["report_metadata"] - db.session.commit() - - return Response(json.dumps({"ok": True}), status=200, mimetype="application/json") - - -def process_instance_report_delete( - report_identifier: str, -) -> flask.wrappers.Response: - """Process_instance_report_create.""" - process_instance_report = ProcessInstanceReportModel.query.filter_by( - identifier=report_identifier, - created_by_id=g.user.id, - ).first() - if process_instance_report is None: - raise ApiError( - error_code="unknown_process_instance_report", - message="Unknown process instance 
report", - status_code=404, - ) - - db.session.delete(process_instance_report) - db.session.commit() - - return Response(json.dumps({"ok": True}), status=200, mimetype="application/json") - - -def service_tasks_show() -> flask.wrappers.Response: - """Service_tasks_show.""" - available_connectors = ServiceTaskService.available_connectors() - print(available_connectors) - - return Response( - json.dumps(available_connectors), status=200, mimetype="application/json" - ) - - -def authentication_list() -> flask.wrappers.Response: - """Authentication_list.""" - available_authentications = ServiceTaskService.authentication_list() - response_json = { - "results": available_authentications, - "connector_proxy_base_url": current_app.config["CONNECTOR_PROXY_URL"], - "redirect_url": f"{current_app.config['SPIFFWORKFLOW_BACKEND_URL']}/v1.0/authentication_callback", - } - - return Response(json.dumps(response_json), status=200, mimetype="application/json") - - -def authentication_callback( - service: str, - auth_method: str, -) -> werkzeug.wrappers.Response: - """Authentication_callback.""" - verify_token(request.args.get("token"), force_run=True) - response = request.args["response"] - SecretService().update_secret( - f"{service}/{auth_method}", response, g.user.id, create_if_not_exists=True - ) - return redirect( - f"{current_app.config['SPIFFWORKFLOW_FRONTEND_URL']}/admin/configuration" - ) - - -def process_instance_report_show( - report_identifier: str, - page: int = 1, - per_page: int = 100, -) -> flask.wrappers.Response: - """Process_instance_list.""" - process_instances = ProcessInstanceModel.query.order_by( # .filter_by(process_model_identifier=process_model.id) - ProcessInstanceModel.start_in_seconds.desc(), ProcessInstanceModel.id.desc() # type: ignore - ).paginate( - page=page, per_page=per_page, error_out=False - ) - - process_instance_report = ProcessInstanceReportModel.query.filter_by( - identifier=report_identifier, - created_by_id=g.user.id, - ).first() - if process_instance_report is None: - raise ApiError( - error_code="unknown_process_instance_report", - message="Unknown process instance report", - status_code=404, - ) - - substitution_variables = request.args.to_dict() - result_dict = process_instance_report.generate_report( - process_instances.items, substitution_variables - ) - - # update this if we go back to a database query instead of filtering in memory - result_dict["pagination"] = { - "count": len(result_dict["results"]), - "total": len(result_dict["results"]), - "pages": 1, - } - - return Response(json.dumps(result_dict), status=200, mimetype="application/json") - - -# TODO: see comment for before_request -# @process_api_blueprint.route("/v1.0/tasks", methods=["GET"]) -def task_list_my_tasks(page: int = 1, per_page: int = 100) -> flask.wrappers.Response: - """Task_list_my_tasks.""" - principal = find_principal_or_raise() - active_tasks = ( - ActiveTaskModel.query.order_by(desc(ActiveTaskModel.id)) # type: ignore - .join(ProcessInstanceModel) - .join(ActiveTaskUserModel) - .filter_by(user_id=principal.user_id) - # just need this add_columns to add the process_model_identifier. Then add everything back that was removed. 
- .add_columns( - ProcessInstanceModel.process_model_identifier, - ProcessInstanceModel.process_model_display_name, - ProcessInstanceModel.status, - ActiveTaskModel.task_name, - ActiveTaskModel.task_title, - ActiveTaskModel.task_type, - ActiveTaskModel.task_status, - ActiveTaskModel.task_id, - ActiveTaskModel.id, - ActiveTaskModel.process_model_display_name, - ActiveTaskModel.process_instance_id, - ) - .paginate(page=page, per_page=per_page, error_out=False) - ) - tasks = [ActiveTaskModel.to_task(active_task) for active_task in active_tasks.items] - - response_json = { - "results": tasks, - "pagination": { - "count": len(active_tasks.items), - "total": active_tasks.total, - "pages": active_tasks.pages, - }, - } - - return make_response(jsonify(response_json), 200) - - -def task_list_for_my_open_processes( - page: int = 1, per_page: int = 100 -) -> flask.wrappers.Response: - """Task_list_for_my_open_processes.""" - return get_tasks(page=page, per_page=per_page) - - -def task_list_for_me(page: int = 1, per_page: int = 100) -> flask.wrappers.Response: - """Task_list_for_processes_started_by_others.""" - return get_tasks( - processes_started_by_user=False, - has_lane_assignment_id=False, - page=page, - per_page=per_page, - ) - - -def task_list_for_my_groups( - page: int = 1, per_page: int = 100 -) -> flask.wrappers.Response: - """Task_list_for_processes_started_by_others.""" - return get_tasks(processes_started_by_user=False, page=page, per_page=per_page) - - -def get_tasks( - processes_started_by_user: bool = True, - has_lane_assignment_id: bool = True, - page: int = 1, - per_page: int = 100, -) -> flask.wrappers.Response: - """Get_tasks.""" - user_id = g.user.id - - # use distinct to ensure we only get one row per active task otherwise - # we can get back multiple for the same active task row which throws off - # pagination later on - # https://stackoverflow.com/q/34582014/6090676 - active_tasks_query = ( - ActiveTaskModel.query.distinct() - .outerjoin(GroupModel, GroupModel.id == ActiveTaskModel.lane_assignment_id) - .join(ProcessInstanceModel) - .join(UserModel, UserModel.id == ProcessInstanceModel.process_initiator_id) - ) - - if processes_started_by_user: - active_tasks_query = active_tasks_query.filter( - ProcessInstanceModel.process_initiator_id == user_id - ).outerjoin( - ActiveTaskUserModel, - and_( - ActiveTaskUserModel.user_id == user_id, - ActiveTaskModel.id == ActiveTaskUserModel.active_task_id, - ), - ) - else: - active_tasks_query = active_tasks_query.filter( - ProcessInstanceModel.process_initiator_id != user_id - ).join( - ActiveTaskUserModel, - and_( - ActiveTaskUserModel.user_id == user_id, - ActiveTaskModel.id == ActiveTaskUserModel.active_task_id, - ), - ) - if has_lane_assignment_id: - active_tasks_query = active_tasks_query.filter( - ActiveTaskModel.lane_assignment_id.is_not(None) # type: ignore - ) - else: - active_tasks_query = active_tasks_query.filter(ActiveTaskModel.lane_assignment_id.is_(None)) # type: ignore - - active_tasks = active_tasks_query.add_columns( - ProcessInstanceModel.process_model_identifier, - ProcessInstanceModel.status.label("process_instance_status"), # type: ignore - ProcessInstanceModel.updated_at_in_seconds, - ProcessInstanceModel.created_at_in_seconds, - UserModel.username, - GroupModel.identifier.label("group_identifier"), - ActiveTaskModel.task_name, - ActiveTaskModel.task_title, - ActiveTaskModel.process_model_display_name, - ActiveTaskModel.process_instance_id, - ActiveTaskUserModel.user_id.label("current_user_is_potential_owner"), - 
).paginate(page=page, per_page=per_page, error_out=False) - - response_json = { - "results": active_tasks.items, - "pagination": { - "count": len(active_tasks.items), - "total": active_tasks.total, - "pages": active_tasks.pages, - }, - } - return make_response(jsonify(response_json), 200) - - -def process_instance_task_list( - modified_process_model_id: str, - process_instance_id: int, - all_tasks: bool = False, - spiff_step: int = 0, -) -> flask.wrappers.Response: - """Process_instance_task_list.""" - process_instance = find_process_instance_by_id_or_raise(process_instance_id) - - if spiff_step > 0: - step_detail = ( - db.session.query(SpiffStepDetailsModel) - .filter( - SpiffStepDetailsModel.process_instance_id == process_instance.id, - SpiffStepDetailsModel.spiff_step == spiff_step, - ) - .first() - ) - if step_detail is not None and process_instance.bpmn_json is not None: - bpmn_json = json.loads(process_instance.bpmn_json) - bpmn_json["tasks"] = step_detail.task_json - process_instance.bpmn_json = json.dumps(bpmn_json) - - processor = ProcessInstanceProcessor(process_instance) - - spiff_tasks = None - if all_tasks: - spiff_tasks = processor.bpmn_process_instance.get_tasks(TaskState.ANY_MASK) - else: - spiff_tasks = processor.get_all_user_tasks() - - tasks = [] - for spiff_task in spiff_tasks: - task = ProcessInstanceService.spiff_task_to_api_task(spiff_task) - task.data = spiff_task.data - tasks.append(task) - - return make_response(jsonify(tasks), 200) - - -def task_show(process_instance_id: int, task_id: str) -> flask.wrappers.Response: - """Task_show.""" - process_instance = find_process_instance_by_id_or_raise(process_instance_id) - - if process_instance.status == ProcessInstanceStatus.suspended.value: - raise ApiError( - error_code="error_suspended", - message="The process instance is suspended", - status_code=400, - ) - - process_model = get_process_model( - process_instance.process_model_identifier, - ) - - form_schema_file_name = "" - form_ui_schema_file_name = "" - spiff_task = get_spiff_task_from_process_instance(task_id, process_instance) - extensions = spiff_task.task_spec.extensions - - if "properties" in extensions: - properties = extensions["properties"] - if "formJsonSchemaFilename" in properties: - form_schema_file_name = properties["formJsonSchemaFilename"] - if "formUiSchemaFilename" in properties: - form_ui_schema_file_name = properties["formUiSchemaFilename"] - task = ProcessInstanceService.spiff_task_to_api_task(spiff_task) - task.data = spiff_task.data - task.process_model_display_name = process_model.display_name - task.process_model_identifier = process_model.id - process_model_with_form = process_model - - if task.type == "User Task": - if not form_schema_file_name: - raise ( - ApiError( - error_code="missing_form_file", - message=f"Cannot find a form file for process_instance_id: {process_instance_id}, task_id: {task_id}", - status_code=400, - ) - ) - - form_contents = prepare_form_data( - form_schema_file_name, - task.data, - process_model_with_form, - ) - - try: - # form_contents is a str - form_dict = json.loads(form_contents) - except Exception as exception: - raise ( - ApiError( - error_code="error_loading_form", - message=f"Could not load form schema from: {form_schema_file_name}. 
Error was: {str(exception)}", - status_code=400, - ) - ) from exception - - if task.data: - _update_form_schema_with_task_data_as_needed(form_dict, task.data) - - if form_contents: - task.form_schema = form_dict - - if form_ui_schema_file_name: - ui_form_contents = prepare_form_data( - form_ui_schema_file_name, - task.data, - process_model_with_form, - ) - if ui_form_contents: - task.form_ui_schema = ui_form_contents - - if task.properties and task.data and "instructionsForEndUser" in task.properties: - print( - f"task.properties['instructionsForEndUser']: {task.properties['instructionsForEndUser']}" - ) - if task.properties["instructionsForEndUser"]: - task.properties["instructionsForEndUser"] = render_jinja_template( - task.properties["instructionsForEndUser"], task.data - ) - return make_response(jsonify(task), 200) - - -def task_submit( - process_instance_id: int, - task_id: str, - body: Dict[str, Any], - terminate_loop: bool = False, -) -> flask.wrappers.Response: - """Task_submit_user_data.""" - principal = find_principal_or_raise() - process_instance = find_process_instance_by_id_or_raise(process_instance_id) - - processor = ProcessInstanceProcessor(process_instance) - spiff_task = get_spiff_task_from_process_instance( - task_id, process_instance, processor=processor - ) - AuthorizationService.assert_user_can_complete_spiff_task( - process_instance.id, spiff_task, principal.user - ) - - if spiff_task.state != TaskState.READY: - raise ( - ApiError( - error_code="invalid_state", - message="You may not update a task unless it is in the READY state.", - status_code=400, - ) - ) - - if terminate_loop and spiff_task.is_looping(): - spiff_task.terminate_loop() - - active_task = ActiveTaskModel.query.filter_by( - process_instance_id=process_instance_id, task_id=task_id - ).first() - if active_task is None: - raise ( - ApiError( - error_code="no_active_task", - message="Cannot find an active task with task id '{task_id}' for process instance {process_instance_id}.", - status_code=500, - ) - ) - - ProcessInstanceService.complete_form_task( - processor=processor, - spiff_task=spiff_task, - data=body, - user=g.user, - active_task=active_task, - ) - - # If we need to update all tasks, then get the next ready task and if it a multi-instance with the same - # task spec, complete that form as well. 
- # if update_all: - # last_index = spiff_task.task_info()["mi_index"] - # next_task = processor.next_task() - # while next_task and next_task.task_info()["mi_index"] > last_index: - # __update_task(processor, next_task, form_data, user) - # last_index = next_task.task_info()["mi_index"] - # next_task = processor.next_task() - - next_active_task_assigned_to_me = ( - ActiveTaskModel.query.filter_by(process_instance_id=process_instance_id) - .order_by(asc(ActiveTaskModel.id)) # type: ignore - .join(ActiveTaskUserModel) - .filter_by(user_id=principal.user_id) - .first() - ) - if next_active_task_assigned_to_me: - return make_response( - jsonify(ActiveTaskModel.to_task(next_active_task_assigned_to_me)), 200 - ) - - return Response(json.dumps({"ok": True}), status=202, mimetype="application/json") - - -def script_unit_test_create( - process_group_id: str, process_model_id: str, body: Dict[str, Union[str, bool, int]] -) -> flask.wrappers.Response: - """Script_unit_test_create.""" - bpmn_task_identifier = _get_required_parameter_or_raise( - "bpmn_task_identifier", body - ) - input_json = _get_required_parameter_or_raise("input_json", body) - expected_output_json = _get_required_parameter_or_raise( - "expected_output_json", body - ) - - process_model_identifier = f"{process_group_id}/{process_model_id}" - process_model = get_process_model(process_model_identifier) - file = SpecFileService.get_files(process_model, process_model.primary_file_name)[0] - if file is None: - raise ApiError( - error_code="cannot_find_file", - message=f"Could not find the primary bpmn file for process_model: {process_model.id}", - status_code=404, - ) - - # TODO: move this to an xml service or something - file_contents = SpecFileService.get_data(process_model, file.name) - bpmn_etree_element = etree.fromstring(file_contents) - - nsmap = bpmn_etree_element.nsmap - spiff_element_maker = ElementMaker( - namespace="http://spiffworkflow.org/bpmn/schema/1.0/core", nsmap=nsmap - ) - - script_task_elements = bpmn_etree_element.xpath( - f"//bpmn:scriptTask[@id='{bpmn_task_identifier}']", - namespaces={"bpmn": "http://www.omg.org/spec/BPMN/20100524/MODEL"}, - ) - if len(script_task_elements) == 0: - raise ApiError( - error_code="missing_script_task", - message=f"Cannot find a script task with id: {bpmn_task_identifier}", - status_code=404, - ) - script_task_element = script_task_elements[0] - - extension_elements = None - extension_elements_array = script_task_element.xpath( - "//bpmn:extensionElements", - namespaces={"bpmn": "http://www.omg.org/spec/BPMN/20100524/MODEL"}, - ) - if len(extension_elements_array) == 0: - bpmn_element_maker = ElementMaker( - namespace="http://www.omg.org/spec/BPMN/20100524/MODEL", nsmap=nsmap - ) - extension_elements = bpmn_element_maker("extensionElements") - script_task_element.append(extension_elements) - else: - extension_elements = extension_elements_array[0] - - unit_test_elements = None - unit_test_elements_array = extension_elements.xpath( - "//spiffworkflow:unitTests", - namespaces={"spiffworkflow": "http://spiffworkflow.org/bpmn/schema/1.0/core"}, - ) - if len(unit_test_elements_array) == 0: - unit_test_elements = spiff_element_maker("unitTests") - extension_elements.append(unit_test_elements) - else: - unit_test_elements = unit_test_elements_array[0] - - fuzz = "".join( - random.choice(string.ascii_uppercase + string.digits) # noqa: S311 - for _ in range(7) - ) - unit_test_id = f"unit_test_{fuzz}" - - input_json_element = spiff_element_maker("inputJson", json.dumps(input_json)) - 
expected_output_json_element = spiff_element_maker( - "expectedOutputJson", json.dumps(expected_output_json) - ) - unit_test_element = spiff_element_maker("unitTest", id=unit_test_id) - unit_test_element.append(input_json_element) - unit_test_element.append(expected_output_json_element) - unit_test_elements.append(unit_test_element) - SpecFileService.update_file( - process_model, file.name, etree.tostring(bpmn_etree_element) - ) - - return Response(json.dumps({"ok": True}), status=202, mimetype="application/json") - - -def script_unit_test_run( - process_group_id: str, process_model_id: str, body: Dict[str, Union[str, bool, int]] -) -> flask.wrappers.Response: - """Script_unit_test_run.""" - # FIXME: We should probably clear this somewhere else but this works - current_app.config["THREAD_LOCAL_DATA"].process_instance_id = None - current_app.config["THREAD_LOCAL_DATA"].spiff_step = None - - python_script = _get_required_parameter_or_raise("python_script", body) - input_json = _get_required_parameter_or_raise("input_json", body) - expected_output_json = _get_required_parameter_or_raise( - "expected_output_json", body - ) - - result = ScriptUnitTestRunner.run_with_script_and_pre_post_contexts( - python_script, input_json, expected_output_json - ) - return make_response(jsonify(result), 200) - - -def get_file_from_request() -> Any: - """Get_file_from_request.""" - request_file = connexion.request.files.get("file") - if not request_file: - raise ApiError( - error_code="no_file_given", - message="Given request does not contain a file", - status_code=400, - ) - return request_file - - -def get_process_model(process_model_id: str) -> ProcessModelInfo: - """Get_process_model.""" - process_model = None - try: - process_model = ProcessModelService.get_process_model(process_model_id) - except ProcessEntityNotFoundError as exception: - raise ( - ApiError( - error_code="process_model_cannot_be_found", - message=f"Process model cannot be found: {process_model_id}", - status_code=400, - ) - ) from exception - - return process_model - - -def find_principal_or_raise() -> PrincipalModel: - """Find_principal_or_raise.""" - principal = PrincipalModel.query.filter_by(user_id=g.user.id).first() - if principal is None: - raise ( - ApiError( - error_code="principal_not_found", - message=f"Principal not found from user id: {g.user.id}", - status_code=400, - ) - ) - return principal # type: ignore - - -def find_process_instance_by_id_or_raise( - process_instance_id: int, -) -> ProcessInstanceModel: - """Find_process_instance_by_id_or_raise.""" - process_instance_query = ProcessInstanceModel.query.filter_by( - id=process_instance_id - ) - - # we had a frustrating session trying to do joins and access columns from two tables. 
here's some notes for our future selves: - # this returns an object that allows you to do: process_instance.UserModel.username - # process_instance = db.session.query(ProcessInstanceModel, UserModel).filter_by(id=process_instance_id).first() - # you can also use splat with add_columns, but it still didn't ultimately give us access to the process instance - # attributes or username like we wanted: - # process_instance_query.join(UserModel).add_columns(*ProcessInstanceModel.__table__.columns, UserModel.username) - - process_instance = process_instance_query.first() - if process_instance is None: - raise ( - ApiError( - error_code="process_instance_cannot_be_found", - message=f"Process instance cannot be found: {process_instance_id}", - status_code=400, - ) - ) - return process_instance # type: ignore - - -def get_value_from_array_with_index(array: list, index: int) -> Any: - """Get_value_from_array_with_index.""" - if index < 0: - return None - - if index >= len(array): - return None - - return array[index] - - -def prepare_form_data( - form_file: str, task_data: Union[dict, None], process_model: ProcessModelInfo -) -> str: - """Prepare_form_data.""" - if task_data is None: - return "" - - file_contents = SpecFileService.get_data(process_model, form_file).decode("utf-8") - return render_jinja_template(file_contents, task_data) - - -def render_jinja_template(unprocessed_template: str, data: dict[str, Any]) -> str: - """Render_jinja_template.""" - jinja_environment = jinja2.Environment( - autoescape=True, lstrip_blocks=True, trim_blocks=True - ) - template = jinja_environment.from_string(unprocessed_template) - return template.render(**data) - - -def get_spiff_task_from_process_instance( - task_id: str, - process_instance: ProcessInstanceModel, - processor: Union[ProcessInstanceProcessor, None] = None, -) -> SpiffTask: - """Get_spiff_task_from_process_instance.""" - if processor is None: - processor = ProcessInstanceProcessor(process_instance) - task_uuid = uuid.UUID(task_id) - spiff_task = processor.bpmn_process_instance.get_task(task_uuid) - - if spiff_task is None: - raise ( - ApiError( - error_code="empty_task", - message="Processor failed to obtain task.", - status_code=500, - ) - ) - return spiff_task - - -# -# Methods for secrets CRUD - maybe move somewhere else: -# -def get_secret(key: str) -> Optional[str]: - """Get_secret.""" - return SecretService.get_secret(key) - - -def secret_list( - page: int = 1, - per_page: int = 100, -) -> Response: - """Secret_list.""" - secrets = ( - SecretModel.query.order_by(SecretModel.key) - .join(UserModel) - .add_columns( - UserModel.username, - ) - .paginate(page=page, per_page=per_page, error_out=False) - ) - response_json = { - "results": secrets.items, - "pagination": { - "count": len(secrets.items), - "total": secrets.total, - "pages": secrets.pages, - }, - } - return make_response(jsonify(response_json), 200) - - -def add_secret(body: Dict) -> Response: - """Add secret.""" - secret_model = SecretService().add_secret(body["key"], body["value"], g.user.id) - assert secret_model # noqa: S101 - return Response( - json.dumps(SecretModelSchema().dump(secret_model)), - status=201, - mimetype="application/json", - ) - - -def update_secret(key: str, body: dict) -> Response: - """Update secret.""" - SecretService().update_secret(key, body["value"], g.user.id) - return Response(json.dumps({"ok": True}), status=200, mimetype="application/json") - - -def delete_secret(key: str) -> Response: - """Delete secret.""" - current_user = UserService.current_user() - 
SecretService.delete_secret(key, current_user.id) - return Response(json.dumps({"ok": True}), status=200, mimetype="application/json") - - -def _get_required_parameter_or_raise(parameter: str, post_body: dict[str, Any]) -> Any: - """Get_required_parameter_or_raise.""" - return_value = None - if parameter in post_body: - return_value = post_body[parameter] - - if return_value is None or return_value == "": - raise ( - ApiError( - error_code="missing_required_parameter", - message=f"Parameter is missing from json request body: {parameter}", - status_code=400, - ) - ) - - return return_value - - -# originally from: https://bitcoden.com/answers/python-nested-dictionary-update-value-where-any-nested-key-matches -def _update_form_schema_with_task_data_as_needed( - in_dict: dict, task_data: dict -) -> None: - """Update_nested.""" - for k, value in in_dict.items(): - if "anyOf" == k: - # value will look like the array on the right of "anyOf": ["options_from_task_data_var:awesome_options"] - if isinstance(value, list): - if len(value) == 1: - first_element_in_value_list = value[0] - if isinstance(first_element_in_value_list, str): - if first_element_in_value_list.startswith( - "options_from_task_data_var:" - ): - task_data_var = first_element_in_value_list.replace( - "options_from_task_data_var:", "" - ) - - if task_data_var not in task_data: - raise ( - ApiError( - error_code="missing_task_data_var", - message=f"Task data is missing variable: {task_data_var}", - status_code=500, - ) - ) - - select_options_from_task_data = task_data.get(task_data_var) - if isinstance(select_options_from_task_data, list): - if all( - "value" in d and "label" in d - for d in select_options_from_task_data - ): - - def map_function( - task_data_select_option: TaskDataSelectOption, - ) -> ReactJsonSchemaSelectOption: - """Map_function.""" - return { - "type": "string", - "enum": [task_data_select_option["value"]], - "title": task_data_select_option["label"], - } - - options_for_react_json_schema_form = list( - map(map_function, select_options_from_task_data) - ) - - in_dict[k] = options_for_react_json_schema_form - elif isinstance(value, dict): - _update_form_schema_with_task_data_as_needed(value, task_data) - elif isinstance(value, list): - for o in value: - if isinstance(o, dict): - _update_form_schema_with_task_data_as_needed(o, task_data) - - -def update_task_data(process_instance_id: str, task_id: str, body: Dict) -> Response: - """Update task data.""" - process_instance = ProcessInstanceModel.query.filter( - ProcessInstanceModel.id == int(process_instance_id) - ).first() - if process_instance: - process_instance_bpmn_json_dict = json.loads(process_instance.bpmn_json) - if "new_task_data" in body: - new_task_data_str: str = body["new_task_data"] - new_task_data_dict = json.loads(new_task_data_str) - if task_id in process_instance_bpmn_json_dict["tasks"]: - process_instance_bpmn_json_dict["tasks"][task_id][ - "data" - ] = new_task_data_dict - process_instance.bpmn_json = json.dumps(process_instance_bpmn_json_dict) - db.session.add(process_instance) - try: - db.session.commit() - except Exception as e: - db.session.rollback() - raise ApiError( - error_code="update_task_data_error", - message=f"Could not update the Instance. 
Original error is {e}", - ) from e - else: - raise ApiError( - error_code="update_task_data_error", - message=f"Could not find Task: {task_id} in Instance: {process_instance_id}.", - ) - else: - raise ApiError( - error_code="update_task_data_error", - message=f"Could not update task data for Instance: {process_instance_id}, and Task: {task_id}.", - ) - return Response( - json.dumps(ProcessInstanceModelSchema().dump(process_instance)), - status=200, - mimetype="application/json", - ) From abffccedbd17f09d49a048f4003bf0f35408d6e5 Mon Sep 17 00:00:00 2001 From: jasquat Date: Wed, 30 Nov 2022 15:08:04 -0500 Subject: [PATCH 16/40] added correlations to message list table w/ burnettk --- spiffworkflow-backend/migrations/env.py | 2 - spiffworkflow-backend/poetry.lock | 19 +--- .../routes/process_api_blueprint.py | 7 +- .../process_instance_report_service.py | 1 + .../integration/test_process_api.py | 17 +++- .../src/components/MiniComponents.tsx | 22 +++++ .../components/ProcessInstanceListTable.tsx | 19 +--- spiffworkflow-frontend/src/index.css | 14 +++ spiffworkflow-frontend/src/interfaces.ts | 22 +++++ .../src/routes/MessageInstanceList.tsx | 92 ++++++++++++------- 10 files changed, 145 insertions(+), 70 deletions(-) create mode 100644 spiffworkflow-frontend/src/components/MiniComponents.tsx diff --git a/spiffworkflow-backend/migrations/env.py b/spiffworkflow-backend/migrations/env.py index 68feded2..630e381a 100644 --- a/spiffworkflow-backend/migrations/env.py +++ b/spiffworkflow-backend/migrations/env.py @@ -1,5 +1,3 @@ -from __future__ import with_statement - import logging from logging.config import fileConfig diff --git a/spiffworkflow-backend/poetry.lock b/spiffworkflow-backend/poetry.lock index 8484cdb4..ac024241 100644 --- a/spiffworkflow-backend/poetry.lock +++ b/spiffworkflow-backend/poetry.lock @@ -1851,7 +1851,7 @@ lxml = "*" type = "git" url = "https://github.com/sartography/SpiffWorkflow" reference = "main" -resolved_reference = "46f410a2852baeedc8f9ac5165347ce6d4470594" +resolved_reference = "bba7ddf5478af579b891ca63c50babbfccf6b7a4" [[package]] name = "SQLAlchemy" @@ -2563,7 +2563,6 @@ greenlet = [ {file = "greenlet-2.0.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d5b0ff9878333823226d270417f24f4d06f235cb3e54d1103b71ea537a6a86ce"}, {file = "greenlet-2.0.1-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:be9e0fb2ada7e5124f5282d6381903183ecc73ea019568d6d63d33f25b2a9000"}, {file = "greenlet-2.0.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0b493db84d124805865adc587532ebad30efa68f79ad68f11b336e0a51ec86c2"}, - {file = "greenlet-2.0.1-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:0459d94f73265744fee4c2d5ec44c6f34aa8a31017e6e9de770f7bcf29710be9"}, {file = "greenlet-2.0.1-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:a20d33124935d27b80e6fdacbd34205732660e0a1d35d8b10b3328179a2b51a1"}, {file = "greenlet-2.0.1-cp37-cp37m-win32.whl", hash = "sha256:ea688d11707d30e212e0110a1aac7f7f3f542a259235d396f88be68b649e47d1"}, {file = "greenlet-2.0.1-cp37-cp37m-win_amd64.whl", hash = "sha256:afe07421c969e259e9403c3bb658968702bc3b78ec0b6fde3ae1e73440529c23"}, @@ -2572,7 +2571,6 @@ greenlet = [ {file = "greenlet-2.0.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:659f167f419a4609bc0516fb18ea69ed39dbb25594934bd2dd4d0401660e8a1e"}, {file = "greenlet-2.0.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = 
"sha256:356e4519d4dfa766d50ecc498544b44c0249b6de66426041d7f8b751de4d6b48"}, {file = "greenlet-2.0.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:811e1d37d60b47cb8126e0a929b58c046251f28117cb16fcd371eed61f66b764"}, - {file = "greenlet-2.0.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:d38ffd0e81ba8ef347d2be0772e899c289b59ff150ebbbbe05dc61b1246eb4e0"}, {file = "greenlet-2.0.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:0109af1138afbfb8ae647e31a2b1ab030f58b21dd8528c27beaeb0093b7938a9"}, {file = "greenlet-2.0.1-cp38-cp38-win32.whl", hash = "sha256:88c8d517e78acdf7df8a2134a3c4b964415b575d2840a2746ddb1cc6175f8608"}, {file = "greenlet-2.0.1-cp38-cp38-win_amd64.whl", hash = "sha256:d6ee1aa7ab36475035eb48c01efae87d37936a8173fc4d7b10bb02c2d75dd8f6"}, @@ -2581,7 +2579,6 @@ greenlet = [ {file = "greenlet-2.0.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:505138d4fa69462447a562a7c2ef723c6025ba12ac04478bc1ce2fcc279a2db5"}, {file = "greenlet-2.0.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:cce1e90dd302f45716a7715517c6aa0468af0bf38e814ad4eab58e88fc09f7f7"}, {file = "greenlet-2.0.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9e9744c657d896c7b580455e739899e492a4a452e2dd4d2b3e459f6b244a638d"}, - {file = "greenlet-2.0.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:662e8f7cad915ba75d8017b3e601afc01ef20deeeabf281bd00369de196d7726"}, {file = "greenlet-2.0.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:41b825d65f31e394b523c84db84f9383a2f7eefc13d987f308f4663794d2687e"}, {file = "greenlet-2.0.1-cp39-cp39-win32.whl", hash = "sha256:db38f80540083ea33bdab614a9d28bcec4b54daa5aff1668d7827a9fc769ae0a"}, {file = "greenlet-2.0.1-cp39-cp39-win_amd64.whl", hash = "sha256:b23d2a46d53210b498e5b701a1913697671988f4bf8e10f935433f6e7c332fb6"}, @@ -2880,7 +2877,10 @@ orjson = [ {file = "orjson-3.8.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:b68a42a31f8429728183c21fb440c21de1b62e5378d0d73f280e2d894ef8942e"}, {file = "orjson-3.8.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:ff13410ddbdda5d4197a4a4c09969cb78c722a67550f0a63c02c07aadc624833"}, {file = "orjson-3.8.0-cp310-none-win_amd64.whl", hash = "sha256:2d81e6e56bbea44be0222fb53f7b255b4e7426290516771592738ca01dbd053b"}, + {file = "orjson-3.8.0-cp311-cp311-macosx_10_7_x86_64.whl", hash = "sha256:200eae21c33f1f8b02a11f5d88d76950cd6fd986d88f1afe497a8ae2627c49aa"}, + {file = "orjson-3.8.0-cp311-cp311-macosx_10_9_x86_64.macosx_11_0_arm64.macosx_10_9_universal2.whl", hash = "sha256:9529990f3eab54b976d327360aa1ff244a4b12cb5e4c5b3712fcdd96e8fe56d4"}, {file = "orjson-3.8.0-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:e2defd9527651ad39ec20ae03c812adf47ef7662bdd6bc07dabb10888d70dc62"}, + {file = "orjson-3.8.0-cp311-none-win_amd64.whl", hash = "sha256:b21c7af0ff6228ca7105f54f0800636eb49201133e15ddb80ac20c1ce973ef07"}, {file = "orjson-3.8.0-cp37-cp37m-macosx_10_7_x86_64.whl", hash = "sha256:9e6ac22cec72d5b39035b566e4b86c74b84866f12b5b0b6541506a080fb67d6d"}, {file = "orjson-3.8.0-cp37-cp37m-macosx_10_9_x86_64.macosx_11_0_arm64.macosx_10_9_universal2.whl", hash = "sha256:e2f4a5542f50e3d336a18cb224fc757245ca66b1fd0b70b5dd4471b8ff5f2b0e"}, {file = "orjson-3.8.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e1418feeb8b698b9224b1f024555895169d481604d5d884498c1838d7412794c"}, @@ -2989,18 +2989,7 @@ psycopg2 = [ {file = "psycopg2-2.9.4.tar.gz", hash = 
"sha256:d529926254e093a1b669f692a3aa50069bc71faf5b0ecd91686a78f62767d52f"}, ] pyasn1 = [ - {file = "pyasn1-0.4.8-py2.4.egg", hash = "sha256:fec3e9d8e36808a28efb59b489e4528c10ad0f480e57dcc32b4de5c9d8c9fdf3"}, - {file = "pyasn1-0.4.8-py2.5.egg", hash = "sha256:0458773cfe65b153891ac249bcf1b5f8f320b7c2ce462151f8fa74de8934becf"}, - {file = "pyasn1-0.4.8-py2.6.egg", hash = "sha256:5c9414dcfede6e441f7e8f81b43b34e834731003427e5b09e4e00e3172a10f00"}, - {file = "pyasn1-0.4.8-py2.7.egg", hash = "sha256:6e7545f1a61025a4e58bb336952c5061697da694db1cae97b116e9c46abcf7c8"}, {file = "pyasn1-0.4.8-py2.py3-none-any.whl", hash = "sha256:39c7e2ec30515947ff4e87fb6f456dfc6e84857d34be479c9d4a4ba4bf46aa5d"}, - {file = "pyasn1-0.4.8-py3.1.egg", hash = "sha256:78fa6da68ed2727915c4767bb386ab32cdba863caa7dbe473eaae45f9959da86"}, - {file = "pyasn1-0.4.8-py3.2.egg", hash = "sha256:08c3c53b75eaa48d71cf8c710312316392ed40899cb34710d092e96745a358b7"}, - {file = "pyasn1-0.4.8-py3.3.egg", hash = "sha256:03840c999ba71680a131cfaee6fab142e1ed9bbd9c693e285cc6aca0d555e576"}, - {file = "pyasn1-0.4.8-py3.4.egg", hash = "sha256:7ab8a544af125fb704feadb008c99a88805126fb525280b2270bb25cc1d78a12"}, - {file = "pyasn1-0.4.8-py3.5.egg", hash = "sha256:e89bf84b5437b532b0803ba5c9a5e054d21fec423a89952a74f87fa2c9b7bce2"}, - {file = "pyasn1-0.4.8-py3.6.egg", hash = "sha256:014c0e9976956a08139dc0712ae195324a75e142284d5f87f1a87ee1b068a359"}, - {file = "pyasn1-0.4.8-py3.7.egg", hash = "sha256:99fcc3c8d804d1bc6d9a099921e39d827026409a58f2a720dcdb89374ea0c776"}, {file = "pyasn1-0.4.8.tar.gz", hash = "sha256:aef77c9fb94a3ac588e87841208bdec464471d9871bd5050a287cc9a475cd0ba"}, ] pycodestyle = [ diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/routes/process_api_blueprint.py b/spiffworkflow-backend/src/spiffworkflow_backend/routes/process_api_blueprint.py index b96cc262..46067031 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/routes/process_api_blueprint.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/routes/process_api_blueprint.py @@ -647,6 +647,7 @@ def message_instance_list( .add_columns( MessageModel.identifier.label("message_identifier"), ProcessInstanceModel.process_model_identifier, + ProcessInstanceModel.process_model_display_name, ) .paginate(page=page, per_page=per_page, error_out=False) ) @@ -978,10 +979,12 @@ def process_instance_list( def process_instance_report_column_list() -> flask.wrappers.Response: - + """Process_instance_report_column_list.""" table_columns = ProcessInstanceReportService.builtin_column_options() columns_for_metadata = db.session.query(ProcessInstanceMetadataModel.key).distinct().all() # type: ignore - columns_for_metadata_strings = [{ 'Header': i[0], 'accessor': i[0]} for i in columns_for_metadata] + columns_for_metadata_strings = [ + {"Header": i[0], "accessor": i[0]} for i in columns_for_metadata + ] # columns = sorted(table_columns + columns_for_metadata_strings) return make_response(jsonify(table_columns + columns_for_metadata_strings), 200) diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/services/process_instance_report_service.py b/spiffworkflow-backend/src/spiffworkflow_backend/services/process_instance_report_service.py index da70f0c0..bd3a2e08 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/services/process_instance_report_service.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/services/process_instance_report_service.py @@ -271,6 +271,7 @@ class ProcessInstanceReportService: @classmethod def builtin_column_options(cls) -> list[dict]: + 
"""Builtin_column_options.""" return [ {"Header": "id", "accessor": "id"}, { diff --git a/spiffworkflow-backend/tests/spiffworkflow_backend/integration/test_process_api.py b/spiffworkflow-backend/tests/spiffworkflow_backend/integration/test_process_api.py index beef3b74..215e44d4 100644 --- a/spiffworkflow-backend/tests/spiffworkflow_backend/integration/test_process_api.py +++ b/spiffworkflow-backend/tests/spiffworkflow_backend/integration/test_process_api.py @@ -2630,11 +2630,22 @@ class TestProcessApi(BaseTest): assert len(process_instance_metadata) == 2 response = client.get( - f"/v1.0/process-instances/reports/columns", + "/v1.0/process-instances/reports/columns", headers=self.logged_in_headers(with_super_admin_user), ) assert response.json is not None assert response.status_code == 200 - assert response.json == [{'Header': 'id', 'accessor': 'id'}, {'Header': 'process_model_display_name', 'accessor': 'process_model_display_name'}, {'Header': 'start_in_seconds', 'accessor': 'start_in_seconds'}, {'Header': 'end_in_seconds', 'accessor': 'end_in_seconds'}, {'Header': 'username', 'accessor': 'username'}, {'Header': 'status', 'accessor': 'status'}, {'Header': 'key1', 'accessor': 'key1'}, {'Header': 'key2', 'accessor': 'key2'}] - + assert response.json == [ + {"Header": "id", "accessor": "id"}, + { + "Header": "process_model_display_name", + "accessor": "process_model_display_name", + }, + {"Header": "start_in_seconds", "accessor": "start_in_seconds"}, + {"Header": "end_in_seconds", "accessor": "end_in_seconds"}, + {"Header": "username", "accessor": "username"}, + {"Header": "status", "accessor": "status"}, + {"Header": "key1", "accessor": "key1"}, + {"Header": "key2", "accessor": "key2"}, + ] diff --git a/spiffworkflow-frontend/src/components/MiniComponents.tsx b/spiffworkflow-frontend/src/components/MiniComponents.tsx new file mode 100644 index 00000000..6f0a1293 --- /dev/null +++ b/spiffworkflow-frontend/src/components/MiniComponents.tsx @@ -0,0 +1,22 @@ +import { Link } from 'react-router-dom'; +import { modifyProcessIdentifierForPathParam } from '../helpers'; +import { MessageInstance, ProcessInstance } from '../interfaces'; + +export function FormatProcessModelDisplayName( + instanceObject: ProcessInstance | MessageInstance +) { + const { + process_model_identifier: processModelIdentifier, + process_model_display_name: processModelDisplayName, + } = instanceObject; + return ( + + {processModelDisplayName} + + ); +} diff --git a/spiffworkflow-frontend/src/components/ProcessInstanceListTable.tsx b/spiffworkflow-frontend/src/components/ProcessInstanceListTable.tsx index 9b239502..50b69c0b 100644 --- a/spiffworkflow-frontend/src/components/ProcessInstanceListTable.tsx +++ b/spiffworkflow-frontend/src/components/ProcessInstanceListTable.tsx @@ -53,6 +53,7 @@ import { import ProcessModelSearch from './ProcessModelSearch'; import ProcessInstanceReportSearch from './ProcessInstanceReportSearch'; import ProcessInstanceListSaveAsReport from './ProcessInstanceListSaveAsReport'; +import { FormatProcessModelDisplayName } from './MiniComponents'; const REFRESH_INTERVAL = 5; const REFRESH_TIMEOUT = 600; @@ -693,22 +694,6 @@ export default function ProcessInstanceListTable({ ); }; - const formatProcessModelDisplayName = ( - row: ProcessInstance, - displayName: string - ) => { - return ( - - {displayName} - - ); - }; - const formatSecondsForDisplay = (_row: any, seconds: any) => { return convertSecondsToFormattedDateTime(seconds) || '-'; }; @@ -719,7 +704,7 @@ export default function 
ProcessInstanceListTable({ const columnFormatters: Record = { id: formatProcessInstanceId, process_model_identifier: formatProcessModelIdentifier, - process_model_display_name: formatProcessModelDisplayName, + process_model_display_name: FormatProcessModelDisplayName, start_in_seconds: formatSecondsForDisplay, end_in_seconds: formatSecondsForDisplay, }; diff --git a/spiffworkflow-frontend/src/index.css b/spiffworkflow-frontend/src/index.css index 4723e557..53e04b78 100644 --- a/spiffworkflow-frontend/src/index.css +++ b/spiffworkflow-frontend/src/index.css @@ -69,6 +69,20 @@ h2 { color: black; } +/* match normal link colors */ +.cds--btn--ghost.button-link { + color: #0062fe; +} +.cds--btn--ghost.button-link:visited { + color: #0062fe; +} +.cds--btn--ghost.button-link:hover { + color: #0062fe; +} +.cds--btn--ghost.button-link:visited:hover { + color: #0062fe; +} + .cds--header__global .cds--btn--primary { background-color: #161616 } diff --git a/spiffworkflow-frontend/src/interfaces.ts b/spiffworkflow-frontend/src/interfaces.ts index 42ba5335..66759dfe 100644 --- a/spiffworkflow-frontend/src/interfaces.ts +++ b/spiffworkflow-frontend/src/interfaces.ts @@ -39,6 +39,28 @@ export interface ProcessFile { export interface ProcessInstance { id: number; process_model_identifier: string; + process_model_display_name: string; +} + +export interface MessageCorrelationProperties { + [key: string]: string; +} + +export interface MessageCorrelations { + [key: string]: MessageCorrelationProperties; +} + +export interface MessageInstance { + id: number; + process_model_identifier: string; + process_model_display_name: string; + process_instance_id: number; + message_identifier: string; + message_type: string; + failure_cause: string; + status: string; + created_at_in_seconds: number; + message_correlations?: MessageCorrelations; } export interface ProcessInstanceReport { diff --git a/spiffworkflow-frontend/src/routes/MessageInstanceList.tsx b/spiffworkflow-frontend/src/routes/MessageInstanceList.tsx index f1478058..5a2d4e1a 100644 --- a/spiffworkflow-frontend/src/routes/MessageInstanceList.tsx +++ b/spiffworkflow-frontend/src/routes/MessageInstanceList.tsx @@ -1,15 +1,17 @@ import { useEffect, useState } from 'react'; // @ts-ignore -import { Table } from '@carbon/react'; +import { Table, Modal, Button } from '@carbon/react'; import { Link, useParams, useSearchParams } from 'react-router-dom'; import PaginationForTable from '../components/PaginationForTable'; import ProcessBreadcrumb from '../components/ProcessBreadcrumb'; import { - convertSecondsToFormattedDateString, + convertSecondsToFormattedDateTime, getPageInfoFromSearchParams, modifyProcessIdentifierForPathParam, } from '../helpers'; import HttpService from '../services/HttpService'; +import { FormatProcessModelDisplayName } from '../components/MiniComponents'; +import { MessageInstance } from '../interfaces'; export default function MessageInstanceList() { const params = useParams(); @@ -17,6 +19,9 @@ export default function MessageInstanceList() { const [messageIntances, setMessageInstances] = useState([]); const [pagination, setPagination] = useState(null); + const [messageInstanceForModal, setMessageInstanceForModal] = + useState(null); + useEffect(() => { const setMessageInstanceListFromResult = (result: any) => { setMessageInstances(result.results); @@ -35,41 +40,64 @@ export default function MessageInstanceList() { }); }, [searchParams, params]); - const buildTable = () => { - // return null; - const rows = messageIntances.map((row) => { - 
const rowToUse = row as any; + const handleCorrelationDisplayClose = () => { + setMessageInstanceForModal(null); + }; + + const correlationsDisplayModal = () => { + if (messageInstanceForModal) { return ( - - {rowToUse.id} - - - {rowToUse.process_model_identifier} - - + +
+            {JSON.stringify(
+              messageInstanceForModal.message_correlations,
+              null,
+              2
+            )}
+          
+
+ ); + } + return null; + }; + + const buildTable = () => { + const rows = messageIntances.map((row: MessageInstance) => { + return ( + + {row.id} + {FormatProcessModelDisplayName(row)} - {rowToUse.process_instance_id} + {row.process_instance_id} - {rowToUse.message_identifier} - {rowToUse.message_type} - {rowToUse.failure_cause || '-'} - {rowToUse.status} + {row.message_identifier} + {row.message_type} + {row.failure_cause || '-'} - {convertSecondsToFormattedDateString( - rowToUse.created_at_in_seconds - )} + + + {row.status} + + {convertSecondsToFormattedDateTime(row.created_at_in_seconds)} ); @@ -78,12 +106,13 @@ export default function MessageInstanceList() { - - + + - + + @@ -121,6 +150,7 @@ export default function MessageInstanceList() { <> {breadcrumbElement}

Messages

+ {correlationsDisplayModal()} Date: Wed, 30 Nov 2022 15:35:37 -0500 Subject: [PATCH 17/40] better display for failure causes on message list w/ burnettk --- spiffworkflow-frontend/src/index.css | 12 +++++++ .../src/routes/MessageInstanceList.tsx | 34 ++++++++++++++++--- 2 files changed, 42 insertions(+), 4 deletions(-) diff --git a/spiffworkflow-frontend/src/index.css b/spiffworkflow-frontend/src/index.css index 53e04b78..ade073f5 100644 --- a/spiffworkflow-frontend/src/index.css +++ b/spiffworkflow-frontend/src/index.css @@ -72,15 +72,19 @@ h2 { /* match normal link colors */ .cds--btn--ghost.button-link { color: #0062fe; + padding-left: 0; } .cds--btn--ghost.button-link:visited { color: #0062fe; + padding-left: 0; } .cds--btn--ghost.button-link:hover { color: #0062fe; + padding-left: 0; } .cds--btn--ghost.button-link:visited:hover { color: #0062fe; + padding-left: 0; } .cds--header__global .cds--btn--primary { @@ -311,3 +315,11 @@ td.actions-cell { text-align: right; padding-bottom: 10px; } + +.cds--btn--ghost:not([disabled]) svg { + fill: red; +} + +.failure-string { + color: red; +} diff --git a/spiffworkflow-frontend/src/routes/MessageInstanceList.tsx b/spiffworkflow-frontend/src/routes/MessageInstanceList.tsx index 5a2d4e1a..b77b744c 100644 --- a/spiffworkflow-frontend/src/routes/MessageInstanceList.tsx +++ b/spiffworkflow-frontend/src/routes/MessageInstanceList.tsx @@ -1,5 +1,7 @@ import { useEffect, useState } from 'react'; // @ts-ignore +import { ErrorOutline } from '@carbon/icons-react'; +// @ts-ignore import { Table, Modal, Button } from '@carbon/react'; import { Link, useParams, useSearchParams } from 'react-router-dom'; import PaginationForTable from '../components/PaginationForTable'; @@ -46,14 +48,27 @@ export default function MessageInstanceList() { const correlationsDisplayModal = () => { if (messageInstanceForModal) { + let failureCausePre = null; + if (messageInstanceForModal.failure_cause) { + failureCausePre = ( + <> +

+ {messageInstanceForModal.failure_cause} +

+
+ + ); + } return ( + {failureCausePre} +

Correlations:

             {JSON.stringify(
               messageInstanceForModal.message_correlations,
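The Correlations block above pretty-prints message_correlations, which the interfaces added in this commit type as a two-level map: correlation name to a dict of correlation property names and values. A small sketch with invented correlation data, assuming only the MessageCorrelations shape declared in interfaces.ts:

    import { MessageCorrelations } from '../interfaces';

    // Hypothetical correlation data: correlation name -> { property name -> value }.
    const correlations: MessageCorrelations = {
      order: { order_number: '12345' },
      customer: { customer_id: 'acme', region: 'us-east' },
    };

    // Same rendering the modal uses: two-space-indented JSON inside a <pre> block.
    console.log(JSON.stringify(correlations, null, 2));
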
@@ -69,6 +84,17 @@ export default function MessageInstanceList() {
 
   const buildTable = () => {
     const rows = messageIntances.map((row: MessageInstance) => {
+      let errorIcon = null;
+      let errorTitle = null;
+      if (row.failure_cause) {
+        errorTitle = 'Instance has an error';
+        errorIcon = (
+          <>
+             
+            
+          
+        );
+      }
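A condensed sketch of the interaction these hunks build up: the Status cell becomes a Carbon ghost button (styled like a link via the button-link class) that stashes the clicked row in state, and a passive modal then shows that row's correlations until closed. This is a self-contained approximation rather than the component's exact markup; the real code splits the pattern across correlationsDisplayModal(), handleCorrelationDisplayClose(), and buildTable(), and also renders the red error icon and failure cause when failure_cause is set.

    import { useState } from 'react';
    // @ts-ignore
    import { Button, Modal } from '@carbon/react';
    import { MessageInstance } from '../interfaces';

    // Approximate, self-contained version of the status-cell-opens-modal pattern.
    export function StatusCellSketch({ row }: { row: MessageInstance }) {
      const [openRow, setOpenRow] = useState<MessageInstance | null>(null);
      return (
        <>
          <Button
            kind="ghost"
            className="button-link"
            onClick={() => setOpenRow(row)}
          >
            {row.status}
          </Button>
          {openRow ? (
            <Modal
              open
              passiveModal
              modalHeading="Message Details"
              onRequestClose={() => setOpenRow(null)}
            >
              <pre>{JSON.stringify(openRow.message_correlations, null, 2)}</pre>
            </Modal>
          ) : null}
        </>
      );
    }
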
       return (
         
@@ -85,14 +111,15 @@ export default function MessageInstanceList() { - @@ -111,8 +138,7 @@ export default function MessageInstanceList() { - - + From 98c775db8edcc3d632eaf4867d5154c3a63aeebe Mon Sep 17 00:00:00 2001 From: jasquat Date: Wed, 30 Nov 2022 17:20:34 -0500 Subject: [PATCH 18/40] some updates for process instance reports and metadata w/ burnettk --- .../process_instance_report_service.py | 48 +----- .../src/components/ProcessGroupForm.tsx | 1 - .../ProcessInstanceListSaveAsReport.tsx | 32 ++-- .../components/ProcessInstanceListTable.tsx | 156 ++++++++++++------ spiffworkflow-frontend/src/index.css | 13 +- spiffworkflow-frontend/src/interfaces.ts | 5 + 6 files changed, 145 insertions(+), 110 deletions(-) diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/services/process_instance_report_service.py b/spiffworkflow-backend/src/spiffworkflow_backend/services/process_instance_report_service.py index bd3a2e08..6397cc20 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/services/process_instance_report_service.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/services/process_instance_report_service.py @@ -76,17 +76,7 @@ class ProcessInstanceReportService: # TODO replace with system reports that are loaded on launch (or similar) temp_system_metadata_map = { "default": { - "columns": [ - {"Header": "id", "accessor": "id"}, - { - "Header": "process_model_display_name", - "accessor": "process_model_display_name", - }, - {"Header": "start_in_seconds", "accessor": "start_in_seconds"}, - {"Header": "end_in_seconds", "accessor": "end_in_seconds"}, - {"Header": "username", "accessor": "username"}, - {"Header": "status", "accessor": "status"}, - ], + "columns": cls.builtin_column_options() }, "system_report_instances_initiated_by_me": { "columns": [ @@ -102,33 +92,13 @@ class ProcessInstanceReportService: "filter_by": [{"field_name": "initiated_by_me", "field_value": True}], }, "system_report_instances_with_tasks_completed_by_me": { - "columns": [ - {"Header": "id", "accessor": "id"}, - { - "Header": "process_model_display_name", - "accessor": "process_model_display_name", - }, - {"Header": "start_in_seconds", "accessor": "start_in_seconds"}, - {"Header": "end_in_seconds", "accessor": "end_in_seconds"}, - {"Header": "username", "accessor": "username"}, - {"Header": "status", "accessor": "status"}, - ], + "columns": cls.builtin_column_options(), "filter_by": [ {"field_name": "with_tasks_completed_by_me", "field_value": True} ], }, "system_report_instances_with_tasks_completed_by_my_groups": { - "columns": [ - {"Header": "id", "accessor": "id"}, - { - "Header": "process_model_display_name", - "accessor": "process_model_display_name", - }, - {"Header": "start_in_seconds", "accessor": "start_in_seconds"}, - {"Header": "end_in_seconds", "accessor": "end_in_seconds"}, - {"Header": "username", "accessor": "username"}, - {"Header": "status", "accessor": "status"}, - ], + "columns": cls.builtin_column_options(), "filter_by": [ { "field_name": "with_tasks_completed_by_my_group", @@ -273,13 +243,13 @@ class ProcessInstanceReportService: def builtin_column_options(cls) -> list[dict]: """Builtin_column_options.""" return [ - {"Header": "id", "accessor": "id"}, + {"Header": "Id", "accessor": "id"}, { - "Header": "process_model_display_name", + "Header": "Process", "accessor": "process_model_display_name", }, - {"Header": "start_in_seconds", "accessor": "start_in_seconds"}, - {"Header": "end_in_seconds", "accessor": "end_in_seconds"}, - {"Header": "username", "accessor": "username"}, 
- {"Header": "status", "accessor": "status"}, + {"Header": "Start", "accessor": "start_in_seconds"}, + {"Header": "End", "accessor": "end_in_seconds"}, + {"Header": "Username", "accessor": "username"}, + {"Header": "Status", "accessor": "status"}, ] diff --git a/spiffworkflow-frontend/src/components/ProcessGroupForm.tsx b/spiffworkflow-frontend/src/components/ProcessGroupForm.tsx index a518e47b..79ab8253 100644 --- a/spiffworkflow-frontend/src/components/ProcessGroupForm.tsx +++ b/spiffworkflow-frontend/src/components/ProcessGroupForm.tsx @@ -115,7 +115,6 @@ export default function ProcessGroupForm({ labelText="Display Name*" value={processGroup.display_name} onChange={(event: any) => onDisplayNameChanged(event.target.value)} - onBlur={(event: any) => console.log('event', event)} />, ]; diff --git a/spiffworkflow-frontend/src/components/ProcessInstanceListSaveAsReport.tsx b/spiffworkflow-frontend/src/components/ProcessInstanceListSaveAsReport.tsx index 6c8f5fb9..d70aab3e 100644 --- a/spiffworkflow-frontend/src/components/ProcessInstanceListSaveAsReport.tsx +++ b/spiffworkflow-frontend/src/components/ProcessInstanceListSaveAsReport.tsx @@ -1,12 +1,11 @@ import { useState } from 'react'; -// TODO: carbon controls -/* import { Button, - Textbox, + TextInput, + Form, + Stack, // @ts-ignore } from '@carbon/react'; -*/ import { ProcessModel } from '../interfaces'; import HttpService from '../services/HttpService'; @@ -112,20 +111,21 @@ export default function ProcessInstanceListSaveAsReport({ }; return ( -
- - - + + + ); } diff --git a/spiffworkflow-frontend/src/components/ProcessInstanceListTable.tsx b/spiffworkflow-frontend/src/components/ProcessInstanceListTable.tsx index 50b69c0b..ebf6a446 100644 --- a/spiffworkflow-frontend/src/components/ProcessInstanceListTable.tsx +++ b/spiffworkflow-frontend/src/components/ProcessInstanceListTable.tsx @@ -7,7 +7,7 @@ import { } from 'react-router-dom'; // @ts-ignore -import { Filter } from '@carbon/icons-react'; +import { Filter, Close } from '@carbon/icons-react'; import { Button, ButtonSet, @@ -21,6 +21,7 @@ import { TableHead, TableRow, TimePicker, + Tag, // @ts-ignore } from '@carbon/react'; import { PROCESS_STATUSES, DATE_FORMAT, DATE_FORMAT_CARBON } from '../config'; @@ -49,6 +50,7 @@ import { ProcessModel, ProcessInstanceReport, ProcessInstance, + ReportColumn, } from '../interfaces'; import ProcessModelSearch from './ProcessModelSearch'; import ProcessInstanceReportSearch from './ProcessInstanceReportSearch'; @@ -127,6 +129,10 @@ export default function ProcessInstanceListTable({ const [processInstanceReportSelection, setProcessInstanceReportSelection] = useState(null); + const [availableReportColumns, setAvailableReportColumns] = useState< + ReportColumn[] + >([]); + const dateParametersToAlwaysFilterBy: dateParameters = useMemo(() => { return { start_from: [setStartFromDate, setStartFromTime], @@ -554,12 +560,99 @@ export default function ProcessInstanceListTable({ setEndToTime(''); }; + const processInstanceReportDidChange = (selection: any) => { + clearFilters(); + + const selectedReport = selection.selectedItem; + setProcessInstanceReportSelection(selectedReport); + + const queryParamString = selectedReport + ? `&report_identifier=${selectedReport.id}` + : ''; + + setErrorMessage(null); + navigate(`/admin/process-instances?${queryParamString}`); + }; + + const reportColumns = () => { + return (reportMetadata as any).columns; + }; + + const saveAsReportComponent = () => { + // TODO onSuccess reload/select the new report in the report search + const callback = (identifier: string) => { + processInstanceReportDidChange({ + selectedItem: { id: identifier, display_name: identifier }, + }); + }; + const { + valid, + startFromSeconds, + startToSeconds, + endFromSeconds, + endToSeconds, + } = calculateStartAndEndSeconds(); + + if (!valid) { + return null; + } + return ( + + ); + }; + + const columnSelections = () => { + if (reportColumns()) { + const tags: any = []; + + (reportColumns() as any).forEach((reportColumn: ReportColumn) => { + tags.push( + + + diff --git a/spiffworkflow-frontend/src/components/ProcessInstanceListTable.tsx b/spiffworkflow-frontend/src/components/ProcessInstanceListTable.tsx index ebf6a446..548418b1 100644 --- a/spiffworkflow-frontend/src/components/ProcessInstanceListTable.tsx +++ b/spiffworkflow-frontend/src/components/ProcessInstanceListTable.tsx @@ -7,7 +7,7 @@ import { } from 'react-router-dom'; // @ts-ignore -import { Filter, Close } from '@carbon/icons-react'; +import { Filter, Close, AddAlt, AddFilled } from '@carbon/icons-react'; import { Button, ButtonSet, @@ -22,6 +22,11 @@ import { TableRow, TimePicker, Tag, + InlineNotification, + Stack, + Modal, + ComboBox, + TextInput, // @ts-ignore } from '@carbon/react'; import { PROCESS_STATUSES, DATE_FORMAT, DATE_FORMAT_CARBON } from '../config'; @@ -88,7 +93,7 @@ export default function ProcessInstanceListTable({ autoReload = false, }: OwnProps) { const params = useParams(); - const [searchParams] = useSearchParams(); + const [searchParams, setSearchParams] = 
useSearchParams(); const navigate = useNavigate(); const [processInstances, setProcessInstances] = useState([]); @@ -132,6 +137,12 @@ export default function ProcessInstanceListTable({ const [availableReportColumns, setAvailableReportColumns] = useState< ReportColumn[] >([]); + const [processInstanceReportJustSaved, setProcessInstanceReportJustSaved] = + useState(false); + const [showColumnForm, setShowColumnForm] = useState(false); + const [reportColumnToOperateOn, setReportColumnToOperateOn] = + useState(null); + const [columnFormMode, setColumnFormMode] = useState(''); const dateParametersToAlwaysFilterBy: dateParameters = useMemo(() => { return { @@ -357,6 +368,23 @@ export default function ProcessInstanceListTable({ processModelAvailableItems, ]); + const processInstanceReportSaveTag = () => { + if (processInstanceReportJustSaved) { + return ( + + ); + } + return null; + }; + // does the comparison, but also returns false if either argument // is not truthy and therefore not comparable. const isTrueComparison = (param1: any, operation: any, param2: any) => { @@ -473,6 +501,7 @@ export default function ProcessInstanceListTable({ } setErrorMessage(null); + setProcessInstanceReportJustSaved(false); navigate(`/admin/process-instances?${queryParamString}`); }; @@ -560,17 +589,22 @@ export default function ProcessInstanceListTable({ setEndToTime(''); }; - const processInstanceReportDidChange = (selection: any) => { + const processInstanceReportDidChange = ( + selection: any, + savedReport: boolean = false + ) => { clearFilters(); const selectedReport = selection.selectedItem; setProcessInstanceReportSelection(selectedReport); - const queryParamString = selectedReport - ? `&report_identifier=${selectedReport.id}` - : ''; + let queryParamString = ''; + if (selectedReport) { + queryParamString = `&report_identifier=${selectedReport.id}`; + } setErrorMessage(null); + setProcessInstanceReportJustSaved(savedReport); navigate(`/admin/process-instances?${queryParamString}`); }; @@ -578,12 +612,21 @@ export default function ProcessInstanceListTable({ return (reportMetadata as any).columns; }; + const reportColumnAccessors = () => { + return reportColumns().map((reportColumn: ReportColumn) => { + return reportColumn.accessor; + }); + }; + const saveAsReportComponent = () => { // TODO onSuccess reload/select the new report in the report search const callback = (identifier: string) => { - processInstanceReportDidChange({ - selectedItem: { id: identifier, display_name: identifier }, - }); + processInstanceReportDidChange( + { + selectedItem: { id: identifier, display_name: identifier }, + }, + true + ); }; const { valid, @@ -611,18 +654,146 @@ export default function ProcessInstanceListTable({ ); }; + const removeColumn = (reportColumn: ReportColumn) => { + const reportMetadataCopy = { ...reportMetadata }; + const newColumns = reportColumns().filter( + (rc: ReportColumn) => rc.accessor !== reportColumn.accessor + ); + Object.assign(reportMetadataCopy, { columns: newColumns }); + setReportMetadata(reportMetadataCopy); + }; + + const handleColumnFormClose = () => { + setShowColumnForm(false); + setColumnFormMode(''); + setReportColumnToOperateOn(null); + }; + + const handleUpdateColumn = () => { + if (reportColumnToOperateOn) { + const reportMetadataCopy = { ...reportMetadata }; + let newReportColumns = null; + if (columnFormMode === 'new') { + newReportColumns = reportColumns().concat([reportColumnToOperateOn]); + } else { + newReportColumns = reportColumns().map((rc: ReportColumn) => { + if (rc.accessor 
=== reportColumnToOperateOn.accessor) { + return reportColumnToOperateOn; + } + return rc; + }); + } + Object.assign(reportMetadataCopy, { + columns: newReportColumns, + }); + setReportMetadata(reportMetadataCopy); + setReportColumnToOperateOn(null); + setShowColumnForm(false); + setShowColumnForm(false); + } + }; + + const updateReportColumn = (event: any) => { + setReportColumnToOperateOn(event.selectedItem); + }; + + // options includes item and inputValue + const shouldFilterReportColumn = (options: any) => { + const reportColumn: ReportColumn = options.item; + const { inputValue } = options; + return ( + !reportColumnAccessors().includes(reportColumn.accessor) && + (reportColumn.accessor || '') + .toLowerCase() + .includes((inputValue || '').toLowerCase()) + ); + }; + + const columnForm = () => { + if (columnFormMode === '') { + return null; + } + const formElements = [ + { + if (reportColumnToOperateOn) { + const reportColumnToOperateOnCopy = { + ...reportColumnToOperateOn, + }; + reportColumnToOperateOnCopy.Header = event.target.value; + setReportColumnToOperateOn(reportColumnToOperateOnCopy); + } + }} + />, + ]; + if (columnFormMode === 'new') { + formElements.push( + { + if (reportColumn) { + return reportColumn.accessor; + } + return null; + }} + shouldFilterItem={shouldFilterReportColumn} + placeholder="Choose a report column" + titleText="Report Column" + /> + ); + } + const modalHeading = + columnFormMode === 'new' + ? 'Add Column' + : `Edit ${ + reportColumnToOperateOn ? reportColumnToOperateOn.accessor : '' + } column`; + return ( + + {formElements} + + ); + }; + const columnSelections = () => { if (reportColumns()) { const tags: any = []; (reportColumns() as any).forEach((reportColumn: ReportColumn) => { + let tagType = 'cool-gray'; + if (reportColumn.filterable) { + tagType = 'green'; + } tags.push( - + @@ -634,12 +805,29 @@ export default function ProcessInstanceListTable({ hasIconOnly size="sm" kind="ghost" - onClick={toggleShowFilterOptions} + onClick={() => removeColumn(reportColumn)} /> ); }); - return tags; + return ( + + {tags} + diff --git a/spiffworkflow-frontend/src/components/ProcessInstanceListTable.tsx b/spiffworkflow-frontend/src/components/ProcessInstanceListTable.tsx index 250eb6a0..cb782c03 100644 --- a/spiffworkflow-frontend/src/components/ProcessInstanceListTable.tsx +++ b/spiffworkflow-frontend/src/components/ProcessInstanceListTable.tsx @@ -7,7 +7,7 @@ import { } from 'react-router-dom'; // @ts-ignore -import { Filter, Close, AddAlt, AddFilled } from '@carbon/icons-react'; +import { Filter, Close, AddAlt } from '@carbon/icons-react'; import { Button, ButtonSet, @@ -27,6 +27,7 @@ import { Modal, ComboBox, TextInput, + FormLabel, // @ts-ignore } from '@carbon/react'; import { PROCESS_STATUSES, DATE_FORMAT, DATE_FORMAT_CARBON } from '../config'; @@ -93,7 +94,7 @@ export default function ProcessInstanceListTable({ autoReload = false, }: OwnProps) { const params = useParams(); - const [searchParams, setSearchParams] = useSearchParams(); + const [searchParams] = useSearchParams(); const navigate = useNavigate(); const [processInstances, setProcessInstances] = useState([]); @@ -175,16 +176,12 @@ export default function ProcessInstanceListTable({ function setProcessInstancesFromResult(result: any) { const processInstancesFromApi = result.results; setProcessInstances(processInstancesFromApi); - setReportMetadata(result.report_metadata); setPagination(result.pagination); setProcessInstanceFilters(result.filters); - // TODO: need to iron out this interaction some 
more - if (result.report_identifier !== 'default') { - setProcessInstanceReportSelection({ - id: result.report_identifier, - display_name: result.report_identifier, - }); + setReportMetadata(result.report.report_metadata); + if (result.report.id) { + setProcessInstanceReportSelection(result.report); } } function getProcessInstances() { @@ -206,14 +203,10 @@ export default function ProcessInstanceListTable({ queryParamString += `&user_filter=${userAppliedFilter}`; } - let reportIdentifierToUse: any = reportIdentifier; - - if (!reportIdentifierToUse) { - reportIdentifierToUse = searchParams.get('report_identifier'); - } - - if (reportIdentifierToUse) { - queryParamString += `&report_identifier=${reportIdentifierToUse}`; + if (searchParams.get('report_id')) { + queryParamString += `&report_id=${searchParams.get('report_id')}`; + } else if (reportIdentifier) { + queryParamString += `&report_identifier=${reportIdentifier}`; } Object.keys(dateParametersToAlwaysFilterBy).forEach( @@ -376,7 +369,7 @@ export default function ProcessInstanceListTable({ title="Perspective Saved" subtitle={`as '${ processInstanceReportSelection - ? processInstanceReportSelection.display_name + ? processInstanceReportSelection.identifier : '' }'`} kind="success" @@ -498,7 +491,7 @@ export default function ProcessInstanceListTable({ } if (processInstanceReportSelection) { - queryParamString += `&report_identifier=${processInstanceReportSelection.id}`; + queryParamString += `&report_id=${processInstanceReportSelection.id}`; } setErrorMessage(null); @@ -595,18 +588,17 @@ export default function ProcessInstanceListTable({ savedReport: boolean = false ) => { clearFilters(); - const selectedReport = selection.selectedItem; setProcessInstanceReportSelection(selectedReport); let queryParamString = ''; if (selectedReport) { - queryParamString = `&report_identifier=${selectedReport.id}`; + queryParamString = `?report_id=${selectedReport.id}`; } setErrorMessage(null); setProcessInstanceReportJustSaved(savedReport); - navigate(`/admin/process-instances?${queryParamString}`); + navigate(`/admin/process-instances${queryParamString}`); }; const reportColumns = () => { @@ -619,16 +611,17 @@ export default function ProcessInstanceListTable({ }); }; + // TODO onSuccess reload/select the new report in the report search + const onSaveReportSuccess = (result: any) => { + processInstanceReportDidChange( + { + selectedItem: result, + }, + true + ); + }; + const saveAsReportComponent = () => { - // TODO onSuccess reload/select the new report in the report search - const callback = (identifier: string) => { - processInstanceReportDidChange( - { - selectedItem: { id: identifier, display_name: identifier }, - }, - true - ); - }; const { valid, startFromSeconds, @@ -642,9 +635,10 @@ export default function ProcessInstanceListTable({ } return ( { + if (reportColumnToOperateOn) { + const reportColumnToOperateOnCopy = { + ...reportColumnToOperateOn, + }; + reportColumnToOperateOnCopy.condition_value = event.target.value; + setReportColumnToOperateOn(reportColumnToOperateOnCopy); + } + }; + const reportColumnForm = () => { if (reportColumnFormMode === '') { return null; @@ -732,6 +736,22 @@ export default function ProcessInstanceListTable({ }} />, ]; + if (reportColumnToOperateOn && reportColumnToOperateOn.filterable) { + console.log('reportColumnToOperateOn', reportColumnToOperateOn); + formElements.push( + + ); + } if (reportColumnFormMode === 'new') { formElements.push( + - - - {saveAsReportComponent()} - - ); }; @@ -1079,7 +1148,11 @@ export 
default function ProcessInstanceListTable({ /> , ]; - if (processInstanceReportSelection && showFilterOptions) { + if ( + processInstanceReportSelection && + showFilterOptions && + reportMetadata + ) { columns.push( { - return `${truncateString(processInstanceReport.identifier, 20)} (${ + return `${truncateString(processInstanceReport.identifier, 20)} (Id: ${ processInstanceReport.id })`; }; diff --git a/spiffworkflow-frontend/src/config.tsx b/spiffworkflow-frontend/src/config.tsx index 5e7e96fe..b0816a39 100644 --- a/spiffworkflow-frontend/src/config.tsx +++ b/spiffworkflow-frontend/src/config.tsx @@ -14,6 +14,7 @@ export const PROCESS_STATUSES = [ 'complete', 'error', 'suspended', + 'terminated', ]; // with time: yyyy-MM-dd HH:mm:ss diff --git a/spiffworkflow-frontend/src/index.css b/spiffworkflow-frontend/src/index.css index 1c708fe1..6b02ea35 100644 --- a/spiffworkflow-frontend/src/index.css +++ b/spiffworkflow-frontend/src/index.css @@ -346,3 +346,10 @@ td.actions-cell { .combo-box-in-modal { height: 300px; } + +.cds--btn.narrow-button { + max-width: 10rem; + min-width: 5rem; + word-break: normal; + +} diff --git a/spiffworkflow-frontend/src/interfaces.ts b/spiffworkflow-frontend/src/interfaces.ts index 8d7abc45..a75b9a82 100644 --- a/spiffworkflow-frontend/src/interfaces.ts +++ b/spiffworkflow-frontend/src/interfaces.ts @@ -63,15 +63,27 @@ export interface MessageInstance { message_correlations?: MessageCorrelations; } +export interface ReportFilter { + field_name: string; + field_value: string; + operator?: string; +} + export interface ReportColumn { Header: string; accessor: string; filterable: boolean; - condition_value?: string; +} + +export interface ReportColumnForEditing extends ReportColumn { + filter_field_value: string; + filter_operator: string; } export interface ReportMetadata { columns: ReportColumn[]; + filter_by: ReportFilter[]; + order_by: string[]; } export interface ProcessInstanceReport { From 6b75fc32a3ff811a2c821fb3588bff2c9608fce6 Mon Sep 17 00:00:00 2001 From: jasquat Date: Fri, 2 Dec 2022 15:46:05 -0500 Subject: [PATCH 23/40] added some support to add process model metadata. 
need to fix frontend w/ burnettk --- .../models/process_model.py | 3 + .../routes/process_api_blueprint.py | 38 +++++---- .../integration/test_process_api.py | 2 + .../components/ProcessInstanceListTable.tsx | 29 +++---- .../src/components/ProcessModelForm.tsx | 82 ++++++++++++++++++- spiffworkflow-frontend/src/interfaces.ts | 5 ++ 6 files changed, 130 insertions(+), 29 deletions(-) diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/models/process_model.py b/spiffworkflow-backend/src/spiffworkflow_backend/models/process_model.py index 4f5ee2ad..278b5ef6 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/models/process_model.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/models/process_model.py @@ -38,6 +38,7 @@ class ProcessModelInfo: fault_or_suspend_on_exception: str = NotificationType.fault.value exception_notification_addresses: list[str] = field(default_factory=list) parent_groups: list[dict] | None = None + metadata_extraction_paths: dict[str, str] | None = None def __post_init__(self) -> None: """__post_init__.""" @@ -76,6 +77,8 @@ class ProcessModelInfoSchema(Schema): exception_notification_addresses = marshmallow.fields.List( marshmallow.fields.String ) + metadata_extraction_paths = marshmallow.fields.Dict(keys=marshmallow.fields.Str(required=False), values=marshmallow.fields.Str(required=False), required=False) + @post_load def make_spec( diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/routes/process_api_blueprint.py b/spiffworkflow-backend/src/spiffworkflow_backend/routes/process_api_blueprint.py index d19472cc..7e73a285 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/routes/process_api_blueprint.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/routes/process_api_blueprint.py @@ -261,19 +261,26 @@ def process_model_create( modified_process_group_id: str, body: Dict[str, Union[str, bool, int]] ) -> flask.wrappers.Response: """Process_model_create.""" - process_model_info = ProcessModelInfoSchema().load(body) + body_include_list = [ + "id", + "display_name", + "primary_file_name", + "primary_process_id", + "description", + "metadata_extraction_paths", + ] + body_filtered = { + include_item: body[include_item] + for include_item in body_include_list + if include_item in body + } + if modified_process_group_id is None: raise ApiError( error_code="process_group_id_not_specified", message="Process Model could not be created when process_group_id path param is unspecified", status_code=400, ) - if process_model_info is None: - raise ApiError( - error_code="process_model_could_not_be_created", - message=f"Process Model could not be created from given body: {body}", - status_code=400, - ) unmodified_process_group_id = un_modify_modified_process_model_id( modified_process_group_id @@ -286,6 +293,14 @@ def process_model_create( status_code=400, ) + process_model_info = ProcessModelInfo(**body_filtered) # type: ignore + if process_model_info is None: + raise ApiError( + error_code="process_model_could_not_be_created", + message=f"Process Model could not be created from given body: {body}", + status_code=400, + ) + ProcessModelService.add_process_model(process_model_info) return Response( json.dumps(ProcessModelInfoSchema().dump(process_model_info)), @@ -299,7 +314,6 @@ def process_model_delete( ) -> flask.wrappers.Response: """Process_model_delete.""" process_model_identifier = modified_process_model_identifier.replace(":", "/") - # process_model_identifier = f"{process_group_id}/{process_model_id}" 
ProcessModelService().process_model_delete(process_model_identifier) return Response(json.dumps({"ok": True}), status=200, mimetype="application/json") @@ -314,6 +328,7 @@ def process_model_update( "primary_file_name", "primary_process_id", "description", + "metadata_extraction_paths", ] body_filtered = { include_item: body[include_item] @@ -321,7 +336,6 @@ def process_model_update( if include_item in body } - # process_model_identifier = f"{process_group_id}/{process_model_id}" process_model = get_process_model(process_model_identifier) ProcessModelService.update_process_model(process_model, body_filtered) return ProcessModelInfoSchema().dump(process_model) @@ -330,10 +344,7 @@ def process_model_update( def process_model_show(modified_process_model_identifier: str) -> Any: """Process_model_show.""" process_model_identifier = modified_process_model_identifier.replace(":", "/") - # process_model_identifier = f"{process_group_id}/{process_model_id}" process_model = get_process_model(process_model_identifier) - # TODO: Temporary. Should not need the next line once models have correct ids - # process_model.id = process_model_identifier files = sorted(SpecFileService.get_files(process_model)) process_model.files = files for file in process_model.files: @@ -425,7 +436,6 @@ def process_model_file_update( ) -> flask.wrappers.Response: """Process_model_file_update.""" process_model_identifier = modified_process_model_id.replace(":", "/") - # process_model_identifier = f"{process_group_id}/{process_model_id}" process_model = get_process_model(process_model_identifier) request_file = get_file_from_request() @@ -1142,7 +1152,7 @@ def process_instance_report_show( per_page: int = 100, ) -> flask.wrappers.Response: """Process_instance_report_show.""" - process_instances = ProcessInstanceModel.query.order_by( # .filter_by(process_model_identifier=process_model.id) + process_instances = ProcessInstanceModel.query.order_by( ProcessInstanceModel.start_in_seconds.desc(), ProcessInstanceModel.id.desc() # type: ignore ).paginate( page=page, per_page=per_page, error_out=False diff --git a/spiffworkflow-backend/tests/spiffworkflow_backend/integration/test_process_api.py b/spiffworkflow-backend/tests/spiffworkflow_backend/integration/test_process_api.py index 215e44d4..b30652a4 100644 --- a/spiffworkflow-backend/tests/spiffworkflow_backend/integration/test_process_api.py +++ b/spiffworkflow-backend/tests/spiffworkflow_backend/integration/test_process_api.py @@ -333,6 +333,7 @@ class TestProcessApi(BaseTest): process_model.display_name = "Updated Display Name" process_model.primary_file_name = "superduper.bpmn" process_model.primary_process_id = "superduper" + process_model.metadata_extraction_paths = {'extraction1': 'path1'} modified_process_model_identifier = process_model_identifier.replace("/", ":") response = client.put( @@ -346,6 +347,7 @@ class TestProcessApi(BaseTest): assert response.json["display_name"] == "Updated Display Name" assert response.json["primary_file_name"] == "superduper.bpmn" assert response.json["primary_process_id"] == "superduper" + assert response.json["metadata_extraction_paths"] == {'extraction1': 'path1'} def test_process_model_list_all( self, diff --git a/spiffworkflow-frontend/src/components/ProcessInstanceListTable.tsx b/spiffworkflow-frontend/src/components/ProcessInstanceListTable.tsx index 92355fe9..2c661719 100644 --- a/spiffworkflow-frontend/src/components/ProcessInstanceListTable.tsx +++ b/spiffworkflow-frontend/src/components/ProcessInstanceListTable.tsx @@ -142,7 +142,7 
@@ export default function ProcessInstanceListTable({ ReportColumn[] >([]); const [processInstanceReportJustSaved, setProcessInstanceReportJustSaved] = - useState(false); + useState(null); const [showReportColumnForm, setShowReportColumnForm] = useState(false); const [reportColumnToOperateOn, setReportColumnToOperateOn] = @@ -367,10 +367,14 @@ export default function ProcessInstanceListTable({ const processInstanceReportSaveTag = () => { if (processInstanceReportJustSaved) { + let titleOperation = 'Updated'; + if (processInstanceReportJustSaved === 'new') { + titleOperation = 'Created'; + } return ( { + const processInstanceReportDidChange = (selection: any, mode?: string) => { clearFilters(); const selectedReport = selection.selectedItem; setProcessInstanceReportSelection(selectedReport); @@ -600,7 +601,7 @@ export default function ProcessInstanceListTable({ } setErrorMessage(null); - setProcessInstanceReportJustSaved(savedReport); + setProcessInstanceReportJustSaved(mode || null); navigate(`/admin/process-instances${queryParamString}`); }; @@ -615,12 +616,12 @@ export default function ProcessInstanceListTable({ }; // TODO onSuccess reload/select the new report in the report search - const onSaveReportSuccess = (result: any) => { + const onSaveReportSuccess = (result: any, mode: string) => { processInstanceReportDidChange( { selectedItem: result, }, - true + mode ); }; @@ -638,7 +639,7 @@ export default function ProcessInstanceListTable({ } return ( onSaveReportSuccess(result, 'new')} buttonClassName="narrow-button" columnArray={reportColumns()} orderBy="" @@ -705,7 +706,7 @@ export default function ProcessInstanceListTable({ } else { newReportFilters.splice(existingReportFilterIndex, 1); } - } else { + } else if (reportColumnForEditing.filter_field_value) { newReportFilters = newReportFilters.concat([newReportFilter]); } } @@ -1157,7 +1158,7 @@ export default function ProcessInstanceListTable({ onSaveReportSuccess(result, 'edit')} columnArray={reportColumns()} orderBy="" buttonText="Save" diff --git a/spiffworkflow-frontend/src/components/ProcessModelForm.tsx b/spiffworkflow-frontend/src/components/ProcessModelForm.tsx index 396f1ea0..0866b60d 100644 --- a/spiffworkflow-frontend/src/components/ProcessModelForm.tsx +++ b/spiffworkflow-frontend/src/components/ProcessModelForm.tsx @@ -2,9 +2,11 @@ import { useState } from 'react'; import { useNavigate } from 'react-router-dom'; // @ts-ignore import { Button, ButtonSet, Form, Stack, TextInput } from '@carbon/react'; +// @ts-ignore +import { AddAlt } from '@carbon/icons-react'; import { modifyProcessIdentifierForPathParam, slugifyString } from '../helpers'; import HttpService from '../services/HttpService'; -import { ProcessModel } from '../interfaces'; +import { MetadataExtractionPaths, ProcessModel } from '../interfaces'; type OwnProps = { mode: string; @@ -23,6 +25,7 @@ export default function ProcessModelForm({ const [idHasBeenUpdatedByUser, setIdHasBeenUpdatedByUser] = useState(false); const [displayNameInvalid, setDisplayNameInvalid] = useState(false); + useState(false); const navigate = useNavigate(); const navigateToProcessModel = (result: ProcessModel) => { @@ -64,6 +67,7 @@ export default function ProcessModelForm({ const postBody = { display_name: processModel.display_name, description: processModel.description, + metadata_extraction_paths: processModel.metadata_extraction_paths, }; if (mode === 'new') { Object.assign(postBody, { @@ -87,6 +91,66 @@ export default function ProcessModelForm({ setProcessModel(processModelToCopy); }; + 
const metadataExtractionPathForm = ( + metadataKey: string, + metadataPath: string + ) => { + return ( + <> + { + const cep: MetadataExtractionPaths = + processModel.metadata_extraction_paths || {}; + delete cep[metadataKey]; + cep[event.target.value] = metadataPath; + updateProcessModel({ metadata_extraction_paths: cep }); + }} + /> + { + const cep: MetadataExtractionPaths = + processModel.metadata_extraction_paths || {}; + cep[metadataKey] = event.target.value; + updateProcessModel({ metadata_extraction_paths: cep }); + }} + /> + + ); + }; + + const metadataExtractionPathFormArea = () => { + if (processModel.metadata_extraction_paths) { + console.log( + 'processModel.metadata_extraction_paths', + processModel.metadata_extraction_paths + ); + return Object.keys(processModel.metadata_extraction_paths).map( + (metadataKey: string) => { + return metadataExtractionPathForm( + metadataKey, + processModel.metadata_extraction_paths + ? processModel.metadata_extraction_paths[metadataKey] + : '' + ); + } + ); + } + return null; + }; + + const addBlankMetadataExtractionPath = () => { + const cep: MetadataExtractionPaths = + processModel.metadata_extraction_paths || {}; + Object.assign(cep, { '': '' }); + updateProcessModel({ metadata_extraction_paths: cep }); + }; + const onDisplayNameChanged = (newDisplayName: any) => { setDisplayNameInvalid(false); const updateDict = { display_name: newDisplayName }; @@ -145,6 +209,22 @@ export default function ProcessModelForm({ /> ); + textInputs.push(<>{metadataExtractionPathFormArea()}); + textInputs.push( + + ); + return textInputs; }; diff --git a/spiffworkflow-frontend/src/interfaces.ts b/spiffworkflow-frontend/src/interfaces.ts index a75b9a82..3b428b56 100644 --- a/spiffworkflow-frontend/src/interfaces.ts +++ b/spiffworkflow-frontend/src/interfaces.ts @@ -98,6 +98,10 @@ export interface ProcessGroupLite { display_name: string; } +export interface MetadataExtractionPaths { + [key: string]: string; +} + export interface ProcessModel { id: string; description: string; @@ -105,6 +109,7 @@ export interface ProcessModel { primary_file_name: string; files: ProcessFile[]; parent_groups?: ProcessGroupLite[]; + metadata_extraction_paths?: MetadataExtractionPaths; } export interface ProcessGroup { From bfb2c5627ad714ab537be01d560884c3d241a347 Mon Sep 17 00:00:00 2001 From: jasquat Date: Fri, 2 Dec 2022 16:03:43 -0500 Subject: [PATCH 24/40] using an array for metadata extraction paths now instead of dictionaries w/ burnettk --- .../models/process_model.py | 11 +++-- .../routes/process_api_blueprint.py | 27 +++++++----- .../process_instance_report_service.py | 15 +++++-- .../integration/test_process_api.py | 8 +++- .../src/components/ProcessModelForm.tsx | 44 +++++++++---------- spiffworkflow-frontend/src/interfaces.ts | 7 +-- 6 files changed, 67 insertions(+), 45 deletions(-) diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/models/process_model.py b/spiffworkflow-backend/src/spiffworkflow_backend/models/process_model.py index 278b5ef6..e8d5eed1 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/models/process_model.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/models/process_model.py @@ -38,7 +38,7 @@ class ProcessModelInfo: fault_or_suspend_on_exception: str = NotificationType.fault.value exception_notification_addresses: list[str] = field(default_factory=list) parent_groups: list[dict] | None = None - metadata_extraction_paths: dict[str, str] | None = None + metadata_extraction_paths: list[dict[str, str]] | None = None def 
__post_init__(self) -> None: """__post_init__.""" @@ -77,8 +77,13 @@ class ProcessModelInfoSchema(Schema): exception_notification_addresses = marshmallow.fields.List( marshmallow.fields.String ) - metadata_extraction_paths = marshmallow.fields.Dict(keys=marshmallow.fields.Str(required=False), values=marshmallow.fields.Str(required=False), required=False) - + metadata_extraction_paths = marshmallow.fields.List( + marshmallow.fields.Dict( + keys=marshmallow.fields.Str(required=False), + values=marshmallow.fields.Str(required=False), + required=False, + ) + ) @post_load def make_spec( diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/routes/process_api_blueprint.py b/spiffworkflow-backend/src/spiffworkflow_backend/routes/process_api_blueprint.py index 7e73a285..70653026 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/routes/process_api_blueprint.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/routes/process_api_blueprint.py @@ -952,17 +952,26 @@ def process_instance_list( continue instance_metadata_alias = aliased(ProcessInstanceMetadataModel) - filter_for_column = next((f for f in process_instance_report.report_metadata['filter_by'] if f['field_name'] == column['accessor']), None) + filter_for_column = next( + ( + f + for f in process_instance_report.report_metadata["filter_by"] + if f["field_name"] == column["accessor"] + ), + None, + ) isouter = True - conditions = [ProcessInstanceModel.id == instance_metadata_alias.process_instance_id, - instance_metadata_alias.key == column["accessor"]] + conditions = [ + ProcessInstanceModel.id == instance_metadata_alias.process_instance_id, + instance_metadata_alias.key == column["accessor"], + ] if filter_for_column: isouter = False - conditions.append(instance_metadata_alias.value == filter_for_column["field_value"]) + conditions.append( + instance_metadata_alias.value == filter_for_column["field_value"] + ) process_instance_query = process_instance_query.join( - instance_metadata_alias, - and_(*conditions), - isouter=isouter + instance_metadata_alias, and_(*conditions), isouter=isouter ).add_columns(func.max(instance_metadata_alias.value).label(column["accessor"])) process_instances = ( @@ -1154,9 +1163,7 @@ def process_instance_report_show( """Process_instance_report_show.""" process_instances = ProcessInstanceModel.query.order_by( ProcessInstanceModel.start_in_seconds.desc(), ProcessInstanceModel.id.desc() # type: ignore - ).paginate( - page=page, per_page=per_page, error_out=False - ) + ).paginate(page=page, per_page=per_page, error_out=False) process_instance_report = ProcessInstanceReportModel.query.filter_by( id=report_id, diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/services/process_instance_report_service.py b/spiffworkflow-backend/src/spiffworkflow_backend/services/process_instance_report_service.py index d9096d63..84d5d675 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/services/process_instance_report_service.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/services/process_instance_report_service.py @@ -84,7 +84,11 @@ class ProcessInstanceReportService: # TODO replace with system reports that are loaded on launch (or similar) temp_system_metadata_map = { - "default": {"columns": cls.builtin_column_options(), "filter_by": [], 'order_by': ['-start_in_seconds', '-id']}, + "default": { + "columns": cls.builtin_column_options(), + "filter_by": [], + "order_by": ["-start_in_seconds", "-id"], + }, "system_report_instances_initiated_by_me": { "columns": [ {"Header": "id", 
"accessor": "id"}, @@ -96,13 +100,15 @@ class ProcessInstanceReportService: {"Header": "end_in_seconds", "accessor": "end_in_seconds"}, {"Header": "status", "accessor": "status"}, ], - "filter_by": [{"field_name": "initiated_by_me", "field_value": True}],'order_by': ['-start_in_seconds', '-id'] + "filter_by": [{"field_name": "initiated_by_me", "field_value": True}], + "order_by": ["-start_in_seconds", "-id"], }, "system_report_instances_with_tasks_completed_by_me": { "columns": cls.builtin_column_options(), "filter_by": [ {"field_name": "with_tasks_completed_by_me", "field_value": True} - ],'order_by': ['-start_in_seconds', '-id'] + ], + "order_by": ["-start_in_seconds", "-id"], }, "system_report_instances_with_tasks_completed_by_my_groups": { "columns": cls.builtin_column_options(), @@ -111,7 +117,8 @@ class ProcessInstanceReportService: "field_name": "with_tasks_completed_by_my_group", "field_value": True, } - ],'order_by': ['-start_in_seconds', '-id'] + ], + "order_by": ["-start_in_seconds", "-id"], }, } diff --git a/spiffworkflow-backend/tests/spiffworkflow_backend/integration/test_process_api.py b/spiffworkflow-backend/tests/spiffworkflow_backend/integration/test_process_api.py index b30652a4..d49eb7c5 100644 --- a/spiffworkflow-backend/tests/spiffworkflow_backend/integration/test_process_api.py +++ b/spiffworkflow-backend/tests/spiffworkflow_backend/integration/test_process_api.py @@ -333,7 +333,9 @@ class TestProcessApi(BaseTest): process_model.display_name = "Updated Display Name" process_model.primary_file_name = "superduper.bpmn" process_model.primary_process_id = "superduper" - process_model.metadata_extraction_paths = {'extraction1': 'path1'} + process_model.metadata_extraction_paths = [ + {"key": "extraction1", "path": "path1"} + ] modified_process_model_identifier = process_model_identifier.replace("/", ":") response = client.put( @@ -347,7 +349,9 @@ class TestProcessApi(BaseTest): assert response.json["display_name"] == "Updated Display Name" assert response.json["primary_file_name"] == "superduper.bpmn" assert response.json["primary_process_id"] == "superduper" - assert response.json["metadata_extraction_paths"] == {'extraction1': 'path1'} + assert response.json["metadata_extraction_paths"] == [ + {"key": "extraction1", "path": "path1"} + ] def test_process_model_list_all( self, diff --git a/spiffworkflow-frontend/src/components/ProcessModelForm.tsx b/spiffworkflow-frontend/src/components/ProcessModelForm.tsx index 0866b60d..7e4e1169 100644 --- a/spiffworkflow-frontend/src/components/ProcessModelForm.tsx +++ b/spiffworkflow-frontend/src/components/ProcessModelForm.tsx @@ -6,7 +6,7 @@ import { Button, ButtonSet, Form, Stack, TextInput } from '@carbon/react'; import { AddAlt } from '@carbon/icons-react'; import { modifyProcessIdentifierForPathParam, slugifyString } from '../helpers'; import HttpService from '../services/HttpService'; -import { MetadataExtractionPaths, ProcessModel } from '../interfaces'; +import { MetadataExtractionPath, ProcessModel } from '../interfaces'; type OwnProps = { mode: string; @@ -92,31 +92,34 @@ export default function ProcessModelForm({ }; const metadataExtractionPathForm = ( - metadataKey: string, - metadataPath: string + index: number, + metadataExtractionPath: MetadataExtractionPath ) => { return ( <> { - const cep: MetadataExtractionPaths = - processModel.metadata_extraction_paths || {}; - delete cep[metadataKey]; - cep[event.target.value] = metadataPath; + const cep: MetadataExtractionPath[] = + processModel.metadata_extraction_paths || []; 
+ const newMeta = { ...metadataExtractionPath }; + newMeta.key = event.target.value; + cep[index] = newMeta; updateProcessModel({ metadata_extraction_paths: cep }); }} /> { - const cep: MetadataExtractionPaths = - processModel.metadata_extraction_paths || {}; - cep[metadataKey] = event.target.value; + const cep: MetadataExtractionPath[] = + processModel.metadata_extraction_paths || []; + const newMeta = { ...metadataExtractionPath }; + newMeta.path = event.target.value; + cep[index] = newMeta; updateProcessModel({ metadata_extraction_paths: cep }); }} /> @@ -130,14 +133,9 @@ export default function ProcessModelForm({ 'processModel.metadata_extraction_paths', processModel.metadata_extraction_paths ); - return Object.keys(processModel.metadata_extraction_paths).map( - (metadataKey: string) => { - return metadataExtractionPathForm( - metadataKey, - processModel.metadata_extraction_paths - ? processModel.metadata_extraction_paths[metadataKey] - : '' - ); + return processModel.metadata_extraction_paths.map( + (metadataExtractionPath: MetadataExtractionPath, index: number) => { + return metadataExtractionPathForm(index, metadataExtractionPath); } ); } @@ -145,9 +143,9 @@ export default function ProcessModelForm({ }; const addBlankMetadataExtractionPath = () => { - const cep: MetadataExtractionPaths = - processModel.metadata_extraction_paths || {}; - Object.assign(cep, { '': '' }); + const cep: MetadataExtractionPath[] = + processModel.metadata_extraction_paths || []; + cep.push({ key: '', path: '' }); updateProcessModel({ metadata_extraction_paths: cep }); }; diff --git a/spiffworkflow-frontend/src/interfaces.ts b/spiffworkflow-frontend/src/interfaces.ts index 3b428b56..6c9ff905 100644 --- a/spiffworkflow-frontend/src/interfaces.ts +++ b/spiffworkflow-frontend/src/interfaces.ts @@ -98,8 +98,9 @@ export interface ProcessGroupLite { display_name: string; } -export interface MetadataExtractionPaths { - [key: string]: string; +export interface MetadataExtractionPath { + key: string; + path: string; } export interface ProcessModel { @@ -109,7 +110,7 @@ export interface ProcessModel { primary_file_name: string; files: ProcessFile[]; parent_groups?: ProcessGroupLite[]; - metadata_extraction_paths?: MetadataExtractionPaths; + metadata_extraction_paths?: MetadataExtractionPath[]; } export interface ProcessGroup { From 1b3bf52be350ebab4229e16317b4177af5531970 Mon Sep 17 00:00:00 2001 From: burnettk Date: Fri, 2 Dec 2022 16:53:01 -0500 Subject: [PATCH 25/40] fix tests --- .../services/process_instance_processor.py | 7 ++++- .../integration/test_process_api.py | 29 ++++++++++--------- 2 files changed, 21 insertions(+), 15 deletions(-) diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/services/process_instance_processor.py b/spiffworkflow-backend/src/spiffworkflow_backend/services/process_instance_processor.py index d1df6742..df54fa8a 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/services/process_instance_processor.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/services/process_instance_processor.py @@ -178,7 +178,12 @@ class CustomBpmnScriptEngine(PythonScriptEngine): # type: ignore ) return Script.generate_augmented_list(script_attributes_context) - def evaluate(self, task: SpiffTask, expression: str, external_methods=None) -> Any: + def evaluate( + self, + task: SpiffTask, + expression: str, + external_methods: Optional[dict[str, Any]] = None, + ) -> Any: """Evaluate.""" return self._evaluate(expression, task.data, task, external_methods) diff --git 
a/spiffworkflow-backend/tests/spiffworkflow_backend/integration/test_process_api.py b/spiffworkflow-backend/tests/spiffworkflow_backend/integration/test_process_api.py index d49eb7c5..2ca5a059 100644 --- a/spiffworkflow-backend/tests/spiffworkflow_backend/integration/test_process_api.py +++ b/spiffworkflow-backend/tests/spiffworkflow_backend/integration/test_process_api.py @@ -1732,14 +1732,14 @@ class TestProcessApi(BaseTest): ], } - ProcessInstanceReportModel.create_with_attributes( + report = ProcessInstanceReportModel.create_with_attributes( identifier="sure", report_metadata=report_metadata, user=with_super_admin_user, ) response = client.get( - "/v1.0/process-instances/reports/sure", + f"/v1.0/process-instances/reports/{report.id}", headers=self.logged_in_headers(with_super_admin_user), ) assert response.status_code == 200 @@ -1778,14 +1778,14 @@ class TestProcessApi(BaseTest): ], } - ProcessInstanceReportModel.create_with_attributes( + report = ProcessInstanceReportModel.create_with_attributes( identifier="sure", report_metadata=report_metadata, user=with_super_admin_user, ) response = client.get( - "/v1.0/process-instances/reports/sure?grade_level=1", + f"/v1.0/process-instances/reports/{report.id}?grade_level=1", headers=self.logged_in_headers(with_super_admin_user), ) assert response.status_code == 200 @@ -1800,13 +1800,13 @@ class TestProcessApi(BaseTest): with_super_admin_user: UserModel, setup_process_instances_for_reports: list[ProcessInstanceModel], ) -> None: - """Test_process_instance_report_show_with_default_list.""" response = client.get( - "/v1.0/process-instances/reports/sure?grade_level=1", + "/v1.0/process-instances/reports/13000000?grade_level=1", headers=self.logged_in_headers(with_super_admin_user), ) assert response.status_code == 404 data = json.loads(response.get_data(as_text=True)) + print(f"data: {data}") assert data["error_code"] == "unknown_process_instance_report" def setup_testing_instance( @@ -2643,15 +2643,16 @@ class TestProcessApi(BaseTest): assert response.json is not None assert response.status_code == 200 assert response.json == [ - {"Header": "id", "accessor": "id"}, + {"Header": "Id", "accessor": "id", "filterable": False}, { - "Header": "process_model_display_name", + "Header": "Process", "accessor": "process_model_display_name", + "filterable": False, }, - {"Header": "start_in_seconds", "accessor": "start_in_seconds"}, - {"Header": "end_in_seconds", "accessor": "end_in_seconds"}, - {"Header": "username", "accessor": "username"}, - {"Header": "status", "accessor": "status"}, - {"Header": "key1", "accessor": "key1"}, - {"Header": "key2", "accessor": "key2"}, + {"Header": "Start", "accessor": "start_in_seconds", "filterable": False}, + {"Header": "End", "accessor": "end_in_seconds", "filterable": False}, + {"Header": "Username", "accessor": "username", "filterable": False}, + {"Header": "Status", "accessor": "status", "filterable": False}, + {"Header": "key1", "accessor": "key1", "filterable": True}, + {"Header": "key2", "accessor": "key2", "filterable": True}, ] From b0e68b0385136c33987d5200c890f066bdde1e17 Mon Sep 17 00:00:00 2001 From: burnettk Date: Fri, 2 Dec 2022 17:15:22 -0500 Subject: [PATCH 26/40] add extraction, needs test --- .../services/process_instance_processor.py | 49 +++++++++++++++---- .../services/process_instance_service.py | 15 ------ .../integration/test_process_api.py | 1 - 3 files changed, 40 insertions(+), 25 deletions(-) diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/services/process_instance_processor.py 
b/spiffworkflow-backend/src/spiffworkflow_backend/services/process_instance_processor.py index df54fa8a..21fae67e 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/services/process_instance_processor.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/services/process_instance_processor.py @@ -81,6 +81,7 @@ from spiffworkflow_backend.models.message_instance import MessageInstanceModel from spiffworkflow_backend.models.message_instance import MessageModel from spiffworkflow_backend.models.process_instance import ProcessInstanceModel from spiffworkflow_backend.models.process_instance import ProcessInstanceStatus +from spiffworkflow_backend.models.process_instance_metadata import ProcessInstanceMetadataModel from spiffworkflow_backend.models.process_model import ProcessModelInfo from spiffworkflow_backend.models.script_attributes_context import ( ScriptAttributesContext, @@ -576,6 +577,34 @@ class ProcessInstanceProcessor: db.session.add(details_model) db.session.commit() + def extract_metadata(self, process_model_info: ProcessModelInfo) -> dict: + if process_model_info.metadata_extraction_paths is None: + return + if len(metadata_extraction_paths) > 0: + return + + current_data = self.get_current_data() + for metadata_extraction_path in metadata_extraction_paths: + key = metadata_extraction_path["key"] + path = metadata_extraction_path["path"] + path_segments = path.split(".") + data_for_key = current_data + for path_segment in path_segments: + data_for_key = data_for_key[path_segment] + + pim = ProcessInstanceMetadataModel.query.filter_by( + process_instance_id=self.process_instance_model.id, + key=key, + ).first() + if pim is None: + pim = ProcessInstanceMetadataModel( + process_instance_id=self.process_instance_model.id, + key=key, + ) + pim.value = data_for_key + db.session.add(pim) + db.session.commit() + def save(self) -> None: """Saves the current state of this processor to the database.""" self.process_instance_model.bpmn_json = self.serialize() @@ -602,6 +631,15 @@ class ProcessInstanceProcessor: process_instance_id=self.process_instance_model.id ).all() ready_or_waiting_tasks = self.get_all_ready_or_waiting_tasks() + process_model_display_name = "" + process_model_info = self.process_model_service.get_process_model( + self.process_instance_model.process_model_identifier + ) + if process_model_info is not None: + process_model_display_name = process_model_info.display_name + + self.extract_metadata(process_model_info) + for ready_or_waiting_task in ready_or_waiting_tasks: # filter out non-usertasks task_spec = ready_or_waiting_task.task_spec @@ -620,13 +658,6 @@ class ProcessInstanceProcessor: if "formUiSchemaFilename" in properties: ui_form_file_name = properties["formUiSchemaFilename"] - process_model_display_name = "" - process_model_info = self.process_model_service.get_process_model( - self.process_instance_model.process_model_identifier - ) - if process_model_info is not None: - process_model_display_name = process_model_info.display_name - active_task = None for at in active_tasks: if at.task_id == str(ready_or_waiting_task.id): @@ -1151,8 +1182,8 @@ class ProcessInstanceProcessor: def get_current_data(self) -> dict[str, Any]: """Get the current data for the process. - Return either most recent task data or the process data - if the process instance is complete + Return either the most recent task data or--if the process instance is complete-- + the process data. 
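A standalone sketch (not part of the patch) of how extract_metadata above resolves a dotted extraction path against the current task data; the sample data mirrors the outer["inner"] = 'sweet1' assignment added to the test BPMN files later in this series.

    # Sketch only: dotted-path resolution as done in ProcessInstanceProcessor.extract_metadata.
    def resolve_extraction_path(current_data: dict, path: str):
        """Walk one nesting level for each dot-separated segment of the path."""
        data_for_key = current_data
        for path_segment in path.split("."):
            data_for_key = data_for_key[path_segment]
        return data_for_key

    # With data shaped like the test BPMN scripts produce:
    #   resolve_extraction_path({"outer": {"inner": "sweet1"}}, "outer.inner")  # == "sweet1"
    # The resolved value is then upserted into ProcessInstanceMetadataModel, keyed by
    # (process_instance_id, key).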
""" if self.process_instance_model.status == "complete": return self.get_data() diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/services/process_instance_service.py b/spiffworkflow-backend/src/spiffworkflow_backend/services/process_instance_service.py index f98eaae1..46bd252b 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/services/process_instance_service.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/services/process_instance_service.py @@ -322,18 +322,3 @@ class ProcessInstanceService: ) return task - - @staticmethod - def serialize_flat_with_task_data( - process_instance: ProcessInstanceModel, - ) -> dict[str, Any]: - """NOTE: This is crazy slow. Put the latest task data in the database.""" - """Serialize_flat_with_task_data.""" - # results = {} - # try: - # processor = ProcessInstanceProcessor(process_instance) - # process_instance.data = processor.get_current_data() - # results = process_instance.serialized_flat - # except ApiError: - results = process_instance.serialized - return results diff --git a/spiffworkflow-backend/tests/spiffworkflow_backend/integration/test_process_api.py b/spiffworkflow-backend/tests/spiffworkflow_backend/integration/test_process_api.py index 2ca5a059..6e0e73d2 100644 --- a/spiffworkflow-backend/tests/spiffworkflow_backend/integration/test_process_api.py +++ b/spiffworkflow-backend/tests/spiffworkflow_backend/integration/test_process_api.py @@ -1806,7 +1806,6 @@ class TestProcessApi(BaseTest): ) assert response.status_code == 404 data = json.loads(response.get_data(as_text=True)) - print(f"data: {data}") assert data["error_code"] == "unknown_process_instance_report" def setup_testing_instance( From 32c709d34d51182cf324e8066afb00ab0ca8c7ec Mon Sep 17 00:00:00 2001 From: burnettk Date: Sat, 3 Dec 2022 11:24:21 -0500 Subject: [PATCH 27/40] lint --- .../services/process_instance_processor.py | 10 +++++++--- .../integration/test_process_api.py | 1 + 2 files changed, 8 insertions(+), 3 deletions(-) diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/services/process_instance_processor.py b/spiffworkflow-backend/src/spiffworkflow_backend/services/process_instance_processor.py index 21fae67e..efc7bc4d 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/services/process_instance_processor.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/services/process_instance_processor.py @@ -81,7 +81,9 @@ from spiffworkflow_backend.models.message_instance import MessageInstanceModel from spiffworkflow_backend.models.message_instance import MessageModel from spiffworkflow_backend.models.process_instance import ProcessInstanceModel from spiffworkflow_backend.models.process_instance import ProcessInstanceStatus -from spiffworkflow_backend.models.process_instance_metadata import ProcessInstanceMetadataModel +from spiffworkflow_backend.models.process_instance_metadata import ( + ProcessInstanceMetadataModel, +) from spiffworkflow_backend.models.process_model import ProcessModelInfo from spiffworkflow_backend.models.script_attributes_context import ( ScriptAttributesContext, @@ -577,8 +579,10 @@ class ProcessInstanceProcessor: db.session.add(details_model) db.session.commit() - def extract_metadata(self, process_model_info: ProcessModelInfo) -> dict: - if process_model_info.metadata_extraction_paths is None: + def extract_metadata(self, process_model_info: ProcessModelInfo) -> None: + """Extract_metadata.""" + metadata_extraction_paths = process_model_info.metadata_extraction_paths + if metadata_extraction_paths is None: 
return if len(metadata_extraction_paths) > 0: return diff --git a/spiffworkflow-backend/tests/spiffworkflow_backend/integration/test_process_api.py b/spiffworkflow-backend/tests/spiffworkflow_backend/integration/test_process_api.py index 6e0e73d2..0b2c254b 100644 --- a/spiffworkflow-backend/tests/spiffworkflow_backend/integration/test_process_api.py +++ b/spiffworkflow-backend/tests/spiffworkflow_backend/integration/test_process_api.py @@ -1800,6 +1800,7 @@ class TestProcessApi(BaseTest): with_super_admin_user: UserModel, setup_process_instances_for_reports: list[ProcessInstanceModel], ) -> None: + """Test_process_instance_report_show_with_bad_identifier.""" response = client.get( "/v1.0/process-instances/reports/13000000?grade_level=1", headers=self.logged_in_headers(with_super_admin_user), From f8313f66dfa5b03c645000a3d6371dc43232d76e Mon Sep 17 00:00:00 2001 From: burnettk Date: Sat, 3 Dec 2022 20:16:20 -0500 Subject: [PATCH 28/40] add order_by to make this query deterministic --- .../routes/process_api_blueprint.py | 7 ++++++- .../unit/test_process_instance_report.py | 10 +++++----- 2 files changed, 11 insertions(+), 6 deletions(-) diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/routes/process_api_blueprint.py b/spiffworkflow-backend/src/spiffworkflow_backend/routes/process_api_blueprint.py index 70653026..d211f168 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/routes/process_api_blueprint.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/routes/process_api_blueprint.py @@ -1004,7 +1004,12 @@ def process_instance_list( def process_instance_report_column_list() -> flask.wrappers.Response: """Process_instance_report_column_list.""" table_columns = ProcessInstanceReportService.builtin_column_options() - columns_for_metadata = db.session.query(ProcessInstanceMetadataModel.key).distinct().all() # type: ignore + columns_for_metadata = ( + db.session.query(ProcessInstanceMetadataModel.key) + .order_by(ProcessInstanceMetadataModel.key) + .distinct() # type: ignore + .all() + ) columns_for_metadata_strings = [ {"Header": i[0], "accessor": i[0], "filterable": True} for i in columns_for_metadata diff --git a/spiffworkflow-backend/tests/spiffworkflow_backend/unit/test_process_instance_report.py b/spiffworkflow-backend/tests/spiffworkflow_backend/unit/test_process_instance_report.py index 48239507..0a5985f2 100644 --- a/spiffworkflow-backend/tests/spiffworkflow_backend/unit/test_process_instance_report.py +++ b/spiffworkflow-backend/tests/spiffworkflow_backend/unit/test_process_instance_report.py @@ -37,7 +37,7 @@ def test_generate_report_with_filter_by_with_variable_substitution( with_db_and_bpmn_file_cleanup: None, setup_process_instances_for_reports: list[ProcessInstanceModel], ) -> None: - """Test_user_can_be_given_permission_to_administer_process_group.""" + """Test_generate_report_with_filter_by_with_variable_substitution.""" process_instances = setup_process_instances_for_reports report_metadata = { "filter_by": [ @@ -61,7 +61,7 @@ def test_generate_report_with_order_by_and_one_field( with_db_and_bpmn_file_cleanup: None, setup_process_instances_for_reports: list[ProcessInstanceModel], ) -> None: - """Test_user_can_be_given_permission_to_administer_process_group.""" + """Test_generate_report_with_order_by_and_one_field.""" process_instances = setup_process_instances_for_reports report_metadata = {"order_by": ["test_score"]} results = do_report_with_metadata_and_instances(report_metadata, process_instances) @@ -75,7 +75,7 @@ def 
test_generate_report_with_order_by_and_two_fields( with_db_and_bpmn_file_cleanup: None, setup_process_instances_for_reports: list[ProcessInstanceModel], ) -> None: - """Test_user_can_be_given_permission_to_administer_process_group.""" + """Test_generate_report_with_order_by_and_two_fields.""" process_instances = setup_process_instances_for_reports report_metadata = {"order_by": ["grade_level", "test_score"]} results = do_report_with_metadata_and_instances(report_metadata, process_instances) @@ -89,7 +89,7 @@ def test_generate_report_with_order_by_desc( with_db_and_bpmn_file_cleanup: None, setup_process_instances_for_reports: list[ProcessInstanceModel], ) -> None: - """Test_user_can_be_given_permission_to_administer_process_group.""" + """Test_generate_report_with_order_by_desc.""" process_instances = setup_process_instances_for_reports report_metadata = {"order_by": ["grade_level", "-test_score"]} results = do_report_with_metadata_and_instances(report_metadata, process_instances) @@ -103,7 +103,7 @@ def test_generate_report_with_columns( with_db_and_bpmn_file_cleanup: None, setup_process_instances_for_reports: list[ProcessInstanceModel], ) -> None: - """Test_user_can_be_given_permission_to_administer_process_group.""" + """Test_generate_report_with_columns.""" process_instances = setup_process_instances_for_reports report_metadata = { "columns": [ From 391134ad333ee80df56487791137955c8f54fe51 Mon Sep 17 00:00:00 2001 From: burnettk Date: Sun, 4 Dec 2022 14:40:34 -0500 Subject: [PATCH 29/40] remove dup test process model --- .../tests/data/hello_world/hello_world.bpmn | 6 ++- .../process_instance_metadata.bpmn | 40 ------------------- .../integration/test_process_api.py | 17 ++++---- 3 files changed, 14 insertions(+), 49 deletions(-) delete mode 100644 spiffworkflow-backend/tests/data/test-process-instance-metadata-report/process_instance_metadata.bpmn diff --git a/spiffworkflow-backend/tests/data/hello_world/hello_world.bpmn b/spiffworkflow-backend/tests/data/hello_world/hello_world.bpmn index 1e5bc853..4be5adba 100644 --- a/spiffworkflow-backend/tests/data/hello_world/hello_world.bpmn +++ b/spiffworkflow-backend/tests/data/hello_world/hello_world.bpmn @@ -19,7 +19,11 @@ Flow_0bazl8x Flow_1mcaszp - a = 1 + a = 1 +b = 2 +outer = {} +outer["inner"] = 'sweet1' + Flow_1mcaszp diff --git a/spiffworkflow-backend/tests/data/test-process-instance-metadata-report/process_instance_metadata.bpmn b/spiffworkflow-backend/tests/data/test-process-instance-metadata-report/process_instance_metadata.bpmn deleted file mode 100644 index f371a350..00000000 --- a/spiffworkflow-backend/tests/data/test-process-instance-metadata-report/process_instance_metadata.bpmn +++ /dev/null @@ -1,40 +0,0 @@ - - - - - Flow_0fmt4q1 - - - - Flow_0fmt4q1 - Flow_0hhrkce - save_process_instance_metadata({"key1": "value1", "key2": "value2"}) - - - Flow_0hhrkce - - - - - - - - - - - - - - - - - - - - - - - - - - diff --git a/spiffworkflow-backend/tests/spiffworkflow_backend/integration/test_process_api.py b/spiffworkflow-backend/tests/spiffworkflow_backend/integration/test_process_api.py index 0b2c254b..a17a3875 100644 --- a/spiffworkflow-backend/tests/spiffworkflow_backend/integration/test_process_api.py +++ b/spiffworkflow-backend/tests/spiffworkflow_backend/integration/test_process_api.py @@ -2563,9 +2563,9 @@ class TestProcessApi(BaseTest): ) -> None: """Test_can_get_process_instance_list_with_report_metadata.""" process_model = load_test_spec( - process_model_id="test-process-instance-metadata-report", - 
bpmn_file_name="process_instance_metadata.bpmn", - process_model_source_directory="test-process-instance-metadata-report", + process_model_id="save_process_instance_metadata/save_process_instance_metadata", + bpmn_file_name="save_process_instance_metadata.bpmn", + process_model_source_directory="save_process_instance_metadata", ) process_instance = self.create_process_instance_from_process_model( process_model=process_model, user=with_super_admin_user @@ -2576,7 +2576,7 @@ class TestProcessApi(BaseTest): process_instance_metadata = ProcessInstanceMetadataModel.query.filter_by( process_instance_id=process_instance.id ).all() - assert len(process_instance_metadata) == 2 + assert len(process_instance_metadata) == 3 report_metadata = { "columns": [ @@ -2620,9 +2620,9 @@ class TestProcessApi(BaseTest): ) -> None: """Test_can_get_process_instance_list_with_report_metadata.""" process_model = load_test_spec( - process_model_id="test-process-instance-metadata-report", - bpmn_file_name="process_instance_metadata.bpmn", - process_model_source_directory="test-process-instance-metadata-report", + process_model_id="save_process_instance_metadata/save_process_instance_metadata", + bpmn_file_name="save_process_instance_metadata.bpmn", + process_model_source_directory="save_process_instance_metadata", ) process_instance = self.create_process_instance_from_process_model( process_model=process_model, user=with_super_admin_user @@ -2633,7 +2633,7 @@ class TestProcessApi(BaseTest): process_instance_metadata = ProcessInstanceMetadataModel.query.filter_by( process_instance_id=process_instance.id ).all() - assert len(process_instance_metadata) == 2 + assert len(process_instance_metadata) == 3 response = client.get( "/v1.0/process-instances/reports/columns", @@ -2655,4 +2655,5 @@ class TestProcessApi(BaseTest): {"Header": "Status", "accessor": "status", "filterable": False}, {"Header": "key1", "accessor": "key1", "filterable": True}, {"Header": "key2", "accessor": "key2", "filterable": True}, + {"Header": "key3", "accessor": "key3", "filterable": True}, ] From c63b7720c84a9af2c350812de8266c4f82dab5da Mon Sep 17 00:00:00 2001 From: burnettk Date: Sun, 4 Dec 2022 22:35:16 -0500 Subject: [PATCH 30/40] test for automatic saving of process instance metadata on instance save --- .../services/process_instance_processor.py | 2 +- .../nested-task-data-structure.bpmn | 55 +++++++++++++++++++ .../unit/test_process_model.py | 51 +++++++++++++++++ 3 files changed, 107 insertions(+), 1 deletion(-) create mode 100644 spiffworkflow-backend/tests/data/nested-task-data-structure/nested-task-data-structure.bpmn diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/services/process_instance_processor.py b/spiffworkflow-backend/src/spiffworkflow_backend/services/process_instance_processor.py index efc7bc4d..bdf71740 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/services/process_instance_processor.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/services/process_instance_processor.py @@ -584,7 +584,7 @@ class ProcessInstanceProcessor: metadata_extraction_paths = process_model_info.metadata_extraction_paths if metadata_extraction_paths is None: return - if len(metadata_extraction_paths) > 0: + if len(metadata_extraction_paths) <= 0: return current_data = self.get_current_data() diff --git a/spiffworkflow-backend/tests/data/nested-task-data-structure/nested-task-data-structure.bpmn b/spiffworkflow-backend/tests/data/nested-task-data-structure/nested-task-data-structure.bpmn new file mode 100644 index 
00000000..4588bef0 --- /dev/null +++ b/spiffworkflow-backend/tests/data/nested-task-data-structure/nested-task-data-structure.bpmn @@ -0,0 +1,55 @@ + + + + + Flow_1ohrjz9 + + + + Flow_1flxgry + + + + Flow_1ohrjz9 + Flow_18gs4jt + outer = {} +invoice_number = 123 +outer["inner"] = 'sweet1' + + + + Flow_18gs4jt + Flow_1flxgry + outer["inner"] = 'sweet2' + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/spiffworkflow-backend/tests/spiffworkflow_backend/unit/test_process_model.py b/spiffworkflow-backend/tests/spiffworkflow_backend/unit/test_process_model.py index 09421bc7..9eb6901b 100644 --- a/spiffworkflow-backend/tests/spiffworkflow_backend/unit/test_process_model.py +++ b/spiffworkflow-backend/tests/spiffworkflow_backend/unit/test_process_model.py @@ -5,12 +5,16 @@ from flask_bpmn.models.db import db from tests.spiffworkflow_backend.helpers.base_test import BaseTest from tests.spiffworkflow_backend.helpers.test_data import load_test_spec +from spiffworkflow_backend.models.process_instance_metadata import ( + ProcessInstanceMetadataModel, +) from spiffworkflow_backend.models.process_model import ProcessModelInfo from spiffworkflow_backend.models.spec_reference import SpecReferenceCache from spiffworkflow_backend.models.user import UserModel from spiffworkflow_backend.services.process_instance_processor import ( ProcessInstanceProcessor, ) +from spiffworkflow_backend.services.process_model_service import ProcessModelService class TestProcessModel(BaseTest): @@ -122,6 +126,53 @@ class TestProcessModel(BaseTest): processor.do_engine_steps(save=True) assert process_instance.status == "complete" + def test_extract_metadata( + self, + app: Flask, + client: FlaskClient, + with_db_and_bpmn_file_cleanup: None, + with_super_admin_user: UserModel, + ) -> None: + """Test_can_run_process_model_with_call_activities.""" + self.create_process_group( + client, with_super_admin_user, "test_group", "test_group" + ) + process_model = load_test_spec( + "test_group/hello_world", + process_model_source_directory="nested-task-data-structure", + ) + ProcessModelService.update_process_model( + process_model, + { + "metadata_extraction_paths": [ + {"key": "awesome_var", "path": "outer.inner"}, + {"key": "invoice_number", "path": "invoice_number"}, + ] + }, + ) + + process_instance = self.create_process_instance_from_process_model( + process_model + ) + processor = ProcessInstanceProcessor(process_instance) + processor.do_engine_steps(save=True) + assert process_instance.status == "complete" + + process_instance_metadata_awesome_var = ( + ProcessInstanceMetadataModel.query.filter_by( + process_instance_id=process_instance.id, key="awesome_var" + ).first() + ) + assert process_instance_metadata_awesome_var is not None + assert process_instance_metadata_awesome_var.value == "sweet2" + process_instance_metadata_awesome_var = ( + ProcessInstanceMetadataModel.query.filter_by( + process_instance_id=process_instance.id, key="invoice_number" + ).first() + ) + assert process_instance_metadata_awesome_var is not None + assert process_instance_metadata_awesome_var.value == "123" + def create_test_process_model(self, id: str, display_name: str) -> ProcessModelInfo: """Create_test_process_model.""" return ProcessModelInfo( From f920edbb56997002c821614df887e47b1ea51e02 Mon Sep 17 00:00:00 2001 From: burnettk Date: Mon, 5 Dec 2022 09:08:56 -0500 Subject: [PATCH 31/40] bump nox stuff and spiff --- .../.github/workflows/constraints.txt | 4 ++-- spiffworkflow-backend/poetry.lock | 19 +++++++++++++++---- 2 
files changed, 17 insertions(+), 6 deletions(-) diff --git a/spiffworkflow-backend/.github/workflows/constraints.txt b/spiffworkflow-backend/.github/workflows/constraints.txt index 70c8f365..7ccc8711 100644 --- a/spiffworkflow-backend/.github/workflows/constraints.txt +++ b/spiffworkflow-backend/.github/workflows/constraints.txt @@ -1,5 +1,5 @@ pip==22.2.2 -nox==2022.8.7 -nox-poetry==1.0.1 +nox==2022.11.21 +nox-poetry==1.0.2 poetry==1.2.2 virtualenv==20.16.5 diff --git a/spiffworkflow-backend/poetry.lock b/spiffworkflow-backend/poetry.lock index 2e4df58d..a23004b4 100644 --- a/spiffworkflow-backend/poetry.lock +++ b/spiffworkflow-backend/poetry.lock @@ -1851,7 +1851,7 @@ lxml = "*" type = "git" url = "https://github.com/sartography/SpiffWorkflow" reference = "main" -resolved_reference = "062eaf15d28c66f8cf07f68409429560251b12c7" +resolved_reference = "ffb1686757f944065580dd2db8def73d6c1f0134" [[package]] name = "SQLAlchemy" @@ -2563,6 +2563,7 @@ greenlet = [ {file = "greenlet-2.0.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d5b0ff9878333823226d270417f24f4d06f235cb3e54d1103b71ea537a6a86ce"}, {file = "greenlet-2.0.1-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:be9e0fb2ada7e5124f5282d6381903183ecc73ea019568d6d63d33f25b2a9000"}, {file = "greenlet-2.0.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0b493db84d124805865adc587532ebad30efa68f79ad68f11b336e0a51ec86c2"}, + {file = "greenlet-2.0.1-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:0459d94f73265744fee4c2d5ec44c6f34aa8a31017e6e9de770f7bcf29710be9"}, {file = "greenlet-2.0.1-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:a20d33124935d27b80e6fdacbd34205732660e0a1d35d8b10b3328179a2b51a1"}, {file = "greenlet-2.0.1-cp37-cp37m-win32.whl", hash = "sha256:ea688d11707d30e212e0110a1aac7f7f3f542a259235d396f88be68b649e47d1"}, {file = "greenlet-2.0.1-cp37-cp37m-win_amd64.whl", hash = "sha256:afe07421c969e259e9403c3bb658968702bc3b78ec0b6fde3ae1e73440529c23"}, @@ -2571,6 +2572,7 @@ greenlet = [ {file = "greenlet-2.0.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:659f167f419a4609bc0516fb18ea69ed39dbb25594934bd2dd4d0401660e8a1e"}, {file = "greenlet-2.0.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:356e4519d4dfa766d50ecc498544b44c0249b6de66426041d7f8b751de4d6b48"}, {file = "greenlet-2.0.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:811e1d37d60b47cb8126e0a929b58c046251f28117cb16fcd371eed61f66b764"}, + {file = "greenlet-2.0.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:d38ffd0e81ba8ef347d2be0772e899c289b59ff150ebbbbe05dc61b1246eb4e0"}, {file = "greenlet-2.0.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:0109af1138afbfb8ae647e31a2b1ab030f58b21dd8528c27beaeb0093b7938a9"}, {file = "greenlet-2.0.1-cp38-cp38-win32.whl", hash = "sha256:88c8d517e78acdf7df8a2134a3c4b964415b575d2840a2746ddb1cc6175f8608"}, {file = "greenlet-2.0.1-cp38-cp38-win_amd64.whl", hash = "sha256:d6ee1aa7ab36475035eb48c01efae87d37936a8173fc4d7b10bb02c2d75dd8f6"}, @@ -2579,6 +2581,7 @@ greenlet = [ {file = "greenlet-2.0.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:505138d4fa69462447a562a7c2ef723c6025ba12ac04478bc1ce2fcc279a2db5"}, {file = "greenlet-2.0.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:cce1e90dd302f45716a7715517c6aa0468af0bf38e814ad4eab58e88fc09f7f7"}, {file = 
"greenlet-2.0.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9e9744c657d896c7b580455e739899e492a4a452e2dd4d2b3e459f6b244a638d"}, + {file = "greenlet-2.0.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:662e8f7cad915ba75d8017b3e601afc01ef20deeeabf281bd00369de196d7726"}, {file = "greenlet-2.0.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:41b825d65f31e394b523c84db84f9383a2f7eefc13d987f308f4663794d2687e"}, {file = "greenlet-2.0.1-cp39-cp39-win32.whl", hash = "sha256:db38f80540083ea33bdab614a9d28bcec4b54daa5aff1668d7827a9fc769ae0a"}, {file = "greenlet-2.0.1-cp39-cp39-win_amd64.whl", hash = "sha256:b23d2a46d53210b498e5b701a1913697671988f4bf8e10f935433f6e7c332fb6"}, @@ -2877,10 +2880,7 @@ orjson = [ {file = "orjson-3.8.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:b68a42a31f8429728183c21fb440c21de1b62e5378d0d73f280e2d894ef8942e"}, {file = "orjson-3.8.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:ff13410ddbdda5d4197a4a4c09969cb78c722a67550f0a63c02c07aadc624833"}, {file = "orjson-3.8.0-cp310-none-win_amd64.whl", hash = "sha256:2d81e6e56bbea44be0222fb53f7b255b4e7426290516771592738ca01dbd053b"}, - {file = "orjson-3.8.0-cp311-cp311-macosx_10_7_x86_64.whl", hash = "sha256:200eae21c33f1f8b02a11f5d88d76950cd6fd986d88f1afe497a8ae2627c49aa"}, - {file = "orjson-3.8.0-cp311-cp311-macosx_10_9_x86_64.macosx_11_0_arm64.macosx_10_9_universal2.whl", hash = "sha256:9529990f3eab54b976d327360aa1ff244a4b12cb5e4c5b3712fcdd96e8fe56d4"}, {file = "orjson-3.8.0-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:e2defd9527651ad39ec20ae03c812adf47ef7662bdd6bc07dabb10888d70dc62"}, - {file = "orjson-3.8.0-cp311-none-win_amd64.whl", hash = "sha256:b21c7af0ff6228ca7105f54f0800636eb49201133e15ddb80ac20c1ce973ef07"}, {file = "orjson-3.8.0-cp37-cp37m-macosx_10_7_x86_64.whl", hash = "sha256:9e6ac22cec72d5b39035b566e4b86c74b84866f12b5b0b6541506a080fb67d6d"}, {file = "orjson-3.8.0-cp37-cp37m-macosx_10_9_x86_64.macosx_11_0_arm64.macosx_10_9_universal2.whl", hash = "sha256:e2f4a5542f50e3d336a18cb224fc757245ca66b1fd0b70b5dd4471b8ff5f2b0e"}, {file = "orjson-3.8.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e1418feeb8b698b9224b1f024555895169d481604d5d884498c1838d7412794c"}, @@ -2989,7 +2989,18 @@ psycopg2 = [ {file = "psycopg2-2.9.4.tar.gz", hash = "sha256:d529926254e093a1b669f692a3aa50069bc71faf5b0ecd91686a78f62767d52f"}, ] pyasn1 = [ + {file = "pyasn1-0.4.8-py2.4.egg", hash = "sha256:fec3e9d8e36808a28efb59b489e4528c10ad0f480e57dcc32b4de5c9d8c9fdf3"}, + {file = "pyasn1-0.4.8-py2.5.egg", hash = "sha256:0458773cfe65b153891ac249bcf1b5f8f320b7c2ce462151f8fa74de8934becf"}, + {file = "pyasn1-0.4.8-py2.6.egg", hash = "sha256:5c9414dcfede6e441f7e8f81b43b34e834731003427e5b09e4e00e3172a10f00"}, + {file = "pyasn1-0.4.8-py2.7.egg", hash = "sha256:6e7545f1a61025a4e58bb336952c5061697da694db1cae97b116e9c46abcf7c8"}, {file = "pyasn1-0.4.8-py2.py3-none-any.whl", hash = "sha256:39c7e2ec30515947ff4e87fb6f456dfc6e84857d34be479c9d4a4ba4bf46aa5d"}, + {file = "pyasn1-0.4.8-py3.1.egg", hash = "sha256:78fa6da68ed2727915c4767bb386ab32cdba863caa7dbe473eaae45f9959da86"}, + {file = "pyasn1-0.4.8-py3.2.egg", hash = "sha256:08c3c53b75eaa48d71cf8c710312316392ed40899cb34710d092e96745a358b7"}, + {file = "pyasn1-0.4.8-py3.3.egg", hash = "sha256:03840c999ba71680a131cfaee6fab142e1ed9bbd9c693e285cc6aca0d555e576"}, + {file = "pyasn1-0.4.8-py3.4.egg", hash = "sha256:7ab8a544af125fb704feadb008c99a88805126fb525280b2270bb25cc1d78a12"}, + {file = "pyasn1-0.4.8-py3.5.egg", hash = 
"sha256:e89bf84b5437b532b0803ba5c9a5e054d21fec423a89952a74f87fa2c9b7bce2"}, + {file = "pyasn1-0.4.8-py3.6.egg", hash = "sha256:014c0e9976956a08139dc0712ae195324a75e142284d5f87f1a87ee1b068a359"}, + {file = "pyasn1-0.4.8-py3.7.egg", hash = "sha256:99fcc3c8d804d1bc6d9a099921e39d827026409a58f2a720dcdb89374ea0c776"}, {file = "pyasn1-0.4.8.tar.gz", hash = "sha256:aef77c9fb94a3ac588e87841208bdec464471d9871bd5050a287cc9a475cd0ba"}, ] pycodestyle = [ From 424eb2412e745d43ca9a62c8a1cd7b4dda1321b3 Mon Sep 17 00:00:00 2001 From: jasquat Date: Mon, 5 Dec 2022 10:59:27 -0500 Subject: [PATCH 32/40] added support to order reports by given column and metadata headers w/ burnettk --- .../models/process_instance_report.py | 4 + .../routes/process_api_blueprint.py | 40 +++++++-- .../nested-task-data-structure.bpmn | 3 +- .../integration/test_process_api.py | 83 +++++++++++++++++++ 4 files changed, 120 insertions(+), 10 deletions(-) diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/models/process_instance_report.py b/spiffworkflow-backend/src/spiffworkflow_backend/models/process_instance_report.py index 3c0e8646..ad29dd06 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/models/process_instance_report.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/models/process_instance_report.py @@ -75,6 +75,10 @@ class ProcessInstanceReportModel(SpiffworkflowBaseDBModel): created_at_in_seconds = db.Column(db.Integer) updated_at_in_seconds = db.Column(db.Integer) + @classmethod + def default_order_by(cls) -> list[str]: + return ["-start_in_seconds", "-id"] + @classmethod def add_fixtures(cls) -> None: """Add_fixtures.""" diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/routes/process_api_blueprint.py b/spiffworkflow-backend/src/spiffworkflow_backend/routes/process_api_blueprint.py index d211f168..d9a7f68a 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/routes/process_api_blueprint.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/routes/process_api_blueprint.py @@ -3,6 +3,7 @@ import json import random import string import uuid +import re from typing import Any from typing import Dict from typing import Optional @@ -944,6 +945,7 @@ def process_instance_list( UserGroupAssignmentModel.user_id == g.user.id ) + instance_metadata_aliases = {} stock_columns = ProcessInstanceReportService.get_column_names_for_model( ProcessInstanceModel ) @@ -951,15 +953,18 @@ def process_instance_list( if column["accessor"] in stock_columns: continue instance_metadata_alias = aliased(ProcessInstanceMetadataModel) + instance_metadata_aliases[column['accessor']] = instance_metadata_alias - filter_for_column = next( - ( - f - for f in process_instance_report.report_metadata["filter_by"] - if f["field_name"] == column["accessor"] - ), - None, - ) + filter_for_column = None + if 'filter_by' in process_instance_report.report_metadata: + filter_for_column = next( + ( + f + for f in process_instance_report.report_metadata["filter_by"] + if f["field_name"] == column["accessor"] + ), + None, + ) isouter = True conditions = [ ProcessInstanceModel.id == instance_metadata_alias.process_instance_id, @@ -974,11 +979,28 @@ def process_instance_list( instance_metadata_alias, and_(*conditions), isouter=isouter ).add_columns(func.max(instance_metadata_alias.value).label(column["accessor"])) + order_by_query_array = [] + order_by_array = process_instance_report.report_metadata['order_by'] + if len(order_by_array) < 1: + order_by_array = ProcessInstanceReportModel.default_order_by() + for order_by_option 
in order_by_array: + attribute = re.sub('^-', '', order_by_option) + if attribute in stock_columns: + if order_by_option.startswith('-'): + order_by_query_array.append(getattr(ProcessInstanceModel, attribute).desc()) + else: + order_by_query_array.append(getattr(ProcessInstanceModel, attribute).asc()) + elif attribute in instance_metadata_aliases: + if order_by_option.startswith('-'): + order_by_query_array.append(instance_metadata_aliases[attribute].value.desc()) + else: + order_by_query_array.append(instance_metadata_aliases[attribute].value.asc()) + process_instances = ( process_instance_query.group_by(ProcessInstanceModel.id) .add_columns(ProcessInstanceModel.id) .order_by( - ProcessInstanceModel.start_in_seconds.desc(), ProcessInstanceModel.id.desc() # type: ignore + *order_by_query_array ) .paginate(page=page, per_page=per_page, error_out=False) ) diff --git a/spiffworkflow-backend/tests/data/nested-task-data-structure/nested-task-data-structure.bpmn b/spiffworkflow-backend/tests/data/nested-task-data-structure/nested-task-data-structure.bpmn index 4588bef0..7452216a 100644 --- a/spiffworkflow-backend/tests/data/nested-task-data-structure/nested-task-data-structure.bpmn +++ b/spiffworkflow-backend/tests/data/nested-task-data-structure/nested-task-data-structure.bpmn @@ -14,7 +14,8 @@ Flow_18gs4jt outer = {} invoice_number = 123 -outer["inner"] = 'sweet1' +outer["inner"] = 'sweet1' +outer['time'] = time.time_ns() diff --git a/spiffworkflow-backend/tests/spiffworkflow_backend/integration/test_process_api.py b/spiffworkflow-backend/tests/spiffworkflow_backend/integration/test_process_api.py index a17a3875..05e0977a 100644 --- a/spiffworkflow-backend/tests/spiffworkflow_backend/integration/test_process_api.py +++ b/spiffworkflow-backend/tests/spiffworkflow_backend/integration/test_process_api.py @@ -2657,3 +2657,86 @@ class TestProcessApi(BaseTest): {"Header": "key2", "accessor": "key2", "filterable": True}, {"Header": "key3", "accessor": "key3", "filterable": True}, ] + + def test_process_instance_list_can_order_by_metadata( + self, + app: Flask, + client: FlaskClient, + with_db_and_bpmn_file_cleanup: None, + with_super_admin_user: UserModel, + ) -> None: + self.create_process_group( + client, with_super_admin_user, "test_group", "test_group" + ) + process_model = load_test_spec( + "test_group/hello_world", + process_model_source_directory="nested-task-data-structure", + ) + ProcessModelService.update_process_model( + process_model, + { + "metadata_extraction_paths": [ + {"key": "time_ns", "path": "outer.time"}, + ] + }, + ) + + process_instance_one = self.create_process_instance_from_process_model( + process_model + ) + processor = ProcessInstanceProcessor(process_instance_one) + processor.do_engine_steps(save=True) + assert process_instance_one.status == "complete" + process_instance_two = self.create_process_instance_from_process_model( + process_model + ) + processor = ProcessInstanceProcessor(process_instance_two) + processor.do_engine_steps(save=True) + assert process_instance_two.status == "complete" + + report_metadata = { + "columns": [ + {"Header": "id", "accessor": "id"}, + {"Header": "Time", "accessor": "time_ns"}, + ], + "order_by": ["time_ns"], + } + report_one = ProcessInstanceReportModel.create_with_attributes( + identifier="report_one", + report_metadata=report_metadata, + user=with_super_admin_user, + ) + + response = client.get( + f"/v1.0/process-instances?report_id={report_one.id}", + headers=self.logged_in_headers(with_super_admin_user), + ) + assert 
response.status_code == 200 + assert response.json is not None + assert len(response.json["results"]) == 2 + assert response.json['results'][0]['id'] == process_instance_one.id + assert response.json['results'][1]['id'] == process_instance_two.id + + report_metadata = { + "columns": [ + {"Header": "id", "accessor": "id"}, + {"Header": "Time", "accessor": "time_ns"}, + ], + "order_by": ["-time_ns"], + } + report_two = ProcessInstanceReportModel.create_with_attributes( + identifier="report_two", + report_metadata=report_metadata, + user=with_super_admin_user, + ) + + response = client.get( + f"/v1.0/process-instances?report_id={report_two.id}", + headers=self.logged_in_headers(with_super_admin_user), + ) + + assert response.status_code == 200 + assert response.json is not None + assert len(response.json["results"]) == 2 + assert response.json['results'][1]['id'] == process_instance_one.id + assert response.json['results'][0]['id'] == process_instance_two.id From 463e763665adfb832254584981858f4d003c74e5 Mon Sep 17 00:00:00 2001 From: jasquat Date: Mon, 5 Dec 2022 14:07:26 -0500 Subject: [PATCH 33/40] some updates to fix up saving perspectives w/ burnettk --- .../ProcessInstanceListSaveAsReport.tsx | 84 +++++++---- .../components/ProcessInstanceListTable.tsx | 46 ++---- .../ProcessInstanceReportSearch.tsx | 7 +- .../src/components/ProcessModelForm.tsx | 134 ++++++++++++------ spiffworkflow-frontend/src/index.css | 9 ++ 5 files changed, 165 insertions(+), 115 deletions(-) diff --git a/spiffworkflow-frontend/src/components/ProcessInstanceListSaveAsReport.tsx b/spiffworkflow-frontend/src/components/ProcessInstanceListSaveAsReport.tsx index de5ea22c..a3d50d94 100644 --- a/spiffworkflow-frontend/src/components/ProcessInstanceListSaveAsReport.tsx +++ b/spiffworkflow-frontend/src/components/ProcessInstanceListSaveAsReport.tsx @@ -2,8 +2,8 @@ import { useState } from 'react'; import { Button, TextInput, - Form, Stack, + Modal, // @ts-ignore } from '@carbon/react'; import { @@ -42,26 +42,31 @@ export default function ProcessInstanceListSaveAsReport({ startToSeconds, endFromSeconds, endToSeconds, - buttonText = 'Save as Perspective', buttonClassName, + buttonText = 'Save as Perspective', reportMetadata, }: OwnProps) { const [identifier, setIdentifier] = useState( processInstanceReportSelection?.identifier || '' ); + const [showSaveForm, setShowSaveForm] = useState(false); - const hasIdentifier = () => { - return identifier?.length > 0; + const isEditMode = () => { + return ( + processInstanceReportSelection && + processInstanceReportSelection.identifier === identifier + ); }; const responseHandler = (result: any) => { if (result) { - onSuccess(result); + onSuccess(result, isEditMode() ? 
'edit' : 'new'); } }; - const isEditMode = () => { - return !!processInstanceReportSelection; + const handleSaveFormClose = () => { + setIdentifier(processInstanceReportSelection?.identifier || ''); + setShowSaveForm(false); }; const addProcessInstanceReport = (event: any) => { @@ -148,36 +153,53 @@ export default function ProcessInstanceListSaveAsReport({ }, }, }); + handleSaveFormClose(); }; let textInputComponent = null; - if (!isEditMode()) { - textInputComponent = ( - setIdentifier(e.target.value)} - /> - ); + textInputComponent = ( + setIdentifier(e.target.value)} + /> + ); + + let descriptionText = + 'Save the current columns and filters as a perspective so you can come back to this view in the future.'; + if (processInstanceReportSelection) { + descriptionText = + 'Keep the identifier the same and click Save to update the current perspective. Change the identifier if you want to save the current view with a new name.'; } return ( - - + + +

{descriptionText}

{textInputComponent} - -
- + + +
); } diff --git a/spiffworkflow-frontend/src/components/ProcessInstanceListTable.tsx b/spiffworkflow-frontend/src/components/ProcessInstanceListTable.tsx index 2c661719..ffbaf5ae 100644 --- a/spiffworkflow-frontend/src/components/ProcessInstanceListTable.tsx +++ b/spiffworkflow-frontend/src/components/ProcessInstanceListTable.tsx @@ -639,13 +639,14 @@ export default function ProcessInstanceListTable({ } return ( onSaveReportSuccess(result, 'new')} - buttonClassName="narrow-button" + onSuccess={onSaveReportSuccess} + buttonClassName="button-white-background narrow-button" columnArray={reportColumns()} orderBy="" - buttonText="Save New Perspective" + buttonText="Save" processModelSelection={processModelSelection} processStatusSelection={processStatusSelection} + processInstanceReportSelection={processInstanceReportSelection} reportMetadata={reportMetadata} startFromSeconds={startFromSeconds} startToSeconds={startToSeconds} @@ -871,8 +872,10 @@ export default function ProcessInstanceListTable({ reportColumnToReportColumnForEditing(reportColumn); let tagType = 'cool-gray'; + let tagTypeClass = ''; if (reportColumnForEditing.filterable) { tagType = 'green'; + tagTypeClass = 'tag-type-green'; } let reportColumnLabel = reportColumnForEditing.Header; if (reportColumnForEditing.filter_field_value) { @@ -883,7 +886,7 @@ export default function ProcessInstanceListTable({ + + + + + ); return textInputs; diff --git a/spiffworkflow-frontend/src/index.css b/spiffworkflow-frontend/src/index.css index 6b02ea35..f4094785 100644 --- a/spiffworkflow-frontend/src/index.css +++ b/spiffworkflow-frontend/src/index.css @@ -169,6 +169,10 @@ h1.with-icons { margin-top: 1em; } +.with-extra-top-margin { + margin-top: 1.3em; +} + .with-tiny-top-margin { margin-top: 4px; } @@ -353,3 +357,8 @@ td.actions-cell { word-break: normal; } + +.tag-type-green:hover { + background-color: #00FF00; +} + From 55afc22148983b6becd4df18283955ea434dd0cf Mon Sep 17 00:00:00 2001 From: jasquat Date: Mon, 5 Dec 2022 14:10:07 -0500 Subject: [PATCH 34/40] pyl w/ burnettk --- .../models/process_instance_report.py | 1 + .../routes/process_api_blueprint.py | 34 +++++++++++-------- .../integration/test_process_api.py | 9 ++--- .../components/ProcessInstanceListTable.tsx | 2 +- 4 files changed, 27 insertions(+), 19 deletions(-) diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/models/process_instance_report.py b/spiffworkflow-backend/src/spiffworkflow_backend/models/process_instance_report.py index ad29dd06..1f22a383 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/models/process_instance_report.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/models/process_instance_report.py @@ -77,6 +77,7 @@ class ProcessInstanceReportModel(SpiffworkflowBaseDBModel): @classmethod def default_order_by(cls) -> list[str]: + """Default_order_by.""" return ["-start_in_seconds", "-id"] @classmethod diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/routes/process_api_blueprint.py b/spiffworkflow-backend/src/spiffworkflow_backend/routes/process_api_blueprint.py index d9a7f68a..6843fb15 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/routes/process_api_blueprint.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/routes/process_api_blueprint.py @@ -1,9 +1,9 @@ """APIs for dealing with process groups, process models, and process instances.""" import json import random +import re import string import uuid -import re from typing import Any from typing import Dict from typing import Optional @@ -953,10 +953,10 @@ 
def process_instance_list( if column["accessor"] in stock_columns: continue instance_metadata_alias = aliased(ProcessInstanceMetadataModel) - instance_metadata_aliases[column['accessor']] = instance_metadata_alias + instance_metadata_aliases[column["accessor"]] = instance_metadata_alias filter_for_column = None - if 'filter_by' in process_instance_report.report_metadata: + if "filter_by" in process_instance_report.report_metadata: filter_for_column = next( ( f @@ -980,28 +980,34 @@ def process_instance_list( ).add_columns(func.max(instance_metadata_alias.value).label(column["accessor"])) order_by_query_array = [] - order_by_array = process_instance_report.report_metadata['order_by'] + order_by_array = process_instance_report.report_metadata["order_by"] if len(order_by_array) < 1: order_by_array = ProcessInstanceReportModel.default_order_by() for order_by_option in order_by_array: - attribute = re.sub('^-', '', order_by_option) + attribute = re.sub("^-", "", order_by_option) if attribute in stock_columns: - if order_by_option.startswith('-'): - order_by_query_array.append(getattr(ProcessInstanceModel, attribute).desc()) + if order_by_option.startswith("-"): + order_by_query_array.append( + getattr(ProcessInstanceModel, attribute).desc() + ) else: - order_by_query_array.append(getattr(ProcessInstanceModel, attribute).asc()) + order_by_query_array.append( + getattr(ProcessInstanceModel, attribute).asc() + ) elif attribute in instance_metadata_aliases: - if order_by_option.startswith('-'): - order_by_query_array.append(instance_metadata_aliases[attribute].value.desc()) + if order_by_option.startswith("-"): + order_by_query_array.append( + instance_metadata_aliases[attribute].value.desc() + ) else: - order_by_query_array.append(instance_metadata_aliases[attribute].value.asc()) + order_by_query_array.append( + instance_metadata_aliases[attribute].value.asc() + ) process_instances = ( process_instance_query.group_by(ProcessInstanceModel.id) .add_columns(ProcessInstanceModel.id) - .order_by( - *order_by_query_array - ) + .order_by(*order_by_query_array) .paginate(page=page, per_page=per_page, error_out=False) ) diff --git a/spiffworkflow-backend/tests/spiffworkflow_backend/integration/test_process_api.py b/spiffworkflow-backend/tests/spiffworkflow_backend/integration/test_process_api.py index 05e0977a..cd2d37c6 100644 --- a/spiffworkflow-backend/tests/spiffworkflow_backend/integration/test_process_api.py +++ b/spiffworkflow-backend/tests/spiffworkflow_backend/integration/test_process_api.py @@ -2665,6 +2665,7 @@ class TestProcessApi(BaseTest): with_db_and_bpmn_file_cleanup: None, with_super_admin_user: UserModel, ) -> None: + """Test_process_instance_list_can_order_by_metadata.""" self.create_process_group( client, with_super_admin_user, "test_group", "test_group" ) @@ -2714,8 +2715,8 @@ class TestProcessApi(BaseTest): assert response.status_code == 200 assert response.json is not None assert len(response.json["results"]) == 2 - assert response.json['results'][0]['id'] == process_instance_one.id - assert response.json['results'][1]['id'] == process_instance_two.id + assert response.json["results"][0]["id"] == process_instance_one.id + assert response.json["results"][1]["id"] == process_instance_two.id report_metadata = { "columns": [ @@ -2738,5 +2739,5 @@ class TestProcessApi(BaseTest): assert response.status_code == 200 assert response.json is not None assert len(response.json["results"]) == 2 - assert response.json['results'][1]['id'] == process_instance_one.id - assert 
response.json['results'][0]['id'] == process_instance_two.id + assert response.json["results"][1]["id"] == process_instance_one.id + assert response.json["results"][0]["id"] == process_instance_two.id diff --git a/spiffworkflow-frontend/src/components/ProcessInstanceListTable.tsx b/spiffworkflow-frontend/src/components/ProcessInstanceListTable.tsx index ffbaf5ae..cc5ad75a 100644 --- a/spiffworkflow-frontend/src/components/ProcessInstanceListTable.tsx +++ b/spiffworkflow-frontend/src/components/ProcessInstanceListTable.tsx @@ -1027,7 +1027,7 @@ export default function ProcessInstanceListTable({
- {saveAsReportComponent()} + {saveAsReportComponent()} From 8b0717be2dd47538c8abd6606698ae90929db508 Mon Sep 17 00:00:00 2001 From: jasquat Date: Mon, 5 Dec 2022 14:56:55 -0500 Subject: [PATCH 35/40] updated tasks endpoint to task-data for easier permission setting w/ burnettk --- spiffworkflow-backend/src/spiffworkflow_backend/api.yml | 2 +- .../config/permissions/development.yml | 7 ++++++- .../config/permissions/terraform_deployed_environment.yml | 7 ++++++- .../src/components/ProcessInstanceListTable.tsx | 4 +++- spiffworkflow-frontend/src/hooks/UriListForPermissions.tsx | 2 +- spiffworkflow-frontend/src/routes/ProcessInstanceShow.tsx | 2 +- spiffworkflow-frontend/src/routes/TaskShow.tsx | 4 ++-- 7 files changed, 20 insertions(+), 8 deletions(-) diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/api.yml b/spiffworkflow-backend/src/spiffworkflow_backend/api.yml index 5be50b8d..fe783918 100755 --- a/spiffworkflow-backend/src/spiffworkflow_backend/api.yml +++ b/spiffworkflow-backend/src/spiffworkflow_backend/api.yml @@ -1101,7 +1101,7 @@ paths: items: $ref: "#/components/schemas/Task" - /process-instances/{modified_process_model_id}/{process_instance_id}/tasks: + /task-data/{modified_process_model_id}/{process_instance_id}: parameters: - name: modified_process_model_id in: path diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/config/permissions/development.yml b/spiffworkflow-backend/src/spiffworkflow_backend/config/permissions/development.yml index e17e3f11..b404aa97 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/config/permissions/development.yml +++ b/spiffworkflow-backend/src/spiffworkflow_backend/config/permissions/development.yml @@ -12,7 +12,6 @@ groups: mike, jason, j, - amir, jarrad, elizabeth, jon, @@ -98,6 +97,12 @@ permissions: allowed_permissions: [read] uri: /v1.0/processes + task-data-read: + groups: [demo] + users: [] + allowed_permissions: [read] + uri: /v1.0/task-data/* + manage-procurement-admin: groups: ["Project Lead"] diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/config/permissions/terraform_deployed_environment.yml b/spiffworkflow-backend/src/spiffworkflow_backend/config/permissions/terraform_deployed_environment.yml index e60946b3..ce2e2dba 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/config/permissions/terraform_deployed_environment.yml +++ b/spiffworkflow-backend/src/spiffworkflow_backend/config/permissions/terraform_deployed_environment.yml @@ -12,7 +12,6 @@ groups: mike, jason, j, - amir, jarrad, elizabeth, jon, @@ -98,6 +97,12 @@ permissions: allowed_permissions: [read] uri: /v1.0/processes + task-data-read: + groups: [demo] + users: [] + allowed_permissions: [read] + uri: /v1.0/task-data/* + manage-procurement-admin: groups: ["Project Lead"] diff --git a/spiffworkflow-frontend/src/components/ProcessInstanceListTable.tsx b/spiffworkflow-frontend/src/components/ProcessInstanceListTable.tsx index cc5ad75a..621c595c 100644 --- a/spiffworkflow-frontend/src/components/ProcessInstanceListTable.tsx +++ b/spiffworkflow-frontend/src/components/ProcessInstanceListTable.tsx @@ -1166,6 +1166,9 @@ export default function ProcessInstanceListTable({ return ( <> + + {reportSearchComponent()} + { processGroupShowPath: `/v1.0/process-groups/${params.process_group_id}`, processInstanceActionPath: `/v1.0/process-models/${params.process_model_id}/process-instances`, processInstanceListPath: '/v1.0/process-instances', - processInstanceTaskListPath: 
`/v1.0/process-instances/${params.process_model_id}/${params.process_instance_id}/tasks`, + processInstanceTaskListPath: `/v1.0/task-data/${params.process_model_id}/${params.process_instance_id}`, processInstanceReportListPath: '/v1.0/process-instances/reports', processModelCreatePath: `/v1.0/process-models/${params.process_group_id}`, processModelFileCreatePath: `/v1.0/process-models/${params.process_model_id}/files`, diff --git a/spiffworkflow-frontend/src/routes/ProcessInstanceShow.tsx b/spiffworkflow-frontend/src/routes/ProcessInstanceShow.tsx index c407c771..0b0aca14 100644 --- a/spiffworkflow-frontend/src/routes/ProcessInstanceShow.tsx +++ b/spiffworkflow-frontend/src/routes/ProcessInstanceShow.tsx @@ -85,7 +85,7 @@ export default function ProcessInstanceShow() { } if (ability.can('GET', targetUris.processInstanceTaskListPath)) { HttpService.makeCallToBackend({ - path: `/process-instances/${modifiedProcessModelId}/${params.process_instance_id}/tasks${taskParams}`, + path: `${targetUris.processInstanceTaskListPath}${taskParams}`, successCallback: setTasks, failureCallback: processTaskFailure, }); diff --git a/spiffworkflow-frontend/src/routes/TaskShow.tsx b/spiffworkflow-frontend/src/routes/TaskShow.tsx index 768043cd..88e23de7 100644 --- a/spiffworkflow-frontend/src/routes/TaskShow.tsx +++ b/spiffworkflow-frontend/src/routes/TaskShow.tsx @@ -39,9 +39,9 @@ export default function TaskShow() { const processResult = (result: any) => { setTask(result); HttpService.makeCallToBackend({ - path: `/process-instances/${modifyProcessIdentifierForPathParam( + path: `/task-data/${modifyProcessIdentifierForPathParam( result.process_model_identifier - )}/${params.process_instance_id}/tasks`, + )}/${params.process_instance_id}`, successCallback: setUserTasks, }); }; From bc7c5920b2e9133ffc9a62485386611130e78c73 Mon Sep 17 00:00:00 2001 From: jasquat Date: Mon, 5 Dec 2022 16:06:08 -0500 Subject: [PATCH 36/40] cleaned up more api routes for permissions w/ burnettk --- .../src/spiffworkflow_backend/api.yml | 46 +++++------ .../config/permissions/development.yml | 6 +- .../routes/process_api_blueprint.py | 6 +- .../helpers/base_test.py | 2 +- .../integration/test_logging_service.py | 2 +- .../integration/test_process_api.py | 11 +-- .../src/components/ProcessInstanceRun.tsx | 2 +- .../src/components/ProcessModelForm.tsx | 1 - .../src/hooks/UriListForPermissions.tsx | 5 +- .../src/routes/ProcessInstanceLogList.tsx | 8 +- .../src/routes/ProcessInstanceShow.tsx | 81 ++++++++++++------- .../src/routes/TaskShow.tsx | 72 ++++++++++------- 12 files changed, 139 insertions(+), 103 deletions(-) diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/api.yml b/spiffworkflow-backend/src/spiffworkflow_backend/api.yml index fe783918..a8204d39 100755 --- a/spiffworkflow-backend/src/spiffworkflow_backend/api.yml +++ b/spiffworkflow-backend/src/spiffworkflow_backend/api.yml @@ -646,7 +646,7 @@ paths: schema: $ref: "#/components/schemas/Workflow" - /process-models/{modified_process_model_id}/process-instances: + /process-instances/{modified_process_model_id}: parameters: - name: modified_process_model_id in: path @@ -654,7 +654,6 @@ paths: description: The unique id of an existing process model. 
schema: type: string - # process_instance_create post: operationId: spiffworkflow_backend.routes.process_api_blueprint.process_instance_create summary: Creates an process instance from a process model and returns the instance @@ -668,28 +667,7 @@ paths: schema: $ref: "#/components/schemas/Workflow" - /process-instances/{process_instance_id}: - parameters: - - name: process_instance_id - in: path - required: true - description: The unique id of an existing process instance. - schema: - type: integer - delete: - operationId: spiffworkflow_backend.routes.process_api_blueprint.process_instance_delete - summary: Deletes a single process instance - tags: - - Process Instances - responses: - "200": - description: The process instance was deleted. - content: - application/json: - schema: - $ref: "#/components/schemas/OkTrue" - - /process-models/{modified_process_model_identifier}/process-instances/{process_instance_id}: + /process-instances/{modified_process_model_identifier}/{process_instance_id}: parameters: - name: modified_process_model_identifier in: path @@ -715,6 +693,18 @@ paths: application/json: schema: $ref: "#/components/schemas/Workflow" + delete: + operationId: spiffworkflow_backend.routes.process_api_blueprint.process_instance_delete + summary: Deletes a single process instance + tags: + - Process Instances + responses: + "200": + description: The process instance was deleted. + content: + application/json: + schema: + $ref: "#/components/schemas/OkTrue" /process-instances/{modified_process_model_identifier}/{process_instance_id}/run: parameters: @@ -743,7 +733,7 @@ paths: schema: $ref: "#/components/schemas/Workflow" - /process-instances/{process_instance_id}/terminate: + /process-instances/{modified_process_model_identifier}/{process_instance_id}/terminate: parameters: - name: process_instance_id in: path @@ -764,7 +754,7 @@ paths: schema: $ref: "#/components/schemas/OkTrue" - /process-instances/{process_instance_id}/suspend: + /process-instances/{modified_process_model_identifier}/{process_instance_id}/suspend: parameters: - name: process_instance_id in: path @@ -785,7 +775,7 @@ paths: schema: $ref: "#/components/schemas/OkTrue" - /process-instances/{process_instance_id}/resume: + /process-instances/{modified_process_model_identifier}/{process_instance_id}/resume: parameters: - name: process_instance_id in: path @@ -1326,7 +1316,7 @@ paths: schema: $ref: "#/components/schemas/Workflow" - /process-instances/{process_instance_id}/logs: + /logs/{modified_process_model_identifier}/{process_instance_id}: parameters: - name: process_instance_id in: path diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/config/permissions/development.yml b/spiffworkflow-backend/src/spiffworkflow_backend/config/permissions/development.yml index b404aa97..9a2f2284 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/config/permissions/development.yml +++ b/spiffworkflow-backend/src/spiffworkflow_backend/config/permissions/development.yml @@ -175,17 +175,17 @@ permissions: uri: /v1.0/process-instances/manage-procurement:vendor-lifecycle-management:* core1-admin-models-instantiate: - groups: ["core-contributor"] + groups: ["core-contributor", "Finance Team"] users: [] allowed_permissions: [create] uri: /v1.0/process-models/misc:category_number_one:process-model-with-form/process-instances core1-admin-instances: - groups: ["core-contributor"] + groups: ["core-contributor", "Finance Team"] users: [] allowed_permissions: [create, read] uri: 
/v1.0/process-instances/misc:category_number_one:process-model-with-form:* core1-admin-instances-slash: - groups: ["core-contributor"] + groups: ["core-contributor", "Finance Team"] users: [] allowed_permissions: [create, read] uri: /v1.0/process-instances/misc:category_number_one:process-model-with-form/* diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/routes/process_api_blueprint.py b/spiffworkflow-backend/src/spiffworkflow_backend/routes/process_api_blueprint.py index 6843fb15..77fe594c 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/routes/process_api_blueprint.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/routes/process_api_blueprint.py @@ -565,6 +565,7 @@ def process_instance_run( def process_instance_terminate( process_instance_id: int, + modified_process_model_identifier: str, ) -> flask.wrappers.Response: """Process_instance_run.""" process_instance = ProcessInstanceService().get_process_instance( @@ -577,6 +578,7 @@ def process_instance_terminate( def process_instance_suspend( process_instance_id: int, + modified_process_model_identifier: str, ) -> flask.wrappers.Response: """Process_instance_suspend.""" process_instance = ProcessInstanceService().get_process_instance( @@ -589,6 +591,7 @@ def process_instance_suspend( def process_instance_resume( process_instance_id: int, + modified_process_model_identifier: str, ) -> flask.wrappers.Response: """Process_instance_resume.""" process_instance = ProcessInstanceService().get_process_instance( @@ -600,6 +603,7 @@ def process_instance_resume( def process_instance_log_list( + modified_process_model_identifier: str, process_instance_id: int, page: int = 1, per_page: int = 100, @@ -1071,7 +1075,7 @@ def process_instance_show( return make_response(jsonify(process_instance), 200) -def process_instance_delete(process_instance_id: int) -> flask.wrappers.Response: +def process_instance_delete(process_instance_id: int, modified_process_model_identifier: str) -> flask.wrappers.Response: """Create_process_instance.""" process_instance = find_process_instance_by_id_or_raise(process_instance_id) diff --git a/spiffworkflow-backend/tests/spiffworkflow_backend/helpers/base_test.py b/spiffworkflow-backend/tests/spiffworkflow_backend/helpers/base_test.py index 8d56853b..48982fc6 100644 --- a/spiffworkflow-backend/tests/spiffworkflow_backend/helpers/base_test.py +++ b/spiffworkflow-backend/tests/spiffworkflow_backend/helpers/base_test.py @@ -265,7 +265,7 @@ class BaseTest: ) modified_process_model_id = test_process_model_id.replace("/", ":") response = client.post( - f"/v1.0/process-models/{modified_process_model_id}/process-instances", + f"/v1.0/process-instances/{modified_process_model_id}", headers=headers, ) assert response.status_code == 201 diff --git a/spiffworkflow-backend/tests/spiffworkflow_backend/integration/test_logging_service.py b/spiffworkflow-backend/tests/spiffworkflow_backend/integration/test_logging_service.py index 2f56d1d6..f9dd4452 100644 --- a/spiffworkflow-backend/tests/spiffworkflow_backend/integration/test_logging_service.py +++ b/spiffworkflow-backend/tests/spiffworkflow_backend/integration/test_logging_service.py @@ -57,7 +57,7 @@ class TestLoggingService(BaseTest): assert response.status_code == 200 log_response = client.get( - f"/v1.0/process-instances/{process_instance_id}/logs", + f"/v1.0/logs/{self.modify_process_identifier_for_path_param(process_model_identifier)}/{process_instance_id}", headers=headers, ) assert log_response.status_code == 200 diff --git 
a/spiffworkflow-backend/tests/spiffworkflow_backend/integration/test_process_api.py b/spiffworkflow-backend/tests/spiffworkflow_backend/integration/test_process_api.py index cd2d37c6..9d719a23 100644 --- a/spiffworkflow-backend/tests/spiffworkflow_backend/integration/test_process_api.py +++ b/spiffworkflow-backend/tests/spiffworkflow_backend/integration/test_process_api.py @@ -912,7 +912,7 @@ class TestProcessApi(BaseTest): modified_process_model_identifier = process_model_identifier.replace("/", ":") response = client.post( - f"/v1.0/process-models/{modified_process_model_identifier}/process-instances", + f"/v1.0/process-instances/{modified_process_model_identifier}", headers=self.logged_in_headers(with_super_admin_user), ) assert response.status_code == 201 @@ -1154,10 +1154,11 @@ class TestProcessApi(BaseTest): headers=self.logged_in_headers(with_super_admin_user), ) show_response = client.get( - f"/v1.0/process-models/{modified_process_model_identifier}/process-instances/{process_instance_id}", + f"/v1.0/process-instances/{modified_process_model_identifier}/{process_instance_id}", headers=self.logged_in_headers(with_super_admin_user), ) assert show_response.json is not None + assert show_response.status_code == 200 file_system_root = FileSystemService.root_path() file_path = ( f"{file_system_root}/{process_model_identifier}/{process_model_id}.bpmn" @@ -1320,7 +1321,7 @@ class TestProcessApi(BaseTest): assert response.json is not None response = client.post( - f"/v1.0/process-instances/{process_instance_id}/terminate", + f"/v1.0/process-instances/{self.modify_process_identifier_for_path_param(process_model_identifier)}/{process_instance_id}/terminate", headers=self.logged_in_headers(with_super_admin_user), ) assert response.status_code == 200 @@ -1367,7 +1368,7 @@ class TestProcessApi(BaseTest): assert response.json is not None delete_response = client.delete( - f"/v1.0/process-instances/{process_instance_id}", + f"/v1.0/process-instances/{self.modify_process_identifier_for_path_param(process_model_identifier)}/{process_instance_id}", headers=self.logged_in_headers(with_super_admin_user), ) assert delete_response.status_code == 200 @@ -2366,7 +2367,7 @@ class TestProcessApi(BaseTest): assert process_instance.status == "user_input_required" client.post( - f"/v1.0/process-instances/{process_instance_id}/suspend", + f"/v1.0/process-instances/{self.modify_process_identifier_for_path_param(process_model_identifier)}/{process_instance_id}/suspend", headers=self.logged_in_headers(with_super_admin_user), ) process_instance = ProcessInstanceService().get_process_instance( diff --git a/spiffworkflow-frontend/src/components/ProcessInstanceRun.tsx b/spiffworkflow-frontend/src/components/ProcessInstanceRun.tsx index 87406f80..dafe20d5 100644 --- a/spiffworkflow-frontend/src/components/ProcessInstanceRun.tsx +++ b/spiffworkflow-frontend/src/components/ProcessInstanceRun.tsx @@ -83,7 +83,7 @@ export default function ProcessInstanceRun({ processModel.id ); - const processInstanceActionPath = `/v1.0/process-models/${modifiedProcessModelId}/process-instances`; + const processInstanceActionPath = `/v1.0/process-instances/${modifiedProcessModelId}`; let permissionRequestData: PermissionsToCheck = { [processInstanceActionPath]: ['POST'], }; diff --git a/spiffworkflow-frontend/src/components/ProcessModelForm.tsx b/spiffworkflow-frontend/src/components/ProcessModelForm.tsx index 1599aff5..7cfd4d61 100644 --- a/spiffworkflow-frontend/src/components/ProcessModelForm.tsx +++ 
b/spiffworkflow-frontend/src/components/ProcessModelForm.tsx @@ -194,7 +194,6 @@ export default function ProcessModelForm({ onChange={(event: any) => { onDisplayNameChanged(event.target.value); }} - onBlur={(event: any) => console.log('event', event)} />, ]; diff --git a/spiffworkflow-frontend/src/hooks/UriListForPermissions.tsx b/spiffworkflow-frontend/src/hooks/UriListForPermissions.tsx index 9496e9e0..67958723 100644 --- a/spiffworkflow-frontend/src/hooks/UriListForPermissions.tsx +++ b/spiffworkflow-frontend/src/hooks/UriListForPermissions.tsx @@ -9,10 +9,11 @@ export const useUriListForPermissions = () => { messageInstanceListPath: '/v1.0/messages', processGroupListPath: '/v1.0/process-groups', processGroupShowPath: `/v1.0/process-groups/${params.process_group_id}`, - processInstanceActionPath: `/v1.0/process-models/${params.process_model_id}/process-instances`, + processInstanceActionPath: `/v1.0/process-instances/${params.process_model_id}`, processInstanceListPath: '/v1.0/process-instances', - processInstanceTaskListPath: `/v1.0/task-data/${params.process_model_id}/${params.process_instance_id}`, + processInstanceLogListPath: `/v1.0/logs/${params.process_model_id}/${params.process_instance_id}`, processInstanceReportListPath: '/v1.0/process-instances/reports', + processInstanceTaskListPath: `/v1.0/task-data/${params.process_model_id}/${params.process_instance_id}`, processModelCreatePath: `/v1.0/process-models/${params.process_group_id}`, processModelFileCreatePath: `/v1.0/process-models/${params.process_model_id}/files`, processModelFileShowPath: `/v1.0/process-models/${params.process_model_id}/files/${params.file_name}`, diff --git a/spiffworkflow-frontend/src/routes/ProcessInstanceLogList.tsx b/spiffworkflow-frontend/src/routes/ProcessInstanceLogList.tsx index f41caf94..7b09c89f 100644 --- a/spiffworkflow-frontend/src/routes/ProcessInstanceLogList.tsx +++ b/spiffworkflow-frontend/src/routes/ProcessInstanceLogList.tsx @@ -10,6 +10,7 @@ import { convertSecondsToFormattedDateTime, } from '../helpers'; import HttpService from '../services/HttpService'; +import { useUriListForPermissions } from '../hooks/UriListForPermissions'; export default function ProcessInstanceLogList() { const params = useParams(); @@ -19,6 +20,7 @@ export default function ProcessInstanceLogList() { const modifiedProcessModelId = modifyProcessIdentifierForPathParam( `${params.process_model_id}` ); + const { targetUris } = useUriListForPermissions(); useEffect(() => { const setProcessInstanceLogListFromResult = (result: any) => { @@ -27,7 +29,7 @@ export default function ProcessInstanceLogList() { }; const { page, perPage } = getPageInfoFromSearchParams(searchParams); HttpService.makeCallToBackend({ - path: `/process-instances/${params.process_instance_id}/logs?per_page=${perPage}&page=${page}`, + path: `${targetUris.processInstanceLogListPath}?per_page=${perPage}&page=${page}`, successCallback: setProcessInstanceLogListFromResult, }); }, [searchParams, params]); @@ -46,7 +48,7 @@ export default function ProcessInstanceLogList() {
- + - - + {isDetailedView && ( + <> + + + + + )} - + - - + {isDetailedView && ( + <> + + + + + )} @@ -73,11 +89,12 @@ export default function ProcessInstanceLogList() {
Instance IdProcess ModelIdProcess Process InstanceMessage ModelName Type Failure CauseCorrelations Status Created At
{row.id} {row.message_identifier} {row.message_type}{row.failure_cause || '-'} {row.status}Process Instance Name TypeFailure CauseCorrelationsDetails Status Created At
{convertSecondsToFormattedDateTime(rowToUse.timestamp)} @@ -86,7 +88,7 @@ export default function ProcessInstanceLogList() { }, [ `Process Instance: ${params.process_instance_id}`, - `/admin/process-models/${params.process_model_id}/process-instances/${params.process_instance_id}`, + `/admin/process-instances/${params.process_model_id}/${params.process_instance_id}`, ], ['Logs'], ]} diff --git a/spiffworkflow-frontend/src/routes/ProcessInstanceShow.tsx b/spiffworkflow-frontend/src/routes/ProcessInstanceShow.tsx index 0b0aca14..481f8d6f 100644 --- a/spiffworkflow-frontend/src/routes/ProcessInstanceShow.tsx +++ b/spiffworkflow-frontend/src/routes/ProcessInstanceShow.tsx @@ -59,6 +59,11 @@ export default function ProcessInstanceShow() { const permissionRequestData: PermissionsToCheck = { [targetUris.messageInstanceListPath]: ['GET'], [targetUris.processInstanceTaskListPath]: ['GET'], + [targetUris.processInstanceActionPath]: ['DELETE'], + [targetUris.processInstanceLogListPath]: ['GET'], + [`${targetUris.processInstanceActionPath}/suspend`]: ['PUT'], + [`${targetUris.processInstanceActionPath}/terminate`]: ['PUT'], + [`${targetUris.processInstanceActionPath}/resume`]: ['PUT'], }; const { ability, permissionsLoaded } = usePermissionFetcher( permissionRequestData @@ -97,7 +102,7 @@ export default function ProcessInstanceShow() { const deleteProcessInstance = () => { HttpService.makeCallToBackend({ - path: `/process-instances/${params.process_instance_id}`, + path: targetUris.processInstanceActionPath, successCallback: navigateToProcessInstances, httpMethod: 'DELETE', }); @@ -110,7 +115,7 @@ export default function ProcessInstanceShow() { const terminateProcessInstance = () => { HttpService.makeCallToBackend({ - path: `/process-instances/${params.process_instance_id}/terminate`, + path: `${targetUris.processInstanceActionPath}/terminate`, successCallback: refreshPage, httpMethod: 'POST', }); @@ -118,7 +123,7 @@ export default function ProcessInstanceShow() { const suspendProcessInstance = () => { HttpService.makeCallToBackend({ - path: `/process-instances/${params.process_instance_id}/suspend`, + path: `${targetUris.processInstanceActionPath}/suspend`, successCallback: refreshPage, httpMethod: 'POST', }); @@ -126,7 +131,7 @@ export default function ProcessInstanceShow() { const resumeProcessInstance = () => { HttpService.makeCallToBackend({ - path: `/process-instances/${params.process_instance_id}/resume`, + path: `${targetUris.processInstanceActionPath}/resume`, successCallback: refreshPage, httpMethod: 'POST', }); @@ -209,7 +214,7 @@ export default function ProcessInstanceShow() { if (currentEndDate) { currentEndDateTag = ( - + Completed:{' '} @@ -235,7 +240,7 @@ export default function ProcessInstanceShow() { return ( <> - + Started:{' '} @@ -246,7 +251,7 @@ export default function ProcessInstanceShow() { {currentEndDateTag} - + Status:{' '} @@ -259,14 +264,20 @@ export default function ProcessInstanceShow() { - + + { const elements = []; - elements.push(terminateButton(processInstanceToUse)); - elements.push(suspendButton(processInstanceToUse)); - elements.push(resumeButton(processInstanceToUse)); - elements.push( - - ); + if ( + ability.can('POST', `${targetUris.processInstanceActionPath}/terminate`) + ) { + elements.push(terminateButton(processInstanceToUse)); + } + if ( + ability.can('POST', `${targetUris.processInstanceActionPath}/suspend`) + ) { + elements.push(suspendButton(processInstanceToUse)); + } + if (ability.can('POST', `${targetUris.processInstanceActionPath}/resume`)) { + 
elements.push(resumeButton(processInstanceToUse)); + } + if (ability.can('DELETE', targetUris.processInstanceActionPath)) { + elements.push( + + ); + } return elements; }; diff --git a/spiffworkflow-frontend/src/routes/TaskShow.tsx b/spiffworkflow-frontend/src/routes/TaskShow.tsx index 88e23de7..9e0f65c5 100644 --- a/spiffworkflow-frontend/src/routes/TaskShow.tsx +++ b/spiffworkflow-frontend/src/routes/TaskShow.tsx @@ -26,6 +26,9 @@ import Form from '../themes/carbon'; import HttpService from '../services/HttpService'; import ErrorContext from '../contexts/ErrorContext'; import { modifyProcessIdentifierForPathParam } from '../helpers'; +import { useUriListForPermissions } from '../hooks/UriListForPermissions'; +import { PermissionsToCheck } from '../interfaces'; +import { usePermissionFetcher } from '../hooks/PermissionService'; export default function TaskShow() { const [task, setTask] = useState(null); @@ -35,24 +38,36 @@ export default function TaskShow() { const setErrorMessage = (useContext as any)(ErrorContext)[1]; - useEffect(() => { - const processResult = (result: any) => { - setTask(result); - HttpService.makeCallToBackend({ - path: `/task-data/${modifyProcessIdentifierForPathParam( - result.process_model_identifier - )}/${params.process_instance_id}`, - successCallback: setUserTasks, - }); - }; + const { targetUris } = useUriListForPermissions(); + const permissionRequestData: PermissionsToCheck = { + [targetUris.processInstanceTaskListPath]: ['GET'], + }; + const { ability, permissionsLoaded } = usePermissionFetcher( + permissionRequestData + ); - HttpService.makeCallToBackend({ - path: `/tasks/${params.process_instance_id}/${params.task_id}`, - successCallback: processResult, - // This causes the page to continuously reload - // failureCallback: setErrorMessage, - }); - }, [params]); + useEffect(() => { + if (permissionsLoaded) { + const processResult = (result: any) => { + setTask(result); + if (ability.can('GET', targetUris.processInstanceTaskListPath)) { + HttpService.makeCallToBackend({ + path: `/task-data/${modifyProcessIdentifierForPathParam( + result.process_model_identifier + )}/${params.process_instance_id}`, + successCallback: setUserTasks, + }); + } + }; + + HttpService.makeCallToBackend({ + path: `/tasks/${params.process_instance_id}/${params.task_id}`, + successCallback: processResult, + // This causes the page to continuously reload + // failureCallback: setErrorMessage, + }); + } + }, [params, permissionsLoaded, ability, targetUris]); const processSubmitResult = (result: any) => { setErrorMessage(null); @@ -116,17 +131,18 @@ export default function TaskShow() { } return null; }); + return ( + + + {userTasksElement} + + + ); } - return ( - - - {userTasksElement} - - - ); + return null; }; const formElement = (taskToUse: any) => { @@ -207,7 +223,7 @@ export default function TaskShow() { ); }; - if (task && userTasks) { + if (task) { const taskToUse = task as any; let statusString = ''; if (taskToUse.state !== 'READY') { From e889bfc71636ebb2ca935b3c5ae6e65187732272 Mon Sep 17 00:00:00 2001 From: jasquat Date: Mon, 5 Dec 2022 16:35:20 -0500 Subject: [PATCH 37/40] more api cleanup w/ burnettk --- .../src/spiffworkflow_backend/api.yml | 80 ++++++++++--------- .../config/permissions/development.yml | 6 ++ .../routes/process_api_blueprint.py | 43 ++++++---- .../src/routes/ProcessInstanceShow.tsx | 8 +- .../src/routes/ProcessModelEditDiagram.tsx | 2 +- 5 files changed, 80 insertions(+), 59 deletions(-) diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/api.yml 
b/spiffworkflow-backend/src/spiffworkflow_backend/api.yml index a8204d39..e9da26b4 100755 --- a/spiffworkflow-backend/src/spiffworkflow_backend/api.yml +++ b/spiffworkflow-backend/src/spiffworkflow_backend/api.yml @@ -338,9 +338,9 @@ paths: schema: $ref: "#/components/schemas/ProcessModel" - /process-models/{modified_process_model_id}/files: + /process-models/{modified_process_model_identifier}/files: parameters: - - name: modified_process_model_id + - name: modified_process_model_identifier in: path required: true description: The process_model_id, modified to replace slashes (/) @@ -565,33 +565,6 @@ paths: items: $ref: "#/components/schemas/Workflow" - /process-instances/{process_instance_id}/task/{task_id}/update: - parameters: - - name: process_instance_id - in: path - required: true - description: The unique id of the process instance - schema: - type: string - - name: task_id - in: path - required: true - description: The unique id of the task - schema: - type: string - post: - operationId: spiffworkflow_backend.routes.process_api_blueprint.update_task_data - summary: Update the task data for requested instance and task - tags: - - Process Instances - responses: - "200": - description: Task Updated Successfully - content: - application/json: - schema: - $ref: "#/components/schemas/Workflow" - /process-models/{process_group_id}/{process_model_id}/script-unit-tests: parameters: - name: process_group_id @@ -646,9 +619,9 @@ paths: schema: $ref: "#/components/schemas/Workflow" - /process-instances/{modified_process_model_id}: + /process-instances/{modified_process_model_identifier}: parameters: - - name: modified_process_model_id + - name: modified_process_model_identifier in: path required: true description: The unique id of an existing process model. @@ -912,9 +885,9 @@ paths: schema: $ref: "#/components/schemas/OkTrue" - /process-models/{modified_process_model_id}/files/{file_name}: + /process-models/{modified_process_model_identifier}/files/{file_name}: parameters: - - name: modified_process_model_id + - name: modified_process_model_identifier in: path required: true description: The modified process model id @@ -1091,9 +1064,9 @@ paths: items: $ref: "#/components/schemas/Task" - /task-data/{modified_process_model_id}/{process_instance_id}: + /task-data/{modified_process_model_identifier}/{process_instance_id}: parameters: - - name: modified_process_model_id + - name: modified_process_model_identifier in: path required: true description: The modified id of an existing process model @@ -1132,11 +1105,44 @@ paths: items: $ref: "#/components/schemas/Task" - /service_tasks: + /task-data/{modified_process_model_identifier}/{process_instance_id}/{task_id}: + parameters: + - name: modified_process_model_identifier + in: path + required: true + description: The modified id of an existing process model + schema: + type: string + - name: process_instance_id + in: path + required: true + description: The unique id of an existing process instance. + schema: + type: integer + - name: task_id + in: path + required: true + description: The unique id of the task. 
+ schema: + type: string + put: + operationId: spiffworkflow_backend.routes.process_api_blueprint.update_task_data + summary: Update the task data for requested instance and task + tags: + - Process Instances + responses: + "200": + description: Task Updated Successfully + content: + application/json: + schema: + $ref: "#/components/schemas/Workflow" + + /service-tasks: get: tags: - Service Tasks - operationId: spiffworkflow_backend.routes.process_api_blueprint.service_tasks_show + operationId: spiffworkflow_backend.routes.process_api_blueprint.service_task_list summary: Gets all available service task connectors responses: "200": diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/config/permissions/development.yml b/spiffworkflow-backend/src/spiffworkflow_backend/config/permissions/development.yml index 9a2f2284..4c748fd9 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/config/permissions/development.yml +++ b/spiffworkflow-backend/src/spiffworkflow_backend/config/permissions/development.yml @@ -69,6 +69,12 @@ permissions: users: [] allowed_permissions: [create, read, update, delete] uri: /v1.0/tasks/* + service-tasks: + groups: [everybody] + users: [] + allowed_permissions: [read] + uri: /v1.0/service-tasks + # read all for everybody read-all-process-groups: diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/routes/process_api_blueprint.py b/spiffworkflow-backend/src/spiffworkflow_backend/routes/process_api_blueprint.py index 77fe594c..f780a97d 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/routes/process_api_blueprint.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/routes/process_api_blueprint.py @@ -158,9 +158,9 @@ def modify_process_model_id(process_model_id: str) -> str: return process_model_id.replace("/", ":") -def un_modify_modified_process_model_id(modified_process_model_id: str) -> str: +def un_modify_modified_process_model_id(modified_process_model_identifier: str) -> str: """Un_modify_modified_process_model_id.""" - return modified_process_model_id.replace(":", "/") + return modified_process_model_identifier.replace(":", "/") def process_group_add(body: dict) -> flask.wrappers.Response: @@ -411,9 +411,9 @@ def process_list() -> Any: return SpecReferenceSchema(many=True).dump(references) -def get_file(modified_process_model_id: str, file_name: str) -> Any: +def get_file(modified_process_model_identifier: str, file_name: str) -> Any: """Get_file.""" - process_model_identifier = modified_process_model_id.replace(":", "/") + process_model_identifier = modified_process_model_identifier.replace(":", "/") process_model = get_process_model(process_model_identifier) files = SpecFileService.get_files(process_model, file_name) if len(files) == 0: @@ -433,10 +433,10 @@ def get_file(modified_process_model_id: str, file_name: str) -> Any: def process_model_file_update( - modified_process_model_id: str, file_name: str + modified_process_model_identifier: str, file_name: str ) -> flask.wrappers.Response: """Process_model_file_update.""" - process_model_identifier = modified_process_model_id.replace(":", "/") + process_model_identifier = modified_process_model_identifier.replace(":", "/") process_model = get_process_model(process_model_identifier) request_file = get_file_from_request() @@ -462,10 +462,10 @@ def process_model_file_update( def process_model_file_delete( - modified_process_model_id: str, file_name: str + modified_process_model_identifier: str, file_name: str ) -> flask.wrappers.Response: """Process_model_file_delete.""" - 
process_model_identifier = modified_process_model_id.replace(":", "/") + process_model_identifier = modified_process_model_identifier.replace(":", "/") process_model = get_process_model(process_model_identifier) try: SpecFileService.delete_file(process_model, file_name) @@ -481,9 +481,9 @@ def process_model_file_delete( return Response(json.dumps({"ok": True}), status=200, mimetype="application/json") -def add_file(modified_process_model_id: str) -> flask.wrappers.Response: +def add_file(modified_process_model_identifier: str) -> flask.wrappers.Response: """Add_file.""" - process_model_identifier = modified_process_model_id.replace(":", "/") + process_model_identifier = modified_process_model_identifier.replace(":", "/") process_model = get_process_model(process_model_identifier) request_file = get_file_from_request() if not request_file.filename: @@ -504,10 +504,12 @@ def add_file(modified_process_model_id: str) -> flask.wrappers.Response: ) -def process_instance_create(modified_process_model_id: str) -> flask.wrappers.Response: +def process_instance_create( + modified_process_model_identifier: str, +) -> flask.wrappers.Response: """Create_process_instance.""" process_model_identifier = un_modify_modified_process_model_id( - modified_process_model_id + modified_process_model_identifier ) process_instance = ( ProcessInstanceService.create_process_instance_from_process_model_identifier( @@ -1075,7 +1077,9 @@ def process_instance_show( return make_response(jsonify(process_instance), 200) -def process_instance_delete(process_instance_id: int, modified_process_model_identifier: str) -> flask.wrappers.Response: +def process_instance_delete( + process_instance_id: int, modified_process_model_identifier: str +) -> flask.wrappers.Response: """Create_process_instance.""" process_instance = find_process_instance_by_id_or_raise(process_instance_id) @@ -1157,8 +1161,8 @@ def process_instance_report_delete( return Response(json.dumps({"ok": True}), status=200, mimetype="application/json") -def service_tasks_show() -> flask.wrappers.Response: - """Service_tasks_show.""" +def service_task_list() -> flask.wrappers.Response: + """Service_task_list.""" available_connectors = ServiceTaskService.available_connectors() return Response( json.dumps(available_connectors), status=200, mimetype="application/json" @@ -1365,7 +1369,7 @@ def get_tasks( def process_instance_task_list( - modified_process_model_id: str, + modified_process_model_identifier: str, process_instance_id: int, all_tasks: bool = False, spiff_step: int = 0, @@ -1926,7 +1930,12 @@ def _update_form_schema_with_task_data_as_needed( _update_form_schema_with_task_data_as_needed(o, task_data) -def update_task_data(process_instance_id: str, task_id: str, body: Dict) -> Response: +def update_task_data( + process_instance_id: str, + modified_process_model_identifier: str, + task_id: str, + body: Dict, +) -> Response: """Update task data.""" process_instance = ProcessInstanceModel.query.filter( ProcessInstanceModel.id == int(process_instance_id) diff --git a/spiffworkflow-frontend/src/routes/ProcessInstanceShow.tsx b/spiffworkflow-frontend/src/routes/ProcessInstanceShow.tsx index 481f8d6f..3e17ce81 100644 --- a/spiffworkflow-frontend/src/routes/ProcessInstanceShow.tsx +++ b/spiffworkflow-frontend/src/routes/ProcessInstanceShow.tsx @@ -81,7 +81,7 @@ export default function ProcessInstanceShow() { setTasksCallHadError(true); }; HttpService.makeCallToBackend({ - path: `/process-models/${modifiedProcessModelId}/process-instances/${params.process_instance_id}`, 
+ path: `/process-instances/${modifiedProcessModelId}/${params.process_instance_id}`, successCallback: setProcessInstance, }); let taskParams = '?all_tasks=true'; @@ -179,7 +179,7 @@ export default function ProcessInstanceShow() { { HttpService.makeCallToBackend({ - path: `/service_tasks`, + path: `/service-tasks`, successCallback: makeApiHandler(event), }); }; From cc547ed441a97a78d03f3b9b4b51a5e2d6fe7284 Mon Sep 17 00:00:00 2001 From: jasquat Date: Mon, 5 Dec 2022 16:49:27 -0500 Subject: [PATCH 38/40] make the frontend uris match the api calls better w/ burnettk --- .../src/components/ProcessInstanceListTable.tsx | 2 +- .../src/components/ProcessInstanceRun.tsx | 8 ++++---- .../src/components/ProcessModelListTiles.tsx | 4 ++-- .../src/components/TasksForMyOpenProcesses.tsx | 2 +- .../src/components/TasksWaitingForMe.tsx | 2 +- .../src/components/TasksWaitingForMyGroups.tsx | 2 +- .../src/hooks/UriListForPermissions.tsx | 3 ++- spiffworkflow-frontend/src/routes/AdminRoutes.tsx | 6 +++--- spiffworkflow-frontend/src/routes/MessageInstanceList.tsx | 8 ++++---- spiffworkflow-frontend/src/routes/MyTasks.tsx | 6 +++--- .../src/routes/ProcessInstanceLogList.tsx | 2 +- spiffworkflow-frontend/src/routes/ProcessInstanceShow.tsx | 2 +- spiffworkflow-frontend/src/routes/ProcessModelShow.tsx | 6 +++--- 13 files changed, 27 insertions(+), 26 deletions(-) diff --git a/spiffworkflow-frontend/src/components/ProcessInstanceListTable.tsx b/spiffworkflow-frontend/src/components/ProcessInstanceListTable.tsx index 621c595c..1bc0fdea 100644 --- a/spiffworkflow-frontend/src/components/ProcessInstanceListTable.tsx +++ b/spiffworkflow-frontend/src/components/ProcessInstanceListTable.tsx @@ -1059,7 +1059,7 @@ export default function ProcessInstanceListTable({ return ( {id} diff --git a/spiffworkflow-frontend/src/components/ProcessInstanceRun.tsx b/spiffworkflow-frontend/src/components/ProcessInstanceRun.tsx index dafe20d5..05b643da 100644 --- a/spiffworkflow-frontend/src/components/ProcessInstanceRun.tsx +++ b/spiffworkflow-frontend/src/components/ProcessInstanceRun.tsx @@ -83,9 +83,9 @@ export default function ProcessInstanceRun({ processModel.id ); - const processInstanceActionPath = `/v1.0/process-instances/${modifiedProcessModelId}`; + const processInstanceCreatePath = `/v1.0/process-instances/${modifiedProcessModelId}`; let permissionRequestData: PermissionsToCheck = { - [processInstanceActionPath]: ['POST'], + [processInstanceCreatePath]: ['POST'], }; if (!checkPermissions) { @@ -117,14 +117,14 @@ export default function ProcessInstanceRun({ const processInstanceCreateAndRun = () => { HttpService.makeCallToBackend({ - path: processInstanceActionPath, + path: processInstanceCreatePath, successCallback: processModelRun, httpMethod: 'POST', }); }; if (checkPermissions) { return ( - + diff --git a/spiffworkflow-frontend/src/components/ProcessModelListTiles.tsx b/spiffworkflow-frontend/src/components/ProcessModelListTiles.tsx index 4787fe94..1412635c 100644 --- a/spiffworkflow-frontend/src/components/ProcessModelListTiles.tsx +++ b/spiffworkflow-frontend/src/components/ProcessModelListTiles.tsx @@ -54,9 +54,9 @@ export default function ProcessModelListTiles({

Process Instance {processInstance.id} kicked off ( view diff --git a/spiffworkflow-frontend/src/components/TasksForMyOpenProcesses.tsx b/spiffworkflow-frontend/src/components/TasksForMyOpenProcesses.tsx index a81779c7..deb2030e 100644 --- a/spiffworkflow-frontend/src/components/TasksForMyOpenProcesses.tsx +++ b/spiffworkflow-frontend/src/components/TasksForMyOpenProcesses.tsx @@ -55,7 +55,7 @@ export default function MyOpenProcesses() {

{rowToUse.process_instance_id} diff --git a/spiffworkflow-frontend/src/components/TasksWaitingForMe.tsx b/spiffworkflow-frontend/src/components/TasksWaitingForMe.tsx index 92420224..7d06b7a3 100644 --- a/spiffworkflow-frontend/src/components/TasksWaitingForMe.tsx +++ b/spiffworkflow-frontend/src/components/TasksWaitingForMe.tsx @@ -47,7 +47,7 @@ export default function TasksWaitingForMe() { {rowToUse.process_instance_id} diff --git a/spiffworkflow-frontend/src/components/TasksWaitingForMyGroups.tsx b/spiffworkflow-frontend/src/components/TasksWaitingForMyGroups.tsx index 51c38e94..565cd4a5 100644 --- a/spiffworkflow-frontend/src/components/TasksWaitingForMyGroups.tsx +++ b/spiffworkflow-frontend/src/components/TasksWaitingForMyGroups.tsx @@ -55,7 +55,7 @@ export default function TasksWaitingForMyGroups() { {rowToUse.process_instance_id} diff --git a/spiffworkflow-frontend/src/hooks/UriListForPermissions.tsx b/spiffworkflow-frontend/src/hooks/UriListForPermissions.tsx index 67958723..eff30a82 100644 --- a/spiffworkflow-frontend/src/hooks/UriListForPermissions.tsx +++ b/spiffworkflow-frontend/src/hooks/UriListForPermissions.tsx @@ -9,7 +9,8 @@ export const useUriListForPermissions = () => { messageInstanceListPath: '/v1.0/messages', processGroupListPath: '/v1.0/process-groups', processGroupShowPath: `/v1.0/process-groups/${params.process_group_id}`, - processInstanceActionPath: `/v1.0/process-instances/${params.process_model_id}`, + processInstanceCreatePath: `/v1.0/process-instances/${params.process_model_id}`, + processInstanceActionPath: `/v1.0/process-instances/${params.process_model_id}/${params.process_instance_id}`, processInstanceListPath: '/v1.0/process-instances', processInstanceLogListPath: `/v1.0/logs/${params.process_model_id}/${params.process_instance_id}`, processInstanceReportListPath: '/v1.0/process-instances/reports', diff --git a/spiffworkflow-frontend/src/routes/AdminRoutes.tsx b/spiffworkflow-frontend/src/routes/AdminRoutes.tsx index 91ae7ab0..da6cae35 100644 --- a/spiffworkflow-frontend/src/routes/AdminRoutes.tsx +++ b/spiffworkflow-frontend/src/routes/AdminRoutes.tsx @@ -71,11 +71,11 @@ export default function AdminRoutes() { element={} /> } /> } /> } /> } /> } /> diff --git a/spiffworkflow-frontend/src/routes/MessageInstanceList.tsx b/spiffworkflow-frontend/src/routes/MessageInstanceList.tsx index b77b744c..a9ec6b69 100644 --- a/spiffworkflow-frontend/src/routes/MessageInstanceList.tsx +++ b/spiffworkflow-frontend/src/routes/MessageInstanceList.tsx @@ -102,9 +102,9 @@ export default function MessageInstanceList() { {row.process_instance_id} @@ -163,9 +163,9 @@ export default function MessageInstanceList() { }, [ `Process Instance: ${searchParams.get('process_instance_id')}`, - `/admin/process-models/${searchParams.get( + `/admin/process-instances/${searchParams.get( 'process_model_id' - )}/process-instances/${searchParams.get('process_instance_id')}`, + )}/${searchParams.get('process_instance_id')}`, ], ['Messages'], ]} diff --git a/spiffworkflow-frontend/src/routes/MyTasks.tsx b/spiffworkflow-frontend/src/routes/MyTasks.tsx index 51f5f3e9..4c1cbc9b 100644 --- a/spiffworkflow-frontend/src/routes/MyTasks.tsx +++ b/spiffworkflow-frontend/src/routes/MyTasks.tsx @@ -55,9 +55,9 @@ export default function MyTasks() {

Process Instance {processInstance.id} kicked off ( view @@ -95,7 +95,7 @@ export default function MyTasks() {

{rowToUse.process_instance_id} diff --git a/spiffworkflow-frontend/src/routes/ProcessInstanceLogList.tsx b/spiffworkflow-frontend/src/routes/ProcessInstanceLogList.tsx index 7b09c89f..61420295 100644 --- a/spiffworkflow-frontend/src/routes/ProcessInstanceLogList.tsx +++ b/spiffworkflow-frontend/src/routes/ProcessInstanceLogList.tsx @@ -32,7 +32,7 @@ export default function ProcessInstanceLogList() { path: `${targetUris.processInstanceLogListPath}?per_page=${perPage}&page=${page}`, successCallback: setProcessInstanceLogListFromResult, }); - }, [searchParams, params]); + }, [searchParams, params, targetUris]); const buildTable = () => { const rows = processInstanceLogs.map((row) => { diff --git a/spiffworkflow-frontend/src/routes/ProcessInstanceShow.tsx b/spiffworkflow-frontend/src/routes/ProcessInstanceShow.tsx index 3e17ce81..9a0495d1 100644 --- a/spiffworkflow-frontend/src/routes/ProcessInstanceShow.tsx +++ b/spiffworkflow-frontend/src/routes/ProcessInstanceShow.tsx @@ -273,7 +273,7 @@ export default function ProcessInstanceShow() { size="sm" className="button-white-background" data-qa="process-instance-log-list-link" - href={`/admin/process-models/${modifiedProcessModelId}/process-instances/${params.process_instance_id}/logs`} + href={`/admin/logs/${modifiedProcessModelId}/${params.process_instance_id}`} > Logs diff --git a/spiffworkflow-frontend/src/routes/ProcessModelShow.tsx b/spiffworkflow-frontend/src/routes/ProcessModelShow.tsx index 2882e183..96a65656 100644 --- a/spiffworkflow-frontend/src/routes/ProcessModelShow.tsx +++ b/spiffworkflow-frontend/src/routes/ProcessModelShow.tsx @@ -66,7 +66,7 @@ export default function ProcessModelShow() { const permissionRequestData: PermissionsToCheck = { [targetUris.processModelShowPath]: ['PUT', 'DELETE'], [targetUris.processInstanceListPath]: ['GET'], - [targetUris.processInstanceActionPath]: ['POST'], + [targetUris.processInstanceCreatePath]: ['POST'], [targetUris.processModelFileCreatePath]: ['POST', 'PUT', 'GET', 'DELETE'], }; const { ability, permissionsLoaded } = usePermissionFetcher( @@ -95,7 +95,7 @@ export default function ProcessModelShow() {

Process Instance {processInstance.id} kicked off ( view @@ -556,7 +556,7 @@ export default function ProcessModelShow() { <> From b17f977e4c23a97039be5be1b59ccecd7dcd3e0e Mon Sep 17 00:00:00 2001 From: jasquat Date: Tue, 6 Dec 2022 09:46:32 -0500 Subject: [PATCH 39/40] get the columns for the instance list table anytime filter options are displayed if empty --- .../components/ProcessInstanceListTable.tsx | 34 ++++++++++++------- 1 file changed, 21 insertions(+), 13 deletions(-) diff --git a/spiffworkflow-frontend/src/components/ProcessInstanceListTable.tsx b/spiffworkflow-frontend/src/components/ProcessInstanceListTable.tsx index 1bc0fdea..eee5a273 100644 --- a/spiffworkflow-frontend/src/components/ProcessInstanceListTable.tsx +++ b/spiffworkflow-frontend/src/components/ProcessInstanceListTable.tsx @@ -372,15 +372,18 @@ export default function ProcessInstanceListTable({ titleOperation = 'Created'; } return ( - + <> + +
+ );
 }
 return null;
 };
@@ -935,6 +938,15 @@ export default function ProcessInstanceListTable({
 if (!showFilterOptions) {
 return null;
 }
+
+ // get the columns anytime we display the filter options if they are empty
+ if (availableReportColumns.length < 1) {
+ HttpService.makeCallToBackend({
+ path: `/process-instances/reports/columns`,
+ successCallback: setAvailableReportColumns,
+ });
+ }
+
 return (
 <>
@@ -1134,10 +1146,6 @@ export default function ProcessInstanceListTable({
 const toggleShowFilterOptions = () => {
 setShowFilterOptions(!showFilterOptions);
- HttpService.makeCallToBackend({
- path: `/process-instances/reports/columns`,
- successCallback: setAvailableReportColumns,
- });
 };

 const reportSearchComponent = () => {

From fff7661bc721828198766c161bae5dc80741141d Mon Sep 17 00:00:00 2001
From: burnettk
Date: Tue, 6 Dec 2022 12:38:25 -0500
Subject: [PATCH 40/40] break process instance log list page into two tabs, simple and detailed

---
 .../src/spiffworkflow_backend/api.yml | 6 ++
 .../routes/process_api_blueprint.py | 12 ++--
 .../src/routes/ProcessInstanceLogList.tsx | 64 +++++++++++++++----
 3 files changed, 66 insertions(+), 16 deletions(-)

diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/api.yml b/spiffworkflow-backend/src/spiffworkflow_backend/api.yml
index e9da26b4..6141a861 100755
--- a/spiffworkflow-backend/src/spiffworkflow_backend/api.yml
+++ b/spiffworkflow-backend/src/spiffworkflow_backend/api.yml
@@ -1342,6 +1342,12 @@ paths:
 description: The number of items to show per page. Defaults to page 10.
 schema:
 type: integer
+ - name: detailed
+ in: query
+ required: false
+ description: Show the detailed view, which includes all log entries
+ schema:
+ type: boolean
 get:
 tags:
 - Process Instances

diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/routes/process_api_blueprint.py b/spiffworkflow-backend/src/spiffworkflow_backend/routes/process_api_blueprint.py
index f780a97d..549c76f0 100644
--- a/spiffworkflow-backend/src/spiffworkflow_backend/routes/process_api_blueprint.py
+++ b/spiffworkflow-backend/src/spiffworkflow_backend/routes/process_api_blueprint.py
@@ -609,16 +609,20 @@ def process_instance_log_list(
 process_instance_id: int,
 page: int = 1,
 per_page: int = 100,
+ detailed: bool = False,
 ) -> flask.wrappers.Response:
 """Process_instance_log_list."""
 # to make sure the process instance exists
 process_instance = find_process_instance_by_id_or_raise(process_instance_id)
+ log_query = SpiffLoggingModel.query.filter(
+ SpiffLoggingModel.process_instance_id == process_instance.id
+ )
+ if not detailed:
+ log_query = log_query.filter(SpiffLoggingModel.message.in_(["State change to COMPLETED"])) # type: ignore
+
 logs = (
- SpiffLoggingModel.query.filter(
- SpiffLoggingModel.process_instance_id == process_instance.id
- )
- .order_by(SpiffLoggingModel.timestamp.desc()) # type: ignore
+ log_query.order_by(SpiffLoggingModel.timestamp.desc()) # type: ignore
 .join(
 UserModel, UserModel.id == SpiffLoggingModel.current_user_id, isouter=True
 ) # isouter since if we don't have a user, we still want the log

diff --git a/spiffworkflow-frontend/src/routes/ProcessInstanceLogList.tsx b/spiffworkflow-frontend/src/routes/ProcessInstanceLogList.tsx
index 61420295..37ef5519 100644
--- a/spiffworkflow-frontend/src/routes/ProcessInstanceLogList.tsx
+++ b/spiffworkflow-frontend/src/routes/ProcessInstanceLogList.tsx
@@ -1,6 +1,6 @@
 import { useEffect, useState } from 'react';
 // @ts-ignore
-import { Table } from '@carbon/react';
+import { Table, Tabs, TabList, Tab } from
'@carbon/react'; import { useParams, useSearchParams, Link } from 'react-router-dom'; import PaginationForTable from '../components/PaginationForTable'; import ProcessBreadcrumb from '../components/ProcessBreadcrumb'; @@ -14,13 +14,14 @@ import { useUriListForPermissions } from '../hooks/UriListForPermissions'; export default function ProcessInstanceLogList() { const params = useParams(); - const [searchParams] = useSearchParams(); + const [searchParams, setSearchParams] = useSearchParams(); const [processInstanceLogs, setProcessInstanceLogs] = useState([]); const [pagination, setPagination] = useState(null); const modifiedProcessModelId = modifyProcessIdentifierForPathParam( `${params.process_model_id}` ); const { targetUris } = useUriListForPermissions(); + const isDetailedView = searchParams.get('detailed') === 'true'; useEffect(() => { const setProcessInstanceLogListFromResult = (result: any) => { @@ -29,21 +30,31 @@ export default function ProcessInstanceLogList() { }; const { page, perPage } = getPageInfoFromSearchParams(searchParams); HttpService.makeCallToBackend({ - path: `${targetUris.processInstanceLogListPath}?per_page=${perPage}&page=${page}`, + path: `${targetUris.processInstanceLogListPath}?per_page=${perPage}&page=${page}&detailed=${isDetailedView}`, successCallback: setProcessInstanceLogListFromResult, }); - }, [searchParams, params, targetUris]); + }, [ + searchParams, + params, + targetUris.processInstanceLogListPath, + isDetailedView, + ]); const buildTable = () => { const rows = processInstanceLogs.map((row) => { const rowToUse = row as any; return (

{rowToUse.bpmn_process_identifier}{rowToUse.id} {rowToUse.message}{rowToUse.bpmn_task_identifier} {rowToUse.bpmn_task_name}{rowToUse.bpmn_task_type}{rowToUse.bpmn_task_identifier}{rowToUse.bpmn_task_type}{rowToUse.bpmn_process_identifier}{rowToUse.username}
Bpmn Process IdentifierId MessageTask Identifier Task NameTask TypeTask IdentifierTask TypeBpmn Process IdentifierUser Timestamp
); }; + const selectedTabIndex = isDetailedView ? 1 : 0; if (pagination) { const { page, perPage } = getPageInfoFromSearchParams(searchParams); return ( -
+ <> + + + { + searchParams.set('detailed', 'false'); + setSearchParams(searchParams); + }} + > + Simple + + { + searchParams.set('detailed', 'true'); + setSearchParams(searchParams); + }} + > + Detailed + + + +
-
+ ); } return null;
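
The last two patches work as a pair: the backend's process_instance_log_list endpoint accepts a detailed query parameter and, when it is false, restricts the query to "State change to COMPLETED" entries, while the frontend drives that flag from the URL through Carbon tabs. Below is a minimal TypeScript sketch of the frontend half of that pattern, not the component from the patches themselves; it assumes react-router-dom v6 and @carbon/react are available, and the fetchLogs helper with its /v1.0/logs/example URL is a hypothetical stand-in for HttpService.makeCallToBackend and the real endpoint.

// LogListSketch.tsx: sketch of the simple/detailed tab pattern, under the assumptions stated above.
import { useEffect, useState } from 'react';
import { useSearchParams } from 'react-router-dom';
// @ts-ignore
import { Tabs, TabList, Tab } from '@carbon/react';

type LogEntry = { id: number; message: string; timestamp: number };

// Hypothetical stand-in for HttpService.makeCallToBackend; the URL is illustrative only.
async function fetchLogs(detailed: boolean): Promise<LogEntry[]> {
  const response = await fetch(`/v1.0/logs/example?detailed=${detailed}`);
  return response.json();
}

export default function LogListSketch() {
  const [searchParams, setSearchParams] = useSearchParams();
  const [logs, setLogs] = useState<LogEntry[]>([]);
  // The URL is the single source of truth for which tab is active.
  const isDetailedView = searchParams.get('detailed') === 'true';

  // Refetch whenever the detailed flag in the URL changes.
  useEffect(() => {
    fetchLogs(isDetailedView).then(setLogs);
  }, [isDetailedView]);

  const setDetailed = (value: boolean) => {
    searchParams.set('detailed', String(value));
    setSearchParams(searchParams);
  };

  return (
    <>
      <Tabs selectedIndex={isDetailedView ? 1 : 0}>
        <TabList aria-label="Log detail level">
          <Tab onClick={() => setDetailed(false)}>Simple</Tab>
          <Tab onClick={() => setDetailed(true)}>Detailed</Tab>
        </TabList>
      </Tabs>
      <ul>
        {logs.map((log) => (
          <li key={log.id}>{log.message}</li>
        ))}
      </ul>
    </>
  );
}

Keeping the flag in the query string rather than in component state is what lets the simple/detailed choice survive a page refresh and stay consistent with the paginated fetch.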