From e4bf22f5cf27063c825a221f1146f655701d86df Mon Sep 17 00:00:00 2001
From: jasquat <2487833+jasquat@users.noreply.github.com>
Date: Mon, 3 Jul 2023 01:24:54 -0400
Subject: [PATCH] Feature/interstitial summary part 3 (#367)

* throw an error if backend gives a 500 from interstitial page w/ burnettk
* turn unexpected errors for the interstitial page into event stream responses to avoid error misdirection
* pyl
* raise the api_error from the original and make the render_data method private
* Feature/interstitial summary (#337)
* * Process instance logs and messages are now components rather than pages, and are included within tabs on the process instance page, along with the diagram.
* Removed the Zoom and Move modules when showing the readonly Diagram. Assured this readonly view is resized to fit the space when possible.
* Checkbox Widget no longer displays a duplicate label.
* CSS Tweaks
* All pages are limited to a max display width of 1440, with auto margins to center the main content on the page.
* "Show" pages, like ProcessInstanceShow, TaskShow have the primary content limited to 1000, also with auto-margins.
* Paragraphs, headings, blockquotes, and list items are limited to a width of 640.
* Reduced margin bottom on all breadcrumbs.
* Slightly reduced the width and margin of tiles.
* Ordered lists and unordered lists show numbers and bullets now.
* End user Instructions component can, optionally, auto-collapse, so that only a portion is displayed, along with a toggle. This is how it is set up for the ProcessInstanceShow page.
* Greatly reduced the lag in the interstitial page when doing a redirect.
* run_pyl
* kill console
* wait for permissionsLoaded too since we are using ability.can
* Previous change removed the top level Messages page - this re-adds it.
* I am always, ALWAYS shocked at how I can not wrap my head around when and where to use "useEffect". This should cause the show/hide Instructions toggle to only show when useful.
* Minor cleanup on the process instance properties display.
* linting

---------

Co-authored-by: burnettk

* Fix a linting error.
* minor permissions related fixes for the new ui.
* Revert "Revert "Feature/better subworkflow management (#331)""

This reverts commit 48dcde8faf00241201c515b54444fe9fb373c7f4.

* do not execute tasks from the process instance show page w/ burnettk
* pyl w/ burnettk
* Very minor UI tweak to resolve a number of issues mentioned in SPIFF-316 Notion Ticket:
* Markdown links now open in a new window
* Tables on the home pages are now contained within Grids to better align them with the other content that is in a grid.
* Right aligned the "process instance list link" button so it is flush right with the table underneath
* Gave a little more breathing room to the content on the info/metadata in the process instance view.
* updated docker image build action to contain the appropriate version info w/ burnettk * properly resize to the available columns depending on s/m/l --------- Co-authored-by: jasquat Co-authored-by: Dan Funk Co-authored-by: burnettk --- .../docker_image_for_main_builds.yml | 5 + docs/wish_list/wish_list.md | 76 +++- .../bin/execute_tasks_for_process_instance.py | 17 + spiffworkflow-backend/bin/login_with_users | 44 --- .../bin/wait_for_db_to_be_ready.py | 1 - spiffworkflow-backend/pyproject.toml | 1 + .../src/spiffworkflow_backend/api.yml | 6 + .../exceptions/api_error.py | 16 +- .../routes/tasks_controller.py | 107 ++++-- .../services/service_task_service.py | 1 + .../src/components/InstructionsForEndUser.tsx | 89 ++++- .../MessageInstanceList.tsx | 26 +- .../components/ProcessInstanceListTable.tsx | 29 +- .../ProcessInstanceLogList.tsx | 92 ++--- .../src/components/ProcessInterstitial.tsx | 117 ++++--- .../src/components/ReactDiagramEditor.tsx | 17 +- .../src/hooks/UriListForPermissions.tsx | 1 + spiffworkflow-frontend/src/index.css | 13 +- spiffworkflow-frontend/src/index.scss | 34 ++ .../CheckboxesWidget/CheckboxesWidget.tsx | 3 - .../src/routes/AdminRoutes.tsx | 15 +- .../src/routes/MessageListPage.tsx | 5 + .../src/routes/ProcessInstanceShow.tsx | 326 +++++++++--------- .../src/routes/ProcessInterstitialPage.tsx | 4 +- .../src/routes/ProcessModelShow.tsx | 12 +- .../src/routes/TaskShow.tsx | 4 +- 26 files changed, 611 insertions(+), 450 deletions(-) create mode 100644 spiffworkflow-backend/bin/execute_tasks_for_process_instance.py delete mode 100755 spiffworkflow-backend/bin/login_with_users rename spiffworkflow-frontend/src/{routes => components}/MessageInstanceList.tsx (90%) rename spiffworkflow-frontend/src/{routes => components}/ProcessInstanceLogList.tsx (86%) create mode 100644 spiffworkflow-frontend/src/routes/MessageListPage.tsx diff --git a/.github/workflows/docker_image_for_main_builds.yml b/.github/workflows/docker_image_for_main_builds.yml index f66a8441c..5054a0034 100644 --- a/.github/workflows/docker_image_for_main_builds.yml +++ b/.github/workflows/docker_image_for_main_builds.yml @@ -31,6 +31,7 @@ on: push: branches: - main + - spiffdemo jobs: create_frontend_docker_image: @@ -38,6 +39,7 @@ jobs: env: REGISTRY: ghcr.io IMAGE_NAME: sartography/spiffworkflow-frontend + BRANCH_NAME: ${{ github.head_ref || github.ref_name }} permissions: contents: read packages: write @@ -61,6 +63,7 @@ jobs: images: ${{ env.REGISTRY }}/${{ env.IMAGE_NAME }} labels: | org.opencontainers.image.description=Frontend component of SpiffWorkflow, a software development platform for building, running, and monitoring executable diagrams + org.opencontainers.image.version=${{ env.BRANCH_NAME }}-${{ steps.date.outputs.date }} tags: | type=ref,event=branch,suffix=-latest type=ref,event=branch,suffix=-${{ steps.date.outputs.date }} @@ -84,6 +87,7 @@ jobs: env: REGISTRY: ghcr.io IMAGE_NAME: sartography/spiffworkflow-backend + BRANCH_NAME: ${{ github.head_ref || github.ref_name }} permissions: contents: read packages: write @@ -107,6 +111,7 @@ jobs: images: ${{ env.REGISTRY }}/${{ env.IMAGE_NAME }} labels: | org.opencontainers.image.description=Backend component of SpiffWorkflow, a software development platform for building, running, and monitoring executable diagrams + org.opencontainers.image.version=${{ env.BRANCH_NAME }}-${{ steps.date.outputs.date }} tags: | type=ref,event=branch,suffix=-latest type=ref,event=branch,suffix=-${{ steps.date.outputs.date }} diff --git a/docs/wish_list/wish_list.md 
b/docs/wish_list/wish_list.md index 7cc4de076..10e4c655a 100644
--- a/docs/wish_list/wish_list.md
+++ b/docs/wish_list/wish_list.md
@@ -2,7 +2,41 @@ The following is a list of enhancements we wish to complete in the near (or even distant) future.

-## Performance Improvements
+## Performance / System Improvements
+
+### Benchmarking / Performance Testing
+Automated tests that assure our performance remains consistent as we add features and functionality.
+
+### Support Multiple Connector Proxies
+Service Tasks have been a huge win; there are multiple reasons that supporting more than one Connector Proxy would be beneficial:
+
+1. Connect to several separately hosted services
+2. Support multiple services written in multiple languages
+3. Allow some connectors to be local (http get/post) vs remote (xero/coin gecko)
+4. Could support non http based connectors (git interactions could be a workflow)
+
+### Interstitial Performance
+Push all processing to the background so the interstitial is just querying, not running (new item).
+
+### Authentication Keys
+Provide an ability to access API endpoints using an access key - or an authentication process that is specifically designed for API calls. (We currently rely on grabbing the JSON token to do this, which is not a real solution.)
+
+### Core BPMN features
+There are a number of useful BPMN components that we do not currently support. We should evaluate these and determine which ones we should support and how we should support them. We should consider creating a list of unsupported items.
+
+* Compensation Events (valuable, but difficult)
+* Conditional events.
+* Event Sub-Processes are not currently supported (low-hanging fruit, easy to add)
+
+### Decentralized / Distributed Deployments
+This is a broad topic and will be covered in a separate document. But consider a SpiffWorkflow implementation that is deployed across a cluster of systems - and manages transactions on a shared blockchain implementation. Such a structure could assure compliance with a set of blessed BPMN diagrams. Such a system could support highly transparent and auditable processes that could drive a DAO-based organization.
+
+
+### Improve Parallel Processing
+We should support the parallel execution of tasks within a single process whenever possible to do so. This is not as far-fetched or difficult as it may initially seem. While Python is notoriously bad at parallel execution (the lovely GIL) - we have already taken the most critical steps to assure it is possible:
+1. A team has demonstrated parallel execution using the core SpiffWorkflow library.
+2. We can keep a configurable number of "background" SpiffArena processes running that can pick up waiting tasks.
+Given these things are already in place, we just need to lock processes at the task or branch level - so that ready tasks on parallel branches can be picked up by different background processes at the same time. (A rough sketch of this locking idea follows below.)

### BPMN Definitions at save time vs run time
Improve performance by pre-processing the BPMN Specification and generating the internal JSON representation so we no longer incur the expense of doing this on a per-process basis.
@@ -10,6 +44,12 @@ This will also allow us to do some early and deep validation as well.

## End User Experience

+### UI Overview
+We could really use a good UI / UX review of the site and take a stab at cleaning up the whole site to follow some consistent design patterns and resolve potential issues.
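A minimal, illustrative sketch of the branch-level locking idea from the "Improve Parallel Processing" item above. It uses plain Python threads as stand-ins for the "background" SpiffArena processes mentioned there, with one lock per parallel branch so that ready tasks on different branches can be picked up concurrently while tasks on the same branch stay serialized. None of the names below are SpiffArena or SpiffWorkflow APIs; this is an assumption-laden sketch of the locking idea only.

import threading
from queue import Empty, Queue

branch_locks: dict[str, threading.Lock] = {}
registry_lock = threading.Lock()

def lock_for_branch(branch_id: str) -> threading.Lock:
    # one lock per parallel branch: two workers never execute the same branch at once
    with registry_lock:
        return branch_locks.setdefault(branch_id, threading.Lock())

def worker(ready_tasks: Queue) -> None:
    while True:
        try:
            branch_id, run_task = ready_tasks.get(timeout=1)
        except Empty:
            return  # nothing ready; a real worker would poll again later
        with lock_for_branch(branch_id):  # task/branch-level lock instead of locking the whole instance
            run_task()
        ready_tasks.task_done()

if __name__ == "__main__":
    ready: Queue = Queue()
    for i in range(4):
        ready.put((f"branch-{i % 2}", lambda i=i: print(f"ran ready task {i}")))
    workers = [threading.Thread(target=worker, args=(ready,)) for _ in range(2)]
    for w in workers:
        w.start()
    for w in workers:
        w.join()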
+
+### Customizable Home Page (non Status specific)
+Allow some way to define custom landing pages that create different experiences for different organizations / needs.
+
### Markdown rendering could be better
1. When creating a bulleted or numbered list, no bullets or numbers are displayed. This is a bug in our style sheets - or something that is clearing out all styles.
2. Limit the width of paragraphs to something reasonable. Having a line of text stretch across the entire screen is not a good experience.
@@ -21,11 +61,34 @@ Allow defining contact information at the process group and process model level,
This information could then be displayed when a process is in a non-functional state - such as an error, suspended, or terminated state.
It might also be available in the footer or under a help icon when displaying a process instance.

+### Process Heatmap
+Allow administrators to see an overlay of a BPMN diagram that shows all the process instances in the system and where they are (20 people are waiting on approval, 15 are in the re-review .....)
+
## Modeler Experience

+### DMN Editor Sucks
+Can we build a better DMN editor? Trisotech seems to do it very well. Would love to have a day or two just to research this area and see if there is just another open source project we can leverage, or if we could build our own tool.
+
+### Modeler Checker
+At run time, or when you save, it would be great if we could execute a:
+* Validation Report - what is wrong with the model? Is it valid BPMN? Are there intrinsic errors?
+* Linting Report! Does the model follow common naming conventions, styles, are there deadlocks, etc. Many of these tools already exist, we just need to integrate them!
+
+### Plugins and Extensions
+* Track down our previous research and add here. Color picker, etc....
+
+### Automated Testing
+Incorporate an end-to-end testing system that will allow you to quickly assure that
+a BPMN model is working as expected. Imagine Cypress tests that you could define and execute in the modeler.
+
+### JSON Schemas Everywhere!
+Our forms are JSON Schemas (a description of the data structure) - we could do similar things for Service Tasks, Script Tasks ... such that the modeler is at all times aware of what data is available - making it possible to build and execute a task as it is created.
+
### Markdown Support for Process Groups and Models
Allow us to define a markdown file for a process group or process model, which would be displayed in the process group or process model in the tile view, or at the top of the details page when a group or model is selected.

+### Adding a unit test from within the script editor would be nice
+
### Form Builder
1. Let's invest in a much better Form Builder experience, so that it is trivial to build new forms or modify existing simple forms. We don't want to implement everything here - but a simple builder would be very useful.
2. RJSF says it supports markdown in the headers, but it doesn't work for us.
@@ -38,15 +101,4 @@ Right now we allow editing the Display name of a model or group, but it does not
change the name of the underlying directory, making it harder and harder over time to look at GitHub or the file system and find what you are seeing in the display.

-## System Improvements
-
-### Support Multiple Connector Proxies
-Service Tasks have been a huge win, there are multiple reasons that supporting more than one Connector Proxy would be beneficial:
-
-1. Connect to several separately hosted services
-2.
Support mulitple services written in multiple languages -3. Allow some connectors to be local (http get/post) vs remote (xero/coin gecko) -4. Could support non http based connectors (git interactions could be a workflow) - -### Improve Parallel Processing diff --git a/spiffworkflow-backend/bin/execute_tasks_for_process_instance.py b/spiffworkflow-backend/bin/execute_tasks_for_process_instance.py new file mode 100644 index 000000000..545676987 --- /dev/null +++ b/spiffworkflow-backend/bin/execute_tasks_for_process_instance.py @@ -0,0 +1,17 @@ +from spiffworkflow_backend import create_app +from spiffworkflow_backend.models.process_instance import ProcessInstanceModel +from spiffworkflow_backend.services.process_instance_service import ProcessInstanceService + + +def main() -> None: + app = create_app() + with app.app_context(): + execution_strategy_name = app.config["SPIFFWORKFLOW_BACKEND_ENGINE_STEP_DEFAULT_STRATEGY_BACKGROUND"] + process_instance = ProcessInstanceModel.query.filter_by(id=29).first() + ProcessInstanceService.run_process_instance_with_processor( + process_instance, execution_strategy_name=execution_strategy_name + ) + + +if __name__ == "__main__": + main() diff --git a/spiffworkflow-backend/bin/login_with_users b/spiffworkflow-backend/bin/login_with_users deleted file mode 100755 index 3883f0465..000000000 --- a/spiffworkflow-backend/bin/login_with_users +++ /dev/null @@ -1,44 +0,0 @@ -#!/usr/bin/env bash - -function error_handler() { - >&2 echo "Exited with BAD EXIT CODE '${2}' in ${0} script at line: ${1}." - exit "$2" -} -trap 'error_handler ${LINENO} $?' ERR -set -o errtrace -o errexit -o nounset -o pipefail - -script_dir="$( cd -- "$(dirname "$0")" >/dev/null 2>&1 ; pwd -P )" - -if [[ -z "${KEYCLOAK_BASE_URL:-}" ]]; then - # export KEYCLOAK_BASE_URL=http://localhost:7002 - export KEYCLOAK_BASE_URL=https://keycloak.dev.spiffworkflow.org -fi -if [[ -z "${BACKEND_BASE_URL:-}" ]]; then - # export BACKEND_BASE_URL=http://localhost:7000 - export BACKEND_BASE_URL=https://api.dev.spiffworkflow.org -fi - -user_list="${1}" -if [[ -z "${1:-}" ]]; then - >&2 echo "usage: $(basename "$0") [user_list]" - exit 1 -fi -REALM_NAME=${2-spiffworkflow} - -while read -r input_line; do - if ! 
grep -qE '(^#|email)' <<<"$input_line" ; then - username=$(awk -F '@' '{print $1}' <<<"$input_line") - password=$(awk -F ',' '{print $2}' <<<"$input_line") - if [[ -z "$password" ]]; then - password="$username" - fi - access_token=$("${script_dir}/get_token" "$username" "$password" "$REALM_NAME" || echo '') - if [[ -z "$access_token" || "$access_token" == "null" ]]; then - >&2 echo "ERROR: failed to get access token for '$username'" - else - - echo "access_token: ${access_token}" - curl -v -X POST "${BACKEND_BASE_URL}/v1.0/login_with_access_token?access_token=${access_token}" -H "Authorization: Bearer $access_token" - fi - fi -done <"$user_list" diff --git a/spiffworkflow-backend/bin/wait_for_db_to_be_ready.py b/spiffworkflow-backend/bin/wait_for_db_to_be_ready.py index e94532c3b..5c2955ddb 100644 --- a/spiffworkflow-backend/bin/wait_for_db_to_be_ready.py +++ b/spiffworkflow-backend/bin/wait_for_db_to_be_ready.py @@ -6,7 +6,6 @@ from spiffworkflow_backend.helpers.db_helper import try_to_connect def main() -> None: - """Main.""" app = create_app() start_time = time.time() with app.app_context(): diff --git a/spiffworkflow-backend/pyproject.toml b/spiffworkflow-backend/pyproject.toml index 5994128af..2e823a576 100644 --- a/spiffworkflow-backend/pyproject.toml +++ b/spiffworkflow-backend/pyproject.toml @@ -31,6 +31,7 @@ flask-simple-crypt = "^0.3.3" werkzeug = "*" SpiffWorkflow = {git = "https://github.com/sartography/SpiffWorkflow", rev = "main"} # SpiffWorkflow = {develop = true, path = "../../spiffworkflow/" } +# SpiffWorkflow = {develop = true, path = "../../SpiffWorkflow/" } sentry-sdk = "^1.10" # sphinx-autoapi = "^2.0" mysql-connector-python = "*" diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/api.yml b/spiffworkflow-backend/src/spiffworkflow_backend/api.yml index 585c88b68..780e50810 100755 --- a/spiffworkflow-backend/src/spiffworkflow_backend/api.yml +++ b/spiffworkflow-backend/src/spiffworkflow_backend/api.yml @@ -1819,6 +1819,12 @@ paths: description: The unique id of an existing process instance. schema: type: integer + - name: execute_tasks + in: query + required: false + description: Execute ready tasks on the process instance. + schema: + type: boolean get: tags: - Tasks diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/exceptions/api_error.py b/spiffworkflow-backend/src/spiffworkflow_backend/exceptions/api_error.py index 362bf3855..ee71e4530 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/exceptions/api_error.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/exceptions/api_error.py @@ -50,6 +50,11 @@ class ApiError(Exception): task_name: str | None = "" task_trace: list | None = field(default_factory=list) + # these are useful if the error response cannot be json but has to be something else + # such as returning content type 'text/event-stream' for the interstitial page + response_headers: dict | None = None + response_message: str | None = None + def __str__(self) -> str: """Instructions to print instance as a string.""" msg = "ApiError: % s. 
" % self.message @@ -302,4 +307,13 @@ def handle_exception(exception: Exception) -> flask.wrappers.Response: status_code=status_code, ) - return make_response(jsonify(api_exception), api_exception.status_code) + response_message = api_exception.response_message + if response_message is None: + response_message = jsonify(api_exception) + + error_response = make_response(response_message, api_exception.status_code) + if api_exception.response_headers is not None: + for header, value in api_exception.response_headers.items(): + error_response.headers[header] = value + + return error_response diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/routes/tasks_controller.py b/spiffworkflow-backend/src/spiffworkflow_backend/routes/tasks_controller.py index 8ee6ff7d7..44a954ceb 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/routes/tasks_controller.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/routes/tasks_controller.py @@ -368,7 +368,9 @@ def _render_instructions_for_end_user(task_model: TaskModel, extensions: dict | return "" -def _interstitial_stream(process_instance: ProcessInstanceModel) -> Generator[str, str | None, None]: +def _interstitial_stream( + process_instance: ProcessInstanceModel, execute_tasks: bool = True +) -> Generator[str, str | None, None]: def get_reportable_tasks() -> Any: return processor.bpmn_process_instance.get_tasks( TaskState.WAITING | TaskState.STARTED | TaskState.READY | TaskState.ERROR @@ -381,11 +383,6 @@ def _interstitial_stream(process_instance: ProcessInstanceModel) -> Generator[st extensions = TaskService.get_extensions_from_task_model(task_model) return _render_instructions_for_end_user(task_model, extensions) - def render_data(return_type: str, entity: ApiError | Task | ProcessInstanceModel) -> str: - return_hash: dict = {"type": return_type} - return_hash[return_type] = entity - return f"data: {current_app.json.dumps(return_hash)} \n\n" - processor = ProcessInstanceProcessor(process_instance) reported_ids = [] # A list of all the ids reported by this endpoint so far. tasks = get_reportable_tasks() @@ -399,28 +396,31 @@ def _interstitial_stream(process_instance: ProcessInstanceModel) -> Generator[st message=f"Failed to complete an automated task. Error was: {str(e)}", status_code=400, ) - yield render_data("error", api_error) + yield _render_data("error", api_error) raise e if instructions and spiff_task.id not in reported_ids: task = ProcessInstanceService.spiff_task_to_api_task(processor, spiff_task) task.properties = {"instructionsForEndUser": instructions} - yield render_data("task", task) + yield _render_data("task", task) reported_ids.append(spiff_task.id) if spiff_task.state == TaskState.READY: # do not do any processing if the instance is not currently active if process_instance.status not in ProcessInstanceModel.active_statuses(): - yield render_data("unrunnable_instance", process_instance) - return - try: - processor.do_engine_steps(execution_strategy_name="one_at_a_time") - processor.do_engine_steps(execution_strategy_name="run_until_user_message") - processor.save() # Fixme - maybe find a way not to do this on every loop? 
- except WorkflowTaskException as wfe: - api_error = ApiError.from_workflow_exception( - "engine_steps_error", "Failed to complete an automated task.", exp=wfe - ) - yield render_data("error", api_error) + yield _render_data("unrunnable_instance", process_instance) return + if execute_tasks: + try: + processor.do_engine_steps(execution_strategy_name="one_at_a_time") + processor.do_engine_steps(execution_strategy_name="run_until_user_message") + processor.save() # Fixme - maybe find a way not to do this on every loop? + except WorkflowTaskException as wfe: + api_error = ApiError.from_workflow_exception( + "engine_steps_error", "Failed to complete an automated task.", exp=wfe + ) + yield _render_data("error", api_error) + return + if execute_tasks is False: + break processor.refresh_waiting_tasks() ready_engine_task_count = get_ready_engine_step_count(processor.bpmn_process_instance) tasks = get_reportable_tasks() @@ -439,34 +439,71 @@ def _interstitial_stream(process_instance: ProcessInstanceModel) -> Generator[st message=f"Failed to complete an automated task. Error was: {str(e)}", status_code=400, ) - yield render_data("error", api_error) + yield _render_data("error", api_error) raise e task.properties = {"instructionsForEndUser": instructions} - yield render_data("task", task) + yield _render_data("task", task) def get_ready_engine_step_count(bpmn_process_instance: BpmnWorkflow) -> int: return len([t for t in bpmn_process_instance.get_tasks(TaskState.READY) if not t.task_spec.manual]) -def _dequeued_interstitial_stream(process_instance_id: int) -> Generator[str | None, str | None, None]: - process_instance = _find_process_instance_by_id_or_raise(process_instance_id) +def _dequeued_interstitial_stream( + process_instance_id: int, execute_tasks: bool = True +) -> Generator[str | None, str | None, None]: + try: + process_instance = _find_process_instance_by_id_or_raise(process_instance_id) + ProcessInstanceProcessor(process_instance) - # TODO: currently this just redirects back to home if the process has not been started - # need something better to show? - - if not ProcessInstanceQueueService.is_enqueued_to_run_in_the_future(process_instance): - with ProcessInstanceQueueService.dequeued(process_instance): - yield from _interstitial_stream(process_instance) + # TODO: currently this just redirects back to home if the process has not been started + # need something better to show? + if execute_tasks: + if not ProcessInstanceQueueService.is_enqueued_to_run_in_the_future(process_instance): + with ProcessInstanceQueueService.dequeued(process_instance): + yield from _interstitial_stream(process_instance, execute_tasks=execute_tasks) + else: + # no reason to get a lock if we are reading only + yield from _interstitial_stream(process_instance, execute_tasks=execute_tasks) + except Exception as ex: + # the stream_with_context method seems to swallow exceptions so also attempt to catch errors here + api_error = ApiError( + error_code="interstitial_error", + message=( + f"Received error trying to run process instance: {process_instance_id}. 
" + f"Error was: {ex.__class__.__name__}: {str(ex)}" + ), + status_code=500, + ) + yield _render_data("error", api_error) -def interstitial(process_instance_id: int) -> Response: +def interstitial(process_instance_id: int, execute_tasks: bool = True) -> Response: """A Server Side Events Stream for watching the execution of engine tasks.""" - return Response( - stream_with_context(_dequeued_interstitial_stream(process_instance_id)), - mimetype="text/event-stream", - headers={"X-Accel-Buffering": "no"}, - ) + try: + return Response( + stream_with_context(_dequeued_interstitial_stream(process_instance_id, execute_tasks=execute_tasks)), + mimetype="text/event-stream", + headers={"X-Accel-Buffering": "no"}, + ) + except Exception as ex: + api_error = ApiError( + error_code="interstitial_error", + message=( + f"Received error trying to run process instance: {process_instance_id}. " + f"Error was: {ex.__class__.__name__}: {str(ex)}" + ), + status_code=500, + response_headers={"Content-type": "text/event-stream"}, + ) + api_error.response_message = _render_data("error", api_error) + raise api_error from ex + + +def _render_data(return_type: str, entity: ApiError | Task | ProcessInstanceModel) -> str: + return_hash: dict = {"type": return_type} + return_hash[return_type] = entity + return f"data: {current_app.json.dumps(return_hash)} \n\n" def task_save_draft( diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/services/service_task_service.py b/spiffworkflow-backend/src/spiffworkflow_backend/services/service_task_service.py index b5e1fd957..6bd199266 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/services/service_task_service.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/services/service_task_service.py @@ -121,6 +121,7 @@ class ServiceTaskDelegate: error_response = parsed_response["error"] if isinstance(error_response, list | dict): error_response = json.dumps(parsed_response["error"]) + error += error_response if json_parse_error: error += "A critical component (The connector proxy) is not responding correctly." 
diff --git a/spiffworkflow-frontend/src/components/InstructionsForEndUser.tsx b/spiffworkflow-frontend/src/components/InstructionsForEndUser.tsx index 71e9a6561..ef4e243b9 100644 --- a/spiffworkflow-frontend/src/components/InstructionsForEndUser.tsx +++ b/spiffworkflow-frontend/src/components/InstructionsForEndUser.tsx @@ -1,19 +1,21 @@ -import React from 'react'; +import React, { useEffect, useState } from 'react'; // @ts-ignore import MDEditor from '@uiw/react-md-editor'; +import { Toggle } from '@carbon/react'; type OwnProps = { task: any; defaultMessage?: string; + allowCollapse?: boolean; }; export default function InstructionsForEndUser({ task, defaultMessage = '', + allowCollapse = false, }: OwnProps) { - if (!task) { - return null; - } + const [collapsed, setCollapsed] = useState(false); + const [collapsable, setCollapsable] = useState(false); let instructions = defaultMessage; let { properties } = task; if (!properties) { @@ -23,15 +25,80 @@ export default function InstructionsForEndUser({ if (instructionsForEndUser) { instructions = instructionsForEndUser; } + + const maxLineCount: number = 8; + const maxWordCount: number = 75; + + const lineCount = (arg: string) => { + return arg.split('\n').length; + }; + + const wordCount = (arg: string) => { + return arg.split(' ').length; + }; + + useEffect(() => { + if ( + allowCollapse && + (lineCount(instructions) >= maxLineCount || + wordCount(instructions) > maxWordCount) + ) { + setCollapsable(true); + setCollapsed(true); + } else { + setCollapsable(false); + setCollapsed(false); + } + }, [allowCollapse, instructions]); + + if (!task) { + return null; + } + + const toggleCollapse = () => { + setCollapsed(!collapsed); + }; + + const showCollapseToggle = () => { + if (collapsable) { + return ( + + ); + } + return null; + }; + + let instructionsShown = instructions; + if (collapsed) { + if (wordCount(instructions) > maxWordCount) { + instructionsShown = instructions + .split(' ') + .slice(0, maxWordCount) + .join(' '); + instructionsShown += '...'; + } else if (lineCount(instructions) > maxLineCount) { + instructionsShown = instructions.split('\n').slice(0, 5).join(' '); + instructionsShown += '...'; + } + } + return ( -
- {/* - https://www.npmjs.com/package/@uiw/react-md-editor switches to dark mode by default by respecting @media (prefers-color-scheme: dark) - This makes it look like our site is broken, so until the rest of the site supports dark mode, turn off dark mode for this component. - */} -
- +
+
+ {/* + https://www.npmjs.com/package/@uiw/react-md-editor switches to dark mode by default by respecting @media (prefers-color-scheme: dark) + This makes it look like our site is broken, so until the rest of the site supports dark mode, turn off dark mode for this component. + */} +
+ +
+ {showCollapseToggle()}
); } diff --git a/spiffworkflow-frontend/src/routes/MessageInstanceList.tsx b/spiffworkflow-frontend/src/components/MessageInstanceList.tsx similarity index 90% rename from spiffworkflow-frontend/src/routes/MessageInstanceList.tsx rename to spiffworkflow-frontend/src/components/MessageInstanceList.tsx index 67c47ff60..a9cc186a7 100644 --- a/spiffworkflow-frontend/src/routes/MessageInstanceList.tsx +++ b/spiffworkflow-frontend/src/components/MessageInstanceList.tsx @@ -3,23 +3,26 @@ import { useEffect, useState } from 'react'; import { ErrorOutline } from '@carbon/icons-react'; // @ts-ignore import { Table, Modal, Button } from '@carbon/react'; -import { Link, useParams, useSearchParams } from 'react-router-dom'; -import PaginationForTable from '../components/PaginationForTable'; -import ProcessBreadcrumb from '../components/ProcessBreadcrumb'; +import { Link, useSearchParams } from 'react-router-dom'; +import PaginationForTable from './PaginationForTable'; +import ProcessBreadcrumb from './ProcessBreadcrumb'; import { convertSecondsToFormattedDateTime, getPageInfoFromSearchParams, modifyProcessIdentifierForPathParam, } from '../helpers'; import HttpService from '../services/HttpService'; -import { FormatProcessModelDisplayName } from '../components/MiniComponents'; +import { FormatProcessModelDisplayName } from './MiniComponents'; import { MessageInstance } from '../interfaces'; -export default function MessageInstanceList() { - const params = useParams(); - const [searchParams] = useSearchParams(); +type OwnProps = { + processInstanceId?: number; +}; + +export default function MessageInstanceList({ processInstanceId }: OwnProps) { const [messageIntances, setMessageInstances] = useState([]); const [pagination, setPagination] = useState(null); + const [searchParams] = useSearchParams(); const [messageInstanceForModal, setMessageInstanceForModal] = useState(null); @@ -31,16 +34,15 @@ export default function MessageInstanceList() { }; const { page, perPage } = getPageInfoFromSearchParams(searchParams); let queryParamString = `per_page=${perPage}&page=${page}`; - if (searchParams.get('process_instance_id')) { - queryParamString += `&process_instance_id=${searchParams.get( - 'process_instance_id' - )}`; + if (processInstanceId) { + queryParamString += `&process_instance_id=${processInstanceId}`; } + HttpService.makeCallToBackend({ path: `/messages?${queryParamString}`, successCallback: setMessageInstanceListFromResult, }); - }, [searchParams, params]); + }, [processInstanceId, searchParams]); const handleCorrelationDisplayClose = () => { setMessageInstanceForModal(null); diff --git a/spiffworkflow-frontend/src/components/ProcessInstanceListTable.tsx b/spiffworkflow-frontend/src/components/ProcessInstanceListTable.tsx index bb2d09b4b..41b0e4cb9 100644 --- a/spiffworkflow-frontend/src/components/ProcessInstanceListTable.tsx +++ b/spiffworkflow-frontend/src/components/ProcessInstanceListTable.tsx @@ -1705,6 +1705,7 @@ export default function ProcessInstanceListTable({ sm={{ span: 1, offset: 3 }} md={{ span: 1, offset: 7 }} lg={{ span: 1, offset: 15 }} + style={{ textAlign: 'right' }} > - - - - - {processInstance.process_metadata && - processInstance.process_metadata.length > 0 ? 
( - - ) : null} - - - + {(processInstance.process_metadata || []).map( + (processInstanceMetadata) => ( + + + {processInstanceMetadata.key}: + + + {processInstanceMetadata.value} + + + ) + )} ); }; @@ -943,41 +910,6 @@ export default function ProcessInstanceShow({ variant }: OwnProps) { ); }; - const processInstanceMetadataArea = () => { - if ( - !processInstance || - (processInstance.process_metadata && - processInstance.process_metadata.length < 1) - ) { - return null; - } - const metadataComponents: any[] = []; - (processInstance.process_metadata || []).forEach( - (processInstanceMetadata: ProcessInstanceMetadata) => { - metadataComponents.push( - - - {processInstanceMetadata.key} - - - {processInstanceMetadata.value} - - - ); - } - ); - return ( - setShowProcessInstanceMetadata(false)} - > - {metadataComponents} - - ); - }; - const taskUpdateDisplayArea = () => { if (!taskToDisplay) { return null; @@ -1094,76 +1026,126 @@ export default function ProcessInstanceShow({ variant }: OwnProps) { ); }; - if (processInstance && (tasks || tasksCallHadError)) { + if (processInstance && (tasks || tasksCallHadError) && permissionsLoaded) { const processModelId = unModifyProcessIdentifierForPathParam( params.process_model_id ? params.process_model_id : '' ); + const getTabs = () => { + const canViewLogs = ability.can( + 'GET', + targetUris.processInstanceLogListPath + ); + const canViewMsgs = ability.can( + 'GET', + targetUris.messageInstanceListPath + ); + + const getMessageDisplay = () => { + if (canViewMsgs) { + return ; + } + return null; + }; + + return ( + + + Diagram + Milestones + Events + Messages + + + + +
+ + + + + + + + {getMessageDisplay()} + + + ); + }; + return ( <> - - -

- Process Instance Id: {processInstance.id} -

- {buttonIcons()} -
- -
-
- - - - - - {getInfoTag()} -
- {taskUpdateDisplayArea()} - {processDataDisplayArea()} - {processInstanceMetadataArea()} -
- {viewMostRecentStateComponent()} - - -
+
+ + +

+ Process Instance Id: {processInstance.id} +

+ {buttonIcons()} +
+ {getInfoTag()} + + + + + + + {taskUpdateDisplayArea()} + {processDataDisplayArea()} +
+ {viewMostRecentStateComponent()} +
+ {getTabs()} ); } diff --git a/spiffworkflow-frontend/src/routes/ProcessInterstitialPage.tsx b/spiffworkflow-frontend/src/routes/ProcessInterstitialPage.tsx index 7d633103d..7cd9b1ac3 100644 --- a/spiffworkflow-frontend/src/routes/ProcessInterstitialPage.tsx +++ b/spiffworkflow-frontend/src/routes/ProcessInterstitialPage.tsx @@ -16,7 +16,7 @@ export default function ProcessInterstitialPage({ variant }: OwnProps) { } return ( - <> +
- +
); } diff --git a/spiffworkflow-frontend/src/routes/ProcessModelShow.tsx b/spiffworkflow-frontend/src/routes/ProcessModelShow.tsx index f51c5085e..605d58d4e 100644 --- a/spiffworkflow-frontend/src/routes/ProcessModelShow.tsx +++ b/spiffworkflow-frontend/src/routes/ProcessModelShow.tsx @@ -70,7 +70,7 @@ export default function ProcessModelShow() { [targetUris.processModelShowPath]: ['PUT', 'DELETE'], [targetUris.processModelTestsPath]: ['POST'], [targetUris.processModelPublishPath]: ['POST'], - [targetUris.processInstanceListPath]: ['GET'], + [targetUris.processInstanceListForMePath]: ['POST'], [targetUris.processInstanceCreatePath]: ['POST'], [targetUris.processModelFileCreatePath]: ['POST', 'PUT', 'GET', 'DELETE'], }; @@ -616,7 +616,7 @@ export default function ProcessModelShow() { if (processModel) { return ( - <> +
{fileUploadModal()} {confirmOverwriteFileDialog()} {processModelFilesSection()} - + My Process Instances} filtersEnabled={false} @@ -714,7 +718,7 @@ export default function ProcessModelShow() { /> - +
); } return null; diff --git a/spiffworkflow-frontend/src/routes/TaskShow.tsx b/spiffworkflow-frontend/src/routes/TaskShow.tsx index b7b13aba3..999162dbe 100644 --- a/spiffworkflow-frontend/src/routes/TaskShow.tsx +++ b/spiffworkflow-frontend/src/routes/TaskShow.tsx @@ -502,7 +502,7 @@ export default function TaskShow() { } return ( -
+
{formElement()} -
+
); }
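For reference, a hypothetical client-side sketch of how the new execute_tasks query parameter (added in the api.yml hunk above) could be used to watch the interstitial stream in read-only mode. The endpoint path below is a placeholder, since the actual route is not shown in this patch, and the snippet uses the requests library rather than the frontend's event-stream code; treat every name here as an assumption.

import json
import requests

def watch_interstitial(base_url: str, token: str, process_instance_id: int) -> None:
    # placeholder path: the real route name is not visible in this patch
    url = f"{base_url}/v1.0/tasks/{process_instance_id}/interstitial"
    with requests.get(
        url,
        params={"execute_tasks": "false"},  # new query param: observe the stream without running ready tasks
        headers={"Authorization": f"Bearer {token}"},
        stream=True,
        timeout=60,
    ) as response:
        for raw_line in response.iter_lines(decode_unicode=True):
            if not raw_line or not raw_line.startswith("data: "):
                continue  # skip blank SSE separators and anything that is not a data frame
            event = json.loads(raw_line[len("data: "):])
            if event["type"] == "error":
                print("stream reported an error:", event["error"]["message"])
                break
            print("received event of type:", event["type"])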