diff --git a/spiffworkflow-backend/poetry.lock b/spiffworkflow-backend/poetry.lock
index c65195356..707c5b3c3 100644
--- a/spiffworkflow-backend/poetry.lock
+++ b/spiffworkflow-backend/poetry.lock
@@ -2989,7 +2989,18 @@ psycopg2 = [
{file = "psycopg2-2.9.4.tar.gz", hash = "sha256:d529926254e093a1b669f692a3aa50069bc71faf5b0ecd91686a78f62767d52f"},
]
pyasn1 = [
+ {file = "pyasn1-0.4.8-py2.4.egg", hash = "sha256:fec3e9d8e36808a28efb59b489e4528c10ad0f480e57dcc32b4de5c9d8c9fdf3"},
+ {file = "pyasn1-0.4.8-py2.5.egg", hash = "sha256:0458773cfe65b153891ac249bcf1b5f8f320b7c2ce462151f8fa74de8934becf"},
+ {file = "pyasn1-0.4.8-py2.6.egg", hash = "sha256:5c9414dcfede6e441f7e8f81b43b34e834731003427e5b09e4e00e3172a10f00"},
+ {file = "pyasn1-0.4.8-py2.7.egg", hash = "sha256:6e7545f1a61025a4e58bb336952c5061697da694db1cae97b116e9c46abcf7c8"},
{file = "pyasn1-0.4.8-py2.py3-none-any.whl", hash = "sha256:39c7e2ec30515947ff4e87fb6f456dfc6e84857d34be479c9d4a4ba4bf46aa5d"},
+ {file = "pyasn1-0.4.8-py3.1.egg", hash = "sha256:78fa6da68ed2727915c4767bb386ab32cdba863caa7dbe473eaae45f9959da86"},
+ {file = "pyasn1-0.4.8-py3.2.egg", hash = "sha256:08c3c53b75eaa48d71cf8c710312316392ed40899cb34710d092e96745a358b7"},
+ {file = "pyasn1-0.4.8-py3.3.egg", hash = "sha256:03840c999ba71680a131cfaee6fab142e1ed9bbd9c693e285cc6aca0d555e576"},
+ {file = "pyasn1-0.4.8-py3.4.egg", hash = "sha256:7ab8a544af125fb704feadb008c99a88805126fb525280b2270bb25cc1d78a12"},
+ {file = "pyasn1-0.4.8-py3.5.egg", hash = "sha256:e89bf84b5437b532b0803ba5c9a5e054d21fec423a89952a74f87fa2c9b7bce2"},
+ {file = "pyasn1-0.4.8-py3.6.egg", hash = "sha256:014c0e9976956a08139dc0712ae195324a75e142284d5f87f1a87ee1b068a359"},
+ {file = "pyasn1-0.4.8-py3.7.egg", hash = "sha256:99fcc3c8d804d1bc6d9a099921e39d827026409a58f2a720dcdb89374ea0c776"},
{file = "pyasn1-0.4.8.tar.gz", hash = "sha256:aef77c9fb94a3ac588e87841208bdec464471d9871bd5050a287cc9a475cd0ba"},
]
pycodestyle = [
diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/api.yml b/spiffworkflow-backend/src/spiffworkflow_backend/api.yml
index 9aaa1ddbd..65010a01b 100755
--- a/spiffworkflow-backend/src/spiffworkflow_backend/api.yml
+++ b/spiffworkflow-backend/src/spiffworkflow_backend/api.yml
@@ -1049,6 +1049,39 @@ paths:
schema:
$ref: "#/components/schemas/OkTrue"
+ /process-instance-reset/{modified_process_model_identifier}/{process_instance_id}/{spiff_step}:
+ parameters:
+ - name: modified_process_model_identifier
+ in: path
+ required: true
+ description: The modified process model id
+ schema:
+ type: string
+ - name: process_instance_id
+ in: path
+ required: true
+ description: The unique id of an existing process instance.
+ schema:
+ type: integer
+ - name: spiff_step
+        in: path
+        required: true
+        description: The spiff step to reset the process instance to
+ schema:
+ type: integer
+ post:
+ operationId: spiffworkflow_backend.routes.process_api_blueprint.process_instance_reset
+ summary: Reset a process instance to an earlier step
+ tags:
+ - Process Instances
+ responses:
+ "200":
+          description: Empty ok true response on successful reset.
+ content:
+ application/json:
+ schema:
+ $ref: "#/components/schemas/OkTrue"
+
/process-instances/reports:
parameters:
- name: page
@@ -1472,6 +1505,66 @@ paths:
schema:
$ref: "#/components/schemas/Workflow"
+ /send-event/{modified_process_model_identifier}/{process_instance_id}:
+ parameters:
+ - name: modified_process_model_identifier
+ in: path
+ required: true
+ description: The modified id of an existing process model
+ schema:
+ type: string
+ - name: process_instance_id
+ in: path
+ required: true
+ description: The unique id of the process instance
+ schema:
+ type: string
+ post:
+ operationId: spiffworkflow_backend.routes.process_api_blueprint.send_bpmn_event
+ summary: Send a BPMN event to the process
+ tags:
+ - Process Instances
+ responses:
+ "200":
+ description: Event Sent Successfully
+ content:
+ application/json:
+ schema:
+ $ref: "#/components/schemas/Workflow"
+
+ /task-complete/{modified_process_model_identifier}/{process_instance_id}/{task_id}:
+ parameters:
+ - name: modified_process_model_identifier
+ in: path
+ required: true
+ description: The modified id of an existing process model
+ schema:
+ type: string
+ - name: process_instance_id
+ in: path
+ required: true
+ description: The unique id of the process instance
+ schema:
+ type: string
+ - name: task_id
+ in: path
+ required: true
+ description: The unique id of the task.
+ schema:
+ type: string
+ post:
+ operationId: spiffworkflow_backend.routes.process_api_blueprint.manual_complete_task
+ summary: Mark a task complete without executing it
+ tags:
+ - Process Instances
+ responses:
+ "200":
+          description: Task completed successfully
+ content:
+ application/json:
+ schema:
+ $ref: "#/components/schemas/Workflow"
+
/service-tasks:
get:
tags:
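A quick sketch of how a client could exercise the new send-event and task-complete routes once this api.yml change lands. The host, port, bearer token, and ids are placeholders; the event body is copied from the integration test below, and the modified model identifier uses `:` in place of `/` as elsewhere in the API.

```python
import requests

BASE = "http://localhost:7000/v1.0"            # placeholder backend URL
HEADERS = {"Authorization": "Bearer <token>"}  # placeholder auth header

model = "test_group:process_navigation"  # modified process model identifier
instance_id = 42                         # placeholder process instance id

# Deliver a BPMN message event to a waiting instance (send_bpmn_event).
event = {
    "correlation_properties": [],
    "expression": None,
    "external": True,
    "internal": False,
    "payload": {"message": "message 1"},
    "name": "Message 1",
    "typename": "MessageEventDefinition",
}
response = requests.post(
    f"{BASE}/send-event/{model}/{instance_id}", json=event, headers=HEADERS
)
print(response.json()["status"])

# Mark a READY task complete; {"execute": False} skips its underlying behavior.
task_id = "<task uuid>"  # placeholder
requests.post(
    f"{BASE}/task-complete/{model}/{instance_id}/{task_id}",
    json={"execute": False},
    headers=HEADERS,
)
```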
diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/models/task.py b/spiffworkflow-backend/src/spiffworkflow_backend/models/task.py
index 60deda842..896c79e13 100644
--- a/spiffworkflow-backend/src/spiffworkflow_backend/models/task.py
+++ b/spiffworkflow-backend/src/spiffworkflow_backend/models/task.py
@@ -118,6 +118,7 @@ class Task:
form_schema: Union[str, None] = None,
form_ui_schema: Union[str, None] = None,
parent: Optional[str] = None,
+ event_definition: Union[dict[str, Any], None] = None,
call_activity_process_identifier: Optional[str] = None,
):
"""__init__."""
@@ -130,6 +131,7 @@ class Task:
self.documentation = documentation
self.lane = lane
self.parent = parent
+ self.event_definition = event_definition
self.call_activity_process_identifier = call_activity_process_identifier
self.data = data
@@ -189,6 +191,7 @@ class Task:
"form_schema": self.form_schema,
"form_ui_schema": self.form_ui_schema,
"parent": self.parent,
+ "event_definition": self.event_definition,
"call_activity_process_identifier": self.call_activity_process_identifier,
}
@@ -290,6 +293,7 @@ class TaskSchema(Schema):
"process_instance_id",
"form_schema",
"form_ui_schema",
+ "event_definition",
]
multi_instance_type = EnumField(MultiInstanceType)
diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/routes/process_api_blueprint.py b/spiffworkflow-backend/src/spiffworkflow_backend/routes/process_api_blueprint.py
index 6518b7cf8..7ad041d23 100644
--- a/spiffworkflow-backend/src/spiffworkflow_backend/routes/process_api_blueprint.py
+++ b/spiffworkflow-backend/src/spiffworkflow_backend/routes/process_api_blueprint.py
@@ -26,11 +26,15 @@ from spiffworkflow_backend.models.process_instance import (
from spiffworkflow_backend.models.process_model import ProcessModelInfo
from spiffworkflow_backend.models.spec_reference import SpecReferenceCache
from spiffworkflow_backend.models.spec_reference import SpecReferenceSchema
+from spiffworkflow_backend.models.spiff_step_details import SpiffStepDetailsModel
from spiffworkflow_backend.services.authorization_service import AuthorizationService
from spiffworkflow_backend.services.git_service import GitService
from spiffworkflow_backend.services.process_instance_processor import (
ProcessInstanceProcessor,
)
+from spiffworkflow_backend.services.process_instance_service import (
+ ProcessInstanceService,
+)
from spiffworkflow_backend.services.process_model_service import ProcessModelService
@@ -87,6 +91,46 @@ def process_list() -> Any:
return SpecReferenceSchema(many=True).dump(references)
+def process_instance_reset(
+ process_instance_id: int,
+ modified_process_model_identifier: str,
+ spiff_step: int = 0,
+) -> flask.wrappers.Response:
+ """Process_instance_reset."""
+ process_instance = ProcessInstanceService().get_process_instance(
+ process_instance_id
+ )
+ step_detail = (
+ db.session.query(SpiffStepDetailsModel)
+ .filter(
+ SpiffStepDetailsModel.process_instance_id == process_instance.id,
+ SpiffStepDetailsModel.spiff_step == spiff_step,
+ )
+ .first()
+ )
+ if step_detail is not None and process_instance.bpmn_json is not None:
+ bpmn_json = json.loads(process_instance.bpmn_json)
+ bpmn_json["tasks"] = step_detail.task_json["tasks"]
+ bpmn_json["subprocesses"] = step_detail.task_json["subprocesses"]
+ process_instance.bpmn_json = json.dumps(bpmn_json)
+
+ db.session.add(process_instance)
+ try:
+ db.session.commit()
+ except Exception as e:
+ db.session.rollback()
+ raise ApiError(
+ error_code="reset_process_instance_error",
+ message=f"Could not update the Instance. Original error is {e}",
+ ) from e
+
+ return Response(
+ json.dumps(ProcessInstanceModelSchema().dump(process_instance)),
+ status=200,
+ mimetype="application/json",
+ )
+
+
def process_data_show(
process_instance_id: int,
process_data_identifier: str,
@@ -195,6 +239,107 @@ def _get_required_parameter_or_raise(parameter: str, post_body: dict[str, Any])
return return_value
+def update_task_data(
+ process_instance_id: str,
+ modified_process_model_identifier: str,
+ task_id: str,
+ body: Dict,
+) -> Response:
+ """Update task data."""
+ process_instance = ProcessInstanceModel.query.filter(
+ ProcessInstanceModel.id == int(process_instance_id)
+ ).first()
+ if process_instance:
+ if process_instance.status != "suspended":
+ raise ProcessInstanceTaskDataCannotBeUpdatedError(
+                f"The process instance needs to be suspended to update the task-data. It is currently: {process_instance.status}"
+ )
+
+ process_instance_bpmn_json_dict = json.loads(process_instance.bpmn_json)
+ if "new_task_data" in body:
+ new_task_data_str: str = body["new_task_data"]
+ new_task_data_dict = json.loads(new_task_data_str)
+ if task_id in process_instance_bpmn_json_dict["tasks"]:
+ process_instance_bpmn_json_dict["tasks"][task_id][
+ "data"
+ ] = new_task_data_dict
+ process_instance.bpmn_json = json.dumps(process_instance_bpmn_json_dict)
+ db.session.add(process_instance)
+ try:
+ db.session.commit()
+ except Exception as e:
+ db.session.rollback()
+ raise ApiError(
+ error_code="update_task_data_error",
+ message=f"Could not update the Instance. Original error is {e}",
+ ) from e
+ else:
+ raise ApiError(
+ error_code="update_task_data_error",
+ message=f"Could not find Task: {task_id} in Instance: {process_instance_id}.",
+ )
+ else:
+ raise ApiError(
+ error_code="update_task_data_error",
+ message=f"Could not update task data for Instance: {process_instance_id}, and Task: {task_id}.",
+ )
+ return Response(
+ json.dumps(ProcessInstanceModelSchema().dump(process_instance)),
+ status=200,
+ mimetype="application/json",
+ )
+
+
+def send_bpmn_event(
+ modified_process_model_identifier: str,
+ process_instance_id: str,
+ body: Dict,
+) -> Response:
+ """Send a bpmn event to a workflow."""
+ process_instance = ProcessInstanceModel.query.filter(
+ ProcessInstanceModel.id == int(process_instance_id)
+ ).first()
+ if process_instance:
+ processor = ProcessInstanceProcessor(process_instance)
+ processor.send_bpmn_event(body)
+ else:
+ raise ApiError(
+ error_code="send_bpmn_event_error",
+ message=f"Could not send event to Instance: {process_instance_id}",
+ )
+ return Response(
+ json.dumps(ProcessInstanceModelSchema().dump(process_instance)),
+ status=200,
+ mimetype="application/json",
+ )
+
+
+def manual_complete_task(
+ modified_process_model_identifier: str,
+ process_instance_id: str,
+ task_id: str,
+ body: Dict,
+) -> Response:
+ """Mark a task complete without executing it."""
+ execute = body.get("execute", True)
+ process_instance = ProcessInstanceModel.query.filter(
+ ProcessInstanceModel.id == int(process_instance_id)
+ ).first()
+ if process_instance:
+ processor = ProcessInstanceProcessor(process_instance)
+ processor.manual_complete_task(task_id, execute)
+ else:
+ raise ApiError(
+ error_code="complete_task",
+ message=f"Could not complete Task {task_id} in Instance {process_instance_id}",
+ )
+ return Response(
+ json.dumps(ProcessInstanceModelSchema().dump(process_instance)),
+ status=200,
+ mimetype="application/json",
+ )
+
+
def _commit_and_push_to_git(message: str) -> None:
"""Commit_and_push_to_git."""
if current_app.config["GIT_COMMIT_ON_SAVE"]:
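The heart of process_instance_reset above is a pure data transformation: the tasks and subprocesses saved in the chosen SpiffStepDetailsModel row overwrite the ones in the instance's current bpmn_json. A minimal standalone sketch of that swap (the helper name is illustrative, not part of the codebase):

```python
import json
from typing import Any


def rewind_bpmn_json(bpmn_json_str: str, step_task_json: dict[str, Any]) -> str:
    """Replace the serialized tasks/subprocesses with an earlier step's snapshot.

    Mirrors the swap done in process_instance_reset; everything else in the
    serialized workflow (spec, data, etc.) is left untouched.
    """
    bpmn_json = json.loads(bpmn_json_str)
    bpmn_json["tasks"] = step_task_json["tasks"]
    bpmn_json["subprocesses"] = step_task_json["subprocesses"]
    return json.dumps(bpmn_json)


# Usage sketch: step_detail.task_json is the dict stored per spiff step.
# process_instance.bpmn_json = rewind_bpmn_json(
#     process_instance.bpmn_json, step_detail.task_json
# )
```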
diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/routes/process_instances_controller.py b/spiffworkflow-backend/src/spiffworkflow_backend/routes/process_instances_controller.py
index 4f13f46ea..b93bcecd2 100644
--- a/spiffworkflow-backend/src/spiffworkflow_backend/routes/process_instances_controller.py
+++ b/spiffworkflow-backend/src/spiffworkflow_backend/routes/process_instances_controller.py
@@ -531,7 +531,7 @@ def process_instance_task_list(
step_detail = (
db.session.query(SpiffStepDetailsModel)
.filter(
- SpiffStepDetailsModel.process_instance.id == process_instance.id,
+ SpiffStepDetailsModel.process_instance_id == process_instance.id,
SpiffStepDetailsModel.spiff_step == spiff_step,
)
.first()
@@ -552,7 +552,7 @@ def process_instance_task_list(
tasks = []
for spiff_task in spiff_tasks:
- task = ProcessInstanceService.spiff_task_to_api_task(spiff_task)
+ task = ProcessInstanceService.spiff_task_to_api_task(processor, spiff_task)
if get_task_data:
task.data = spiff_task.data
tasks.append(task)
diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/routes/tasks_controller.py b/spiffworkflow-backend/src/spiffworkflow_backend/routes/tasks_controller.py
index 6d0c9cca4..c80da91f3 100644
--- a/spiffworkflow-backend/src/spiffworkflow_backend/routes/tasks_controller.py
+++ b/spiffworkflow-backend/src/spiffworkflow_backend/routes/tasks_controller.py
@@ -158,7 +158,8 @@ def task_show(process_instance_id: int, task_id: str) -> flask.wrappers.Response
form_schema_file_name = properties["formJsonSchemaFilename"]
if "formUiSchemaFilename" in properties:
form_ui_schema_file_name = properties["formUiSchemaFilename"]
- task = ProcessInstanceService.spiff_task_to_api_task(spiff_task)
+ processor = ProcessInstanceProcessor(process_instance)
+ task = ProcessInstanceService.spiff_task_to_api_task(processor, spiff_task)
task.data = spiff_task.data
task.process_model_display_name = process_model.display_name
task.process_model_identifier = process_model.id
diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/services/process_instance_processor.py b/spiffworkflow-backend/src/spiffworkflow_backend/services/process_instance_processor.py
index 1e548a766..f9e6c6207 100644
--- a/spiffworkflow-backend/src/spiffworkflow_backend/services/process_instance_processor.py
+++ b/spiffworkflow-backend/src/spiffworkflow_backend/services/process_instance_processor.py
@@ -17,6 +17,7 @@ from typing import Optional
from typing import Tuple
from typing import TypedDict
from typing import Union
+from uuid import UUID
import dateparser
import pytz
@@ -43,6 +44,9 @@ from SpiffWorkflow.spiff.serializer.task_spec_converters import (
CallActivityTaskConverter,
)
from SpiffWorkflow.spiff.serializer.task_spec_converters import EndEventConverter
+from SpiffWorkflow.spiff.serializer.task_spec_converters import (
+ EventBasedGatewayConverter,
+)
from SpiffWorkflow.spiff.serializer.task_spec_converters import (
IntermediateCatchEventConverter,
)
@@ -265,6 +269,7 @@ class ProcessInstanceProcessor:
EndEventConverter,
IntermediateCatchEventConverter,
IntermediateThrowEventConverter,
+ EventBasedGatewayConverter,
ManualTaskConverter,
NoneTaskConverter,
ReceiveTaskConverter,
@@ -278,6 +283,7 @@ class ProcessInstanceProcessor:
]
)
_serializer = BpmnWorkflowSerializer(wf_spec_converter, version=SERIALIZER_VERSION)
+ _event_serializer = EventBasedGatewayConverter()
PROCESS_INSTANCE_ID_KEY = "process_instance_id"
VALIDATION_PROCESS_KEY = "validate_only"
@@ -616,7 +622,7 @@ class ProcessInstanceProcessor:
db.session.add(pim)
db.session.commit()
- def save(self) -> None:
+ def _save(self) -> None:
"""Saves the current state of this processor to the database."""
self.process_instance_model.bpmn_json = self.serialize()
@@ -638,6 +644,9 @@ class ProcessInstanceProcessor:
db.session.add(self.process_instance_model)
db.session.commit()
+ def save(self) -> None:
+        """Saves the current state of this processor and updates the related human task records."""
+ self._save()
human_tasks = HumanTaskModel.query.filter_by(
process_instance_id=self.process_instance_model.id
).all()
@@ -706,6 +715,44 @@ class ProcessInstanceProcessor:
db.session.add(at)
db.session.commit()
+ def serialize_task_spec(self, task_spec: SpiffTask) -> Any:
+ """Get a serialized version of a task spec."""
+        # The argument is NOT actually a SpiffTask; it is the task spec attached to a
+        # SpiffTask, even though mypy accepts the SpiffTask annotation.
+ return self._serializer.spec_converter.convert(task_spec)
+
+ def send_bpmn_event(self, event_data: dict[str, Any]) -> None:
+ """Send an event to the workflow."""
+ payload = event_data.pop("payload", None)
+ event_definition = self._event_serializer.restore(event_data)
+ if payload is not None:
+ event_definition.payload = payload
+ current_app.logger.info(
+ f"Event of type {event_definition.event_type} sent to process instance {self.process_instance_model.id}"
+ )
+ self.bpmn_process_instance.catch(event_definition)
+ self.do_engine_steps(save=True)
+
+ def manual_complete_task(self, task_id: str, execute: bool) -> None:
+        """Mark the task complete, optionally executing it."""
+ spiff_task = self.bpmn_process_instance.get_task(UUID(task_id))
+ if execute:
+ current_app.logger.info(
+ f"Manually executing Task {spiff_task.task_spec.name} of process instance {self.process_instance_model.id}"
+ )
+ spiff_task.complete()
+ else:
+ current_app.logger.info(
+ f"Skipping Task {spiff_task.task_spec.name} of process instance {self.process_instance_model.id}"
+ )
+ spiff_task._set_state(TaskState.COMPLETED)
+ for child in spiff_task.children:
+ child.task_spec._update(child)
+ self.bpmn_process_instance.last_task = spiff_task
+ self._save()
+ # Saving the workflow seems to reset the status
+ self.suspend()
+
@staticmethod
def get_parser() -> MyCustomParser:
"""Get_parser."""
diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/services/process_instance_service.py b/spiffworkflow-backend/src/spiffworkflow_backend/services/process_instance_service.py
index 0dec5e44d..c97222893 100644
--- a/spiffworkflow-backend/src/spiffworkflow_backend/services/process_instance_service.py
+++ b/spiffworkflow-backend/src/spiffworkflow_backend/services/process_instance_service.py
@@ -125,7 +125,7 @@ class ProcessInstanceService:
if next_task_trying_again is not None:
process_instance_api.next_task = (
ProcessInstanceService.spiff_task_to_api_task(
- next_task_trying_again, add_docs_and_forms=True
+ processor, next_task_trying_again, add_docs_and_forms=True
)
)
@@ -281,7 +281,9 @@ class ProcessInstanceService:
@staticmethod
def spiff_task_to_api_task(
- spiff_task: SpiffTask, add_docs_and_forms: bool = False
+ processor: ProcessInstanceProcessor,
+ spiff_task: SpiffTask,
+ add_docs_and_forms: bool = False,
) -> Task:
"""Spiff_task_to_api_task."""
task_type = spiff_task.task_spec.spec_type
@@ -315,6 +317,8 @@ class ProcessInstanceService:
if spiff_task.parent:
parent_id = spiff_task.parent.id
+ serialized_task_spec = processor.serialize_task_spec(spiff_task.task_spec)
+
task = Task(
spiff_task.id,
spiff_task.task_spec.name,
@@ -328,6 +332,7 @@ class ProcessInstanceService:
process_identifier=spiff_task.task_spec._wf_spec.name,
properties=props,
parent=parent_id,
+ event_definition=serialized_task_spec.get("event_definition"),
call_activity_process_identifier=call_activity_process_identifier,
)
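With the processor passed in, spiff_task_to_api_task can attach the serialized task spec's event_definition to the API task, which is what the frontend's getEvents helper reads. For the event-based gateway in the test model, the relevant part of that structure looks roughly like the sketch below; the inner definitions match the payloads used in the integration tests, while the surrounding serializer fields (omitted) depend on the SpiffWorkflow version.

```python
# Rough shape of task.event_definition for an event-based gateway (illustrative only).
event_definition = {
    # ...other serializer fields omitted...
    "event_definitions": [
        {
            "typename": "MessageEventDefinition",
            "name": "Message 1",
            "correlation_properties": [],
            "expression": None,
            "external": True,
            "internal": False,
        },
        {
            "typename": "TimerEventDefinition",
            "label": "Event_0e4owa3",
            "dateTime": "timedelta(hours=1)",
            "external": True,
            "internal": True,
        },
    ],
}

# The frontend flattens this via getEvents(task), attaches a payload for message
# definitions, and posts the chosen definition back to /send-event/{model}/{instance}.
```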
diff --git a/spiffworkflow-backend/tests/data/process_navigation/process_navigation.bpmn b/spiffworkflow-backend/tests/data/process_navigation/process_navigation.bpmn
new file mode 100644
index 000000000..9f2f26bf4
--- /dev/null
+++ b/spiffworkflow-backend/tests/data/process_navigation/process_navigation.bpmn
@@ -0,0 +1,137 @@
+<!-- [The XML markup of this new file did not survive extraction; only text content
+     remains. Recoverable structure of process_navigation.bpmn: a start event
+     (outgoing Flow_1l15rbh) feeds an event-based gateway with three outgoing flows
+     (Flow_0d35i06, Flow_0tzaigt, Flow_1vld4r2); two message catch events
+     (Flow_0d35i06 -> Flow_1w3n49n and Flow_0tzaigt -> Flow_1q47ol8) each store the
+     received message in `result` and lead to end events; a timer catch event on
+     Flow_1vld4r2 -> Flow_13ai5vv fires after timedelta(hours=1) and is followed by
+     a manual task ("Click the button.") and an end event via Flow_1vwnf3n. The
+     remainder of the file is BPMN diagram (DI) layout.] -->
diff --git a/spiffworkflow-backend/tests/spiffworkflow_backend/integration/test_process_api.py b/spiffworkflow-backend/tests/spiffworkflow_backend/integration/test_process_api.py
index abcc4715e..b9c0f1b72 100644
--- a/spiffworkflow-backend/tests/spiffworkflow_backend/integration/test_process_api.py
+++ b/spiffworkflow-backend/tests/spiffworkflow_backend/integration/test_process_api.py
@@ -4,6 +4,7 @@ import json
import os
import time
from typing import Any
+from typing import Dict
import pytest
from flask.app import Flask
@@ -2537,6 +2538,148 @@ class TestProcessApi(BaseTest):
print("test_script_unit_test_run")
+ def test_send_event(
+ self,
+ app: Flask,
+ client: FlaskClient,
+ with_db_and_bpmn_file_cleanup: None,
+ with_super_admin_user: UserModel,
+ ) -> None:
+        """Test_send_event."""
+ process_group_id = "test_group"
+ process_model_id = "process_navigation"
+ bpmn_file_name = "process_navigation.bpmn"
+ bpmn_file_location = "process_navigation"
+ process_model_identifier = self.create_group_and_model_with_bpmn(
+ client=client,
+ user=with_super_admin_user,
+ process_group_id=process_group_id,
+ process_model_id=process_model_id,
+ bpmn_file_name=bpmn_file_name,
+ bpmn_file_location=bpmn_file_location,
+ )
+
+ bpmn_file_data_bytes = self.get_test_data_file_contents(
+ bpmn_file_name, bpmn_file_location
+ )
+ self.create_spec_file(
+ client=client,
+ process_model_id=process_model_identifier,
+ process_model_location=process_model_identifier,
+ file_name=bpmn_file_name,
+ file_data=bpmn_file_data_bytes,
+ user=with_super_admin_user,
+ )
+
+ headers = self.logged_in_headers(with_super_admin_user)
+ response = self.create_process_instance_from_process_model_id_with_api(
+ client, process_model_identifier, headers
+ )
+ process_instance_id = response.json["id"]
+
+ client.post(
+ f"/v1.0/process-instances/{self.modify_process_identifier_for_path_param(process_model_identifier)}/{process_instance_id}/run",
+ headers=self.logged_in_headers(with_super_admin_user),
+ )
+
+        # The explicit Dict annotation is needed here; without it mypy infers a
+        # totally irrelevant type for this data.
+ data: Dict = {
+ "correlation_properties": [],
+ "expression": None,
+ "external": True,
+ "internal": False,
+ "payload": {"message": "message 1"},
+ "name": "Message 1",
+ "typename": "MessageEventDefinition",
+ }
+ response = client.post(
+ f"/v1.0/send-event/{self.modify_process_identifier_for_path_param(process_model_identifier)}/{process_instance_id}",
+ headers=self.logged_in_headers(with_super_admin_user),
+ content_type="application/json",
+ data=json.dumps(data),
+ )
+ assert response.json["status"] == "complete"
+
+ response = client.get(
+ f"/v1.0/task-data/{self.modify_process_identifier_for_path_param(process_model_identifier)}/{process_instance_id}?all_tasks=true",
+ headers=self.logged_in_headers(with_super_admin_user),
+ )
+ assert response.status_code == 200
+ end = next(task for task in response.json if task["name"] == "End")
+ assert end["data"]["result"] == {"message": "message 1"}
+
+ def test_manual_complete_task(
+ self,
+ app: Flask,
+ client: FlaskClient,
+ with_db_and_bpmn_file_cleanup: None,
+ with_super_admin_user: UserModel,
+ ) -> None:
+        """Test_manual_complete_task."""
+ process_group_id = "test_group"
+ process_model_id = "process_navigation"
+ bpmn_file_name = "process_navigation.bpmn"
+ bpmn_file_location = "process_navigation"
+ process_model_identifier = self.create_group_and_model_with_bpmn(
+ client=client,
+ user=with_super_admin_user,
+ process_group_id=process_group_id,
+ process_model_id=process_model_id,
+ bpmn_file_name=bpmn_file_name,
+ bpmn_file_location=bpmn_file_location,
+ )
+
+ bpmn_file_data_bytes = self.get_test_data_file_contents(
+ bpmn_file_name, bpmn_file_location
+ )
+ self.create_spec_file(
+ client=client,
+ process_model_id=process_model_identifier,
+ process_model_location=process_model_identifier,
+ file_name=bpmn_file_name,
+ file_data=bpmn_file_data_bytes,
+ user=with_super_admin_user,
+ )
+
+ headers = self.logged_in_headers(with_super_admin_user)
+ response = self.create_process_instance_from_process_model_id_with_api(
+ client, process_model_identifier, headers
+ )
+ process_instance_id = response.json["id"]
+
+ client.post(
+ f"/v1.0/process-instances/{self.modify_process_identifier_for_path_param(process_model_identifier)}/{process_instance_id}/run",
+ headers=self.logged_in_headers(with_super_admin_user),
+ )
+
+ data = {
+ "dateTime": "timedelta(hours=1)",
+ "external": True,
+ "internal": True,
+ "label": "Event_0e4owa3",
+ "typename": "TimerEventDefinition",
+ }
+ response = client.post(
+ f"/v1.0/send-event/{self.modify_process_identifier_for_path_param(process_model_identifier)}/{process_instance_id}",
+ headers=self.logged_in_headers(with_super_admin_user),
+ content_type="application/json",
+ data=json.dumps(data),
+ )
+
+ response = client.get(
+ f"/v1.0/task-data/{self.modify_process_identifier_for_path_param(process_model_identifier)}/{process_instance_id}",
+ headers=self.logged_in_headers(with_super_admin_user),
+ )
+ assert len(response.json) == 1
+ task = response.json[0]
+
+ response = client.post(
+ f"/v1.0/task-complete/{self.modify_process_identifier_for_path_param(process_model_identifier)}/{process_instance_id}/{task['id']}",
+ headers=self.logged_in_headers(with_super_admin_user),
+ content_type="application/json",
+ )
+ assert response.json["status"] == "suspended"
+
def setup_initial_groups_for_move_tests(
self, client: FlaskClient, with_super_admin_user: UserModel
) -> None:
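The reset endpoint added in this change is not covered by a test here; a hypothetical client flow, mirroring how the frontend appends the current spiff step to processInstanceResetPath, could look like this (host, token, and ids are placeholders):

```python
import requests

BASE = "http://localhost:7000/v1.0"
HEADERS = {"Authorization": "Bearer <token>"}
model = "test_group:process_navigation"  # modified process model identifier
instance_id = 42
earlier_spiff_step = 2  # a step recorded in SpiffStepDetailsModel

# Suspend first, since resetting is intended for a suspended instance.
requests.post(
    f"{BASE}/process-instance-suspend/{model}/{instance_id}", headers=HEADERS
)

# Rewind the instance's task state to the chosen spiff step.
response = requests.post(
    f"{BASE}/process-instance-reset/{model}/{instance_id}/{earlier_spiff_step}",
    headers=HEADERS,
)
assert response.status_code == 200  # body is the serialized process instance
```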
diff --git a/spiffworkflow-frontend/src/hooks/UriListForPermissions.tsx b/spiffworkflow-frontend/src/hooks/UriListForPermissions.tsx
index 574eb4e9e..f8e5f07f8 100644
--- a/spiffworkflow-frontend/src/hooks/UriListForPermissions.tsx
+++ b/spiffworkflow-frontend/src/hooks/UriListForPermissions.tsx
@@ -16,7 +16,10 @@ export const useUriListForPermissions = () => {
processInstanceReportListPath: '/v1.0/process-instances/reports',
processInstanceResumePath: `/v1.0/process-instance-resume/${params.process_model_id}/${params.process_instance_id}`,
processInstanceSuspendPath: `/v1.0/process-instance-suspend/${params.process_model_id}/${params.process_instance_id}`,
+ processInstanceResetPath: `/v1.0/process-instance-reset/${params.process_model_id}/${params.process_instance_id}`,
processInstanceTaskListDataPath: `/v1.0/task-data/${params.process_model_id}/${params.process_instance_id}`,
+ processInstanceSendEventPath: `/v1.0/send-event/${params.process_model_id}/${params.process_instance_id}`,
+    processInstanceCompleteTaskPath: `/v1.0/task-complete/${params.process_model_id}/${params.process_instance_id}`,
processInstanceTaskListPath: `/v1.0/process-instances/${params.process_model_id}/${params.process_instance_id}/task-info`,
processInstanceTaskListForMePath: `/v1.0/process-instances/for-me/${params.process_model_id}/${params.process_instance_id}/task-info`,
processInstanceTerminatePath: `/v1.0/process-instance-terminate/${params.process_model_id}/${params.process_instance_id}`,
diff --git a/spiffworkflow-frontend/src/routes/ProcessInstanceShow.tsx b/spiffworkflow-frontend/src/routes/ProcessInstanceShow.tsx
index 6047393c5..678ebdf2a 100644
--- a/spiffworkflow-frontend/src/routes/ProcessInstanceShow.tsx
+++ b/spiffworkflow-frontend/src/routes/ProcessInstanceShow.tsx
@@ -25,6 +25,7 @@ import {
ButtonSet,
Tag,
Modal,
+ Dropdown,
Stack,
// @ts-ignore
} from '@carbon/react';
@@ -66,6 +67,11 @@ export default function ProcessInstanceShow({ variant }: OwnProps) {
const [processDataToDisplay, setProcessDataToDisplay] =
useState(null);
const [editingTaskData, setEditingTaskData] = useState(false);
+ const [selectingEvent, setSelectingEvent] = useState(false);
+ const [eventToSend, setEventToSend] = useState({});
+ const [eventPayload, setEventPayload] = useState('{}');
+ const [eventTextEditorEnabled, setEventTextEditorEnabled] =
+ useState(false);
const setErrorObject = (useContext as any)(ErrorContext)[1];
@@ -84,10 +90,13 @@ export default function ProcessInstanceShow({ variant }: OwnProps) {
[`${targetUris.processInstanceResumePath}`]: ['POST'],
[`${targetUris.processInstanceSuspendPath}`]: ['POST'],
[`${targetUris.processInstanceTerminatePath}`]: ['POST'],
+ [targetUris.processInstanceResetPath]: ['POST'],
[targetUris.messageInstanceListPath]: ['GET'],
[targetUris.processInstanceActionPath]: ['DELETE'],
[targetUris.processInstanceLogListPath]: ['GET'],
[targetUris.processInstanceTaskListDataPath]: ['GET', 'PUT'],
+ [targetUris.processInstanceSendEventPath]: ['POST'],
+ [targetUris.processInstanceCompleteTaskPath]: ['POST'],
[targetUris.processModelShowPath]: ['PUT'],
[taskListPath]: ['GET'],
};
@@ -253,6 +262,14 @@ export default function ProcessInstanceShow({ variant }: OwnProps) {
return spiffStepLink(, 1);
};
+ const resetProcessInstance = () => {
+ HttpService.makeCallToBackend({
+ path: `${targetUris.processInstanceResetPath}/${currentSpiffStep()}`,
+ successCallback: refreshPage,
+ httpMethod: 'POST',
+ });
+ };
+
const getInfoTag = () => {
if (!processInstance) {
return null;
@@ -508,9 +525,62 @@ export default function ProcessInstanceShow({ variant }: OwnProps) {
);
};
- const cancelEditingTaskData = () => {
+ const canSendEvent = (task: any) => {
+ // We actually could allow this for any waiting events
+ const taskTypes = ['Event Based Gateway'];
+ return (
+ processInstance &&
+ processInstance.status === 'waiting' &&
+ ability.can('POST', targetUris.processInstanceSendEventPath) &&
+ taskTypes.filter((t) => t === task.type).length > 0 &&
+ task.state === 'WAITING' &&
+ showingLastSpiffStep()
+ );
+ };
+
+ const canCompleteTask = (task: any) => {
+ return (
+ processInstance &&
+ processInstance.status === 'suspended' &&
+ ability.can('POST', targetUris.processInstanceCompleteTaskPath) &&
+ task.state === 'READY' &&
+ showingLastSpiffStep()
+ );
+ };
+
+ const canResetProcess = (task: any) => {
+ return (
+ ability.can('POST', targetUris.processInstanceResetPath) &&
+ processInstance &&
+ processInstance.status === 'suspended' &&
+ task.state === 'READY' &&
+ !showingLastSpiffStep()
+ );
+ };
+
+ const getEvents = (task: any) => {
+ const handleMessage = (eventDefinition: any) => {
+ if (eventDefinition.typename === 'MessageEventDefinition') {
+ const newEvent = eventDefinition;
+ delete newEvent.message_var;
+ newEvent.payload = {};
+ return newEvent;
+ }
+ return eventDefinition;
+ };
+ if (task.event_definition && task.event_definition.event_definitions)
+ return task.event_definition.event_definitions.map((e: any) =>
+ handleMessage(e)
+ );
+ if (task.event_definition) return [handleMessage(task.event_definition)];
+ return [];
+ };
+
+ const cancelUpdatingTask = () => {
setEditingTaskData(false);
+ setSelectingEvent(false);
initializeTaskDataToDisplay(taskToDisplay);
+ setEventPayload('{}');
setErrorObject(null);
};
@@ -550,7 +620,30 @@ export default function ProcessInstanceShow({ variant }: OwnProps) {
});
};
- const taskDataButtons = (task: any) => {
+ const sendEvent = () => {
+ if ('payload' in eventToSend)
+ eventToSend.payload = JSON.parse(eventPayload);
+ HttpService.makeCallToBackend({
+ path: `/send-event/${modifiedProcessModelId}/${params.process_instance_id}`,
+ httpMethod: 'POST',
+ successCallback: saveTaskDataResult,
+ failureCallback: saveTaskDataFailure,
+ postBody: eventToSend,
+ });
+ };
+
+ const completeTask = (execute: boolean) => {
+ const taskToUse: any = taskToDisplay;
+ HttpService.makeCallToBackend({
+ path: `/task-complete/${modifiedProcessModelId}/${params.process_instance_id}/${taskToUse.id}`,
+ httpMethod: 'POST',
+ successCallback: saveTaskDataResult,
+ failureCallback: saveTaskDataFailure,
+ postBody: { execute },
+ });
+ };
+
+ const taskDisplayButtons = (task: any) => {
const buttons = [];
if (
@@ -579,22 +672,36 @@ export default function ProcessInstanceShow({ variant }: OwnProps) {
);
}
- if (canEditTaskData(task)) {
- if (editingTaskData) {
- buttons.push(
-
- );
- buttons.push(
-
- );
- } else {
+ if (editingTaskData) {
+ buttons.push(
+
+ );
+ buttons.push(
+
+ );
+ } else if (selectingEvent) {
+ buttons.push(
+
+ );
+ buttons.push(
+
+ );
+ } else {
+ if (canEditTaskData(task)) {
buttons.push(