Feature/data object category (#796)

* WIP: some initial code for category in data objects

* attempt to get the data object for the given bpmn process and instance of it w/ burnettk

* updates for data objects

* fixed tests

* applied suggestions from code rabbit and moved the logic to get the process data file out of the shared method, since it has a completely different implementation

* remove commented out code

* updated SpiffWorkflow for data object category

---------

Co-authored-by: jasquat <jasquat@users.noreply.github.com>
jasquat 2023-12-11 10:42:01 -05:00 committed by GitHub
parent 91d33305de
commit c00d810704
12 changed files with 168 additions and 80 deletions

View File

@@ -11,11 +11,6 @@ from spiffworkflow_backend.services.process_instance_processor import (
 def main(process_instance_id: str) -> None:
     """Main."""
-    os.environ["SPIFFWORKFLOW_BACKEND_ENV"] = "local_development"
-    if os.environ.get("SPIFFWORKFLOW_BACKEND_BPMN_SPEC_ABSOLUTE_DIR") is None:
-        os.environ["SPIFFWORKFLOW_BACKEND_BPMN_SPEC_ABSOLUTE_DIR"] = "hey"
-    flask_env_key = "FLASK_SESSION_SECRET_KEY"
-    os.environ[flask_env_key] = "whatevs"
     app = create_app()
     with app.app_context():
         process_instance = ProcessInstanceModel.query.filter_by(id=process_instance_id).first()
@@ -24,9 +19,8 @@ def main(process_instance_id: str) -> None:
     if not process_instance:
         raise Exception(f"Could not find a process instance with id: {process_instance_id}")
-    bpmn_process_dict = ProcessInstanceProcessor._get_full_bpmn_process_dict(process_instance, {})
-    with open(file_path, "w", encoding="utf-8") as f:
-        f.write(json.dumps(bpmn_process_dict, indent=2))
+    processor = ProcessInstanceProcessor(process_instance)
+    processor.dump_to_disk(file_path)
     print(f"Saved to {file_path}")

View File

@@ -14,4 +14,4 @@ script_dir="$( cd -- "$(dirname "$0")" >/dev/null 2>&1 ; pwd -P )"
 export SPIFFWORKFLOW_BACKEND_RUN_BACKGROUND_SCHEDULER_IN_CREATE_APP=false
-poet run python "$script"
+poet run python "$script" "$@"

View File

@@ -2527,7 +2527,7 @@ lxml = "*"
 type = "git"
 url = "https://github.com/sartography/SpiffWorkflow"
 reference = "main"
-resolved_reference = "c9786c543d3cd0ccffce35dfab115be0ee346f0b"
+resolved_reference = "e4ceff40d9d8a0d182b19483b9f1e9d8850d5765"

 [[package]]
 name = "spiffworkflow-connector-command"

View File

@@ -1892,8 +1892,14 @@ paths:
               schema:
                 $ref: "#/components/schemas/OkTrue"
-  /process-data/{modified_process_model_identifier}/{process_data_identifier}/{process_instance_id}:
+  /process-data/{category}/{modified_process_model_identifier}/{process_data_identifier}/{process_instance_id}:
     parameters:
+      - name: category
+        in: path
+        required: true
+        description: The category of the data object.
+        schema:
+          type: string
       - name: modified_process_model_identifier
         in: path
         required: true
@@ -1912,6 +1918,18 @@ paths:
         description: The identifier of the process data.
         schema:
           type: string
+      - name: process_identifier
+        in: query
+        required: false
+        description: The identifier of the process the data object is in.
+        schema:
+          type: string
+      - name: bpmn_process_guid
+        in: query
+        required: false
+        description: The guid of the bpmn process to get the data object for.
+        schema:
+          type: string
     get:
       operationId: spiffworkflow_backend.routes.process_api_blueprint.process_data_show
       summary: Fetch the process data value.
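
For context on the reshaped route above: the data object category now comes first in the path, and the two new query parameters are only meaningful as a pair, when the data object lives in a child bpmn process. A minimal client sketch, assuming a locally running backend, the Python requests library, and placeholder identifiers and token:

import requests

base_url = "http://localhost:7000/v1.0"
url = (
    f"{base_url}/process-data/the_cat"  # {category}
    "/misc:category-example"            # {modified_process_model_identifier}, "/" replaced with ":"
    "/the_data_object_var"              # {process_data_identifier}
    "/42"                               # {process_instance_id}
)
# Optional, and only valid together: target a data object inside a child bpmn process.
params = {"process_identifier": "Process_my_subprocess", "bpmn_process_guid": "<subprocess-guid>"}
response = requests.get(url, params=params, headers={"Authorization": "Bearer <token>"})
print(response.status_code, response.json())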

View File

@@ -1,5 +1,6 @@
 import json
 from typing import Any
+from uuid import UUID

 import flask.wrappers
 from flask import Blueprint
@@ -109,36 +110,59 @@ def process_caller_list(bpmn_process_identifiers: list[str]) -> Any:
 def _process_data_fetcher(
     process_instance_id: int,
     process_data_identifier: str,
-    download_file_data: bool,
+    category: str,
+    bpmn_process_guid: str | None = None,
+    process_identifier: str | None = None,
 ) -> flask.wrappers.Response:
-    if download_file_data:
-        file_data = ProcessInstanceFileDataModel.query.filter_by(
-            digest=process_data_identifier,
-            process_instance_id=process_instance_id,
-        ).first()
-        if file_data is None:
-            raise ApiError(
-                error_code="process_instance_file_data_not_found",
-                message=f"Could not find file data related to the digest: {process_data_identifier}",
-            )
-        mimetype = file_data.mimetype
-        filename = file_data.filename
-        file_contents = file_data.contents
-        return Response(
-            file_contents,
-            mimetype=mimetype,
-            headers={"Content-disposition": f"attachment; filename={filename}"},
+    if process_identifier and bpmn_process_guid is None:
+        raise ApiError(
+            error_code="missing_required_parameter",
+            message="process_identifier was given but bpmn_process_guid was not. Both must be provided if either is required.",
+            status_code=404,
+        )
+    if process_identifier is None and bpmn_process_guid:
+        raise ApiError(
+            error_code="missing_required_parameter",
+            message="bpmn_process_guid was given but process_identifier was not. Both must be provided if either is required.",
+            status_code=404,
         )

     process_instance = _find_process_instance_by_id_or_raise(process_instance_id)
     processor = ProcessInstanceProcessor(process_instance)
-    all_process_data = processor.get_data()
-    process_data_value = all_process_data.get(process_data_identifier)

-    if process_data_value is None:
-        script_engine_last_result = processor._script_engine.environment.last_result()
-        process_data_value = script_engine_last_result.get(process_data_identifier)
+    bpmn_process_instance = processor.bpmn_process_instance
+    bpmn_process_data = processor.get_data()
+    if process_identifier and bpmn_process_instance.spec.name != process_identifier:
+        bpmn_process_instance = processor.bpmn_process_instance.subprocesses.get(UUID(bpmn_process_guid))
+        if bpmn_process_instance is None:
+            raise ApiError(
+                error_code="bpmn_process_not_found",
+                message=f"Cannot find a bpmn process with guid '{bpmn_process_guid}' for process instance {process_instance.id}",
+                status_code=404,
+            )
+        bpmn_process_data = bpmn_process_instance.data
+
+    data_objects = bpmn_process_instance.spec.data_objects
+    data_object = data_objects.get(process_data_identifier)
+    if data_object is None:
+        raise ApiError(
+            error_code="data_object_not_found",
+            message=(
+                f"Cannot find a data object with identifier '{process_data_identifier}' for bpmn process '{process_identifier}'"
+                f" in process instance {process_instance.id}"
+            ),
+            status_code=404,
+        )
+
+    if hasattr(data_object, "category") and data_object.category is not None:
+        if data_object.category != category:
+            raise ApiError(
+                error_code="data_object_category_mismatch",
+                message=f"The desired data object has category '{data_object.category}' instead of the expected '{category}'",
+                status_code=400,
+            )
+
+    process_data_value = bpmn_process_data.get(process_data_identifier)

     return make_response(
         jsonify(
@@ -152,26 +176,47 @@ def _process_data_fetcher(
 def process_data_show(
+    category: str,
     process_instance_id: int,
     process_data_identifier: str,
     modified_process_model_identifier: str,
+    bpmn_process_guid: str | None = None,
+    process_identifier: str | None = None,
 ) -> flask.wrappers.Response:
     return _process_data_fetcher(
-        process_instance_id,
-        process_data_identifier,
-        download_file_data=False,
+        process_instance_id=process_instance_id,
+        process_data_identifier=process_data_identifier,
+        category=category,
+        bpmn_process_guid=bpmn_process_guid,
+        process_identifier=process_identifier,
     )


 def process_data_file_download(
+    category: str,
     process_instance_id: int,
     process_data_identifier: str,
     modified_process_model_identifier: str,
+    bpmn_process_guid: str | None = None,
+    process_identifier: str | None = None,
 ) -> flask.wrappers.Response:
-    return _process_data_fetcher(
-        process_instance_id,
-        process_data_identifier,
-        download_file_data=True,
+    file_data = ProcessInstanceFileDataModel.query.filter_by(
+        digest=process_data_identifier,
+        process_instance_id=process_instance_id,
+    ).first()
+    if file_data is None:
+        raise ApiError(
+            error_code="process_instance_file_data_not_found",
+            message=f"Could not find file data related to the digest: {process_data_identifier}",
+        )
+    mimetype = file_data.mimetype
+    filename = file_data.filename
+    file_contents = file_data.contents
+    return Response(
+        file_contents,
+        mimetype=mimetype,
+        headers={"Content-disposition": f"attachment; filename={filename}"},
     )

View File

@@ -1,7 +1,6 @@
 from typing import Any

 from SpiffWorkflow.bpmn.parser.BpmnParser import full_tag  # type: ignore
-from SpiffWorkflow.dmn.parser.BpmnDmnParser import BpmnDmnParser  # type: ignore
 from SpiffWorkflow.spiff.parser.process import SpiffBpmnParser  # type: ignore
 from SpiffWorkflow.spiff.parser.task_spec import ServiceTaskParser  # type: ignore
@@ -10,11 +9,10 @@ from spiffworkflow_backend.services.service_task_service import CustomServiceTas
 from spiffworkflow_backend.specs.start_event import StartEvent


-class MyCustomParser(BpmnDmnParser):  # type: ignore
+class MyCustomParser(SpiffBpmnParser):  # type: ignore
     """A BPMN and DMN parser that can also parse spiffworkflow-backend specific extensions."""

-    OVERRIDE_PARSER_CLASSES = BpmnDmnParser.OVERRIDE_PARSER_CLASSES
-    OVERRIDE_PARSER_CLASSES.update(SpiffBpmnParser.OVERRIDE_PARSER_CLASSES)
+    OVERRIDE_PARSER_CLASSES = SpiffBpmnParser.OVERRIDE_PARSER_CLASSES
     OVERRIDE_PARSER_CLASSES.update({full_tag("serviceTask"): (ServiceTaskParser, CustomServiceTask)})

     StartEvent.register_parser_class(OVERRIDE_PARSER_CLASSES)

View File

@@ -166,13 +166,13 @@ class TaskDataBasedScriptEngineEnvironment(TaskDataEnvironment):  # type: ignore
         self,
         script: str,
         context: dict[str, Any],
-        external_methods: dict[str, Any] | None = None,
+        external_context: dict[str, Any] | None = None,
     ) -> bool:
-        super().execute(script, context, external_methods)
+        super().execute(script, context, external_context)
         self._last_result = context
         return True

-    def user_defined_state(self, external_methods: dict[str, Any] | None = None) -> dict[str, Any]:
+    def user_defined_state(self, external_context: dict[str, Any] | None = None) -> dict[str, Any]:
         return {}

     def last_result(self) -> dict[str, Any]:
@@ -206,11 +206,11 @@ class NonTaskDataBasedScriptEngineEnvironment(BasePythonScriptEngineEnvironment)
         self,
         expression: str,
         context: dict[str, Any],
-        external_methods: dict[str, Any] | None = None,
+        external_context: dict[str, Any] | None = None,
     ) -> Any:
         state = {}
         state.update(self.globals)
-        state.update(external_methods or {})
+        state.update(external_context or {})
         state.update(self.state)
         state.update(context)
         return eval(expression, state)  # noqa
@@ -219,10 +219,10 @@ class NonTaskDataBasedScriptEngineEnvironment(BasePythonScriptEngineEnvironment)
         self,
         script: str,
         context: dict[str, Any],
-        external_methods: dict[str, Any] | None = None,
+        external_context: dict[str, Any] | None = None,
     ) -> bool:
         self.state.update(self.globals)
-        self.state.update(external_methods or {})
+        self.state.update(external_context or {})
         self.state.update(context)
         try:
             exec(script, self.state)  # noqa
@@ -235,16 +235,16 @@ class NonTaskDataBasedScriptEngineEnvironment(BasePythonScriptEngineEnvironment)
         for key_to_drop in context_keys_to_drop:
             context.pop(key_to_drop)

-        self.state = self.user_defined_state(external_methods)
+        self.state = self.user_defined_state(external_context)

         # the task data needs to be updated with the current state so data references can be resolved properly.
         # the state will be removed later once the task is completed.
         context.update(self.state)

-    def user_defined_state(self, external_methods: dict[str, Any] | None = None) -> dict[str, Any]:
+    def user_defined_state(self, external_context: dict[str, Any] | None = None) -> dict[str, Any]:
         keys_to_filter = self.non_user_defined_keys
-        if external_methods is not None:
-            keys_to_filter |= set(external_methods.keys())
+        if external_context is not None:
+            keys_to_filter |= set(external_context.keys())

         return {k: v for k, v in self.state.items() if k not in keys_to_filter and not callable(v)}
@@ -346,24 +346,24 @@ class CustomBpmnScriptEngine(PythonScriptEngine):  # type: ignore
         self,
         task: SpiffTask,
         expression: str,
-        external_methods: dict[str, Any] | None = None,
+        external_context: dict[str, Any] | None = None,
     ) -> Any:
-        return self._evaluate(expression, task.data, task, external_methods)
+        return self._evaluate(expression, task.data, task, external_context)

     def _evaluate(
         self,
         expression: str,
         context: dict[str, Any],
         task: SpiffTask | None = None,
-        external_methods: dict[str, Any] | None = None,
+        external_context: dict[str, Any] | None = None,
     ) -> Any:
         methods = self.__get_augment_methods(task)
-        if external_methods:
-            methods.update(external_methods)
+        if external_context:
+            methods.update(external_context)

         """Evaluate the given expression, within the context of the given task and return the result."""
         try:
-            return super()._evaluate(expression, context, external_methods=methods)
+            return super()._evaluate(expression, context, external_context=methods)
         except Exception as exception:
             if task is None:
                 raise WorkflowException(
@@ -376,12 +376,12 @@ class CustomBpmnScriptEngine(PythonScriptEngine):  # type: ignore
                     exception=exception,
                 ) from exception

-    def execute(self, task: SpiffTask, script: str, external_methods: Any = None) -> bool:
+    def execute(self, task: SpiffTask, script: str, external_context: Any = None) -> bool:
         try:
             # reset failing task just in case
             methods = self.__get_augment_methods(task)
-            if external_methods:
-                methods.update(external_methods)
+            if external_context:
+                methods.update(external_context)

             # do not run script if it is blank
             if script:
                 super().execute(task, script, methods)

View File

@@ -45,7 +45,7 @@ class StartEvent(DefaultStartEvent):  # type: ignore
             time_delta = TimerEventDefinition.get_timedelta_from_start(parsed_duration, now_in_utc)
         elif isinstance(self.timer_definition, CycleTimerEventDefinition):
             cycles, start, cycle_duration = TimerEventDefinition.parse_iso_recurring_interval(evaluated_expression)
-            time_delta = start - now_in_utc + cycle_duration
+            time_delta = start - now_in_utc
             duration = int(cycle_duration.total_seconds())

         start_delay_in_seconds = int(time_delta.total_seconds())
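
The cycle-timer branch now computes the start delay as the gap to the declared start time, where it previously added one full cycle on top. A standalone illustration of the arithmetic with plain datetime values (it does not call SpiffWorkflow; the times are made up):

from datetime import datetime, timedelta, timezone

now_in_utc = datetime(2023, 12, 11, 11, 0, tzinfo=timezone.utc)
start = datetime(2023, 12, 11, 12, 0, tzinfo=timezone.utc)  # e.g. from "R3/2023-12-11T12:00:00Z/PT1H"
cycle_duration = timedelta(hours=1)

old_delay = int((start - now_in_utc + cycle_duration).total_seconds())  # 7200: first run one cycle late
new_delay = int((start - now_in_utc).total_seconds())                   # 3600: first run at the declared start
print(old_delay, new_delay)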

View File

@@ -1,5 +1,5 @@
 <?xml version="1.0" encoding="UTF-8"?>
-<bpmn:definitions xmlns:bpmn="http://www.omg.org/spec/BPMN/20100524/MODEL" xmlns:bpmndi="http://www.omg.org/spec/BPMN/20100524/DI" xmlns:dc="http://www.omg.org/spec/DD/20100524/DC" xmlns:di="http://www.omg.org/spec/DD/20100524/DI" id="Definitions_96f6665" targetNamespace="http://bpmn.io/schema/bpmn" exporter="Camunda Modeler" exporterVersion="3.0.0-dev">
+<bpmn:definitions xmlns:bpmn="http://www.omg.org/spec/BPMN/20100524/MODEL" xmlns:spiffworkflow="http://spiffworkflow.org/bpmn/schema/1.0/core" xmlns:bpmndi="http://www.omg.org/spec/BPMN/20100524/DI" xmlns:dc="http://www.omg.org/spec/DD/20100524/DC" xmlns:di="http://www.omg.org/spec/DD/20100524/DI" id="Definitions_96f6665" targetNamespace="http://bpmn.io/schema/bpmn" exporter="Camunda Modeler" exporterVersion="3.0.0-dev">
   <bpmn:process id="Process_hjecbuk" isExecutable="true">
     <bpmn:startEvent id="StartEvent_1">
       <bpmn:outgoing>Flow_0hnphp9</bpmn:outgoing>
@@ -28,7 +28,11 @@
       </bpmn:dataInputAssociation>
     </bpmn:manualTask>
     <bpmn:dataObjectReference id="DataObjectReference_10g8dit" name="The Data Object Var" dataObjectRef="the_data_object_var" />
-    <bpmn:dataObject id="the_data_object_var" />
+    <bpmn:dataObject id="the_data_object_var">
+      <bpmn:extensionElements>
+        <spiffworkflow:category>the_cat</spiffworkflow:category>
+      </bpmn:extensionElements>
+    </bpmn:dataObject>
   </bpmn:process>
   <bpmndi:BPMNDiagram id="BPMNDiagram_1">
     <bpmndi:BPMNPlane id="BPMNPlane_1" bpmnElement="Process_hjecbuk">
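
The spiffworkflow:category extension added to the data object above is what the backend later reads back as data_object.category. A rough sketch of inspecting it with the SpiffWorkflow parser, assuming the SpiffWorkflow revision pinned earlier in this commit; the file name is a placeholder:

from SpiffWorkflow.spiff.parser.process import SpiffBpmnParser  # type: ignore

parser = SpiffBpmnParser()
parser.add_bpmn_file("data_object.bpmn")       # placeholder path to the BPMN file shown above
spec = parser.get_spec("Process_hjecbuk")

data_object = spec.data_objects["the_data_object_var"]
print(getattr(data_object, "category", None))  # expected: "the_cat"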

View File

@@ -3316,7 +3316,7 @@ class TestProcessApi(BaseTest):
         assert process_instance_one.status == "user_input_required"

         response = client.get(
-            f"/v1.0/process-data/{self.modify_process_identifier_for_path_param(process_model.id)}/the_data_object_var/{process_instance_one.id}",
+            f"/v1.0/process-data/the_cat/{self.modify_process_identifier_for_path_param(process_model.id)}/the_data_object_var/{process_instance_one.id}",
             headers=self.logged_in_headers(with_super_admin_user),
         )

View File

@@ -8364,7 +8364,7 @@
     },
     "node_modules/bpmn-js-spiffworkflow": {
       "version": "0.0.8",
-      "resolved": "git+ssh://git@github.com/sartography/bpmn-js-spiffworkflow.git#04186b9032c2fa4fee08c9d6fffd955f3bc8f080",
+      "resolved": "git+ssh://git@github.com/sartography/bpmn-js-spiffworkflow.git#9762eb631de107aac584fce1c056070cdaed171e",
       "license": "MIT",
       "dependencies": {
         "inherits": "^2.0.4",
@@ -38244,7 +38244,7 @@
       }
     },
     "bpmn-js-spiffworkflow": {
-      "version": "git+ssh://git@github.com/sartography/bpmn-js-spiffworkflow.git#04186b9032c2fa4fee08c9d6fffd955f3bc8f080",
+      "version": "git+ssh://git@github.com/sartography/bpmn-js-spiffworkflow.git#9762eb631de107aac584fce1c056070cdaed171e",
       "from": "bpmn-js-spiffworkflow@github:sartography/bpmn-js-spiffworkflow#main",
       "requires": {
         "inherits": "^2.0.4",

View File

@@ -704,19 +704,48 @@ export default function ProcessInstanceShow({ variant }: OwnProps) {
     setProcessDataToDisplay(processData);
   };

+  const makeProcessDataCallFromShapeElement = (shapeElement: any) => {
+    const { dataObjectRef } = shapeElement.businessObject;
+    let category = 'default';
+    if ('extensionElements' in dataObjectRef) {
+      const categoryExtension = dataObjectRef.extensionElements.values.find(
+        (extension: any) => {
+          return extension.$type === 'spiffworkflow:category';
+        }
+      );
+      if (categoryExtension) {
+        category = categoryExtension.$body;
+      }
+    }
+    const dataObjectIdentifer = dataObjectRef.id;
+    const parentProcess = shapeElement.businessObject.$parent;
+    const processIdentifier = parentProcess.id;
+    let additionalParams = '';
+    if (tasks) {
+      const matchingTask: Task | undefined = tasks.find((task: Task) => {
+        return task.bpmn_identifier === processIdentifier;
+      });
+      if (matchingTask) {
+        additionalParams = `?process_identifier=${processIdentifier}&bpmn_process_guid=${matchingTask.guid}`;
+      }
+    }
+    HttpService.makeCallToBackend({
+      path: `/process-data/${category}/${params.process_model_id}/${dataObjectIdentifer}/${params.process_instance_id}${additionalParams}`,
+      httpMethod: 'GET',
+      successCallback: handleProcessDataShowResponse,
+      failureCallback: addError,
+      onUnauthorized: (result: any) =>
+        handleProcessDataShowReponseUnauthorized(dataObjectIdentifer, result),
+    });
+  };
+
   const handleClickedDiagramTask = (
     shapeElement: any,
     bpmnProcessIdentifiers: any
   ) => {
     if (shapeElement.type === 'bpmn:DataObjectReference') {
-      const dataObjectIdentifer = shapeElement.businessObject.dataObjectRef.id;
-      HttpService.makeCallToBackend({
-        path: `/process-data/${params.process_model_id}/${dataObjectIdentifer}/${params.process_instance_id}`,
-        httpMethod: 'GET',
-        successCallback: handleProcessDataShowResponse,
-        onUnauthorized: (result: any) =>
-          handleProcessDataShowReponseUnauthorized(dataObjectIdentifer, result),
-      });
+      makeProcessDataCallFromShapeElement(shapeElement);
     } else if (tasks) {
       const matchingTask: Task | undefined = tasks.find((task: Task) => {
         return (