Feature/extensions api (#423)

* added api and test to run a process model and return task data synchronously w/ burnettk
* added test to make sure we can access db models from extensions api w/ burnettk
* added extensions api to elevated permissions
* fixed permission tests
* do not add extensions permission to all permissions for a pg or pm w/ burnettk
* added configs for extensions api w/ burnettk
* added the basis for an extensions list api w/ burnettk
* added tests for extensions api and do not use serialized as a property
* allow giving a body to an extension when running and some support in frontend to use extensions
* added ability to display markdown and rjsf on extensions page
* added ability to submit the extension form and display the resulting task data
* made frontend extension urls have 2 pieces of information so we can specify multiple routes for the same process-model w/ burnettk
* do not save process instances when running extensions w/ burnettk
* add extension input to a task, not the process w/ burnettk
* pyl w/ burnettk

---------

Co-authored-by: jasquat <jasquat@users.noreply.github.com>
Co-authored-by: burnettk <burnettk@users.noreply.github.com>

Parent: 8427f9d2fb
Commit: 796dc9dbec
@@ -35,14 +35,14 @@ from spiffworkflow_backend.services.background_processing_service import Backgro
 class MyJSONEncoder(DefaultJSONProvider):
     def default(self, obj: Any) -> Any:
         if hasattr(obj, "serialized"):
-            return obj.serialized
+            return obj.serialized()
         elif isinstance(obj, sqlalchemy.engine.row.Row):  # type: ignore
             return_dict = {}
             row_mapping = obj._mapping
             for row_key in row_mapping.keys():
                 row_value = row_mapping[row_key]
                 if hasattr(row_value, "serialized"):
-                    return_dict.update(row_value.serialized)
+                    return_dict.update(row_value.serialized())
                 elif hasattr(row_value, "__dict__"):
                     return_dict.update(row_value.__dict__)
                 else:
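The encoder change above assumes every model now exposes serialized as a plain method rather than a property, so hasattr() still detects it but the encoder must call it. A minimal sketch of that protocol; ExampleModel is hypothetical, not from this commit:

from typing import Any


class ExampleModel:
    """Hypothetical model following the serialized() protocol MyJSONEncoder expects."""

    def __init__(self, id: int, name: str) -> None:
        self.id = id
        self.name = name

    def serialized(self) -> dict[str, Any]:
        # a plain method now, so the encoder calls it instead of reading a property
        return {"id": self.id, "name": self.name}


obj = ExampleModel(1, "demo")
if hasattr(obj, "serialized"):
    print(obj.serialized())  # {'id': 1, 'name': 'demo'}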
@@ -796,6 +796,58 @@ paths:
               items:
                 $ref: "#/components/schemas/Workflow"

+  /extensions:
+    get:
+      operationId: spiffworkflow_backend.routes.extensions_controller.extension_list
+      summary: Returns the list of available extensions
+      requestBody:
+        content:
+          application/json:
+            schema:
+              $ref: "#/components/schemas/AwesomeUnspecifiedPayload"
+      tags:
+        - Extensions
+      responses:
+        "200":
+          description: Resulting extensions
+          content:
+            application/json:
+              schema:
+                $ref: "#/components/schemas/Workflow"
+
+  /extensions/{modified_process_model_identifier}:
+    parameters:
+      - name: modified_process_model_identifier
+        in: path
+        required: true
+        description: The unique id of an existing process model.
+        schema:
+          type: string
+    get:
+      operationId: spiffworkflow_backend.routes.extensions_controller.extension_show
+      summary: Returns the metadata for a given extension
+      tags:
+        - Extensions
+      responses:
+        "200":
+          description: Resulting extension metadata
+          content:
+            application/json:
+              schema:
+                $ref: "#/components/schemas/Workflow"
+    post:
+      operationId: spiffworkflow_backend.routes.extensions_controller.extension_run
+      summary: Run an extension for a given process model
+      tags:
+        - Extensions
+      responses:
+        "200":
+          description: Resulting task data
+          content:
+            application/json:
+              schema:
+                $ref: "#/components/schemas/Workflow"
+
   /process-models/{modified_process_model_identifier}/script-unit-tests:
     parameters:
       - name: modified_process_model_identifier
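For reviewers, a hypothetical client-side sketch of the three endpoints declared above. The base URL, bearer token, and extension identifier are assumptions, not part of the commit:

import requests  # any HTTP client would do

BASE = "http://localhost:7000/v1.0"  # assumed local backend URL
HEADERS = {"Authorization": "Bearer <token>"}  # assumed auth scheme

# GET /extensions: list the extension process models
extensions = requests.get(f"{BASE}/extensions", headers=HEADERS).json()

# POST /extensions/{id}: run one, optionally passing input for the first task
result = requests.post(
    f"{BASE}/extensions/my-extension",  # hypothetical modified process model id
    headers=HEADERS,
    json={"extension_input": {"greeting": "hello"}},
).json()
print(result)  # the resulting task data; nothing is persisted server-side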
@@ -86,6 +86,17 @@ def _set_up_tenant_specific_fields_as_list_of_strings(app: Flask) -> None:
     )


+def _check_extension_api_configs(app: Flask) -> None:
+    if (
+        app.config["SPIFFWORKFLOW_BACKEND_EXTENSIONS_API_ENABLED"]
+        and len(app.config["SPIFFWORKFLOW_BACKEND_EXTENSIONS_PROCESS_MODEL_PREFIX"]) < 1
+    ):
+        raise ConfigurationError(
+            "SPIFFWORKFLOW_BACKEND_EXTENSIONS_API_ENABLED is set to true but"
+            " SPIFFWORKFLOW_BACKEND_EXTENSIONS_PROCESS_MODEL_PREFIX is an empty value."
+        )
+
+
 # see the message in the ConfigurationError below for why we are checking this.
 # we really do not want this to raise when there is not a problem, so there are lots of return statements littered throughout.
 def _check_for_incompatible_frontend_and_backend_urls(app: Flask) -> None:

@@ -193,3 +204,4 @@ def setup_config(app: Flask) -> None:
     app.config["THREAD_LOCAL_DATA"] = thread_local_data
     _set_up_tenant_specific_fields_as_list_of_strings(app)
     _check_for_incompatible_frontend_and_backend_urls(app)
+    _check_extension_api_configs(app)
@@ -8,6 +8,13 @@ from os import environ

 FLASK_SESSION_SECRET_KEY = environ.get("FLASK_SESSION_SECRET_KEY")

 SPIFFWORKFLOW_BACKEND_BPMN_SPEC_ABSOLUTE_DIR = environ.get("SPIFFWORKFLOW_BACKEND_BPMN_SPEC_ABSOLUTE_DIR")
+SPIFFWORKFLOW_BACKEND_EXTENSIONS_PROCESS_MODEL_PREFIX = environ.get(
+    "SPIFFWORKFLOW_BACKEND_EXTENSIONS_PROCESS_MODEL_PREFIX", default="extensions"
+)
+SPIFFWORKFLOW_BACKEND_EXTENSIONS_API_ENABLED = (
+    environ.get("SPIFFWORKFLOW_BACKEND_EXTENSIONS_API_ENABLED", default="false")
+) == "true"
+
 cors_allow_all = "*"
 SPIFFWORKFLOW_BACKEND_CORS_ALLOW_ORIGINS = re.split(
     r",\s*",
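A small sketch of how these two settings interact, using the names defined above; the values are illustrative. Note the flag only parses the exact string "true" as truthy:

import os

# illustrative local-dev values, set before the app reads its config
os.environ["SPIFFWORKFLOW_BACKEND_EXTENSIONS_API_ENABLED"] = "true"
os.environ["SPIFFWORKFLOW_BACKEND_EXTENSIONS_PROCESS_MODEL_PREFIX"] = "extensions"

# mirrors the parsing above: anything other than "true" (e.g. "TRUE", "1") is False
enabled = os.environ.get("SPIFFWORKFLOW_BACKEND_EXTENSIONS_API_ENABLED", "false") == "true"
assert enabled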
@@ -15,3 +15,7 @@ SPIFFWORKFLOW_BACKEND_GIT_PUBLISH_CLONE_URL = environ.get(
 )
 SPIFFWORKFLOW_BACKEND_GIT_USERNAME = "sartography-automated-committer"
 SPIFFWORKFLOW_BACKEND_GIT_USER_EMAIL = f"{SPIFFWORKFLOW_BACKEND_GIT_USERNAME}@users.noreply.github.com"
+
+SPIFFWORKFLOW_BACKEND_EXTENSIONS_API_ENABLED = (
+    environ.get("SPIFFWORKFLOW_BACKEND_EXTENSIONS_API_ENABLED", default="true")
+) == "true"
@@ -94,7 +94,6 @@ class File:
         )
         return instance

-    @property
     def serialized(self) -> dict[str, Any]:
         dictionary = self.__dict__
         if isinstance(self.file_contents, bytes):
@@ -46,7 +46,6 @@ class ProcessGroup:
             return True
         return False

-    @property
     def serialized(self) -> dict:
         original_dict = dataclasses.asdict(self)
         return {x: original_dict[x] for x in original_dict if x not in ["sort_index"]}
@@ -1,13 +1,11 @@
 from __future__ import annotations

 from typing import Any
-from typing import cast

 import marshmallow
 from marshmallow import INCLUDE
 from marshmallow import Schema
 from marshmallow_enum import EnumField  # type: ignore
-from SpiffWorkflow.util.deep_merge import DeepMerge  # type: ignore
 from sqlalchemy import ForeignKey
 from sqlalchemy.orm import relationship
 from sqlalchemy.orm import validates

@@ -101,7 +99,9 @@ class ProcessInstanceModel(SpiffworkflowBaseDBModel):
     bpmn_xml_file_contents: str | None = None
     process_model_with_diagram_identifier: str | None = None

-    @property
+    # full, none
+    persistence_level: str = "full"
+
     def serialized(self) -> dict[str, Any]:
         """Return object data in serializeable format."""
         return {

@@ -122,23 +122,13 @@ class ProcessInstanceModel(SpiffworkflowBaseDBModel):
         }

     def serialized_with_metadata(self) -> dict[str, Any]:
-        process_instance_attributes = self.serialized
+        process_instance_attributes = self.serialized()
         process_instance_attributes["process_metadata"] = self.process_metadata
         process_instance_attributes["process_model_with_diagram_identifier"] = (
             self.process_model_with_diagram_identifier
         )
         return process_instance_attributes

-    @property
-    def serialized_flat(self) -> dict:
-        """Return object in serializeable format with data merged together with top-level attributes.
-
-        Top-level attributes like process_model_identifier and status win over data attributes.
-        """
-        serialized_top_level_attributes = self.serialized
-        serialized_top_level_attributes.pop("data", None)
-        return cast(dict, DeepMerge.merge(self.data, serialized_top_level_attributes))
-
     @validates("status")
     def validate_status(self, key: str, value: Any) -> Any:
         return self.validate_enum_field(key, value, ProcessInstanceStatus)
@@ -77,6 +77,16 @@ class ProcessModelInfo:

         return identifier.replace("/", ":")

+    def serialized(self) -> dict[str, Any]:
+        file_objects = self.files
+        dictionary = self.__dict__
+        if file_objects is not None:
+            serialized_files = []
+            for file in file_objects:
+                serialized_files.append(file.serialized())
+            dictionary["files"] = serialized_files
+        return dictionary
+

 class ProcessModelInfoSchema(Schema):
     class Meta:
@@ -20,7 +20,6 @@ class SecretModel(SpiffworkflowBaseDBModel):
     created_at_in_seconds: int = db.Column(db.Integer)

     # value is not included in the serialized output because it is sensitive
-    @property
     def serialized(self) -> dict[str, Any]:
         return {
             "id": self.id,
@@ -170,7 +170,6 @@ class Task:
         self.properties = {}
         self.error_message = error_message

-    @property
     def serialized(self) -> dict[str, Any]:
         """Return object data in serializeable format."""
         multi_instance_type = None
@@ -0,0 +1,148 @@
+import flask.wrappers
+from flask import current_app
+from flask import g
+from flask import jsonify
+from flask import make_response
+
+from spiffworkflow_backend.exceptions.api_error import ApiError
+from spiffworkflow_backend.models.process_instance import ProcessInstanceModel
+from spiffworkflow_backend.models.process_instance import ProcessInstanceStatus
+from spiffworkflow_backend.routes.process_api_blueprint import _get_process_model
+from spiffworkflow_backend.routes.process_api_blueprint import _un_modify_modified_process_model_id
+from spiffworkflow_backend.services.error_handling_service import ErrorHandlingService
+from spiffworkflow_backend.services.file_system_service import FileSystemService
+from spiffworkflow_backend.services.process_instance_processor import CustomBpmnScriptEngine
+from spiffworkflow_backend.services.process_instance_processor import ProcessInstanceProcessor
+from spiffworkflow_backend.services.process_instance_queue_service import ProcessInstanceIsAlreadyLockedError
+from spiffworkflow_backend.services.process_instance_queue_service import ProcessInstanceIsNotEnqueuedError
+from spiffworkflow_backend.services.process_model_service import ProcessModelService
+
+
+def extension_run(
+    modified_process_model_identifier: str,
+    body: dict | None = None,
+) -> flask.wrappers.Response:
+    _raise_unless_extensions_api_enabled()
+
+    process_model_identifier = _get_process_model_identifier(modified_process_model_identifier)
+
+    try:
+        process_model = _get_process_model(process_model_identifier)
+    except ApiError as ex:
+        if ex.error_code == "process_model_cannot_be_found":
+            raise ApiError(
+                error_code="invalid_process_model_extension",
+                message=(
+                    f"Process Model '{process_model_identifier}' cannot be run as an extension. It must be in the"
+                    " correct Process Group:"
+                    f" {current_app.config['SPIFFWORKFLOW_BACKEND_EXTENSIONS_PROCESS_MODEL_PREFIX']}"
+                ),
+                status_code=403,
+            ) from ex
+        raise ex
+
+    if process_model.primary_file_name is None:
+        raise ApiError(
+            error_code="process_model_missing_primary_bpmn_file",
+            message=(
+                f"Process Model '{process_model_identifier}' does not have a primary"
+                " bpmn file. One must be set in order to instantiate this model."
+            ),
+            status_code=400,
+        )
+
+    process_instance = ProcessInstanceModel(
+        status=ProcessInstanceStatus.not_started.value,
+        process_initiator_id=g.user.id,
+        process_model_identifier=process_model.id,
+        process_model_display_name=process_model.display_name,
+        persistence_level="none",
+    )
+
+    processor = None
+    try:
+        processor = ProcessInstanceProcessor(
+            process_instance, script_engine=CustomBpmnScriptEngine(use_restricted_script_engine=False)
+        )
+        if body and "extension_input" in body:
+            processor.do_engine_steps(save=False, execution_strategy_name="one_at_a_time")
+            next_task = processor.next_task()
+            next_task.update_data(body["extension_input"])
+        processor.do_engine_steps(save=False, execution_strategy_name="greedy")
+    except (
+        ApiError,
+        ProcessInstanceIsNotEnqueuedError,
+        ProcessInstanceIsAlreadyLockedError,
+    ) as e:
+        ErrorHandlingService.handle_error(process_instance, e)
+        raise e
+    except Exception as e:
+        ErrorHandlingService.handle_error(process_instance, e)
+        # FIXME: this is going to point someone to the wrong task - it's misinformation for errors in sub-processes.
+        # we need to recurse through all last tasks if the last task is a call activity or subprocess.
+        if processor is not None:
+            task = processor.bpmn_process_instance.last_task
+            raise ApiError.from_task(
+                error_code="unknown_exception",
+                message=f"An unknown error occurred. Original error: {e}",
+                status_code=400,
+                task=task,
+            ) from e
+        raise e
+
+    task_data = {}
+    if processor is not None:
+        task_data = processor.get_data()
+
+    return make_response(jsonify(task_data), 200)
+
+
+def extension_list() -> flask.wrappers.Response:
+    _raise_unless_extensions_api_enabled()
+    process_model_extensions = ProcessModelService.get_process_models_for_api(
+        process_group_id=current_app.config["SPIFFWORKFLOW_BACKEND_EXTENSIONS_PROCESS_MODEL_PREFIX"],
+        recursive=True,
+        filter_runnable_as_extension=True,
+        include_files=True,
+    )
+    return make_response(jsonify(process_model_extensions), 200)
+
+
+def extension_show(
+    modified_process_model_identifier: str,
+) -> flask.wrappers.Response:
+    _raise_unless_extensions_api_enabled()
+    process_model_identifier = _get_process_model_identifier(modified_process_model_identifier)
+    process_model = _get_process_model(process_model_identifier)
+    files = FileSystemService.get_sorted_files(process_model)
+    for f in files:
+        file_contents = FileSystemService.get_data(process_model, f.name)
+        f.file_contents = file_contents
+    process_model.files = files
+    return make_response(jsonify(process_model), 200)
+
+
+def _raise_unless_extensions_api_enabled() -> None:
+    if not current_app.config["SPIFFWORKFLOW_BACKEND_EXTENSIONS_API_ENABLED"]:
+        raise ApiError(
+            error_code="extensions_api_not_enabled",
+            message="The extensions api is not enabled. Cannot run process models in this way.",
+            status_code=403,
+        )
+
+
+def _get_process_model_identifier(modified_process_model_identifier: str) -> str:
+    process_model_identifier = _un_modify_modified_process_model_id(modified_process_model_identifier)
+    return _add_extension_group_identifier_it_not_present(process_model_identifier)
+
+
+def _add_extension_group_identifier_it_not_present(process_model_identifier: str) -> str:
+    """Adds the extension prefix if it does not already exist on the process model identifier.
+
+    This allows for the frontend to use just process model identifier without having to know the extension group
+    or having to add it to the uischema json which would have numerous other issues. Instead let backend take care of that.
+    """
+    extension_prefix = current_app.config["SPIFFWORKFLOW_BACKEND_EXTENSIONS_PROCESS_MODEL_PREFIX"]
+    if process_model_identifier.startswith(f"{extension_prefix}/"):
+        return process_model_identifier
+    return f"{extension_prefix}/{process_model_identifier}"
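A hedged test-style sketch of driving extension_run end to end; the create_app factory name and the auth header are assumptions, and my-extension is a hypothetical model living under the configured extensions group:

from spiffworkflow_backend import create_app  # assumed app factory

app = create_app()
with app.test_client() as client:
    response = client.post(
        "/v1.0/extensions/my-extension",  # hypothetical extension process model
        json={"extension_input": {"key": "value"}},
        headers={"Authorization": "Bearer <token>"},  # assumed auth scheme
    )
    # extension_run returns the final task data; persistence_level="none"
    # means no process instance rows are written to the db
    assert response.status_code == 200
    print(response.get_json())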
@@ -1,4 +1,3 @@
-"""APIs for dealing with process groups, process models, and process instances."""
 from flask import make_response
 from flask.wrappers import Response

@@ -1,4 +1,3 @@
-"""APIs for dealing with process groups, process models, and process instances."""
 import json
 from typing import Any

@@ -144,8 +144,6 @@ def process_instance_run(
         process_instance_metadata["data"] = process_instance_data
         return Response(json.dumps(process_instance_metadata), status=200, mimetype="application/json")

-    # FIXME: this should never happen currently but it'd be ideal to always do this
-    # currently though it does not return next task so it cannnot be used to take the user to the next human task
     return make_response(jsonify(process_instance), 200)

@@ -143,10 +143,7 @@ def process_model_update(
 def process_model_show(modified_process_model_identifier: str, include_file_references: bool = False) -> Any:
     process_model_identifier = modified_process_model_identifier.replace(":", "/")
     process_model = _get_process_model(process_model_identifier)
-    files = sorted(
-        SpecFileService.get_files(process_model),
-        key=lambda f: "" if f.name == process_model.primary_file_name else f.sort_index,
-    )
+    files = FileSystemService.get_sorted_files(process_model)
     process_model.files = files

     if include_file_references:
@@ -277,7 +274,7 @@ def process_model_file_create(
 def process_model_file_show(modified_process_model_identifier: str, file_name: str) -> Any:
     process_model_identifier = modified_process_model_identifier.replace(":", "/")
     process_model = _get_process_model(process_model_identifier)
-    files = SpecFileService.get_files(process_model, file_name)
+    files = FileSystemService.get_files(process_model, file_name)
     if len(files) == 0:
         raise ApiError(
             error_code="process_model_file_not_found",
@@ -14,6 +14,7 @@ from lxml.builder import ElementMaker  # type: ignore
 from spiffworkflow_backend.exceptions.api_error import ApiError
 from spiffworkflow_backend.routes.process_api_blueprint import _get_process_model
 from spiffworkflow_backend.routes.process_api_blueprint import _get_required_parameter_or_raise
+from spiffworkflow_backend.services.file_system_service import FileSystemService
 from spiffworkflow_backend.services.script_unit_test_runner import ScriptUnitTestRunner
 from spiffworkflow_backend.services.spec_file_service import SpecFileService

@@ -27,7 +28,7 @@ def script_unit_test_create(

     process_model_identifier = modified_process_model_identifier.replace(":", "/")
     process_model = _get_process_model(process_model_identifier)
-    file = SpecFileService.get_files(process_model, process_model.primary_file_name)[0]
+    file = FileSystemService.get_files(process_model, process_model.primary_file_name)[0]
     if file is None:
         raise ApiError(
             error_code="cannot_find_file",
@@ -17,7 +17,7 @@ def secret_show(key: str) -> Response:
     secret = SecretService.get_secret(key)

     # normal serialization does not include the secret value, but this is the one endpoint where we want to return the goods
-    secret_as_dict = secret.serialized
+    secret_as_dict = secret.serialized()
     secret_as_dict["value"] = SecretService._decrypt(secret.value)

     return make_response(secret_as_dict, 200)
@@ -1,4 +1,3 @@
-"""APIs for dealing with process groups, process models, and process instances."""
 import json
 import os
 import uuid
@@ -503,6 +503,7 @@ class AuthorizationService:
         permissions_to_assign.append(PermissionToAssign(permission="create", target_uri="/users/exists/by-username"))
         permissions_to_assign.append(PermissionToAssign(permission="read", target_uri="/connector-proxy/typeahead/*"))
         permissions_to_assign.append(PermissionToAssign(permission="read", target_uri="/debug/version-info"))
+        permissions_to_assign.append(PermissionToAssign(permission="read", target_uri="/extensions"))
         permissions_to_assign.append(PermissionToAssign(permission="read", target_uri="/process-groups"))
         permissions_to_assign.append(PermissionToAssign(permission="read", target_uri="/process-models"))
         permissions_to_assign.append(PermissionToAssign(permission="read", target_uri="/processes"))

@@ -546,6 +547,7 @@ class AuthorizationService:
         permissions_to_assign.append(PermissionToAssign(permission="create", target_uri="/debug/*"))
         permissions_to_assign.append(PermissionToAssign(permission="create", target_uri="/send-event/*"))
         permissions_to_assign.append(PermissionToAssign(permission="create", target_uri="/task-complete/*"))
+        permissions_to_assign.append(PermissionToAssign(permission="create", target_uri="/extensions/*"))

         # read comes from PG and PM ALL permissions as well
         permissions_to_assign.append(PermissionToAssign(permission="create", target_uri="/task-assign/*"))
@@ -32,6 +32,9 @@ class ErrorHandlingService:
     def _update_process_instance_in_database(
         cls, process_instance: ProcessInstanceModel, fault_or_suspend_on_exception: str
     ) -> None:
+        if process_instance.persistence_level == "none":
+            return
+
         # First, suspend or fault the instance
         if fault_or_suspend_on_exception == "suspend":
             cls._set_instance_status(
@@ -12,6 +12,10 @@ from spiffworkflow_backend.models.file import FileType
 from spiffworkflow_backend.models.process_model import ProcessModelInfo


+class ProcessModelFileNotFoundError(Exception):
+    pass
+
+
 class FileSystemService:

     """Simple Service meant for extension that provides some useful

@@ -51,6 +55,46 @@ class FileSystemService:
             )
         )

+    @classmethod
+    def get_files(
+        cls,
+        process_model_info: ProcessModelInfo,
+        file_name: str | None = None,
+        extension_filter: str = "",
+    ) -> list[File]:
+        """Return all files associated with a workflow specification."""
+        path = os.path.join(FileSystemService.root_path(), process_model_info.id_for_file_path())
+        files = cls._get_files(path, file_name)
+        if extension_filter != "":
+            files = list(filter(lambda file: file.name.endswith(extension_filter), files))
+        return files
+
+    @classmethod
+    def get_sorted_files(
+        cls,
+        process_model_info: ProcessModelInfo,
+    ) -> list[File]:
+        files = sorted(
+            FileSystemService.get_files(process_model_info),
+            key=lambda f: "" if f.name == process_model_info.primary_file_name else f.sort_index,
+        )
+        return files
+
+    @staticmethod
+    def get_data(process_model_info: ProcessModelInfo, file_name: str) -> bytes:
+        full_file_path = FileSystemService.full_file_path(process_model_info, file_name)
+        if not os.path.exists(full_file_path):
+            raise ProcessModelFileNotFoundError(
+                f"No file found with name {file_name} in {process_model_info.display_name}"
+            )
+        with open(full_file_path, "rb") as f_handle:
+            spec_file_data = f_handle.read()
+        return spec_file_data
+
+    @staticmethod
+    def full_file_path(process_model: ProcessModelInfo, file_name: str) -> str:
+        return os.path.abspath(os.path.join(FileSystemService.process_model_full_path(process_model), file_name))
+
     @staticmethod
     def full_path_from_relative_path(relative_path: str) -> str:
         return os.path.join(FileSystemService.root_path(), relative_path)
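A sketch of calling the helpers that moved onto FileSystemService above; the process model lookup is an assumption about ProcessModelService, and the identifier is hypothetical:

from spiffworkflow_backend.services.file_system_service import FileSystemService
from spiffworkflow_backend.services.process_model_service import ProcessModelService

# assumed lookup API; the id is hypothetical
process_model = ProcessModelService.get_process_model("extensions/my-extension")

files = FileSystemService.get_sorted_files(process_model)  # primary file sorts first
for f in files:
    contents: bytes = FileSystemService.get_data(process_model, f.name)
    print(f.name, len(contents))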
@@ -266,7 +266,7 @@ class CustomBpmnScriptEngine(PythonScriptEngine):  # type: ignore
     scripts directory available for execution.
     """

-    def __init__(self) -> None:
+    def __init__(self, use_restricted_script_engine: bool = True) -> None:
         default_globals = {
             "_strptime": _strptime,
             "dateparser": dateparser,

@@ -288,7 +288,6 @@ class CustomBpmnScriptEngine(PythonScriptEngine):  # type: ignore
             **JinjaHelpers.get_helper_mapping(),
         }

-        use_restricted_script_engine = True
         if os.environ.get("SPIFFWORKFLOW_BACKEND_USE_RESTRICTED_SCRIPT_ENGINE") == "false":
             use_restricted_script_engine = False

@@ -379,7 +378,7 @@ IdToBpmnProcessSpecMapping = NewType("IdToBpmnProcessSpecMapping", dict[str, Bpm


 class ProcessInstanceProcessor:
-    _script_engine = CustomBpmnScriptEngine()
+    _default_script_engine = CustomBpmnScriptEngine()
     SERIALIZER_VERSION = "1.0-spiffworkflow-backend"

     wf_spec_converter = BpmnWorkflowSerializer.configure_workflow_spec_converter(SPIFF_SPEC_CONFIG)

@@ -392,8 +391,14 @@ class ProcessInstanceProcessor:
     # __init__ calls these helpers:
     # * get_spec, which returns a spec and any subprocesses (as IdToBpmnProcessSpecMapping dict)
     # * __get_bpmn_process_instance, which takes spec and subprocesses and instantiates and returns a BpmnWorkflow
-    def __init__(self, process_instance_model: ProcessInstanceModel, validate_only: bool = False) -> None:
+    def __init__(
+        self,
+        process_instance_model: ProcessInstanceModel,
+        validate_only: bool = False,
+        script_engine: PythonScriptEngine | None = None,
+    ) -> None:
         """Create a Workflow Processor based on the serialized information available in the process_instance model."""
+        self._script_engine = script_engine or self.__class__._default_script_engine
         self.setup_processor_with_process_instance(
             process_instance_model=process_instance_model, validate_only=validate_only
         )

@@ -447,7 +452,7 @@ class ProcessInstanceProcessor:
                 validate_only,
                 subprocesses=subprocesses,
             )
-            self.set_script_engine(self.bpmn_process_instance)
+            self.set_script_engine(self.bpmn_process_instance, self._script_engine)

         except MissingSpecError as ke:
             raise ApiError(

@@ -470,7 +475,7 @@ class ProcessInstanceProcessor:
                     f"The given process model was not found: {process_model_identifier}.",
                 )
             )
-        spec_files = SpecFileService.get_files(process_model_info)
+        spec_files = FileSystemService.get_files(process_model_info)
         return cls.get_spec(spec_files, process_model_info)

     @classmethod

@@ -478,15 +483,20 @@ class ProcessInstanceProcessor:
         (bpmn_process_spec, subprocesses) = cls.get_process_model_and_subprocesses(
             process_model_identifier,
         )
-        return cls.get_bpmn_process_instance_from_workflow_spec(bpmn_process_spec, subprocesses)
+        bpmn_process_instance = cls.get_bpmn_process_instance_from_workflow_spec(bpmn_process_spec, subprocesses)
+        cls.set_script_engine(bpmn_process_instance)
+        return bpmn_process_instance

     @staticmethod
-    def set_script_engine(bpmn_process_instance: BpmnWorkflow) -> None:
-        ProcessInstanceProcessor._script_engine.environment.restore_state(bpmn_process_instance)
-        bpmn_process_instance.script_engine = ProcessInstanceProcessor._script_engine
+    def set_script_engine(
+        bpmn_process_instance: BpmnWorkflow, script_engine: PythonScriptEngine | None = None
+    ) -> None:
+        script_engine_to_use = script_engine or ProcessInstanceProcessor._default_script_engine
+        script_engine_to_use.environment.restore_state(bpmn_process_instance)
+        bpmn_process_instance.script_engine = script_engine_to_use

     def preserve_script_engine_state(self) -> None:
-        ProcessInstanceProcessor._script_engine.environment.preserve_state(self.bpmn_process_instance)
+        self._script_engine.environment.preserve_state(self.bpmn_process_instance)

     @classmethod
     def _update_bpmn_definition_mappings(

@@ -712,7 +722,6 @@ class ProcessInstanceProcessor:
             spec,
             subprocess_specs=subprocesses,
         )
-        ProcessInstanceProcessor.set_script_engine(bpmn_process_instance)
         return bpmn_process_instance

     @staticmethod

@@ -740,8 +749,6 @@ class ProcessInstanceProcessor:
                 raise err
             finally:
                 spiff_logger.setLevel(original_spiff_logger_log_level)
-
-            ProcessInstanceProcessor.set_script_engine(bpmn_process_instance)
         else:
             bpmn_process_instance = ProcessInstanceProcessor.get_bpmn_process_instance_from_workflow_spec(
                 spec, subprocesses

@@ -756,11 +763,10 @@ class ProcessInstanceProcessor:
             bpmn_definition_to_task_definitions_mappings,
         )

-    def slam_in_data(self, data: dict) -> None:
+    def add_data_to_bpmn_process_instance(self, data: dict) -> None:
+        # if we do not use a deep merge, then the data does not end up on the object for some reason
         self.bpmn_process_instance.data = DeepMerge.merge(self.bpmn_process_instance.data, data)

-        self.save()
-
     def raise_if_no_potential_owners(self, potential_owner_ids: list[int], message: str) -> None:
         if not potential_owner_ids:
             raise NoPotentialOwnersForTaskError(message)

@@ -1376,11 +1382,19 @@ class ProcessInstanceProcessor:
         execution_strategy_name: str | None = None,
         execution_strategy: ExecutionStrategy | None = None,
     ) -> None:
-        with ProcessInstanceQueueService.dequeued(self.process_instance_model):
-            # TODO: ideally we just lock in the execution service, but not sure
-            # about _add_bpmn_process_definitions and if that needs to happen in
-            # the same lock like it does on main
-            self._do_engine_steps(exit_at, save, execution_strategy_name, execution_strategy)
+        if self.process_instance_model.persistence_level != "none":
+            with ProcessInstanceQueueService.dequeued(self.process_instance_model):
+                # TODO: ideally we just lock in the execution service, but not sure
+                # about _add_bpmn_process_definitions and if that needs to happen in
+                # the same lock like it does on main
+                self._do_engine_steps(exit_at, save, execution_strategy_name, execution_strategy)
+        else:
+            self._do_engine_steps(
+                exit_at,
+                save=False,
+                execution_strategy_name=execution_strategy_name,
+                execution_strategy=execution_strategy,
+            )

     def _do_engine_steps(
         self,
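A minimal sketch of the new injection point introduced above: callers may hand ProcessInstanceProcessor their own script engine, which is exactly what the extensions controller does. The process_instance variable is an assumed, already-constructed non-persistent model:

from spiffworkflow_backend.services.process_instance_processor import CustomBpmnScriptEngine
from spiffworkflow_backend.services.process_instance_processor import ProcessInstanceProcessor

# process_instance: assumed ProcessInstanceModel with persistence_level="none"
processor = ProcessInstanceProcessor(
    process_instance,
    script_engine=CustomBpmnScriptEngine(use_restricted_script_engine=False),
)
processor.do_engine_steps(save=False, execution_strategy_name="greedy")
print(processor.get_data())  # final workflow data; nothing is saved to the db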
@@ -233,7 +233,7 @@ class ProcessInstanceReportService:
         cls.non_metadata_columns()
         for process_instance_row in process_instance_sqlalchemy_rows:
             process_instance_mapping = process_instance_row._mapping
-            process_instance_dict = process_instance_row[0].serialized
+            process_instance_dict = process_instance_row[0].serialized()
             for metadata_column in metadata_columns:
                 if metadata_column["accessor"] not in process_instance_dict:
                     process_instance_dict[metadata_column["accessor"]] = process_instance_mapping[
@@ -6,6 +6,7 @@ from glob import glob
 from json import JSONDecodeError
 from typing import TypeVar

+from flask import current_app
 from spiffworkflow_backend.exceptions.api_error import ApiError
 from spiffworkflow_backend.exceptions.process_entity_not_found_error import ProcessEntityNotFoundError
 from spiffworkflow_backend.interfaces import ProcessGroupLite

@@ -100,6 +101,14 @@ class ProcessModelService(FileSystemService):
             setattr(process_model, atu_key, atu_value)
         cls.save_process_model(process_model)

+    @classmethod
+    def is_allowed_to_run_as_extension(cls, process_model: ProcessModelInfo) -> bool:
+        if not current_app.config["SPIFFWORKFLOW_BACKEND_EXTENSIONS_API_ENABLED"]:
+            return False
+
+        configured_prefix = current_app.config["SPIFFWORKFLOW_BACKEND_EXTENSIONS_PROCESS_MODEL_PREFIX"]
+        return process_model.id.startswith(f"{configured_prefix}/")
+
     @classmethod
     def save_process_model(cls, process_model: ProcessModelInfo) -> None:
         process_model_path = os.path.abspath(

@@ -161,6 +170,7 @@ class ProcessModelService(FileSystemService):
         cls,
         process_group_id: str | None = None,
         recursive: bool | None = False,
+        include_files: bool | None = False,
     ) -> list[ProcessModelInfo]:
         process_models = []
         root_path = FileSystemService.root_path()

@@ -175,6 +185,12 @@ class ProcessModelService(FileSystemService):
         for file in glob(process_model_glob, recursive=True):
             process_model_relative_path = os.path.relpath(file, start=FileSystemService.root_path())
             process_model = cls.get_process_model_from_relative_path(os.path.dirname(process_model_relative_path))
+            if include_files:
+                files = FileSystemService.get_sorted_files(process_model)
+                for f in files:
+                    file_contents = FileSystemService.get_data(process_model, f.name)
+                    f.file_contents = file_contents
+                process_model.files = files
             process_models.append(process_model)
         process_models.sort()
         return process_models

@@ -185,8 +201,18 @@ class ProcessModelService(FileSystemService):
         process_group_id: str | None = None,
         recursive: bool | None = False,
         filter_runnable_by_user: bool | None = False,
+        filter_runnable_as_extension: bool | None = False,
+        include_files: bool | None = False,
     ) -> list[ProcessModelInfo]:
-        process_models = cls.get_process_models(process_group_id, recursive)
+        if filter_runnable_as_extension and filter_runnable_by_user:
+            raise Exception(
+                "It is not valid to filter process models by both filter_runnable_by_user and"
+                " filter_runnable_as_extension"
+            )
+
+        process_models = cls.get_process_models(
+            process_group_id=process_group_id, recursive=recursive, include_files=include_files
+        )

         permission_to_check = "read"
         permission_base_uri = "/v1.0/process-models"

@@ -194,6 +220,9 @@ class ProcessModelService(FileSystemService):
         if filter_runnable_by_user:
             permission_to_check = "create"
             permission_base_uri = "/v1.0/process-instances"
+        if filter_runnable_as_extension:
+            permission_to_check = "create"
+            permission_base_uri = "/v1.0/extensions"

         # if user has access to uri/* with that permission then there's no reason to check each one individually
         guid_of_non_existent_item_to_check_perms_against = str(uuid.uuid4())

@@ -303,7 +332,7 @@ class ProcessModelService(FileSystemService):
         cat_path = cls.full_path_from_id(process_group.id)
         os.makedirs(cat_path, exist_ok=True)
         json_path = os.path.join(cat_path, cls.PROCESS_GROUP_JSON_FILE)
-        serialized_process_group = process_group.serialized
+        serialized_process_group = process_group.serialized()
         for key in list(serialized_process_group.keys()):
             if key not in PROCESS_GROUP_SUPPORTED_KEYS_FOR_DISK_SERIALIZATION:
                 del serialized_process_group[key]
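A sketch of how extension_list exercises the new parameters above; it assumes an active Flask app context so current_app resolves:

from flask import current_app

from spiffworkflow_backend.services.process_model_service import ProcessModelService

models = ProcessModelService.get_process_models_for_api(
    process_group_id=current_app.config["SPIFFWORKFLOW_BACKEND_EXTENSIONS_PROCESS_MODEL_PREFIX"],
    recursive=True,
    filter_runnable_as_extension=True,  # checks "create" on /v1.0/extensions per model
    include_files=True,  # attaches file contents so the frontend can render forms
)
for model in models:
    print(model.id, [f.name for f in (model.files or [])])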
@@ -18,10 +18,6 @@ from spiffworkflow_backend.services.process_caller_service import ProcessCallerS
 from spiffworkflow_backend.services.process_model_service import ProcessModelService


-class ProcessModelFileNotFoundError(Exception):
-    pass
-
-
 class ProcessModelFileInvalidError(Exception):
     pass

@@ -33,19 +29,6 @@ class SpecFileService(FileSystemService):
     The files are stored in a directory whose path is determined by the category and spec names.
     """

-    @staticmethod
-    def get_files(
-        process_model_info: ProcessModelInfo,
-        file_name: str | None = None,
-        extension_filter: str = "",
-    ) -> list[File]:
-        """Return all files associated with a workflow specification."""
-        path = os.path.join(FileSystemService.root_path(), process_model_info.id_for_file_path())
-        files = SpecFileService._get_files(path, file_name)
-        if extension_filter != "":
-            files = list(filter(lambda file: file.name.endswith(extension_filter), files))
-        return files
-
     @staticmethod
     def reference_map(references: list[SpecReference]) -> dict[str, SpecReference]:
         """Creates a dict with provided references organized by id."""

@@ -58,7 +41,7 @@ class SpecFileService(FileSystemService):
     def get_references_for_process(
         process_model_info: ProcessModelInfo,
     ) -> list[SpecReference]:
-        files = SpecFileService.get_files(process_model_info)
+        files = FileSystemService.get_files(process_model_info)
         references = []
         for file in files:
             references.extend(SpecFileService.get_references_for_file(file, process_model_info))

@@ -200,21 +183,6 @@ class SpecFileService(FileSystemService):
         SpecFileService.write_file_data_to_system(full_file_path, binary_data)
         return SpecFileService.to_file_object(file_name, full_file_path)

-    @staticmethod
-    def get_data(process_model_info: ProcessModelInfo, file_name: str) -> bytes:
-        full_file_path = SpecFileService.full_file_path(process_model_info, file_name)
-        if not os.path.exists(full_file_path):
-            raise ProcessModelFileNotFoundError(
-                f"No file found with name {file_name} in {process_model_info.display_name}"
-            )
-        with open(full_file_path, "rb") as f_handle:
-            spec_file_data = f_handle.read()
-        return spec_file_data
-
-    @staticmethod
-    def full_file_path(process_model: ProcessModelInfo, file_name: str) -> str:
-        return os.path.abspath(os.path.join(SpecFileService.process_model_full_path(process_model), file_name))
-
     @staticmethod
     def last_modified(process_model: ProcessModelInfo, file_name: str) -> datetime:
         full_file_path = SpecFileService.full_file_path(process_model, file_name)
@ -244,12 +244,8 @@ class TaskModelSavingDelegate(EngineStepDelegate):
|
||||||
self._add_parents(spiff_task.parent)
|
self._add_parents(spiff_task.parent)
|
||||||
|
|
||||||
def _should_update_task_model(self) -> bool:
|
def _should_update_task_model(self) -> bool:
|
||||||
"""We need to figure out if we have previously save task info on this process intance.
|
"""No reason to save task model stuff if the process instance isn't persistent."""
|
||||||
|
return self.process_instance.persistence_level != "none"
|
||||||
Use the bpmn_process_id to do this.
|
|
||||||
"""
|
|
||||||
# return self.process_instance.bpmn_process_id is not None
|
|
||||||
return True
|
|
||||||
|
|
||||||
|
|
||||||
class GreedyExecutionStrategy(ExecutionStrategy):
|
class GreedyExecutionStrategy(ExecutionStrategy):
|
||||||
|
@@ -394,12 +390,13 @@ class WorkflowExecutionService:
     # execution_strategy.spiff_run
     # spiff.[some_run_task_method]
     def run_and_save(self, exit_at: None = None, save: bool = False) -> None:
-        with safe_assertion(ProcessInstanceLockService.has_lock(self.process_instance_model.id)) as tripped:
-            if tripped:
-                raise AssertionError(
-                    "The current thread has not obtained a lock for this process"
-                    f" instance ({self.process_instance_model.id})."
-                )
+        if self.process_instance_model.persistence_level != "none":
+            with safe_assertion(ProcessInstanceLockService.has_lock(self.process_instance_model.id)) as tripped:
+                if tripped:
+                    raise AssertionError(
+                        "The current thread has not obtained a lock for this process"
+                        f" instance ({self.process_instance_model.id})."
+                    )

         try:
             self.bpmn_process_instance.refresh_waiting_tasks()
@@ -410,8 +407,9 @@ class WorkflowExecutionService:
             if self.bpmn_process_instance.is_completed():
                 self.process_instance_completer(self.bpmn_process_instance)

-            self.process_bpmn_messages()
-            self.queue_waiting_receive_messages()
+            if self.process_instance_model.persistence_level != "none":
+                self.process_bpmn_messages()
+                self.queue_waiting_receive_messages()
         except WorkflowTaskException as wte:
             ProcessInstanceTmpService.add_event_to_process_instance(
                 self.process_instance_model,
@@ -426,11 +424,12 @@ class WorkflowExecutionService:
             raise ApiError.from_workflow_exception("task_error", str(swe), swe) from swe

         finally:
-            self.execution_strategy.save(self.bpmn_process_instance)
-            db.session.commit()
+            if self.process_instance_model.persistence_level != "none":
+                self.execution_strategy.save(self.bpmn_process_instance)
+                db.session.commit()

             if save:
                 self.process_instance_saver()

     def process_bpmn_messages(self) -> None:
         bpmn_messages = self.bpmn_process_instance.get_bpmn_messages()
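Taken together, the three guards above make `run_and_save` safe for extensions: anything that would touch the lock service, the message queue, or the database session is skipped for non-persistent instances. A condensed sketch of that control flow, with print statements standing in for the real service calls (the class and step names here are illustrative, not the project's API):

```python
class FakeInstance:
    """Hypothetical stand-in for the process instance model."""

    def __init__(self, persistence_level: str) -> None:
        self.persistence_level = persistence_level


def run_and_save(instance: FakeInstance, save: bool = False) -> None:
    if instance.persistence_level != "none":
        print("asserting that this thread holds the instance lock")
    try:
        print("running workflow steps")  # refresh_waiting_tasks / spiff_run
        if instance.persistence_level != "none":
            print("processing bpmn messages and queueing receive messages")
    finally:
        if instance.persistence_level != "none":
            print("saving engine state and committing the db session")
        if save:
            print("running the process instance saver")


run_and_save(FakeInstance("none"))            # only "running workflow steps" prints
run_and_save(FakeInstance("full"), save=True)  # every step runs
```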
@@ -0,0 +1,46 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<bpmn:definitions xmlns:bpmn="http://www.omg.org/spec/BPMN/20100524/MODEL" xmlns:bpmndi="http://www.omg.org/spec/BPMN/20100524/DI" xmlns:dc="http://www.omg.org/spec/DD/20100524/DC" xmlns:spiffworkflow="http://spiffworkflow.org/bpmn/schema/1.0/core" xmlns:di="http://www.omg.org/spec/DD/20100524/DI" id="Definitions_96f6665" targetNamespace="http://bpmn.io/schema/bpmn" exporter="Camunda Modeler" exporterVersion="3.0.0-dev">
+  <bpmn:process id="Process_ScriptWithImport" name="Script With Import" isExecutable="true">
+    <bpmn:startEvent id="StartEvent_1">
+      <bpmn:outgoing>Flow_0r3ua0i</bpmn:outgoing>
+    </bpmn:startEvent>
+    <bpmn:sequenceFlow id="Flow_0r3ua0i" sourceRef="StartEvent_1" targetRef="Activity_SetInitialData" />
+    <bpmn:scriptTask id="Activity_SetInitialData" name="Set Initial Data">
+      <bpmn:incoming>Flow_0r3ua0i</bpmn:incoming>
+      <bpmn:outgoing>Flow_1vqk60p</bpmn:outgoing>
+      <bpmn:script>
+# from spiffworkflow_backend.models.db import db
+from spiffworkflow_backend.models.process_instance import ProcessInstanceModel
+
+pi = ProcessInstanceModel.query.first()
+pi_json = { "id": pi.id }
+del(pi)</bpmn:script>
+    </bpmn:scriptTask>
+    <bpmn:endEvent id="Event_19fiqu4">
+      <bpmn:incoming>Flow_1vqk60p</bpmn:incoming>
+    </bpmn:endEvent>
+    <bpmn:sequenceFlow id="Flow_1vqk60p" sourceRef="Activity_SetInitialData" targetRef="Event_19fiqu4" />
+  </bpmn:process>
+  <bpmndi:BPMNDiagram id="BPMNDiagram_1">
+    <bpmndi:BPMNPlane id="BPMNPlane_1" bpmnElement="Process_ScriptWithImport">
+      <bpmndi:BPMNShape id="_BPMNShape_StartEvent_2" bpmnElement="StartEvent_1">
+        <dc:Bounds x="179" y="159" width="36" height="36" />
+      </bpmndi:BPMNShape>
+      <bpmndi:BPMNShape id="Activity_0l45w13_di" bpmnElement="Activity_SetInitialData">
+        <dc:Bounds x="270" y="137" width="100" height="80" />
+        <bpmndi:BPMNLabel />
+      </bpmndi:BPMNShape>
+      <bpmndi:BPMNShape id="Event_19fiqu4_di" bpmnElement="Event_19fiqu4">
+        <dc:Bounds x="752" y="159" width="36" height="36" />
+      </bpmndi:BPMNShape>
+      <bpmndi:BPMNEdge id="Flow_0r3ua0i_di" bpmnElement="Flow_0r3ua0i">
+        <di:waypoint x="215" y="177" />
+        <di:waypoint x="270" y="177" />
+      </bpmndi:BPMNEdge>
+      <bpmndi:BPMNEdge id="Flow_1vqk60p_di" bpmnElement="Flow_1vqk60p">
+        <di:waypoint x="690" y="177" />
+        <di:waypoint x="752" y="177" />
+      </bpmndi:BPMNEdge>
+    </bpmndi:BPMNPlane>
+  </bpmndi:BPMNDiagram>
+</bpmn:definitions>
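The script task in this new test fixture is what `test_extension_can_run_without_restriction` (below) exercises. A sketch of what its `<bpmn:script>` body does when the engine evaluates it; it needs a Flask app context and at least one process instance row, which the test arranges beforehand:

```python
# Runs inside the extension's script task: backend db models are importable
# and queryable because extensions execute without the usual script restrictions.
from spiffworkflow_backend.models.process_instance import ProcessInstanceModel

pi = ProcessInstanceModel.query.first()
pi_json = {"id": pi.id}
del pi  # leave only JSON-serializable data behind in the task data
```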
@@ -2,9 +2,12 @@ import io
 import json
 import os
 import time
+from collections.abc import Generator
+from contextlib import contextmanager
 from typing import Any

 from flask import current_app
+from flask.app import Flask
 from flask.testing import FlaskClient
 from spiffworkflow_backend.exceptions.api_error import ApiError
 from spiffworkflow_backend.models.db import db
@@ -449,3 +452,12 @@ class BaseTest:
         customer_curr = next(c for c in message.correlation_rules if c.name == "customer_id")
         assert po_curr is not None
         assert customer_curr is not None
+
+    @contextmanager
+    def app_config_mock(self, app: Flask, config_identifier: str, new_config_value: Any) -> Generator:
+        initial_value = app.config[config_identifier]
+        app.config[config_identifier] = new_config_value
+        try:
+            yield
+        finally:
+            app.config[config_identifier] = initial_value
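The new helper swaps a config value for the duration of a `with` block and restores it even if the body raises. A sketch of using it directly, against a bare Flask app rather than the full backend:

```python
from flask import Flask

from tests.spiffworkflow_backend.helpers.base_test import BaseTest

app = Flask(__name__)
app.config["SPIFFWORKFLOW_BACKEND_EXTENSIONS_API_ENABLED"] = False

with BaseTest().app_config_mock(app, "SPIFFWORKFLOW_BACKEND_EXTENSIONS_API_ENABLED", True):
    # inside the block, code under test sees the mocked value
    assert app.config["SPIFFWORKFLOW_BACKEND_EXTENSIONS_API_ENABLED"] is True

# the initial value is restored afterwards, even on error
assert app.config["SPIFFWORKFLOW_BACKEND_EXTENSIONS_API_ENABLED"] is False
```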
@@ -0,0 +1,123 @@
+import json
+import re
+
+from flask.app import Flask
+from flask.testing import FlaskClient
+from spiffworkflow_backend.models.user import UserModel
+
+from tests.spiffworkflow_backend.helpers.base_test import BaseTest
+
+
+class TestExtensionsController(BaseTest):
+    def test_basic_extension(
+        self,
+        app: Flask,
+        client: FlaskClient,
+        with_db_and_bpmn_file_cleanup: None,
+        with_super_admin_user: UserModel,
+    ) -> None:
+        with self.app_config_mock(app, "SPIFFWORKFLOW_BACKEND_EXTENSIONS_API_ENABLED", True):
+            process_model = self.create_group_and_model_with_bpmn(
+                client=client,
+                user=with_super_admin_user,
+                process_group_id="extensions",
+                process_model_id="sample",
+                bpmn_file_location="sample",
+            )
+
+            response = client.post(
+                f"/v1.0/extensions/{self.modify_process_identifier_for_path_param(process_model.id)}",
+                headers=self.logged_in_headers(with_super_admin_user),
+                content_type="application/json",
+                data=json.dumps({"extension_input": {"OUR_AWESOME_INPUT": "the awesome value"}}),
+            )
+
+            expected_task_data = {
+                "Mike": "Awesome",
+                "my_var": "Hello World",
+                "person": "Kevin",
+                "validate_only": False,
+                "wonderfulness": "Very wonderful",
+                "OUR_AWESOME_INPUT": "the awesome value",
+            }
+            assert response.status_code == 200
+            assert response.json is not None
+            assert response.json == expected_task_data
+
+    def test_returns_403_if_extensions_not_enabled(
+        self,
+        app: Flask,
+        client: FlaskClient,
+        with_db_and_bpmn_file_cleanup: None,
+        with_super_admin_user: UserModel,
+    ) -> None:
+        with self.app_config_mock(app, "SPIFFWORKFLOW_BACKEND_EXTENSIONS_API_ENABLED", False):
+            process_model = self.create_group_and_model_with_bpmn(
+                client=client,
+                user=with_super_admin_user,
+                process_group_id="extensions",
+                process_model_id="sample",
+                bpmn_file_location="sample",
+            )
+
+            response = client.post(
+                f"/v1.0/extensions/{self.modify_process_identifier_for_path_param(process_model.id)}",
+                headers=self.logged_in_headers(with_super_admin_user),
+            )
+            assert response.status_code == 403
+            assert response.json
+            assert response.json["error_code"] == "extensions_api_not_enabled"
+
+    def test_returns_403_if_process_model_does_not_match_configured_prefix(
+        self,
+        app: Flask,
+        client: FlaskClient,
+        with_db_and_bpmn_file_cleanup: None,
+        with_super_admin_user: UserModel,
+    ) -> None:
+        with self.app_config_mock(app, "SPIFFWORKFLOW_BACKEND_EXTENSIONS_API_ENABLED", True):
+            process_model = self.create_group_and_model_with_bpmn(
+                client=client,
+                user=with_super_admin_user,
+                process_group_id="extensions_not_it",
+                process_model_id="sample",
+                bpmn_file_location="sample",
+            )
+
+            response = client.post(
+                f"/v1.0/extensions/{self.modify_process_identifier_for_path_param(process_model.id)}",
+                headers=self.logged_in_headers(with_super_admin_user),
+            )
+            print(f"response.json: {response.json}")
+            assert response.status_code == 403
+            assert response.json
+            assert response.json["error_code"] == "invalid_process_model_extension"
+
+    def test_extension_can_run_without_restriction(
+        self,
+        app: Flask,
+        client: FlaskClient,
+        with_db_and_bpmn_file_cleanup: None,
+        with_super_admin_user: UserModel,
+    ) -> None:
+        with self.app_config_mock(app, "SPIFFWORKFLOW_BACKEND_EXTENSIONS_API_ENABLED", True):
+            process_model = self.create_group_and_model_with_bpmn(
+                client=client,
+                user=with_super_admin_user,
+                process_group_id="extensions",
+                process_model_id="script_task_with_import",
+                bpmn_file_location="script_task_with_import",
+            )
+
+            # we need a process instance in the database so the scriptTask can work
+            self.create_process_instance_from_process_model(process_model, user=with_super_admin_user)
+
+            response = client.post(
+                f"/v1.0/extensions/{self.modify_process_identifier_for_path_param(process_model.id)}",
+                headers=self.logged_in_headers(with_super_admin_user),
+            )
+
+            assert response.json is not None
+            assert "pi_json" in response.json
+            assert "id" in response.json["pi_json"]
+            assert re.match(r"^\d+$", str(response.json["pi_json"]["id"]))
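Outside the test client, the same endpoint can be hit over HTTP. A hedged sketch using the `requests` library; the host, bearer token, and modified process model identifier are placeholders, while the path shape and `extension_input` body mirror the tests above:

```python
import requests

response = requests.post(
    "http://localhost:7000/v1.0/extensions/extensions:sample",  # placeholder host and model id
    headers={"Authorization": "Bearer YOUR_ACCESS_TOKEN"},  # placeholder token
    json={"extension_input": {"OUR_AWESOME_INPUT": "the awesome value"}},
    timeout=30,
)
response.raise_for_status()
print(response.json())  # task data from the synchronously run extension
```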
@@ -590,7 +590,7 @@ class TestProcessApi(BaseTest):
             "/v1.0/process-groups",
             headers=self.logged_in_headers(with_super_admin_user),
             content_type="application/json",
-            data=json.dumps(process_group.serialized),
+            data=json.dumps(process_group.serialized()),
         )
         assert response.status_code == 201
         assert response.json
@@ -658,7 +658,7 @@ class TestProcessApi(BaseTest):
             f"/v1.0/process-groups/{group_id}",
             headers=self.logged_in_headers(with_super_admin_user),
             content_type="application/json",
-            data=json.dumps(process_group.serialized),
+            data=json.dumps(process_group.serialized()),
         )
         assert response.status_code == 200
@@ -72,7 +72,7 @@ class TestGetLocaltime(BaseTest):

         assert spiff_task

-        data = ProcessInstanceProcessor._script_engine.environment.last_result()
+        data = ProcessInstanceProcessor._default_script_engine.environment.last_result()
         some_time = data["some_time"]
         localtime = data["localtime"]
         timezone = data["timezone"]
@@ -280,6 +280,7 @@ class TestAuthorizationService(BaseTest):
         ("/active-users/*", "create"),
         ("/connector-proxy/typeahead/*", "read"),
         ("/debug/version-info", "read"),
+        ("/extensions", "read"),
         ("/onboarding", "read"),
         ("/process-groups", "read"),
         ("/process-instances/find-by-id/*", "read"),
@@ -318,6 +319,7 @@ class TestAuthorizationService(BaseTest):
         ("/can-run-privileged-script/*", "create"),
         ("/data-stores/*", "read"),
         ("/debug/*", "create"),
+        ("/extensions/*", "create"),
         ("/event-error-details/*", "read"),
         ("/logs/*", "read"),
         ("/messages", "read"),
@@ -19,33 +19,33 @@ from tests.spiffworkflow_backend.helpers.base_test import BaseTest

 @pytest.fixture()
 def app_no_cache_dir(app: Flask) -> Generator[Flask, None, None]:
-    app.config["SPIFFWORKFLOW_BACKEND_ELEMENT_UNITS_CACHE_DIR"] = None
-    yield app
+    with BaseTest().app_config_mock(app, "SPIFFWORKFLOW_BACKEND_ELEMENT_UNITS_CACHE_DIR", None):
+        yield app


 @pytest.fixture()
 def app_some_cache_dir(app: Flask) -> Generator[Flask, None, None]:
-    app.config["SPIFFWORKFLOW_BACKEND_ELEMENT_UNITS_CACHE_DIR"] = "some_cache_dir"
-    yield app
+    with BaseTest().app_config_mock(app, "SPIFFWORKFLOW_BACKEND_ELEMENT_UNITS_CACHE_DIR", "some_cache_dir"):
+        yield app


 @pytest.fixture()
 def app_disabled(app: Flask) -> Generator[Flask, None, None]:
-    app.config["SPIFFWORKFLOW_BACKEND_FEATURE_ELEMENT_UNITS_ENABLED"] = False
-    yield app
+    with BaseTest().app_config_mock(app, "SPIFFWORKFLOW_BACKEND_FEATURE_ELEMENT_UNITS_ENABLED", False):
+        yield app


 @pytest.fixture()
 def app_enabled(app_some_cache_dir: Flask) -> Generator[Flask, None, None]:
-    app_some_cache_dir.config["SPIFFWORKFLOW_BACKEND_FEATURE_ELEMENT_UNITS_ENABLED"] = True
-    yield app_some_cache_dir
+    with BaseTest().app_config_mock(app_some_cache_dir, "SPIFFWORKFLOW_BACKEND_FEATURE_ELEMENT_UNITS_ENABLED", True):
+        yield app_some_cache_dir


 @pytest.fixture()
 def app_enabled_tmp_cache_dir(app_enabled: Flask) -> Generator[Flask, None, None]:
     with tempfile.TemporaryDirectory() as tmpdirname:
-        app_enabled.config["SPIFFWORKFLOW_BACKEND_ELEMENT_UNITS_CACHE_DIR"] = tmpdirname
-        yield app_enabled
+        with BaseTest().app_config_mock(app_enabled, "SPIFFWORKFLOW_BACKEND_ELEMENT_UNITS_CACHE_DIR", tmpdirname):
+            yield app_enabled


 @pytest.fixture()
@@ -34,7 +34,7 @@ class TestProcessInstanceProcessor(BaseTest):
     ) -> None:
         app.config["THREAD_LOCAL_DATA"].process_model_identifier = "hey"
         app.config["THREAD_LOCAL_DATA"].process_instance_id = 0
-        script_engine = ProcessInstanceProcessor._script_engine
+        script_engine = ProcessInstanceProcessor._default_script_engine

         result = script_engine._evaluate("a", {"a": 1})
         assert result == 1
@@ -48,7 +48,7 @@ class TestProcessInstanceProcessor(BaseTest):
     ) -> None:
         app.config["THREAD_LOCAL_DATA"].process_model_identifier = "hey"
         app.config["THREAD_LOCAL_DATA"].process_instance_id = 0
-        script_engine = ProcessInstanceProcessor._script_engine
+        script_engine = ProcessInstanceProcessor._default_script_engine
         result = script_engine._evaluate("fact_service(type='norris')", {})
         assert result == "Chuck Norris doesn’t read books. He stares them down until he gets the information he wants."
         app.config["THREAD_LOCAL_DATA"].process_model_identifier = None
@@ -1,3 +1,5 @@
+import re
+
 from flask import Flask
 from spiffworkflow_backend.services.process_model_service import ProcessModelService

@@ -25,3 +27,27 @@ class TestProcessModelService(BaseTest):

         assert process_model.display_name == "new_name"
         assert process_model.primary_process_id == primary_process_id
+
+    def test_can_get_file_contents(
+        self,
+        app: Flask,
+        with_db_and_bpmn_file_cleanup: None,
+    ) -> None:
+        process_model = load_test_spec(
+            "test_group/hello_world",
+            bpmn_file_name="hello_world.bpmn",
+            process_model_source_directory="hello_world",
+        )
+        assert process_model.display_name == "test_group/hello_world"
+
+        primary_process_id = process_model.primary_process_id
+        assert primary_process_id == "Process_HelloWorld"
+
+        process_models = ProcessModelService.get_process_models(recursive=True, include_files=True)
+        assert len(process_models) == 1
+
+        pm_string = app.json.dumps(process_models[0])
+        pm_dict = app.json.loads(pm_string)
+        assert len(pm_dict["files"]) == 1
+        file = pm_dict["files"][0]
+        assert re.search("hello", file["file_contents"]) is not None
@@ -17,6 +17,7 @@ import ErrorDisplay from './components/ErrorDisplay';
 import APIErrorProvider from './contexts/APIErrorContext';
 import ScrollToTop from './components/ScrollToTop';
 import EditorRoutes from './routes/EditorRoutes';
+import Extension from './routes/Extension';

 export default function App() {
   if (!UserService.isLoggedIn()) {
@@ -43,6 +44,14 @@ export default function App() {
             <Route path="/tasks/*" element={<HomePageRoutes />} />
             <Route path="/admin/*" element={<AdminRoutes />} />
             <Route path="/editor/*" element={<EditorRoutes />} />
+            <Route
+              path="/extensions/:process_model"
+              element={<Extension />}
+            />
+            <Route
+              path="/extensions/:process_model/:extension_route"
+              element={<Extension />}
+            />
           </Routes>
         </ErrorBoundary>
       </Content>
@@ -24,11 +24,18 @@ import { Can } from '@casl/react';
 import logo from '../logo.svg';
 import UserService from '../services/UserService';
 import { useUriListForPermissions } from '../hooks/UriListForPermissions';
-import { PermissionsToCheck } from '../interfaces';
+import {
+  PermissionsToCheck,
+  ProcessModel,
+  ProcessFile,
+  ExtensionUiSchema,
+  UiSchemaNavItem,
+} from '../interfaces';
 import { usePermissionFetcher } from '../hooks/PermissionService';
-import { UnauthenticatedError } from '../services/HttpService';
+import HttpService, { UnauthenticatedError } from '../services/HttpService';
 import { DOCUMENTATION_URL, SPIFF_ENVIRONMENT } from '../config';
 import appVersionInfo from '../helpers/appVersionInfo';
+import { slugifyString } from '../helpers';

 export default function NavigationBar() {
   const handleLogout = () => {
@@ -41,6 +48,9 @@ export default function NavigationBar() {

   const location = useLocation();
   const [activeKey, setActiveKey] = useState<string>('');
+  const [extensionNavigationItems, setExtensionNavigationItems] = useState<
+    UiSchemaNavItem[] | null
+  >(null);

   const { targetUris } = useUriListForPermissions();

@@ -87,6 +97,41 @@ export default function NavigationBar() {
     setActiveKey(newActiveKey);
   }, [location]);

+  useEffect(() => {
+    const processExtensionResult = (processModels: ProcessModel[]) => {
+      const eni: UiSchemaNavItem[] = processModels
+        .map((processModel: ProcessModel) => {
+          const extensionUiSchemaFile = processModel.files.find(
+            (file: ProcessFile) => file.name === 'extension_uischema.json'
+          );
+          if (extensionUiSchemaFile && extensionUiSchemaFile.file_contents) {
+            try {
+              const extensionUiSchema: ExtensionUiSchema = JSON.parse(
+                extensionUiSchemaFile.file_contents
+              );
+              if (extensionUiSchema.navigation_items) {
+                return extensionUiSchema.navigation_items;
+              }
+            } catch (jsonParseError: any) {
+              console.error(
+                `Unable to get navigation items for ${processModel.id}`
+              );
+            }
+          }
+          return [] as UiSchemaNavItem[];
+        })
+        .flat();
+      if (eni) {
+        setExtensionNavigationItems(eni);
+      }
+    };
+
+    HttpService.makeCallToBackend({
+      path: targetUris.extensionListPath,
+      successCallback: processExtensionResult,
+    });
+  }, [targetUris.extensionListPath]);
+
   const isActivePage = (menuItemPath: string) => {
     return activeKey === menuItemPath;
   };
@@ -201,6 +246,29 @@ export default function NavigationBar() {
     );
   };

+  const extensionNavigationElements = () => {
+    if (!extensionNavigationItems) {
+      return null;
+    }
+
+    return extensionNavigationItems.map((navItem: UiSchemaNavItem) => {
+      const navItemRoute = `/extensions${navItem.route}`;
+      const regexp = new RegExp(`^${navItemRoute}`);
+      if (regexp.test(location.pathname)) {
+        setActiveKey(navItemRoute);
+      }
+      return (
+        <HeaderMenuItem
+          href={navItemRoute}
+          isCurrentPage={isActivePage(navItemRoute)}
+          data-qa={`extension-${slugifyString(navItem.label)}`}
+        >
+          {navItem.label}
+        </HeaderMenuItem>
+      );
+    });
+  };
+
   const headerMenuItems = () => {
     if (!UserService.isLoggedIn()) {
       return null;
@@ -240,6 +308,7 @@ export default function NavigationBar() {
           </HeaderMenuItem>
         </Can>
         {configurationElement()}
+        {extensionNavigationElements()}
       </>
     );
   };
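Language aside, the new `useEffect` boils down to: for each process model returned by `/v1.0/extensions`, parse its `extension_uischema.json` and collect any `navigation_items`, skipping models whose file is missing or unparseable. The same logic as a standalone Python sketch; the dict shapes mirror the TypeScript interfaces rather than any backend API:

```python
import json
from typing import Any


def navigation_items_for(process_models: list[dict[str, Any]]) -> list[dict[str, str]]:
    items: list[dict[str, str]] = []
    for process_model in process_models:
        for file in process_model.get("files", []):
            if file.get("name") != "extension_uischema.json" or not file.get("file_contents"):
                continue
            try:
                ui_schema = json.loads(file["file_contents"])
            except json.JSONDecodeError:
                # mirrors the console.error branch: skip this model
                continue
            items.extend(ui_schema.get("navigation_items", []))
    return items
```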
@@ -8,6 +8,8 @@ export const useUriListForPermissions = () => {
     authenticationListPath: `/v1.0/authentications`,
     messageInstanceListPath: '/v1.0/messages',
     dataStoreListPath: '/v1.0/data-stores',
+    extensionListPath: '/v1.0/extensions',
+    extensionPath: `/v1.0/extensions/${params.process_model}`,
     processGroupListPath: '/v1.0/process-groups',
     processGroupShowPath: `/v1.0/process-groups/${params.process_group_id}`,
     processInstanceActionPath: `/v1.0/process-instances/${params.process_model_id}/${params.process_instance_id}`,
@@ -424,3 +424,23 @@ export interface DataStore {
   name: string;
   type: string;
 }
+
+export interface UiSchemaNavItem {
+  label: string;
+  route: string;
+}
+export interface UiSchemaPageDefinition {
+  header: string;
+  api: string;
+
+  form_schema_filename?: any;
+  form_ui_schema_filename?: any;
+  markdown_instruction_filename?: string;
+}
+export interface UiSchemaRoute {
+  [key: string]: UiSchemaPageDefinition;
+}
+export interface ExtensionUiSchema {
+  navigation_items?: UiSchemaNavItem[];
+  routes: UiSchemaRoute;
+}
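For reference, a hypothetical `extension_uischema.json` that satisfies these interfaces; the labels, routes, and filenames are illustrative, not taken from the repository:

```python
import json

# shaped to match ExtensionUiSchema / UiSchemaRoute / UiSchemaPageDefinition above
example_extension_uischema = {
    "navigation_items": [
        {"label": "My Extension", "route": "/extensions:my-extension"}
    ],
    "routes": {
        "/extensions:my-extension": {
            "header": "My Extension",
            "api": "extensions:my-extension",
            "form_schema_filename": "form-schema.json",
            "form_ui_schema_filename": "form-ui-schema.json",
            "markdown_instruction_filename": "instructions.md",
        }
    },
}
print(json.dumps(example_extension_uischema, indent=2))
```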
@@ -0,0 +1,170 @@
+import { useEffect, useState } from 'react';
+import MDEditor from '@uiw/react-md-editor';
+import { useParams } from 'react-router-dom';
+import validator from '@rjsf/validator-ajv8';
+import { Editor } from '@monaco-editor/react';
+import { Form } from '../rjsf/carbon_theme';
+import { useUriListForPermissions } from '../hooks/UriListForPermissions';
+import {
+  ExtensionUiSchema,
+  ProcessFile,
+  ProcessModel,
+  UiSchemaPageDefinition,
+} from '../interfaces';
+import HttpService from '../services/HttpService';
+import useAPIError from '../hooks/UseApiError';
+import { recursivelyChangeNullAndUndefined } from '../helpers';
+
+export default function Extension() {
+  const { targetUris } = useUriListForPermissions();
+  const params = useParams();
+
+  const [_processModel, setProcessModel] = useState<ProcessModel | null>(null);
+  const [formData, setFormData] = useState<any>(null);
+  const [formButtonsDisabled, setFormButtonsDisabled] = useState(false);
+  const [processedTaskData, setProcessedTaskData] = useState<any>(null);
+  const [filesByName] = useState<{
+    [key: string]: ProcessFile;
+  }>({});
+  const [uiSchemaPageDefinition, setUiSchemaPageDefinition] =
+    useState<UiSchemaPageDefinition | null>(null);
+
+  const { addError, removeError } = useAPIError();
+
+  useEffect(() => {
+    const processExtensionResult = (pm: ProcessModel) => {
+      setProcessModel(pm);
+      let extensionUiSchemaFile: ProcessFile | null = null;
+      pm.files.forEach((file: ProcessFile) => {
+        filesByName[file.name] = file;
+        if (file.name === 'extension_uischema.json') {
+          extensionUiSchemaFile = file;
+        }
+      });
+
+      // typescript is really confused by extensionUiSchemaFile so force it since we are properly checking
+      if (
+        extensionUiSchemaFile &&
+        (extensionUiSchemaFile as ProcessFile).file_contents
+      ) {
+        const extensionUiSchema: ExtensionUiSchema = JSON.parse(
+          (extensionUiSchemaFile as any).file_contents
+        );
+
+        let routeIdentifier = `/${params.process_model}`;
+        if (params.extension_route) {
+          routeIdentifier = `${routeIdentifier}/${params.extension_route}`;
+        }
+        setUiSchemaPageDefinition(extensionUiSchema.routes[routeIdentifier]);
+      }
+    };
+
+    HttpService.makeCallToBackend({
+      path: targetUris.extensionPath,
+      successCallback: processExtensionResult,
+    });
+  }, [targetUris.extensionPath, params, filesByName]);
+
+  const processSubmitResult = (result: any) => {
+    setProcessedTaskData(result);
+    setFormButtonsDisabled(false);
+  };
+
+  const handleFormSubmit = (formObject: any, _event: any) => {
+    if (formButtonsDisabled) {
+      return;
+    }
+
+    const dataToSubmit = formObject?.formData;
+
+    setFormButtonsDisabled(true);
+    setProcessedTaskData(null);
+    removeError();
+    delete dataToSubmit.isManualTask;
+
+    let apiPath = targetUris.extensionPath;
+    if (uiSchemaPageDefinition && uiSchemaPageDefinition.api) {
+      apiPath = `${targetUris.extensionListPath}/${uiSchemaPageDefinition.api}`;
+    }
+
+    // NOTE: rjsf sets blank values to undefined and JSON.stringify removes keys with undefined values
+    // so we convert undefined values to null recursively so that we can unset values in form fields
+    recursivelyChangeNullAndUndefined(dataToSubmit, null);
+
+    HttpService.makeCallToBackend({
+      path: apiPath,
+      successCallback: processSubmitResult,
+      failureCallback: (error: any) => {
+        addError(error);
+        setFormButtonsDisabled(false);
+      },
+      httpMethod: 'POST',
+      postBody: { extension_input: dataToSubmit },
+    });
+  };
+
+  if (uiSchemaPageDefinition) {
+    const componentsToDisplay = [<h1>{uiSchemaPageDefinition.header}</h1>];
+
+    if (uiSchemaPageDefinition.markdown_instruction_filename) {
+      const markdownFile =
+        filesByName[uiSchemaPageDefinition.markdown_instruction_filename];
+
+      if (markdownFile.file_contents) {
+        componentsToDisplay.push(
+          <div data-color-mode="light">
+            <MDEditor.Markdown
+              linkTarget="_blank"
+              source={markdownFile.file_contents}
+            />
+          </div>
+        );
+      }
+    }
+
+    if (uiSchemaPageDefinition.form_schema_filename) {
+      const formSchemaFile =
+        filesByName[uiSchemaPageDefinition.form_schema_filename];
+      const formUiSchemaFile =
+        filesByName[uiSchemaPageDefinition.form_ui_schema_filename];
+      if (formSchemaFile.file_contents && formUiSchemaFile.file_contents) {
+        componentsToDisplay.push(
+          <Form
+            id="form-to-submit"
+            formData={formData}
+            onChange={(obj: any) => {
+              setFormData(obj.formData);
+            }}
+            disabled={formButtonsDisabled}
+            onSubmit={handleFormSubmit}
+            schema={JSON.parse(formSchemaFile.file_contents)}
+            uiSchema={JSON.parse(formUiSchemaFile.file_contents)}
+            validator={validator}
+            omitExtraData
+          />
+        );
+      }
+    }
+    if (processedTaskData) {
+      componentsToDisplay.push(
+        <>
+          <h2 className="with-top-margin">Result:</h2>
+          <Editor
+            className="with-top-margin"
+            height="30rem"
+            width="auto"
+            defaultLanguage="json"
+            defaultValue={JSON.stringify(processedTaskData, null, 2)}
+            options={{
+              readOnly: true,
+              scrollBeyondLastLine: true,
+              minimap: { enabled: true },
+            }}
+          />
+        </>
+      );
+    }
+    return <div className="fixed-width-container">{componentsToDisplay}</div>;
+  }
+  return null;
+}