Feature/allow markdown in extension results (#435)

* allow markdown in extension results w/ burnettk

* fixed tests

* moved our rjsf form to a component so extensions can also use it w/ burnettk

* added ability to create extensions that can download files w/ burnettk

* added test for extensions-get-data endpoint w/ burnettk

* make user optional when getting process instance reports

* added extensions-get-data to the elevated perm macro and raise an error if a user is not specified when one is needed to run a report

* fixed typeguard test

* push extensions branch

---------

Co-authored-by: jasquat <jasquat@users.noreply.github.com>
Co-authored-by: burnettk <burnettk@users.noreply.github.com>
jasquat authored 2023-08-21 12:29:25 -04:00; committed by GitHub
parent 2b363f4783
commit 17309fb7fd
19 changed files with 576 additions and 326 deletions

@ -34,6 +34,7 @@ on:
- feature/event-payloads-part-2
- feature/event-payload-migration-fix
- spiffdemo
- feature/allow-markdown-in-extension-results
jobs:
create_frontend_docker_image:

@ -11,6 +11,7 @@ script_dir="$( cd -- "$(dirname "$0")" >/dev/null 2>&1 ; pwd -P )"
supported_session_types=$(grep -E '^(el)?if.*\<session_type\>.*==' "$0" | sed -E 's/.*== "([^"]+)".*/\1/' | tr '\n' ' ')
session_type="${1:-}"
shift
if [[ -z "${session_type}" ]] || ! grep -qE "\<${session_type}\>" <<<"$supported_session_types"; then
if [[ -n "$session_type" ]]; then
>&2 echo "ERROR: Given session type is not supported - ${session_type}"
@ -57,11 +58,11 @@ poetry install
if [[ "${session_type}" == "tests" ]]; then
setup_db_for_ci
poetry run coverage run --parallel -m pytest
poetry run coverage run --parallel -m pytest "$@"
elif [[ "${session_type}" == "typeguard" ]]; then
setup_db_for_ci
RUN_TYPEGUARD=true poetry run pytest
RUN_TYPEGUARD=true poetry run pytest "$@"
elif [[ "${session_type}" == "mypy" ]]; then
poetry run mypy src tests

@ -9,21 +9,24 @@ set -o errtrace -o errexit -o nounset -o pipefail
port="${SPIFFWORKFLOW_BACKEND_PORT:-7000}"
arg="${1:-}"
if [[ "$arg" == "acceptance" ]]; then
proces_model_dir="${1:-}"
if [[ "$proces_model_dir" == "acceptance" ]]; then
export SPIFFWORKFLOW_BACKEND_LOAD_FIXTURE_DATA=true
export SPIFFWORKFLOW_BACKEND_PERMISSIONS_FILE_NAME=acceptance_tests.yml
elif [[ "$arg" == "localopenid" ]]; then
SPIFFWORKFLOW_BACKEND_BPMN_SPEC_ABSOLUTE_DIR=$(./bin/find_sample_process_models)
elif [[ "$proces_model_dir" == "localopenid" ]]; then
export SPIFFWORKFLOW_BACKEND_OPEN_ID_SERVER_URL="http://localhost:$port/openid"
export SPIFFWORKFLOW_BACKEND_PERMISSIONS_FILE_NAME="example.yml"
SPIFFWORKFLOW_BACKEND_BPMN_SPEC_ABSOLUTE_DIR=$(./bin/find_sample_process_models)
else
SPIFFWORKFLOW_BACKEND_BPMN_SPEC_ABSOLUTE_DIR="$proces_model_dir"
fi
export SPIFFWORKFLOW_BACKEND_BPMN_SPEC_ABSOLUTE_DIR
if [[ -z "${SPIFFWORKFLOW_BACKEND_ENV:-}" ]]; then
export SPIFFWORKFLOW_BACKEND_ENV=local_development
fi
SPIFFWORKFLOW_BACKEND_BPMN_SPEC_ABSOLUTE_DIR=$(./bin/find_sample_process_models)
export SPIFFWORKFLOW_BACKEND_BPMN_SPEC_ABSOLUTE_DIR
# export FLASK_SESSION_SECRET_KEY="super_secret_key"
export FLASK_SESSION_SECRET_KEY="e7711a3ba96c46c68e084a86952de16f"

@ -848,6 +848,28 @@ paths:
schema:
$ref: "#/components/schemas/Workflow"
/extensions-get-data/{query_params}:
parameters:
- name: query_params
in: path
required: true
description: The params required to run the extension. The first parameter must be the modified_process_model_identifier of the extension to run.
schema:
type: string
format: path
get:
operationId: spiffworkflow_backend.routes.extensions_controller.extension_get_data
summary: Returns the metadata for a given extension
tags:
- Extensions
responses:
"200":
description: Resulting extension metadata
content:
application/json:
schema:
$ref: "#/components/schemas/Workflow"
/process-models/{modified_process_model_identifier}/script-unit-tests:
parameters:
- name: modified_process_model_identifier
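
For reference, a minimal sketch of how a client might call the new endpoint; the host, token, and extension identifier below are illustrative assumptions. The first path segment after /extensions-get-data/ must be the modified process model identifier of the extension, and any further segments are passed through to it as additional arguments.

import requests  # assumes the requests package is installed

base_url = "http://localhost:7000/v1.0"  # illustrative backend URL
modified_process_model_identifier = "extensions:my-data-export"  # hypothetical extension
response = requests.get(
    f"{base_url}/extensions-get-data/{modified_process_model_identifier}/2023",
    headers={"Authorization": "Bearer YOUR_ACCESS_TOKEN"},  # or the access_token cookie
    timeout=30,
)
# The mimetype and headers of the response are driven by the extension's response_schema.json.
print(response.status_code, response.headers.get("Content-Type"))
print(response.text)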

@ -17,5 +17,5 @@ SPIFFWORKFLOW_BACKEND_GIT_USERNAME = "sartography-automated-committer"
SPIFFWORKFLOW_BACKEND_GIT_USER_EMAIL = f"{SPIFFWORKFLOW_BACKEND_GIT_USERNAME}@users.noreply.github.com"
SPIFFWORKFLOW_BACKEND_EXTENSIONS_API_ENABLED = (
environ.get("SPIFFWORKFLOW_BACKEND_EXTENSIONS_API_ENABLED", default="false")
environ.get("SPIFFWORKFLOW_BACKEND_EXTENSIONS_API_ENABLED", default="true")
) == "true"

@ -1,100 +1,58 @@
import json
from typing import Any
import flask.wrappers
from flask import current_app
from flask import g
from flask import jsonify
from flask import make_response
from flask.wrappers import Response
from spiffworkflow_backend.exceptions.api_error import ApiError
from spiffworkflow_backend.models.process_instance import ProcessInstanceModel
from spiffworkflow_backend.models.process_instance import ProcessInstanceStatus
from spiffworkflow_backend.models.process_model import ProcessModelInfo
from spiffworkflow_backend.routes.process_api_blueprint import _get_process_model
from spiffworkflow_backend.routes.process_api_blueprint import _un_modify_modified_process_model_id
from spiffworkflow_backend.services.error_handling_service import ErrorHandlingService
from spiffworkflow_backend.services.file_system_service import FileSystemService
from spiffworkflow_backend.services.jinja_service import JinjaService
from spiffworkflow_backend.services.process_instance_processor import CustomBpmnScriptEngine
from spiffworkflow_backend.services.process_instance_processor import ProcessInstanceProcessor
from spiffworkflow_backend.services.process_instance_queue_service import ProcessInstanceIsAlreadyLockedError
from spiffworkflow_backend.services.process_instance_queue_service import ProcessInstanceIsNotEnqueuedError
from spiffworkflow_backend.services.process_model_service import ProcessModelService
from spiffworkflow_backend.services.spec_file_service import SpecFileService
from spiffworkflow_backend.services.workflow_execution_service import WorkflowExecutionServiceError
def extension_run(
modified_process_model_identifier: str,
body: dict | None = None,
) -> flask.wrappers.Response:
_raise_unless_extensions_api_enabled()
_, result = _run_extension(modified_process_model_identifier, body)
return make_response(jsonify(result), 200)
process_model_identifier = _get_process_model_identifier(modified_process_model_identifier)
try:
process_model = _get_process_model(process_model_identifier)
except ApiError as ex:
if ex.error_code == "process_model_cannot_be_found":
raise ApiError(
error_code="invalid_process_model_extension",
message=(
f"Process Model '{process_model_identifier}' cannot be run as an extension. It must be in the"
" correct Process Group:"
f" {current_app.config['SPIFFWORKFLOW_BACKEND_EXTENSIONS_PROCESS_MODEL_PREFIX']}"
),
status_code=403,
) from ex
raise ex
if process_model.primary_file_name is None:
raise ApiError(
error_code="process_model_missing_primary_bpmn_file",
message=(
f"Process Model '{process_model_identifier}' does not have a primary"
" bpmn file. One must be set in order to instantiate this model."
),
status_code=400,
)
process_instance = ProcessInstanceModel(
status=ProcessInstanceStatus.not_started.value,
process_initiator_id=g.user.id,
process_model_identifier=process_model.id,
process_model_display_name=process_model.display_name,
persistence_level="none",
def extension_get_data(
query_params: str,
) -> flask.wrappers.Response:
modified_process_model_identifier, *additional_args = query_params.split("/")
process_model, result = _run_extension(
modified_process_model_identifier, {"extension_input": {"additional_args": additional_args}}
)
processor = None
try:
processor = ProcessInstanceProcessor(
process_instance, script_engine=CustomBpmnScriptEngine(use_restricted_script_engine=False)
)
if body and "extension_input" in body:
processor.do_engine_steps(save=False, execution_strategy_name="one_at_a_time")
next_task = processor.next_task()
next_task.update_data(body["extension_input"])
processor.do_engine_steps(save=False, execution_strategy_name="greedy")
except (
ApiError,
ProcessInstanceIsNotEnqueuedError,
ProcessInstanceIsAlreadyLockedError,
) as e:
ErrorHandlingService.handle_error(process_instance, e)
raise e
except Exception as e:
ErrorHandlingService.handle_error(process_instance, e)
# FIXME: this is going to point someone to the wrong task - it's misinformation for errors in sub-processes.
# we need to recurse through all last tasks if the last task is a call activity or subprocess.
if processor is not None:
task = processor.bpmn_process_instance.last_task
raise ApiError.from_task(
error_code="unknown_exception",
message=f"An unknown error occurred. Original error: {e}",
status_code=400,
task=task,
) from e
raise e
task_data = {}
if processor is not None:
task_data = processor.get_data()
return make_response(jsonify(task_data), 200)
response_schema = json.loads(FileSystemService.get_data(process_model, "response_schema.json"))
headers = response_schema.get("headers", None)
mimetype = response_schema.get("mimetype", None)
data_extraction_path = response_schema.get("data_extraction_path", "").split(".")
contents = _extract_data(data_extraction_path, result["task_data"])
response = Response(
str(contents),
mimetype=mimetype,
headers=headers,
status=200,
)
return response
def extension_list() -> flask.wrappers.Response:
@ -124,6 +82,105 @@ def extension_show(
return make_response(jsonify(process_model), 200)
def _extract_data(keys: list[str], data: Any) -> Any:
if len(keys) > 0 and isinstance(data, dict) and keys[0] in data:
return _extract_data(keys[1:], data[keys[0]])
return data
def _run_extension(
modified_process_model_identifier: str,
body: dict | None = None,
) -> tuple[ProcessModelInfo, dict]:
_raise_unless_extensions_api_enabled()
process_model_identifier = _get_process_model_identifier(modified_process_model_identifier)
try:
process_model = _get_process_model(process_model_identifier)
except ApiError as ex:
if ex.error_code == "process_model_cannot_be_found":
raise ApiError(
error_code="invalid_process_model_extension",
message=(
f"Process Model '{process_model_identifier}' cannot be run as an extension. It must be in the"
" correct Process Group:"
f" {current_app.config['SPIFFWORKFLOW_BACKEND_EXTENSIONS_PROCESS_MODEL_PREFIX']}"
),
status_code=403,
) from ex
raise ex
if process_model.primary_file_name is None:
raise ApiError(
error_code="process_model_missing_primary_bpmn_file",
message=(
f"Process Model '{process_model_identifier}' does not have a primary"
" bpmn file. One must be set in order to instantiate this model."
),
status_code=400,
)
ui_schema_page_definition = None
if body and "ui_schema_page_definition" in body:
ui_schema_page_definition = body["ui_schema_page_definition"]
process_instance = ProcessInstanceModel(
status=ProcessInstanceStatus.not_started.value,
process_initiator_id=g.user.id,
process_model_identifier=process_model.id,
process_model_display_name=process_model.display_name,
persistence_level="none",
)
processor = None
try:
processor = ProcessInstanceProcessor(
process_instance, script_engine=CustomBpmnScriptEngine(use_restricted_script_engine=False)
)
if body and "extension_input" in body:
processor.do_engine_steps(save=False, execution_strategy_name="one_at_a_time")
next_task = processor.next_task()
next_task.update_data(body["extension_input"])
processor.do_engine_steps(save=False, execution_strategy_name="greedy")
except (
ApiError,
ProcessInstanceIsNotEnqueuedError,
ProcessInstanceIsAlreadyLockedError,
WorkflowExecutionServiceError,
) as e:
ErrorHandlingService.handle_error(process_instance, e)
raise e
except Exception as e:
ErrorHandlingService.handle_error(process_instance, e)
# FIXME: this is going to point someone to the wrong task - it's misinformation for errors in sub-processes.
# we need to recurse through all last tasks if the last task is a call activity or subprocess.
if processor is not None:
task = processor.bpmn_process_instance.last_task
raise ApiError.from_task(
error_code="unknown_exception",
message=f"An unknown error occurred. Original error: {e}",
status_code=400,
task=task,
) from e
raise e
task_data = {}
if processor is not None:
task_data = processor.get_data()
result: dict[str, Any] = {"task_data": task_data}
if ui_schema_page_definition:
if "results_markdown_filename" in ui_schema_page_definition:
file_contents = SpecFileService.get_data(
process_model, ui_schema_page_definition["results_markdown_filename"]
).decode("utf-8")
form_contents = JinjaService.render_jinja_template(file_contents, task_data=task_data)
result["rendered_results_markdown"] = form_contents
return (process_model, result)
def _raise_unless_extensions_api_enabled() -> None:
if not current_app.config["SPIFFWORKFLOW_BACKEND_EXTENSIONS_API_ENABLED"]:
raise ApiError(
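
To make the new result shape concrete, here is a rough sketch of what the frontend now posts to the extension run endpoint and what comes back; the field values are illustrative. Task data is nested under task_data, and rendered_results_markdown only appears when the posted ui_schema_page_definition names a results_markdown_filename (that file is rendered through JinjaService with the final task data).

# Illustrative request body posted by the frontend when running an extension.
post_body = {
    "extension_input": {"report_year": 2023},  # hypothetical form data
    "ui_schema_page_definition": {
        "results_markdown_filename": "results.md",  # hypothetical markdown file in the process model
    },
}

# Illustrative response body from extension_run.
response_body = {
    "task_data": {"report_year": 2023, "row_count": 42},
    "rendered_results_markdown": "## 42 rows exported",  # present only when markdown is configured
}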

@ -851,7 +851,7 @@ def _prepare_form_data(form_file: str, task_model: TaskModel, process_model: Pro
file_contents = SpecFileService.get_data(process_model, form_file).decode("utf-8")
try:
form_contents = JinjaService.render_jinja_template(file_contents, task_model)
form_contents = JinjaService.render_jinja_template(file_contents, task=task_model)
try:
# form_contents is a str
hot_dict: dict = json.loads(form_contents)

@ -59,7 +59,9 @@ def verify_token(token: str | None = None, force_run: bool | None = False) -> No
token = request.headers["Authorization"].removeprefix("Bearer ")
if not token and "access_token" in request.cookies:
if request.path.startswith(f"{V1_API_PATH_PREFIX}/process-data-file-download/"):
if request.path.startswith(f"{V1_API_PATH_PREFIX}/process-data-file-download/") or request.path.startswith(
f"{V1_API_PATH_PREFIX}/extensions-get-data/"
):
token = request.cookies["access_token"]
# This should never be set here but just in case

@ -549,6 +549,7 @@ class AuthorizationService:
permissions_to_assign.append(PermissionToAssign(permission="create", target_uri="/send-event/*"))
permissions_to_assign.append(PermissionToAssign(permission="create", target_uri="/task-complete/*"))
permissions_to_assign.append(PermissionToAssign(permission="create", target_uri="/extensions/*"))
permissions_to_assign.append(PermissionToAssign(permission="read", target_uri="/extensions-get-data/*"))
# read comes from PG and PM ALL permissions as well
permissions_to_assign.append(PermissionToAssign(permission="create", target_uri="/task-assign/*"))

@ -56,17 +56,26 @@ class JinjaService:
return ""
@classmethod
def render_jinja_template(cls, unprocessed_template: str, task: TaskModel | SpiffTask) -> str:
def render_jinja_template(
cls, unprocessed_template: str, task: TaskModel | SpiffTask | None = None, task_data: dict | None = None
) -> str:
jinja_environment = jinja2.Environment(autoescape=True, lstrip_blocks=True, trim_blocks=True)
jinja_environment.filters.update(JinjaHelpers.get_helper_mapping())
try:
template = jinja_environment.from_string(unprocessed_template)
if isinstance(task, TaskModel):
if task_data is not None:
data = task_data
elif isinstance(task, TaskModel):
data = task.get_data()
else:
elif task is not None:
data = task.data
else:
raise ValueError("No task or task data provided to render_jinja_template")
return template.render(**data, **JinjaHelpers.get_helper_mapping())
except jinja2.exceptions.TemplateError as template_error:
if task is None:
raise template_error
if isinstance(task, TaskModel):
wfe = TaskModelError(str(template_error), task_model=task, exception=template_error)
else:
@ -77,6 +86,8 @@ class JinjaService:
wfe.add_note("Jinja2 template errors can happen when trying to display task data")
raise wfe from template_error
except Exception as error:
if task is None:
raise error
_type, _value, tb = exc_info()
if isinstance(task, TaskModel):
wfe = TaskModelError(str(error), task_model=task, exception=error)
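
A quick sketch of the new task_data path through render_jinja_template (the template and data are made up): an explicit task_data dict takes precedence over any task, and templates can now be rendered without a task at all, which is what the extension markdown rendering relies on.

from spiffworkflow_backend.services.jinja_service import JinjaService

# No TaskModel or SpiffTask is needed when task_data is supplied directly.
markdown = JinjaService.render_jinja_template(
    "## {{ row_count }} rows exported",  # illustrative template
    task_data={"row_count": 42},
)
assert markdown == "## 42 rows exported"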

@ -35,6 +35,10 @@ class ProcessInstanceReportMetadataInvalidError(Exception):
pass
class ProcessInstanceReportCannotBeRunError(Exception):
pass
class ProcessInstanceReportService:
@classmethod
def system_metadata_map(cls, metadata_key: str) -> ReportMetadata | None:
@ -369,7 +373,7 @@ class ProcessInstanceReportService:
def run_process_instance_report(
cls,
report_metadata: ReportMetadata,
user: UserModel,
user: UserModel | None = None,
page: int = 1,
per_page: int = 100,
) -> dict:
@ -436,6 +440,10 @@ class ProcessInstanceReportService:
and not instances_with_tasks_waiting_for_me
and with_relation_to_me is True
):
if user is None:
raise ProcessInstanceReportCannotBeRunError(
"A user must be specified to run report with with_relation_to_me"
)
process_instance_query = process_instance_query.outerjoin(HumanTaskModel).outerjoin(
HumanTaskUserModel,
and_(
@ -460,6 +468,10 @@ class ProcessInstanceReportService:
human_task_already_joined = False
if instances_with_tasks_completed_by_me is True:
if user is None:
raise ProcessInstanceReportCannotBeRunError(
"A user must be specified to run report with instances_with_tasks_completed_by_me."
)
process_instance_query = process_instance_query.filter(
ProcessInstanceModel.process_initiator_id != user.id
)
@ -475,6 +487,10 @@ class ProcessInstanceReportService:
# this excludes some tasks you can complete, because that's the way the requirements were described.
# if it's assigned to one of your groups, it does not get returned by this query.
if instances_with_tasks_waiting_for_me is True:
if user is None:
raise ProcessInstanceReportCannotBeRunError(
"A user must be specified to run report with instances_with_tasks_waiting_for_me."
)
process_instance_query = process_instance_query.filter(
ProcessInstanceModel.process_initiator_id != user.id
)
@ -493,6 +509,10 @@ class ProcessInstanceReportService:
restrict_human_tasks_to_user = user
if user_group_identifier is not None:
if user is None:
raise ProcessInstanceReportCannotBeRunError(
"A user must be specified to run report with a group identifier."
)
group_model_join_conditions = [GroupModel.id == HumanTaskModel.lane_assignment_id]
if user_group_identifier:
group_model_join_conditions.append(GroupModel.identifier == user_group_identifier)
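
A rough sketch of the new optional-user behaviour; the report_metadata shape below is an assumption for illustration only. Passing user=None is now allowed, but any user-scoped filter raises ProcessInstanceReportCannotBeRunError instead of failing obscurely.

from spiffworkflow_backend.services.process_instance_report_service import (
    ProcessInstanceReportCannotBeRunError,
    ProcessInstanceReportService,
)

# Hypothetical report metadata asking for instances with tasks completed by "me".
report_metadata = {
    "columns": [],
    "filter_by": [{"field_name": "instances_with_tasks_completed_by_me", "field_value": True}],
    "order_by": [],
}

try:
    ProcessInstanceReportService.run_process_instance_report(
        report_metadata=report_metadata,  # type: ignore[arg-type]
        user=None,  # fine when no user-scoped filter is requested
    )
except ProcessInstanceReportCannotBeRunError as error:
    # this filter cannot be evaluated without a user
    print(error)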

@ -0,0 +1,5 @@
{
"headers": {"Content-disposition": "attachment; filename=metadata_export.csv"},
"mimetype": "text/csv",
"data_extraction_path": "pi_json.id"
}
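
The data_extraction_path in this file is a dot-separated path into the final task data. A standalone sketch of the lookup it drives, mirroring _extract_data above with made-up task data:

from typing import Any


def extract_data(keys: list[str], data: Any) -> Any:
    # walk the task data one key at a time, stopping at the first missing key
    if len(keys) > 0 and isinstance(data, dict) and keys[0] in data:
        return extract_data(keys[1:], data[keys[0]])
    return data


task_data = {"pi_json": {"id": 42, "status": "complete"}}  # illustrative task data
assert extract_data("pi_json.id".split("."), task_data) == 42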

@ -33,12 +33,14 @@ class TestExtensionsController(BaseTest):
)
expected_task_data = {
"Mike": "Awesome",
"my_var": "Hello World",
"person": "Kevin",
"validate_only": False,
"wonderfulness": "Very wonderful",
"OUR_AWESOME_INPUT": "the awesome value",
"task_data": {
"Mike": "Awesome",
"my_var": "Hello World",
"person": "Kevin",
"validate_only": False,
"wonderfulness": "Very wonderful",
"OUR_AWESOME_INPUT": "the awesome value",
}
}
assert response.status_code == 200
assert response.json is not None
@ -107,6 +109,7 @@ class TestExtensionsController(BaseTest):
process_group_id="extensions",
process_model_id="script_task_with_import",
bpmn_file_location="script_task_with_import",
bpmn_file_name="script_task_with_import.bpmn",
)
# we need a process instance in the database so the scriptTask can work
@ -118,6 +121,36 @@ class TestExtensionsController(BaseTest):
)
assert response.json is not None
assert "pi_json" in response.json
assert "id" in response.json["pi_json"]
assert re.match(r"^\d+$", str(response.json["pi_json"]["id"]))
assert "task_data" in response.json
task_data = response.json["task_data"]
assert "pi_json" in task_data
assert "id" in task_data["pi_json"]
assert re.match(r"^\d+$", str(task_data["pi_json"]["id"]))
def test_extension_data_get_can_return_proper_response(
self,
app: Flask,
client: FlaskClient,
with_db_and_bpmn_file_cleanup: None,
with_super_admin_user: UserModel,
) -> None:
with self.app_config_mock(app, "SPIFFWORKFLOW_BACKEND_EXTENSIONS_API_ENABLED", True):
process_model = self.create_group_and_model_with_bpmn(
client=client,
user=with_super_admin_user,
process_group_id="extensions",
process_model_id="script_task_with_import",
bpmn_file_location="script_task_with_import",
)
# we need a process instance in the database so the scriptTask can work
self.create_process_instance_from_process_model(process_model, user=with_super_admin_user)
response = client.get(
f"/v1.0/extensions-get-data/{self.modify_process_identifier_for_path_param(process_model.id)}",
headers=self.logged_in_headers(with_super_admin_user),
)
assert response.status_code == 200
assert response.mimetype == "text/csv"
assert response.headers[0] == ("Content-disposition", "attachment; filename=metadata_export.csv")
assert re.match(r"\d+", response.text)

@ -319,8 +319,9 @@ class TestAuthorizationService(BaseTest):
("/can-run-privileged-script/*", "create"),
("/data-stores/*", "read"),
("/debug/*", "create"),
("/extensions/*", "create"),
("/event-error-details/*", "read"),
("/extensions-get-data/*", "read"),
("/extensions/*", "create"),
("/logs/*", "read"),
("/messages", "read"),
("/messages/*", "create"),

@ -0,0 +1,210 @@
import validator from '@rjsf/validator-ajv8';
import { ReactNode } from 'react';
import { Form } from '../rjsf/carbon_theme';
import { DATE_RANGE_DELIMITER } from '../config';
import DateRangePickerWidget from '../rjsf/custom_widgets/DateRangePicker/DateRangePickerWidget';
import TypeaheadWidget from '../rjsf/custom_widgets/TypeaheadWidget/TypeaheadWidget';
type OwnProps = {
id: string;
formData: any;
schema: any;
uiSchema: any;
disabled?: boolean;
onChange?: any;
onSubmit?: any;
children?: ReactNode;
noValidate?: boolean;
};
export default function CustomForm({
id,
formData,
schema,
uiSchema,
disabled = false,
onChange,
onSubmit,
children,
noValidate = false,
}: OwnProps) {
const rjsfWidgets = {
typeahead: TypeaheadWidget,
'date-range': DateRangePickerWidget,
};
const formatDateString = (dateString?: string) => {
let dateObject = new Date();
if (dateString) {
dateObject = new Date(dateString);
}
return dateObject.toISOString().split('T')[0];
};
const checkFieldComparisons = (
formDataToCheck: any,
propertyKey: string,
minimumDateCheck: string,
formattedDateString: string,
errors: any,
jsonSchema: any
) => {
// field format:
// field:[field_name_to_use]
//
// if field is a range:
// field:[field_name_to_use]:[start or end]
//
// defaults to "start" in all cases
const [_, fieldIdentifierToCompareWith, startOrEnd] =
minimumDateCheck.split(':');
if (!(fieldIdentifierToCompareWith in formDataToCheck)) {
errors[propertyKey].addError(
`was supposed to be compared against '${fieldIdentifierToCompareWith}' but it either doesn't have a value or does not exist`
);
return;
}
const rawDateToCompareWith = formDataToCheck[fieldIdentifierToCompareWith];
if (!rawDateToCompareWith) {
errors[propertyKey].addError(
`was supposed to be compared against '${fieldIdentifierToCompareWith}' but that field did not have a value`
);
return;
}
const [startDate, endDate] =
rawDateToCompareWith.split(DATE_RANGE_DELIMITER);
let dateToCompareWith = startDate;
if (startOrEnd && startOrEnd === 'end') {
dateToCompareWith = endDate;
}
if (!dateToCompareWith) {
const errorMessage = `was supposed to be compared against '${[
fieldIdentifierToCompareWith,
startOrEnd,
].join(':')}' but that field did not have a value`;
errors[propertyKey].addError(errorMessage);
return;
}
const dateStringToCompareWith = formatDateString(dateToCompareWith);
if (dateStringToCompareWith > formattedDateString) {
let fieldToCompareWithTitle = fieldIdentifierToCompareWith;
if (
fieldIdentifierToCompareWith in jsonSchema.properties &&
jsonSchema.properties[fieldIdentifierToCompareWith].title
) {
fieldToCompareWithTitle =
jsonSchema.properties[fieldIdentifierToCompareWith].title;
}
errors[propertyKey].addError(
`must be equal to or greater than '${fieldToCompareWithTitle}'`
);
}
};
const checkMinimumDate = (
formDataToCheck: any,
propertyKey: string,
propertyMetadata: any,
errors: any,
jsonSchema: any
) => {
// can be either "today" or another field
let dateString = formDataToCheck[propertyKey];
if (dateString) {
if (typeof dateString === 'string') {
// in the case of date ranges, just take the start date and check that
[dateString] = dateString.split(DATE_RANGE_DELIMITER);
}
const formattedDateString = formatDateString(dateString);
const minimumDateChecks = propertyMetadata.minimumDate.split(',');
minimumDateChecks.forEach((mdc: string) => {
if (mdc === 'today') {
const dateTodayString = formatDateString();
if (dateTodayString > formattedDateString) {
errors[propertyKey].addError('must be today or after');
}
} else if (mdc.startsWith('field:')) {
checkFieldComparisons(
formDataToCheck,
propertyKey,
mdc,
formattedDateString,
errors,
jsonSchema
);
}
});
}
};
const getFieldsWithDateValidations = (
jsonSchema: any,
formDataToCheck: any,
errors: any
// eslint-disable-next-line sonarjs/cognitive-complexity
) => {
// if the jsonSchema has an items attribute then assume the element itself
// doesn't have a custom validation but its children could so use that
const jsonSchemaToUse =
'items' in jsonSchema ? jsonSchema.items : jsonSchema;
if ('properties' in jsonSchemaToUse) {
Object.keys(jsonSchemaToUse.properties).forEach((propertyKey: string) => {
const propertyMetadata = jsonSchemaToUse.properties[propertyKey];
if ('minimumDate' in propertyMetadata) {
checkMinimumDate(
formDataToCheck,
propertyKey,
propertyMetadata,
errors,
jsonSchemaToUse
);
}
// recurse through all nested properties as well
let formDataToSend = formDataToCheck[propertyKey];
if (formDataToSend) {
if (formDataToSend.constructor.name !== 'Array') {
formDataToSend = [formDataToSend];
}
formDataToSend.forEach((item: any, index: number) => {
let errorsToSend = errors[propertyKey];
if (index in errorsToSend) {
errorsToSend = errorsToSend[index];
}
getFieldsWithDateValidations(propertyMetadata, item, errorsToSend);
});
}
});
}
return errors;
};
const customValidate = (formDataToCheck: any, errors: any) => {
return getFieldsWithDateValidations(schema, formDataToCheck, errors);
};
return (
<Form
id={id}
disabled={disabled}
formData={formData}
onChange={onChange}
onSubmit={onSubmit}
schema={schema}
uiSchema={uiSchema}
widgets={rjsfWidgets}
validator={validator}
customValidate={customValidate}
noValidate={noValidate}
omitExtraData
>
{children}
</Form>
);
}
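
The minimumDate convention checked above can be expressed in a form schema like the following, shown as a Python dict for illustration with made-up field names; "today" and "field:<other_field>[:start|:end]" checks can be combined with commas.

# json.dumps(form_schema) would be the contents of a form schema file served to CustomForm.
form_schema = {
    "title": "Vacation request",  # illustrative
    "type": "object",
    "properties": {
        "start_date": {"type": "string", "format": "date", "title": "Start date"},
        "end_date": {
            "type": "string",
            "format": "date",
            "title": "End date",
            # must not be before today and not before the value of the start_date field
            "minimumDate": "today,field:start_date",
        },
    },
}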

@ -437,6 +437,7 @@ export interface UiSchemaPageDefinition {
form_schema_filename?: any;
form_ui_schema_filename?: any;
markdown_instruction_filename?: string;
navigate_to_on_form_submit?: string;
}
export interface UiSchemaRoute {
[key: string]: UiSchemaPageDefinition;
@ -445,3 +446,8 @@ export interface ExtensionUiSchema {
navigation_items?: UiSchemaNavItem[];
routes: UiSchemaRoute;
}
export interface ExtensionPostBody {
extension_input: any;
ui_schema_page_definition?: UiSchemaPageDefinition;
}
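
Putting the pieces together, a page definition can now either render a results markdown file server-side (returned as rendered_results_markdown) or send the browser to the new download endpoint on submit. A sketch of both variants, shown as Python dicts with illustrative filenames and a hypothetical report_year form field:

# Shows results as rendered markdown instead of the raw JSON task data.
markdown_results_page = {
    "form_schema_filename": "report-form-schema.json",
    "form_ui_schema_filename": "report-form-ui-schema.json",
    "results_markdown_filename": "results.md",  # rendered with the final task data
}

# Redirects the browser to
# <backend base URL>/extensions-get-data/<modified_process_model_identifier>/<report_year>
# so the extension can stream a file download; {report_year} is filled from the submitted form data.
download_page = {
    "form_schema_filename": "report-form-schema.json",
    "form_ui_schema_filename": "report-form-ui-schema.json",
    "navigate_to_on_form_submit": "{report_year}",
}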

@ -1,11 +1,10 @@
import { useEffect, useState } from 'react';
import MDEditor from '@uiw/react-md-editor';
import { useParams } from 'react-router-dom';
import validator from '@rjsf/validator-ajv8';
import { Editor } from '@monaco-editor/react';
import { Form } from '../rjsf/carbon_theme';
import { useUriListForPermissions } from '../hooks/UriListForPermissions';
import {
ExtensionPostBody,
ExtensionUiSchema,
ProcessFile,
ProcessModel,
@ -14,7 +13,10 @@ import {
import HttpService from '../services/HttpService';
import useAPIError from '../hooks/UseApiError';
import { recursivelyChangeNullAndUndefined } from '../helpers';
import CustomForm from '../components/CustomForm';
import { BACKEND_BASE_URL } from '../config';
// eslint-disable-next-line sonarjs/cognitive-complexity
export default function Extension() {
const { targetUris } = useUriListForPermissions();
const params = useParams();
@ -23,6 +25,7 @@ export default function Extension() {
const [formData, setFormData] = useState<any>(null);
const [formButtonsDisabled, setFormButtonsDisabled] = useState(false);
const [processedTaskData, setProcessedTaskData] = useState<any>(null);
const [markdownToRender, setMarkdownToRender] = useState<string | null>(null);
const [filesByName] = useState<{
[key: string]: ProcessFile;
}>({});
@ -66,7 +69,10 @@ export default function Extension() {
}, [targetUris.extensionPath, params, filesByName]);
const processSubmitResult = (result: any) => {
setProcessedTaskData(result);
setProcessedTaskData(result.task_data);
if (result.rendered_results_markdown) {
setMarkdownToRender(result.rendered_results_markdown);
}
setFormButtonsDisabled(false);
};
@ -82,25 +88,54 @@ export default function Extension() {
removeError();
delete dataToSubmit.isManualTask;
let apiPath = targetUris.extensionPath;
if (uiSchemaPageDefinition && uiSchemaPageDefinition.api) {
apiPath = `${targetUris.extensionListPath}/${uiSchemaPageDefinition.api}`;
if (
uiSchemaPageDefinition &&
uiSchemaPageDefinition.navigate_to_on_form_submit
) {
let isValid = true;
const optionString =
uiSchemaPageDefinition.navigate_to_on_form_submit.replace(
/{(\w+)}/g,
(_, k) => {
const value = dataToSubmit[k];
if (value === undefined) {
isValid = false;
addError({
message: `Could not find a value for ${k} in form data.`,
});
}
return value;
}
);
if (!isValid) {
return;
}
const url = `${BACKEND_BASE_URL}/extensions-get-data/${params.process_model}/${optionString}`;
window.location.href = url;
setFormButtonsDisabled(false);
} else {
const postBody: ExtensionPostBody = { extension_input: dataToSubmit };
let apiPath = targetUris.extensionPath;
if (uiSchemaPageDefinition && uiSchemaPageDefinition.api) {
apiPath = `${targetUris.extensionListPath}/${uiSchemaPageDefinition.api}`;
postBody.ui_schema_page_definition = uiSchemaPageDefinition;
}
// NOTE: rjsf sets blank values to undefined and JSON.stringify removes keys with undefined values
// so we convert undefined values to null recursively so that we can unset values in form fields
recursivelyChangeNullAndUndefined(dataToSubmit, null);
HttpService.makeCallToBackend({
path: apiPath,
successCallback: processSubmitResult,
failureCallback: (error: any) => {
addError(error);
setFormButtonsDisabled(false);
},
httpMethod: 'POST',
postBody,
});
}
// NOTE: rjsf sets blank values to undefined and JSON.stringify removes keys with undefined values
// so we convert undefined values to null recursively so that we can unset values in form fields
recursivelyChangeNullAndUndefined(dataToSubmit, null);
HttpService.makeCallToBackend({
path: apiPath,
successCallback: processSubmitResult,
failureCallback: (error: any) => {
addError(error);
setFormButtonsDisabled(false);
},
httpMethod: 'POST',
postBody: { extension_input: dataToSubmit },
});
};
if (uiSchemaPageDefinition) {
@ -129,7 +164,7 @@ export default function Extension() {
filesByName[uiSchemaPageDefinition.form_ui_schema_filename];
if (formSchemaFile.file_contents && formUiSchemaFile.file_contents) {
componentsToDisplay.push(
<Form
<CustomForm
id="form-to-submit"
formData={formData}
onChange={(obj: any) => {
@ -139,30 +174,40 @@ export default function Extension() {
onSubmit={handleFormSubmit}
schema={JSON.parse(formSchemaFile.file_contents)}
uiSchema={JSON.parse(formUiSchemaFile.file_contents)}
validator={validator}
omitExtraData
/>
);
}
}
if (processedTaskData) {
componentsToDisplay.push(
<>
<h2 className="with-top-margin">Result:</h2>
<Editor
className="with-top-margin"
height="30rem"
width="auto"
defaultLanguage="json"
defaultValue={JSON.stringify(processedTaskData, null, 2)}
options={{
readOnly: true,
scrollBeyondLastLine: true,
minimap: { enabled: true },
}}
/>
</>
);
if (markdownToRender) {
componentsToDisplay.push(
<div data-color-mode="light" className="with-top-margin">
<MDEditor.Markdown
className="onboarding"
linkTarget="_blank"
source={markdownToRender}
/>
</div>
);
} else {
componentsToDisplay.push(
<>
<h2 className="with-top-margin">Result:</h2>
<Editor
className="with-top-margin"
height="30rem"
width="auto"
defaultLanguage="json"
defaultValue={JSON.stringify(processedTaskData, null, 2)}
options={{
readOnly: true,
scrollBeyondLastLine: true,
minimap: { enabled: true },
}}
/>
</>
);
}
}
return <div className="fixed-width-container">{componentsToDisplay}</div>;
}

@ -615,6 +615,24 @@ export default function ProcessModelShow() {
};
if (processModel) {
const processStartButton = (
<Stack orientation="horizontal" gap={3}>
<Can
I="POST"
a={targetUris.processInstanceCreatePath}
ability={ability}
>
<>
<ProcessInstanceRun
processModel={processModel}
onSuccessCallback={setProcessInstance}
/>
<br />
<br />
</>
</Can>
</Stack>
);
return (
<>
{fileUploadModal()}
@ -680,22 +698,7 @@ export default function ProcessModelShow() {
</Can>
</Stack>
<p className="process-description">{processModel.description}</p>
<Stack orientation="horizontal" gap={3}>
<Can
I="POST"
a={targetUris.processInstanceCreatePath}
ability={ability}
>
<>
<ProcessInstanceRun
processModel={processModel}
onSuccessCallback={setProcessInstance}
/>
<br />
<br />
</>
</Can>
</Stack>
{processModel.primary_file_name ? processStartButton : null}
{processModelFilesSection()}
<Can
I="POST"

@ -1,11 +1,9 @@
import React, { useEffect, useState } from 'react';
import { useNavigate, useParams } from 'react-router-dom';
import validator from '@rjsf/validator-ajv8';
import { Grid, Column, Button, ButtonSet, Loading } from '@carbon/react';
import { useDebouncedCallback } from 'use-debounce';
import { Form } from '../rjsf/carbon_theme';
import HttpService from '../services/HttpService';
import useAPIError from '../hooks/UseApiError';
import {
@ -16,9 +14,7 @@ import {
import { BasicTask, EventDefinition, Task } from '../interfaces';
import ProcessBreadcrumb from '../components/ProcessBreadcrumb';
import InstructionsForEndUser from '../components/InstructionsForEndUser';
import TypeaheadWidget from '../rjsf/custom_widgets/TypeaheadWidget/TypeaheadWidget';
import DateRangePickerWidget from '../rjsf/custom_widgets/DateRangePicker/DateRangePickerWidget';
import { DATE_RANGE_DELIMITER } from '../config';
import CustomForm from '../components/CustomForm';
export default function TaskShow() {
const [basicTask, setBasicTask] = useState<BasicTask | null>(null);
@ -33,11 +29,6 @@ export default function TaskShow() {
const { addError, removeError } = useAPIError();
const rjsfWidgets = {
typeahead: TypeaheadWidget,
'date-range': DateRangePickerWidget,
};
// if a user can complete a task then the for-me page should
// always work for them so use that since it will work in all cases
const navigateToInterstitial = (myTask: BasicTask) => {
@ -196,157 +187,6 @@ export default function TaskShow() {
});
};
const formatDateString = (dateString?: string) => {
let dateObject = new Date();
if (dateString) {
dateObject = new Date(dateString);
}
return dateObject.toISOString().split('T')[0];
};
const checkFieldComparisons = (
formData: any,
propertyKey: string,
minimumDateCheck: string,
formattedDateString: string,
errors: any,
jsonSchema: any
) => {
// field format:
// field:[field_name_to_use]
//
// if field is a range:
// field:[field_name_to_use]:[start or end]
//
// defaults to "start" in all cases
const [_, fieldIdentifierToCompareWith, startOrEnd] =
minimumDateCheck.split(':');
if (!(fieldIdentifierToCompareWith in formData)) {
errors[propertyKey].addError(
`was supposed to be compared against '${fieldIdentifierToCompareWith}' but it either doesn't have a value or does not exist`
);
return;
}
const rawDateToCompareWith = formData[fieldIdentifierToCompareWith];
if (!rawDateToCompareWith) {
errors[propertyKey].addError(
`was supposed to be compared against '${fieldIdentifierToCompareWith}' but that field did not have a value`
);
return;
}
const [startDate, endDate] =
rawDateToCompareWith.split(DATE_RANGE_DELIMITER);
let dateToCompareWith = startDate;
if (startOrEnd && startOrEnd === 'end') {
dateToCompareWith = endDate;
}
if (!dateToCompareWith) {
const errorMessage = `was supposed to be compared against '${[
fieldIdentifierToCompareWith,
startOrEnd,
].join(':')}' but that field did not have a value`;
errors[propertyKey].addError(errorMessage);
return;
}
const dateStringToCompareWith = formatDateString(dateToCompareWith);
if (dateStringToCompareWith > formattedDateString) {
let fieldToCompareWithTitle = fieldIdentifierToCompareWith;
if (
fieldIdentifierToCompareWith in jsonSchema.properties &&
jsonSchema.properties[fieldIdentifierToCompareWith].title
) {
fieldToCompareWithTitle =
jsonSchema.properties[fieldIdentifierToCompareWith].title;
}
errors[propertyKey].addError(
`must be equal to or greater than '${fieldToCompareWithTitle}'`
);
}
};
const checkMinimumDate = (
formData: any,
propertyKey: string,
propertyMetadata: any,
errors: any,
jsonSchema: any
) => {
// can be either "today" or another field
let dateString = formData[propertyKey];
if (dateString) {
if (typeof dateString === 'string') {
// in the case of date ranges, just take the start date and check that
[dateString] = dateString.split(DATE_RANGE_DELIMITER);
}
const formattedDateString = formatDateString(dateString);
const minimumDateChecks = propertyMetadata.minimumDate.split(',');
minimumDateChecks.forEach((mdc: string) => {
if (mdc === 'today') {
const dateTodayString = formatDateString();
if (dateTodayString > formattedDateString) {
errors[propertyKey].addError('must be today or after');
}
} else if (mdc.startsWith('field:')) {
checkFieldComparisons(
formData,
propertyKey,
mdc,
formattedDateString,
errors,
jsonSchema
);
}
});
}
};
const getFieldsWithDateValidations = (
jsonSchema: any,
formData: any,
errors: any
// eslint-disable-next-line sonarjs/cognitive-complexity
) => {
// if the jsonSchema has an items attribute then assume the element itself
// doesn't have a custom validation but its children could so use that
const jsonSchemaToUse =
'items' in jsonSchema ? jsonSchema.items : jsonSchema;
if ('properties' in jsonSchemaToUse) {
Object.keys(jsonSchemaToUse.properties).forEach((propertyKey: string) => {
const propertyMetadata = jsonSchemaToUse.properties[propertyKey];
if ('minimumDate' in propertyMetadata) {
checkMinimumDate(
formData,
propertyKey,
propertyMetadata,
errors,
jsonSchemaToUse
);
}
// recurse through all nested properties as well
let formDataToSend = formData[propertyKey];
if (formDataToSend) {
if (formDataToSend.constructor.name !== 'Array') {
formDataToSend = [formDataToSend];
}
formDataToSend.forEach((item: any, index: number) => {
let errorsToSend = errors[propertyKey];
if (index in errorsToSend) {
errorsToSend = errorsToSend[index];
}
getFieldsWithDateValidations(propertyMetadata, item, errorsToSend);
});
}
});
}
return errors;
};
const handleCloseButton = () => {
setAutosaveOnFormChanges(false);
setFormButtonsDisabled(true);
@ -437,17 +277,13 @@ export default function TaskShow() {
);
}
const customValidate = (formData: any, errors: any) => {
return getFieldsWithDateValidations(jsonSchema, formData, errors);
};
// we are using two forms here so we can have one that validates data and one that does not.
// this allows us to autosave form data without extra attributes and without validations
// but still requires validations when the user submits the form that they can edit.
return (
<Grid fullWidth condensed>
<Column sm={4} md={5} lg={8}>
<Form
<CustomForm
id="form-to-submit"
disabled={formButtonsDisabled}
formData={taskData}
@ -458,23 +294,16 @@ export default function TaskShow() {
onSubmit={handleFormSubmit}
schema={jsonSchema}
uiSchema={formUiSchema}
widgets={rjsfWidgets}
validator={validator}
customValidate={customValidate}
omitExtraData
>
{reactFragmentToHideSubmitButton}
</Form>
<Form
</CustomForm>
<CustomForm
id="hidden-form-for-autosave"
formData={taskData}
onSubmit={handleAutosaveFormSubmit}
schema={jsonSchema}
uiSchema={formUiSchema}
widgets={rjsfWidgets}
validator={validator}
noValidate
omitExtraData
/>
</Column>
</Grid>