commit 7c12dffe41
Merge remote-tracking branch 'origin/main' into feature/message_fixes
@@ -45,11 +45,11 @@ dev = ["black", "coverage", "isort", "pre-commit", "pyenchant", "pylint"]
 
 [[package]]
 name = "apscheduler"
-version = "3.9.1"
+version = "3.10.0"
 description = "In-process task scheduler with Cron-like capabilities"
 category = "main"
 optional = false
-python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, <4"
+python-versions = ">=3.6"
 
 [package.dependencies]
 pytz = "*"
@@ -58,14 +58,13 @@ six = ">=1.4.0"
 tzlocal = ">=2.0,<3.0.0 || >=4.0.0"
 
 [package.extras]
-asyncio = ["trollius"]
 doc = ["sphinx", "sphinx-rtd-theme"]
 gevent = ["gevent"]
 mongodb = ["pymongo (>=3.0)"]
 redis = ["redis (>=3.0)"]
 rethinkdb = ["rethinkdb (>=2.4.0)"]
-sqlalchemy = ["sqlalchemy (>=0.8)"]
-testing = ["mock", "pytest", "pytest-asyncio", "pytest-asyncio (<0.6)", "pytest-cov", "pytest-tornado5"]
+sqlalchemy = ["sqlalchemy (>=1.4)"]
+testing = ["pytest", "pytest-asyncio", "pytest-cov", "pytest-tornado5"]
 tornado = ["tornado (>=4.3)"]
 twisted = ["twisted"]
 zookeeper = ["kazoo"]
@@ -1760,7 +1759,7 @@ lxml = "*"
 type = "git"
 url = "https://github.com/sartography/SpiffWorkflow"
 reference = "main"
-resolved_reference = "0e61be85c47474a33037e6f398e64c96e02f13ad"
+resolved_reference = "2ca6ebf800d4ff1d54f3e1c48798a2cb879560f7"
 
 [[package]]
 name = "sqlalchemy"
@@ -2131,8 +2130,8 @@ aniso8601 = [
     {file = "aniso8601-9.0.1.tar.gz", hash = "sha256:72e3117667eedf66951bb2d93f4296a56b94b078a8a95905a052611fb3f1b973"},
 ]
 apscheduler = [
-    {file = "APScheduler-3.9.1-py2.py3-none-any.whl", hash = "sha256:ddc25a0ddd899de44d7f451f4375fb971887e65af51e41e5dcf681f59b8b2c9a"},
-    {file = "APScheduler-3.9.1.tar.gz", hash = "sha256:65e6574b6395498d371d045f2a8a7e4f7d50c6ad21ef7313d15b1c7cf20df1e3"},
+    {file = "APScheduler-3.10.0-py3-none-any.whl", hash = "sha256:575299f20073c60a2cc9d4fa5906024cdde33c5c0ce6087c4e3c14be3b50fdd4"},
+    {file = "APScheduler-3.10.0.tar.gz", hash = "sha256:a49fc23269218416f0e41890eea7a75ed6b284f10630dcfe866ab659621a3696"},
 ]
 astroid = [
     {file = "astroid-2.12.12-py3-none-any.whl", hash = "sha256:72702205200b2a638358369d90c222d74ebc376787af8fb2f7f2a86f7b5cc85f"},
@@ -2521,6 +2520,7 @@ lazy-object-proxy = [
     {file = "lazy_object_proxy-1.8.0-pp39-pypy39_pp73-any.whl", hash = "sha256:ce58b2b3734c73e68f0e30e4e725264d4d6be95818ec0a0be4bb6bf9a7e79aa8"},
 ]
 livereload = [
+    {file = "livereload-2.6.3-py2.py3-none-any.whl", hash = "sha256:ad4ac6f53b2d62bb6ce1a5e6e96f1f00976a32348afedcb4b6d68df2a1d346e4"},
     {file = "livereload-2.6.3.tar.gz", hash = "sha256:776f2f865e59fde56490a56bcc6773b6917366bce0c267c60ee8aaf1a0959869"},
 ]
 lxml = [
@@ -8,11 +8,12 @@ ENV PATH="$VIRTUAL_ENV/bin:$PATH"
 WORKDIR /app
 
 # base plus packages needed for deployment. Could just install these in final, but then we can't cache as much.
+# vim is just for debugging
 FROM base AS deployment
 
 RUN apt-get update \
   && apt-get clean -y \
-  && apt-get install -y -q curl git-core gunicorn3 default-mysql-client \
+  && apt-get install -y -q curl git-core gunicorn3 default-mysql-client vim \
   && rm -rf /var/lib/apt/lists/*
 
 # Setup image for installing Python dependencies.
@@ -55,6 +55,14 @@ if [[ "${SPIFFWORKFLOW_BACKEND_RUN_DATA_SETUP:-}" != "false" ]]; then
   SPIFFWORKFLOW_BACKEND_FAIL_ON_INVALID_PROCESS_MODELS=false poetry run python bin/save_all_bpmn.py
 fi
 
+if [[ -n "${SPIFFWORKFLOW_BACKEND_GIT_SSH_PRIVATE_KEY:-}" ]]; then
+  if [[ -z "${SPIFFWORKFLOW_BACKEND_GIT_SSH_PRIVATE_KEY_PATH:-}" ]]; then
+    export SPIFFWORKFLOW_BACKEND_GIT_SSH_PRIVATE_KEY_PATH=$(mktemp /tmp/ssh_private_key.XXXXXX)
+  fi
+  chmod 600 "${SPIFFWORKFLOW_BACKEND_GIT_SSH_PRIVATE_KEY_PATH}"
+  echo "${SPIFFWORKFLOW_BACKEND_GIT_SSH_PRIVATE_KEY}" >"${SPIFFWORKFLOW_BACKEND_GIT_SSH_PRIVATE_KEY_PATH}"
+fi
+
 # Assure that the Process Models Directory is initialized as a git repo
 git init "${SPIFFWORKFLOW_BACKEND_BPMN_SPEC_ABSOLUTE_DIR}"
 git config --global --add safe.directory "${SPIFFWORKFLOW_BACKEND_BPMN_SPEC_ABSOLUTE_DIR}"
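
For readers following the boot change above: it materializes an SSH key from an environment variable into a file with owner-only permissions so that later git pushes can authenticate. A minimal Python sketch of the same pattern (the function name and file prefix are illustrative, not from the repo):

    import os
    import tempfile

    def materialize_ssh_key(private_key: str) -> str:
        """Write a key from an env var to disk, readable only by the owner."""
        # mkstemp already creates the file with 0o600 permissions, which is
        # what the `chmod 600` in the script above enforces.
        fd, path = tempfile.mkstemp(prefix="ssh_private_key.")
        with os.fdopen(fd, "w") as f:
            f.write(private_key)
        return path
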
@@ -12,17 +12,9 @@ set -o errtrace -o errexit -o nounset -o pipefail
 bpmn_models_absolute_dir="$1"
 git_commit_message="$2"
 git_branch="$3"
-git_commit_username="$4"
-git_commit_email="$5"
-git_commit_password="$6"
 
-if [[ -z "${5:-}" ]]; then
-  >&2 echo "usage: $(basename "$0") [bpmn_models_absolute_dir] [git_commit_message] [git_branch] [git_commit_username] [git_commit_email]"
-  exit 1
-fi
-
-if [[ -z "$git_commit_password" && -z "${SPIFFWORKFLOW_BACKEND_GIT_SSH_PRIVATE_KEY:-}" ]]; then
-  >&2 echo "ERROR: A git password or SPIFFWORKFLOW_BACKEND_GIT_SSH_PRIVATE_KEY must be provided"
+if [[ -z "${3:-}" ]]; then
+  >&2 echo "usage: $(basename "${0}") [bpmn_models_absolute_dir] [git_commit_message] [git_branch]"
   exit 1
 fi
 
@@ -32,38 +24,27 @@ function failed_to_get_lock() {
 }
 
 function run() {
-  cd "$bpmn_models_absolute_dir"
+  cd "${bpmn_models_absolute_dir}"
   git add .
 
   # https://unix.stackexchange.com/a/155077/456630
   if [ -z "$(git status --porcelain)" ]; then
     echo "No changes to commit"
-  else
-
-    git config --local user.name "$git_commit_username"
-    git config --local user.email "$git_commit_email"
-
-    if [[ -n "${SPIFFWORKFLOW_BACKEND_GIT_SSH_PRIVATE_KEY:-}" ]]; then
-      tmpfile=$(mktemp /tmp/tmp_git.XXXXXX)
-      chmod 600 "$tmpfile"
-      echo "$SPIFFWORKFLOW_BACKEND_GIT_SSH_PRIVATE_KEY" >"$tmpfile"
-      export GIT_SSH_COMMAND="ssh -o UserKnownHostsFile=/dev/null -o StrictHostKeyChecking=no -i ${tmpfile} -F /dev/null"
-    else
-      PAT="${git_commit_username}:${git_commit_password}"
-      AUTH=$(echo -n "$PAT" | openssl base64 | tr -d '\n')
-      git config --local http.extraHeader "Authorization: Basic $AUTH"
-    fi
-
-    git commit -m "$git_commit_message"
-    git push --set-upstream origin "$git_branch"
-
-    if [[ -z "${SPIFFWORKFLOW_BACKEND_GIT_SSH_PRIVATE_KEY:-}" ]]; then
-      git config --unset --local http.extraHeader
-    fi
+    return
   fi
 
+  # FIXME: the environment variables may not be working with the root user which we are using in the docker container.
+  # we see some evidence with this issue https://stackoverflow.com/questions/68975943/git-config-environment-variables
+  # and it didn't seem to work for us either so set them like this for now.
+  # One day we should probably not use the root user in the docker container.
+  git config --local user.email "$GIT_COMMITTER_EMAIL"
+  git config --local user.name "$GIT_COMMITTER_NAME"
+
+  git commit -m "${git_commit_message}"
+  git push --set-upstream origin "${git_branch}"
 }
 
-exec {lock_fd}>/var/lock/mylockfile || failed_to_get_lock
-flock --timeout 60 "$lock_fd" || failed_to_get_lock
+exec {lock_fd}>/var/lock/spiff-workflow-git-lock || failed_to_get_lock
+flock --timeout 60 "${lock_fd}" || failed_to_get_lock
 run
-flock -u "$lock_fd"
+flock -u "${lock_fd}"
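
The `exec {lock_fd}>...` and `flock` pair above serializes concurrent commit-and-push runs with an advisory lock on a dedicated lock file. A rough Python equivalent of that locking pattern, assuming the same lock path (the script's 60-second timeout is not replicated here):

    import fcntl

    def with_git_lock(action) -> None:
        """Run `action` while holding the same advisory lock the script uses."""
        with open("/var/lock/spiff-workflow-git-lock", "w") as lock_file:
            fcntl.flock(lock_file, fcntl.LOCK_EX)  # blocks until the lock is free
            try:
                action()  # e.g. the commit-and-push body of run()
            finally:
                fcntl.flock(lock_file, fcntl.LOCK_UN)
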
@@ -1,5 +1,6 @@
 """__init__."""
 import faulthandler
+import sys
 import os
 from typing import Any
 
@@ -166,10 +167,9 @@ def traces_sampler(sampling_context: Any) -> Any:
 
     # tasks_controller.task_submit
     # this is the current pain point as of 31 jan 2023.
-    if (
-        path_info
-        and path_info.startswith("/v1.0/tasks/")
-        and request_method == "PUT"
+    if path_info and (
+        (path_info.startswith("/v1.0/tasks/") and request_method == "PUT")
+        or (path_info.startswith("/v1.0/task-data/") and request_method == "GET")
     ):
         return 1
 
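
For context on how Sentry consumes the function above: `traces_sampler` receives a sampling context per request and returns a sample rate. A trimmed sketch, assuming the WSGI fields that Sentry's Flask integration provides in the sampling context (the fallback rate is illustrative):

    from typing import Any

    def traces_sampler(sampling_context: Any) -> Any:
        # wsgi_environ is supplied when the SDK wraps a WSGI app such as Flask.
        wsgi_environ = sampling_context.get("wsgi_environ", {})
        path_info = wsgi_environ.get("PATH_INFO")
        request_method = wsgi_environ.get("REQUEST_METHOD")
        if path_info and (
            (path_info.startswith("/v1.0/tasks/") and request_method == "PUT")
            or (path_info.startswith("/v1.0/task-data/") and request_method == "GET")
        ):
            return 1  # always trace the endpoints named as the current pain point
        return 0.01  # sample everything else sparsely
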
@@ -210,7 +210,7 @@ def configure_sentry(app: flask.app.Flask) -> None:
 
     # profiling doesn't work on windows, because of an issue like https://github.com/nvdv/vprof/issues/62
     # but also we commented out profiling because it was causing segfaults (i guess it is marked experimental)
-    # profiles_sample_rate = 0 if sys.platform.startswith("win") else 1
+    profiles_sample_rate = 0 if sys.platform.startswith("win") else 1
 
     sentry_sdk.init(
         dsn=app.config.get("SPIFFWORKFLOW_BACKEND_SENTRY_DSN"),
@@ -227,6 +227,6 @@ def configure_sentry(app: flask.app.Flask) -> None:
         traces_sample_rate=float(sentry_traces_sample_rate),
         traces_sampler=traces_sampler,
         # The profiles_sample_rate setting is relative to the traces_sample_rate setting.
-        # _experiments={"profiles_sample_rate": profiles_sample_rate},
+        _experiments={"profiles_sample_rate": profiles_sample_rate},
         before_send=before_send,
     )
@@ -2,6 +2,10 @@
 import re
 from os import environ
 
+# Consider: https://flask.palletsprojects.com/en/2.2.x/config/#configuring-from-environment-variables
+# and from_prefixed_env(), though we want to ensure that these variables are all documented, so that
+# is a benefit of the status quo and having them all in this file explicitly.
+
 FLASK_SESSION_SECRET_KEY = environ.get("FLASK_SESSION_SECRET_KEY")
 
 SPIFFWORKFLOW_BACKEND_BPMN_SPEC_ABSOLUTE_DIR = environ.get(
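
The new comment above refers to Flask's built-in alternative. A sketch of what `from_prefixed_env()` would look like, shown only for comparison with the explicit `environ.get()` style the file deliberately keeps:

    from flask import Flask

    app = Flask(__name__)
    # Flask 2.1+ can bulk-load prefixed environment variables into app.config;
    # the config file above keeps explicit environ.get() calls instead so that
    # every supported variable stays documented in one place.
    app.config.from_prefixed_env(prefix="SPIFFWORKFLOW_BACKEND")
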
@@ -98,9 +102,6 @@ SPIFFWORKFLOW_BACKEND_GIT_PUBLISH_CLONE_URL = environ.get(
 SPIFFWORKFLOW_BACKEND_GIT_COMMIT_ON_SAVE = (
     environ.get("SPIFFWORKFLOW_BACKEND_GIT_COMMIT_ON_SAVE", default="false") == "true"
 )
-SPIFFWORKFLOW_BACKEND_GIT_SSH_PRIVATE_KEY = environ.get(
-    "SPIFFWORKFLOW_BACKEND_GIT_SSH_PRIVATE_KEY"
-)
 SPIFFWORKFLOW_BACKEND_GIT_USERNAME = environ.get("SPIFFWORKFLOW_BACKEND_GIT_USERNAME")
 SPIFFWORKFLOW_BACKEND_GIT_USER_EMAIL = environ.get(
     "SPIFFWORKFLOW_BACKEND_GIT_USER_EMAIL"
@@ -108,11 +109,8 @@ SPIFFWORKFLOW_BACKEND_GIT_USER_EMAIL = environ.get(
 SPIFFWORKFLOW_BACKEND_GITHUB_WEBHOOK_SECRET = environ.get(
     "SPIFFWORKFLOW_BACKEND_GITHUB_WEBHOOK_SECRET", default=None
 )
-SPIFFWORKFLOW_BACKEND_GIT_SSH_PRIVATE_KEY = environ.get(
-    "SPIFFWORKFLOW_BACKEND_GIT_SSH_PRIVATE_KEY", default=None
-)
-SPIFFWORKFLOW_BACKEND_GIT_USER_PASSWORD = environ.get(
-    "SPIFFWORKFLOW_BACKEND_GIT_USER_PASSWORD", default=None
+SPIFFWORKFLOW_BACKEND_GIT_SSH_PRIVATE_KEY_PATH = environ.get(
+    "SPIFFWORKFLOW_BACKEND_GIT_SSH_PRIVATE_KEY_PATH", default=None
 )
 
 # Database Configuration
@@ -94,9 +94,11 @@ def _process_data_fetcher(
     process_instance = _find_process_instance_by_id_or_raise(process_instance_id)
     processor = ProcessInstanceProcessor(process_instance)
     all_process_data = processor.get_data()
-    process_data_value = None
-    if process_data_identifier in all_process_data:
-        process_data_value = all_process_data[process_data_identifier]
+    process_data_value = all_process_data.get(process_data_identifier)
+    if process_data_value is None:
+        script_engine_last_result = processor._script_engine.environment.last_result()
+        process_data_value = script_engine_last_result.get(process_data_identifier)
 
     if process_data_value is not None and index is not None:
         process_data_value = process_data_value[index]
@@ -108,7 +110,7 @@
     ):
         parts = process_data_value.split(";")
         mimetype = parts[0][4:]
-        filename = parts[1]
+        filename = parts[1].split("=")[1]
         base64_value = parts[2].split(",")[1]
         file_contents = base64.b64decode(base64_value)
 
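
The filename fix above matters because the stored value appears to be a data URL of the form `data:<mimetype>;name=<filename>;base64,<payload>`; `parts[1]` is the whole `name=...` segment, so only the piece after `=` is the filename. A standalone sketch of the same parsing (the format is assumed from the code):

    import base64

    def parse_data_url(process_data_value: str) -> tuple[str, str, bytes]:
        parts = process_data_value.split(";")
        mimetype = parts[0][4:]                # strip the leading "data:"
        filename = parts[1].split("=")[1]      # "name=report.pdf" -> "report.pdf"
        base64_value = parts[2].split(",")[1]  # "base64,<payload>" -> "<payload>"
        return mimetype, filename, base64.b64decode(base64_value)

    # parse_data_url("data:text/plain;name=hello.txt;base64,aGk=")
    # -> ("text/plain", "hello.txt", b"hi")
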
@@ -199,16 +199,18 @@
     )
     if not detailed:
         log_query = log_query.filter(
-            # this was the previous implementation, where we only show completed tasks and skipped tasks.
+            # 1. this was the previous implementation, where we only show completed tasks and skipped tasks.
             # maybe we want to iterate on this in the future (in a third tab under process instance logs?)
             # or_(
             #     SpiffLoggingModel.message.in_(["State change to COMPLETED"]),  # type: ignore
             #     SpiffLoggingModel.message.like("Skipped task %"),  # type: ignore
             # )
+            # 2. We included ["End Event", "Default Start Event"] along with Default Throwing Event, but feb 2023
+            # we decided to remove them, since they get really chatty when there are lots of subprocesses and call activities.
            and_(
                 SpiffLoggingModel.message.in_(["State change to COMPLETED"]),  # type: ignore
                 SpiffLoggingModel.bpmn_task_type.in_(  # type: ignore
-                    ["Default Throwing Event", "End Event", "Default Start Event"]
+                    ["Default Throwing Event"]
                 ),
             )
         )
@@ -0,0 +1,46 @@
+"""Get_data_sizes."""
+from typing import Any
+
+from spiffworkflow_backend.models.script_attributes_context import (
+    ScriptAttributesContext,
+)
+from spiffworkflow_backend.scripts.script import Script
+from spiffworkflow_backend.services.process_instance_processor import (
+    ProcessInstanceProcessor,
+)
+
+
+class GetDataSizes(Script):
+    """GetDataSizes."""
+
+    @staticmethod
+    def requires_privileged_permissions() -> bool:
+        """We have deemed this function safe to run without elevated permissions."""
+        return False
+
+    def get_description(self) -> str:
+        """Get_description."""
+        return """Returns a dictionary of information about the size of task data and
+        the python environment for the currently running process."""
+
+    def run(
+        self,
+        script_attributes_context: ScriptAttributesContext,
+        *_args: Any,
+        **kwargs: Any
+    ) -> Any:
+        """Run."""
+        workflow = script_attributes_context.task.workflow
+        task_data_size = ProcessInstanceProcessor.get_task_data_size(workflow)
+        task_data_keys_by_task = {
+            t.task_spec.name: sorted(t.data.keys())
+            for t in ProcessInstanceProcessor.get_tasks_with_data(workflow)
+        }
+        python_env_size = ProcessInstanceProcessor.get_python_env_size(workflow)
+        python_env_keys = workflow.script_engine.environment.user_defined_state().keys()
+        return {
+            "python_env_size": python_env_size,
+            "python_env_keys": sorted(python_env_keys),
+            "task_data_size": task_data_size,
+            "task_data_keys_by_task": task_data_keys_by_task,
+        }
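
To illustrate the `task_data_keys_by_task` summary the new script builds, here is the same comprehension run against stand-in task objects (the real ones are SpiffWorkflow tasks; these stubs are hypothetical):

    class FakeSpec:
        def __init__(self, name):
            self.name = name

    class FakeTask:
        def __init__(self, name, data):
            self.task_spec = FakeSpec(name)
            self.data = data

    tasks = [
        FakeTask("Activity_ship", {"order_id": 1}),
        FakeTask("Activity_bill", {"total": 9}),
    ]
    task_data_keys_by_task = {t.task_spec.name: sorted(t.data.keys()) for t in tasks}
    print(task_data_keys_by_task)
    # {'Activity_ship': ['order_id'], 'Activity_bill': ['total']}
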
@@ -76,8 +76,9 @@ PATH_SEGMENTS_FOR_PERMISSION_ALL = [
     },
     {"path": "/process-instance-suspend", "relevant_permissions": ["create"]},
     {"path": "/process-instance-terminate", "relevant_permissions": ["create"]},
-    {"path": "/task-data", "relevant_permissions": ["read", "update"]},
     {"path": "/process-data", "relevant_permissions": ["read"]},
+    {"path": "/process-data-file-download", "relevant_permissions": ["read"]},
+    {"path": "/task-data", "relevant_permissions": ["read", "update"]},
 ]
 
 
@@ -567,15 +568,25 @@ class AuthorizationService:
             permissions_to_assign.append(
                 PermissionToAssign(permission="create", target_uri=target_uri)
             )
-            target_uri = f"/process-instances/for-me/{process_related_path_segment}"
-            permissions_to_assign.append(
-                PermissionToAssign(permission="read", target_uri=target_uri)
-            )
-            target_uri = f"/logs/{process_related_path_segment}"
-            permissions_to_assign.append(
-                PermissionToAssign(permission="read", target_uri=target_uri)
-            )
 
+            # giving people access to all logs for an instance actually gives them a little bit more access
+            # than would be optimal. ideally, you would only be able to view the logs for instances that you started
+            # or that you need to approve, etc. we could potentially implement this by adding before filters
+            # in the controllers that confirm that you are viewing logs for your instances. i guess you need to check
+            # both for-me and NOT for-me URLs for the instance in question to see if you should get access to its logs.
+            # if we implemented things this way, there would also be no way to restrict access to logs when you do not
+            # restrict access to instances. everything would be inheriting permissions from instances.
+            # if we want to really codify this rule, we could change logs from a prefix to a suffix
+            # (just add it to the end of the process instances path).
+            # but that makes it harder to change our minds in the future.
+            for target_uri in [
+                f"/process-instances/for-me/{process_related_path_segment}",
+                f"/logs/{process_related_path_segment}",
+                f"/process-data-file-download/{process_related_path_segment}",
+            ]:
+                permissions_to_assign.append(
+                    PermissionToAssign(permission="read", target_uri=target_uri)
+                )
         else:
             if permission_set == "all":
                 for path_segment_dict in PATH_SEGMENTS_FOR_PERMISSION_ALL:
@@ -94,19 +94,7 @@ class GitService:
             raise ConfigurationError(
                 "SPIFFWORKFLOW_BACKEND_BPMN_SPEC_ABSOLUTE_DIR config must be set"
             )
-        if current_app.config["SPIFFWORKFLOW_BACKEND_GIT_SSH_PRIVATE_KEY"]:
-            os.environ["SPIFFWORKFLOW_BACKEND_GIT_SSH_PRIVATE_KEY"] = (
-                current_app.config["SPIFFWORKFLOW_BACKEND_GIT_SSH_PRIVATE_KEY"]
-            )
 
-        git_username = ""
-        git_email = ""
-        if (
-            current_app.config["SPIFFWORKFLOW_BACKEND_GIT_USERNAME"]
-            and current_app.config["SPIFFWORKFLOW_BACKEND_GIT_USER_EMAIL"]
-        ):
-            git_username = current_app.config["SPIFFWORKFLOW_BACKEND_GIT_USERNAME"]
-            git_email = current_app.config["SPIFFWORKFLOW_BACKEND_GIT_USER_EMAIL"]
         shell_command_path = os.path.join(
             current_app.root_path, "..", "..", "bin", "git_commit_bpmn_models_repo"
         )
@@ -115,9 +103,6 @@ class GitService:
             repo_path_to_use,
             message,
             branch_name_to_use,
-            git_username,
-            git_email,
-            current_app.config["SPIFFWORKFLOW_BACKEND_GIT_USER_PASSWORD"],
         ]
         return cls.run_shell_command_to_get_stdout(shell_command)
 
@@ -169,8 +154,31 @@ class GitService:
         cls, command: list[str], return_success_state: bool = False
     ) -> Union[subprocess.CompletedProcess[bytes], bool]:
         """Run_shell_command."""
+        my_env = os.environ.copy()
+        my_env["GIT_COMMITTER_NAME"] = (
+            current_app.config.get("SPIFFWORKFLOW_BACKEND_GIT_USERNAME") or "unknown"
+        )
+
+        my_env["GIT_COMMITTER_EMAIL"] = (
+            current_app.config.get("SPIFFWORKFLOW_BACKEND_GIT_USER_EMAIL")
+            or "unknown@example.org"
+        )
+
+        # SSH authentication can be also provided via gitconfig.
+        ssh_key_path = current_app.config.get(
+            "SPIFFWORKFLOW_BACKEND_GIT_SSH_PRIVATE_KEY_PATH"
+        )
+        if ssh_key_path is not None:
+            my_env["GIT_SSH_COMMAND"] = (
+                "ssh -F /dev/null -o UserKnownHostsFile=/dev/null -o"
+                " StrictHostKeyChecking=no -i %s" % ssh_key_path
+            )
+
         # this is fine since we pass the commands directly
-        result = subprocess.run(command, check=False, capture_output=True)  # noqa
+        result = subprocess.run(  # noqa
+            command, check=False, capture_output=True, env=my_env
+        )
 
         if return_success_state:
             return result.returncode == 0
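
The central idea in the refactor above is that git reads the committer identity and its SSH invocation from the child process environment, so passing `env=my_env` scopes those settings to one subprocess instead of mutating `os.environ`. A standalone sketch (the key path is illustrative):

    import os
    import subprocess

    my_env = os.environ.copy()
    my_env["GIT_COMMITTER_NAME"] = "unknown"
    my_env["GIT_COMMITTER_EMAIL"] = "unknown@example.org"
    # Point git at a specific key and skip host-key prompts, as in the diff.
    my_env["GIT_SSH_COMMAND"] = (
        "ssh -F /dev/null -o UserKnownHostsFile=/dev/null"
        " -o StrictHostKeyChecking=no -i /tmp/ssh_private_key.abc123"
    )
    result = subprocess.run(
        ["git", "status"], check=False, capture_output=True, env=my_env
    )
    print(result.returncode)
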
@@ -178,9 +186,9 @@ class GitService:
             stdout = result.stdout.decode("utf-8")
             stderr = result.stderr.decode("utf-8")
             raise GitCommandError(
-                f"Failed to execute git command: {command} "
-                f"Stdout: {stdout} "
-                f"Stderr: {stderr} "
+                f"Failed to execute git command: {command}"
+                f"Stdout: {stdout}"
+                f"Stderr: {stderr}"
             )
 
         return result
@@ -197,16 +205,21 @@ class GitService:
                 f" body: {webhook}"
             )
 
-        clone_url = webhook["repository"]["clone_url"]
-        if (
-            clone_url
-            != current_app.config["SPIFFWORKFLOW_BACKEND_GIT_PUBLISH_CLONE_URL"]
-        ):
+        config_clone_url = current_app.config[
+            "SPIFFWORKFLOW_BACKEND_GIT_PUBLISH_CLONE_URL"
+        ]
+        repo = webhook["repository"]
+        valid_clone_urls = [repo["clone_url"], repo["git_url"], repo["ssh_url"]]
+        if config_clone_url not in valid_clone_urls:
             raise GitCloneUrlMismatchError(
-                "Configured clone url does not match clone url from webhook:"
-                f" {clone_url}"
+                "Configured clone url does not match the repo URLs from webhook: %s"
+                " =/= %s" % (config_clone_url, valid_clone_urls)
             )
 
+        # Test webhook requests have a zen koan and hook info.
+        if "zen" in webhook or "hook_id" in webhook:
+            return False
+
         if "ref" not in webhook:
             raise InvalidGitWebhookBodyError(
                 f"Could not find the 'ref' arg in the webhook body: {webhook}"
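
The `zen`/`hook_id` guard above works because GitHub's one-time "ping" delivery, sent when a webhook is first registered, carries a zen koan and hook metadata rather than a push payload. A minimal version of the check:

    def is_github_ping(webhook: dict) -> bool:
        # GitHub sends a one-time ping with a zen koan and hook info when a
        # webhook is first registered; it carries no "ref" to pull.
        return "zen" in webhook or "hook_id" in webhook

    assert is_github_ping({"zen": "Keep it logically awesome.", "hook_id": 1})
    assert not is_github_ping({"ref": "refs/heads/main", "repository": {}})
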
@@ -226,7 +239,7 @@ class GitService:
         with FileSystemService.cd(
             current_app.config["SPIFFWORKFLOW_BACKEND_BPMN_SPEC_ABSOLUTE_DIR"]
         ):
-            cls.run_shell_command(["git", "pull"])
+            cls.run_shell_command(["git", "pull", "--rebase"])
         return True
 
     @classmethod
@@ -247,11 +260,6 @@ class GitService:
         git_clone_url = current_app.config[
             "SPIFFWORKFLOW_BACKEND_GIT_PUBLISH_CLONE_URL"
         ]
-        if git_clone_url.startswith("https://"):
-            git_clone_url = git_clone_url.replace(
-                "https://",
-                f"https://{current_app.config['SPIFFWORKFLOW_BACKEND_GIT_USERNAME']}:{current_app.config['SPIFFWORKFLOW_BACKEND_GIT_USER_PASSWORD']}@",
-            )
         cmd = ["git", "clone", git_clone_url, destination_process_root]
 
         cls.run_shell_command(cmd)
@@ -147,6 +147,11 @@ class BoxedTaskDataBasedScriptEngineEnvironment(BoxedTaskDataEnvironment):  # ty
         super().execute(script, context, external_methods)
         self._last_result = context
 
+    def user_defined_state(
+        self, external_methods: Optional[Dict[str, Any]] = None
+    ) -> Dict[str, Any]:
+        return {}
+
     def last_result(self) -> Dict[str, Any]:
         return {k: v for k, v in self._last_result.items()}
@@ -213,13 +218,13 @@ class NonTaskDataBasedScriptEngineEnvironment(BasePythonScriptEngineEnvironment)
         for key_to_drop in context_keys_to_drop:
             context.pop(key_to_drop)
 
-        self.state = self._user_defined_state(external_methods)
+        self.state = self.user_defined_state(external_methods)
 
         # the task data needs to be updated with the current state so data references can be resolved properly.
         # the state will be removed later once the task is completed.
         context.update(self.state)
 
-    def _user_defined_state(
+    def user_defined_state(
         self, external_methods: Optional[Dict[str, Any]] = None
     ) -> Dict[str, Any]:
         keys_to_filter = self.non_user_defined_keys
@@ -240,7 +245,7 @@ class NonTaskDataBasedScriptEngineEnvironment(BasePythonScriptEngineEnvironment)
 
     def preserve_state(self, bpmn_process_instance: BpmnWorkflow) -> None:
         key = self.PYTHON_ENVIRONMENT_STATE_KEY
-        state = self._user_defined_state()
+        state = self.user_defined_state()
         bpmn_process_instance.data[key] = state
 
     def restore_state(self, bpmn_process_instance: BpmnWorkflow) -> None:
@@ -248,7 +253,7 @@ class NonTaskDataBasedScriptEngineEnvironment(BasePythonScriptEngineEnvironment)
         self.state = bpmn_process_instance.data.get(key, {})
 
     def finalize_result(self, bpmn_process_instance: BpmnWorkflow) -> None:
-        bpmn_process_instance.data.update(self._user_defined_state())
+        bpmn_process_instance.data.update(self.user_defined_state())
 
     def revise_state_with_task_data(self, task: SpiffTask) -> None:
         state_keys = set(self.state.keys())
@@ -288,6 +293,7 @@ class CustomBpmnScriptEngine(PythonScriptEngine):  # type: ignore
             "enumerate": enumerate,
             "format": format,
             "list": list,
+            "dict": dict,
             "map": map,
             "pytz": pytz,
             "sum": sum,
@@ -765,12 +771,12 @@ class ProcessInstanceProcessor:
 
         Returns: {process_name: [task_1, task_2, ...], ...}
         """
-        serialized_data = json.loads(self.serialize())
-        processes: dict[str, list[str]] = {serialized_data["spec"]["name"]: []}
-        for task_name, _task_spec in serialized_data["spec"]["task_specs"].items():
-            processes[serialized_data["spec"]["name"]].append(task_name)
-        if "subprocess_specs" in serialized_data:
-            for subprocess_name, subprocess_details in serialized_data[
+        bpmn_json = json.loads(self.process_instance_model.bpmn_json or '{}')
+        processes: dict[str, list[str]] = {bpmn_json["spec"]["name"]: []}
+        for task_name, _task_spec in bpmn_json["spec"]["task_specs"].items():
+            processes[bpmn_json["spec"]["name"]].append(task_name)
+        if "subprocess_specs" in bpmn_json:
+            for subprocess_name, subprocess_details in bpmn_json[
                 "subprocess_specs"
             ].items():
                 processes[subprocess_name] = []
@@ -805,7 +811,7 @@ class ProcessInstanceProcessor:
 
         #################################################################
 
-    def get_all_task_specs(self) -> dict[str, dict]:
+    def get_all_task_specs(self, bpmn_json: dict) -> dict[str, dict]:
         """This looks both at top level task_specs and subprocess_specs in the serialized data.
 
         It returns a dict of all task specs based on the task name like it is in the serialized form.
@@ -813,10 +819,9 @@ class ProcessInstanceProcessor:
         NOTE: this may not fully work for tasks that are NOT call activities since their task_name may not be unique
         but in our current use case we only care about the call activities here.
         """
-        serialized_data = json.loads(self.serialize())
-        spiff_task_json = serialized_data["spec"]["task_specs"] or {}
-        if "subprocess_specs" in serialized_data:
-            for _subprocess_name, subprocess_details in serialized_data[
+        spiff_task_json = bpmn_json["spec"]["task_specs"] or {}
+        if "subprocess_specs" in bpmn_json:
+            for _subprocess_name, subprocess_details in bpmn_json[
                 "subprocess_specs"
             ].items():
                 if "task_specs" in subprocess_details:
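
Both methods above now walk the serializer's JSON shape directly: a top-level `spec.task_specs` mapping plus an optional `subprocess_specs` mapping with the same nested shape. A sketch of that traversal over a hand-rolled structure (the shape is inferred from the code here, not from SpiffWorkflow documentation):

    bpmn_json = {
        "spec": {"name": "main", "task_specs": {"Start": {}, "Activity_one": {}}},
        "subprocess_specs": {"sub_a": {"task_specs": {"Activity_sub": {}}}},
    }

    all_task_specs = dict(bpmn_json["spec"]["task_specs"])
    for _name, details in bpmn_json.get("subprocess_specs", {}).items():
        all_task_specs.update(details.get("task_specs", {}))

    print(sorted(all_task_specs))  # ['Activity_one', 'Activity_sub', 'Start']
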
@@ -838,8 +843,8 @@ class ProcessInstanceProcessor:
         Also note that subprocess_task_id might in fact be a call activity, because spiff treats
         call activities like subprocesses in terms of the serialization.
         """
-        bpmn_json = json.loads(self.serialize())
-        spiff_task_json = self.get_all_task_specs()
+        bpmn_json = json.loads(self.process_instance_model.bpmn_json or '{}')
+        spiff_task_json = self.get_all_task_specs(bpmn_json)
 
         subprocesses_by_child_task_ids = {}
         task_typename_by_task_id = {}
@@ -1275,6 +1280,7 @@ class ProcessInstanceProcessor:
         # by background processing. when that happens it can potentially overwrite
         # human tasks which is bad because we cache them with the previous id's.
         # waiting_tasks = bpmn_process_instance.get_tasks(TaskState.WAITING)
+        # waiting_tasks = bpmn_process_instance.get_waiting()
         # if len(waiting_tasks) > 0:
         #     return ProcessInstanceStatus.waiting
         if len(user_tasks) > 0:
@@ -1496,16 +1502,40 @@ class ProcessInstanceProcessor:
         except WorkflowTaskException as we:
             raise ApiError.from_workflow_exception("task_error", str(we), we) from we
 
-    def check_task_data_size(self) -> None:
-        """CheckTaskDataSize."""
-        tasks_to_check = self.bpmn_process_instance.get_tasks(TaskState.FINISHED_MASK)
-        task_data = [task.data for task in tasks_to_check]
-        task_data_to_check = list(filter(len, task_data))
+    @classmethod
+    def get_tasks_with_data(
+        cls, bpmn_process_instance: BpmnWorkflow
+    ) -> List[SpiffTask]:
+        return [
+            task
+            for task in bpmn_process_instance.get_tasks(TaskState.FINISHED_MASK)
+            if len(task.data) > 0
+        ]
+
+    @classmethod
+    def get_task_data_size(cls, bpmn_process_instance: BpmnWorkflow) -> int:
+        tasks_with_data = cls.get_tasks_with_data(bpmn_process_instance)
+        all_task_data = [task.data for task in tasks_with_data]
 
         try:
-            task_data_len = len(json.dumps(task_data_to_check))
+            return len(json.dumps(all_task_data))
         except Exception:
-            task_data_len = 0
+            return 0
+
+    @classmethod
+    def get_python_env_size(cls, bpmn_process_instance: BpmnWorkflow) -> int:
+        user_defined_state = (
+            bpmn_process_instance.script_engine.environment.user_defined_state()
+        )
+
+        try:
+            return len(json.dumps(user_defined_state))
+        except Exception:
+            return 0
+
+    def check_task_data_size(self) -> None:
+        """CheckTaskDataSize."""
+        task_data_len = self.get_task_data_size(self.bpmn_process_instance)
 
         # Not sure what the number here should be but this now matches the mysql
         # max_allowed_packet variable on dev - 1073741824
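
As a note on the measurement the new classmethods share: `len(json.dumps(...))` sizes the serialized payload, which is what a database packet limit such as MySQL's max_allowed_packet cares about, whereas `sys.getsizeof` would only measure the top-level Python object. A small comparison (the second printed value is interpreter-dependent):

    import json
    import sys

    data = {"key": "x" * 1000}
    print(len(json.dumps(data)))  # 1011: the serialized payload size
    print(sys.getsizeof(data))    # just the dict object itself, independent of the string content
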
@@ -61,6 +61,11 @@ class TestGetAllPermissions(BaseTest):
                 "uri": "/tasks",
                 "permissions": ["create", "read", "update", "delete"],
             },
+            {
+                "group_identifier": "my_test_group",
+                "uri": "/process-data-file-download/hey:group:*",
+                "permissions": ["read"],
+            },
         ]
 
         permissions = GetAllPermissions().run(script_attributes_context)
@@ -156,31 +156,43 @@ class TestAuthorizationService(BaseTest):
         with_db_and_bpmn_file_cleanup: None,
     ) -> None:
         """Test_explode_permissions_all_on_process_group."""
-        expected_permissions = [
-            ("/logs/some-process-group:some-process-model:*", "read"),
-            ("/process-data/some-process-group:some-process-model:*", "read"),
-            ("/process-groups/some-process-group:some-process-model:*", "create"),
-            ("/process-groups/some-process-group:some-process-model:*", "delete"),
-            ("/process-groups/some-process-group:some-process-model:*", "read"),
-            ("/process-groups/some-process-group:some-process-model:*", "update"),
-            (
-                "/process-instance-suspend/some-process-group:some-process-model:*",
-                "create",
-            ),
-            (
-                "/process-instance-terminate/some-process-group:some-process-model:*",
-                "create",
-            ),
-            ("/process-instances/some-process-group:some-process-model:*", "create"),
-            ("/process-instances/some-process-group:some-process-model:*", "delete"),
-            ("/process-instances/some-process-group:some-process-model:*", "read"),
-            ("/process-models/some-process-group:some-process-model:*", "create"),
-            ("/process-models/some-process-group:some-process-model:*", "delete"),
-            ("/process-models/some-process-group:some-process-model:*", "read"),
-            ("/process-models/some-process-group:some-process-model:*", "update"),
-            ("/task-data/some-process-group:some-process-model:*", "read"),
-            ("/task-data/some-process-group:some-process-model:*", "update"),
-        ]
+        expected_permissions = sorted(
+            [
+                ("/logs/some-process-group:some-process-model:*", "read"),
+                ("/process-data/some-process-group:some-process-model:*", "read"),
+                (
+                    "/process-data-file-download/some-process-group:some-process-model:*",
+                    "read",
+                ),
+                ("/process-groups/some-process-group:some-process-model:*", "create"),
+                ("/process-groups/some-process-group:some-process-model:*", "delete"),
+                ("/process-groups/some-process-group:some-process-model:*", "read"),
+                ("/process-groups/some-process-group:some-process-model:*", "update"),
+                (
+                    "/process-instance-suspend/some-process-group:some-process-model:*",
+                    "create",
+                ),
+                (
+                    "/process-instance-terminate/some-process-group:some-process-model:*",
+                    "create",
+                ),
+                (
+                    "/process-instances/some-process-group:some-process-model:*",
+                    "create",
+                ),
+                (
+                    "/process-instances/some-process-group:some-process-model:*",
+                    "delete",
+                ),
+                ("/process-instances/some-process-group:some-process-model:*", "read"),
+                ("/process-models/some-process-group:some-process-model:*", "create"),
+                ("/process-models/some-process-group:some-process-model:*", "delete"),
+                ("/process-models/some-process-group:some-process-model:*", "read"),
+                ("/process-models/some-process-group:some-process-model:*", "update"),
+                ("/task-data/some-process-group:some-process-model:*", "read"),
+                ("/task-data/some-process-group:some-process-model:*", "update"),
+            ]
+        )
         permissions_to_assign = AuthorizationService.explode_permissions(
             "all", "PG:/some-process-group/some-process-model"
         )
@@ -201,6 +213,10 @@ class TestAuthorizationService(BaseTest):
                 "/logs/some-process-group:some-process-model:*",
                 "read",
             ),
+            (
+                "/process-data-file-download/some-process-group:some-process-model:*",
+                "read",
+            ),
             (
                 "/process-instances/for-me/some-process-group:some-process-model:*",
                 "read",
@@ -222,27 +238,39 @@ class TestAuthorizationService(BaseTest):
         with_db_and_bpmn_file_cleanup: None,
     ) -> None:
         """Test_explode_permissions_all_on_process_model."""
-        expected_permissions = [
-            ("/logs/some-process-group:some-process-model/*", "read"),
-            ("/process-data/some-process-group:some-process-model/*", "read"),
-            (
-                "/process-instance-suspend/some-process-group:some-process-model/*",
-                "create",
-            ),
-            (
-                "/process-instance-terminate/some-process-group:some-process-model/*",
-                "create",
-            ),
-            ("/process-instances/some-process-group:some-process-model/*", "create"),
-            ("/process-instances/some-process-group:some-process-model/*", "delete"),
-            ("/process-instances/some-process-group:some-process-model/*", "read"),
-            ("/process-models/some-process-group:some-process-model/*", "create"),
-            ("/process-models/some-process-group:some-process-model/*", "delete"),
-            ("/process-models/some-process-group:some-process-model/*", "read"),
-            ("/process-models/some-process-group:some-process-model/*", "update"),
-            ("/task-data/some-process-group:some-process-model/*", "read"),
-            ("/task-data/some-process-group:some-process-model/*", "update"),
-        ]
+        expected_permissions = sorted(
+            [
+                ("/logs/some-process-group:some-process-model/*", "read"),
+                (
+                    "/process-data-file-download/some-process-group:some-process-model/*",
+                    "read",
+                ),
+                ("/process-data/some-process-group:some-process-model/*", "read"),
+                (
+                    "/process-instance-suspend/some-process-group:some-process-model/*",
+                    "create",
+                ),
+                (
+                    "/process-instance-terminate/some-process-group:some-process-model/*",
+                    "create",
+                ),
+                (
+                    "/process-instances/some-process-group:some-process-model/*",
+                    "create",
+                ),
+                (
+                    "/process-instances/some-process-group:some-process-model/*",
+                    "delete",
+                ),
+                ("/process-instances/some-process-group:some-process-model/*", "read"),
+                ("/process-models/some-process-group:some-process-model/*", "create"),
+                ("/process-models/some-process-group:some-process-model/*", "delete"),
+                ("/process-models/some-process-group:some-process-model/*", "read"),
+                ("/process-models/some-process-group:some-process-model/*", "update"),
+                ("/task-data/some-process-group:some-process-model/*", "read"),
+                ("/task-data/some-process-group:some-process-model/*", "update"),
+            ]
+        )
         permissions_to_assign = AuthorizationService.explode_permissions(
             "all", "PM:/some-process-group/some-process-model"
         )
@@ -263,6 +291,10 @@ class TestAuthorizationService(BaseTest):
                 "/logs/some-process-group:some-process-model/*",
                 "read",
             ),
+            (
+                "/process-data-file-download/some-process-group:some-process-model/*",
+                "read",
+            ),
             (
                 "/process-instances/for-me/some-process-group:some-process-model/*",
                 "read",
@@ -25,6 +25,7 @@ import { useUriListForPermissions } from '../hooks/UriListForPermissions';
 import { PermissionsToCheck } from '../interfaces';
 import { usePermissionFetcher } from '../hooks/PermissionService';
 import { UnauthenticatedError } from '../services/HttpService';
+import { SPIFF_ENVIRONMENT } from '../config';
 
 // for ref: https://react-bootstrap.github.io/components/navbar/
 export default function NavigationBar() {
@@ -80,7 +81,12 @@ export default function NavigationBar() {
     if (UserService.isLoggedIn()) {
       return (
         <>
-          <HeaderGlobalAction className="username-header-text">
+          {SPIFF_ENVIRONMENT ? (
+            <HeaderGlobalAction className="spiff-environment-header-text unclickable-text">
+              {SPIFF_ENVIRONMENT}
+            </HeaderGlobalAction>
+          ) : null}
+          <HeaderGlobalAction className="username-header-text unclickable-text">
             {UserService.getPreferredUsername()}
           </HeaderGlobalAction>
           <HeaderGlobalAction
@@ -10,12 +10,15 @@ declare global {
   }
 }
 
+let spiffEnvironment = '';
 let appRoutingStrategy = 'subdomain_based';
-if (
-  'spiffworkflowFrontendJsenv' in window &&
-  'APP_ROUTING_STRATEGY' in window.spiffworkflowFrontendJsenv
-) {
-  appRoutingStrategy = window.spiffworkflowFrontendJsenv.APP_ROUTING_STRATEGY;
+if ('spiffworkflowFrontendJsenv' in window) {
+  if ('APP_ROUTING_STRATEGY' in window.spiffworkflowFrontendJsenv) {
+    appRoutingStrategy = window.spiffworkflowFrontendJsenv.APP_ROUTING_STRATEGY;
+  }
+  if ('ENVIRONMENT_IDENTIFIER' in window.spiffworkflowFrontendJsenv) {
+    spiffEnvironment = window.spiffworkflowFrontendJsenv.ENVIRONMENT_IDENTIFIER;
+  }
 }
 
 let hostAndPortAndPathPrefix;
@@ -34,6 +37,20 @@ if (/^\d+\./.test(hostname) || hostname === 'localhost') {
   }
   hostAndPortAndPathPrefix = `${hostname}:${serverPort}`;
   protocol = 'http';
+
+  if (spiffEnvironment === '') {
+    // using destructuring on an array where we only want the first element
+    // seems super confusing for non-javascript devs to read so let's NOT do that.
+    // eslint-disable-next-line prefer-destructuring
+    spiffEnvironment = hostname.split('.')[0];
+  }
+}
+
+if (
+  'spiffworkflowFrontendJsenv' in window &&
+  'APP_ROUTING_STRATEGY' in window.spiffworkflowFrontendJsenv
+) {
+  appRoutingStrategy = window.spiffworkflowFrontendJsenv.APP_ROUTING_STRATEGY;
 }
 
 let url = `${protocol}://${hostAndPortAndPathPrefix}/v1.0`;
@@ -62,3 +79,5 @@ export const DATE_TIME_FORMAT = 'yyyy-MM-dd HH:mm:ss';
 export const TIME_FORMAT_HOURS_MINUTES = 'HH:mm';
 export const DATE_FORMAT = 'yyyy-MM-dd';
 export const DATE_FORMAT_CARBON = 'Y-m-d';
+
+export const SPIFF_ENVIRONMENT = spiffEnvironment;
@@ -14,6 +14,21 @@
   width: 5rem;
 }
 
+.cds--header__action.spiff-environment-header-text {
+  width: 5rem;
+  color: #126d82;
+}
+
+.cds--header__action.unclickable-text:hover {
+  background-color: #161616;
+  cursor: default;
+}
+.cds--header__action.unclickable-text:focus {
+  border: none;
+  box-shadow: none;
+  border-color: none;
+}
+
 h1 {
   font-weight: 400;
   font-size: 28px;
@@ -110,7 +110,11 @@ export default function AdminRoutes() {
       />
       <Route
         path="logs/:process_model_id/:process_instance_id"
-        element={<ProcessInstanceLogList />}
+        element={<ProcessInstanceLogList variant="all" />}
+      />
+      <Route
+        path="logs/for-me/:process_model_id/:process_instance_id"
+        element={<ProcessInstanceLogList variant="for-me" />}
       />
       <Route
         path="process-instances"
@@ -6,23 +6,28 @@ import PaginationForTable from '../components/PaginationForTable';
 import ProcessBreadcrumb from '../components/ProcessBreadcrumb';
 import {
   getPageInfoFromSearchParams,
-  modifyProcessIdentifierForPathParam,
   convertSecondsToFormattedDateTime,
 } from '../helpers';
 import HttpService from '../services/HttpService';
 import { useUriListForPermissions } from '../hooks/UriListForPermissions';
 
-export default function ProcessInstanceLogList() {
+type OwnProps = {
+  variant: string;
+};
+
+export default function ProcessInstanceLogList({ variant }: OwnProps) {
   const params = useParams();
   const [searchParams, setSearchParams] = useSearchParams();
   const [processInstanceLogs, setProcessInstanceLogs] = useState([]);
   const [pagination, setPagination] = useState(null);
-  const modifiedProcessModelId = modifyProcessIdentifierForPathParam(
-    `${params.process_model_id}`
-  );
   const { targetUris } = useUriListForPermissions();
   const isDetailedView = searchParams.get('detailed') === 'true';
 
+  let processInstanceShowPageBaseUrl = `/admin/process-instances/for-me/${params.process_model_id}`;
+  if (variant === 'all') {
+    processInstanceShowPageBaseUrl = `/admin/process-instances/${params.process_model_id}`;
+  }
+
   useEffect(() => {
     const setProcessInstanceLogListFromResult = (result: any) => {
       setProcessInstanceLogs(result.results);
|
||||||
<td>
|
<td>
|
||||||
<Link
|
<Link
|
||||||
data-qa="process-instance-show-link"
|
data-qa="process-instance-show-link"
|
||||||
to={`/admin/process-instances/${modifiedProcessModelId}/${rowToUse.process_instance_id}/${rowToUse.spiff_step}`}
|
to={`${processInstanceShowPageBaseUrl}/${rowToUse.process_instance_id}/${rowToUse.spiff_step}`}
|
||||||
>
|
>
|
||||||
{convertSecondsToFormattedDateTime(rowToUse.timestamp)}
|
{convertSecondsToFormattedDateTime(rowToUse.timestamp)}
|
||||||
</Link>
|
</Link>
|
||||||
|
@@ -111,7 +116,7 @@
           },
           [
             `Process Instance: ${params.process_instance_id}`,
-            `/admin/process-instances/${params.process_model_id}/${params.process_instance_id}`,
+            `${processInstanceShowPageBaseUrl}/${params.process_instance_id}`,
           ],
           ['Logs'],
         ]}
@@ -115,6 +115,13 @@ export default function ProcessInstanceShow({ variant }: OwnProps) {
     );
   };
 
+  let processInstanceShowPageBaseUrl = `/admin/process-instances/for-me/${params.process_model_id}/${params.process_instance_id}`;
+  let processInstanceLogListPageBaseUrl = `/admin/logs/for-me/${params.process_model_id}/${params.process_instance_id}`;
+  if (variant === 'all') {
+    processInstanceShowPageBaseUrl = `/admin/process-instances/${params.process_model_id}/${params.process_instance_id}`;
+    processInstanceLogListPageBaseUrl = `/admin/logs/${params.process_model_id}/${params.process_instance_id}`;
+  }
+
   useEffect(() => {
     if (permissionsLoaded) {
       const processTaskFailure = () => {
@@ -254,11 +261,12 @@ export default function ProcessInstanceShow({ variant }: OwnProps) {
     if (queryParamArray.length > 0) {
       queryParams = `?${queryParamArray.join('&')}`;
     }
+
     return (
       <Link
         reloadDocument
         data-qa="process-instance-step-link"
-        to={`/admin/process-instances/${params.process_model_id}/${params.process_instance_id}/${spiffStep}${queryParams}`}
+        to={`${processInstanceShowPageBaseUrl}/${spiffStep}${queryParams}`}
       >
         {label}
       </Link>
@@ -282,7 +290,7 @@ export default function ProcessInstanceShow({ variant }: OwnProps) {
   };
 
   const returnToLastSpiffStep = () => {
-    window.location.href = `/admin/process-instances/${params.process_model_id}/${params.process_instance_id}`;
+    window.location.href = processInstanceShowPageBaseUrl;
   };
 
   const resetProcessInstance = () => {
@@ -453,7 +461,7 @@ export default function ProcessInstanceShow({ variant }: OwnProps) {
                 size="sm"
                 className="button-white-background"
                 data-qa="process-instance-log-list-link"
-                href={`/admin/logs/${modifiedProcessModelId}/${params.process_instance_id}`}
+                href={`${processInstanceLogListPageBaseUrl}`}
               >
                 Logs
               </Button>
@@ -168,6 +168,13 @@ export default function TaskShow() {
           }
         }
       }
+
+      // recurse through all nested properties as well
+      getFieldsWithDateValidations(
+        propertyMetadata,
+        formData[propertyKey],
+        errors[propertyKey]
+      );
     });
   }
   return errors;
@@ -106,7 +106,7 @@ export default function BaseInputTemplate<
     <TextInput
       id={id}
      name={id}
-      className="input"
+      className="text-input"
      helperText={helperText}
      invalid={invalid}
      invalidText={errorMessageForField}
@@ -5,8 +5,8 @@ import { Tag } from '@carbon/react';
 function ErrorList({ errors }: ErrorListProps) {
   if (errors) {
     return (
-      <Tag type="red" size="md" title="Fill Required Fields">
-        Please fill out required fields
+      <Tag type="red" size="md" title="Fix validation issues">
+        Some fields are invalid. Please correct them before submitting the form.
       </Tag>
     );
   }
@@ -61,6 +61,11 @@ function TextareaWidget<
     labelToUse = `${labelToUse}*`;
   }
 
+  let helperText = null;
+  if (uiSchema && uiSchema['ui:help']) {
+    helperText = uiSchema['ui:help'];
+  }
+
   let invalid = false;
   let errorMessageForField = null;
   if (rawErrors && rawErrors.length > 0) {
@@ -72,7 +77,8 @@ function TextareaWidget<
     <TextArea
       id={id}
       name={id}
-      className="form-control"
+      className="text-input"
+      helperText={helperText}
       value={value || ''}
       labelText=""
       placeholder={placeholder}
@@ -21,3 +21,7 @@
 .array-item-toolbox {
   margin-left: 2em;
 }
+
+.rjsf .text-input {
+  padding-top: 8px;
+}