Squashed 'spiffworkflow-backend/' changes from 031713a6..6cae736a
6cae736a Merge pull request #145 from sartography/data_size_script
241cb6ed Merge branch 'main' of github.com:sartography/spiff-arena
53e824ab set git user configs explicitly using the env vars w/ burnettk
741dab7e so we do not lose our minds when debugging on a server
8dd5d834 Merge branch 'main' of github.com:sartography/spiff-arena
44390824 show help text for textareas as well w/ burnettk
92042e9a why not dict, too
d0b67bb8 Cleanup
4ffba533 Getting ./bin/pyl to pass
d53d0c21 Provide info about keys in task data and python env
02db5acf Unfactor to fix size calculation/type hint issues
84fe64b0 Getting ./bin/pyl to pass
60a03f03 Adding python env size
a929ef91 Add cumulative task data size to script
32bb223b remove start and end events from simple logs view
54fdfd46 copy env so we are doing additive stuff rather than completely rewriting it
833a4125 fix git bash script unbound error w/ burnettk
46c62b90 more git config updates w/ burnettk
621e13fb disable strict host checking for git commands w/ burnettk
182d657d fixed tests w/ burnettk
40db077e give access to download process data files if a user has access to start a process model w/ burnettk
9083fcae backend: create SSH key file when contents provided
90aad09e backend: use sensible lock filename for git
0065b98b backend: specify --rebase when using git pull
94e1e15c backend: fix use of SSH private key for git ops
55252831 Look in task data/script engine environment for data, not just workflow data. (#143)
c5661f96 delint
d2d1ee50 backend/git_service: accept webhook test requests
782650d7 backend/git_service: check repo URLs from webhook
e76114c9 bump spiffworkflow hash
00d8eb55 put the env vars in the env section of the github action configs w/ burnettk
279e3042 added test for quickstart guide w/ burnettk
0e8ebc26 pyl
8732bd25 Merge remote-tracking branch 'origin/main' into feature/support_local_openid_in_cypress_tests
5b89d0cf use _GIT_SOURCE_BRANCH instead of _GIT_BRANCH
18917e62 fix unprefixed config
f32d83de update configs for publish feature
6139b411 added some support for using the backend openid server for cypress tests w/ burnettk
a2792e4d add new uses and delete ppg.ba.sme1
512bce0f the equal sign is not a part of the env var w/ burnettk
28b25fae updated terraform deployed env config name w/ burnettk
88fe9e66 more config updates w/ burnettk
6ad3224b cleaned up the default config file a bit
f328b5db pyl
4ef72a9c fixed some env vars for ci
9a31aff0 pyl
2a642f38 Merge remote-tracking branch 'origin/main' into feature/update_configs_with_prefix
9f144f54 remove unused import
ab6c45bc all tests pass w/ spiff lib upgrade, thanks for the help elizabeth
27870682 format
dd0a642a updated remaining configs manually w/ burnettk
d3153224 updated secret config names as well w/ burnettk
7160c702 updated configs to use the prefix using the script w/ burnettk
e57476ac updated default user group var name for better prefix w/ burnettk
6d3ef2b8 save task data to spiff step details when completing a user task as well w/ burnettk jbirddog
fde9ac7f append OPEN_ID to TENANT_SPECIFIC_FIELDS for clarification on where it comes from w/ burnettk
df34d786 do not remove the default user group when refreshing permissions w/ burnettk
a80f7513 Merge branch 'main' of github.com:sartography/spiff-arena into main
7c63f409 The aud in a json web token can be a string or an array -- so also deal with the case that it is an array
76fd4c2d updates to user import scripts w/ burnettk
f0067eea commit user if it has changed any attributes w/ burnettk
2f1cb4c0 fix for mypy on python 3.9 w/ burnettk
b2db377d give some leeway to iat when validating auth w/ burnettk
bce49095 add in debug logging when failing to login to help debug auth failures
af2d9a66 Don't try to complete a ready task to move the process instance diagram to the next yellow. (#136)
ee7d5b1e Merge pull request #137 from sartography/failing_test
a1e5111f allow overriding keycloak url in add user script w/ burnettk
d3b20221 updated add test users script to use realm admin w/ burnettk
390287bd The End Event has a spiff step
4e6bee21 added script to import users w/ burnettk
ff8912f5 added api endpoint to allow logging in for the first time with an openid access_token w/ burnettk
7fab1f4f Merge pull request #135 from sartography/bug/len
73e4178d run_pyl
9c40e56e delete 15 users that are no longer needed
82c42a66 Merge remote-tracking branch 'origin/main' into bug/len
057102c1 commit before removing users
fbee1e31 This should fix the len issue - though there may be better ways to fix this. This reverts commit d1e2d29211.
e48c3188 Revert "more finally for better len"
b398f53a more finally for better len; added a fixme for raising errors; fixed up an error message
3d22455e fix employee id
9eb0c08d Update Spiffworkflow so we have the better error messages on service tasks.
54e17133 don't default to a broken environment.
44acb13d add five test users, rename peopleops users, delete two
27bbacaf Remove version from docker-compose, as this is causing issues for people still using docker-compose v2.
74984c8e turn off profiling for now to see if that is what is causing segfaults
733fe927 Save logs in finally (#134)
37f728a7 link to the spiff step from a task on the frontend and use the correct db in ci
3c120843 pyl w/ burnettk
af10992a call activities are also working w/ burnettk
d0aa6b1c tasks and subprocesses are now setting the task states properly when getting task data w/ burnettk jbirddog
4791bda5 pyl w/ burnettk
ffa4fd78 Merge branch 'save_step_data' of github.com:sartography/spiff-arena into save_step_data
9e3a897b Fix bug where deletes from the environment were not removed from the task data as well
7eebf361 Merge remote-tracking branch 'origin/main' into save_step_data
b4f22984 Merge pull request #132 from sartography/feature/no_more_current_user
db3d4703 updated controller to use spiff step details to find correct task data to show w/ burnettk
b3a70d42 run_pyl
fca00361 remove the "current_user" from being added to the task_data.
747f91d1 Merge remote-tracking branch 'origin/main' into save_step_data
ac02e0a9 Merge remote-tracking branch 'origin/main' into save_step_data
cce05cab pyl
dc250235 Merge pull request #131 from sartography/feature/improved_service_task_errors
4d9a7a6a If you have an instance/config.py to override local development, it would also override testing.
c06db317 Use a mock when making external calls in tests.
d6654e82 Merge remote-tracking branch 'origin/main' into feature/improved_service_task_errors
6e4b191c Merge branch 'main' of github.com:sartography/spiff-arena
0fd982a4 catch up with environment change
81a7cdc9 added additional columns to spiff_step_details w/ burnettk jbirddog
805b86ec fixing some missing types
8644561b run_pyl
72deffd8 Assure that when something goes wrong calling a service task, we get as much good information about the problem as possible.
687125bd Merge remote-tracking branch 'origin/main' into save_step_data
9c8ca3aa enable faulthandler to hopefully see seg faults in the logs w/ burnettk jbirddog
a9651211 Merge branch 'main' into save_step_data
bc556729 remove dup
10fa5edf add users
d7f52641 Merge pull request #114 from sartography/frontend/use-api-subpath
35efb091 allow setting configs for the frontend through env vars w/ burnettk
7552408a allow configuring gunicorn configs in boot_server_in_docker w/ burnettk
1007e8c7 sentences need spaces between them
1b7fec4a renamed development env to local_development and testing to unit_testing w/ burnettk
02750228 set up qa2 to only do path based routing w/ burnettk
ee3cb621 added backend url to qa2 configs
fab0a6bd added qa2 configs to use keycloak on qa1 to test different domains w/ burnettk
f7743ea4 Merge pull request #128 from sartography/feature/bug_fixes
384d65f6 Update tasks_controller.py
7f0c0926 update get_token
af5a4c50 pass tenant attributes when creating a user w/ burnettk
2cf6f1a5 pyl w/ burnettk
b7bdae94 Merge remote-tracking branch 'origin/main' into feature/tenant-specific-fields-from-openid
007eecdc added bambooid to status users w/ burnettk
3b0e3ff6 POC for saving some data about each step
98490b69 allow added custom attributes to users when adding to keycloak w/ burnettk
68358519 Backend do_engine_steps performance improvements (#129)
2a2855e0 use our json encoder to dump the user for get_current_user script w/ burnettk
706bb570 Use the same markdown library for displaying as for editing - could enable a security run_pyl
7aebec9d When catching non-jinja errors from Jinja, raise a good error message, and make a best effort at tracking down the line number and error line if possible.
b4a31562 move towards returning dict in get_current_user
19ccca40 Merge remote-tracking branch 'origin/main' into feature/tenant-specific-fields-from-openid
257de512 add test users
ecc70795 run_pyl
3eb0fd76 When searching for human tasks to determine if the current user can complete it, filter on the "completed" flag.
ad4fead4 Back to inserting every log
12c3a2b8 hoping to fix tests on windows
d8ba46b6 grab bamboo_id from keycloak
e5a56f6e lint
f1c61581 if there are tenant specific fields in the config, transfer them from openid token to db
35637ba0 Merge branch 'main' of github.com:sartography/spiff-arena into main
86b248fa Fix that dreadful unknown "KeyError" exception that was cropping up. Adding a bit of detail to the spiffworkflow exceptions when a duplicate process model is found. Disable the submit button on tasks after you click submit (avoid the double click and give users a better experience)
6fc2f2b2 Merge pull request #125 from sartography/feature/dynamically-hide-fields-w-task-data
9de6e0b3 refactor some stuff in task_show to separate functions
f2a12c5c show that hiding nested fields works as well
d7fd92e1 make form schema and form ui schema both dicts, add support for hiding fields based on task data
80649382 run_pyl
5a859f16 Merge branch 'main' into feature/more_better_errors
2d4f9d45 add more users, and try to prevent sentry notification again
83b9c901 remove service accounts, formalize j, add madhurya
698b3af8 make test_user_lists more complete and correct
a163d6f7 clean up sentry notification and avoid logger.exception when we do not want sentry
0fced76d couple last serializer updates
b17142bf import EventBasedGatewayConverter from correct package
5b7805d0 try to improve exception handling by avoiding raising ApiError from services
e9913d83 simplify spiff integration post serializer update, w/ elizabeth and jon
9baf0cb2 Quick fix for url building
4641b523 File download from workflow data (#122)
30a73e2a Allow for different Python Environments when executing scripts within SpiffWorkflow (#121)
650b91ed add keycloak users
6d18bd23 bulk insert logs for performance improvement
c50744a1 there is no need to ever sentry_sdk.start_transaction because the flask integration does that
1368d71c get some more insight into connector proxy timings
7ffcded9 avoid poetry installing deps when we have them cached if they do not change
5d50ee8e more spans to track performance
a23a0700 more sentry performance tracing
8a98e8d9 folks who can start instances can also view their logs
17fb81bc shuffle around Dockerfile to allow to work for background container
d9bcbd09 Merge pull request #117 from sartography/feature/authorization
c586e0ea allow overriding git related configs w/ env var and log permissions stuff on boot
f5ee3ec8 Merge branch 'main' into feature/more_better_errors
063ebda6 Merge pull request #115 from sartography/backend/improve-dockerfile
ba6d4c5f Fix typing issue.
b0a05adc Use the id_token, not the auth_token from the open id server for authentication with the front end. The auth_token should be kept safe, and is not guaranteed to be a json token.
b9eb5dd8 add four new status users to spiff realm
553c93be backend: avoid redundant steps in Dockerfile
cf2e8975 removed old pin to spiffworkflow w/ burnettk
5970b1e3 tests are passing now w/ burnettk
a39aca6a some initial updates w/ burnettk
6fda0c5c Allow set to be called from scripts
a6482760 update lock file in backend and arena, though that one needs pruning
5b2046ab run_pyl
5328a2d4 Workflow Data Exceptions were not getting processed, we now catch the WorkflowDataException through the generic top level SpiffWorkflowException.
9f98cfee updated prefix for background instances w/ burnettk
acd26c04 Merge pull request #110 from sartography/feature/jinja_errors
864ae116 add keycloak users
f80836e7 pyl
71e20c9f set the correct type for task since SpiffTask and a backend task are not the same
26c791f9 increased the task data size
79dadcaa added pylint back to lock file
35b0871c Merge remote-tracking branch 'origin/main' into feature/jinja_errors
1dbac99a run_pyl had various recommendations that I find a bit of a pain in the butt, but that I did anyway.
b30081ab Merge pull request #109 from sartography/feature/upgrade_bandit
efb8fd0b removed debug print statements
58679f60 ensure we are passing the primary file name to the delete file test
9943cbbb fixed typo w/ burnettk
1fe63808 added some debug statements for ci w/ burnettk
d84942f0 upgraded bandit and flake8 w/ burnettk
72f599cb Merge remote-tracking branch 'origin/main' into feature/jinja_errors
c0530539 allow removing users from groups when refreshing permissions w/ burnettk
106500cf Added useMemo to error context. No longer clear errors in the task bar, as that will constantly remove them as soon as they are shown.
08c3106c Merge branch 'main' of github.com:sartography/spiff-arena
957e0373 when changing the primary file name also change the primary process when updating a process model from the api w/ burnettk
b560b364 Making sure we create informative messages when encountering jinja2 syntax errors.
51c325da delete legacy flask-rendered html pages from initial prototype, since backend is now API-only
59ebcbb4 added the process model identifier for the diagram if it is not the top level w/ burnettk
a02a132e Merge pull request #107 from sartography/feature/metadata_on_instance_show
b22e8828 do not allow deleting primary bpmn file and do not allow instantiating models without a primary bpmn file w/ burnettk
002df3f9 show metadata on instance show page but for some reason it reorders elements w/ burnettk
81c3cfe1 removed uniqueness constraint from human task so we can loopback to a previous task with a gateway w/ burnettk
73cbef31 ensure order of permissions in tests
d5949b86 document future enhancement
c8bc2049 strip off newlines and spaces when importing secrets and pinning spiffworkflow to working version until we can debug issues with new one w/ burnettk
bb99d942 added locking system for process instances so hopefully background jobs will not take instances currently being run by the user w/ burnettk
a4e60a36 added uniqueness constraint to spiff step details w/ burnettk
5fd7197c add sum and format
53d99f7d expanded functionality of the form builder
f08ff45b Revert "allow updating models on staging for a bit"
f88d3250 Revert "commit on save yes for now"
6675c407 qa1
10180043 since accepting the github host entry programmatically is no more secure
1517a3ee commit on save yes for now
5f1af4a6 allow updating models on staging for a bit
27fbd93d allow getting the GIT SSH key from an app config so we can set it in the secrets file w/ burnettk
9fa2f982 allow specifying an ssh key for git instead of a username and password w/ burnettk
09337070 updated admin user on sartography realm w/ burnettk
17124319 allow passing a realm name into start keycloak and added admin user to sartography realm
7d9600f4 find the top level process to find the task form when using subprocesses in called activities w/ burnettk danfunk
0bf13094 pyl w/ burnettk
97268226 Merge branch 'main' into feature/improved_errors
c1403a9e ensure we have something in the logs w/ burnettk
cdaf59b8 pyl w/ burnettk
55468b67 added configs for sartography env w/ burnettk
205dd4a5 pre-commit-in-ci
bd1058fe updating spiffworkflow version, and fixing the run_pyl (by removing the cruft I stuck in earlier)
ecbe1948 Merges
5da88709 Lots of adjustments from running pyl. Main change is in the ErrorDisplay.tsx to assure all error information is provided, and index.css to make it "pretty".
ab1d5c22 Removing dependency on flask-bpmn and taking it out of SpiffArena. Slightly updating the APIError code for recent updates to SpiffWorkflow's error refactoring.
git-subtree-dir: spiffworkflow-backend
git-subtree-split: 6cae736acd232199447a44f7ff2a8dc4c7779631
This commit is contained in:
parent 1cedc2ea50
commit 7171c2644f

.flake8
16
.flake8
|
@ -8,11 +8,19 @@ rst-roles = class,const,func,meth,mod,ref
|
|||
rst-directives = deprecated
|
||||
|
||||
per-file-ignores =
|
||||
# More specific globs seem to overwrite the more generic ones so we have
|
||||
# to split them out by directory
|
||||
# So if you have a rule like:
|
||||
# tests/*: D102,D103
|
||||
# and a rule like:
|
||||
# tests/test_hey.py: D102
|
||||
# THEN, test_hey.py will NOT be excluding D103
|
||||
|
||||
# asserts are ok in tests
|
||||
tests/*:S101
|
||||
tests/*:S101,D102,D103
|
||||
|
||||
# prefer naming functions descriptively rather than forcing comments
|
||||
*:D103
|
||||
src/*:D102,D103
|
||||
|
||||
bin/keycloak_test_server.py:B950,D
|
||||
conftest.py:S105
|
||||
|
@ -24,11 +32,11 @@ per-file-ignores =
|
|||
# the exclude=./migrations option doesn't seem to work with pre-commit
|
||||
# migrations are autogenerated from "flask db migration" so ignore them
|
||||
migrations/*:D
|
||||
src/spiffworkflow_backend/config/testing.py:S105
|
||||
src/spiffworkflow_backend/config/unit_testing.py:S105
|
||||
src/spiffworkflow_backend/load_database_models.py:F401
|
||||
|
||||
# this file overwrites methods from the logging library so we can't change them
|
||||
# and ignore long comment line
|
||||
src/spiffworkflow_backend/services/logging_service.py:N802,B950
|
||||
|
||||
tests/spiffworkflow_backend/integration/test_process_api.py:S607,S101,D103,S605
|
||||
tests/spiffworkflow_backend/integration/test_process_api.py:S607,S101,S605,D102,D103,D101
|
||||
|
@@ -142,12 +142,12 @@ jobs:
           host port: 3306
           container port: 3306
           mysql version: "8.0"
-          mysql database: "spiffworkflow_backend_testing"
+          mysql database: "spiffworkflow_backend_unit_testing"
           mysql root password: password
         if: matrix.database == 'mysql'

       - name: Setup Postgres
-        run: docker run --name postgres-spiff -p 5432:5432 -e POSTGRES_PASSWORD=spiffworkflow_backend -e POSTGRES_USER=spiffworkflow_backend -e POSTGRES_DB=spiffworkflow_backend_testing -d postgres
+        run: docker run --name postgres-spiff -p 5432:5432 -e POSTGRES_PASSWORD=spiffworkflow_backend -e POSTGRES_USER=spiffworkflow_backend -e POSTGRES_DB=spiffworkflow_backend_unit_testing -d postgres
         if: matrix.database == 'postgres'

       - name: Run Nox

Dockerfile (52 lines changed)
@@ -1,28 +1,44 @@
-FROM ghcr.io/sartography/python:3.11
+# Base image to share ENV vars that activate VENV.
+FROM ghcr.io/sartography/python:3.11 AS base
+
+ENV VIRTUAL_ENV=/app/venv
+RUN python3 -m venv $VIRTUAL_ENV
+ENV PATH="$VIRTUAL_ENV/bin:$PATH"
+
+WORKDIR /app
+
+# base plus packages needed for deployment. Could just install these in final, but then we can't cache as much.
+# vim is just for debugging
+FROM base AS deployment
+
+RUN apt-get update \
+ && apt-get clean -y \
+ && apt-get install -y -q curl git-core gunicorn3 default-mysql-client vim \
+ && rm -rf /var/lib/apt/lists/*
+
+# Setup image for installing Python dependencies.
+FROM base AS setup

 RUN pip install poetry
 RUN useradd _gunicorn --no-create-home --user-group

-RUN apt-get update && \
-    apt-get install -y -q \
-        gcc libssl-dev \
-        curl git-core libpq-dev \
-        gunicorn3 default-mysql-client
+RUN apt-get update \
+ && apt-get install -y -q gcc libssl-dev libpq-dev

-WORKDIR /app
+# poetry install takes a long time and can be cached if dependencies don't change,
+# so that's why we tolerate running it twice.
 COPY pyproject.toml poetry.lock /app/
 RUN poetry install --without dev

-RUN set -xe \
- && apt-get remove -y gcc python3-dev libssl-dev \
- && apt-get autoremove -y \
- && apt-get clean -y \
- && rm -rf /var/lib/apt/lists/*
-
-COPY . /app/
+# run poetry install again AFTER copying the app into the image
+# otherwise it does not know what the main app module is
+COPY . /app
 RUN poetry install --without dev

-CMD ./bin/boot_server_in_docker
+# Final image without setup dependencies.
+FROM deployment AS final
+
+LABEL source="https://github.com/sartography/spiff-arena"
+LABEL description="Software development platform for building, running, and monitoring executable diagrams"
+
+COPY --from=setup /app /app
+
+CMD ["./bin/boot_server_in_docker"]

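The multi-stage layout above names four stages (base, deployment, setup, final), so a single stage can be built in isolation with docker's standard --target flag; a minimal sketch (the image tag is illustrative only):

    docker build --target final -t spiffworkflow-backend:local .
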
@@ -10,12 +10,12 @@ set -o errtrace -o errexit -o nounset -o pipefail
 # run migrations
 export FLASK_APP=/app/src/spiffworkflow_backend

-if [[ "${WAIT_FOR_DB_TO_BE_READY:-}" == "true" ]]; then
+if [[ "${SPIFFWORKFLOW_BACKEND_WAIT_FOR_DB_TO_BE_READY:-}" == "true" ]]; then
   echo 'Waiting for db to be ready...'
   poetry run python ./bin/wait_for_db_to_be_ready.py
 fi

-if [[ "${DOWNGRADE_DB:-}" == "true" ]]; then
+if [[ "${SPIFFWORKFLOW_BACKEND_DOWNGRADE_DB:-}" == "true" ]]; then
   echo 'Downgrading database...'
   poetry run flask db downgrade
 fi
@@ -25,6 +25,14 @@ if [[ "${SPIFFWORKFLOW_BACKEND_UPGRADE_DB:-}" == "true" ]]; then
   poetry run flask db upgrade
 fi

+if [[ -z "${GUNICORN_LOG_LEVEL:-}" ]]; then
+  GUNICORN_LOG_LEVEL=debug
+fi
+
+if [[ -z "${GUNICORN_TIMEOUT_SECONDS:-}" ]]; then
+  GUNICORN_TIMEOUT_SECONDS=90
+fi
+
 port="${SPIFFWORKFLOW_BACKEND_PORT:-}"
 if [[ -z "$port" ]]; then
   port=7000
@@ -32,8 +40,8 @@ fi

 additional_args=""

-if [[ "${APPLICATION_ROOT:-}" != "/" ]]; then
-  additional_args="${additional_args} -e SCRIPT_NAME=${APPLICATION_ROOT}"
+if [[ "${SPIFFWORKFLOW_BACKEND_APPLICATION_ROOT:-}" != "/" ]]; then
+  additional_args="${additional_args} -e SCRIPT_NAME=${SPIFFWORKFLOW_BACKEND_APPLICATION_ROOT}"
 fi

 # HACK: if loading fixtures for acceptance tests when we do not need multiple workers
@@ -47,7 +55,25 @@ if [[ "${SPIFFWORKFLOW_BACKEND_RUN_DATA_SETUP:-}" != "false" ]]; then
   SPIFFWORKFLOW_BACKEND_FAIL_ON_INVALID_PROCESS_MODELS=false poetry run python bin/save_all_bpmn.py
 fi

-export IS_GUNICORN="true"
+if [[ -n "${SPIFFWORKFLOW_BACKEND_GIT_SSH_PRIVATE_KEY:-}" ]]; then
+  if [[ -z "${SPIFFWORKFLOW_BACKEND_GIT_SSH_PRIVATE_KEY_PATH:-}" ]]; then
+    export SPIFFWORKFLOW_BACKEND_GIT_SSH_PRIVATE_KEY_PATH=$(mktemp /tmp/ssh_private_key.XXXXXX)
+  fi
+  chmod 600 "${SPIFFWORKFLOW_BACKEND_GIT_SSH_PRIVATE_KEY_PATH}"
+  echo "${SPIFFWORKFLOW_BACKEND_GIT_SSH_PRIVATE_KEY}" >"${SPIFFWORKFLOW_BACKEND_GIT_SSH_PRIVATE_KEY_PATH}"
+fi
+
+# Assure that the the Process Models Directory is initialized as a git repo
+git init "${SPIFFWORKFLOW_BACKEND_BPMN_SPEC_ABSOLUTE_DIR}"
+git config --global --add safe.directory "${SPIFFWORKFLOW_BACKEND_BPMN_SPEC_ABSOLUTE_DIR}"
+
+export IS_GUNICORN="true"
 # THIS MUST BE THE LAST COMMAND!
-exec poetry run gunicorn ${additional_args} --bind "0.0.0.0:$port" --workers="$workers" --limit-request-line 8192 --timeout 90 --capture-output --access-logfile '-' --log-level debug wsgi:app
+exec poetry run gunicorn ${additional_args} \
+  --bind "0.0.0.0:$port" \
+  --workers="$workers" \
+  --limit-request-line 8192 \
+  --timeout "$GUNICORN_TIMEOUT_SECONDS" \
+  --capture-output \
+  --access-logfile '-' \
+  --log-level "$GUNICORN_LOG_LEVEL" wsgi:app

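With the rewritten exec line, the gunicorn timeout and log level become plain environment overrides (defaulting to 90 seconds and debug); a usage sketch, assuming the script's other required env vars are already set by the image:

    GUNICORN_LOG_LEVEL=info \
    GUNICORN_TIMEOUT_SECONDS=600 \
    SPIFFWORKFLOW_BACKEND_PORT=7000 \
      ./bin/boot_server_in_docker
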
@@ -7,8 +7,8 @@ function error_handler() {
 trap 'error_handler ${LINENO} $?' ERR
 set -o errtrace -o errexit -o nounset -o pipefail

-BPMN_SPEC_ABSOLUTE_DIR=$(./bin/find_sample_process_models)
-export BPMN_SPEC_ABSOLUTE_DIR
+SPIFFWORKFLOW_BACKEND_BPMN_SPEC_ABSOLUTE_DIR=$(./bin/find_sample_process_models)
+export SPIFFWORKFLOW_BACKEND_BPMN_SPEC_ABSOLUTE_DIR

 if [[ -z "${SPIFFWORKFLOW_BACKEND_DOCKER_COMPOSE_PROFILE:-}" ]]; then
   export SPIFFWORKFLOW_BACKEND_DOCKER_COMPOSE_PROFILE=run
@@ -31,16 +31,16 @@ if [[ -z "${SPIFFWORKFLOW_BACKEND_DOCKER_COMPOSE_PROFILE:-}" ]]; then
   export SPIFFWORKFLOW_BACKEND_DOCKER_COMPOSE_PROFILE=run
 fi

-if [[ -z "${SPIFFWORKFLOW_FRONTEND_URL:-}" ]]; then
-  export SPIFFWORKFLOW_FRONTEND_URL='http://167.172.242.138:7001'
+if [[ -z "${SPIFFWORKFLOW_BACKEND_URL_FOR_FRONTEND:-}" ]]; then
+  export SPIFFWORKFLOW_BACKEND_URL_FOR_FRONTEND='http://167.172.242.138:7001'
 fi

 if [[ -z "${SPIFFWORKFLOW_BACKEND_URL:-}" ]]; then
   export SPIFFWORKFLOW_BACKEND_URL='http://167.172.242.138:7000'
 fi

-if [[ -z "${OPEN_ID_SERVER_URL:-}" ]]; then
-  export OPEN_ID_SERVER_URL='http://167.172.242.138:7002'
+if [[ -z "${SPIFFWORKFLOW_BACKEND_OPEN_ID_SERVER_URL:-}" ]]; then
+  export SPIFFWORKFLOW_BACKEND_OPEN_ID_SERVER_URL='http://167.172.242.138:7002'
 fi

 git pull

@@ -7,19 +7,19 @@ function error_handler() {
 trap 'error_handler ${LINENO} $?' ERR
 set -o errtrace -o errexit -o nounset -o pipefail

-if [[ -z "${BPMN_SPEC_ABSOLUTE_DIR:-}" ]]; then
+if [[ -z "${SPIFFWORKFLOW_BACKEND_BPMN_SPEC_ABSOLUTE_DIR:-}" ]]; then
   script_dir="$( cd -- "$(dirname "$0")" >/dev/null 2>&1 ; pwd -P )"

-  BPMN_SPEC_ABSOLUTE_DIR="${script_dir}/../../../sample-process-models"
-  if [[ ! -d "$BPMN_SPEC_ABSOLUTE_DIR" ]]; then
-    BPMN_SPEC_ABSOLUTE_DIR="${script_dir}/../../sample-process-models"
-    if [[ ! -d "$BPMN_SPEC_ABSOLUTE_DIR" ]]; then
-      >&2 echo "ERROR: Could not find a location for the sample processes. Last tried: $BPMN_SPEC_ABSOLUTE_DIR"
+  SPIFFWORKFLOW_BACKEND_BPMN_SPEC_ABSOLUTE_DIR="${script_dir}/../../../sample-process-models"
+  if [[ ! -d "$SPIFFWORKFLOW_BACKEND_BPMN_SPEC_ABSOLUTE_DIR" ]]; then
+    SPIFFWORKFLOW_BACKEND_BPMN_SPEC_ABSOLUTE_DIR="${script_dir}/../../sample-process-models"
+    if [[ ! -d "$SPIFFWORKFLOW_BACKEND_BPMN_SPEC_ABSOLUTE_DIR" ]]; then
+      >&2 echo "ERROR: Could not find a location for the sample processes. Last tried: $SPIFFWORKFLOW_BACKEND_BPMN_SPEC_ABSOLUTE_DIR"
       exit 1
     fi
   fi

-  pushd "$BPMN_SPEC_ABSOLUTE_DIR" >/dev/null 2>&1
+  pushd "$SPIFFWORKFLOW_BACKEND_BPMN_SPEC_ABSOLUTE_DIR" >/dev/null 2>&1
   if [[ "$(git rev-parse --abbrev-ref HEAD)" == "main" ]]; then
     >&2 echo "ERROR: please do not use the main branch of sample-process-models. use dev"
     exit 1
@@ -27,4 +27,4 @@ if [[ -z "${BPMN_SPEC_ABSOLUTE_DIR:-}" ]]; then
   popd >/dev/null 2>&1
 fi

-realpath "$BPMN_SPEC_ABSOLUTE_DIR"
+realpath "$SPIFFWORKFLOW_BACKEND_BPMN_SPEC_ABSOLUTE_DIR"

@@ -8,7 +8,9 @@ from spiffworkflow_backend.models.process_instance import ProcessInstanceModel

 def main(process_instance_id: str):
     """Main."""
-    os.environ["SPIFFWORKFLOW_BACKEND_ENV"] = "development"
+    os.environ["SPIFFWORKFLOW_BACKEND_ENV"] = "local_development"
+    if os.environ.get("SPIFFWORKFLOW_BACKEND_BPMN_SPEC_ABSOLUTE_DIR") is None:
+        os.environ["SPIFFWORKFLOW_BACKEND_BPMN_SPEC_ABSOLUTE_DIR"] = "hey"
     flask_env_key = "FLASK_SESSION_SECRET_KEY"
     os.environ[flask_env_key] = "whatevs"
     app = create_app()

@@ -20,28 +20,35 @@ set -o errtrace -o errexit -o nounset -o pipefail
 # ./bin/get_token repeat_form_user_1 repeat_form_user_1 # actually has permissions to the resource in this script
 # ./bin/get_token ciadmin1 ciadmin1 '%2Fprocess-models'

-# KEYCLOAK_BASE_URL=http://localhost:7002
-KEYCLOAK_BASE_URL=https://keycloak.dev.spiffworkflow.org
-BACKEND_BASE_URL=http://localhost:7000
-# BACKEND_BASE_URL=https://api.dev.spiffworkflow.org
-REALM_NAME=spiffworkflow
+if [[ -z "${KEYCLOAK_BASE_URL:-}" ]]; then
+  # KEYCLOAK_BASE_URL=http://localhost:7002
+  KEYCLOAK_BASE_URL=https://keycloak.dev.spiffworkflow.org
+fi
+if [[ -z "${BACKEND_BASE_URL:-}" ]]; then
+  # BACKEND_BASE_URL=http://localhost:7000
+  BACKEND_BASE_URL=https://api.dev.spiffworkflow.org
+fi
+if [[ -z "${BACKEND_CLIENT_ID:-}" ]]; then
+  export BACKEND_CLIENT_ID=spiffworkflow-backend
+fi
+if [[ -z "${BACKEND_CLIENT_SECRET:-}" ]]; then
+  export BACKEND_CLIENT_SECRET="JXeQExm0JhQPLumgHtIIqf52bDalHz0q"  # noqa: S105
+fi
 USERNAME=${1-fin}
 PASSWORD=${2-fin}
+REALM_NAME=${3-spiffworkflow}

-FRONTEND_CLIENT_ID=spiffworkflow-frontend
-BACKEND_CLIENT_ID=spiffworkflow-backend
-BACKEND_CLIENT_SECRET="JXeQExm0JhQPLumgHtIIqf52bDalHz0q"  # noqa: S105
 SECURE=false

 BACKEND_BASIC_AUTH=$(echo -n "${BACKEND_CLIENT_ID}:${BACKEND_CLIENT_SECRET}" | base64)
 KEYCLOAK_URL=$KEYCLOAK_BASE_URL/realms/$REALM_NAME/protocol/openid-connect/token

-echo "Using Keycloak: $KEYCLOAK_URL"
-echo "realm: $REALM_NAME"
-echo "client-id: $FRONTEND_CLIENT_ID"
-echo "username: $USERNAME"
-echo "password: $PASSWORD"
-echo "secure: $SECURE"
+>&2 echo "Using Keycloak: $KEYCLOAK_URL"
+>&2 echo "realm: $REALM_NAME"
+>&2 echo "client-id: $BACKEND_CLIENT_ID"
+>&2 echo "username: $USERNAME"
+>&2 echo "password: $PASSWORD"
+>&2 echo "secure: $SECURE"


 if [[ $SECURE = 'y' ]]; then
@@ -61,7 +68,9 @@ result=$(curl -s -X POST "$KEYCLOAK_URL" "$INSECURE" \
   -d "client_id=$BACKEND_CLIENT_ID" \
 )
 backend_token=$(jq -r '.access_token' <<< "$result")
-curl --fail -v "${BACKEND_BASE_URL}/v1.0/process-groups?per_page=1" -H "Authorization: Bearer $backend_token"
+echo "$backend_token"
+# curl --fail -v "${BACKEND_BASE_URL}/v1.0/process-groups?per_page=1" -H "Authorization: Bearer $backend_token"
+# curl -v -X POST "${BACKEND_BASE_URL}/v1.0/login_with_access_token?access_token=${backend_token}" -H "Authorization: Bearer $backend_token"


 ### Get with frontend and exchange with backend - not configured to work in keycloak atm

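Per the script's own usage comments, username, password, and (now) realm are positional, while the Keycloak and backend base URLs fall back to the dev defaults unless exported; a sketch against a local stack:

    KEYCLOAK_BASE_URL=http://localhost:7002 \
    BACKEND_BASE_URL=http://localhost:7000 \
      ./bin/get_token ciadmin1 ciadmin1 spiffworkflow
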
@@ -12,12 +12,9 @@ set -o errtrace -o errexit -o nounset -o pipefail
 bpmn_models_absolute_dir="$1"
 git_commit_message="$2"
 git_branch="$3"
-git_commit_username="$4"
-git_commit_email="$5"
-git_commit_password="$6"

-if [[ -z "${6:-}" ]]; then
-  >&2 echo "usage: $(basename "$0") [bpmn_models_absolute_dir] [git_commit_message] [git_branch] [git_commit_username] [git_commit_email]"
+if [[ -z "${3:-}" ]]; then
+  >&2 echo "usage: $(basename "${0}") [bpmn_models_absolute_dir] [git_commit_message] [git_branch]"
   exit 1
 fi

@@ -27,26 +24,27 @@ function failed_to_get_lock() {
 }

 function run() {
-  cd "$bpmn_models_absolute_dir"
+  cd "${bpmn_models_absolute_dir}"
   git add .

   # https://unix.stackexchange.com/a/155077/456630
   if [ -z "$(git status --porcelain)" ]; then
     echo "No changes to commit"
-  else
-    PAT="${git_commit_username}:${git_commit_password}"
-    AUTH=$(echo -n "$PAT" | openssl base64 | tr -d '\n')
-
-    git config --local user.name "$git_commit_username"
-    git config --local user.email "$git_commit_email"
-    git config --local http.extraHeader "Authorization: Basic $AUTH"
-    git commit -m "$git_commit_message"
-    git push --set-upstream origin "$git_branch"
-    git config --unset --local http.extraHeader
+    return
   fi
+
+  # FIXME: the environment variables may not be working with the root user which we are using in the docker container.
+  # we see some evidence with this issue https://stackoverflow.com/questions/68975943/git-config-environment-variables
+  # and it didn't seem to work for us either so set them like this for now.
+  # One day we should probably not use the root user in the docker container.
+  git config --local user.email "$GIT_COMMITTER_EMAIL"
+  git config --local user.name "$GIT_COMMITTER_NAME"
+
+  git commit -m "${git_commit_message}"
+  git push --set-upstream origin "${git_branch}"
 }

-exec {lock_fd}>/var/lock/mylockfile || failed_to_get_lock
-flock --timeout 60 "$lock_fd" || failed_to_get_lock
+exec {lock_fd}>/var/lock/spiff-workflow-git-lock || failed_to_get_lock
+flock --timeout 60 "${lock_fd}" || failed_to_get_lock
 run
-flock -u "$lock_fd"
+flock -u "${lock_fd}"

@@ -1,9 +1,8 @@
 """Grabs tickets from csv and makes process instances."""
 import csv

-from flask_bpmn.models.db import db
-
 from spiffworkflow_backend import get_hacked_up_app_for_script
+from spiffworkflow_backend.models.db import db
 from spiffworkflow_backend.models.process_instance import ProcessInstanceModel
 from spiffworkflow_backend.models.user import UserModel
 from spiffworkflow_backend.services.process_instance_processor import (

@@ -5,7 +5,7 @@ def main():
     """Use main to avoid global namespace."""
     import csv

-    from flask_bpmn.models.db import db
+    from spiffworkflow_backend.models.db import db

     from spiffworkflow_backend.models.process_instance import ProcessInstanceModel
     from spiffworkflow_backend.models.user import UserModel

@@ -0,0 +1,40 @@
+#!/usr/bin/env bash
+
+function error_handler() {
+  >&2 echo "Exited with BAD EXIT CODE '${2}' in ${0} script at line: ${1}."
+  exit "$2"
+}
+trap 'error_handler ${LINENO} $?' ERR
+set -o errtrace -o errexit -o nounset -o pipefail
+
+script_dir="$( cd -- "$(dirname "$0")" >/dev/null 2>&1 ; pwd -P )"
+
+if [[ -z "${KEYCLOAK_BASE_URL:-}" ]]; then
+  # export KEYCLOAK_BASE_URL=http://localhost:7002
+  export KEYCLOAK_BASE_URL=https://keycloak.dev.spiffworkflow.org
+fi
+if [[ -z "${BACKEND_BASE_URL:-}" ]]; then
+  # export BACKEND_BASE_URL=http://localhost:7000
+  export BACKEND_BASE_URL=https://api.dev.spiffworkflow.org
+fi
+
+user_list="${1}"
+if [[ -z "${1:-}" ]]; then
+  >&2 echo "usage: $(basename "$0") [user_list]"
+  exit 1
+fi
+REALM_NAME=${2-spiffworkflow}
+
+while read -r input_line; do
+  if ! grep -qE '(^#|email)' <<<"$input_line" ; then
+    username=$(awk -F '@' '{print $1}' <<<"$input_line")
+    access_token=$("${script_dir}/get_token" "$username" "$username" "$REALM_NAME")
+    if [[ -z "$access_token" || "$access_token" == "null" ]]; then
+      >&2 echo "ERROR: failed to get access token for '$username'"
+    else
+      echo "access_token: ${access_token}"
+      curl -v -X POST "${BACKEND_BASE_URL}/v1.0/login_with_access_token?access_token=${access_token}" -H "Authorization: Bearer $access_token"
+    fi
+  fi
+done <"$user_list"

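The new script takes a user-list file (one email per line; lines matching '^#' or containing 'email' are skipped) and an optional realm; a hypothetical invocation, since the file's own path is not captured in this extraction:

    ./bin/login_users keycloak/test_user_lists/status spiffworkflow
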
@@ -9,18 +9,18 @@ set -o errtrace -o errexit -o nounset -o pipefail

 export FLASK_SESSION_SECRET_KEY="this_is_recreate_db_secret_key"

-if [[ -z "${BPMN_SPEC_ABSOLUTE_DIR:-}" ]]; then
+if [[ -z "${SPIFFWORKFLOW_BACKEND_BPMN_SPEC_ABSOLUTE_DIR:-}" ]]; then
   script_dir="$( cd -- "$(dirname "$0")" >/dev/null 2>&1 ; pwd -P )"

-  BPMN_SPEC_ABSOLUTE_DIR="${script_dir}/../../../sample-process-models"
-  if [[ ! -d "$BPMN_SPEC_ABSOLUTE_DIR" ]]; then
-    BPMN_SPEC_ABSOLUTE_DIR="${script_dir}/../../sample-process-models"
-    if [[ ! -d "$BPMN_SPEC_ABSOLUTE_DIR" ]]; then
-      >&2 echo "ERROR: Could not find a location for the sample processes. Last tried: $BPMN_SPEC_ABSOLUTE_DIR"
+  SPIFFWORKFLOW_BACKEND_BPMN_SPEC_ABSOLUTE_DIR="${script_dir}/../../../sample-process-models"
+  if [[ ! -d "$SPIFFWORKFLOW_BACKEND_BPMN_SPEC_ABSOLUTE_DIR" ]]; then
+    SPIFFWORKFLOW_BACKEND_BPMN_SPEC_ABSOLUTE_DIR="${script_dir}/../../sample-process-models"
+    if [[ ! -d "$SPIFFWORKFLOW_BACKEND_BPMN_SPEC_ABSOLUTE_DIR" ]]; then
+      >&2 echo "ERROR: Could not find a location for the sample processes. Last tried: $SPIFFWORKFLOW_BACKEND_BPMN_SPEC_ABSOLUTE_DIR"
       exit 1
     fi
   fi
-  export BPMN_SPEC_ABSOLUTE_DIR
+  export SPIFFWORKFLOW_BACKEND_BPMN_SPEC_ABSOLUTE_DIR
 fi

 tasks=""
@@ -35,33 +35,35 @@ if [[ "${1:-}" == "clean" ]]; then
 fi

   rm -f ./src/instance/*.sqlite3
-  mysql -uroot -e "DROP DATABASE IF EXISTS spiffworkflow_backend_development"
-  mysql -uroot -e "DROP DATABASE IF EXISTS spiffworkflow_backend_testing"
+  mysql -uroot -e "DROP DATABASE IF EXISTS spiffworkflow_backend_local_development"
+  mysql -uroot -e "DROP DATABASE IF EXISTS spiffworkflow_backend_unit_testing"

   # TODO: check to see if the db already exists and we can connect to it. also actually clean it up.
   # start postgres in background with one db
-  if [[ "${SPIFF_DATABASE_TYPE:-}" == "postgres" ]]; then
+  if [[ "${SPIFFWORKFLOW_BACKEND_DATABASE_TYPE:-}" == "postgres" ]]; then
     if ! docker exec -it postgres-spiff psql -U spiffworkflow_backend spiffworkflow_backend_testing -c "select 1"; then
       docker run --name postgres-spiff -p 5432:5432 -e POSTGRES_PASSWORD=spiffworkflow_backend -e POSTGRES_USER=spiffworkflow_backend -e POSTGRES_DB=spiffworkflow_backend_testing -d postgres
       sleep 4 # classy
     fi
-    if ! docker exec -it postgres-spiff psql -U spiffworkflow_backend spiffworkflow_backend_development -c "select 1"; then
+    if ! docker exec -it postgres-spiff psql -U spiffworkflow_backend spiffworkflow_backend_local_development -c "select 1"; then
       # create other db. spiffworkflow_backend_testing came with the docker run.
-      docker exec -it postgres-spiff psql -U spiffworkflow_backend spiffworkflow_backend_testing -c "create database spiffworkflow_backend_development;"
+      docker exec -it postgres-spiff psql -U spiffworkflow_backend spiffworkflow_backend_testing -c "create database spiffworkflow_backend_local_development;"
     fi
   fi
 elif [[ "${1:-}" == "migrate" ]]; then
   tasks="$tasks migrate"
 fi
 tasks="$tasks upgrade"

-mysql -uroot -e "CREATE DATABASE IF NOT EXISTS spiffworkflow_backend_development"
-mysql -uroot -e "CREATE DATABASE IF NOT EXISTS spiffworkflow_backend_testing"
+mysql -uroot -e "CREATE DATABASE IF NOT EXISTS spiffworkflow_backend_local_development"
+mysql -uroot -e "CREATE DATABASE IF NOT EXISTS spiffworkflow_backend_unit_testing"

 for task in $tasks; do
-  SPIFFWORKFLOW_BACKEND_ENV=development FLASK_APP=src/spiffworkflow_backend poetry run flask db "$task"
+  SPIFFWORKFLOW_BACKEND_ENV=local_development FLASK_APP=src/spiffworkflow_backend poetry run flask db "$task"
 done

-SPIFFWORKFLOW_BACKEND_ENV=testing FLASK_APP=src/spiffworkflow_backend poetry run flask db upgrade
-if [[ -n "${SPIFFWORKFLOW_BACKEND_ENV:-}" ]] && ! grep -Eq '^(development|testing)$' <<< "$SPIFFWORKFLOW_BACKEND_ENV"; then
+SPIFFWORKFLOW_BACKEND_ENV=unit_testing FLASK_APP=src/spiffworkflow_backend poetry run flask db upgrade
+if [[ -n "${SPIFFWORKFLOW_BACKEND_ENV:-}" ]] && ! grep -Eq '^(local_development|unit_testing)$' <<< "$SPIFFWORKFLOW_BACKEND_ENV"; then
   mysql -uroot -e "CREATE DATABASE IF NOT EXISTS spiffworkflow_backend_$SPIFFWORKFLOW_BACKEND_ENV"
   FLASK_APP=src/spiffworkflow_backend poetry run flask db upgrade
 fi

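Judging by its FLASK_SESSION_SECRET_KEY value, this is the recreate-db script; its optional first argument selects the behavior, roughly:

    ./bin/recreate_db clean    # drop and recreate the local_development and unit_testing databases
    ./bin/recreate_db migrate  # run "flask db migrate" before the upgrade
    ./bin/recreate_db          # just run "flask db upgrade"
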
@@ -14,14 +14,14 @@ if [[ "$arg" == "acceptance" ]]; then
 fi

 if [[ -z "${SPIFFWORKFLOW_BACKEND_ENV:-}" ]]; then
-  export SPIFFWORKFLOW_BACKEND_ENV=development
+  export SPIFFWORKFLOW_BACKEND_ENV=local_development
 fi

-BPMN_SPEC_ABSOLUTE_DIR=$(./bin/find_sample_process_models)
-export BPMN_SPEC_ABSOLUTE_DIR
+SPIFFWORKFLOW_BACKEND_BPMN_SPEC_ABSOLUTE_DIR=$(./bin/find_sample_process_models)
+export SPIFFWORKFLOW_BACKEND_BPMN_SPEC_ABSOLUTE_DIR

 export FLASK_SESSION_SECRET_KEY=super_secret_key
-export APPLICATION_ROOT="/"
+export SPIFFWORKFLOW_BACKEND_APPLICATION_ROOT="/"

 if [[ -n "${SPIFFWORKFLOW_BACKEND_LOAD_FIXTURE_DATA:-}" ]]; then
   ./bin/boot_server_in_docker
@@ -29,13 +29,13 @@ else
   export FLASK_DEBUG=1

   if [[ "${SPIFFWORKFLOW_BACKEND_RUN_DATA_SETUP:-}" != "false" ]]; then
-    RUN_BACKGROUND_SCHEDULER=false SPIFFWORKFLOW_BACKEND_FAIL_ON_INVALID_PROCESS_MODELS=false poetry run python bin/save_all_bpmn.py
+    SPIFFWORKFLOW_BACKEND_RUN_BACKGROUND_SCHEDULER=false SPIFFWORKFLOW_BACKEND_FAIL_ON_INVALID_PROCESS_MODELS=false poetry run python bin/save_all_bpmn.py
   fi

-  if [[ -z "${RUN_BACKGROUND_SCHEDULER:-}" ]]; then
-    RUN_BACKGROUND_SCHEDULER=true
+  if [[ -z "${SPIFFWORKFLOW_BACKEND_RUN_BACKGROUND_SCHEDULER:-}" ]]; then
+    SPIFFWORKFLOW_BACKEND_RUN_BACKGROUND_SCHEDULER=true
   fi

   # this line blocks
-  RUN_BACKGROUND_SCHEDULER="${RUN_BACKGROUND_SCHEDULER}" FLASK_APP=src/spiffworkflow_backend poetry run flask run -p 7000
+  SPIFFWORKFLOW_BACKEND_RUN_BACKGROUND_SCHEDULER="${SPIFFWORKFLOW_BACKEND_RUN_BACKGROUND_SCHEDULER}" FLASK_APP=src/spiffworkflow_backend poetry run flask run -p 7000
 fi

@@ -0,0 +1,33 @@
+"""Get the bpmn process json for a given process instance id and store it in /tmp."""
+import os
+import sys
+
+from spiffworkflow_backend import create_app
+from spiffworkflow_backend.models.process_instance import ProcessInstanceModel
+from spiffworkflow_backend.models.user import UserModel
+from spiffworkflow_backend.services.secret_service import SecretService
+
+
+def main(env_file: str):
+    """Main."""
+    os.environ["SPIFFWORKFLOW_BACKEND_ENV"] = "local_development"
+    if os.environ.get("SPIFFWORKFLOW_BACKEND_BPMN_SPEC_ABSOLUTE_DIR") is None:
+        os.environ["SPIFFWORKFLOW_BACKEND_BPMN_SPEC_ABSOLUTE_DIR"] = "hey"
+    flask_env_key = "FLASK_SESSION_SECRET_KEY"
+    os.environ[flask_env_key] = "whatevs"
+    app = create_app()
+    with app.app_context():
+        contents = None
+        with open(env_file, 'r') as f:
+            contents = f.readlines()
+        for line in contents:
+            key, value_raw = line.split('=')
+            value = value_raw.replace('"', '').rstrip()
+            SecretService().add_secret(key, value, UserModel.query.first().id)
+
+
+if len(sys.argv) < 2:
+    raise Exception("env file must be specified")
+
+main(sys.argv[1])

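The loop above splits each line of the given file on '=' and strips double quotes and trailing whitespace from the value before saving it as a secret, so the expected input is a simple env-style file; a hypothetical example (key names are illustrative):

    MY_CONNECTOR_TOKEN="abc123"
    MY_WEBHOOK_URL="https://example.com/hook"
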
@@ -7,14 +7,12 @@ function error_handler() {
 trap 'error_handler ${LINENO} $?' ERR
 set -o errtrace -o errexit -o nounset -o pipefail

-max_attempts="${1:-}"
-if [[ -z "$max_attempts" ]]; then
-  max_attempts=100
-fi
+max_attempts="${1:-100}"
+port="${2:-7000}"

 echo "waiting for backend to come up..."
 attempts=0
-while [[ "$(curl -s -o /dev/null -w '%{http_code}' http://localhost:7000/v1.0/status)" != "200" ]]; do
+while [[ "$(curl -s -o /dev/null -w '%{http_code}' "http://localhost:${port}/v1.0/status")" != "200" ]]; do
   if [[ "$attempts" -gt "$max_attempts" ]]; then
     >&2 echo "ERROR: Server not up after $max_attempts attempts. There is probably a problem"
     exit 1
@@ -22,3 +20,4 @@ while [[ "$(curl -s -o /dev/null -w '%{http_code}' http://localhost:7000/v1.0/status)" != "200" ]]; do
   attempts=$(( attempts + 1 ))
   sleep 1
 done
+echo "backend up"

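Both parameters now default (100 attempts, port 7000) and are positional; a sketch, with the script name assumed since it is not captured in this extraction:

    ./bin/wait_for_backend_to_be_up 50 7001
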
@@ -5,10 +5,10 @@ import shutil
 import pytest
 from flask.app import Flask
 from flask.testing import FlaskClient
-from flask_bpmn.models.db import db
-from flask_bpmn.models.db import SpiffworkflowBaseDBModel
 from tests.spiffworkflow_backend.helpers.base_test import BaseTest

+from spiffworkflow_backend.models.db import db
+from spiffworkflow_backend.models.db import SpiffworkflowBaseDBModel
 from spiffworkflow_backend.models.human_task_user import HumanTaskUserModel
 from spiffworkflow_backend.models.process_instance import ProcessInstanceModel
 from spiffworkflow_backend.models.user import UserModel
@@ -37,7 +37,7 @@ from spiffworkflow_backend import create_app  # noqa: E402
 @pytest.fixture(scope="session")
 def app() -> Flask:
     """App."""
-    os.environ["SPIFFWORKFLOW_BACKEND_ENV"] = "testing"
+    os.environ["SPIFFWORKFLOW_BACKEND_ENV"] = "unit_testing"
     os.environ["FLASK_SESSION_SECRET_KEY"] = "super_secret_key"
     app = create_app()

@@ -50,25 +50,25 @@ services:
     build:
       context: .
     environment:
-      - APPLICATION_ROOT=/
-      - SPIFFWORKFLOW_BACKEND_ENV=${SPIFFWORKFLOW_BACKEND_ENV:-development}
       - FLASK_DEBUG=0
       - FLASK_SESSION_SECRET_KEY=${FLASK_SESSION_SECRET_KEY:-super_secret_key}
-      - OPEN_ID_SERVER_URL=${OPEN_ID_SERVER_URL:-http://localhost:7002/realms/spiffworkflow}
-      - SPIFFWORKFLOW_FRONTEND_URL=${SPIFFWORKFLOW_FRONTEND_URL:-http://localhost:7001}
-      - SPIFFWORKFLOW_BACKEND_URL=${SPIFFWORKFLOW_BACKEND_URL:-http://localhost:7000}
-      - SPIFFWORKFLOW_BACKEND_PORT=7000
-      - SPIFFWORKFLOW_BACKEND_UPGRADE_DB=true
+      - SPIFFWORKFLOW_BACKEND_APPLICATION_ROOT=/
+      - SPIFFWORKFLOW_BACKEND_BPMN_SPEC_ABSOLUTE_DIR=/app/process_models
       - SPIFFWORKFLOW_BACKEND_DATABASE_URI=mysql+mysqlconnector://root:${SPIFFWORKFLOW_BACKEND_MYSQL_ROOT_DATABASE:-my-secret-pw}@localhost:7003/${SPIFFWORKFLOW_BACKEND_DATABASE_NAME:-spiffworkflow_backend_development}
-      - BPMN_SPEC_ABSOLUTE_DIR=/app/process_models
+      - SPIFFWORKFLOW_BACKEND_ENV=${SPIFFWORKFLOW_BACKEND_ENV:-local_development}
       - SPIFFWORKFLOW_BACKEND_LOAD_FIXTURE_DATA=${SPIFFWORKFLOW_BACKEND_LOAD_FIXTURE_DATA:-false}
+      - SPIFFWORKFLOW_BACKEND_OPEN_ID_SERVER_URL=${SPIFFWORKFLOW_BACKEND_OPEN_ID_SERVER_URL:-http://localhost:7002/realms/spiffworkflow}
       - SPIFFWORKFLOW_BACKEND_PERMISSIONS_FILE_NAME=${SPIFFWORKFLOW_BACKEND_PERMISSIONS_FILE_NAME:-acceptance_tests.yml}
-      - RUN_BACKGROUND_SCHEDULER=true
+      - SPIFFWORKFLOW_BACKEND_PORT=7000
+      - SPIFFWORKFLOW_BACKEND_RUN_BACKGROUND_SCHEDULER=true
+      - SPIFFWORKFLOW_BACKEND_URL_FOR_FRONTEND=${SPIFFWORKFLOW_BACKEND_URL_FOR_FRONTEND:-http://localhost:7001}
+      - SPIFFWORKFLOW_BACKEND_UPGRADE_DB=true
+      - SPIFFWORKFLOW_BACKEND_URL=${SPIFFWORKFLOW_BACKEND_URL:-http://localhost:7000}
     ports:
       - "7000:7000"
     network_mode: host
     volumes:
-      - ${BPMN_SPEC_ABSOLUTE_DIR:-../../sample-process-models}:/app/process_models
+      - ${SPIFFWORKFLOW_BACKEND_BPMN_SPEC_ABSOLUTE_DIR:-../../sample-process-models}:/app/process_models
       - ./log:/app/log
     healthcheck:
       test: curl localhost:7000/v1.0/status --fail
@@ -82,7 +82,7 @@ services:
     profiles:
       - debug
     volumes:
-      - ${BPMN_SPEC_ABSOLUTE_DIR:-../../sample-process-models}:/app/process_models
+      - ${SPIFFWORKFLOW_BACKEND_BPMN_SPEC_ABSOLUTE_DIR:-../../sample-process-models}:/app/process_models
       - ./:/app
     command: /app/bin/boot_in_docker_debug_mode

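Every renamed variable keeps a fallback in the compose file itself, so one-off overrides can stay on the command line; a sketch:

    SPIFFWORKFLOW_BACKEND_ENV=local_development \
    SPIFFWORKFLOW_BACKEND_LOAD_FIXTURE_DATA=true \
      docker compose up
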
@@ -7,14 +7,24 @@ function error_handler() {
 trap 'error_handler ${LINENO} $?' ERR
 set -o errtrace -o errexit -o nounset -o pipefail

 # you can get a list of users from the keycloak realm file like:
 # grep '"email" :' keycloak/realm_exports/spiffworkflow-realm.json | awk -F : '{print $2}' | sed -E 's/ "//g' | sed -E 's/",//g' > s

-KEYCLOAK_BASE_URL=http://localhost:7002
-REALM_NAME=master
+# we keep some of these in keycloak/test_user_lists
+# spiffworkflow-realm.json is a mashup of the status and sartography user lists.
+user_file_with_one_email_per_line="${1:-}"
+keycloak_realm="${2:-spiffworkflow}"
+if [[ -z "${1:-}" ]]; then
+  >&2 echo "usage: $(basename "$0") [user_file_with_one_email_per_line]"
+  exit 1
+fi
+
+if [[ -z "${KEYCLOAK_BASE_URL:-}" ]]; then
+  KEYCLOAK_BASE_URL=http://localhost:7002
+fi
+
+REALM_NAME="$keycloak_realm"
 ADMIN_USERNAME="admin"
 ADMIN_PASSWORD="admin"
 SECURE=false
@@ -37,14 +47,66 @@ result=$(curl --fail -s -X POST "$KEYCLOAK_URL" "$INSECURE" \
 )
 backend_token=$(jq -r '.access_token' <<< "$result")

-while read -r user_email; do
-  if [[ -n "$user_email" ]]; then
-    username=$(awk -F '@' '{print $1}' <<<"$user_email")
-    credentials='{"type":"password","value":"'"${username}"'","temporary":false}'
-
-    curl --fail --location --request POST 'http://localhost:7002/admin/realms/spiffworkflow/users' \
-      -H 'Content-Type: application/json' \
-      -H "Authorization: Bearer $backend_token" \
-      --data-raw '{"email":"'"${user_email}"'", "enabled":"true", "username":"'"${username}"'", "credentials":['"${credentials}"']}'
+function add_user() {
+  local user_email=$1
+  local username=$2
+  local user_attribute_one=$3
+
+  local credentials='{"type":"password","value":"'"${username}"'","temporary":false}'
+
+  local data='{"email":"'"${user_email}"'", "enabled":"true", "username":"'"${username}"'", "credentials":['"${credentials}"']'
+  if [[ -n "$user_attribute_one" ]]; then
+    data=''${data}', "attributes": {"'${custom_attribute_one}'": [ "'$user_attribute_one'" ]}'
+  fi
+  data="${data}}"
+
+  local http_code
+  http_code=$(curl --silent -o /dev/null -w '%{http_code}' --location --request POST "${KEYCLOAK_BASE_URL}/admin/realms/${keycloak_realm}/users" \
+    -H 'Content-Type: application/json' \
+    -H "Authorization: Bearer $backend_token" \
+    --data-raw "$data")
+  echo "$http_code"
+}
+
+first_line_processed="false"
+custom_attribute_one=''
+
+while read -r input_line; do
+  if ! grep -qE '^#' <<<"$input_line" ; then
+    if [[ "$first_line_processed" == "false" ]]; then
+      email_header=$(awk -F ',' '{print $1}' <<<"$input_line")
+      if [[ "$email_header" != "email" ]]; then
+        >&2 echo "ERROR: the first column in the first row must be email."
+        exit 1
+      fi
+      custom_attribute_one=$(awk -F ',' '{print $2}' <<<"$input_line")
+      first_line_processed="true"
+    elif [[ -n "$input_line" ]]; then
+      user_email=$(awk -F ',' '{print $1}' <<<"$input_line")
+      username=$(awk -F '@' '{print $1}' <<<"$user_email")
+      user_attribute_one=$(awk -F ',' '{print $2}' <<<"$input_line")
+      http_code=$(add_user "$user_email" "$username" "$user_attribute_one")
+
+      if [[ "$http_code" == "409" ]]; then
+        user_info=$(curl --fail --silent --location --request GET "${KEYCLOAK_BASE_URL}/admin/realms/${keycloak_realm}/users?username=${username}&exact=true" \
+          -H 'Content-Type: application/json' \
+          -H "Authorization: Bearer $backend_token")
+
+        user_id=$(jq -r '.[0] | .id' <<<"$user_info")
+        if [[ -z "$user_id" ]]; then
+          >&2 echo "ERROR: Could not find user_id for user: ${user_email}"
+          exit 1
+        fi
+        curl --fail --location --silent --request DELETE "${KEYCLOAK_BASE_URL}/admin/realms/${keycloak_realm}/users/${user_id}" \
+          -H 'Content-Type: application/json' \
+          -H "Authorization: Bearer $backend_token"
+
+        http_code=$(add_user "$user_email" "$username" "$user_attribute_one")
+      fi
+      if [[ "$http_code" != "201" ]]; then
+        >&2 echo "ERROR: Failed to create user: ${user_email} with http_code: ${http_code}"
+        exit 1
+      fi
+    fi
   fi
 done <"$user_file_with_one_email_per_line"

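The rewritten loop treats the first non-comment row as a header: the first column must be 'email' and the second column names the custom attribute applied to each user; a minimal input sketch matching the test_user_lists format shown further down:

    email,spiffworkflow-employeeid
    alex@sartography.com,111
    dan@sartography.com,115
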
@@ -21,6 +21,9 @@ docker exec keycloak /opt/keycloak/bin/kc.sh export --dir "${docker_container_pa
 docker cp "keycloak:${docker_container_path}" "$local_tmp_dir"

 for realm in $realms ; do
+  if ! grep -Eq '\-realm$' <<< "$realm"; then
+    realm="${realm}-realm"
+  fi
   cp "${local_tmp_dir}/hey/${realm}.json" "${script_dir}/../realm_exports/"
 done

@@ -15,6 +15,11 @@ setup_traps

 set -o errtrace -o errexit -o nounset -o pipefail

+realm_name="${1:-}"
+if [[ -z "$realm_name" ]]; then
+  realm_name="spiffworkflow"
+fi
+
 if ! docker network inspect spiffworkflow > /dev/null 2>&1; then
   docker network create spiffworkflow
 fi
@@ -45,15 +50,15 @@ docker run \
   -Dkeycloak.profile.feature.admin_fine_grained_authz=enabled

 script_dir="$( cd -- "$(dirname "$0")" >/dev/null 2>&1 ; pwd -P )"
-cp "${script_dir}/../realm_exports/spiffworkflow-realm.json" /tmp/spiffworkflow-realm.json
+cp "${script_dir}/../realm_exports/${realm_name}-realm.json" /tmp/${realm_name}-realm.json
 spiff_subdomain="unused-for-local-dev"
-perl -pi -e "s/{{SPIFF_SUBDOMAIN}}/${spiff_subdomain}/g" /tmp/spiffworkflow-realm.json
-docker cp /tmp/spiffworkflow-realm.json keycloak:/tmp
+perl -pi -e "s/{{SPIFF_SUBDOMAIN}}/${spiff_subdomain}/g" /tmp/${realm_name}-realm.json
+docker cp /tmp/${realm_name}-realm.json keycloak:/tmp

 sleep 20
 remove_traps
 set +e
-import_output=$(docker exec keycloak /opt/keycloak/bin/kc.sh import --file /tmp/spiffworkflow-realm.json 2>&1)
+import_output=$(docker exec keycloak /opt/keycloak/bin/kc.sh import --file /tmp/${realm_name}-realm.json 2>&1)
 setup_traps
 set -e
 if ! grep -qE "Import finished successfully" <<<"$import_output"; then
@@ -66,7 +71,7 @@ echo 'imported realms'
 if [ "${TURN_OFF_SSL:-}" == "true" ]; then
   docker exec -it keycloak /opt/keycloak/bin/kcadm.sh config credentials --server http://localhost:8080 --realm master --user admin --password admin
   docker exec -it keycloak /opt/keycloak/bin/kcadm.sh update realms/master -s sslRequired=NONE
-  docker exec -it keycloak /opt/keycloak/bin/kcadm.sh update realms/spiffworkflow -s sslRequired=NONE
+  docker exec -it keycloak /opt/keycloak/bin/kcadm.sh update realms/${realm_name} -s sslRequired=NONE
   echo 'turned off SSL requirement'
 fi

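The realm name is now an optional first argument that selects which realm export gets copied, templated, and imported; a sketch, assuming the script's usual location under keycloak/bin:

    ./keycloak/bin/start_keycloak sartography
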
@@ -7,14 +7,12 @@ function error_handler() {
 trap 'error_handler ${LINENO} $?' ERR
 set -o errtrace -o errexit -o nounset -o pipefail

-max_attempts="${1:-}"
-if [[ -z "$max_attempts" ]]; then
-  max_attempts=100
-fi
+max_attempts="${1:-100}"
+port="${2:-7002}"

-echo "waiting for backend to come up..."
+echo "waiting for keycloak to come up..."
 attempts=0
-while [[ "$(curl -s -o /dev/null -w '%{http_code}' http://localhost:7002/realms/master/.well-known/openid-configuration)" != "200" ]]; do
+while [[ "$(curl -s -o /dev/null -w '%{http_code}' "http://localhost:${port}/realms/master/.well-known/openid-configuration")" != "200" ]]; do
   if [[ "$attempts" -gt "$max_attempts" ]]; then
     >&2 echo "ERROR: Server not up after $max_attempts attempts. There is probably a problem"
     exit 1
@@ -22,3 +20,4 @@ while [[ "$(curl -s -o /dev/null -w '%{http_code}' http://localhost:7002/realms/master/.well-known/openid-configuration)" != "200" ]]; do
   attempts=$(( attempts + 1 ))
   sleep 1
 done
+echo "keycloak up"

File diff suppressed because it is too large
File diff suppressed because it is too large

@@ -0,0 +1,15 @@
+email,spiffworkflow-employeeid
+admin@spiffworkflow.org
+alex@sartography.com,111
+dan@sartography.com,115
+daniel@sartography.com
+elizabeth@sartography.com
+j@sartography.com
+jason@sartography.com
+jon@sartography.com
+kb@sartography.com
+kevin@sartography.com
+madhurya@sartography.com,160
+madhurya@ymail.com,161
+mike@sartography.com
+natalia@sartography.com

@@ -1,9 +1,53 @@
-finance.lead@status.im
-legal.lead@status.im
-program.lead@status.im
-services.lead@status.im
-finance.sme@status.im
-infra.sme@status.im
-legal.sme@status.im
-security.sme@status.im
-
+email,spiffworkflow-employeeid
+# admin@spiffworkflow.org
+amir@status.im
+app.program.lead@status.im,121
+core1.contributor@status.im,155
+core2.contributor@status.im,156
+core3.contributor@status.im,157
+core4.contributor@status.im,158
+core5.contributor@status.im,159
+core@status.im,113
+dao.project.lead@status.im
+desktop.program.lead@status.im
+desktop.project.lead@status.im
+fin@status.im,118
+finance.lead@status.im,128
+finance_user1@status.im
+harmeet@status.im,109
+infra.project-lead@status.im,130
+infra.sme@status.im,119
+infra1.sme@status.im,131
+infra2.sme@status.im,132
+infra3.sme@status.im,167
+jakub@status.im
+jarrad@status.im
+lead@status.im,114
+legal.project-lead@status.im,133
+legal.sme@status.im,125
+legal1.sme@status.im,134
+legal2.sme@status.im,165
+legal3.sme@status.im,166
+manuchehr@status.im,110
+peopleops.partner.sme@status.im,148
+peopleops.partner1.sme@status.im,149
+peopleops.partner2.sme@status.im,173
+peopleops.partner3.sme@status.im,174
+peopleops.partner@status.im,150
+peopleops.project-lead@status.im,147
+peopleops.talent.sme@status.im,143
+peopleops.talent1.sme@status.im,142
+peopleops.talent@status.im,141
+ppg.ba.project-lead@status.im,137
+ppg.ba.sme@status.im,138
+ppg.ba1.sme@status.im,170
+ppg.ba2.sme@status.im,171
+ppg.ba3.sme@status.im,172
+ppg.ba@status.im,127
+sasha@status.im,112
+security.project-lead@status.im,151
+security.sme@status.im,123
+security1.sme@status.im,135
+security2.sme@status.im,168
+security3.sme@status.im,169
+services.lead@status.im,122

@@ -1,8 +1,8 @@
 """empty message
 
-Revision ID: 907bcf0c3d75
+Revision ID: 2ec4222f0012
 Revises:
-Create Date: 2022-12-28 13:52:13.030028
+Create Date: 2023-01-24 10:31:26.693063
 
 """
 from alembic import op

@@ -10,7 +10,7 @@ import sqlalchemy as sa
 
 
 # revision identifiers, used by Alembic.
-revision = '907bcf0c3d75'
+revision = '2ec4222f0012'
 down_revision = None
 branch_labels = None
 depends_on = None

@@ -129,6 +129,8 @@ def upgrade():
     sa.Column('bpmn_version_control_type', sa.String(length=50), nullable=True),
     sa.Column('bpmn_version_control_identifier', sa.String(length=255), nullable=True),
     sa.Column('spiff_step', sa.Integer(), nullable=True),
+    sa.Column('locked_by', sa.String(length=80), nullable=True),
+    sa.Column('locked_at_in_seconds', sa.Integer(), nullable=True),
     sa.ForeignKeyConstraint(['process_initiator_id'], ['user.id'], ),
     sa.PrimaryKeyConstraint('id')
     )

@@ -204,8 +206,7 @@ def upgrade():
     sa.ForeignKeyConstraint(['completed_by_user_id'], ['user.id'], ),
     sa.ForeignKeyConstraint(['lane_assignment_id'], ['group.id'], ),
     sa.ForeignKeyConstraint(['process_instance_id'], ['process_instance.id'], ),
-    sa.PrimaryKeyConstraint('id'),
-    sa.UniqueConstraint('task_id', 'process_instance_id', name='human_task_unique')
+    sa.PrimaryKeyConstraint('id')
     )
     op.create_index(op.f('ix_human_task_completed'), 'human_task', ['completed'], unique=False)
     op.create_table('message_correlation',

@@ -269,7 +270,8 @@ def upgrade():
     sa.Column('task_json', sa.JSON(), nullable=False),
     sa.Column('timestamp', sa.DECIMAL(precision=17, scale=6), nullable=False),
     sa.ForeignKeyConstraint(['process_instance_id'], ['process_instance.id'], ),
-    sa.PrimaryKeyConstraint('id')
+    sa.PrimaryKeyConstraint('id'),
+    sa.UniqueConstraint('process_instance_id', 'spiff_step', name='process_instance_id_spiff_step')
     )
     op.create_table('human_task_user',
     sa.Column('id', sa.Integer(), nullable=False),

@@ -0,0 +1,34 @@
+"""empty message
+
+Revision ID: 63fc8d693b9f
+Revises: e05ca5cdc312
+Create Date: 2023-02-09 11:54:34.935801
+
+"""
+from alembic import op
+import sqlalchemy as sa
+from sqlalchemy.dialects import mysql
+
+# revision identifiers, used by Alembic.
+revision = '63fc8d693b9f'
+down_revision = 'e05ca5cdc312'
+branch_labels = None
+depends_on = None
+
+
+def upgrade():
+    # ### commands auto generated by Alembic - please adjust! ###
+    op.add_column('spiff_step_details', sa.Column('start_in_seconds', sa.DECIMAL(precision=17, scale=6), nullable=False))
+    op.add_column('spiff_step_details', sa.Column('end_in_seconds', sa.DECIMAL(precision=17, scale=6), nullable=True))
+    op.drop_column('spiff_step_details', 'engine_step_end_in_seconds')
+    op.drop_column('spiff_step_details', 'engine_step_start_in_seconds')
+    # ### end Alembic commands ###
+
+
+def downgrade():
+    # ### commands auto generated by Alembic - please adjust! ###
+    op.add_column('spiff_step_details', sa.Column('engine_step_start_in_seconds', mysql.DECIMAL(precision=17, scale=6), nullable=True))
+    op.add_column('spiff_step_details', sa.Column('engine_step_end_in_seconds', mysql.DECIMAL(precision=17, scale=6), nullable=True))
+    op.drop_column('spiff_step_details', 'end_in_seconds')
+    op.drop_column('spiff_step_details', 'start_in_seconds')
+    # ### end Alembic commands ###

@@ -0,0 +1,32 @@
+"""empty message
+
+Revision ID: ca9b79dde5cc
+Revises: 2ec4222f0012
+Create Date: 2023-02-03 21:06:56.396816
+
+"""
+from alembic import op
+import sqlalchemy as sa
+
+
+# revision identifiers, used by Alembic.
+revision = 'ca9b79dde5cc'
+down_revision = '2ec4222f0012'
+branch_labels = None
+depends_on = None
+
+
+def upgrade():
+    # ### commands auto generated by Alembic - please adjust! ###
+    op.add_column('user', sa.Column('tenant_specific_field_1', sa.String(length=255), nullable=True))
+    op.add_column('user', sa.Column('tenant_specific_field_2', sa.String(length=255), nullable=True))
+    op.add_column('user', sa.Column('tenant_specific_field_3', sa.String(length=255), nullable=True))
+    # ### end Alembic commands ###
+
+
+def downgrade():
+    # ### commands auto generated by Alembic - please adjust! ###
+    op.drop_column('user', 'tenant_specific_field_3')
+    op.drop_column('user', 'tenant_specific_field_2')
+    op.drop_column('user', 'tenant_specific_field_1')
+    # ### end Alembic commands ###

@@ -0,0 +1,38 @@
+"""empty message
+
+Revision ID: e05ca5cdc312
+Revises: ca9b79dde5cc
+Create Date: 2023-02-08 12:21:41.722774
+
+"""
+from alembic import op
+import sqlalchemy as sa
+from sqlalchemy.dialects import mysql
+
+# revision identifiers, used by Alembic.
+revision = 'e05ca5cdc312'
+down_revision = 'ca9b79dde5cc'
+branch_labels = None
+depends_on = None
+
+
+def upgrade():
+    # ### commands auto generated by Alembic - please adjust! ###
+    op.add_column('spiff_step_details', sa.Column('task_state', sa.String(length=50), nullable=False))
+    op.add_column('spiff_step_details', sa.Column('task_id', sa.String(length=50), nullable=False))
+    op.add_column('spiff_step_details', sa.Column('bpmn_task_identifier', sa.String(length=255), nullable=False))
+    op.add_column('spiff_step_details', sa.Column('engine_step_start_in_seconds', sa.DECIMAL(precision=17, scale=6), nullable=True))
+    op.add_column('spiff_step_details', sa.Column('engine_step_end_in_seconds', sa.DECIMAL(precision=17, scale=6), nullable=True))
+    op.drop_column('spiff_step_details', 'timestamp')
+    # ### end Alembic commands ###
+
+
+def downgrade():
+    # ### commands auto generated by Alembic - please adjust! ###
+    op.add_column('spiff_step_details', sa.Column('timestamp', mysql.DECIMAL(precision=17, scale=6), nullable=False))
+    op.drop_column('spiff_step_details', 'engine_step_end_in_seconds')
+    op.drop_column('spiff_step_details', 'engine_step_start_in_seconds')
+    op.drop_column('spiff_step_details', 'bpmn_task_identifier')
+    op.drop_column('spiff_step_details', 'task_id')
+    op.drop_column('spiff_step_details', 'task_state')
+    # ### end Alembic commands ###

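Note: taken together these new revisions extend the migration chain 2ec4222f0012 -> ca9b79dde5cc -> e05ca5cdc312 -> 63fc8d693b9f. A minimal sketch of applying them programmatically with Flask-Migrate (the same effect as the `flask db upgrade` call in the noxfile hunk that follows; the import path assumes the app factory shown later in this diff):

from flask_migrate import upgrade

from spiffworkflow_backend import create_app

app = create_app()
with app.app_context():
    # walks all pending alembic revisions up to head (63fc8d693b9f here)
    upgrade()
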
@@ -42,7 +42,7 @@ def setup_database(session: Session) -> None:
     flask_env_key = "FLASK_SESSION_SECRET_KEY"
     session.env[flask_env_key] = "super_secret_key"
     session.env["FLASK_APP"] = "src/spiffworkflow_backend"
-    session.env["SPIFFWORKFLOW_BACKEND_ENV"] = "testing"
+    session.env["SPIFFWORKFLOW_BACKEND_ENV"] = "unit_testing"
     session.run("flask", "db", "upgrade")
 
 

@@ -72,7 +72,7 @@ zookeeper = ["kazoo"]
 
 [[package]]
 name = "astroid"
-version = "2.12.12"
+version = "2.13.3"
 description = "An abstract syntax tree for Python with inference support."
 category = "main"
 optional = false

@@ -80,7 +80,7 @@ python-versions = ">=3.7.2"
 
 [package.dependencies]
 lazy-object-proxy = ">=1.4.0"
-typing-extensions = {version = ">=3.10", markers = "python_version < \"3.10\""}
+typing-extensions = {version = ">=4.0.0", markers = "python_version < \"3.11\""}
 wrapt = [
     {version = ">=1.11,<2", markers = "python_version < \"3.11\""},
     {version = ">=1.14,<2", markers = "python_version >= \"3.11\""},

@@ -113,7 +113,7 @@ pytz = ">=2015.7"
 
 [[package]]
 name = "bandit"
-version = "1.7.2"
+version = "1.7.4"
 description = "Security oriented static analyser for python code."
 category = "dev"
 optional = false

@@ -430,6 +430,17 @@ calendars = ["convertdate", "convertdate", "hijri-converter"]
 fasttext = ["fasttext"]
 langdetect = ["langdetect"]
 
+[[package]]
+name = "dill"
+version = "0.3.6"
+description = "serialize all of python"
+category = "main"
+optional = false
+python-versions = ">=3.7"
+
+[package.extras]
+graph = ["objgraph (>=1.7.2)"]
+
 [[package]]
 name = "distlib"
 version = "0.3.6"

@@ -487,30 +498,28 @@ testing = ["covdefaults (>=2.2)", "coverage (>=6.4.2)", "pytest (>=7.1.2)", "pyt
 
 [[package]]
 name = "flake8"
-version = "4.0.1"
+version = "6.0.0"
 description = "the modular source code checker: pep8 pyflakes and co"
 category = "dev"
 optional = false
-python-versions = ">=3.6"
+python-versions = ">=3.8.1"
 
 [package.dependencies]
-mccabe = ">=0.6.0,<0.7.0"
-pycodestyle = ">=2.8.0,<2.9.0"
-pyflakes = ">=2.4.0,<2.5.0"
+mccabe = ">=0.7.0,<0.8.0"
+pycodestyle = ">=2.10.0,<2.11.0"
+pyflakes = ">=3.0.0,<3.1.0"
 
 [[package]]
 name = "flake8-bandit"
-version = "2.1.2"
+version = "4.1.1"
 description = "Automated security testing with bandit and flake8."
 category = "dev"
 optional = false
-python-versions = "*"
+python-versions = ">=3.6"
 
 [package.dependencies]
-bandit = "*"
-flake8 = "*"
-flake8-polyfill = "*"
-pycodestyle = "*"
+bandit = ">=1.7.3"
+flake8 = ">=5.0.0"
 
 [[package]]
 name = "flake8-bugbear"

@@ -539,17 +548,6 @@ python-versions = "*"
 flake8 = ">=3"
 pydocstyle = ">=2.1"
 
-[[package]]
-name = "flake8-polyfill"
-version = "1.0.2"
-description = "Polyfill package for Flake8 plugins"
-category = "dev"
-optional = false
-python-versions = "*"
-
-[package.dependencies]
-flake8 = "*"
-
 [[package]]
 name = "flake8-rst-docstrings"
 version = "0.2.7"

@@ -867,6 +865,20 @@ category = "main"
 optional = false
 python-versions = "*"
 
+[[package]]
+name = "isort"
+version = "5.11.4"
+description = "A Python utility / library to sort Python imports."
+category = "main"
+optional = false
+python-versions = ">=3.7.0"
+
+[package.extras]
+colors = ["colorama (>=0.4.3,<0.5.0)"]
+pipfile-deprecated-finder = ["pipreqs", "requirementslib"]
+plugins = ["setuptools"]
+requirements-deprecated-finder = ["pip-api", "pipreqs"]
+
 [[package]]
 name = "itsdangerous"
 version = "2.1.2"

@@ -1040,11 +1052,11 @@ tests = ["pytest", "pytest-lazy-fixture (>=0.6.2)"]
 
 [[package]]
 name = "mccabe"
-version = "0.6.1"
+version = "0.7.0"
 description = "McCabe checker, plugin for flake8"
-category = "dev"
+category = "main"
 optional = false
-python-versions = "*"
+python-versions = ">=3.6"
 
 [[package]]
 name = "mypy"

@@ -1149,7 +1161,7 @@ flake8 = ">=3.9.1"
 name = "platformdirs"
 version = "2.5.2"
 description = "A small Python module for determining appropriate platform-specific dirs, e.g. a \"user data dir\"."
-category = "dev"
+category = "main"
 optional = false
 python-versions = ">=3.7"
 

@@ -1226,11 +1238,11 @@ python-versions = ">=3.6"
 
 [[package]]
 name = "pycodestyle"
-version = "2.8.0"
+version = "2.10.0"
 description = "Python style guide checker"
 category = "dev"
 optional = false
-python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*"
+python-versions = ">=3.6"
 
 [[package]]
 name = "pydocstyle"

@@ -1248,11 +1260,11 @@ toml = ["toml"]
 
 [[package]]
 name = "pyflakes"
-version = "2.4.0"
+version = "3.0.1"
 description = "passive checker of Python programs"
 category = "dev"
 optional = false
-python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*"
+python-versions = ">=3.6"
 
 [[package]]
 name = "Pygments"

@@ -1279,6 +1291,32 @@ dev = ["coverage[toml] (==5.0.4)", "cryptography (>=3.4.0)", "pre-commit", "pyte
 docs = ["sphinx (>=4.5.0,<5.0.0)", "sphinx-rtd-theme", "zope.interface"]
 tests = ["coverage[toml] (==5.0.4)", "pytest (>=6.0.0,<7.0.0)"]
 
+[[package]]
+name = "pylint"
+version = "2.15.10"
+description = "python code static checker"
+category = "main"
+optional = false
+python-versions = ">=3.7.2"
+
+[package.dependencies]
+astroid = ">=2.12.13,<=2.14.0-dev0"
+colorama = {version = ">=0.4.5", markers = "sys_platform == \"win32\""}
+dill = [
+    {version = ">=0.2", markers = "python_version < \"3.11\""},
+    {version = ">=0.3.6", markers = "python_version >= \"3.11\""},
+]
+isort = ">=4.2.5,<6"
+mccabe = ">=0.6,<0.8"
+platformdirs = ">=2.2.0"
+tomli = {version = ">=1.1.0", markers = "python_version < \"3.11\""}
+tomlkit = ">=0.10.1"
+typing-extensions = {version = ">=3.10.0", markers = "python_version < \"3.10\""}
+
+[package.extras]
+spelling = ["pyenchant (>=3.2,<4.0)"]
+testutils = ["gitpython (>3)"]
+
 [[package]]
 name = "pyparsing"
 version = "3.0.9"

@@ -1786,8 +1824,8 @@ lxml = "*"
 [package.source]
 type = "git"
 url = "https://github.com/sartography/SpiffWorkflow"
-reference = "be26100bcbef8026e26312c665dae42faf476485"
-resolved_reference = "be26100bcbef8026e26312c665dae42faf476485"
+reference = "main"
+resolved_reference = "b439f69f23b547df4de1e8e0c636997f2fd4e33b"
 
 [[package]]
 name = "SQLAlchemy"

@@ -1886,6 +1924,14 @@ category = "main"
 optional = false
 python-versions = ">=3.7"
 
+[[package]]
+name = "tomlkit"
+version = "0.11.6"
+description = "Style preserving TOML library"
+category = "main"
+optional = false
+python-versions = ">=3.6"
+
 [[package]]
 name = "tornado"
 version = "6.2"

@@ -2158,7 +2204,7 @@ testing = ["flake8 (<5)", "func-timeout", "jaraco.functools", "jaraco.itertools"
 [metadata]
 lock-version = "1.1"
 python-versions = ">=3.9,<3.12"
-content-hash = "d804b8cbb34882f92cf19e5e59231aa7eac84764298fe7eae72bd03112e09496"
+content-hash = "b16e8fb0cf991bcba08c3ef1ddf205f5899c622a10c79a7f50fb55a36d53b179"
 
 [metadata.files]
 alabaster = [

@@ -2182,8 +2228,8 @@ apscheduler = [
     {file = "APScheduler-3.9.1.post1.tar.gz", hash = "sha256:b2bea0309569da53a7261bfa0ce19c67ddbfe151bda776a6a907579fdbd3eb2a"},
 ]
 astroid = [
-    {file = "astroid-2.12.12-py3-none-any.whl", hash = "sha256:72702205200b2a638358369d90c222d74ebc376787af8fb2f7f2a86f7b5cc85f"},
-    {file = "astroid-2.12.12.tar.gz", hash = "sha256:1c00a14f5a3ed0339d38d2e2e5b74ea2591df5861c0936bb292b84ccf3a78d83"},
+    {file = "astroid-2.13.3-py3-none-any.whl", hash = "sha256:14c1603c41cc61aae731cad1884a073c4645e26f126d13ac8346113c95577f3b"},
+    {file = "astroid-2.13.3.tar.gz", hash = "sha256:6afc22718a48a689ca24a97981ad377ba7fb78c133f40335dfd16772f29bcfb1"},
 ]
 attrs = [
     {file = "attrs-22.1.0-py2.py3-none-any.whl", hash = "sha256:86efa402f67bf2df34f51a335487cf46b1ec130d02b8d39fd248abfd30da551c"},

@@ -2194,8 +2240,8 @@ Babel = [
     {file = "Babel-2.10.3.tar.gz", hash = "sha256:7614553711ee97490f732126dc077f8d0ae084ebc6a96e23db1482afabdb2c51"},
 ]
 bandit = [
-    {file = "bandit-1.7.2-py3-none-any.whl", hash = "sha256:e20402cadfd126d85b68ed4c8862959663c8c372dbbb1fca8f8e2c9f55a067ec"},
-    {file = "bandit-1.7.2.tar.gz", hash = "sha256:6d11adea0214a43813887bfe71a377b5a9955e4c826c8ffd341b494e3ab25260"},
+    {file = "bandit-1.7.4-py3-none-any.whl", hash = "sha256:412d3f259dab4077d0e7f0c11f50f650cc7d10db905d98f6520a95a18049658a"},
+    {file = "bandit-1.7.4.tar.gz", hash = "sha256:2d63a8c573417bae338962d4b9b06fbc6080f74ecd955a092849e1e65c717bd2"},
 ]
 bcrypt = [
     {file = "bcrypt-4.0.1-cp36-abi3-macosx_10_10_universal2.whl", hash = "sha256:b1023030aec778185a6c16cf70f359cbb6e0c289fd564a7cfa29e727a1c38f8f"},

@@ -2367,6 +2413,10 @@ dateparser = [
     {file = "dateparser-1.1.2-py2.py3-none-any.whl", hash = "sha256:d31659dc806a7d88e2b510b2c74f68b525ae531f145c62a57a99bd616b7f90cf"},
     {file = "dateparser-1.1.2.tar.gz", hash = "sha256:3821bf191f95b2658c4abd91571c09821ce7a2bc179bf6cefd8b4515c3ccf9ef"},
 ]
+dill = [
+    {file = "dill-0.3.6-py3-none-any.whl", hash = "sha256:a07ffd2351b8c678dfc4a856a3005f8067aea51d6ba6c700796a4d9e280f39f0"},
+    {file = "dill-0.3.6.tar.gz", hash = "sha256:e5db55f3687856d8fbdab002ed78544e1c4559a130302693d839dfe8f93f2373"},
+]
 distlib = [
     {file = "distlib-0.3.6-py2.py3-none-any.whl", hash = "sha256:f35c4b692542ca110de7ef0bea44d73981caeb34ca0b9b6b2e6d7790dda8f80e"},
     {file = "distlib-0.3.6.tar.gz", hash = "sha256:14bad2d9b04d3a36127ac97f30b12a19268f211063d8f8ee4f47108896e11b46"},

@@ -2388,11 +2438,12 @@ filelock = [
     {file = "filelock-3.8.0.tar.gz", hash = "sha256:55447caa666f2198c5b6b13a26d2084d26fa5b115c00d065664b2124680c4edc"},
 ]
 flake8 = [
-    {file = "flake8-4.0.1-py2.py3-none-any.whl", hash = "sha256:479b1304f72536a55948cb40a32dce8bb0ffe3501e26eaf292c7e60eb5e0428d"},
-    {file = "flake8-4.0.1.tar.gz", hash = "sha256:806e034dda44114815e23c16ef92f95c91e4c71100ff52813adf7132a6ad870d"},
+    {file = "flake8-6.0.0-py2.py3-none-any.whl", hash = "sha256:3833794e27ff64ea4e9cf5d410082a8b97ff1a06c16aa3d2027339cd0f1195c7"},
+    {file = "flake8-6.0.0.tar.gz", hash = "sha256:c61007e76655af75e6785a931f452915b371dc48f56efd765247c8fe68f2b181"},
 ]
 flake8-bandit = [
-    {file = "flake8_bandit-2.1.2.tar.gz", hash = "sha256:687fc8da2e4a239b206af2e54a90093572a60d0954f3054e23690739b0b0de3b"},
+    {file = "flake8_bandit-4.1.1-py3-none-any.whl", hash = "sha256:4c8a53eb48f23d4ef1e59293657181a3c989d0077c9952717e98a0eace43e06d"},
+    {file = "flake8_bandit-4.1.1.tar.gz", hash = "sha256:068e09287189cbfd7f986e92605adea2067630b75380c6b5733dab7d87f9a84e"},
 ]
 flake8-bugbear = [
     {file = "flake8-bugbear-22.10.25.tar.gz", hash = "sha256:89e51284eb929fbb7f23fbd428491e7427f7cdc8b45a77248daffe86a039d696"},

@@ -2402,10 +2453,6 @@ flake8-docstrings = [
     {file = "flake8-docstrings-1.6.0.tar.gz", hash = "sha256:9fe7c6a306064af8e62a055c2f61e9eb1da55f84bb39caef2b84ce53708ac34b"},
     {file = "flake8_docstrings-1.6.0-py2.py3-none-any.whl", hash = "sha256:99cac583d6c7e32dd28bbfbef120a7c0d1b6dde4adb5a9fd441c4227a6534bde"},
 ]
-flake8-polyfill = [
-    {file = "flake8-polyfill-1.0.2.tar.gz", hash = "sha256:e44b087597f6da52ec6393a709e7108b2905317d0c0b744cdca6208e670d8eda"},
-    {file = "flake8_polyfill-1.0.2-py2.py3-none-any.whl", hash = "sha256:12be6a34ee3ab795b19ca73505e7b55826d5f6ad7230d31b18e106400169b9e9"},
-]
 flake8-rst-docstrings = [
     {file = "flake8-rst-docstrings-0.2.7.tar.gz", hash = "sha256:2740067ab9237559dd45a3434d8c987792c7b259ca563621a3b95efe201f5382"},
     {file = "flake8_rst_docstrings-0.2.7-py3-none-any.whl", hash = "sha256:5d56075dce360bcc9c6775bfe7cb431aa395de600ca7e8d40580a28d50b2a803"},

@@ -2499,6 +2546,7 @@ greenlet = [
     {file = "greenlet-2.0.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d5b0ff9878333823226d270417f24f4d06f235cb3e54d1103b71ea537a6a86ce"},
     {file = "greenlet-2.0.1-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:be9e0fb2ada7e5124f5282d6381903183ecc73ea019568d6d63d33f25b2a9000"},
+    {file = "greenlet-2.0.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0b493db84d124805865adc587532ebad30efa68f79ad68f11b336e0a51ec86c2"},
     {file = "greenlet-2.0.1-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:0459d94f73265744fee4c2d5ec44c6f34aa8a31017e6e9de770f7bcf29710be9"},
     {file = "greenlet-2.0.1-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:a20d33124935d27b80e6fdacbd34205732660e0a1d35d8b10b3328179a2b51a1"},
     {file = "greenlet-2.0.1-cp37-cp37m-win32.whl", hash = "sha256:ea688d11707d30e212e0110a1aac7f7f3f542a259235d396f88be68b649e47d1"},
     {file = "greenlet-2.0.1-cp37-cp37m-win_amd64.whl", hash = "sha256:afe07421c969e259e9403c3bb658968702bc3b78ec0b6fde3ae1e73440529c23"},

@@ -2507,6 +2555,7 @@ greenlet = [
     {file = "greenlet-2.0.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:659f167f419a4609bc0516fb18ea69ed39dbb25594934bd2dd4d0401660e8a1e"},
     {file = "greenlet-2.0.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:356e4519d4dfa766d50ecc498544b44c0249b6de66426041d7f8b751de4d6b48"},
+    {file = "greenlet-2.0.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:811e1d37d60b47cb8126e0a929b58c046251f28117cb16fcd371eed61f66b764"},
     {file = "greenlet-2.0.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:d38ffd0e81ba8ef347d2be0772e899c289b59ff150ebbbbe05dc61b1246eb4e0"},
     {file = "greenlet-2.0.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:0109af1138afbfb8ae647e31a2b1ab030f58b21dd8528c27beaeb0093b7938a9"},
     {file = "greenlet-2.0.1-cp38-cp38-win32.whl", hash = "sha256:88c8d517e78acdf7df8a2134a3c4b964415b575d2840a2746ddb1cc6175f8608"},
     {file = "greenlet-2.0.1-cp38-cp38-win_amd64.whl", hash = "sha256:d6ee1aa7ab36475035eb48c01efae87d37936a8173fc4d7b10bb02c2d75dd8f6"},

@@ -2515,6 +2564,7 @@ greenlet = [
     {file = "greenlet-2.0.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:505138d4fa69462447a562a7c2ef723c6025ba12ac04478bc1ce2fcc279a2db5"},
     {file = "greenlet-2.0.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:cce1e90dd302f45716a7715517c6aa0468af0bf38e814ad4eab58e88fc09f7f7"},
+    {file = "greenlet-2.0.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9e9744c657d896c7b580455e739899e492a4a452e2dd4d2b3e459f6b244a638d"},
     {file = "greenlet-2.0.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:662e8f7cad915ba75d8017b3e601afc01ef20deeeabf281bd00369de196d7726"},
     {file = "greenlet-2.0.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:41b825d65f31e394b523c84db84f9383a2f7eefc13d987f308f4663794d2687e"},
     {file = "greenlet-2.0.1-cp39-cp39-win32.whl", hash = "sha256:db38f80540083ea33bdab614a9d28bcec4b54daa5aff1668d7827a9fc769ae0a"},
     {file = "greenlet-2.0.1-cp39-cp39-win_amd64.whl", hash = "sha256:b23d2a46d53210b498e5b701a1913697671988f4bf8e10f935433f6e7c332fb6"},

@@ -2548,6 +2598,10 @@ iniconfig = [
     {file = "iniconfig-1.1.1-py2.py3-none-any.whl", hash = "sha256:011e24c64b7f47f6ebd835bb12a743f2fbe9a26d4cecaa7f53bc4f35ee9da8b3"},
     {file = "iniconfig-1.1.1.tar.gz", hash = "sha256:bc3af051d7d14b2ee5ef9969666def0cd1a000e121eaea580d4a313df4b37f32"},
 ]
+isort = [
+    {file = "isort-5.11.4-py3-none-any.whl", hash = "sha256:c033fd0edb91000a7f09527fe5c75321878f98322a77ddcc81adbd83724afb7b"},
+    {file = "isort-5.11.4.tar.gz", hash = "sha256:6db30c5ded9815d813932c04c2f85a360bcdd35fed496f4d8f35495ef0a261b6"},
+]
 itsdangerous = [
     {file = "itsdangerous-2.1.2-py3-none-any.whl", hash = "sha256:2c2349112351b88699d8d4b6b075022c0808887cb7ad10069318a8b0bc88db44"},
     {file = "itsdangerous-2.1.2.tar.gz", hash = "sha256:5dbbc68b317e5e42f327f9021763545dc3fc3bfe22e6deb96aaf1fc38874156a"},

@@ -2737,8 +2791,8 @@ marshmallow-sqlalchemy = [
     {file = "marshmallow_sqlalchemy-0.28.1-py2.py3-none-any.whl", hash = "sha256:dbb061c19375eca3a7d18358d2ca8bbaee825fc3000a3f114e2698282362b536"},
 ]
 mccabe = [
-    {file = "mccabe-0.6.1-py2.py3-none-any.whl", hash = "sha256:ab8a6258860da4b6677da4bd2fe5dc2c659cff31b3ee4f7f5d64e79735b80d42"},
-    {file = "mccabe-0.6.1.tar.gz", hash = "sha256:dd8d182285a0fe56bace7f45b5e7d1a6ebcbf524e8f3bd87eb0f125271b8831f"},
+    {file = "mccabe-0.7.0-py2.py3-none-any.whl", hash = "sha256:6c2d30ab6be0e4a46919781807b4f0d834ebdd6c6e3dca0bda5a15f863427b6e"},
+    {file = "mccabe-0.7.0.tar.gz", hash = "sha256:348e0240c33b60bbdf4e523192ef919f28cb2c3d7d5c7794f74009290f236325"},
 ]
 mypy = [
     {file = "mypy-0.982-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:5085e6f442003fa915aeb0a46d4da58128da69325d8213b4b35cc7054090aed5"},

@@ -2812,10 +2866,7 @@ orjson = [
     {file = "orjson-3.8.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:b68a42a31f8429728183c21fb440c21de1b62e5378d0d73f280e2d894ef8942e"},
     {file = "orjson-3.8.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:ff13410ddbdda5d4197a4a4c09969cb78c722a67550f0a63c02c07aadc624833"},
     {file = "orjson-3.8.0-cp310-none-win_amd64.whl", hash = "sha256:2d81e6e56bbea44be0222fb53f7b255b4e7426290516771592738ca01dbd053b"},
-    {file = "orjson-3.8.0-cp311-cp311-macosx_10_7_x86_64.whl", hash = "sha256:200eae21c33f1f8b02a11f5d88d76950cd6fd986d88f1afe497a8ae2627c49aa"},
     {file = "orjson-3.8.0-cp311-cp311-macosx_10_9_x86_64.macosx_11_0_arm64.macosx_10_9_universal2.whl", hash = "sha256:9529990f3eab54b976d327360aa1ff244a4b12cb5e4c5b3712fcdd96e8fe56d4"},
-    {file = "orjson-3.8.0-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:e2defd9527651ad39ec20ae03c812adf47ef7662bdd6bc07dabb10888d70dc62"},
-    {file = "orjson-3.8.0-cp311-none-win_amd64.whl", hash = "sha256:b21c7af0ff6228ca7105f54f0800636eb49201133e15ddb80ac20c1ce973ef07"},
     {file = "orjson-3.8.0-cp37-cp37m-macosx_10_7_x86_64.whl", hash = "sha256:9e6ac22cec72d5b39035b566e4b86c74b84866f12b5b0b6541506a080fb67d6d"},
     {file = "orjson-3.8.0-cp37-cp37m-macosx_10_9_x86_64.macosx_11_0_arm64.macosx_10_9_universal2.whl", hash = "sha256:e2f4a5542f50e3d336a18cb224fc757245ca66b1fd0b70b5dd4471b8ff5f2b0e"},
     {file = "orjson-3.8.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e1418feeb8b698b9224b1f024555895169d481604d5d884498c1838d7412794c"},

@@ -2922,16 +2973,16 @@ psycopg2 = [
     {file = "psycopg2-2.9.4.tar.gz", hash = "sha256:d529926254e093a1b669f692a3aa50069bc71faf5b0ecd91686a78f62767d52f"},
 ]
 pycodestyle = [
-    {file = "pycodestyle-2.8.0-py2.py3-none-any.whl", hash = "sha256:720f8b39dde8b293825e7ff02c475f3077124006db4f440dcbc9a20b76548a20"},
-    {file = "pycodestyle-2.8.0.tar.gz", hash = "sha256:eddd5847ef438ea1c7870ca7eb78a9d47ce0cdb4851a5523949f2601d0cbbe7f"},
+    {file = "pycodestyle-2.10.0-py2.py3-none-any.whl", hash = "sha256:8a4eaf0d0495c7395bdab3589ac2db602797d76207242c17d470186815706610"},
+    {file = "pycodestyle-2.10.0.tar.gz", hash = "sha256:347187bdb476329d98f695c213d7295a846d1152ff4fe9bacb8a9590b8ee7053"},
 ]
 pydocstyle = [
     {file = "pydocstyle-6.1.1-py3-none-any.whl", hash = "sha256:6987826d6775056839940041beef5c08cc7e3d71d63149b48e36727f70144dc4"},
     {file = "pydocstyle-6.1.1.tar.gz", hash = "sha256:1d41b7c459ba0ee6c345f2eb9ae827cab14a7533a88c5c6f7e94923f72df92dc"},
 ]
 pyflakes = [
-    {file = "pyflakes-2.4.0-py2.py3-none-any.whl", hash = "sha256:3bb3a3f256f4b7968c9c788781e4ff07dce46bdf12339dcda61053375426ee2e"},
-    {file = "pyflakes-2.4.0.tar.gz", hash = "sha256:05a85c2872edf37a4ed30b0cce2f6093e1d0581f8c19d7393122da7e25b2b24c"},
+    {file = "pyflakes-3.0.1-py2.py3-none-any.whl", hash = "sha256:ec55bf7fe21fff7f1ad2f7da62363d749e2a470500eab1b555334b67aa1ef8cf"},
+    {file = "pyflakes-3.0.1.tar.gz", hash = "sha256:ec8b276a6b60bd80defed25add7e439881c19e64850afd9b346283d4165fd0fd"},
 ]
 Pygments = [
     {file = "Pygments-2.13.0-py3-none-any.whl", hash = "sha256:f643f331ab57ba3c9d89212ee4a2dabc6e94f117cf4eefde99a0574720d14c42"},

@@ -2941,6 +2992,10 @@ pyjwt = [
     {file = "PyJWT-2.6.0-py3-none-any.whl", hash = "sha256:d83c3d892a77bbb74d3e1a2cfa90afaadb60945205d1095d9221f04466f64c14"},
     {file = "PyJWT-2.6.0.tar.gz", hash = "sha256:69285c7e31fc44f68a1feb309e948e0df53259d579295e6cfe2b1792329f05fd"},
 ]
+pylint = [
+    {file = "pylint-2.15.10-py3-none-any.whl", hash = "sha256:9df0d07e8948a1c3ffa3b6e2d7e6e63d9fb457c5da5b961ed63106594780cc7e"},
+    {file = "pylint-2.15.10.tar.gz", hash = "sha256:b3dc5ef7d33858f297ac0d06cc73862f01e4f2e74025ec3eff347ce0bc60baf5"},
+]
 pyparsing = [
     {file = "pyparsing-3.0.9-py3-none-any.whl", hash = "sha256:5026bae9a10eeaefb61dab2f09052b9f4307d44aee4eda64b309723d8d206bbc"},
     {file = "pyparsing-3.0.9.tar.gz", hash = "sha256:2b020ecf7d21b687f219b71ecad3631f644a47f01403fa1d1036b0c6416d70fb"},

@@ -3372,6 +3427,10 @@ tomli = [
     {file = "tomli-2.0.1-py3-none-any.whl", hash = "sha256:939de3e7a6161af0c887ef91b7d41a53e7c5a1ca976325f429cb46ea9bc30ecc"},
     {file = "tomli-2.0.1.tar.gz", hash = "sha256:de526c12914f0c550d15924c62d72abc48d6fe7364aa87328337a31007fe8a4f"},
 ]
+tomlkit = [
+    {file = "tomlkit-0.11.6-py3-none-any.whl", hash = "sha256:07de26b0d8cfc18f871aec595fda24d95b08fef89d147caa861939f37230bf4b"},
+    {file = "tomlkit-0.11.6.tar.gz", hash = "sha256:71b952e5721688937fb02cf9d354dbcf0785066149d2855e44531ebdd2b65d73"},
+]
 tornado = [
     {file = "tornado-6.2-cp37-abi3-macosx_10_9_universal2.whl", hash = "sha256:20f638fd8cc85f3cbae3c732326e96addff0a15e22d80f049e00121651e82e72"},
     {file = "tornado-6.2-cp37-abi3-macosx_10_9_x86_64.whl", hash = "sha256:87dcafae3e884462f90c90ecc200defe5e580a7fbbb4365eda7c7c1eb809ebc9"},

@@ -27,8 +27,7 @@ flask-marshmallow = "*"
 flask-migrate = "*"
 flask-restful = "*"
 werkzeug = "*"
-# temporarily switch off main to fix CI because poetry export doesn't capture the revision if it's not here (it ignores the lock)
-SpiffWorkflow = {git = "https://github.com/sartography/SpiffWorkflow", rev = "be26100bcbef8026e26312c665dae42faf476485"}
+SpiffWorkflow = {git = "https://github.com/sartography/SpiffWorkflow", rev = "main"}
 # SpiffWorkflow = {develop = true, path = "../SpiffWorkflow" }
 sentry-sdk = "^1.10"
 sphinx-autoapi = "^2.0"

@@ -73,6 +72,7 @@ pytz = "^2022.6"
 dateparser = "^1.1.2"
 types-dateparser = "^1.1.4.1"
 flask-jwt-extended = "^4.4.4"
+pylint = "^2.15.10"
 
 
 [tool.poetry.dev-dependencies]

@@ -85,12 +85,12 @@ xdoctest = {extras = ["colors"], version = "^1.0.1"}
 sphinx = "^5.0.2"
 sphinx-autobuild = ">=2021.3.14"
 pre-commit = "^2.20.0"
-flake8 = "^4.0.1"
+flake8 = "*"
 black = ">=21.10b0"
-flake8-bandit = "^2.1.2"
+flake8-bandit = "*"
 
 # 1.7.3 broke us. https://github.com/PyCQA/bandit/issues/841
-bandit = "1.7.2"
+bandit = "*"
 
 flake8-bugbear = "^22.10.25"
 flake8-docstrings = "^1.6.0"

Binary file not shown.
Binary file not shown.
Binary file not shown.
@@ -1,4 +1,5 @@
 """__init__."""
+import faulthandler
 import os
 from typing import Any
 
@@ -9,17 +10,16 @@ import sqlalchemy
 from apscheduler.schedulers.background import BackgroundScheduler  # type: ignore
 from apscheduler.schedulers.base import BaseScheduler  # type: ignore
 from flask.json.provider import DefaultJSONProvider
-from flask_bpmn.api.api_error import api_error_blueprint
-from flask_bpmn.models.db import db
-from flask_bpmn.models.db import migrate
 from flask_cors import CORS  # type: ignore
 from flask_mail import Mail  # type: ignore
 from werkzeug.exceptions import NotFound
 
 import spiffworkflow_backend.load_database_models  # noqa: F401
 from spiffworkflow_backend.config import setup_config
+from spiffworkflow_backend.exceptions.api_error import api_error_blueprint
+from spiffworkflow_backend.helpers.api_version import V1_API_PATH_PREFIX
-from spiffworkflow_backend.routes.admin_blueprint.admin_blueprint import admin_blueprint
+from spiffworkflow_backend.models.db import db
+from spiffworkflow_backend.models.db import migrate
 from spiffworkflow_backend.routes.openid_blueprint.openid_blueprint import (
     openid_blueprint,
 )
|
@ -80,6 +80,8 @@ def start_scheduler(
|
|||
|
||||
def create_app() -> flask.app.Flask:
|
||||
"""Create_app."""
|
||||
faulthandler.enable()
|
||||
|
||||
# We need to create the sqlite database in a known location.
|
||||
# If we rely on the app.instance_path without setting an environment
|
||||
# variable, it will be one thing when we run flask db upgrade in the
|
||||
|
@@ -106,7 +108,6 @@ def create_app() -> flask.app.Flask:
 
     app.register_blueprint(user_blueprint)
     app.register_blueprint(api_error_blueprint)
-    app.register_blueprint(admin_blueprint, url_prefix="/admin")
    app.register_blueprint(openid_blueprint, url_prefix="/openid")
 
     # preflight options requests will be allowed if they meet the requirements of the url regex.
@@ -114,7 +115,7 @@ def create_app() -> flask.app.Flask:
     # need to continually keep asking for the same path.
     origins_re = [
         r"^https?:\/\/%s(.*)" % o.replace(".", r"\.")
-        for o in app.config["CORS_ALLOW_ORIGINS"]
+        for o in app.config["SPIFFWORKFLOW_BACKEND_CORS_ALLOW_ORIGINS"]
     ]
     CORS(app, origins=origins_re, max_age=3600, supports_credentials=True)
 
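Note: the comprehension above turns each configured origin into an anchored regex with its dots escaped. For illustration, a hypothetical origin of "spiffworkflow.example.com" would be handled like this:

import re

o = "spiffworkflow.example.com"  # hypothetical configured origin
pattern = r"^https?:\/\/%s(.*)" % o.replace(".", r"\.")
# matches both http and https requests from that host, any path
assert re.match(pattern, "https://spiffworkflow.example.com/some/path")
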
@@ -127,7 +128,7 @@ def create_app() -> flask.app.Flask:
 
     # do not start the scheduler twice in flask debug mode
     if (
-        app.config["RUN_BACKGROUND_SCHEDULER"]
+        app.config["SPIFFWORKFLOW_BACKEND_RUN_BACKGROUND_SCHEDULER"]
         and os.environ.get("WERKZEUG_RUN_MAIN") != "true"
     ):
         start_scheduler(app)
@@ -143,22 +144,47 @@ def create_app() -> flask.app.Flask:
 
 def get_hacked_up_app_for_script() -> flask.app.Flask:
     """Get_hacked_up_app_for_script."""
-    os.environ["SPIFFWORKFLOW_BACKEND_ENV"] = "development"
+    os.environ["SPIFFWORKFLOW_BACKEND_ENV"] = "local_development"
     flask_env_key = "FLASK_SESSION_SECRET_KEY"
     os.environ[flask_env_key] = "whatevs"
-    if "BPMN_SPEC_ABSOLUTE_DIR" not in os.environ:
+    if "SPIFFWORKFLOW_BACKEND_BPMN_SPEC_ABSOLUTE_DIR" not in os.environ:
         home = os.environ["HOME"]
         full_process_model_path = (
             f"{home}/projects/github/sartography/sample-process-models"
         )
         if os.path.isdir(full_process_model_path):
-            os.environ["BPMN_SPEC_ABSOLUTE_DIR"] = full_process_model_path
+            os.environ["SPIFFWORKFLOW_BACKEND_BPMN_SPEC_ABSOLUTE_DIR"] = (
+                full_process_model_path
+            )
         else:
             raise Exception(f"Could not find {full_process_model_path}")
     app = create_app()
     return app
 
 
+def traces_sampler(sampling_context: Any) -> Any:
+    # always inherit
+    if sampling_context["parent_sampled"] is not None:
+        return sampling_context["parent_sampled"]
+
+    if "wsgi_environ" in sampling_context:
+        wsgi_environ = sampling_context["wsgi_environ"]
+        path_info = wsgi_environ.get("PATH_INFO")
+        request_method = wsgi_environ.get("REQUEST_METHOD")
+
+        # tasks_controller.task_submit
+        # this is the current pain point as of 31 jan 2023.
+        if (
+            path_info
+            and path_info.startswith("/v1.0/tasks/")
+            and request_method == "PUT"
+        ):
+            return 1
+
+    # Default sample rate for all others (replaces traces_sample_rate)
+    return 0.01
+
+
 def configure_sentry(app: flask.app.Flask) -> None:
     """Configure_sentry."""
     import sentry_sdk
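Note: sentry_sdk calls a traces_sampler with a sampling_context dict and treats the return value as the sample rate for that transaction (1 meaning always trace). A quick sketch of the behavior of the function added above, using a hypothetical sampling context:

context = {
    "parent_sampled": None,
    "wsgi_environ": {"PATH_INFO": "/v1.0/tasks/42/some-guid", "REQUEST_METHOD": "PUT"},
}
assert traces_sampler(context) == 1  # task submits are always traced
context["wsgi_environ"]["REQUEST_METHOD"] = "GET"
assert traces_sampler(context) == 0.01  # everything else: 1% of traces
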
@@ -174,16 +200,28 @@ def configure_sentry(app: flask.app.Flask) -> None:
             return None
         return event
 
-    sentry_errors_sample_rate = app.config.get("SENTRY_ERRORS_SAMPLE_RATE")
+    sentry_errors_sample_rate = app.config.get(
+        "SPIFFWORKFLOW_BACKEND_SENTRY_ERRORS_SAMPLE_RATE"
+    )
     if sentry_errors_sample_rate is None:
-        raise Exception("SENTRY_ERRORS_SAMPLE_RATE is not set somehow")
+        raise Exception(
+            "SPIFFWORKFLOW_BACKEND_SENTRY_ERRORS_SAMPLE_RATE is not set somehow"
+        )
 
-    sentry_traces_sample_rate = app.config.get("SENTRY_TRACES_SAMPLE_RATE")
+    sentry_traces_sample_rate = app.config.get(
+        "SPIFFWORKFLOW_BACKEND_SENTRY_TRACES_SAMPLE_RATE"
+    )
     if sentry_traces_sample_rate is None:
-        raise Exception("SENTRY_TRACES_SAMPLE_RATE is not set somehow")
+        raise Exception(
+            "SPIFFWORKFLOW_BACKEND_SENTRY_TRACES_SAMPLE_RATE is not set somehow"
+        )
+
+    # profiling doesn't work on windows, because of an issue like https://github.com/nvdv/vprof/issues/62
+    # but also we commented out profiling because it was causing segfaults (i guess it is marked experimental)
+    # profiles_sample_rate = 0 if sys.platform.startswith("win") else 1
 
     sentry_sdk.init(
-        dsn=app.config.get("SENTRY_DSN"),
+        dsn=app.config.get("SPIFFWORKFLOW_BACKEND_SENTRY_DSN"),
         integrations=[
             FlaskIntegration(),
         ],

@@ -195,5 +233,8 @@ def configure_sentry(app: flask.app.Flask) -> None:
         # of transactions for performance monitoring.
         # We recommend adjusting this value to less than 1(00%) in production.
-        traces_sample_rate=float(sentry_traces_sample_rate),
+        traces_sampler=traces_sampler,
+        # The profiles_sample_rate setting is relative to the traces_sample_rate setting.
+        # _experiments={"profiles_sample_rate": profiles_sample_rate},
         before_send=before_send,
     )

@@ -79,6 +79,26 @@ paths:
         "200":
           description: Logout Authenticated User
 
+  /login_with_access_token:
+    parameters:
+      - name: access_token
+        in: query
+        required: true
+        schema:
+          type: string
+    post:
+      operationId: spiffworkflow_backend.routes.user.login_with_access_token
+      summary: Authenticate user for API access with an openid token already possessed.
+      tags:
+        - Authentication
+      responses:
+        "200":
+          description: "Returns ok: true if user successfully logged in."
+          content:
+            application/json:
+              schema:
+                $ref: "#/components/schemas/OkTrue"
+
   /login_api:
     get:
       operationId: spiffworkflow_backend.routes.user.login_api
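Note: a hedged usage sketch for the new endpoint with the requests library, assuming a locally running backend on port 7000, the v1.0 API prefix used elsewhere in this spec, and an already-acquired openid token (both values are placeholders):

import requests

backend_url = "http://localhost:7000"  # hypothetical backend URL
access_token = "..."  # an openid access token obtained out of band

response = requests.post(
    f"{backend_url}/v1.0/login_with_access_token",
    params={"access_token": access_token},
)
print(response.json())  # expected: {"ok": true} on success, per the schema above
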
@@ -1605,6 +1625,45 @@ paths:
           schema:
             $ref: "#/components/schemas/Workflow"
 
+  /process-data-file-download/{modified_process_model_identifier}/{process_instance_id}/{process_data_identifier}:
+    parameters:
+      - name: modified_process_model_identifier
+        in: path
+        required: true
+        description: The modified id of an existing process model
+        schema:
+          type: string
+      - name: process_instance_id
+        in: path
+        required: true
+        description: The unique id of an existing process instance.
+        schema:
+          type: integer
+      - name: process_data_identifier
+        in: path
+        required: true
+        description: The identifier of the process data.
+        schema:
+          type: string
+      - name: index
+        in: query
+        required: false
+        description: The optional index of the value if key's value is an array
+        schema:
+          type: integer
+    get:
+      operationId: spiffworkflow_backend.routes.process_api_blueprint.process_data_file_download
+      summary: Download the file referenced in the process data value.
+      tags:
+        - Data Objects
+      responses:
+        "200":
+          description: Fetch succeeded.
+          content:
+            application/json:
+              schema:
+                $ref: "#/components/schemas/Workflow"
+
   /send-event/{modified_process_model_identifier}/{process_instance_id}:
     parameters:
       - name: modified_process_model_identifier
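Note: a hedged sketch of calling the download endpoint, again with requests; every identifier below is a placeholder, and the exact encoding of the modified process model identifier is defined elsewhere in the app, so treat this as illustrative only:

import requests

# placeholder identifiers: a modified process model id, an instance id of 42,
# and a process data key named "my_data_object"
url = (
    "http://localhost:7000/v1.0/process-data-file-download/"
    "some-modified-model-id/42/my_data_object"
)
response = requests.get(url, params={"index": 0})  # index only needed for array values
response.raise_for_status()
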
@@ -1,6 +1,7 @@
 """__init__.py."""
 import os
+import threading
 import uuid
 
 from flask.app import Flask
 from werkzeug.utils import ImportStringError
@@ -16,17 +17,17 @@ def setup_database_uri(app: Flask) -> None:
     """Setup_database_uri."""
     if app.config.get("SPIFFWORKFLOW_BACKEND_DATABASE_URI") is None:
         database_name = f"spiffworkflow_backend_{app.config['ENV_IDENTIFIER']}"
-        if app.config.get("SPIFF_DATABASE_TYPE") == "sqlite":
+        if app.config.get("SPIFFWORKFLOW_BACKEND_DATABASE_TYPE") == "sqlite":
             app.config["SQLALCHEMY_DATABASE_URI"] = (
                 f"sqlite:///{app.instance_path}/db_{app.config['ENV_IDENTIFIER']}.sqlite3"
             )
-        elif app.config.get("SPIFF_DATABASE_TYPE") == "postgres":
+        elif app.config.get("SPIFFWORKFLOW_BACKEND_DATABASE_TYPE") == "postgres":
             app.config["SQLALCHEMY_DATABASE_URI"] = (
                 f"postgresql://spiffworkflow_backend:spiffworkflow_backend@localhost:5432/{database_name}"
             )
         else:
             # use pswd to trick flake8 with hardcoded passwords
-            db_pswd = os.environ.get("DB_PASSWORD")
+            db_pswd = app.config.get("SPIFFWORKFLOW_BACKEND_DATABASE_PASSWORD")
             if db_pswd is None:
                 db_pswd = ""
             app.config["SQLALCHEMY_DATABASE_URI"] = (
|
|||
app.config.from_object(env_config_module)
|
||||
print(f"loaded config: {env_config_module}")
|
||||
except ImportStringError as exception:
|
||||
if os.environ.get("TERRAFORM_DEPLOYED_ENVIRONMENT") != "true":
|
||||
if (
|
||||
os.environ.get("SPIFFWORKFLOW_BACKEND_TERRAFORM_DEPLOYED_ENVIRONMENT")
|
||||
!= "true"
|
||||
):
|
||||
raise ModuleNotFoundError(
|
||||
f"Cannot find config module: {env_config_module}"
|
||||
) from exception
|
||||
|
||||
|
||||
def _set_up_tenant_specific_fields_as_list_of_strings(app: Flask) -> None:
|
||||
tenant_specific_fields = app.config.get(
|
||||
"SPIFFWORKFLOW_BACKEND_OPEN_ID_TENANT_SPECIFIC_FIELDS"
|
||||
)
|
||||
|
||||
if tenant_specific_fields is None or tenant_specific_fields == "":
|
||||
app.config["SPIFFWORKFLOW_BACKEND_OPEN_ID_TENANT_SPECIFIC_FIELDS"] = []
|
||||
else:
|
||||
app.config["SPIFFWORKFLOW_BACKEND_OPEN_ID_TENANT_SPECIFIC_FIELDS"] = (
|
||||
tenant_specific_fields.split(",")
|
||||
)
|
||||
if len(app.config["SPIFFWORKFLOW_BACKEND_OPEN_ID_TENANT_SPECIFIC_FIELDS"]) > 3:
|
||||
raise ConfigurationError(
|
||||
"SPIFFWORKFLOW_BACKEND_OPEN_ID_TENANT_SPECIFIC_FIELDS can have a"
|
||||
" maximum of 3 fields"
|
||||
)
|
||||
|
||||
|
||||
def setup_config(app: Flask) -> None:
|
||||
"""Setup_config."""
|
||||
# ensure the instance folder exists
|
||||
|
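Note: a small sketch of the conversion this helper performs, using a hypothetical two-field value; four or more comma-separated names would trip the ConfigurationError above:

# given SPIFFWORKFLOW_BACKEND_OPEN_ID_TENANT_SPECIFIC_FIELDS="employee_id,cost_center"
fields = "employee_id,cost_center".split(",")
assert fields == ["employee_id", "cost_center"]  # stored back into app.config as a list
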
@@ -59,14 +81,14 @@ def setup_config(app: Flask) -> None:
         pass
 
     app.config["ENV_IDENTIFIER"] = os.environ.get(
-        "SPIFFWORKFLOW_BACKEND_ENV", "development"
+        "SPIFFWORKFLOW_BACKEND_ENV", "local_development"
     )
     app.config["SQLALCHEMY_TRACK_MODIFICATIONS"] = False
-    app.config.from_object("spiffworkflow_backend.config.default")
+    load_config_file(app, "spiffworkflow_backend.config.default")
 
     env_config_prefix = "spiffworkflow_backend.config."
     if (
-        os.environ.get("TERRAFORM_DEPLOYED_ENVIRONMENT") == "true"
+        os.environ.get("SPIFFWORKFLOW_BACKEND_TERRAFORM_DEPLOYED_ENVIRONMENT") == "true"
         and os.environ.get("SPIFFWORKFLOW_BACKEND_ENV") is not None
     ):
         load_config_file(app, f"{env_config_prefix}terraform_deployed_environment")
@@ -77,27 +99,44 @@ def setup_config(app: Flask) -> None:
     # This allows config/testing.py or instance/config.py to override the default config
     if "ENV_IDENTIFIER" in app.config and app.config["ENV_IDENTIFIER"] == "testing":
         app.config.from_pyfile("config/testing.py", silent=True)
+    elif (
+        "ENV_IDENTIFIER" in app.config
+        and app.config["ENV_IDENTIFIER"] == "unit_testing"
+    ):
+        app.config.from_pyfile("config/unit_testing.py", silent=True)
     else:
         app.config.from_pyfile(f"{app.instance_path}/config.py", silent=True)
 
     app.config["PERMISSIONS_FILE_FULLPATH"] = None
-    if app.config["SPIFFWORKFLOW_BACKEND_PERMISSIONS_FILE_NAME"]:
+    permissions_file_name = app.config["SPIFFWORKFLOW_BACKEND_PERMISSIONS_FILE_NAME"]
+    if permissions_file_name is not None:
         app.config["PERMISSIONS_FILE_FULLPATH"] = os.path.join(
             app.root_path,
             "config",
             "permissions",
-            app.config["SPIFFWORKFLOW_BACKEND_PERMISSIONS_FILE_NAME"],
+            permissions_file_name,
         )
+        print(f"base_permissions: loaded permissions file: {permissions_file_name}")
+    else:
+        print("base_permissions: no permissions file loaded")
 
     # unversioned (see .gitignore) config that can override everything and include secrets.
     # src/spiffworkflow_backend/config/secrets.py
     app.config.from_pyfile(os.path.join("config", "secrets.py"), silent=True)
 
-    if app.config["BPMN_SPEC_ABSOLUTE_DIR"] is None:
-        raise ConfigurationError("BPMN_SPEC_ABSOLUTE_DIR config must be set")
+    if app.config["SPIFFWORKFLOW_BACKEND_BPMN_SPEC_ABSOLUTE_DIR"] is None:
+        raise ConfigurationError(
+            "SPIFFWORKFLOW_BACKEND_BPMN_SPEC_ABSOLUTE_DIR config must be set"
+        )
 
     app.config["PROCESS_UUID"] = uuid.uuid4()
 
     setup_database_uri(app)
     setup_logger(app)
 
+    if app.config["SPIFFWORKFLOW_BACKEND_DEFAULT_USER_GROUP"] == "":
+        app.config["SPIFFWORKFLOW_BACKEND_DEFAULT_USER_GROUP"] = None
+
     thread_local_data = threading.local()
     app.config["THREAD_LOCAL_DATA"] = thread_local_data
+    _set_up_tenant_specific_fields_as_list_of_strings(app)

@@ -2,45 +2,57 @@
 import re
 from os import environ
 
-# Does the site allow self-registration of users
-SELF_REGISTRATION = environ.get("SELF_REGISTRATION", default=False)
+# Consider: https://flask.palletsprojects.com/en/2.2.x/config/#configuring-from-environment-variables
+# and from_prefixed_env(), though we want to ensure that these variables are all documented, so that
+# is a benefit of the status quo and having them all in this file explicitly.
 
 DEVELOPMENT = False
 
-BPMN_SPEC_ABSOLUTE_DIR = environ.get("BPMN_SPEC_ABSOLUTE_DIR")
-CORS_DEFAULT = "*"
-CORS_ALLOW_ORIGINS = re.split(
-    r",\s*", environ.get("CORS_ALLOW_ORIGINS", default=CORS_DEFAULT)
+SPIFFWORKFLOW_BACKEND_BPMN_SPEC_ABSOLUTE_DIR = environ.get(
+    "SPIFFWORKFLOW_BACKEND_BPMN_SPEC_ABSOLUTE_DIR"
+)
+cors_allow_all = "*"
+SPIFFWORKFLOW_BACKEND_CORS_ALLOW_ORIGINS = re.split(
+    r",\s*",
+    environ.get("SPIFFWORKFLOW_BACKEND_CORS_ALLOW_ORIGINS", default=cors_allow_all),
 )
 
-RUN_BACKGROUND_SCHEDULER = (
-    environ.get("RUN_BACKGROUND_SCHEDULER", default="false") == "true"
+SPIFFWORKFLOW_BACKEND_RUN_BACKGROUND_SCHEDULER = (
+    environ.get("SPIFFWORKFLOW_BACKEND_RUN_BACKGROUND_SCHEDULER", default="false")
+    == "true"
 )
-SPIFFWORKFLOW_FRONTEND_URL = environ.get(
-    "SPIFFWORKFLOW_FRONTEND_URL", default="http://localhost:7001"
+SPIFFWORKFLOW_BACKEND_URL_FOR_FRONTEND = environ.get(
+    "SPIFFWORKFLOW_BACKEND_URL_FOR_FRONTEND", default="http://localhost:7001"
 )
 SPIFFWORKFLOW_BACKEND_URL = environ.get(
     "SPIFFWORKFLOW_BACKEND_URL", default="http://localhost:7000"
 )
 # service task connector proxy
-CONNECTOR_PROXY_URL = environ.get(
-    "CONNECTOR_PROXY_URL", default="http://localhost:7004"
+SPIFFWORKFLOW_BACKEND_CONNECTOR_PROXY_URL = environ.get(
+    "SPIFFWORKFLOW_BACKEND_CONNECTOR_PROXY_URL", default="http://localhost:7004"
 )
 
 # Open ID server
-OPEN_ID_SERVER_URL = environ.get(
-    "OPEN_ID_SERVER_URL",
-    default="http://localhost:7002/realms/spiffworkflow"
-    # "OPEN_ID_SERVER_URL", default="http://localhost:7000/openid"
-)
-# Replace above line with this to use the built-in Open ID Server.
-# OPEN_ID_SERVER_URL = environ.get("OPEN_ID_SERVER_URL", default="http://localhost:7000/openid")
-OPEN_ID_CLIENT_ID = environ.get("OPEN_ID_CLIENT_ID", default="spiffworkflow-backend")
-OPEN_ID_CLIENT_SECRET_KEY = environ.get(
-    "OPEN_ID_CLIENT_SECRET_KEY", default="JXeQExm0JhQPLumgHtIIqf52bDalHz0q"
+# use "http://localhost:7000/openid" for running with simple openid
+# server hosted by spiffworkflow-backend
+SPIFFWORKFLOW_BACKEND_OPEN_ID_SERVER_URL = environ.get(
+    "SPIFFWORKFLOW_BACKEND_OPEN_ID_SERVER_URL",
+    default="http://localhost:7002/realms/spiffworkflow",
+)
+SPIFFWORKFLOW_BACKEND_OPEN_ID_CLIENT_ID = environ.get(
+    "SPIFFWORKFLOW_BACKEND_OPEN_ID_CLIENT_ID", default="spiffworkflow-backend"
+)
+SPIFFWORKFLOW_BACKEND_OPEN_ID_CLIENT_SECRET_KEY = environ.get(
+    "SPIFFWORKFLOW_BACKEND_OPEN_ID_CLIENT_SECRET_KEY",
+    default="JXeQExm0JhQPLumgHtIIqf52bDalHz0q",
 )  # noqa: S105
 
+# Tenant specific fields is a comma separated list of field names that we will convert to list of strings
+# and store in the user table's tenant_specific_field_n columns. You can have up to three items in this
+# comma-separated list.
+SPIFFWORKFLOW_BACKEND_OPEN_ID_TENANT_SPECIFIC_FIELDS = environ.get(
+    "SPIFFWORKFLOW_BACKEND_OPEN_ID_TENANT_SPECIFIC_FIELDS"
+)
+
 SPIFFWORKFLOW_BACKEND_LOG_TO_FILE = (
     environ.get("SPIFFWORKFLOW_BACKEND_LOG_TO_FILE", default="false") == "true"
 )

@@ -50,13 +62,21 @@ SPIFFWORKFLOW_BACKEND_PERMISSIONS_FILE_NAME = environ.get(
 )
 
 # Sentry Configuration
-SENTRY_DSN = environ.get("SENTRY_DSN", default="")
-SENTRY_ERRORS_SAMPLE_RATE = environ.get(
-    "SENTRY_ERRORS_SAMPLE_RATE", default="1"
+SPIFFWORKFLOW_BACKEND_SENTRY_DSN = environ.get(
+    "SPIFFWORKFLOW_BACKEND_SENTRY_DSN", default=""
+)
+SPIFFWORKFLOW_BACKEND_SENTRY_ERRORS_SAMPLE_RATE = environ.get(
+    "SPIFFWORKFLOW_BACKEND_SENTRY_ERRORS_SAMPLE_RATE", default="1"
 )  # send all errors
-SENTRY_TRACES_SAMPLE_RATE = environ.get(
-    "SENTRY_TRACES_SAMPLE_RATE", default="0.01"
+SPIFFWORKFLOW_BACKEND_SENTRY_TRACES_SAMPLE_RATE = environ.get(
+    "SPIFFWORKFLOW_BACKEND_SENTRY_TRACES_SAMPLE_RATE", default="0.01"
 )  # send 1% of traces
+SPIFFWORKFLOW_BACKEND_SENTRY_ORGANIZATION_SLUG = environ.get(
+    "SPIFFWORKFLOW_BACKEND_SENTRY_ORGANIZATION_SLUG", default=None
+)
+SPIFFWORKFLOW_BACKEND_SENTRY_PROJECT_SLUG = environ.get(
+    "SPIFFWORKFLOW_BACKEND_SENTRY_PROJECT_SLUG", default=None
+)
 
 SPIFFWORKFLOW_BACKEND_LOG_LEVEL = environ.get(
     "SPIFFWORKFLOW_BACKEND_LOG_LEVEL", default="info"
@@ -64,20 +84,57 @@ SPIFFWORKFLOW_BACKEND_LOG_LEVEL = environ.get(
 
 # When a user clicks on the `Publish` button, this is the default branch this server merges into.
 # I.e., dev server could have `staging` here. Staging server might have `production` here.
-GIT_BRANCH_TO_PUBLISH_TO = environ.get("GIT_BRANCH_TO_PUBLISH_TO")
-GIT_BRANCH = environ.get("GIT_BRANCH")
-GIT_CLONE_URL_FOR_PUBLISHING = environ.get("GIT_CLONE_URL")
-GIT_COMMIT_ON_SAVE = environ.get("GIT_COMMIT_ON_SAVE", default="false") == "true"
+SPIFFWORKFLOW_BACKEND_GIT_PUBLISH_TARGET_BRANCH = environ.get(
+    "SPIFFWORKFLOW_BACKEND_GIT_PUBLISH_TARGET_BRANCH"
+)
+# This is the branch that the app automatically commits to every time the user clicks the save button
+# or otherwise changes a process model.
+# If publishing is enabled, the contents of this "staging area" / "scratch pad" / WIP spot will be used
+# as the relevant contents for process model that the user wants to publish.
+SPIFFWORKFLOW_BACKEND_GIT_SOURCE_BRANCH = environ.get(
+    "SPIFFWORKFLOW_BACKEND_GIT_SOURCE_BRANCH"
+)
+SPIFFWORKFLOW_BACKEND_GIT_PUBLISH_CLONE_URL = environ.get(
+    "SPIFFWORKFLOW_BACKEND_GIT_PUBLISH_CLONE_URL"
+)
+SPIFFWORKFLOW_BACKEND_GIT_COMMIT_ON_SAVE = (
+    environ.get("SPIFFWORKFLOW_BACKEND_GIT_COMMIT_ON_SAVE", default="false") == "true"
+)
+SPIFFWORKFLOW_BACKEND_GIT_USERNAME = environ.get("SPIFFWORKFLOW_BACKEND_GIT_USERNAME")
+SPIFFWORKFLOW_BACKEND_GIT_USER_EMAIL = environ.get(
+    "SPIFFWORKFLOW_BACKEND_GIT_USER_EMAIL"
+)
+SPIFFWORKFLOW_BACKEND_GITHUB_WEBHOOK_SECRET = environ.get(
+    "SPIFFWORKFLOW_BACKEND_GITHUB_WEBHOOK_SECRET", default=None
+)
+SPIFFWORKFLOW_BACKEND_GIT_SSH_PRIVATE_KEY_PATH = environ.get(
+    "SPIFFWORKFLOW_BACKEND_GIT_SSH_PRIVATE_KEY_PATH", default=None
+)
 
-# Datbase Configuration
-SPIFF_DATABASE_TYPE = environ.get(
-    "SPIFF_DATABASE_TYPE", default="mysql"
+# Database Configuration
+SPIFFWORKFLOW_BACKEND_DATABASE_TYPE = environ.get(
+    "SPIFFWORKFLOW_BACKEND_DATABASE_TYPE", default="mysql"
 )  # can also be sqlite, postgres
 # Override above with a specific SQLAlchemy connection string.
 SPIFFWORKFLOW_BACKEND_DATABASE_URI = environ.get(
     "SPIFFWORKFLOW_BACKEND_DATABASE_URI", default=None
 )
-SYSTEM_NOTIFICATION_PROCESS_MODEL_MESSAGE_ID = environ.get(
-    "SYSTEM_NOTIFICATION_PROCESS_MODEL_MESSAGE_ID",
+SPIFFWORKFLOW_BACKEND_SYSTEM_NOTIFICATION_PROCESS_MODEL_MESSAGE_ID = environ.get(
+    "SPIFFWORKFLOW_BACKEND_SYSTEM_NOTIFICATION_PROCESS_MODEL_MESSAGE_ID",
     default="Message_SystemMessageNotification",
 )
 
+SPIFFWORKFLOW_BACKEND_ALLOW_CONFISCATING_LOCK_AFTER_SECONDS = int(
+    environ.get(
+        "SPIFFWORKFLOW_BACKEND_ALLOW_CONFISCATING_LOCK_AFTER_SECONDS", default="600"
+    )
+)
+
+SPIFFWORKFLOW_BACKEND_DEFAULT_USER_GROUP = environ.get(
+    "SPIFFWORKFLOW_BACKEND_DEFAULT_USER_GROUP", default="everybody"
+)
+
+# this is only used in CI. use SPIFFWORKFLOW_BACKEND_DATABASE_URI instead for real configuration
+SPIFFWORKFLOW_BACKEND_DATABASE_PASSWORD = environ.get(
+    "SPIFFWORKFLOW_BACKEND_DATABASE_PASSWORD", default=None
+)

@ -1,14 +1,15 @@
|
|||
"""Demo environment."""
|
||||
from os import environ
|
||||
|
||||
GIT_COMMIT_ON_SAVE = True
|
||||
GIT_USERNAME = "demo"
|
||||
GIT_USER_EMAIL = "demo@example.com"
|
||||
SPIFFWORKFLOW_BACKEND_GIT_COMMIT_ON_SAVE = True
|
||||
SPIFFWORKFLOW_BACKEND_GIT_USERNAME = "demo"
|
||||
SPIFFWORKFLOW_BACKEND_GIT_USER_EMAIL = "demo@example.com"
|
||||
SPIFFWORKFLOW_BACKEND_PERMISSIONS_FILE_NAME = environ.get(
|
||||
"SPIFFWORKFLOW_BACKEND_PERMISSIONS_FILE_NAME",
|
||||
default="terraform_deployed_environment.yml",
|
||||
)
|
||||
|
||||
RUN_BACKGROUND_SCHEDULER = (
|
||||
environ.get("RUN_BACKGROUND_SCHEDULER", default="false") == "true"
|
||||
SPIFFWORKFLOW_BACKEND_RUN_BACKGROUND_SCHEDULER = (
|
||||
environ.get("SPIFFWORKFLOW_BACKEND_RUN_BACKGROUND_SCHEDULER", default="false")
|
||||
== "true"
|
||||
)
|
||||
|
|
|
@ -1,9 +1,14 @@
|
|||
"""Dev."""
|
||||
from os import environ
|
||||
|
||||
GIT_BRANCH_TO_PUBLISH_TO = environ.get("GIT_BRANCH_TO_PUBLISH_TO", default="staging")
|
||||
GIT_USERNAME = environ.get("GIT_USERNAME", default="sartography-automated-committer")
|
||||
GIT_USER_EMAIL = environ.get(
|
||||
"GIT_USER_EMAIL", default="sartography-automated-committer@users.noreply.github.com"
|
||||
SPIFFWORKFLOW_BACKEND_GIT_PUBLISH_TARGET_BRANCH = environ.get(
|
||||
"SPIFFWORKFLOW_BACKEND_GIT_PUBLISH_TARGET_BRANCH", default="staging"
|
||||
)
|
||||
SPIFFWORKFLOW_BACKEND_GIT_USERNAME = environ.get(
|
||||
"SPIFFWORKFLOW_BACKEND_GIT_USERNAME", default="sartography-automated-committer"
|
||||
)
|
||||
SPIFFWORKFLOW_BACKEND_GIT_USER_EMAIL = environ.get(
|
||||
"SPIFFWORKFLOW_BACKEND_GIT_USER_EMAIL",
|
||||
default="sartography-automated-committer@users.noreply.github.com",
|
||||
)
|
||||
SPIFFWORKFLOW_BACKEND_PERMISSIONS_FILE_NAME = "dev.yml"
|
||||
|
|
|
@ -1,19 +0,0 @@
|
|||
"""Development."""
|
||||
from os import environ
|
||||
|
||||
SPIFFWORKFLOW_BACKEND_PERMISSIONS_FILE_NAME = environ.get(
|
||||
"SPIFFWORKFLOW_BACKEND_PERMISSIONS_FILE_NAME", default="development.yml"
|
||||
)
|
||||
|
||||
SPIFFWORKFLOW_BACKEND_LOG_LEVEL = environ.get(
|
||||
"SPIFFWORKFLOW_BACKEND_LOG_LEVEL", default="debug"
|
||||
)
|
||||
|
||||
RUN_BACKGROUND_SCHEDULER = (
|
||||
environ.get("RUN_BACKGROUND_SCHEDULER", default="false") == "true"
|
||||
)
|
||||
GIT_CLONE_URL_FOR_PUBLISHING = environ.get(
|
||||
"GIT_CLONE_URL", default="https://github.com/sartography/sample-process-models.git"
|
||||
)
|
||||
GIT_USERNAME = "sartography-automated-committer"
|
||||
GIT_USER_EMAIL = f"{GIT_USERNAME}@users.noreply.github.com"
|
|
@ -0,0 +1,23 @@
|
|||
"""Development."""
|
||||
from os import environ
|
||||
|
||||
SPIFFWORKFLOW_BACKEND_PERMISSIONS_FILE_NAME = environ.get(
|
||||
"SPIFFWORKFLOW_BACKEND_PERMISSIONS_FILE_NAME", default="local_development.yml"
|
||||
)
|
||||
|
||||
SPIFFWORKFLOW_BACKEND_LOG_LEVEL = environ.get(
|
||||
"SPIFFWORKFLOW_BACKEND_LOG_LEVEL", default="debug"
|
||||
)
|
||||
|
||||
SPIFFWORKFLOW_BACKEND_RUN_BACKGROUND_SCHEDULER = (
|
||||
environ.get("SPIFFWORKFLOW_BACKEND_RUN_BACKGROUND_SCHEDULER", default="false")
|
||||
== "true"
|
||||
)
|
||||
SPIFFWORKFLOW_BACKEND_GIT_PUBLISH_CLONE_URL = environ.get(
|
||||
"SPIFFWORKFLOW_BACKEND_GIT_PUBLISH_CLONE_URL",
|
||||
default="https://github.com/sartography/sample-process-models.git",
|
||||
)
|
||||
SPIFFWORKFLOW_BACKEND_GIT_USERNAME = "sartography-automated-committer"
|
||||
SPIFFWORKFLOW_BACKEND_GIT_USER_EMAIL = (
|
||||
f"{SPIFFWORKFLOW_BACKEND_GIT_USERNAME}@users.noreply.github.com"
|
||||
)
|
|

@@ -16,6 +16,12 @@ users:
    email: malala@spiffworkflow.org
    password: malala
    preferred_username: Malala
  oskar:
    service: local_open_id
    email: oskar@spiffworkflow.org
    password: oskar
    preferred_username: Oskar

groups:
  admin:

@@ -23,16 +29,6 @@ groups:
      [
        admin@spiffworkflow.org,
      ]
  Education:
    users:
      [
        malala@spiffworkflow.org
      ]
  President:
    users:
      [
        nelson@spiffworkflow.org
      ]

permissions:
  # Admins have access to everything.

@@ -75,16 +71,8 @@ permissions:
    users: [ ]
    allowed_permissions: [ read ]
    uri: /processes
  # Members of the Education group can change the processes under "education".
  education-admin:
    groups: ["Education", "President"]
    users: []
    allowed_permissions: [create, read, update, delete]
    uri: /process-groups/education:*

  # Anyone can start an education process.
  education-everybody:
  groups-everybody:
    groups: [everybody]
    users: []
    allowed_permissions: [create, read]
    uri: /process-instances/misc:category_number_one:process-model-with-form/*
    uri: /v1.0/user-groups/for-current-user

@@ -1,10 +1,15 @@
"""Qa1."""
from os import environ

GIT_BRANCH_TO_PUBLISH_TO = environ.get("GIT_BRANCH_TO_PUBLISH_TO", default="qa2")
GIT_USERNAME = environ.get("GIT_USERNAME", default="sartography-automated-committer")
GIT_USER_EMAIL = environ.get(
    "GIT_USER_EMAIL", default="sartography-automated-committer@users.noreply.github.com"
SPIFFWORKFLOW_BACKEND_GIT_PUBLISH_TARGET_BRANCH = environ.get(
    "SPIFFWORKFLOW_BACKEND_GIT_PUBLISH_TARGET_BRANCH", default="qa2"
)
SPIFFWORKFLOW_BACKEND_GIT_USERNAME = environ.get(
    "SPIFFWORKFLOW_BACKEND_GIT_USERNAME", default="sartography-automated-committer"
)
SPIFFWORKFLOW_BACKEND_GIT_USER_EMAIL = environ.get(
    "SPIFFWORKFLOW_BACKEND_GIT_USER_EMAIL",
    default=f"{SPIFFWORKFLOW_BACKEND_GIT_USERNAME}@users.noreply.github.com",
)
SPIFFWORKFLOW_BACKEND_PERMISSIONS_FILE_NAME = environ.get(
    "SPIFFWORKFLOW_BACKEND_PERMISSIONS_FILE_NAME", default="qa1.yml"

@@ -0,0 +1,14 @@
"""Qa2."""
from os import environ

SPIFFWORKFLOW_BACKEND_PERMISSIONS_FILE_NAME = environ.get(
    "SPIFFWORKFLOW_BACKEND_PERMISSIONS_FILE_NAME", default="qa1.yml"
)
SPIFFWORKFLOW_BACKEND_URL_FOR_FRONTEND = "https://qa2.spiffworkflow.org"
SPIFFWORKFLOW_BACKEND_OPEN_ID_SERVER_URL = (
    "https://qa2.spiffworkflow.org/keycloak/realms/spiffworkflow"
)
SPIFFWORKFLOW_BACKEND_URL = "https://qa2.spiffworkflow.org/api"
SPIFFWORKFLOW_BACKEND_CONNECTOR_PROXY_URL = (
    "https://qa2.spiffworkflow.org/connector-proxy"
)

@@ -0,0 +1,15 @@
"""Default."""
from os import environ

environment_identifier_for_this_config_file_only = environ["SPIFFWORKFLOW_BACKEND_ENV"]
SPIFFWORKFLOW_BACKEND_OPEN_ID_SERVER_URL = (
    f"https://keycloak.{environment_identifier_for_this_config_file_only}"
    ".spiffworkflow.org/realms/sartography"
)
SPIFFWORKFLOW_BACKEND_GIT_SOURCE_BRANCH = environ.get(
    "SPIFFWORKFLOW_BACKEND_GIT_SOURCE_BRANCH", default="main"
)
SPIFFWORKFLOW_BACKEND_GIT_PUBLISH_CLONE_URL = environ.get(
    "SPIFFWORKFLOW_BACKEND_GIT_PUBLISH_CLONE_URL",
    default="https://github.com/sartography/sartography-process-models.git",
)

@@ -1,7 +1,11 @@
"""Staging."""
from os import environ

GIT_BRANCH = environ.get("GIT_BRANCH", default="staging")
GIT_BRANCH_TO_PUBLISH_TO = environ.get("GIT_BRANCH_TO_PUBLISH_TO", default="main")
GIT_COMMIT_ON_SAVE = False
SPIFFWORKFLOW_BACKEND_GIT_SOURCE_BRANCH = environ.get(
    "SPIFFWORKFLOW_BACKEND_GIT_SOURCE_BRANCH", default="staging"
)
SPIFFWORKFLOW_BACKEND_GIT_PUBLISH_TARGET_BRANCH = environ.get(
    "SPIFFWORKFLOW_BACKEND_GIT_PUBLISH_TARGET_BRANCH", default="main"
)
SPIFFWORKFLOW_BACKEND_GIT_COMMIT_ON_SAVE = False
SPIFFWORKFLOW_BACKEND_PERMISSIONS_FILE_NAME = "staging.yml"

@@ -4,26 +4,36 @@ from os import environ
# default.py already ensured that this key existed as was not None
environment_identifier_for_this_config_file_only = environ["SPIFFWORKFLOW_BACKEND_ENV"]

GIT_COMMIT_ON_SAVE = True
GIT_USERNAME = "sartography-automated-committer"
GIT_USER_EMAIL = f"{GIT_USERNAME}@users.noreply.github.com"
SPIFFWORKFLOW_BACKEND_GIT_COMMIT_ON_SAVE = True
SPIFFWORKFLOW_BACKEND_GIT_USERNAME = "sartography-automated-committer"
SPIFFWORKFLOW_BACKEND_GIT_USER_EMAIL = (
    f"{SPIFFWORKFLOW_BACKEND_GIT_USERNAME}@users.noreply.github.com"
)
SPIFFWORKFLOW_BACKEND_PERMISSIONS_FILE_NAME = environ.get(
    "SPIFFWORKFLOW_BACKEND_PERMISSIONS_FILE_NAME",
    default="terraform_deployed_environment.yml",
)

RUN_BACKGROUND_SCHEDULER = (
    environ.get("RUN_BACKGROUND_SCHEDULER", default="false") == "true"
SPIFFWORKFLOW_BACKEND_RUN_BACKGROUND_SCHEDULER = (
    environ.get("SPIFFWORKFLOW_BACKEND_RUN_BACKGROUND_SCHEDULER", default="false")
    == "true"
)

OPEN_ID_SERVER_URL = f"https://keycloak.{environment_identifier_for_this_config_file_only}.spiffworkflow.org/realms/spiffworkflow"
SPIFFWORKFLOW_FRONTEND_URL = (
SPIFFWORKFLOW_BACKEND_OPEN_ID_SERVER_URL = (
    f"https://keycloak.{environment_identifier_for_this_config_file_only}"
    ".spiffworkflow.org/realms/spiffworkflow"
)
SPIFFWORKFLOW_BACKEND_URL_FOR_FRONTEND = (
    f"https://{environment_identifier_for_this_config_file_only}.spiffworkflow.org"
)
SPIFFWORKFLOW_BACKEND_URL = (
    f"https://api.{environment_identifier_for_this_config_file_only}.spiffworkflow.org"
)
CONNECTOR_PROXY_URL = f"https://connector-proxy.{environment_identifier_for_this_config_file_only}.spiffworkflow.org"
GIT_CLONE_URL_FOR_PUBLISHING = environ.get(
    "GIT_CLONE_URL", default="https://github.com/sartography/sample-process-models.git"
SPIFFWORKFLOW_BACKEND_CONNECTOR_PROXY_URL = (
    f"https://connector-proxy.{environment_identifier_for_this_config_file_only}"
    ".spiffworkflow.org"
)
SPIFFWORKFLOW_BACKEND_GIT_PUBLISH_CLONE_URL = environ.get(
    "SPIFFWORKFLOW_BACKEND_GIT_PUBLISH_CLONE_URL",
    default="https://github.com/sartography/sample-process-models.git",
)

@@ -9,17 +9,17 @@ SPIFFWORKFLOW_BACKEND_LOG_TO_FILE = (
)

SPIFFWORKFLOW_BACKEND_PERMISSIONS_FILE_NAME = environ.get(
    "SPIFFWORKFLOW_BACKEND_PERMISSIONS_FILE_NAME", default="testing.yml"
    "SPIFFWORKFLOW_BACKEND_PERMISSIONS_FILE_NAME", default="unit_testing.yml"
)

SPIFFWORKFLOW_BACKEND_LOG_LEVEL = environ.get(
    "SPIFFWORKFLOW_BACKEND_LOG_LEVEL", default="debug"
)
GIT_COMMIT_ON_SAVE = False
SPIFFWORKFLOW_BACKEND_GIT_COMMIT_ON_SAVE = False

# NOTE: set this here since nox shoves tests and src code to
# different places and this allows us to know exactly where we are at the start
BPMN_SPEC_ABSOLUTE_DIR = os.path.join(
SPIFFWORKFLOW_BACKEND_BPMN_SPEC_ABSOLUTE_DIR = os.path.join(
    os.path.dirname(__file__),
    "..",
    "..",

@@ -0,0 +1,261 @@
"""API Error functionality."""
from __future__ import annotations

import json
from dataclasses import dataclass
from dataclasses import field
from typing import Any

import flask.wrappers
import sentry_sdk
from flask import Blueprint
from flask import current_app
from flask import g
from flask import jsonify
from flask import make_response
from sentry_sdk import capture_exception
from sentry_sdk import set_tag
from SpiffWorkflow.exceptions import SpiffWorkflowException # type: ignore
from SpiffWorkflow.exceptions import WorkflowException
from SpiffWorkflow.exceptions import WorkflowTaskException
from SpiffWorkflow.specs.base import TaskSpec # type: ignore
from SpiffWorkflow.task import Task # type: ignore

from spiffworkflow_backend.services.authentication_service import NotAuthorizedError
from spiffworkflow_backend.services.authentication_service import TokenInvalidError
from spiffworkflow_backend.services.authentication_service import TokenNotProvidedError
from spiffworkflow_backend.services.authentication_service import UserNotLoggedInError


api_error_blueprint = Blueprint("api_error_blueprint", __name__)


@dataclass
class ApiError(Exception):
    """ApiError Class to help handle exceptions."""

    error_code: str
    message: str
    error_line: str = ""
    error_type: str = ""
    file_name: str = ""
    line_number: int = 0
    offset: int = 0
    sentry_link: str | None = None
    status_code: int = 400
    tag: str = ""
    task_data: dict | str | None = field(default_factory=dict)
    task_id: str = ""
    task_name: str = ""
    task_trace: list | None = field(default_factory=list)

    def __str__(self) -> str:
        """Instructions to print instance as a string."""
        msg = "ApiError: % s. " % self.message
        if self.task_name:
            msg += f"Error in task '{self.task_name}' ({self.task_id}). "
        if self.line_number:
            msg += "Error is on line %i. " % self.line_number
        if self.file_name:
            msg += "In file %s. " % self.file_name
        return msg

    @classmethod
    def from_task(
        cls,
        error_code: str,
        message: str,
        task: Task,
        status_code: int = 400,
        line_number: int = 0,
        offset: int = 0,
        error_type: str = "",
        error_line: str = "",
        task_trace: list | None = None,
    ) -> ApiError:
        """Constructs an API Error with details pulled from the current task."""
        instance = cls(error_code, message, status_code=status_code)
        instance.task_id = task.task_spec.name or ""
        instance.task_name = task.task_spec.description or ""
        instance.file_name = task.workflow.spec.file or ""
        instance.line_number = line_number
        instance.offset = offset
        instance.error_type = error_type
        instance.error_line = error_line
        if task_trace:
            instance.task_trace = task_trace
        else:
            instance.task_trace = WorkflowTaskException.get_task_trace(task)

        # spiffworkflow is doing something weird where task ends up referenced in the data in some cases.
        if "task" in task.data:
            task.data.pop("task")

        # Assure that there is nothing in the json data that can't be serialized.
        instance.task_data = ApiError.remove_unserializeable_from_dict(task.data)

        return instance

    @staticmethod
    def remove_unserializeable_from_dict(my_dict: dict) -> dict:
        """Removes unserializeable from dict."""
        keys_to_delete = []
        for key, value in my_dict.items():
            if not ApiError.is_jsonable(value):
                keys_to_delete.append(key)
        for key in keys_to_delete:
            del my_dict[key]
        return my_dict

    @staticmethod
    def is_jsonable(x: Any) -> bool:
        """Attempts a json.dump on given input and returns false if it cannot."""
        try:
            json.dumps(x)
            return True
        except (TypeError, OverflowError, ValueError):
            return False

    @classmethod
    def from_task_spec(
        cls,
        code: str,
        message: str,
        task_spec: TaskSpec,
        status_code: int = 400,
    ) -> ApiError:
        """Constructs an API Error with details pulled from the current task."""
        instance = cls(code, message, status_code=status_code)
        instance.task_id = task_spec.name or ""
        instance.task_name = task_spec.description or ""
        if task_spec._wf_spec:
            instance.file_name = task_spec._wf_spec.file
        return instance

    @classmethod
    def from_workflow_exception(
        cls,
        error_code: str,
        message: str,
        exp: SpiffWorkflowException,
    ) -> ApiError:
        """Deals with workflow exceptions.

        We catch a lot of workflow exception errors,
        so consolidating the error_code, and doing the best things
        we can with the data we have.
        """
        if isinstance(exp, WorkflowTaskException):
            # Note that WorkflowDataExceptions are also WorkflowTaskExceptions
            return ApiError.from_task(
                error_code,
                message,
                exp.task,
                line_number=exp.line_number,
                offset=exp.offset,
                error_type=exp.error_type,
                error_line=exp.error_line,
                task_trace=exp.task_trace,
            )
        elif isinstance(exp, WorkflowException):
            return ApiError.from_task_spec(error_code, message, exp.task_spec)
        else:
            return ApiError("workflow_error", str(exp))


def set_user_sentry_context() -> None:
    """Set_user_sentry_context."""
    try:
        username = g.user.username
    except Exception:
        username = "Unknown"
    # This is for sentry logging into Slack
    sentry_sdk.set_context("User", {"user": username})
    set_tag("username", username)


def should_notify_sentry(exception: Exception) -> bool:
    """Determine if we should notify sentry.

    We want to capture_exception to log the exception to sentry, but we don't want to log:
    1. ApiErrors that are just invalid tokens
    2. NotAuthorizedError. we usually call check-permissions before calling an API to
    make sure we'll have access, but there are some cases
    where it's more convenient to just make the call from the frontend and handle the 403 appropriately.
    """
    if isinstance(exception, ApiError):
        if exception.error_code == "invalid_token":
            return False
    if isinstance(exception, NotAuthorizedError):
        return False
    return True


@api_error_blueprint.app_errorhandler(Exception) # type: ignore
def handle_exception(exception: Exception) -> flask.wrappers.Response:
    """Handles unexpected exceptions."""
    set_user_sentry_context()

    sentry_link = None
    if should_notify_sentry(exception):
        id = capture_exception(exception)

        if isinstance(exception, ApiError):
            current_app.logger.info(
                f"Sending ApiError exception to sentry: {exception} with error code"
                f" {exception.error_code}"
            )

        organization_slug = current_app.config.get(
            "SPIFFWORKFLOW_BACKEND_SENTRY_ORGANIZATION_SLUG"
        )
        project_slug = current_app.config.get(
            "SPIFFWORKFLOW_BACKEND_SENTRY_PROJECT_SLUG"
        )
        if organization_slug and project_slug:
            sentry_link = (
                f"https://sentry.io/{organization_slug}/{project_slug}/events/{id}"
            )

        # !!!NOTE!!!: do this after sentry stuff since calling logger.exception
        # seems to break the sentry sdk context where we no longer get back
        # an event id or send out tags like username
        current_app.logger.exception(exception)
    else:
        current_app.logger.warning(
            f"Received exception: {exception}. Since we do not want this particular"
            " exception in sentry, we cannot use logger.exception or logger.error, so"
            " there will be no backtrace. see api_error.py"
        )

    error_code = "internal_server_error"
    status_code = 500
    if (
        isinstance(exception, NotAuthorizedError)
        or isinstance(exception, TokenNotProvidedError)
        or isinstance(exception, TokenInvalidError)
    ):
        error_code = "not_authorized"
        status_code = 403
    if isinstance(exception, UserNotLoggedInError):
        error_code = "not_authenticated"
        status_code = 401

    # set api_exception like this to avoid confusing mypy
    # about what type the object is
    api_exception = None
    if isinstance(exception, ApiError):
        api_exception = exception
    elif isinstance(exception, SpiffWorkflowException):
        api_exception = ApiError.from_workflow_exception(
            "unexpected_workflow_exception", "Unexpected Workflow Error", exception
        )
    else:
        api_exception = ApiError(
            error_code=error_code,
            message=f"{exception.__class__.__name__}",
            sentry_link=sentry_link,
            status_code=status_code,
        )

    return make_response(jsonify(api_exception), api_exception.status_code)

@@ -2,7 +2,8 @@
import time

import sqlalchemy
from flask_bpmn.models.db import db

from spiffworkflow_backend.models.db import db


def try_to_connect(start_time: float) -> None:

@@ -8,7 +8,7 @@ avoid circular imports
"""


from flask_bpmn.models.db import add_listeners
from spiffworkflow_backend.models.db import add_listeners

# must load these before UserModel and GroupModel for relationships
from spiffworkflow_backend.models.user_group_assignment import (

@@ -0,0 +1,85 @@
"""Db."""
from __future__ import annotations

import enum
import time
from typing import Any

from flask_migrate import Migrate # type: ignore
from flask_sqlalchemy import SQLAlchemy
from sqlalchemy import event
from sqlalchemy.engine.base import Connection
from sqlalchemy.orm.mapper import Mapper

db = SQLAlchemy()
migrate = Migrate()


class SpiffworkflowBaseDBModel(db.Model): # type: ignore
    """SpiffworkflowBaseDBModel."""

    __abstract__ = True

    @classmethod
    def _all_subclasses(cls) -> list[type[SpiffworkflowBaseDBModel]]:
        """Get all subclasses of cls, descending.

        So, if A is a subclass of B is a subclass of cls, this
        will include A and B.
        (Does not include cls)
        """
        children = cls.__subclasses__()
        result = []
        while children:
            next = children.pop()
            subclasses = next.__subclasses__()
            result.append(next)
            # check subclasses of subclasses SpiffworkflowBaseDBModel. i guess we only go down to grandchildren, which seems cool.
            for subclass in subclasses:
                children.append(subclass)
        return result

    def validate_enum_field(
        self, key: str, value: Any, enum_variable: enum.EnumMeta
    ) -> Any:
        """Validate_enum_field."""
        try:
            m_type = getattr(enum_variable, value, None)
        except Exception as e:
            raise ValueError(
                f"{self.__class__.__name__}: invalid {key}: {value}"
            ) from e

        if m_type is None:
            raise ValueError(f"{self.__class__.__name__}: invalid {key}: {value}")

        return m_type.value


def update_created_modified_on_create_listener(
    mapper: Mapper, _connection: Connection, target: SpiffworkflowBaseDBModel
) -> None:
    """Event listener that runs before a record is updated, and sets the create/modified field accordingly."""
    if "created_at_in_seconds" in mapper.columns.keys():
        target.created_at_in_seconds = round(time.time())
    if "updated_at_in_seconds" in mapper.columns.keys():
        target.updated_at_in_seconds = round(time.time())


def update_modified_on_update_listener(
    mapper: Mapper, _connection: Connection, target: SpiffworkflowBaseDBModel
) -> None:
    """Event listener that runs before a record is updated, and sets the modified field accordingly."""
    if "updated_at_in_seconds" in mapper.columns.keys():
        if db.session.is_modified(target, include_collections=False):
            target.updated_at_in_seconds = round(time.time())


def add_listeners() -> None:
    """Adds the listeners to all subclasses.

    This should be called after importing all subclasses
    """
    for cls in SpiffworkflowBaseDBModel._all_subclasses():
        event.listen(cls, "before_insert", update_created_modified_on_create_listener) # type: ignore
        event.listen(cls, "before_update", update_modified_on_update_listener) # type: ignore

@@ -3,10 +3,11 @@ from __future__ import annotations

from typing import TYPE_CHECKING

from flask_bpmn.models.db import db
from flask_bpmn.models.group import FlaskBpmnGroupModel
from sqlalchemy.orm import relationship

from spiffworkflow_backend.models.db import db
from spiffworkflow_backend.models.db import SpiffworkflowBaseDBModel

if TYPE_CHECKING:
    from spiffworkflow_backend.models.user_group_assignment import ( # noqa: F401
        UserGroupAssignmentModel,

@@ -18,12 +19,14 @@ class GroupNotFoundError(Exception):
    """GroupNotFoundError."""


class GroupModel(FlaskBpmnGroupModel):
class GroupModel(SpiffworkflowBaseDBModel):
    """GroupModel."""

    __tablename__ = "group"
    __table_args__ = {"extend_existing": True}

    id = db.Column(db.Integer, primary_key=True)
    name = db.Column(db.String(255))
    identifier = db.Column(db.String(255))

    user_group_assignments = relationship("UserGroupAssignmentModel", cascade="delete")

@@ -4,11 +4,11 @@ from __future__ import annotations
from dataclasses import dataclass
from typing import TYPE_CHECKING

from flask_bpmn.models.db import db
from flask_bpmn.models.db import SpiffworkflowBaseDBModel
from sqlalchemy import ForeignKey
from sqlalchemy.orm import relationship

from spiffworkflow_backend.models.db import db
from spiffworkflow_backend.models.db import SpiffworkflowBaseDBModel
from spiffworkflow_backend.models.group import GroupModel
from spiffworkflow_backend.models.process_instance import ProcessInstanceModel
from spiffworkflow_backend.models.task import Task

@@ -26,9 +26,6 @@ class HumanTaskModel(SpiffworkflowBaseDBModel):
    """HumanTaskModel."""

    __tablename__ = "human_task"
    __table_args__ = (
        db.UniqueConstraint("task_id", "process_instance_id", name="human_task_unique"),
    )

    id: int = db.Column(db.Integer, primary_key=True)
    process_instance_id: int = db.Column(

@@ -3,10 +3,11 @@ from __future__ import annotations

from dataclasses import dataclass

from flask_bpmn.models.db import db
from flask_bpmn.models.db import SpiffworkflowBaseDBModel
from sqlalchemy import ForeignKey
from sqlalchemy.orm import relationship

from spiffworkflow_backend.models.db import db
from spiffworkflow_backend.models.db import SpiffworkflowBaseDBModel
from spiffworkflow_backend.models.human_task import HumanTaskModel
from spiffworkflow_backend.models.user import UserModel

@@ -30,3 +31,5 @@ class HumanTaskUserModel(SpiffworkflowBaseDBModel):
        ForeignKey(HumanTaskModel.id), nullable=False, index=True # type: ignore
    )
    user_id = db.Column(ForeignKey(UserModel.id), nullable=False, index=True) # type: ignore

    human_task = relationship(HumanTaskModel)

@@ -2,11 +2,11 @@
from dataclasses import dataclass
from typing import TYPE_CHECKING

from flask_bpmn.models.db import db
from flask_bpmn.models.db import SpiffworkflowBaseDBModel
from sqlalchemy import ForeignKey
from sqlalchemy.orm import relationship

from spiffworkflow_backend.models.db import db
from spiffworkflow_backend.models.db import SpiffworkflowBaseDBModel
from spiffworkflow_backend.models.message_correlation_property import (
    MessageCorrelationPropertyModel,
)

@@ -1,10 +1,10 @@
"""Message_correlation_message_instance."""
from dataclasses import dataclass

from flask_bpmn.models.db import db
from flask_bpmn.models.db import SpiffworkflowBaseDBModel
from sqlalchemy import ForeignKey

from spiffworkflow_backend.models.db import db
from spiffworkflow_backend.models.db import SpiffworkflowBaseDBModel
from spiffworkflow_backend.models.message_correlation import MessageCorrelationModel
from spiffworkflow_backend.models.message_instance import MessageInstanceModel

@@ -1,8 +1,8 @@
"""Message_correlation_property."""
from flask_bpmn.models.db import db
from flask_bpmn.models.db import SpiffworkflowBaseDBModel
from sqlalchemy import ForeignKey

from spiffworkflow_backend.models.db import db
from spiffworkflow_backend.models.db import SpiffworkflowBaseDBModel
from spiffworkflow_backend.models.message_model import MessageModel

@@ -5,14 +5,14 @@ from typing import Any
from typing import Optional
from typing import TYPE_CHECKING

from flask_bpmn.models.db import db
from flask_bpmn.models.db import SpiffworkflowBaseDBModel
from sqlalchemy import ForeignKey
from sqlalchemy.event import listens_for
from sqlalchemy.orm import relationship
from sqlalchemy.orm import Session
from sqlalchemy.orm import validates

from spiffworkflow_backend.models.db import db
from spiffworkflow_backend.models.db import SpiffworkflowBaseDBModel
from spiffworkflow_backend.models.message_model import MessageModel
from spiffworkflow_backend.models.process_instance import ProcessInstanceModel

@@ -1,6 +1,6 @@
"""Message_model."""
from flask_bpmn.models.db import db
from flask_bpmn.models.db import SpiffworkflowBaseDBModel
from spiffworkflow_backend.models.db import db
from spiffworkflow_backend.models.db import SpiffworkflowBaseDBModel


class MessageModel(SpiffworkflowBaseDBModel):

@@ -1,8 +1,8 @@
"""Message_correlation_property."""
from flask_bpmn.models.db import db
from flask_bpmn.models.db import SpiffworkflowBaseDBModel
from sqlalchemy import ForeignKey

from spiffworkflow_backend.models.db import db
from spiffworkflow_backend.models.db import SpiffworkflowBaseDBModel
from spiffworkflow_backend.models.message_model import MessageModel

@@ -2,11 +2,11 @@
import enum
from typing import Any

from flask_bpmn.models.db import db
from flask_bpmn.models.db import SpiffworkflowBaseDBModel
from sqlalchemy import ForeignKey
from sqlalchemy.orm import validates

from spiffworkflow_backend.models.db import db
from spiffworkflow_backend.models.db import SpiffworkflowBaseDBModel
from spiffworkflow_backend.models.permission_target import PermissionTargetModel
from spiffworkflow_backend.models.principal import PrincipalModel

@@ -3,10 +3,11 @@ import re
from dataclasses import dataclass
from typing import Optional

from flask_bpmn.models.db import db
from flask_bpmn.models.db import SpiffworkflowBaseDBModel
from sqlalchemy.orm import validates

from spiffworkflow_backend.models.db import db
from spiffworkflow_backend.models.db import SpiffworkflowBaseDBModel


class InvalidPermissionTargetUriError(Exception):
    """InvalidPermissionTargetUriError."""

@@ -1,12 +1,12 @@
"""Principal."""
from dataclasses import dataclass

from flask_bpmn.models.db import db
from flask_bpmn.models.db import SpiffworkflowBaseDBModel
from sqlalchemy import ForeignKey
from sqlalchemy.orm import relationship
from sqlalchemy.schema import CheckConstraint

from spiffworkflow_backend.models.db import db
from spiffworkflow_backend.models.db import SpiffworkflowBaseDBModel
from spiffworkflow_backend.models.group import GroupModel
from spiffworkflow_backend.models.user import UserModel

@@ -5,8 +5,6 @@ from typing import Any
from typing import cast

import marshmallow
from flask_bpmn.models.db import db
from flask_bpmn.models.db import SpiffworkflowBaseDBModel
from marshmallow import INCLUDE
from marshmallow import Schema
from marshmallow_enum import EnumField # type: ignore

@@ -17,6 +15,8 @@ from sqlalchemy.orm import relationship
from sqlalchemy.orm import validates

from spiffworkflow_backend.helpers.spiff_enum import SpiffEnum
from spiffworkflow_backend.models.db import db
from spiffworkflow_backend.models.db import SpiffworkflowBaseDBModel
from spiffworkflow_backend.models.task import Task
from spiffworkflow_backend.models.task import TaskSchema
from spiffworkflow_backend.models.user import UserModel

@@ -75,6 +75,10 @@ class ProcessInstanceModel(SpiffworkflowBaseDBModel):
    ) # type: ignore
    message_instances = relationship("MessageInstanceModel", cascade="delete") # type: ignore
    message_correlations = relationship("MessageCorrelationModel", cascade="delete") # type: ignore
    process_metadata = relationship(
        "ProcessInstanceMetadataModel",
        cascade="delete",
    ) # type: ignore

    bpmn_json: str | None = deferred(db.Column(db.JSON)) # type: ignore
    start_in_seconds: int | None = db.Column(db.Integer)

@@ -83,11 +87,16 @@ class ProcessInstanceModel(SpiffworkflowBaseDBModel):
    created_at_in_seconds: int = db.Column(db.Integer)
    status: str = db.Column(db.String(50))

    bpmn_xml_file_contents: str | None = None
    bpmn_version_control_type: str = db.Column(db.String(50))
    bpmn_version_control_identifier: str = db.Column(db.String(255))
    spiff_step: int = db.Column(db.Integer)

    locked_by: str | None = db.Column(db.String(80))
    locked_at_in_seconds: int | None = db.Column(db.Integer)

    bpmn_xml_file_contents: str | None = None
    process_model_with_diagram_identifier: str | None = None

    @property
    def serialized(self) -> dict[str, Any]:
        """Return object data in serializeable format."""

@@ -108,6 +117,14 @@ class ProcessInstanceModel(SpiffworkflowBaseDBModel):
            "process_initiator_username": self.process_initiator.username,
        }

    def serialized_with_metadata(self) -> dict[str, Any]:
        process_instance_attributes = self.serialized
        process_instance_attributes["process_metadata"] = self.process_metadata
        process_instance_attributes["process_model_with_diagram_identifier"] = (
            self.process_model_with_diagram_identifier
        )
        return process_instance_attributes

    @property
    def serialized_flat(self) -> dict:
        """Return object in serializeable format with data merged together with top-level attributes.

@@ -1,10 +1,10 @@
"""Process_instance_metadata."""
from dataclasses import dataclass

from flask_bpmn.models.db import db
from flask_bpmn.models.db import SpiffworkflowBaseDBModel
from sqlalchemy import ForeignKey

from spiffworkflow_backend.models.db import db
from spiffworkflow_backend.models.db import SpiffworkflowBaseDBModel
from spiffworkflow_backend.models.process_instance import ProcessInstanceModel

@@ -7,8 +7,6 @@ from typing import cast
from typing import Optional
from typing import TypedDict

from flask_bpmn.models.db import db
from flask_bpmn.models.db import SpiffworkflowBaseDBModel
from sqlalchemy import ForeignKey
from sqlalchemy.orm import deferred
from sqlalchemy.orm import relationship

@@ -16,6 +14,8 @@ from sqlalchemy.orm import relationship
from spiffworkflow_backend.exceptions.process_entity_not_found_error import (
    ProcessEntityNotFoundError,
)
from spiffworkflow_backend.models.db import db
from spiffworkflow_backend.models.db import SpiffworkflowBaseDBModel
from spiffworkflow_backend.models.process_instance import ProcessInstanceModel
from spiffworkflow_backend.models.user import UserModel
from spiffworkflow_backend.services.process_instance_processor import (

@@ -1,10 +1,11 @@
"""Refresh_token."""
from dataclasses import dataclass

from flask_bpmn.models.db import db
from flask_bpmn.models.db import SpiffworkflowBaseDBModel
from sqlalchemy import ForeignKey

from spiffworkflow_backend.models.db import db
from spiffworkflow_backend.models.db import SpiffworkflowBaseDBModel

# from sqlalchemy.orm import relationship

# from spiffworkflow_backend.models.user import UserModel

@@ -1,11 +1,11 @@
"""Secret_model."""
from dataclasses import dataclass

from flask_bpmn.models.db import db
from flask_bpmn.models.db import SpiffworkflowBaseDBModel
from marshmallow import Schema
from sqlalchemy import ForeignKey

from spiffworkflow_backend.models.db import db
from spiffworkflow_backend.models.db import SpiffworkflowBaseDBModel
from spiffworkflow_backend.models.user import UserModel

@@ -1,12 +1,13 @@
"""Message_model."""
from dataclasses import dataclass

from flask_bpmn.models.db import db
from flask_bpmn.models.db import SpiffworkflowBaseDBModel
from flask_marshmallow import Schema # type: ignore
from marshmallow import INCLUDE
from sqlalchemy import UniqueConstraint

from spiffworkflow_backend.models.db import db
from spiffworkflow_backend.models.db import SpiffworkflowBaseDBModel


class SpecReferenceNotFoundError(Exception):
    """SpecReferenceNotFoundError."""

@@ -2,8 +2,8 @@
from dataclasses import dataclass
from typing import Optional

from flask_bpmn.models.db import db
from flask_bpmn.models.db import SpiffworkflowBaseDBModel
from spiffworkflow_backend.models.db import db
from spiffworkflow_backend.models.db import SpiffworkflowBaseDBModel


@dataclass

@@ -1,11 +1,13 @@
"""Spiff_step_details."""
from dataclasses import dataclass
from typing import Union

from flask_bpmn.models.db import db
from flask_bpmn.models.db import SpiffworkflowBaseDBModel
from sqlalchemy import ForeignKey
from sqlalchemy import UniqueConstraint
from sqlalchemy.orm import deferred

from spiffworkflow_backend.models.db import db
from spiffworkflow_backend.models.db import SpiffworkflowBaseDBModel
from spiffworkflow_backend.models.process_instance import ProcessInstanceModel

@@ -14,17 +16,25 @@ class SpiffStepDetailsModel(SpiffworkflowBaseDBModel):
    """SpiffStepDetailsModel."""

    __tablename__ = "spiff_step_details"
    __table_args__ = (
        UniqueConstraint(
            "process_instance_id", "spiff_step", name="process_instance_id_spiff_step"
        ),
    )

    id: int = db.Column(db.Integer, primary_key=True)
    process_instance_id: int = db.Column(
        ForeignKey(ProcessInstanceModel.id), nullable=False # type: ignore
    )
    # human_task_id: int = db.Column(
    # ForeignKey(HumanTaskModel.id) # type: ignore
    # )
    spiff_step: int = db.Column(db.Integer, nullable=False)
    task_json: dict = deferred(db.Column(db.JSON, nullable=False)) # type: ignore
    timestamp: float = db.Column(db.DECIMAL(17, 6), nullable=False)
    # completed_by_user_id: int = db.Column(db.Integer, nullable=True)
    # lane_assignment_id: Optional[int] = db.Column(
    # ForeignKey(GroupModel.id), nullable=True
    # )
    task_id: str = db.Column(db.String(50), nullable=False)
    task_state: str = db.Column(db.String(50), nullable=False)
    bpmn_task_identifier: str = db.Column(db.String(255), nullable=False)

    start_in_seconds: float = db.Column(db.DECIMAL(17, 6), nullable=False)

    # to fix mypy in 3.9 - not sure why syntax like:
    # float | None
    # works in other dataclass db models
    end_in_seconds: Union[float, None] = db.Column(db.DECIMAL(17, 6))

@@ -7,6 +7,7 @@ from typing import Union
import marshmallow
from marshmallow import Schema
from marshmallow_enum import EnumField # type: ignore
from SpiffWorkflow.task import TaskStateNames # type: ignore


class MultiInstanceType(enum.Enum):

@@ -115,12 +116,13 @@ class Task:
        process_model_display_name: Union[str, None] = None,
        process_group_identifier: Union[str, None] = None,
        process_model_identifier: Union[str, None] = None,
        form_schema: Union[str, None] = None,
        form_ui_schema: Union[str, None] = None,
        form_schema: Union[dict, None] = None,
        form_ui_schema: Union[dict, None] = None,
        parent: Optional[str] = None,
        event_definition: Union[dict[str, Any], None] = None,
        call_activity_process_identifier: Optional[str] = None,
        calling_subprocess_task_id: Optional[str] = None,
        task_spiff_step: Optional[int] = None,
    ):
        """__init__."""
        self.id = id

@@ -135,6 +137,7 @@ class Task:
        self.event_definition = event_definition
        self.call_activity_process_identifier = call_activity_process_identifier
        self.calling_subprocess_task_id = calling_subprocess_task_id
        self.task_spiff_step = task_spiff_step

        self.data = data
        if self.data is None:

@@ -196,6 +199,7 @@ class Task:
            "event_definition": self.event_definition,
            "call_activity_process_identifier": self.call_activity_process_identifier,
            "calling_subprocess_task_id": self.calling_subprocess_task_id,
            "task_spiff_step": self.task_spiff_step,
        }

    @classmethod

@@ -212,6 +216,12 @@ class Task:
            value for name, value in vars(cls).items() if name.startswith("FIELD_TYPE")
        ]

    @classmethod
    def task_state_name_to_int(cls, task_state_name: str) -> int:
        task_state_integers = {v: k for k, v in TaskStateNames.items()}
        task_state_int: int = task_state_integers[task_state_name]
        return task_state_int


class OptionSchema(Schema):
    """OptionSchema."""

@@ -6,11 +6,11 @@ from dataclasses import dataclass
import jwt
import marshmallow
from flask import current_app
from flask_bpmn.models.db import db
from flask_bpmn.models.db import SpiffworkflowBaseDBModel
from marshmallow import Schema
from sqlalchemy.orm import relationship

from spiffworkflow_backend.models.db import db
from spiffworkflow_backend.models.db import SpiffworkflowBaseDBModel
from spiffworkflow_backend.models.group import GroupModel

@@ -34,6 +34,9 @@ class UserModel(SpiffworkflowBaseDBModel):
    service_id = db.Column(db.String(255), nullable=False, unique=False)
    display_name = db.Column(db.String(255))
    email = db.Column(db.String(255))
    tenant_specific_field_1: str | None = db.Column(db.String(255))
    tenant_specific_field_2: str | None = db.Column(db.String(255))
    tenant_specific_field_3: str | None = db.Column(db.String(255))
    updated_at_in_seconds: int = db.Column(db.Integer)
    created_at_in_seconds: int = db.Column(db.Integer)

@@ -1,9 +1,9 @@
"""UserGroupAssignment."""
from flask_bpmn.models.db import db
from flask_bpmn.models.db import SpiffworkflowBaseDBModel
from sqlalchemy import ForeignKey
from sqlalchemy.orm import relationship

from spiffworkflow_backend.models.db import db
from spiffworkflow_backend.models.db import SpiffworkflowBaseDBModel
from spiffworkflow_backend.models.group import GroupModel
from spiffworkflow_backend.models.user import UserModel

@@ -1,9 +1,9 @@
"""UserGroupAssignment."""
from flask_bpmn.models.db import db
from flask_bpmn.models.db import SpiffworkflowBaseDBModel
from sqlalchemy import ForeignKey
from sqlalchemy.orm import relationship

from spiffworkflow_backend.models.db import db
from spiffworkflow_backend.models.db import SpiffworkflowBaseDBModel
from spiffworkflow_backend.models.group import GroupModel

@@ -1 +0,0 @@
"""__init__."""

@@ -1,187 +0,0 @@
"""APIs for dealing with process groups, process models, and process instances."""
from typing import Union

from flask import Blueprint
from flask import flash
from flask import redirect
from flask import render_template
from flask import request
from flask import url_for
from werkzeug.wrappers import Response

from spiffworkflow_backend.services.process_instance_processor import (
    ProcessInstanceProcessor,
)
from spiffworkflow_backend.services.process_instance_service import (
    ProcessInstanceService,
)
from spiffworkflow_backend.services.process_model_service import ProcessModelService
from spiffworkflow_backend.services.spec_file_service import SpecFileService
from spiffworkflow_backend.services.user_service import UserService

admin_blueprint = Blueprint(
    "admin", __name__, template_folder="templates", static_folder="static"
)

ALLOWED_BPMN_EXTENSIONS = {"bpmn", "dmn"}


@admin_blueprint.route("/process-groups", methods=["GET"])
def process_group_list() -> str:
    """Process_group_list."""
    process_groups = ProcessModelService.get_process_groups()
    return render_template("process_group_list.html", process_groups=process_groups)


@admin_blueprint.route("/process-groups/<process_group_id>", methods=["GET"])
def process_group_show(process_group_id: str) -> str:
    """Show_process_group."""
    process_group = ProcessModelService.get_process_group(process_group_id)
    return render_template("process_group_show.html", process_group=process_group)


@admin_blueprint.route("/process-models/<process_model_id>", methods=["GET"])
def process_model_show(process_model_id: str) -> Union[str, Response]:
    """Show_process_model."""
    process_model = ProcessModelService.get_process_model(process_model_id)
    files = SpecFileService.get_files(process_model, extension_filter="bpmn")
    current_file_name = process_model.primary_file_name
    if current_file_name is None:
        flash("No primary_file_name", "error")
        return redirect(url_for("admin.process_group_list"))
    bpmn_xml = SpecFileService.get_data(process_model, current_file_name)
    return render_template(
        "process_model_show.html",
        process_model=process_model,
        bpmn_xml=bpmn_xml,
        files=files,
        current_file_name=current_file_name,
    )


@admin_blueprint.route(
    "/process-models/<process_model_id>/<file_name>", methods=["GET"]
)
def process_model_show_file(process_model_id: str, file_name: str) -> str:
    """Process_model_show_file."""
    process_model = ProcessModelService.get_process_model(process_model_id)
    bpmn_xml = SpecFileService.get_data(process_model, file_name)
    files = SpecFileService.get_files(process_model, extension_filter="bpmn")
    return render_template(
        "process_model_show.html",
        process_model=process_model,
        bpmn_xml=bpmn_xml,
        files=files,
        current_file_name=file_name,
    )


@admin_blueprint.route(
    "/process-models/<process_model_id>/upload-file", methods=["POST"]
)
def process_model_upload_file(process_model_id: str) -> Response:
    """Process_model_upload_file."""
    process_model = ProcessModelService.get_process_model(process_model_id)

    if "file" not in request.files:
        flash("No file part", "error")
    request_file = request.files["file"]
    # If the user does not select a file, the browser submits an
    # empty file without a filename.
    if request_file.filename == "" or request_file.filename is None:
        flash("No selected file", "error")
    else:
        if request_file and _allowed_file(request_file.filename):
            if request_file.filename is not None:
                SpecFileService.add_file(
                    process_model, request_file.filename, request_file.stream.read()
                )
                ProcessModelService.save_process_model(process_model)

    return redirect(
        url_for("admin.process_model_show", process_model_id=process_model.id)
    )


@admin_blueprint.route(
    "/process_models/<process_model_id>/edit/<file_name>", methods=["GET"]
)
def process_model_edit(process_model_id: str, file_name: str) -> str:
    """Edit_bpmn."""
    process_model = ProcessModelService.get_process_model(process_model_id)
    bpmn_xml = SpecFileService.get_data(process_model, file_name)

    return render_template(
        "process_model_edit.html",
        bpmn_xml=bpmn_xml.decode("utf-8"),
        process_model=process_model,
        file_name=file_name,
    )


@admin_blueprint.route(
    "/process-models/<process_model_id>/save/<file_name>", methods=["POST"]
)
def process_model_save(process_model_id: str, file_name: str) -> Union[str, Response]:
    """Process_model_save."""
    process_model = ProcessModelService.get_process_model(process_model_id)
    SpecFileService.update_file(process_model, file_name, request.get_data())
    if process_model.primary_file_name is None:
        flash("No primary_file_name", "error")
        return redirect(url_for("admin.process_group_list"))
    bpmn_xml = SpecFileService.get_data(process_model, process_model.primary_file_name)
    return render_template(
        "process_model_edit.html",
        bpmn_xml=bpmn_xml.decode("utf-8"),
        process_model=process_model,
        file_name=file_name,
    )


@admin_blueprint.route("/process-models/<process_model_id>/run", methods=["GET"])
def process_model_run(process_model_id: str) -> Union[str, Response]:
    """Process_model_run."""
    user = UserService.create_user("Mr. Test", "internal", "Mr. Test")
    process_instance = (
        ProcessInstanceService.create_process_instance_from_process_model_identifier(
            process_model_id, user
        )
    )
    processor = ProcessInstanceProcessor(process_instance)
    processor.do_engine_steps()
    result = processor.get_data()

    process_model = ProcessModelService.get_process_model(process_model_id)
    files = SpecFileService.get_files(process_model, extension_filter="bpmn")
    current_file_name = process_model.primary_file_name
    if current_file_name is None:
        flash("No primary_file_name", "error")
        return redirect(url_for("admin.process_group_list"))
    bpmn_xml = SpecFileService.get_data(process_model, current_file_name)

    return render_template(
        "process_model_show.html",
        process_model=process_model,
        bpmn_xml=bpmn_xml,
        result=result,
        files=files,
        current_file_name=current_file_name,
    )


# def _find_or_create_user(username: str = "test_user1") -> Any:
# """Find_or_create_user."""
# user = UserModel.query.filter_by(username=username).first()
# if user is None:
# user = UserModel(username=username)
# db.session.add(user)
# db.session.commit()
# return user


def _allowed_file(filename: str) -> bool:
    """_allowed_file."""
    return (
        "." in filename
        and filename.rsplit(".", 1)[1].lower() in ALLOWED_BPMN_EXTENSIONS
    )

@@ -1,26 +0,0 @@
import BpmnViewer from "bpmn-js";

var viewer = new BpmnViewer({
  container: "#canvas",
});

viewer
  .importXML(pizzaDiagram)
  .then(function (result) {
    const { warnings } = result;

    console.log("success !", warnings);

    viewer.get("canvas").zoom("fit-viewport");
  })
  .catch(function (err) {
    const { warnings, message } = err;

    console.log("something went wrong:", warnings, message);
  });

export function sayHello() {
  console.log("hello");
}

window.foo = "bar";

File diff suppressed because it is too large

@@ -1,18 +0,0 @@
{
  "name": "spiffworkflow-backend",
  "version": "0.0.0",
  "description": "Serve up Spiff Workflows to the World!",
  "main": "index.js",
  "scripts": {
    "test": "echo \"Error: no test specified\" && exit 1"
  },
  "author": "",
  "license": "ISC",
  "dependencies": {
    "bpmn-js": "^9.1.0",
    "bpmn-js-properties-panel": "^1.1.1"
  },
  "devDependencies": {
    "webpack-cli": "^4.9.2"
  }
}

@@ -1,2 +0,0 @@
.example {
}

@@ -1,23 +0,0 @@
<!DOCTYPE html>
<html>
  <head>
    {% block head %}
    <link
      rel="stylesheet"
      href="{{ url_for('admin.static', filename='style.css') }}"
    />
    <title>{% block title %}{% endblock %}</title>
    {% endblock %}
  </head>
  <body>
    <h1>{{ self.title() }}</h1>
    {% with messages = get_flashed_messages(with_categories=true) %} {% if
    messages %}
    <ul class="flashes">
      {% for category, message in messages %}
      <li class="{{ category }}">{{ message }}</li>
      {% endfor %}
    </ul>
    {% endif %} {% endwith %} {% block content %}{% endblock %}
  </body>
</html>

@@ -1,18 +0,0 @@
{% extends "layout.html" %} {% block title %}Process Groups{% endblock %} {%
block content %}
<table>
  <tbody>
    {# here we iterate over every item in our list#} {% for process_group in
    process_groups %}
    <tr>
      <td>
        <a
          href="{{ url_for('admin.process_group_show', process_group_id=process_group.id) }}"
          >{{ process_group.display_name }}</a
        >
      </td>
    </tr>
    {% endfor %}
  </tbody>
</table>
{% endblock %}

@@ -1,25 +0,0 @@
{% extends "layout.html" %}
{% block title %}Process Group: {{ process_group.id }}{% endblock %}
{% block content %}
<button
  type="button"
  onclick="window.location.href='{{ url_for( 'admin.process_group_list') }}';"
>
  Back
</button>
<table>
  <tbody>
    {# here we iterate over every item in our list#}
    {% for process_model in process_group.process_models %}
    <tr>
      <td>
        <a
          href="{{ url_for('admin.process_model_show', process_model_id=process_model.id) }}"
          >{{ process_model.display_name }}</a
        >
      </td>
    </tr>
    {% endfor %}
  </tbody>
</table>
{% endblock %}
@@ -1,167 +0,0 @@
-{% extends "layout.html" %} {% block title %}
-Process Model Edit: {{ process_model.id }}
-{% endblock %}
-
-{% block head %}
-<meta charset="UTF-8" />
-
-<!-- example styles -->
-<!-- required modeler styles -->
-<link rel="stylesheet" href="https://unpkg.com/bpmn-js@9.1.0/dist/assets/bpmn-js.css" />
-<link rel="stylesheet" href="https://unpkg.com/bpmn-js@9.1.0/dist/assets/diagram-js.css" />
-<link rel="stylesheet" href="https://unpkg.com/bpmn-js@9.1.0/dist/assets/bpmn-font/css/bpmn.css" />
-
-<link rel="stylesheet" href="https://unpkg.com/bpmn-js-properties-panel/dist/assets/properties-panel.css">
-<link rel="stylesheet" href="https://unpkg.com/bpmn-js-properties-panel/dist/assets/element-templates.css">
-
-<!-- modeler distro -->
-<script src="https://unpkg.com/bpmn-js@9.1.0/dist/bpmn-modeler.development.js"></script>
-
-<!-- needed for this example only -->
-<script src="https://unpkg.com/jquery@3.3.1/dist/jquery.js"></script>
-
-<!-- example styles -->
-<style>
-  html, body, #canvas {
-    height: 100%;
-    padding: 0;
-    margin: 0;
-  }
-
-  .diagram-note {
-    background-color: rgba(66, 180, 21, 0.7);
-    color: White;
-    border-radius: 5px;
-    font-family: Arial;
-    font-size: 12px;
-    padding: 5px;
-    min-height: 16px;
-    width: 50px;
-    text-align: center;
-  }
-
-  .needs-discussion:not(.djs-connection) .djs-visual > :nth-child(1) {
-    stroke: rgba(66, 180, 21, 0.7) !important; /* color elements as red */
-  }
-
-  #save-button {
-    position: fixed;
-    bottom: 20px;
-    left: 20px;
-  }
-</style>
-{% endblock %}
-
-{% block content %}
-<div id="result">{{ result }}</div>
-<button
-  type="button"
-  onclick="window.location.href='{{ url_for( 'admin.process_model_show_file', process_model_id=process_model.id, file_name=file_name ) }}';"
->
-  Back
-</button>
-<button type="button" onclick="exportDiagram()">Save</button>
-<!-- <div class="modeler"> -->
-<div id="canvas"></div>
-<div id="properties"></div>
-<!-- </div> -->
-
-<meta id="bpmn_xml" data-name="{{bpmn_xml}}" />
-<script>
-  // import BpmnModeler from '/admin/static/node_modules/bpmn-js/lib/Modeler.js';
-  // import {
-  //   BpmnPropertiesPanelModule,
-  //   BpmnPropertiesProviderModule,
-  // } from '/admin/static/node_modules/bpmn-js-properties-panel/dist/index.js';
-  //
-  // const bpmnModeler = new BpmnModeler({
-  //   container: '#canvas',
-  //   propertiesPanel: {
-  //     parent: '#properties'
-  //   },
-  //   additionalModules: [
-  //     BpmnPropertiesPanelModule,
-  //     BpmnPropertiesProviderModule
-  //   ]
-  // });
-
-  // modeler instance
-  var bpmnModeler = new BpmnJS({
-    container: "#canvas",
-    keyboard: {
-      bindTo: window,
-    },
-  });
-
-  /**
-   * Save diagram contents and print them to the console.
-   */
-  async function exportDiagram() {
-    try {
-      var data = await bpmnModeler.saveXML({ format: true });
-      //POST request with body equal on data in JSON format
-      fetch("/admin/process-models/{{ process_model.id }}/save/{{ file_name }}", {
-        method: "POST",
-        headers: {
-          "Content-Type": "text/xml",
-        },
-        body: data.xml,
-      })
-        .then((response) => response.json())
-        //Then with the data from the response in JSON...
-        .then((data) => {
-          console.log("Success:", data);
-        })
-        //Then with the error genereted...
-        .catch((error) => {
-          console.error("Error:", error);
-        });
-
-      alert("Diagram exported. Check the developer tools!");
-    } catch (err) {
-      console.error("could not save BPMN 2.0 diagram", err);
-    }
-  }
-
-  /**
-   * Open diagram in our modeler instance.
-   *
-   * @param {String} bpmnXML diagram to display
-   */
-  async function openDiagram(bpmnXML) {
-    // import diagram
-    try {
-      await bpmnModeler.importXML(bpmnXML);
-
-      // access modeler components
-      var canvas = bpmnModeler.get("canvas");
-      var overlays = bpmnModeler.get("overlays");
-
-      // zoom to fit full viewport
-      canvas.zoom("fit-viewport");
-
-      // attach an overlay to a node
-      overlays.add("SCAN_OK", "note", {
-        position: {
-          bottom: 0,
-          right: 0,
-        },
-        html: '<div class="diagram-note">Mixed up the labels?</div>',
-      });
-
-      // add marker
-      canvas.addMarker("SCAN_OK", "needs-discussion");
-    } catch (err) {
-      console.error("could not import BPMN 2.0 diagram", err);
-    }
-  }
-
-  // trying to use the python variable bpmn_xml directly causes the xml to have escape sequences
-  // and using the meta tag seems to help with that
-  var bpmn_xml = $("#bpmn_xml").data();
-  openDiagram(bpmn_xml.name);
-
-  // wire save button
-  $("#save-button").click(exportDiagram);
-</script>
-{% endblock %}
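The meta-tag trick at the end of this deleted template is worth noting: rather than interpolating the XML into a JavaScript string (where Jinja escaping mangles it), the XML is placed in a `data-name` attribute and read back with jQuery's `.data()`, since the browser decodes the escaped attribute value when parsing. A minimal sketch of the server side, with assumed route, paths, and variable names:

```python
# Hypothetical sketch of the rendering side; the route and file paths are
# assumptions for illustration, not the backend's actual API.
from flask import Flask, render_template

app = Flask(__name__)


@app.route("/admin/process-models/<process_model_id>/edit/<file_name>")
def process_model_edit(process_model_id: str, file_name: str):
    with open(f"models/{process_model_id}/{file_name}") as f:
        bpmn_xml = f.read()
    # Jinja auto-escapes bpmn_xml inside the data-name attribute; the browser
    # un-escapes it on parse, so $("#bpmn_xml").data() sees the original XML.
    return render_template(
        "process_model_edit.html",
        bpmn_xml=bpmn_xml,
        process_model_id=process_model_id,
        file_name=file_name,
    )
```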
@@ -1,159 +0,0 @@
-{% extends "layout.html" %} {% block title %}Process Model: {{ process_model.id
-}}{% endblock %} {% block head %} {{ super() }}
-<meta charset="UTF-8" />
-
-<script src="{{ url_for('admin.static', filename='node_modules/bpmn-js/dist/bpmn-viewer.development.js') }}"></script>
-
-<!-- viewer distro (without pan and zoom) -->
-<!--
-<script src="https://unpkg.com/bpmn-js@9.1.0/dist/bpmn-viewer.development.js"></script>
--->
-
-<!-- required viewer styles -->
-<link
-  rel="stylesheet"
-  href="https://unpkg.com/bpmn-js@9.1.0/dist/assets/bpmn-js.css"
-/>
-
-<!-- viewer distro (with pan and zoom) -->
-<script src="https://unpkg.com/bpmn-js@9.1.0/dist/bpmn-navigated-viewer.development.js"></script>
-
-<!-- needed for this example only -->
-<script src="https://unpkg.com/jquery@3.3.1/dist/jquery.js"></script>
-
-<!-- example styles -->
-<style>
-  html,
-  body,
-  #canvas {
-    height: 90%;
-    padding: 0;
-    margin: 0;
-  }
-
-  .diagram-note {
-    background-color: rgba(66, 180, 21, 0.7);
-    color: White;
-    border-radius: 5px;
-    font-family: Arial;
-    font-size: 12px;
-    padding: 5px;
-    min-height: 16px;
-    width: 50px;
-    text-align: center;
-  }
-
-  .needs-discussion:not(.djs-connection) .djs-visual > :nth-child(1) {
-    stroke: rgba(66, 180, 21, 0.7) !important; /* color elements as red */
-  }
-</style>
-{% endblock %} {% block content %}
-<div id="result">{{ result }}</div>
-<button
-  type="button"
-  onclick="window.location.href='{{ url_for( 'admin.process_group_show', process_group_id=process_model.process_group_id ) }}';"
->
-  Back
-</button>
-<button
-  type="button"
-  onclick="window.location.href='{{ url_for( 'admin.process_model_run' , process_model_id=process_model.id ) }}';"
->
-  Run
-</button>
-<button
-  type="button"
-  onclick="window.location.href='{{ url_for( 'admin.process_model_edit' , process_model_id=process_model.id, file_name=current_file_name ) }}';"
->
-  Edit
-</button>
-
-{% if files %}
-<h3>BPMN Files</h3>
-<ul>
-  {% for file in files %}
-  <li>
-    <a
-      href="{{ url_for('admin.process_model_show_file', process_model_id=process_model.id, file_name=file.name) }}"
-      >{{ file.name }}</a
-    >
-    {% if file.name == current_file_name %} (current) {% endif %}
-  </li>
-  {% endfor %}
-</ul>
-{% endif %}
-
-<form
-  method="post"
-  action="/admin/process-models/{{process_model.id}}/upload-file"
-  enctype="multipart/form-data"
->
-  <input type="file" name="file" />
-  <input type="submit" value="Upload" />
-</form>
-
-<div id="canvas"></div>
-
-<meta id="bpmn_xml" data-name="{{bpmn_xml}}" />
-<script>
-  var diagramUrl =
-    "https://cdn.staticaly.com/gh/bpmn-io/bpmn-js-examples/dfceecba/starter/diagram.bpmn";
-
-  // viewer instance
-  var bpmnViewer = new BpmnJS({
-    container: "#canvas",
-  });
-
-  /**
-   * Open diagram in our viewer instance.
-   *
-   * @param {String} bpmnXML diagram to display
-   */
-  async function openDiagram(bpmnXML) {
-    // import diagram
-    try {
-      await bpmnViewer.importXML(bpmnXML);
-
-      // access viewer components
-      var canvas = bpmnViewer.get("canvas");
-      var overlays = bpmnViewer.get("overlays");
-
-      // zoom to fit full viewport
-      canvas.zoom("fit-viewport");
-
-      // attach an overlay to a node
-      overlays.add("SCAN_OK", "note", {
-        position: {
-          bottom: 0,
-          right: 0,
-        },
-        html: '<div class="diagram-note">Mixed up the labels?</div>',
-      });
-
-      // add marker
-      canvas.addMarker("SCAN_OK", "needs-discussion");
-    } catch (err) {
-      console.error("could not import BPMN 2.0 diagram", err);
-    }
-  }
-  var bpmn_xml = $("#bpmn_xml").data();
-  openDiagram(bpmn_xml.name);
-
-  // load external diagram file via AJAX and open it
-  //$.get(diagramUrl, openDiagram, 'text');
-</script>
-<!--
-  Thanks for trying out our BPMN toolkit!
-  If you'd like to learn more about what our library,
-  continue with some more basic examples:
-  * https://github.com/bpmn-io/bpmn-js-examples/overlays
-  * https://github.com/bpmn-io/bpmn-js-examples/interaction
-  * https://github.com/bpmn-io/bpmn-js-examples/colors
-  * https://github.com/bpmn-io/bpmn-js-examples/commenting
-  To get a bit broader overview over how bpmn-js works,
-  follow our walkthrough:
-  * https://bpmn.io/toolkit/bpmn-js/walkthrough/
-  Related starters:
-  * https://raw.githubusercontent.com/bpmn-io/bpmn-js-examples/starter/modeler.html
--->
-{% endblock %}
@@ -9,8 +9,8 @@ from flask import g
 from flask import jsonify
 from flask import make_response
 from flask.wrappers import Response
-from flask_bpmn.api.api_error import ApiError

+from spiffworkflow_backend.exceptions.api_error import ApiError
 from spiffworkflow_backend.models.message_correlation import MessageCorrelationModel
 from spiffworkflow_backend.models.message_instance import MessageInstanceModel
 from spiffworkflow_backend.models.message_model import MessageModel
@@ -13,16 +13,18 @@
 <div class="error">{{error_message}}</div>
 <div class="login">
   <form id="login" method="post" action="{{ url_for('openid.form_submit') }}">
-    <input type="text" class="cds--text-input" name="Uname" id="Uname" placeholder="Username">
+    <p><b>Important:</b> This login form is for demonstration purposes only. In production systems you should
+      be using a real Open ID System.</p>
+    <input type="text" class="cds--text-input" name="Uname" id="username" placeholder="Username">
     <br><br>
-    <input type="Password" class="cds--text-input" name="Pass" id="Pass" placeholder="Password">
+    <input type="Password" class="cds--text-input" name="Pass" id="password" placeholder="Password">
     <br><br>
     <input type="hidden" name="state" value="{{state}}"/>
     <input type="hidden" name="response_type" value="{{response_type}}"/>
     <input type="hidden" name="client_id" value="{{client_id}}"/>
     <input type="hidden" name="scope" value="{{scope}}"/>
     <input type="hidden" name="redirect_uri" value="{{redirect_uri}}"/>
-    <input type="submit" name="log" class="cds--btn cds--btn--primary" value="Log In">
+    <input type="submit" name="log" class="cds--btn cds--btn--primary" id="spiff-login-button" value="Log In">
     <br><br>
     <!-- should maybe add this stuff in eventually, but this is just for testing.
     <input type="checkbox" id="check">
@@ -1,7 +1,9 @@
 """APIs for dealing with process groups, process models, and process instances."""
+import base64
 import json
 from typing import Any
 from typing import Dict
+from typing import Optional

 import flask.wrappers
 from flask import Blueprint
@@ -11,12 +13,12 @@ from flask import jsonify
 from flask import make_response
 from flask import request
 from flask.wrappers import Response
-from flask_bpmn.api.api_error import ApiError
-from flask_bpmn.models.db import db

+from spiffworkflow_backend.exceptions.api_error import ApiError
 from spiffworkflow_backend.exceptions.process_entity_not_found_error import (
     ProcessEntityNotFoundError,
 )
+from spiffworkflow_backend.models.db import db
 from spiffworkflow_backend.models.principal import PrincipalModel
 from spiffworkflow_backend.models.process_instance import ProcessInstanceModel
 from spiffworkflow_backend.models.process_instance import ProcessInstanceModelSchema
@@ -81,18 +83,42 @@ def process_list() -> Any:
     return SpecReferenceSchema(many=True).dump(references)


-def process_data_show(
+def _process_data_fetcher(
     process_instance_id: int,
     process_data_identifier: str,
     modified_process_model_identifier: str,
+    download_file_data: bool,
+    index: Optional[int] = None,
 ) -> flask.wrappers.Response:
     """Process_data_show."""
     process_instance = _find_process_instance_by_id_or_raise(process_instance_id)
     processor = ProcessInstanceProcessor(process_instance)
     all_process_data = processor.get_data()
-    process_data_value = None
-    if process_data_identifier in all_process_data:
-        process_data_value = all_process_data[process_data_identifier]
+    process_data_value = all_process_data.get(process_data_identifier)
+
+    if process_data_value is None:
+        script_engine_last_result = processor._script_engine.environment.last_result()
+        process_data_value = script_engine_last_result.get(process_data_identifier)
+
+    if process_data_value is not None and index is not None:
+        process_data_value = process_data_value[index]
+
+    if (
+        download_file_data
+        and isinstance(process_data_value, str)
+        and process_data_value.startswith("data:")
+    ):
+        parts = process_data_value.split(";")
+        mimetype = parts[0][4:]
+        filename = parts[1].split("=")[1]
+        base64_value = parts[2].split(",")[1]
+        file_contents = base64.b64decode(base64_value)
+
+        return Response(
+            file_contents,
+            mimetype=mimetype,
+            headers={"Content-disposition": f"attachment; filename={filename}"},
+        )

     return make_response(
         jsonify(
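The download branch above assumes process data files are stored as data URLs of the shape `data:<mimetype>;name=<filename>;base64,<payload>`. A standalone sketch of that parsing, mirroring the diff rather than importing the backend (note: the diff slices `parts[0][4:]`, which retains the leading `:`; the sketch strips the full `data:` prefix for clarity):

```python
import base64


def parse_data_url(process_data_value: str) -> tuple[str, str, bytes]:
    # "data:text/plain;name=hello.txt;base64,aGVsbG8=" -> three ";" parts.
    parts = process_data_value.split(";")
    mimetype = parts[0][len("data:"):]     # the diff uses parts[0][4:]
    filename = parts[1].split("=")[1]      # "name=hello.txt" -> "hello.txt"
    base64_value = parts[2].split(",")[1]  # "base64,aGVsbG8=" -> "aGVsbG8="
    return mimetype, filename, base64.b64decode(base64_value)


assert parse_data_url("data:text/plain;name=hello.txt;base64,aGVsbG8=") == (
    "text/plain",
    "hello.txt",
    b"hello",
)
```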
@@ -105,6 +131,37 @@ def process_data_show(
     )


+def process_data_show(
+    process_instance_id: int,
+    process_data_identifier: str,
+    modified_process_model_identifier: str,
+) -> flask.wrappers.Response:
+    """Process_data_show."""
+    return _process_data_fetcher(
+        process_instance_id,
+        process_data_identifier,
+        modified_process_model_identifier,
+        False,
+        None,
+    )
+
+
+def process_data_file_download(
+    process_instance_id: int,
+    process_data_identifier: str,
+    modified_process_model_identifier: str,
+    index: Optional[int] = None,
+) -> flask.wrappers.Response:
+    """Process_data_file_download."""
+    return _process_data_fetcher(
+        process_instance_id,
+        process_data_identifier,
+        modified_process_model_identifier,
+        True,
+        index,
+    )
+
+
 # sample body:
 # {"ref": "refs/heads/main", "repository": {"name": "sample-process-models",
 # "full_name": "sartography/sample-process-models", "private": False .... }}
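The sample-body comment documents the subset of a push-webhook payload the handler cares about. A hedged sketch of checking that a payload refers to the configured repository; the helper name and the clone-URL comparison are assumptions for illustration, not the backend's actual API:

```python
# Hypothetical illustration of validating a webhook payload's repository.
def webhook_repo_matches(webhook_body: dict, configured_clone_url: str) -> bool:
    # e.g. {"ref": "refs/heads/main",
    #       "repository": {"full_name": "sartography/sample-process-models", ...}}
    full_name = webhook_body.get("repository", {}).get("full_name", "")
    return full_name != "" and full_name in configured_clone_url


body = {
    "ref": "refs/heads/main",
    "repository": {
        "name": "sample-process-models",
        "full_name": "sartography/sample-process-models",
    },
}
assert webhook_repo_matches(body, "git@github.com:sartography/sample-process-models.git")
```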
@@ -250,7 +307,7 @@ def manual_complete_task(

 def _commit_and_push_to_git(message: str) -> None:
     """Commit_and_push_to_git."""
-    if current_app.config["GIT_COMMIT_ON_SAVE"]:
+    if current_app.config["SPIFFWORKFLOW_BACKEND_GIT_COMMIT_ON_SAVE"]:
         git_output = GitService.commit(message=message)
         current_app.logger.info(f"git output: {git_output}")
     else:
@@ -8,8 +8,8 @@ from flask import g
 from flask import jsonify
 from flask import make_response
 from flask.wrappers import Response
-from flask_bpmn.api.api_error import ApiError

+from spiffworkflow_backend.exceptions.api_error import ApiError
 from spiffworkflow_backend.exceptions.process_entity_not_found_error import (
     ProcessEntityNotFoundError,
 )
@@ -11,12 +11,12 @@ from flask import jsonify
 from flask import make_response
 from flask import request
 from flask.wrappers import Response
-from flask_bpmn.api.api_error import ApiError
-from flask_bpmn.models.db import db
 from SpiffWorkflow.task import TaskState  # type: ignore
 from sqlalchemy import and_
 from sqlalchemy import or_

+from spiffworkflow_backend.exceptions.api_error import ApiError
+from spiffworkflow_backend.models.db import db
 from spiffworkflow_backend.models.human_task import HumanTaskModel
 from spiffworkflow_backend.models.human_task_user import HumanTaskUserModel
 from spiffworkflow_backend.models.process_instance import ProcessInstanceApiSchema
@@ -36,6 +36,7 @@ from spiffworkflow_backend.models.spec_reference import SpecReferenceCache
 from spiffworkflow_backend.models.spec_reference import SpecReferenceNotFoundError
 from spiffworkflow_backend.models.spiff_logging import SpiffLoggingModel
 from spiffworkflow_backend.models.spiff_step_details import SpiffStepDetailsModel
+from spiffworkflow_backend.models.task import Task
 from spiffworkflow_backend.models.user import UserModel
 from spiffworkflow_backend.routes.process_api_blueprint import (
     _find_process_instance_by_id_or_raise,
@@ -72,6 +73,18 @@ def process_instance_create(
     process_model_identifier = _un_modify_modified_process_model_id(
         modified_process_model_identifier
     )
+
+    process_model = _get_process_model(process_model_identifier)
+    if process_model.primary_file_name is None:
+        raise ApiError(
+            error_code="process_model_missing_primary_bpmn_file",
+            message=(
+                f"Process Model '{process_model_identifier}' does not have a primary"
+                " bpmn file. One must be set in order to instantiate this model."
+            ),
+            status_code=400,
+        )
+
     process_instance = (
         ProcessInstanceService.create_process_instance_from_process_model_identifier(
             process_model_identifier, g.user
@@ -102,6 +115,7 @@ def process_instance_run(
     )

     processor = ProcessInstanceProcessor(process_instance)
+    processor.lock_process_instance("Web")

     if do_engine_steps:
         try:
@@ -111,6 +125,7 @@ def process_instance_run(
             raise e
         except Exception as e:
             ErrorHandlingService().handle_error(processor, e)
+            # fixme: this is going to point someone to the wrong task - it's misinformation for errors in sub-processes
             task = processor.bpmn_process_instance.last_task
             raise ApiError.from_task(
                 error_code="unknown_exception",
@@ -118,8 +133,10 @@ def process_instance_run(
                 status_code=400,
                 task=task,
             ) from e
+        finally:
+            processor.unlock_process_instance("Web")

-    if not current_app.config["RUN_BACKGROUND_SCHEDULER"]:
+    if not current_app.config["SPIFFWORKFLOW_BACKEND_RUN_BACKGROUND_SCHEDULER"]:
         MessageService.process_message_instances()

     process_instance_api = ProcessInstanceService.processor_to_process_instance_api(
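The new `finally` clause guarantees the instance lock is released whether engine steps succeed, raise a handled error, or re-raise. The general shape of that pattern, sketched with a stand-in processor class (all names here are assumptions, not the backend's classes):

```python
# Minimal sketch of the lock/unlock-in-finally pattern; Processor is a stand-in.
class Processor:
    def lock_process_instance(self, source: str) -> None:
        print(f"locked by {source}")

    def unlock_process_instance(self, source: str) -> None:
        print(f"unlocked by {source}")

    def do_engine_steps(self) -> None:
        raise RuntimeError("boom")


processor = Processor()
processor.lock_process_instance("Web")
try:
    processor.do_engine_steps()
except RuntimeError as e:
    print(f"handled: {e}")
finally:
    # runs on success, on handled errors, and on re-raised errors alike
    processor.unlock_process_instance("Web")
```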
@@ -182,16 +199,18 @@ def process_instance_log_list(
     )
     if not detailed:
         log_query = log_query.filter(
-            # this was the previous implementation, where we only show completed tasks and skipped tasks.
-            # maybe we want to iterate on this in the future (in a third tab under process instance logs?)
-            # or_(
-            #     SpiffLoggingModel.message.in_(["State change to COMPLETED"]),  # type: ignore
-            #     SpiffLoggingModel.message.like("Skipped task %"),  # type: ignore
-            # )
+            # 1. this was the previous implementation, where we only show completed tasks and skipped tasks.
+            # maybe we want to iterate on this in the future (in a third tab under process instance logs?)
+            # or_(
+            #     SpiffLoggingModel.message.in_(["State change to COMPLETED"]),  # type: ignore
+            #     SpiffLoggingModel.message.like("Skipped task %"),  # type: ignore
+            # )
+            # 2. We included ["End Event", "Default Start Event"] along with Default Throwing Event, but feb 2023
+            # we decided to remove them, since they get really chatty when there are lots of subprocesses and call activities.
             and_(
                 SpiffLoggingModel.message.in_(["State change to COMPLETED"]),  # type: ignore
                 SpiffLoggingModel.bpmn_task_type.in_(  # type: ignore
-                    ["Default Throwing Event", "End Event", "Default Start Event"]
+                    ["Default Throwing Event"]
                ),
            )
        )
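In the simple (non-detailed) log view, the filter now keeps only `State change to COMPLETED` messages for `Default Throwing Event` tasks, dropping the chatty start and end events. A hedged sketch of the same predicate applied in memory to plain dicts (field names taken from the diff; the rows are invented test data):

```python
# Illustration only: the filter's semantics on plain dicts instead of SQLAlchemy.
logs = [
    {"message": "State change to COMPLETED", "bpmn_task_type": "Default Throwing Event"},
    {"message": "State change to COMPLETED", "bpmn_task_type": "End Event"},
    {"message": "State change to COMPLETED", "bpmn_task_type": "Default Start Event"},
    {"message": "Skipped task foo", "bpmn_task_type": "Default Throwing Event"},
]

simple_view = [
    log
    for log in logs
    if log["message"] in ["State change to COMPLETED"]
    and log["bpmn_task_type"] in ["Default Throwing Event"]
]
# End Event and Default Start Event rows are now excluded from the simple view.
assert len(simple_view) == 1
```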
@@ -541,22 +560,71 @@ def process_instance_task_list(
     get_task_data: bool = False,
 ) -> flask.wrappers.Response:
     """Process_instance_task_list."""
+    step_detail_query = db.session.query(SpiffStepDetailsModel).filter(
+        SpiffStepDetailsModel.process_instance_id == process_instance.id,
+    )
+
     if spiff_step > 0:
-        step_detail = (
-            db.session.query(SpiffStepDetailsModel)
-            .filter(
-                SpiffStepDetailsModel.process_instance_id == process_instance.id,
-                SpiffStepDetailsModel.spiff_step == spiff_step,
-            )
-            .first()
+        step_detail_query = step_detail_query.filter(
+            SpiffStepDetailsModel.spiff_step <= spiff_step
         )
-        if step_detail is not None and process_instance.bpmn_json is not None:
-            bpmn_json = json.loads(process_instance.bpmn_json)
-            bpmn_json["tasks"] = step_detail.task_json["tasks"]
-            bpmn_json["subprocesses"] = step_detail.task_json["subprocesses"]
-            process_instance.bpmn_json = json.dumps(bpmn_json)
+
+    step_details = step_detail_query.all()
+    bpmn_json = json.loads(process_instance.bpmn_json or "{}")
+    tasks = bpmn_json["tasks"]
+    subprocesses = bpmn_json["subprocesses"]
+
+    steps_by_id = {step_detail.task_id: step_detail for step_detail in step_details}
+
+    subprocess_state_overrides = {}
+    for step_detail in step_details:
+        if step_detail.task_id in tasks:
+            task_data = (
+                step_detail.task_json["task_data"] | step_detail.task_json["python_env"]
+            )
+            if task_data is None:
+                task_data = {}
+            tasks[step_detail.task_id]["data"] = task_data
+            tasks[step_detail.task_id]["state"] = Task.task_state_name_to_int(
+                step_detail.task_state
+            )
+        else:
+            for subprocess_id, subprocess_info in subprocesses.items():
+                if step_detail.task_id in subprocess_info["tasks"]:
+                    task_data = (
+                        step_detail.task_json["task_data"]
+                        | step_detail.task_json["python_env"]
+                    )
+                    if task_data is None:
+                        task_data = {}
+                    subprocess_info["tasks"][step_detail.task_id]["data"] = task_data
+                    subprocess_info["tasks"][step_detail.task_id]["state"] = (
+                        Task.task_state_name_to_int(step_detail.task_state)
+                    )
+                    subprocess_state_overrides[subprocess_id] = TaskState.WAITING
+
+    for subprocess_info in subprocesses.values():
+        for spiff_task_id in subprocess_info["tasks"]:
+            if spiff_task_id not in steps_by_id:
+                subprocess_info["tasks"][spiff_task_id]["data"] = {}
+                subprocess_info["tasks"][spiff_task_id]["state"] = (
+                    subprocess_state_overrides.get(spiff_task_id, TaskState.FUTURE)
+                )
+    for spiff_task_id in tasks:
+        if spiff_task_id not in steps_by_id:
+            tasks[spiff_task_id]["data"] = {}
+            tasks[spiff_task_id]["state"] = subprocess_state_overrides.get(
+                spiff_task_id, TaskState.FUTURE
+            )
+
+    process_instance.bpmn_json = json.dumps(bpmn_json)

     processor = ProcessInstanceProcessor(process_instance)
+    spiff_task = processor.__class__.get_task_by_bpmn_identifier(
+        step_details[-1].bpmn_task_identifier, processor.bpmn_process_instance
+    )
+    if spiff_task is not None and spiff_task.state != TaskState.READY:
+        spiff_task.complete()

     spiff_tasks = None
     if all_tasks:
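The task-data merge above relies on Python 3.9's dict union operator (PEP 584): `task_data | python_env` builds a new dict in which keys from the script-engine environment win over workflow task data on conflict. A small sketch of that precedence, with invented values:

```python
# Dict union: the right-hand operand's keys take precedence on conflict.
task_data = {"invoice_id": 42, "total": 100}
python_env = {"total": 125, "validated": True}

merged = task_data | python_env
assert merged == {"invoice_id": 42, "total": 125, "validated": True}
# (The diff additionally guards against the merged value being None.)
```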
@@ -564,20 +632,27 @@ def process_instance_task_list(
     else:
         spiff_tasks = processor.get_all_user_tasks()

-    subprocesses_by_child_task_ids, task_typename_by_task_id = (
-        processor.get_subprocesses_by_child_task_ids()
-    )
+    (
+        subprocesses_by_child_task_ids,
+        task_typename_by_task_id,
+    ) = processor.get_subprocesses_by_child_task_ids()
     processor.get_highest_level_calling_subprocesses_by_child_task_ids(
         subprocesses_by_child_task_ids, task_typename_by_task_id
     )

     tasks = []
     for spiff_task in spiff_tasks:
+        task_spiff_step: Optional[int] = None
+        if str(spiff_task.id) in steps_by_id:
+            task_spiff_step = steps_by_id[str(spiff_task.id)].spiff_step
         calling_subprocess_task_id = subprocesses_by_child_task_ids.get(
             str(spiff_task.id), None
         )
         task = ProcessInstanceService.spiff_task_to_api_task(
-            processor, spiff_task, calling_subprocess_task_id=calling_subprocess_task_id
+            processor,
+            spiff_task,
+            calling_subprocess_task_id=calling_subprocess_task_id,
+            task_spiff_step=task_spiff_step,
         )
         if get_task_data:
             task.data = spiff_task.data
@@ -657,6 +732,9 @@ def _get_process_instance(
             spec_reference.process_model_id
         )
         name_of_file_with_diagram = spec_reference.file_name
+        process_instance.process_model_with_diagram_identifier = (
+            process_model_with_diagram.id
+        )
     else:
         process_model_with_diagram = _get_process_model(process_model_identifier)
         if process_model_with_diagram.primary_file_name:
@@ -678,7 +756,8 @@ def _get_process_instance(
     )
     process_instance.bpmn_xml_file_contents = bpmn_xml_file_contents

-    return make_response(jsonify(process_instance), 200)
+    process_instance_as_dict = process_instance.serialized_with_metadata()
+    return make_response(jsonify(process_instance_as_dict), 200)


 def _find_process_instance_for_me_or_raise(
@@ -14,9 +14,9 @@ from flask import g
 from flask import jsonify
 from flask import make_response
 from flask.wrappers import Response
-from flask_bpmn.api.api_error import ApiError
 from werkzeug.datastructures import FileStorage

+from spiffworkflow_backend.exceptions.api_error import ApiError
 from spiffworkflow_backend.interfaces import IdToProcessGroupMapping
 from spiffworkflow_backend.models.file import FileSchema
 from spiffworkflow_backend.models.process_group import ProcessGroup
@@ -149,7 +149,30 @@ def process_model_update(
     }

     process_model = _get_process_model(process_model_identifier)
+
+    # FIXME: the logic to update the the process id would be better if it could go into the
+    # process model save method but this causes circular imports with SpecFileService.
+    # All we really need this for is to get the process id from a bpmn file so maybe that could
+    # all be moved to FileSystemService.
+    update_primary_bpmn_file = False
+    if (
+        "primary_file_name" in body_filtered
+        and "primary_process_id" not in body_filtered
+    ):
+        if process_model.primary_file_name != body_filtered["primary_file_name"]:
+            update_primary_bpmn_file = True
+
     ProcessModelService.update_process_model(process_model, body_filtered)
+
+    # update the file to ensure we get the correct process id if the primary file changed.
+    if update_primary_bpmn_file and process_model.primary_file_name:
+        primary_file_contents = SpecFileService.get_data(
+            process_model, process_model.primary_file_name
+        )
+        SpecFileService.update_file(
+            process_model, process_model.primary_file_name, primary_file_contents
+        )
+
     _commit_and_push_to_git(
         f"User: {g.user.username} updated process model {process_model_identifier}"
     )
@@ -202,10 +225,12 @@ def process_model_publish(
 ) -> flask.wrappers.Response:
     """Process_model_publish."""
     if branch_to_update is None:
-        branch_to_update = current_app.config["GIT_BRANCH_TO_PUBLISH_TO"]
+        branch_to_update = current_app.config[
+            "SPIFFWORKFLOW_BACKEND_GIT_PUBLISH_TARGET_BRANCH"
+        ]
     if branch_to_update is None:
         raise MissingGitConfigsError(
-            "Missing config for GIT_BRANCH_TO_PUBLISH_TO. "
+            "Missing config for SPIFFWORKFLOW_BACKEND_GIT_PUBLISH_TARGET_BRANCH. "
             "This is required for publishing process models"
         )
     process_model_identifier = _un_modify_modified_process_model_id(
@@ -277,6 +302,18 @@ def process_model_file_delete(
     """Process_model_file_delete."""
    process_model_identifier = modified_process_model_identifier.replace(":", "/")
     process_model = _get_process_model(process_model_identifier)
+
+    if process_model.primary_file_name == file_name:
+        raise ApiError(
+            error_code="process_model_file_cannot_be_deleted",
+            message=(
+                f"'{file_name}' is the primary bpmn file for"
+                f" '{process_model_identifier}' and cannot be deleted. Please set"
+                " another file as the primary before attempting to delete this one."
+            ),
+            status_code=400,
+        )
+
     try:
         SpecFileService.delete_file(process_model, file_name)
     except FileNotFoundError as exception:
@@ -548,7 +585,7 @@ def _create_or_update_process_model_file(
         ApiError(
             error_code="process_model_file_invalid",
             message=(
-                f"Invalid Process model file cannot be save: {request_file.name}."
+                f"Invalid Process model file: {request_file.filename}."
                 f" Received error: {str(exception)}"
             ),
             status_code=400,
@@ -10,10 +10,10 @@ from flask import current_app
 from flask import jsonify
 from flask import make_response
 from flask.wrappers import Response
-from flask_bpmn.api.api_error import ApiError
 from lxml import etree  # type: ignore
 from lxml.builder import ElementMaker  # type: ignore

+from spiffworkflow_backend.exceptions.api_error import ApiError
 from spiffworkflow_backend.routes.process_api_blueprint import _get_process_model
 from spiffworkflow_backend.routes.process_api_blueprint import (
     _get_required_parameter_or_raise,
Some files were not shown because too many files have changed in this diff