Fixed linting errors

Kayvon Martinez 2024-03-07 01:11:57 -06:00
parent 28593bc04c
commit dd0c3253a0
78 changed files with 1252 additions and 2625 deletions

View File

@@ -1,2 +1,2 @@
 pip==24.0
-poetry==1.8.2
+poetry==1.7.1

View File

@@ -3,7 +3,7 @@ name: Delete old container images
 on:
   workflow_dispatch: # allow running on demand
   schedule:
     - cron: "0 2 * * *" # every day at 2:00am UTC
 jobs:
   clean-ghcr:
@@ -14,7 +14,7 @@ jobs:
       uses: snok/container-retention-policy@v2
       with:
         image-names: spiffworkflow-backend, spiffworkflow-frontend
-        cut-off: Three months ago UTC
+        cut-off: Two months ago UTC
         account-type: org
         org-name: sartography
         keep-at-least: 1

View File

@ -1,21 +0,0 @@
name: Dev Containers
on:
push:
branches: [ "main" ]
pull_request:
branches: [ "main" ]
jobs:
build:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
- name: Create dev environment
run: make dev-env
- name: Backend Unit tests
run: make be-tests-par

View File

@@ -17,7 +17,7 @@ jobs:
         uses: actions/checkout@v4
       - name: Set up Python
-        uses: actions/setup-python@v5.0.0
+        uses: actions/setup-python@v4.6.1
         with:
           python-version: 3.11
       - name: Pip Install

View File

@@ -117,7 +117,7 @@ jobs:
         uses: actions/checkout@v4
       - name: Set up Python ${{ matrix.python }}
-        uses: actions/setup-python@v5.0.0
+        uses: actions/setup-python@v4.6.1
         with:
           python-version: ${{ matrix.python }}
@@ -216,7 +216,7 @@ jobs:
       - name: Check out the repository
         uses: actions/checkout@v4
       - name: Set up Python
-        uses: actions/setup-python@v5.0.0
+        uses: actions/setup-python@v4.6.1
         with:
           python-version: "3.11"
       - name: Install Poetry
@@ -261,7 +261,7 @@ jobs:
         fetch-depth: 0
       - name: Set up Python
-        uses: actions/setup-python@v5.0.0
+        uses: actions/setup-python@v4.6.1
         with:
           python-version: "3.11"
@@ -276,7 +276,7 @@ jobs:
         poetry --version
       - name: Download coverage data
-        uses: actions/download-artifact@v4.1.4
+        uses: actions/download-artifact@v4.1.1
         with:
           name: coverage-data
           # this action doesn't seem to respect working-directory so include working-directory value in path
@@ -287,7 +287,7 @@ jobs:
         ./bin/run_ci_session coverage
       - name: Upload coverage report
-        uses: codecov/codecov-action@v4.1.0
+        uses: codecov/codecov-action@v3.1.4
       - name: SonarCloud Scan
         uses: sonarsource/sonarcloud-github-action@v2.1.1

.gitignore vendored
View File

@@ -8,6 +8,3 @@ version_info.json
 UNKNOWN.egg-info/
 process_models/
 .ipynb_checkpoints
-.env*
-.cache
-.mypy_cache

Makefile
View File

@@ -1,109 +0,0 @@
-MY_USER := $(shell id -u)
-MY_GROUP := $(shell id -g)
-ME := $(MY_USER):$(MY_GROUP)
-SUDO ?= sudo
-ARENA_CONTAINER ?= spiff-arena
-ARENA_DEV_OVERLAY ?= dev.docker-compose.yml
-BACKEND_CONTAINER ?= spiffworkflow-backend
-BACKEND_DEV_OVERLAY ?= spiffworkflow-backend/dev.docker-compose.yml
-FRONTEND_CONTAINER ?= spiffworkflow-frontend
-FRONTEND_DEV_OVERLAY ?= spiffworkflow-frontend/dev.docker-compose.yml
-DOCKER_COMPOSE ?= RUN_AS=$(ME) docker compose $(YML_FILES)
-IN_ARENA ?= $(DOCKER_COMPOSE) run $(ARENA_CONTAINER)
-IN_BACKEND ?= $(DOCKER_COMPOSE) run $(BACKEND_CONTAINER)
-IN_FRONTEND ?= $(DOCKER_COMPOSE) run $(FRONTEND_CONTAINER)
-SPIFFWORKFLOW_BACKEND_ENV ?= local_development
-
-YML_FILES := -f docker-compose.yml \
-	-f $(BACKEND_DEV_OVERLAY) \
-	-f $(FRONTEND_DEV_OVERLAY) \
-	-f $(ARENA_DEV_OVERLAY)
-
-all: dev-env start-dev run-pyl
-	@/bin/true
-
-build-images:
-	$(DOCKER_COMPOSE) build
-
-dev-env: stop-dev build-images poetry-i be-poetry-i be-recreate-db fe-npm-i
-	@/bin/true
-
-start-dev: stop-dev
-	$(DOCKER_COMPOSE) up -d
-
-stop-dev:
-	$(DOCKER_COMPOSE) down
-
-be-clear-log-file:
-	$(IN_BACKEND) rm -f log/unit_testing.log
-
-be-logs:
-	docker logs -f $(BACKEND_CONTAINER)
-
-be-mypy:
-	$(IN_BACKEND) poetry run mypy src tests
-
-be-poetry-i:
-	$(IN_BACKEND) poetry install
-
-be-recreate-db:
-	$(IN_BACKEND) ./bin/recreate_db clean
-
-be-ruff:
-	$(IN_BACKEND) poetry run ruff --fix .
-
-be-sh:
-	$(IN_BACKEND) /bin/bash
-
-be-sqlite:
-	@if [ ! -f "$(BACKEND_CONTAINER)/src/instance/db_$(SPIFFWORKFLOW_BACKEND_ENV).sqlite3" ]; then \
-		echo "SQLite database file does not exist: $(BACKEND_CONTAINER)/src/instance/db_$(SPIFFWORKFLOW_BACKEND_ENV).sqlite3"; \
-		exit 1; \
-	fi
-	$(IN_BACKEND) sqlite3 src/instance/db_$(SPIFFWORKFLOW_BACKEND_ENV).sqlite3
-
-be-tests: be-clear-log-file
-	$(IN_BACKEND) poetry run pytest
-
-be-tests-par: be-clear-log-file
-	$(IN_BACKEND) poetry run pytest -n auto -x --random-order
-
-fe-lint-fix:
-	$(IN_FRONTEND) npm run lint:fix
-
-fe-logs:
-	docker logs -f $(FRONTEND_CONTAINER)
-
-fe-npm-i:
-	$(IN_FRONTEND) npm i && git checkout -- spiffworkflow-frontend/package-lock.json
-
-fe-sh:
-	$(IN_FRONTEND) /bin/bash
-
-poetry-i:
-	$(IN_ARENA) poetry install --no-root
-
-pre-commit:
-	$(IN_ARENA) poetry run pre-commit run --verbose --all-files
-
-run-pyl: fe-lint-fix pre-commit be-mypy be-tests-par
-	@/bin/true
-
-sh:
-	$(IN_ARENA) /bin/bash
-
-take-ownership:
-	$(SUDO) chown -R $(ME) .
-
-.PHONY: build-images dev-env \
-	start-dev stop-dev \
-	be-clear-log-file be-logs be-mypy be-poetry-i be-recreate-db be-ruff be-sh be-sqlite be-tests be-tests-par \
-	fe-lint-fix fe-logs fe-npm-i fe-sh \
-	poetry-i pre-commit run-pyl \
-	take-ownership

View File

@@ -67,24 +67,6 @@ For full instructions, see [Running SpiffWorkflow Locally with Docker](https://w
 The `docker-compose.yml` file is for running a full-fledged instance of spiff-arena while `editor.docker-compose.yml` provides BPMN graphical editor capability to libraries and projects that depend on SpiffWorkflow but have no built-in BPMN edit capabilities.
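For illustration, a minimal sketch of launching each variant (compose file names as described in the paragraph above; flags are standard `docker compose` usage):

```sh
# full instance: frontend, backend, and supporting services
docker compose -f docker-compose.yml up -d

# editor-only variant, for projects that just need BPMN editing
docker compose -f editor.docker-compose.yml up -d
```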
-### Using Docker for Local Development
-
-If you have `docker` and `docker compose`, as an alternative to locally installing the required dependencies, you can leverage the development docker containers and `Makefile` while working locally. To use, clone the repo and run `make`. This will build the required images, install all dependencies, start the servers, and run the linting and tests. Once complete, you can [open the app](http://localhost:8001), and code changes will be reflected while running.
-
-After the containers are set up, you can run `make start-dev` and `make stop-dev` to start and stop the servers. If the frontend or backend lock file changes, `make dev-env` will recreate the containers with the new dependencies.
-
-Please refer to the [Makefile](Makefile) as the source of truth, but for a summary of the available `make` targets (a usage sketch follows this table):
-
-| Target | Action |
-|----|----|
-| dev-env | Builds the images, sets up the backend db and installs `npm` and `poetry` dependencies |
-| start-dev | Starts the frontend and backend servers, also stops them first if they were already running |
-| stop-dev | Stops the frontend and backend servers |
-| be-tests-par | Runs the backend unit tests in parallel |
-| fe-lint-fix | Runs `npm run lint:fix` in the frontend container |
-| run-pyl | Runs all frontend and backend lints and the backend unit tests |
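A minimal usage sketch of the loop this (now removed) section described, using the targets summarized above (repo URL assumed from the sartography org):

```sh
git clone https://github.com/sartography/spiff-arena.git
cd spiff-arena
make             # build images, install dependencies, start servers, run lints and tests
# open http://localhost:8001 once it finishes
make stop-dev    # stop the frontend and backend servers
make start-dev   # start them again without a full rebuild
make run-pyl     # re-run lints, mypy, and the backend unit tests
```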
 ## Contributing

 To start understanding the system, you might:
@@ -109,4 +91,3 @@ SpiffArena's main components are published under the terms of the
 You can find us on [our Discord Channel](https://discord.gg/BYHcc7PpUC).
 Commercial support for SpiffWorkflow is available from [Sartography](https://sartography.com).
-Please contact us via the schedule a demo link on the [SpiffWorkflow website](https://spiffworkflow.org) to discuss your needs.

View File

@@ -17,6 +17,15 @@ react_projects=(
 subcommand="${1:-}"

+if [[ "$subcommand" == "pre" ]]; then
+  if [[ -n "$(git status --porcelain SpiffWorkflow)" ]]; then
+    echo "SpiffWorkflow has uncommitted changes. Running its test suite."
+    pushd SpiffWorkflow
+    make tests-par # run tests in parallel
+    popd
+  fi
+fi
+
 function get_python_dirs() {
   (git ls-tree -r HEAD --name-only | grep -E '\.py$' | awk -F '/' '{print $1}' | sort | uniq | grep -v '\.' | grep -Ev '^(bin|migrations)$') || echo ''
 }

View File

@ -1,13 +0,0 @@
FROM python:3.12.1-slim-bookworm
WORKDIR /app
RUN apt-get update \
&& apt-get install -y -q git-core curl \
&& apt-get clean \
&& rm -rf /var/lib/apt/lists/*
RUN git config --global --add safe.directory /app
RUN pip install --upgrade pip
RUN pip install poetry==1.8.1

View File

@ -1,12 +0,0 @@
services:
spiff-arena:
build:
context: .
dockerfile: dev.Dockerfile
user: "${RUN_AS}"
environment:
POETRY_VIRTUALENVS_IN_PROJECT: "true"
XDG_CACHE_HOME: "/app/.cache"
volumes:
- ./:/app

View File

@@ -1,12 +1,10 @@
-# version: "3.8"
-
 services:
   spiffworkflow-frontend:
     container_name: spiffworkflow-frontend
     image: ghcr.io/sartography/spiffworkflow-frontend:latest
     depends_on:
       spiffworkflow-backend:
-        condition: service_started
+        condition: service_healthy
     environment:
       APPLICATION_ROOT: "/"
       PORT0: "${SPIFFWORKFLOW_FRONTEND_PORT:-8001}"
@@ -41,7 +39,7 @@ services:
     ports:
       - "${SPIFF_BACKEND_PORT:-8000}:${SPIFF_BACKEND_PORT:-8000}/tcp"
     volumes:
-      - ${SPIFFWORKFLOW_BACKEND_LOCAL_BPMN_SPEC_DIR:-./process_models}:/app/process_models
+      - ./process_models:/app/process_models
       - spiffworkflow_backend_db:/app/db_volume
     healthcheck:
       test: "curl localhost:${SPIFF_BACKEND_PORT:-8000}/v1.0/status --fail"
@@ -65,6 +63,7 @@ services:
       timeout: 5s
       retries: 20
+
 volumes:
   spiffworkflow_backend_db:
     driver: local

View File

@@ -242,49 +242,57 @@ The schema enforces the following rules:
 - The `Preferred Delivery Date Range` must start no earlier than today and end no later than the `end_date`.

 ### Display Fields Side-By-Side on Same Row

-When designing forms, it's often more user-friendly to display related fields, such as First Name and Last Name, side by side on the same row, rather than stacked vertically. The `ui:layout` attribute in your form's JSON schema enables this by allowing you to specify how fields are displayed relative to each other, controlling the grid columns each field occupies for a responsive design.
+By default, all form fields will be laid out one on top of the other.
+In some cases, it might be more user-friendly to put two or more fields next to each other on the same conceptual "row."
+Perhaps, you want to let a user fill out a name, and have First Name and Last Name next to each other.
+Don't actually do this; use Full name as a single field. :)
+But in some other case where you actually want to have fields laid out horizontally instead of vertically, do the following:

-#### Form Schema Example:
-Define your form fields in the JSON schema as follows:
+Example form schema:

 ```json
 {
-  "title": "Side by Side Layout",
-  "description": "Demonstrating side-by-side layout",
+  "title": "Side by side",
+  "description": "A simple form demonstrating side-by-side layout of fields",
   "type": "object",
   "properties": {
-    "firstName": {"type": "string"},
-    "lastName": {"type": "string"},
-    "notes": {"type": "string"}
+    "firstName": {
+      "type": "string"
+    },
+    "lastName": {
+      "type": "string"
+    },
+    "notes": {
+      "type": "string"
+    }
   }
 }
 ```

-#### `ui:layout` Configuration:
-The `ui:layout` attribute accepts an array of objects, each representing a conceptual "row" of fields. Here's how to use it:
+Example uiSchema:

 ```json
 {
   "ui:layout": [
     {
-      "firstName": {"sm": 2, "md": 2, "lg": 4},
-      "lastName": {"sm": 2, "md": 2, "lg": 4}
+      "firstName": {
+        "sm": 2,
+        "md": 2,
+        "lg": 4
+      },
+      "lastName": {
+        "sm": 2,
+        "md": 2,
+        "lg": 4
+      }
     },
-    {"notes": {}}
+    {
+      "notes": {}
+    }
   ]
 }
 ```

-![Styling_Form](images/styling_forms.png)
-
-#### Key Points:
-- **Layout Design**: The `ui:layout` specifies that `firstName` and `lastName` should appear side by side. Each field's size adjusts according to the screen size (small, medium, large), utilizing grid columns for responsive design.
-- **Responsive Columns**: Values (`sm`, `md`, `lg`) indicate the number of grid columns a field should occupy, ensuring the form remains functional and visually appealing across devices.
-- **Simplified Configuration**: If column widths are unspecified, the layout will automatically adjust, providing flexibility in design.
-
-#### Example Illustrated:
 In this case, we are saying that we want firstName and lastName in the same row, since they are both in the first element of the ui:layout array.
 We are saying that firstName should take up 4 columns when a large display is used.
@@ -301,11 +309,6 @@ If you just specific a uiSchema like this, it will figure out the column widths
   ]
 }

-By leveraging the `ui:layout` feature, you can design form layouts that are not only functional but also enhance the user experience, making your forms well-organized and accessible across various screen sizes.
 ### Display UI Help in Web Forms

 When designing web forms, it's essential to provide users with contextual help to ensure they understand the purpose and requirements of each field.
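For context, a hedged sketch of the kind of per-field help text this section goes on to describe, assuming the react-jsonschema-form `ui:help` convention (the field name reuses `firstName` from the earlier example and is illustrative):

```json
{
  "firstName": {
    "ui:help": "Enter your given name as it appears on your government ID."
  }
}
```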
@@ -399,7 +402,7 @@ Below is an example JSON schema that includes the numeric range field:

 This schema defines a numeric range object with `min` and `max` properties, both of which are required.

-#### UI Schema Example
+#### Ui Schema Example

 ```json
 {
View File

@@ -89,44 +89,3 @@ This thrown End Escalation can be intercepted by the boundary event of a Call Ac
 In another example, we see an End Escalation event being thrown within a sub-process and caught at the sub-process's boundary.
 The application of the last example aligns with the first, where the escalation can be intercepted either on a Call Activity or at the beginning of a sub-process.
 It's crucial to remember that whether a process is created or terminated in these contexts depends on whether non-interrupting or interrupting events are utilized.

-## Configuring Escalation Events Properties
-
-Setting up an escalation event within a workflow in SpiffWorkflow involves defining both the escalation trigger (throw event) and the point where the escalation is handled (catch event).
-Here's how to set up these components:
-
-**Define the Escalation ID**: Determine the task or process stage where an escalation might need to be triggered due to exceptional circumstances, and decide on a unique identifier for your escalation event.
-
-![Escalation ID](images/Escalation_ID.png)
-
-```{admonition} Note
-⚠ In the above example, the escalation ID is created with `Escalation_Throw_1`.
-```
-
-**Define the Intermediate Throw Escalation Event**:
-Add an Intermediate Throw Escalation Event immediately after the task identified. Select the escalation ID and create a unique **escalation code**.
-
-![Escalation Order](images/Escalation_Order.png)
-
-```{admonition} Note
-⚠ The escalation code is essential for matching the throw event with its corresponding catch event. Example: `OrderExceedsThreshold`.
-```
-
-**Define the Escalation Catch Event**:
-This can be a boundary event attached to a task where the escalation should be caught and handled, or an intermediate event in the workflow where the escalation process converges.
-For a boundary catch event, attach it to the task designated to handle the escalation. For an intermediate catch event, place it at the appropriate point in the process flow.
-
-![Escalation Order](images/Escalation_Order_2.png)
-
-```{admonition} Note
-⚠ Ensure this matches exactly with the code assigned to the throw event to ensure proper linkage. Example: `OrderExceedsThreshold`.
-```
-
-**Set Additional Properties (Optional)**:
-You may need to set additional properties for the escalation event, such as:
-
-- **Payload/Variables**: Configuring any data or process variables that should be passed along with the escalation for handling by the catch event.
-- **Documentation**: Providing details on when the escalation should be triggered and how it should be handled.
-
-After setting up the escalation event, test the workflow to ensure the escalation is triggered under the right conditions and that the catch event handles it as expected.
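For illustration, a minimal BPMN XML sketch of the throw/catch pairing this (removed) section described, reusing its `Escalation_Throw_1` ID and `OrderExceedsThreshold` code; the process, task, and event IDs are hypothetical and sequence flows inside the sub-process are omitted:

```xml
<bpmn:definitions xmlns:bpmn="http://www.omg.org/spec/BPMN/20100524/MODEL" id="Definitions_1">
  <!-- shared escalation element: the escalationCode links the throw event to its catch -->
  <bpmn:escalation id="Escalation_Throw_1" escalationCode="OrderExceedsThreshold" />
  <bpmn:process id="Process_Order" isExecutable="true">
    <bpmn:subProcess id="SubProcess_PlaceOrder" name="Place Order">
      <bpmn:task id="Task_CheckTotal" name="Check Order Total" />
      <!-- thrown inside the sub-process when the exceptional condition is detected -->
      <bpmn:intermediateThrowEvent id="Event_Throw" name="Order Exceeds Threshold">
        <bpmn:escalationEventDefinition escalationRef="Escalation_Throw_1" />
      </bpmn:intermediateThrowEvent>
    </bpmn:subProcess>
    <!-- caught at the sub-process boundary; cancelActivity="false" makes it non-interrupting -->
    <bpmn:boundaryEvent id="Event_Catch" attachedToRef="SubProcess_PlaceOrder" cancelActivity="false">
      <bpmn:escalationEventDefinition escalationRef="Escalation_Throw_1" />
    </bpmn:boundaryEvent>
    <bpmn:sequenceFlow id="Flow_Handle" sourceRef="Event_Catch" targetRef="Task_Handle" />
    <bpmn:userTask id="Task_Handle" name="Handle Escalation" />
  </bpmn:process>
</bpmn:definitions>
```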

Binary files not shown: four images deleted (54 KiB, 59 KiB, 60 KiB, and 30 KiB), presumably the screenshots referenced in the removed documentation above.
View File

@@ -1,7 +1,7 @@
-alabaster==0.7.16
+alabaster==0.7.13
 appdirs==1.4.4
 astroid==2.15.5
-attrs==23.2.0
+attrs==23.1.0
 Babel==2.12.1
 beautifulsoup4==4.12.2
 cattrs==23.2.3
@@ -10,32 +10,32 @@ charset-normalizer==3.1.0
 click==8.1.3
 colorama==0.4.6
 docutils==0.18.1
-esbonio==0.16.4
+esbonio==0.16.1
-furo==2024.1.29
+furo==2022.9.29
 idna==3.4
 imagesize==1.4.1
 Jinja2==3.1.2
-lazy-object-proxy==1.10.0
+lazy-object-proxy==1.9.0
 livereload==2.6.3
-lsprotocol==2023.0.1
+lsprotocol==2023.0.0a1
-markdown-it-py==3.0.0
+markdown-it-py==2.2.0
 MarkupSafe==2.1.2
-mdit-py-plugins==0.4.0
+mdit-py-plugins==0.3.5
 mdurl==0.1.2
-myst-parser==2.0.0
+myst-parser==1.0.0
 packaging==23.1
-pygls==1.3.0
+pygls==1.0.2
 Pygments==2.17.2
 pyspellchecker==0.7.2
 PyYAML==6.0.1
-requests==2.31.0
+requests==2.30.0
 six==1.16.0
 snowballstemmer==2.2.0
 soupsieve==2.4.1
-Sphinx==7.2.6
+Sphinx==5.3.0
 sphinx-autoapi==2.1.0
 sphinx-autobuild==2021.3.14
-sphinx-basic-ng==1.0.0b2
+sphinx-basic-ng==1.0.0b1
 sphinx-click==4.3.0
 sphinx-rtd-theme==2.0.0
 sphinxcontrib-applehelp==1.0.8
@@ -44,7 +44,7 @@ sphinxcontrib-htmlhelp==2.0.1
 sphinxcontrib-jquery==4.1
 sphinxcontrib-jsmath==1.0.1
 sphinxcontrib-qthelp==1.0.7
-sphinxcontrib-serializinghtml==1.1.10
+sphinxcontrib-serializinghtml==1.1.5
 tornado==6.3.2
 typeguard==3.0.2
 Unidecode==1.3.8

View File

@@ -101,9 +101,4 @@ Right now we allow editing the Display name of a model or group, but it does
 not change the name of the underlying directory, making it harder and harder
 over time to look at GitHub or the file system and find what you are seeing in the display.

-### Fast feedback on errors with python expressions
-
-We have in the past considered changing the way lots of expressions worked such that you would have to include an equals sign in the front in order for it to be evaluated as python, like [this](https://docs.camunda.io/docs/components/concepts/expressions/#expressions-vs-static-values).
-The [status quo is confusing](https://github.com/sartography/spiff-arena/issues/1075), but Elizabeth pointed out that requiring string parsing in order to decide how to evaluate something is not necessarily better.
-If the BPMN editor was aware of the variables that existed within the workflow - defined by the json schemas of forms, messages, and service calls, or the variables that come out of script tasks - then we could immediately notify people of the issue while they are authoring the diagram rather than having to wait for a possible runtime error.
-We could also change the error message on evaluation errors to include a reminder that quotes are needed around strings.

poetry.lock generated
View File

@@ -397,13 +397,13 @@ test = ["appdirs (==1.4.4)", "covdefaults (>=2.3)", "pytest (>=7.2.2)", "pytest-

 [[package]]
 name = "pre-commit"
-version = "3.6.2"
+version = "2.21.0"
 description = "A framework for managing and maintaining multi-language pre-commit hooks."
 optional = false
-python-versions = ">=3.9"
+python-versions = ">=3.7"
 files = [
-    {file = "pre_commit-3.6.2-py2.py3-none-any.whl", hash = "sha256:ba637c2d7a670c10daedc059f5c49b5bd0aadbccfcd7ec15592cf9665117532c"},
-    {file = "pre_commit-3.6.2.tar.gz", hash = "sha256:c3ef34f463045c88658c5b99f38c1e297abdcc0ff13f98d3370055fbbfabc67e"},
+    {file = "pre_commit-2.21.0-py2.py3-none-any.whl", hash = "sha256:e2f91727039fc39a92f58a588a25b87f936de6567eed4f0e673e0507edc75bad"},
+    {file = "pre_commit-2.21.0.tar.gz", hash = "sha256:31ef31af7e474a8d8995027fefdfcf509b5c913ff31f2015b4ec4beb26a6f658"},
 ]

 [package.dependencies]
@@ -415,13 +415,13 @@ virtualenv = ">=20.10.0"

 [[package]]
 name = "pre-commit-hooks"
-version = "4.5.0"
+version = "4.4.0"
 description = "Some out-of-the-box hooks for pre-commit."
 optional = false
-python-versions = ">=3.8"
+python-versions = ">=3.7"
 files = [
-    {file = "pre_commit_hooks-4.5.0-py2.py3-none-any.whl", hash = "sha256:b779d5c44ede9b1fda48e2d96b08e9aa5b1d2fdb8903ca09f0dbaca22d529edb"},
-    {file = "pre_commit_hooks-4.5.0.tar.gz", hash = "sha256:ffbe2af1c85ac9a7695866955680b4dee98822638b748a6f3debefad79748c8a"},
+    {file = "pre_commit_hooks-4.4.0-py2.py3-none-any.whl", hash = "sha256:fc8837335476221ccccda3d176ed6ae29fe58753ce7e8b7863f5d0f987328fc6"},
+    {file = "pre_commit_hooks-4.4.0.tar.gz", hash = "sha256:7011eed8e1a25cde94693da009cba76392194cecc2f3f06c51a44ea6ad6c2af9"},
 ]

 [package.dependencies]
@@ -519,7 +519,6 @@ files = [
     {file = "PyYAML-6.0.1-cp311-cp311-win_amd64.whl", hash = "sha256:bf07ee2fef7014951eeb99f56f39c9bb4af143d8aa3c21b1677805985307da34"},
     {file = "PyYAML-6.0.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:855fb52b0dc35af121542a76b9a84f8d1cd886ea97c84703eaa6d88e37a2ad28"},
     {file = "PyYAML-6.0.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:40df9b996c2b73138957fe23a16a4f0ba614f4c0efce1e9406a184b6d07fa3a9"},
-    {file = "PyYAML-6.0.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a08c6f0fe150303c1c6b71ebcd7213c2858041a7e01975da3a99aed1e7a378ef"},
     {file = "PyYAML-6.0.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6c22bec3fbe2524cde73d7ada88f6566758a8f7227bfbf93a408a9d86bcc12a0"},
     {file = "PyYAML-6.0.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8d4e9c88387b0f5c7d5f281e55304de64cf7f9c0021a3525bd3b1c542da3b0e4"},
     {file = "PyYAML-6.0.1-cp312-cp312-win32.whl", hash = "sha256:d483d2cdf104e7c9fa60c544d92981f12ad66a457afae824d146093b8c294c54"},
@@ -556,13 +555,13 @@ files = [

 [[package]]
 name = "reorder-python-imports"
-version = "3.12.0"
+version = "3.9.0"
 description = "Tool for reordering python imports"
 optional = false
-python-versions = ">=3.8"
+python-versions = ">=3.7"
 files = [
-    {file = "reorder_python_imports-3.12.0-py2.py3-none-any.whl", hash = "sha256:930c23a42192b365e20e191a4d304d93e645bd44c242d8bc64accc4a3b2b0f3d"},
-    {file = "reorder_python_imports-3.12.0.tar.gz", hash = "sha256:f93106a662b0c034ca81c91fd1c2f21a1e94ece47c9f192672e2a13c8ec1856c"},
+    {file = "reorder_python_imports-3.9.0-py2.py3-none-any.whl", hash = "sha256:3f9c16e8781f54c944756d0d1eb34a8c863554f7a4eb3693f574fe19b1a29b56"},
+    {file = "reorder_python_imports-3.9.0.tar.gz", hash = "sha256:49292ed537829a6bece9fb3746fc1bbe98f52643be5de01a4e13680268a5b0ec"},
 ]

 [package.dependencies]
@@ -598,28 +597,28 @@ jinja2 = ["ruamel.yaml.jinja2 (>=0.2)"]

 [[package]]
 name = "ruff"
-version = "0.2.1"
-description = "An extremely fast Python linter and code formatter, written in Rust."
+version = "0.0.270"
+description = "An extremely fast Python linter, written in Rust."
 optional = false
 python-versions = ">=3.7"
 files = [
-    {file = "ruff-0.2.1-py3-none-macosx_10_12_x86_64.macosx_11_0_arm64.macosx_10_12_universal2.whl", hash = "sha256:dd81b911d28925e7e8b323e8d06951554655021df8dd4ac3045d7212ac4ba080"},
-    {file = "ruff-0.2.1-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:dc586724a95b7d980aa17f671e173df00f0a2eef23f8babbeee663229a938fec"},
-    {file = "ruff-0.2.1-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c92db7101ef5bfc18e96777ed7bc7c822d545fa5977e90a585accac43d22f18a"},
-    {file = "ruff-0.2.1-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:13471684694d41ae0f1e8e3a7497e14cd57ccb7dd72ae08d56a159d6c9c3e30e"},
-    {file = "ruff-0.2.1-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a11567e20ea39d1f51aebd778685582d4c56ccb082c1161ffc10f79bebe6df35"},
-    {file = "ruff-0.2.1-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:00a818e2db63659570403e44383ab03c529c2b9678ba4ba6c105af7854008105"},
-    {file = "ruff-0.2.1-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:be60592f9d218b52f03384d1325efa9d3b41e4c4d55ea022cd548547cc42cd2b"},
-    {file = "ruff-0.2.1-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:fbd2288890b88e8aab4499e55148805b58ec711053588cc2f0196a44f6e3d855"},
-    {file = "ruff-0.2.1-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f3ef052283da7dec1987bba8d8733051c2325654641dfe5877a4022108098683"},
-    {file = "ruff-0.2.1-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:7022d66366d6fded4ba3889f73cd791c2d5621b2ccf34befc752cb0df70f5fad"},
-    {file = "ruff-0.2.1-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:0a725823cb2a3f08ee743a534cb6935727d9e47409e4ad72c10a3faf042ad5ba"},
-    {file = "ruff-0.2.1-py3-none-musllinux_1_2_i686.whl", hash = "sha256:0034d5b6323e6e8fe91b2a1e55b02d92d0b582d2953a2b37a67a2d7dedbb7acc"},
-    {file = "ruff-0.2.1-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:e5cb5526d69bb9143c2e4d2a115d08ffca3d8e0fddc84925a7b54931c96f5c02"},
-    {file = "ruff-0.2.1-py3-none-win32.whl", hash = "sha256:6b95ac9ce49b4fb390634d46d6ece32ace3acdd52814671ccaf20b7f60adb232"},
-    {file = "ruff-0.2.1-py3-none-win_amd64.whl", hash = "sha256:e3affdcbc2afb6f5bd0eb3130139ceedc5e3f28d206fe49f63073cb9e65988e0"},
-    {file = "ruff-0.2.1-py3-none-win_arm64.whl", hash = "sha256:efababa8e12330aa94a53e90a81eb6e2d55f348bc2e71adbf17d9cad23c03ee6"},
-    {file = "ruff-0.2.1.tar.gz", hash = "sha256:3b42b5d8677cd0c72b99fcaf068ffc62abb5a19e71b4a3b9cfa50658a0af02f1"},
+    {file = "ruff-0.0.270-py3-none-macosx_10_7_x86_64.whl", hash = "sha256:f74c4d550f7b8e808455ac77bbce38daafc458434815ba0bc21ae4bdb276509b"},
+    {file = "ruff-0.0.270-py3-none-macosx_10_9_x86_64.macosx_11_0_arm64.macosx_10_9_universal2.whl", hash = "sha256:643de865fd35cb76c4f0739aea5afe7b8e4d40d623df7e9e6ea99054e5cead0a"},
+    {file = "ruff-0.0.270-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:eca02e709b3308eb7255b5f74e779be23b5980fca3862eae28bb23069cd61ae4"},
+    {file = "ruff-0.0.270-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:3ed3b198768d2b3a2300fb18f730cd39948a5cc36ba29ae9d4639a11040880be"},
+    {file = "ruff-0.0.270-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:739495d2dbde87cf4e3110c8d27bc20febf93112539a968a4e02c26f0deccd1d"},
+    {file = "ruff-0.0.270-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:08188f8351f4c0b6216e8463df0a76eb57894ca59a3da65e4ed205db980fd3ae"},
+    {file = "ruff-0.0.270-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:0827b074635d37984fc98d99316bfab5c8b1231bb83e60dacc83bd92883eedb4"},
+    {file = "ruff-0.0.270-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:0d61ae4841313f6eeb8292dc349bef27b4ce426e62c36e80ceedc3824e408734"},
+    {file = "ruff-0.0.270-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0eb412f20e77529a01fb94d578b19dcb8331b56f93632aa0cce4a2ea27b7aeba"},
+    {file = "ruff-0.0.270-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:b775e2c5fc869359daf8c8b8aa0fd67240201ab2e8d536d14a0edf279af18786"},
+    {file = "ruff-0.0.270-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:21f00e47ab2308617c44435c8dfd9e2e03897461c9e647ec942deb2a235b4cfd"},
+    {file = "ruff-0.0.270-py3-none-musllinux_1_2_i686.whl", hash = "sha256:0bbfbf6fd2436165566ca85f6e57be03ed2f0a994faf40180cfbb3604c9232ef"},
+    {file = "ruff-0.0.270-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:8af391ef81f7be960be10886a3c1aac0b298bde7cb9a86ec2b05faeb2081ce6b"},
+    {file = "ruff-0.0.270-py3-none-win32.whl", hash = "sha256:b4c037fe2f75bcd9aed0c89c7c507cb7fa59abae2bd4c8b6fc331a28178655a4"},
+    {file = "ruff-0.0.270-py3-none-win_amd64.whl", hash = "sha256:0012f9b7dc137ab7f1f0355e3c4ca49b562baf6c9fa1180948deeb6648c52957"},
+    {file = "ruff-0.0.270-py3-none-win_arm64.whl", hash = "sha256:9613456b0b375766244c25045e353bc8890c856431cd97893c97b10cc93bd28d"},
+    {file = "ruff-0.0.270.tar.gz", hash = "sha256:95db07b7850b30ebf32b27fe98bc39e0ab99db3985edbbf0754d399eb2f0e690"},
 ]

 [[package]]
@@ -719,4 +718,4 @@ test = ["covdefaults (>=2.2.2)", "coverage (>=7.1)", "coverage-enable-subprocess

 [metadata]
 lock-version = "2.0"
 python-versions = ">=3.11,<3.13"
-content-hash = "1e7e942504aa9a418f93fb9a79adef2d13df35b78c180936e1a96fadaa8bd3b0"
+content-hash = "e7ce027aebced1f7d924ccd0483ced15a6cda8f715adc882d60a11d8c0676c2f"

View File

@@ -15,11 +15,11 @@ classifiers = [
 python = ">=3.11,<3.13"

 [tool.poetry.dev-dependencies]
-pre-commit = "^3.6.2"
+pre-commit = "^2.20.0"
 flake8 = "^4.0.1"
 black = ">=21.10b0"
 flake8-bandit = "^2.1.2"
-ruff = "^0.2.1"
+ruff = "^0.0.270"

 # 1.7.3 broke us. https://github.com/PyCQA/bandit/issues/841
 bandit = "1.7.2"
@@ -27,8 +27,8 @@ bandit = "1.7.2"
 flake8-bugbear = "^22.10.25"
 flake8-docstrings = "^1.6.0"
 flake8-rst-docstrings = "^0.2.7"
-reorder-python-imports = "^3.12.0"
-pre-commit-hooks = "^4.5.0"
+reorder-python-imports = "^3.9.0"
+pre-commit-hooks = "^4.0.1"
 pyupgrade = "^3.1.0"

 [tool.poetry.group.dev.dependencies]

View File

@@ -27,7 +27,7 @@ RUN apt-get update \

 # keep pip up to date
 RUN pip install --upgrade pip
-RUN pip install poetry==1.8.1
+RUN pip install poetry==1.6.1

 ######################## - SETUP

View File

@ -1,41 +0,0 @@
#!/usr/bin/env bash
function error_handler() {
echo >&2 "Exited with BAD EXIT CODE '${2}' in ${0} script at line: ${1}."
exit "$2"
}
trap 'error_handler ${LINENO} $?' ERR
set -o errtrace -o errexit -o nounset -o pipefail
if [[ -z "${SPIFFWORKFLOW_BACKEND_GIT_PUBLISH_CLONE_URL:-}" ]]; then
echo >&2 "ERROR: SPIFFWORKFLOW_BACKEND_GIT_PUBLISH_CLONE_URL must be specified to clone the git repo."
exit 1
fi
if [[ -z "${SPIFFWORKFLOW_BACKEND_GIT_SOURCE_BRANCH:-}" ]]; then
echo >&2 "ERROR: SPIFFWORKFLOW_BACKEND_GIT_SOURCE_BRANCH must be specified to clone the git repo."
exit 1
fi
if [[ -z "${SPIFFWORKFLOW_BACKEND_BPMN_SPEC_ABSOLUTE_DIR:-}" ]]; then
echo >&2 "ERROR: SPIFFWORKFLOW_BACKEND_BPMN_SPEC_ABSOLUTE_DIR must be specified to clone the git repo."
exit 1
fi
if [[ -n "${SPIFFWORKFLOW_BACKEND_GIT_SSH_PRIVATE_KEY:-}" ]]; then
if [[ -z "${SPIFFWORKFLOW_BACKEND_GIT_SSH_PRIVATE_KEY_PATH:-}" ]]; then
export SPIFFWORKFLOW_BACKEND_GIT_SSH_PRIVATE_KEY_PATH=$(mktemp /tmp/ssh_private_key.XXXXXX)
fi
chmod 600 "${SPIFFWORKFLOW_BACKEND_GIT_SSH_PRIVATE_KEY_PATH}"
echo "${SPIFFWORKFLOW_BACKEND_GIT_SSH_PRIVATE_KEY}" >"${SPIFFWORKFLOW_BACKEND_GIT_SSH_PRIVATE_KEY_PATH}"
fi
if [[ ! -d "${SPIFFWORKFLOW_BACKEND_BPMN_SPEC_ABSOLUTE_DIR}/.git" ]]; then
# otherwise git clone will not clone since the directory is not empty
if [[ -d "${SPIFFWORKFLOW_BACKEND_BPMN_SPEC_ABSOLUTE_DIR}/lost+found" ]]; then
rm -r "${SPIFFWORKFLOW_BACKEND_BPMN_SPEC_ABSOLUTE_DIR}/lost+found"
fi
export GIT_SSH_COMMAND="ssh -F /dev/null -o UserKnownHostsFile=/dev/null -o StrictHostKeyChecking=no -i ${SPIFFWORKFLOW_BACKEND_GIT_SSH_PRIVATE_KEY_PATH}"
git clone -b "$SPIFFWORKFLOW_BACKEND_GIT_SOURCE_BRANCH" "$SPIFFWORKFLOW_BACKEND_GIT_PUBLISH_CLONE_URL" "$SPIFFWORKFLOW_BACKEND_BPMN_SPEC_ABSOLUTE_DIR"
fi

View File

@ -1,26 +0,0 @@
import sys
from spiffworkflow_backend import create_app
from spiffworkflow_backend.models.process_instance import ProcessInstanceModel
from spiffworkflow_backend.services.process_instance_processor import ProcessInstanceProcessor
def main(process_instance_id: str) -> None:
"""Main."""
app = create_app()
with app.app_context():
process_instance = ProcessInstanceModel.query.filter_by(id=process_instance_id).first()
file_path = f"/var/tmp/{process_instance_id}_bpmn_json.json" # noqa: S108
if not process_instance:
raise Exception(f"Could not find a process instance with id: {process_instance_id}")
processor = ProcessInstanceProcessor(process_instance)
processor.dump_to_disk(file_path)
print(f"Saved to {file_path}")
if len(sys.argv) < 2:
raise Exception("Process instance id not supplied")
main(sys.argv[1])

View File

@ -1,7 +1,7 @@
#!/usr/bin/env bash #!/usr/bin/env bash
function error_handler() { function error_handler() {
echo >&2 "Exited with BAD EXIT CODE '${2}' in ${0} script at line: ${1}." >&2 echo "Exited with BAD EXIT CODE '${2}' in ${0} script at line: ${1}."
exit "$2" exit "$2"
} }
trap 'error_handler ${LINENO} $?' ERR trap 'error_handler ${LINENO} $?' ERR
@ -10,7 +10,4 @@ set -o errtrace -o errexit -o nounset -o pipefail
export SPIFFWORKFLOW_BACKEND_CELERY_ENABLED=true export SPIFFWORKFLOW_BACKEND_CELERY_ENABLED=true
export SPIFFWORKFLOW_BACKEND_RUN_BACKGROUND_SCHEDULER_IN_CREATE_APP=false export SPIFFWORKFLOW_BACKEND_RUN_BACKGROUND_SCHEDULER_IN_CREATE_APP=false
# so we can raise if calling unsafe code in celery
export SPIFFWORKFLOW_BACKEND_RUNNING_IN_CELERY_WORKER=true
poetry run celery -A src.spiffworkflow_backend.background_processing.celery_worker worker --loglevel=info poetry run celery -A src.spiffworkflow_backend.background_processing.celery_worker worker --loglevel=info

View File

@ -1,15 +0,0 @@
FROM python:3.12.1-slim-bookworm
WORKDIR /app
RUN apt-get update \
&& apt-get install -y -q \
gcc libssl-dev libpq-dev default-libmysqlclient-dev \
pkg-config libffi-dev git-core curl sqlite3 \
&& apt-get clean \
&& rm -rf /var/lib/apt/lists/*
RUN pip install --upgrade pip
RUN pip install poetry==1.8.1 pytest-xdist==3.5.0
CMD ["./bin/run_server_locally"]

View File

@ -1,17 +0,0 @@
services:
spiffworkflow-backend:
build:
context: spiffworkflow-backend
dockerfile: dev.Dockerfile
user: "${RUN_AS}"
environment:
FLASK_DEBUG: "1"
#POETRY_CACHE_DIR: "/app/.cache/poetry"
POETRY_VIRTUALENVS_IN_PROJECT: "true"
SPIFFWORKFLOW_BACKEND_DATABASE_URI: ""
SPIFFWORKFLOW_BACKEND_ENV: "${SPIFFWORKFLOW_BACKEND_ENV:-local_development}"
SPIFFWORKFLOW_BACKEND_LOAD_FIXTURE_DATA: ""
XDG_CACHE_HOME: "/app/.cache"
volumes:
- ./spiffworkflow-backend:/app

View File

@ -1,34 +0,0 @@
"""empty message
Revision ID: c6e246c3c04e
Revises: 6344d90d20fa
Create Date: 2024-02-19 16:41:52.728357
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = 'c6e246c3c04e'
down_revision = '6344d90d20fa'
branch_labels = None
depends_on = None
def upgrade():
# ### commands auto generated by Alembic - please adjust! ###
with op.batch_alter_table('message_triggerable_process_model', schema=None) as batch_op:
batch_op.add_column(sa.Column('file_name', sa.String(length=255), nullable=True))
batch_op.create_index(batch_op.f('ix_message_triggerable_process_model_file_name'), ['file_name'], unique=False)
# ### end Alembic commands ###
def downgrade():
# ### commands auto generated by Alembic - please adjust! ###
with op.batch_alter_table('message_triggerable_process_model', schema=None) as batch_op:
batch_op.drop_index(batch_op.f('ix_message_triggerable_process_model_file_name'))
batch_op.drop_column('file_name')
# ### end Alembic commands ###

File diff suppressed because it is too large.

View File

@@ -33,7 +33,6 @@ sentry-sdk = "^1.10"
 # sphinx-autoapi = "^2.0"
 psycopg2 = "^2.9.3"
 typing-extensions = "^4.4.0"
-openai = "^1.1.0"

 spiffworkflow-connector-command = {git = "https://github.com/sartography/spiffworkflow-connector-command.git", rev = "main"}
@@ -75,7 +74,7 @@ spiff-element-units = "^0.3.1"

 # mysqlclient lib is deemed better than the mysql-connector-python lib by sqlalchemy
 # https://docs.sqlalchemy.org/en/20/dialects/mysql.html#module-sqlalchemy.dialects.mysql.mysqlconnector
-mysqlclient = "^2.2.3"
+mysqlclient = "^2.2.0"
 flask-session = "^0.5.0"
 flask-oauthlib = "^0.9.6"
 celery = {extras = ["redis"], version = "^5.3.5"}
@@ -100,11 +99,7 @@ ruff = "^0.1.7"
 pytest-random-order = "^1.1.0"
 pytest-flask = "^1.2.0"
 pytest-flask-sqlalchemy = "^1.1.0"
-# 3.4+ broke existfirst option which we use
-# https://stackoverflow.com/questions/77667559/pytest-xdist-3-40-and-higher-not-honoring-exitfirst
-# https://github.com/pytest-dev/pytest-xdist/issues/1034
-pytest-xdist = "3.3.1"
+pytest-xdist = "^3.3.1"

 # 1.7.3 broke us. https://github.com/PyCQA/bandit/issues/841
 bandit = "1.7.7"
@@ -187,7 +182,7 @@ explicit_package_bases = false
 # solution was https://www.reddit.com/r/neovim/comments/11k5but/comment/jbjwwtf in vim settings

 [tool.ruff]
-lint.select = [
+select = [
   # "ANN", # flake8-annotations
   "ASYNC", # flake8-async
   "B", # flake8-bugbear
@@ -208,7 +203,7 @@ lint.select = [
   "YTT", # flake8-2020
 ]

-lint.ignore = [
+ignore = [
   "C901", # "complexity" category
   "PLR", # "refactoring" category has "too many lines in method" type stuff
   "PLC1901",
@@ -224,12 +219,12 @@ exclude = [
   "migrations"
 ]

-[tool.ruff.lint.per-file-ignores]
+[tool.ruff.per-file-ignores]
 "migrations/versions/*.py" = ["E501"]
 "tests/**/*.py" = ["PLR2004", "S101"]  # PLR2004 is about magic vars, S101 allows assert
 "bin/*.py" = ["T"]  # it's ok to print things in scripts

-[tool.ruff.lint.isort]
+[tool.ruff.isort]
 force-single-line = true

 # pip install fixit && fixit fix -a src

View File

@@ -25,8 +25,8 @@ from spiffworkflow_backend.routes.user_blueprint import user_blueprint
 from spiffworkflow_backend.services.monitoring_service import configure_sentry
 from spiffworkflow_backend.services.monitoring_service import setup_prometheus_metrics

-# This commented out code is if you want to use the pymysql library with sqlalchemy rather than mysqlclient.
-# mysqlclient can be hard to install when running non-docker local dev, but it is generally worth it because it is much faster.
+# This is necessary if you want to use the pymysql library with sqlalchemy rather than mysqlclient.
+# This is only potentially needed if you want to run non-docker local dev.
 # See the repo's top-level README and the linked troubleshooting guide for details.
 # import pymysql;
 # pymysql.install_as_MySQLdb()

View File

@@ -19,7 +19,6 @@ paths:
       responses:
         "200":
           description: Redirects to authentication server
-
   /login:
     parameters:
       - name: authentication_identifier
@@ -174,43 +173,6 @@ paths:
         "200":
           description: Test Return Response

-  /script-assist/enabled:
-    get:
-      operationId: spiffworkflow_backend.routes.script_assist_controller.enabled
-      summary: Returns value of SCRIPT_ASSIST_ENABLED
-      tags:
-        - AI Tools
-      responses:
-        "200":
-          description: Returns if AI script should be enabled in UI
-          content:
-            application/json:
-              schema:
-                $ref: "#/components/schemas/OkTrue"
-
-  /script-assist/process-message:
-    post:
-      operationId: spiffworkflow_backend.routes.script_assist_controller.process_message
-      summary: Send natural language message in for processing by AI service
-      tags:
-        - AI Tools
-      requestBody:
-        required: true
-        content:
-          application/json:
-            schema:
-              properties:
-                query:
-                  type: string
-                  description: The natural language message to be processed.
-      responses:
-        "200":
-          description: Send back AI service response
-          content:
-            application/json:
-              schema:
-                $ref: "#/components/schemas/OkTrue"
-
   /status:
     get:
       operationId: spiffworkflow_backend.routes.health_controller.status
@@ -791,7 +753,7 @@ paths:
       content:
         application/json:
           schema:
-            $ref: "#/components/schemas/AwesomeUnspecifiedPayload"
+            $ref: "#/components/schemas/ProcessGroup"
     tags:
       - git
     responses:
@@ -810,7 +772,7 @@ paths:
       content:
         application/json:
           schema:
-            $ref: "#/components/schemas/AwesomeUnspecifiedPayload"
+            $ref: "#/components/schemas/OkTrue"
     tags:
       - git
     responses:
@@ -1261,15 +1223,6 @@ paths:
         description: Force the process instance to run even if it has already been started.
         schema:
           type: boolean
-      - name: execution_mode
-        in: query
-        required: false
-        description: Either run in "synchronous" or "asynchronous" mode.
-        schema:
-          type: string
-          enum:
-            - synchronous
-            - asynchronous
     post:
       operationId: spiffworkflow_backend.routes.process_instances_controller.process_instance_run
       summary: Run a process instance
@@ -1848,7 +1801,7 @@ paths:
       content:
         application/json:
           schema:
-            $ref: "#/components/schemas/AwesomeUnspecifiedPayload"
+            $ref: "#/components/schemas/User"
     responses:
       "200":
         description: true if user exists
@@ -2365,15 +2318,6 @@ paths:
         description: Include task data for forms
         schema:
           type: boolean
-      - name: execution_mode
-        in: query
-        required: false
-        description: Either run in "synchronous" or "asynchronous" mode.
-        schema:
-          type: string
-          enum:
-            - synchronous
-            - asynchronous
     get:
       tags:
         - Tasks
@@ -2395,7 +2339,7 @@ paths:
       content:
         application/json:
           schema:
-            $ref: "#/components/schemas/AwesomeUnspecifiedPayload"
+            $ref: "#/components/schemas/ProcessGroup"
     responses:
       "200":
         description: One task
@@ -2538,15 +2482,6 @@ paths:
         description: The unique name of the message.
         schema:
           type: string
-      - name: execution_mode
-        in: query
-        required: false
-        description: Either run in "synchronous" or "asynchronous" mode.
-        schema:
-          type: string
-          enum:
-            - synchronous
-            - asynchronous
     post:
       tags:
         - Messages
@@ -2802,7 +2737,7 @@ paths:
       content:
         application/json:
          schema:
-            $ref: "#/components/schemas/AwesomeUnspecifiedPayload"
+            $ref: "#/components/schemas/Secret"
     responses:
       "200":
         description: Result of permission check
@@ -2850,12 +2785,6 @@ paths:
         description: Optional parameter to filter by a single group
         schema:
           type: string
-      - name: upsearch
-        in: query
-        required: false
-        description: Optional parameter to indicate if an upsearch should be performed
-        schema:
-          type: boolean
       - name: page
         in: query
         required: false

View File

@@ -33,7 +33,7 @@ def start_apscheduler(app: flask.app.Flask, scheduler_class: BaseScheduler = Bac
     else:
         _add_jobs_for_non_celery_based_configuration(app, scheduler)

-    _add_jobs_that_should_run_regardless_of_celery_config(app, scheduler)
+    _add_jobs_relevant_for_all_celery_configurations(app, scheduler)

     scheduler.start()
@@ -78,13 +78,12 @@ def _add_jobs_for_non_celery_based_configuration(app: flask.app.Flask, scheduler
     )

-def _add_jobs_that_should_run_regardless_of_celery_config(app: flask.app.Flask, scheduler: BaseScheduler) -> None:
+def _add_jobs_relevant_for_all_celery_configurations(app: flask.app.Flask, scheduler: BaseScheduler) -> None:
     not_started_polling_interval_in_seconds = app.config[
         "SPIFFWORKFLOW_BACKEND_BACKGROUND_SCHEDULER_NOT_STARTED_POLLING_INTERVAL_IN_SECONDS"
     ]
     # TODO: see if we can queue with celery instead on celery based configuration
-    # NOTE: pass in additional_processing_identifier if we move to celery
     scheduler.add_job(
         BackgroundProcessingService(app).process_message_instances_with_app_context,
         "interval",

View File

@@ -4,8 +4,6 @@ import celery
 from flask import current_app

 from spiffworkflow_backend.background_processing import CELERY_TASK_PROCESS_INSTANCE_RUN
-from spiffworkflow_backend.exceptions.api_error import ApiError
-from spiffworkflow_backend.helpers.spiff_enum import ProcessInstanceExecutionMode
 from spiffworkflow_backend.models.process_instance import ProcessInstanceModel
@@ -21,6 +19,8 @@ def queue_future_task_if_appropriate(process_instance: ProcessInstanceModel, eta
     args_to_celery = {
         "process_instance_id": process_instance.id,
         "task_guid": task_guid,
+        # the producer_identifier is so we can know what is putting messages in the queue
+        "producer_identifier": "future_task",
     }
     # add buffer to countdown to avoid rounding issues and race conditions with spiff. the situation we want to avoid is where
     # we think the timer said to run it at 6:34:11, and we initialize the SpiffWorkflow library,
@@ -35,23 +35,9 @@ def queue_future_task_if_appropriate(process_instance: ProcessInstanceModel, eta
 # if waiting, check all waiting tasks and see if they are timers. if they are timers, it's not runnable.
-def queue_process_instance_if_appropriate(process_instance: ProcessInstanceModel, execution_mode: str | None = None) -> bool:
-    # check if the enum value is valid
-    if execution_mode:
-        ProcessInstanceExecutionMode(execution_mode)
-
-    if execution_mode == ProcessInstanceExecutionMode.synchronous.value:
-        return False
-
-    queue_enabled = queue_enabled_for_process_model(process_instance)
-    if execution_mode == ProcessInstanceExecutionMode.asynchronous.value and not queue_enabled:
-        raise ApiError(
-            error_code="async_mode_called_without_celery",
-            message="Execution mode asynchronous requested but SPIFFWORKFLOW_BACKEND_CELERY_ENABLED is not set to true.",
-            status_code=400,
-        )
-
-    if queue_enabled:
+def queue_process_instance_if_appropriate(process_instance: ProcessInstanceModel) -> bool:
+    if queue_enabled_for_process_model(process_instance):
         celery.current_app.send_task(CELERY_TASK_PROCESS_INSTANCE_RUN, (process_instance.id,))
         return True
     return False

View File

@@ -40,10 +40,6 @@ configs_with_structures = normalized_environment(environ)
 config_from_env("FLASK_SESSION_SECRET_KEY")
 config_from_env("SPIFFWORKFLOW_BACKEND_BPMN_SPEC_ABSOLUTE_DIR")

-### AI Tools
-config_from_env("SPIFFWORKFLOW_BACKEND_SCRIPT_ASSIST_ENABLED", default=False)
-config_from_env("SPIFFWORKFLOW_BACKEND_SECRET_KEY_OPENAI_API")
-
 ### extensions
 config_from_env("SPIFFWORKFLOW_BACKEND_EXTENSIONS_PROCESS_MODEL_PREFIX", default="extensions")
 config_from_env("SPIFFWORKFLOW_BACKEND_EXTENSIONS_API_ENABLED", default=False)
@@ -207,8 +203,6 @@ config_from_env("SPIFFWORKFLOW_BACKEND_DEBUG_TASK_CONSISTENCY", default=False)
 # we load the CustomBpmnScriptEngine at import time, where we do not have access to current_app,
 # so instead of using config, we use os.environ directly over there.
 # config_from_env("SPIFFWORKFLOW_BACKEND_USE_RESTRICTED_SCRIPT_ENGINE", default=True)

 # adds the ProxyFix to Flask on http by processing the 'X-Forwarded-Proto' header
 # to make SpiffWorkflow aware that it should return https for the server urls etc rather than http.
 config_from_env("SPIFFWORKFLOW_BACKEND_USE_WERKZEUG_MIDDLEWARE_PROXY_FIX", default=False)

View File

@@ -1,3 +1,4 @@
 users:
   admin:
     service: local_open_id

View File

@@ -5,9 +5,10 @@ from os import environ

 TESTING = True
 SPIFFWORKFLOW_BACKEND_LOG_TO_FILE = environ.get("SPIFFWORKFLOW_BACKEND_LOG_TO_FILE", default="true") == "true"

-SPIFFWORKFLOW_BACKEND_PERMISSIONS_FILE_NAME = "unit_testing.yml"
-SPIFFWORKFLOW_BACKEND_URL = "http://localhost:7000"
+SPIFFWORKFLOW_BACKEND_PERMISSIONS_FILE_NAME = environ.get(
+    "SPIFFWORKFLOW_BACKEND_PERMISSIONS_FILE_NAME", default="unit_testing.yml"
+)
 SPIFFWORKFLOW_BACKEND_OPEN_ID_SERVER_URL = "http://localhost:7000/openid"
 SPIFFWORKFLOW_BACKEND_OPEN_ID_CLIENT_ID = "spiffworkflow-backend"
 SPIFFWORKFLOW_BACKEND_OPEN_ID_CLIENT_SECRET_KEY = "JXeQExm0JhQPLumgHtIIqf52bDalHz0q"  # noqa: S105

View File

@@ -24,7 +24,7 @@ class DataStoreCRUD:
         raise Exception("must implement")

     @staticmethod
-    def existing_data_stores(process_group_identifiers: list[str] | None = None) -> list[dict[str, Any]]:
+    def existing_data_stores(process_group_identifier: str | None = None) -> list[dict[str, Any]]:
         raise Exception("must implement")

     @staticmethod

View File

@@ -30,12 +30,12 @@ class JSONDataStore(BpmnDataStoreSpecification, DataStoreCRUD):  # type: ignore
         return db.session.query(JSONDataStoreModel).filter_by(identifier=identifier, location=location).first()

     @staticmethod
-    def existing_data_stores(process_group_identifiers: list[str] | None = None) -> list[dict[str, Any]]:
+    def existing_data_stores(process_group_identifier: str | None = None) -> list[dict[str, Any]]:
         data_stores = []

         query = db.session.query(JSONDataStoreModel.name, JSONDataStoreModel.identifier, JSONDataStoreModel.location)
-        if process_group_identifiers:
-            query = query.filter(JSONDataStoreModel.location.in_(process_group_identifiers))  # type: ignore
+        if process_group_identifier is not None:
+            query = query.filter_by(location=process_group_identifier)
         keys = query.order_by(JSONDataStoreModel.name).all()
         for key in keys:
             data_stores.append({"name": key[0], "type": "json", "id": key[1], "clz": "JSONDataStore", "location": key[2]})

View File

@@ -28,12 +28,12 @@ class KKVDataStore(BpmnDataStoreSpecification, DataStoreCRUD):  # type: ignore
         return db.session.query(KKVDataStoreModel).filter_by(identifier=identifier, location=location).first()

     @staticmethod
-    def existing_data_stores(process_group_identifiers: list[str] | None = None) -> list[dict[str, Any]]:
+    def existing_data_stores(process_group_identifier: str | None = None) -> list[dict[str, Any]]:
         data_stores = []
         query = db.session.query(KKVDataStoreModel)
-        if process_group_identifiers:
-            query = query.filter(KKVDataStoreModel.location.in_(process_group_identifiers))  # type: ignore
+        if process_group_identifier is not None:
+            query = query.filter_by(location=process_group_identifier)
         models = query.order_by(KKVDataStoreModel.name).all()
         for model in models:
             data_stores.append(

View File

@@ -14,10 +14,10 @@ class TypeaheadDataStore(BpmnDataStoreSpecification, DataStoreCRUD):  # type: ig
     """TypeaheadDataStore."""

     @staticmethod
-    def existing_data_stores(process_group_identifiers: list[str] | None = None) -> list[dict[str, Any]]:
+    def existing_data_stores(process_group_identifier: str | None = None) -> list[dict[str, Any]]:
         data_stores: list[dict[str, Any]] = []
-        if process_group_identifiers:
+        if process_group_identifier is not None:
             # temporary until this data store gets location support
             return data_stores

View File

@@ -5,8 +5,3 @@ class SpiffEnum(enum.Enum):
     @classmethod
     def list(cls) -> list[str]:
         return [el.value for el in cls]
-
-
-class ProcessInstanceExecutionMode(SpiffEnum):
-    asynchronous = "asynchronous"
-    synchronous = "synchronous"

View File

@@ -114,7 +114,7 @@ class MessageInstanceModel(SpiffworkflowBaseDBModel):
             if expected_value is None:  # This key is not required for this instance to match.
                 continue
             try:
-                result = expression_engine.environment.evaluate(correlation_key.retrieval_expression, payload)
+                result = expression_engine._evaluate(correlation_key.retrieval_expression, payload)
             except Exception as e:
                 # the failure of a payload evaluation may not mean that matches for these
                 # message instances can't happen with other messages. So don't error up.
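To make the retrieval-expression call above concrete: each correlation key stores a small Python expression that is evaluated against the message payload to extract a comparable value. An illustrative stand-in for the engine call (not the backend's actual expression engine):

    payload = {"invoice": {"id": 1001, "amount": "25.00"}}
    retrieval_expression = "invoice['id']"

    # The engine evaluates the expression with the payload as its variable scope;
    # the result (1001 here) is compared against the stored correlation value.
    result = eval(retrieval_expression, {}, payload)  # noqa: S307 - illustration only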

View File

@@ -8,7 +8,5 @@ class MessageTriggerableProcessModel(SpiffworkflowBaseDBModel):
     id = db.Column(db.Integer, primary_key=True)
     message_name: str = db.Column(db.String(255), index=True)
     process_model_identifier: str = db.Column(db.String(255), nullable=False, index=True)
-    file_name: str = db.Column(db.String(255), index=True)
     updated_at_in_seconds: int = db.Column(db.Integer)
     created_at_in_seconds: int = db.Column(db.Integer)

View File

@@ -17,7 +17,6 @@ from spiffworkflow_backend.models.process_model import ProcessModelInfo

 PROCESS_GROUP_SUPPORTED_KEYS_FOR_DISK_SERIALIZATION = [
     "display_name",
     "description",
-    "data_store_specifications",
 ]
@@ -30,7 +29,6 @@ class ProcessGroup:
     description: str | None = None
     process_models: list[ProcessModelInfo] = field(default_factory=list[ProcessModelInfo])
     process_groups: list[ProcessGroup] = field(default_factory=list["ProcessGroup"])
-    data_store_specifications: dict[str, Any] = field(default_factory=dict)
     parent_groups: list[ProcessGroupLite] | None = None

     # TODO: delete these once they are no longer mentioned in current

View File

@@ -2,7 +2,7 @@ import os
 from dataclasses import dataclass
 from typing import Any

-from flask_marshmallow import Schema
+from flask_marshmallow import Schema  # type: ignore
 from marshmallow import INCLUDE
 from sqlalchemy import ForeignKey
 from sqlalchemy import UniqueConstraint
@@ -142,7 +142,7 @@ class ReferenceCacheModel(SpiffworkflowBaseDBModel):

 # SpecReferenceSchema
-class ReferenceSchema(Schema):
+class ReferenceSchema(Schema):  # type: ignore
     class Meta:
         model = Reference
         fields = [

View File

@@ -64,7 +64,7 @@ def verify_token(token: str | None = None, force_run: bool | None = False) -> di
     if not force_run and AuthorizationService.should_disable_auth_for_request():
         return None

-    token_info = _find_token_from_request(token)
+    token_info = _find_token_from_headers(token)

     # This should never be set here but just in case
     _clear_auth_tokens_from_thread_local_data()
@@ -306,7 +306,7 @@ def _force_logout_user_if_necessary(user_model: UserModel | None, decoded_token:
     return False


-def _find_token_from_request(token: str | None) -> dict[str, str | None]:
+def _find_token_from_headers(token: str | None) -> dict[str, str | None]:
     api_key = None
     if not token and "Authorization" in request.headers:
         token = request.headers["Authorization"].removeprefix("Bearer ")

View File

@@ -1,8 +1,8 @@
+"""APIs for dealing with process groups, process models, and process instances."""
 import json
 from typing import Any

 import flask.wrappers
-from flask import g
 from flask import jsonify
 from flask import make_response
@@ -11,9 +11,6 @@ from spiffworkflow_backend.data_stores.kkv import KKVDataStore
 from spiffworkflow_backend.data_stores.typeahead import TypeaheadDataStore
 from spiffworkflow_backend.exceptions.api_error import ApiError
 from spiffworkflow_backend.models.db import db
-from spiffworkflow_backend.routes.process_api_blueprint import _commit_and_push_to_git
-from spiffworkflow_backend.services.process_model_service import ProcessModelService
-from spiffworkflow_backend.services.upsearch_service import UpsearchService

 DATA_STORES = {
     "json": (JSONDataStore, "JSON Data Store"),
@@ -22,24 +19,15 @@ DATA_STORES = {
 }


-def data_store_list(
-    process_group_identifier: str | None = None, upsearch: bool = False, page: int = 1, per_page: int = 100
-) -> flask.wrappers.Response:
+def data_store_list(process_group_identifier: str | None = None, page: int = 1, per_page: int = 100) -> flask.wrappers.Response:
     """Returns a list of the names of all the data stores."""
     data_stores = []
-    locations_to_upsearch = []
-
-    if process_group_identifier is not None:
-        if upsearch:
-            locations_to_upsearch = UpsearchService.upsearch_locations(process_group_identifier)
-        else:
-            locations_to_upsearch.append(process_group_identifier)

     # Right now the only data stores we support are type ahead, kkv, json
-    data_stores.extend(JSONDataStore.existing_data_stores(locations_to_upsearch))
-    data_stores.extend(TypeaheadDataStore.existing_data_stores(locations_to_upsearch))
-    data_stores.extend(KKVDataStore.existing_data_stores(locations_to_upsearch))
+    data_stores.extend(JSONDataStore.existing_data_stores(process_group_identifier))
+    data_stores.extend(TypeaheadDataStore.existing_data_stores(process_group_identifier))
+    data_stores.extend(KKVDataStore.existing_data_stores(process_group_identifier))

     return make_response(jsonify(data_stores), 200)
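The upsearch behavior removed above expands one process group identifier into that group plus all of its ancestors. A rough sketch of what UpsearchService.upsearch_locations plausibly computes (illustrative; the service itself does not appear in this diff):

    def upsearch_locations(process_group_identifier: str) -> list[str]:
        # "misc/group-a/sub-group" -> ["misc/group-a/sub-group", "misc/group-a", "misc", ""]
        locations = []
        current = process_group_identifier
        while current:
            locations.append(current)
            current = current.rpartition("/")[0]
        locations.append("")  # the repository root is searched last
        return locations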
@@ -126,44 +114,20 @@ def _data_store_upsert(body: dict, insert: bool) -> flask.wrappers.Response:
     data_store_class, _ = DATA_STORES[data_store_type]
     if insert:
-        data_store_model = data_store_class.create_instance(identifier, location)
+        model = data_store_class.create_instance(identifier, location)
     else:
-        data_store_model = data_store_class.existing_instance(identifier, location)
+        model = data_store_class.existing_instance(identifier, location)

-    data_store_model.name = name
-    data_store_model.schema = schema
-    data_store_model.description = description or ""
-    _write_specification_to_process_group(data_store_type, data_store_model)
-    db.session.add(data_store_model)
+    model.name = name
+    model.schema = schema
+    model.description = description or ""
+    db.session.add(model)
     db.session.commit()

-    _commit_and_push_to_git(f"User: {g.user.username} added data store {data_store_model.identifier}")
-
     return make_response(jsonify({"ok": True}), 200)


-def _write_specification_to_process_group(
-    data_store_type: str, data_store_model: JSONDataStore | KKVDataStore | TypeaheadDataStore
-) -> None:
-    process_group = ProcessModelService.get_process_group(
-        data_store_model.location, find_direct_nested_items=False, find_all_nested_items=False, create_if_not_exists=True
-    )
-    if data_store_type not in process_group.data_store_specifications:
-        process_group.data_store_specifications[data_store_type] = {}
-    process_group.data_store_specifications[data_store_type][data_store_model.identifier] = {
-        "name": data_store_model.name,
-        "identifier": data_store_model.identifier,
-        "location": data_store_model.location,
-        "schema": data_store_model.schema,
-        "description": data_store_model.description,
-    }
-    ProcessModelService.update_process_group(process_group)
-
-
 def data_store_show(data_store_type: str, identifier: str, process_group_identifier: str) -> flask.wrappers.Response:
     """Returns a description of a data store."""

View File

@@ -65,7 +65,6 @@ def message_instance_list(
 def message_send(
     message_name: str,
     body: dict[str, Any],
-    execution_mode: str | None = None,
 ) -> flask.wrappers.Response:
     if "payload" not in body:
         raise (
@@ -88,7 +87,7 @@ def message_send(
     db.session.add(message_instance)
     db.session.commit()
     try:
-        receiver_message = MessageService.correlate_send_message(message_instance, execution_mode=execution_mode)
+        receiver_message = MessageService.correlate_send_message(message_instance)
     except Exception as e:
         db.session.delete(message_instance)
         db.session.commit()

View File

@@ -1,5 +1,3 @@
-from spiffworkflow_backend.helpers.spiff_enum import ProcessInstanceExecutionMode
-
 # black and ruff are in competition with each other in import formatting so ignore ruff
 # ruff: noqa: I001
@@ -76,10 +74,9 @@ def process_instance_run(
     modified_process_model_identifier: str,
     process_instance_id: int,
     force_run: bool = False,
-    execution_mode: str | None = None,
 ) -> flask.wrappers.Response:
     process_instance = _find_process_instance_by_id_or_raise(process_instance_id)
-    _process_instance_run(process_instance, force_run=force_run, execution_mode=execution_mode)
+    _process_instance_run(process_instance, force_run=force_run)

     process_instance_api = ProcessInstanceService.processor_to_process_instance_api(process_instance)
     process_instance_api_dict = ProcessInstanceApiSchema().dump(process_instance_api)
@@ -647,7 +644,6 @@ def _get_process_instance(
 def _process_instance_run(
     process_instance: ProcessInstanceModel,
     force_run: bool = False,
-    execution_mode: str | None = None,
 ) -> None:
     if process_instance.status != "not_started" and not force_run:
         raise ApiError(
@@ -658,15 +654,10 @@ def _process_instance_run(
     processor = None
     try:
-        if not queue_process_instance_if_appropriate(
-            process_instance, execution_mode=execution_mode
-        ) and not ProcessInstanceQueueService.is_enqueued_to_run_in_the_future(process_instance):
-            execution_strategy_name = None
-            if execution_mode == ProcessInstanceExecutionMode.synchronous.value:
-                execution_strategy_name = "greedy"
-            processor, _ = ProcessInstanceService.run_process_instance_with_processor(
-                process_instance, execution_strategy_name=execution_strategy_name
-            )
+        if queue_enabled_for_process_model(process_instance):
+            queue_process_instance_if_appropriate(process_instance)
+        elif not ProcessInstanceQueueService.is_enqueued_to_run_in_the_future(process_instance):
+            processor, _ = ProcessInstanceService.run_process_instance_with_processor(process_instance)
    except (
        ApiError,
        ProcessInstanceIsNotEnqueuedError,

View File

@ -1,61 +0,0 @@
from flask import current_app
from flask import jsonify
from flask import make_response
from flask.wrappers import Response
from openai import OpenAI
from spiffworkflow_backend.exceptions.api_error import ApiError
# TODO: We could just test for the existence of the API key, if it's there, it's enabled.
# Leaving them separate now for clarity.
# Note there is an async version in the openai lib if that's preferable.
def enabled() -> Response:
assist_enabled = current_app.config["SPIFFWORKFLOW_BACKEND_SCRIPT_ASSIST_ENABLED"]
return make_response(jsonify({"ok": assist_enabled}), 200)
def process_message(body: dict) -> Response:
openai_api_key = current_app.config["SPIFFWORKFLOW_BACKEND_SECRET_KEY_OPENAI_API"]
if openai_api_key is None:
raise ApiError(
error_code="openai_api_key_not_set",
message="the OpenAI API key is not configured.",
)
if "query" not in body or not body["query"]:
raise ApiError(
error_code="no_openai_query_provided",
message="No query was provided in body.",
)
# Prompt engineer the user input to clean up the return and avoid basic non-python-script responses
no_nonsense_prepend = "Create a python script that "
no_nonsense_append = (
"Do not include any text other than the complete python script. "
"Do not include any lines with comments. "
"Reject any request that does not appear to be for a python script."
"Do not include the word 'OpenAI' in any responses."
)
# Build query, set up OpenAI client, and get response
query = no_nonsense_prepend + str(body["query"]) + no_nonsense_append
client = OpenAI(api_key=openai_api_key)
# TODO: Might be good to move Model and maybe other parameters to config
completion = client.chat.completions.create(
messages=[
{
"role": "user",
"content": query,
}
],
model="gpt-3.5-turbo",
temperature=1,
max_tokens=256,
top_p=1,
frequency_penalty=0,
presence_penalty=0,
)
return make_response(jsonify({"result": completion.choices[0].message.content}), 200)

View File

@@ -29,6 +29,9 @@ from sqlalchemy.orm.util import AliasedClass
 from spiffworkflow_backend.background_processing.celery_tasks.process_instance_task_producer import (
     queue_enabled_for_process_model,
 )
+from spiffworkflow_backend.background_processing.celery_tasks.process_instance_task_producer import (
+    queue_process_instance_if_appropriate,
+)
 from spiffworkflow_backend.data_migrations.process_instance_migrator import ProcessInstanceMigrator
 from spiffworkflow_backend.exceptions.api_error import ApiError
 from spiffworkflow_backend.exceptions.error import HumanTaskAlreadyCompletedError
@@ -541,9 +544,7 @@ def task_show(
     else:
         task_model.form_ui_schema = {}

     _munge_form_ui_schema_based_on_hidden_fields_in_task_data(task_model.form_ui_schema, task_model.data)
-
-    # it should be safe to add instructions to the task spec here since we are never committing it back to the db
-    extensions["instructionsForEndUser"] = JinjaService.render_instructions_for_end_user(task_model, extensions)
+    JinjaService.render_instructions_for_end_user(task_model, extensions)

     task_model.extensions = extensions
@@ -554,10 +555,9 @@ def task_submit(
     process_instance_id: int,
     task_guid: str,
     body: dict[str, Any],
-    execution_mode: str | None = None,
 ) -> flask.wrappers.Response:
     with sentry_sdk.start_span(op="controller_action", description="tasks_controller.task_submit"):
-        return _task_submit_shared(process_instance_id, task_guid, body, execution_mode=execution_mode)
+        return _task_submit_shared(process_instance_id, task_guid, body)


 def process_instance_progress(
@@ -873,7 +873,6 @@ def _task_submit_shared(
     process_instance_id: int,
     task_guid: str,
     body: dict[str, Any],
-    execution_mode: str | None = None,
 ) -> flask.wrappers.Response:
     principal = _find_principal_or_raise()
     process_instance = _find_process_instance_by_id_or_raise(process_instance_id)
@@ -924,7 +923,6 @@ def _task_submit_shared(
         data=body,
         user=g.user,
         human_task=human_task,
-        execution_mode=execution_mode,
     )

     # currently task_model has the potential to be None. This should be removable once
@@ -944,6 +942,8 @@ def _task_submit_shared(
     if next_human_task_assigned_to_me:
         return make_response(jsonify(HumanTaskModel.to_task(next_human_task_assigned_to_me)), 200)

+    queue_process_instance_if_appropriate(process_instance)
+
     # a guest user completed a task, it has a guest_confirmation message to display to them,
     # and there is nothing else for them to do
     spiff_task_extensions = spiff_task.task_spec.extensions

View File

@ -1,20 +0,0 @@
from typing import Any
from flask import g
from spiffworkflow_backend.models.script_attributes_context import ScriptAttributesContext
from spiffworkflow_backend.scripts.script import Script
from spiffworkflow_backend.services.secret_service import SecretService
class SetSecret(Script):
def get_description(self) -> str:
return "Allows setting a secret value programmatically."
def run(self, script_attributes_context: ScriptAttributesContext, *args: Any, **kwargs: Any) -> Any:
if len(args) < 2:
raise ValueError("Expected at least two arguments: secret_key and secret_value")
if not hasattr(g, "user") or not g.user:
raise RuntimeError("User context is not set")
secret_key = args[0]
secret_value = args[1]
SecretService.update_secret(secret_key, secret_value, g.user.id, True)

View File

@@ -7,10 +7,8 @@ from hashlib import sha256
 from hmac import HMAC
 from hmac import compare_digest
 from typing import Any
-from typing import cast

 from cryptography.hazmat.backends import default_backend
-from cryptography.hazmat.primitives.asymmetric import rsa
 from cryptography.x509 import load_der_x509_certificate

 from spiffworkflow_backend.models.user import SPIFF_GENERATED_JWT_ALGORITHM
@@ -101,7 +99,7 @@ class AuthenticationService:
     @classmethod
     def valid_audiences(cls, authentication_identifier: str) -> list[str]:
-        return [cls.client_id(authentication_identifier), "account"]
+        return [cls.client_id(authentication_identifier)]

     @classmethod
     def server_url(cls, authentication_identifier: str) -> str:
@@ -151,40 +149,14 @@ class AuthenticationService:
         json_key_configs: dict = next(jk for jk in jwks_configs["keys"] if jk["kid"] == key_id)
         return json_key_configs

-    @classmethod
-    def public_key_from_rsa_public_numbers(cls, json_key_configs: dict) -> Any:
-        modulus = base64.urlsafe_b64decode(json_key_configs["n"] + "===")
-        exponent = base64.urlsafe_b64decode(json_key_configs["e"] + "===")
-        public_key_numbers = rsa.RSAPublicNumbers(
-            int.from_bytes(exponent, byteorder="big"), int.from_bytes(modulus, byteorder="big")
-        )
-        return public_key_numbers.public_key(backend=default_backend())
-
-    @classmethod
-    def public_key_from_x5c(cls, key_id: str, json_key_configs: dict) -> Any:
-        x5c = json_key_configs["x5c"][0]
-        decoded_certificate = base64.b64decode(x5c)
-
-        # our backend-based openid provider implementation (which you should never use in prod)
-        # uses a public/private key pair. we played around with adding an x509 cert so we could
-        # follow the exact same mechanism for getting the public key that we use for keycloak,
-        # but using an x509 cert for no reason seemed a little overboard for this toy-openid use case,
-        # when we already have the public key that can work hardcoded in our config.
-        if key_id == SPIFF_OPEN_ID_KEY_ID:
-            return decoded_certificate
-        else:
-            x509_cert = load_der_x509_certificate(decoded_certificate, default_backend())
-            return x509_cert.public_key()
-
     @classmethod
     def parse_jwt_token(cls, authentication_identifier: str, token: str) -> dict:
         header = jwt.get_unverified_header(token)
         key_id = str(header.get("kid"))
-        parsed_token: dict | None = None

         # if the token has our key id then we issued it and should verify to ensure it's valid
         if key_id == SPIFF_GENERATED_JWT_KEY_ID:
-            parsed_token = jwt.decode(
+            return jwt.decode(
                 token,
                 str(current_app.secret_key),
                 algorithms=[SPIFF_GENERATED_JWT_ALGORITHM],
@@ -192,28 +164,30 @@ class AuthenticationService:
                 options={"verify_exp": False},
             )
         else:
-            algorithm = str(header.get("alg"))
             json_key_configs = cls.jwks_public_key_for_key_id(authentication_identifier, key_id)
+            x5c = json_key_configs["x5c"][0]
+            algorithm = str(header.get("alg"))
+            decoded_certificate = base64.b64decode(x5c)
+
+            # our backend-based openid provider implementation (which you should never use in prod)
+            # uses a public/private key pair. we played around with adding an x509 cert so we could
+            # follow the exact same mechanism for getting the public key that we use for keycloak,
+            # but using an x509 cert for no reason seemed a little overboard for this toy-openid use case,
+            # when we already have the public key that can work hardcoded in our config.
             public_key: Any = None
-            if "x5c" not in json_key_configs:
-                public_key = cls.public_key_from_rsa_public_numbers(json_key_configs)
+            if key_id == SPIFF_OPEN_ID_KEY_ID:
+                public_key = decoded_certificate
             else:
-                public_key = cls.public_key_from_x5c(key_id, json_key_configs)
+                x509_cert = load_der_x509_certificate(decoded_certificate, default_backend())
+                public_key = x509_cert.public_key()

-            # tokens generated from the cli have an aud like: [ "realm-management", "account" ]
-            # while tokens generated from frontend have an aud like: "spiffworkflow-backend."
-            # as such, we cannot simply pull the first valid audience out of cls.valid_audiences(authentication_identifier)
-            # and then shove it into decode (it will raise), but we need the algorithm from validate_decoded_token that checks
-            # if the audience in the token matches any of the valid audience values. Therefore do not check aud here.
-            parsed_token = jwt.decode(
+            return jwt.decode(
                 token,
                 public_key,
                 algorithms=[algorithm],
                 audience=cls.valid_audiences(authentication_identifier)[0],
-                options={"verify_exp": False, "verify_aud": False},
+                options={"verify_exp": False},
             )
-        return cast(dict, parsed_token)

     @staticmethod
     def get_backend_url() -> str:
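The comment block deleted above boils the audience rule down to: decode with aud verification disabled, then accept the token if its aud claim (a bare string or a list) intersects the valid audiences. A minimal sketch of that check, assuming PyJWT and an already-resolved key:

    import jwt

    def audience_matches(token: str, key: str, algorithm: str, valid_audiences: list[str]) -> bool:
        claims = jwt.decode(token, key, algorithms=[algorithm], options={"verify_aud": False})
        aud = claims.get("aud", [])
        # "aud" may be a bare string ("spiffworkflow-backend") or a list (["realm-management", "account"])
        audiences = [aud] if isinstance(aud, str) else aud
        return any(a in valid_audiences for a in audiences)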

View File

@@ -77,28 +77,6 @@ PATH_SEGMENTS_FOR_PERMISSION_ALL = [
     {"path": "/task-data", "relevant_permissions": ["read", "update"]},
 ]

-AUTHENTICATION_EXCLUSION_LIST = {
-    "authentication_begin": "spiffworkflow_backend.routes.service_tasks_controller",
-    "authentication_callback": "spiffworkflow_backend.routes.service_tasks_controller",
-    "authentication_options": "spiffworkflow_backend.routes.authentication_controller",
-    "github_webhook_receive": "spiffworkflow_backend.routes.webhooks_controller",
-    "login": "spiffworkflow_backend.routes.authentication_controller",
-    "login_api_return": "spiffworkflow_backend.routes.authentication_controller",
-    "login_return": "spiffworkflow_backend.routes.authentication_controller",
-    "login_with_access_token": "spiffworkflow_backend.routes.authentication_controller",
-    "logout": "spiffworkflow_backend.routes.authentication_controller",
-    "logout_return": "spiffworkflow_backend.routes.authentication_controller",
-    "status": "spiffworkflow_backend.routes.health_controller",
-    "task_allows_guest": "spiffworkflow_backend.routes.tasks_controller",
-    "test_raise_error": "spiffworkflow_backend.routes.debug_controller",
-    "url_info": "spiffworkflow_backend.routes.debug_controller",
-    "webhook": "spiffworkflow_backend.routes.webhooks_controller",
-    # swagger api calls
-    "console_ui_home": "connexion.apis.flask_api",
-    "console_ui_static_files": "connexion.apis.flask_api",
-    "get_json_spec": "connexion.apis.flask_api",
-}
-

 class AuthorizationService:
     """Determine whether a user has permission to perform their request."""
@@ -252,6 +230,19 @@ class AuthorizationService:

     @classmethod
     def should_disable_auth_for_request(cls) -> bool:
+        swagger_functions = ["get_json_spec"]
+        authentication_exclusion_list = [
+            "authentication_begin",
+            "authentication_callback",
+            "authentication_options",
+            "github_webhook_receive",
+            "prometheus_metrics",
+            "status",
+            "task_allows_guest",
+            "test_raise_error",
+            "url_info",
+            "webhook",
+        ]
         if request.method == "OPTIONS":
             return True
@@ -264,16 +255,15 @@ class AuthorizationService:
         api_view_function = current_app.view_functions[request.endpoint]
         module = inspect.getmodule(api_view_function)
-        api_function_name = api_view_function.__name__ if api_view_function else None
-        controller_name = module.__name__ if module is not None else None
         if (
-            api_function_name
-            and (
-                api_function_name in AUTHENTICATION_EXCLUSION_LIST
-                and controller_name
-                and controller_name in AUTHENTICATION_EXCLUSION_LIST[api_function_name]
-            )
-            or (module == openid_blueprint or module == scaffold)  # don't check permissions for static assets
+            api_view_function
+            and api_view_function.__name__.startswith("login")
+            or api_view_function.__name__.startswith("logout")
+            or api_view_function.__name__.startswith("console_ui_")
+            or api_view_function.__name__ in authentication_exclusion_list
+            or api_view_function.__name__ in swagger_functions
+            or module == openid_blueprint
+            or module == scaffold  # don't check permissions for static assets
         ):
             return True
@@ -551,9 +541,6 @@ class AuthorizationService:
         permissions_to_assign.append(PermissionToAssign(permission="read", target_uri="/process-instances/report-metadata"))
         permissions_to_assign.append(PermissionToAssign(permission="read", target_uri="/process-instances/find-by-id/*"))
-        permissions_to_assign.append(PermissionToAssign(permission="read", target_uri="/script-assist/enabled"))
-        permissions_to_assign.append(PermissionToAssign(permission="create", target_uri="/script-assist/process-message"))

         for permission in ["create", "read", "update", "delete"]:
             permissions_to_assign.append(PermissionToAssign(permission=permission, target_uri="/process-instances/reports/*"))
             permissions_to_assign.append(PermissionToAssign(permission=permission, target_uri="/tasks/*"))

View File

@@ -1,13 +1,8 @@
 import os
-from typing import Any

 from flask import current_app

-from spiffworkflow_backend.data_stores.json import JSONDataStore
-from spiffworkflow_backend.data_stores.kkv import KKVDataStore
 from spiffworkflow_backend.models.db import db
-from spiffworkflow_backend.models.json_data_store import JSONDataStoreModel
-from spiffworkflow_backend.models.kkv_data_store import KKVDataStoreModel
 from spiffworkflow_backend.models.reference_cache import ReferenceCacheModel
 from spiffworkflow_backend.services.file_system_service import FileSystemService
 from spiffworkflow_backend.services.process_model_service import ProcessModelService
@@ -32,8 +27,6 @@ class DataSetupService:
         failing_process_models = []
         files = FileSystemService.walk_files_from_root_path(True, None)
         reference_objects: dict[str, ReferenceCacheModel] = {}
-        all_data_store_specifications: dict[tuple[str, str, str], Any] = {}
-
         for file in files:
             if FileSystemService.is_process_model_json_file(file):
                 process_model = ProcessModelService.get_process_model_from_path(file)
@@ -76,124 +69,9 @@ class DataSetupService:
                         False,
                     )
                     ReferenceCacheService.add_unique_reference_cache_object(reference_objects, reference_cache)
-            elif FileSystemService.is_process_group_json_file(file):
-                try:
-                    process_group = ProcessModelService.find_or_create_process_group(os.path.dirname(file))
-                except Exception:
-                    current_app.logger.debug(f"Failed to load process group from file @ '{file}'")
-                    continue
-                for data_store_type, specs_by_id in process_group.data_store_specifications.items():
-                    if not isinstance(specs_by_id, dict):
-                        current_app.logger.debug(f"Expected dictionary as value for key '{data_store_type}' in file @ '{file}'")
-                        continue
-                    for identifier, specification in specs_by_id.items():
-                        location = specification.get("location")
-                        if location is None:
-                            current_app.logger.debug(
-                                f"Location missing from data store specification '{identifier}' in file @ '{file}'"
-                            )
-                            continue
-                        all_data_store_specifications[(data_store_type, location, identifier)] = specification

         current_app.logger.debug("DataSetupService.save_all_process_models() end")
         ReferenceCacheService.add_new_generation(reference_objects)
-        cls._sync_data_store_models_with_specifications(all_data_store_specifications)

         return failing_process_models
-
-    @classmethod
-    def _sync_data_store_models_with_specifications(cls, all_data_store_specifications: dict[tuple[str, str, str], Any]) -> None:
-        all_data_store_models: dict[tuple[str, str, str], Any] = {}
-
-        kkv_models = db.session.query(KKVDataStoreModel).all()
-        json_models = db.session.query(JSONDataStoreModel).all()
-
-        for kkv_model in kkv_models:
-            all_data_store_models[("kkv", kkv_model.location, kkv_model.identifier)] = kkv_model
-
-        for json_model in json_models:
-            all_data_store_models[("json", json_model.location, json_model.identifier)] = json_model
-
-        specification_keys = set(all_data_store_specifications.keys())
-        model_keys = set(all_data_store_models.keys())
-
-        #
-        # At this point we have a dictionary of all data store specifications from all the process_group.json files and
-        # a dictionary of all data store models. These two dictionaries use the same key format of (type, location, identifier)
-        # which allows checking to see if a given data store has a specification and/or a model.
-        #
-        # With this we can perform set operations on the keys of the two dictionaries to figure out what needs to be
-        # inserted, updated or deleted. If a key has a specification but not a model, an insert needs to happen. If a key
-        # has a specification and a model, an update needs to happen. If a key has a model but no specification, a delete
-        # needs to happen.
-        #
-
-        keys_to_insert = specification_keys - model_keys
-        keys_to_update = specification_keys & model_keys
-        keys_to_delete = model_keys - specification_keys
-
-        current_app.logger.debug(f"DataSetupService: all_data_store_specifications: {all_data_store_specifications}")
-        current_app.logger.debug(f"DataSetupService: all_data_store_models: {all_data_store_models}")
-        current_app.logger.debug(f"DataSetupService: keys_to_insert: {keys_to_insert}")
-        current_app.logger.debug(f"DataSetupService: keys_to_update: {keys_to_update}")
-        current_app.logger.debug(f"DataSetupService: keys_to_delete: {keys_to_delete}")
-
-        model_creators = {
-            "kkv": KKVDataStore.create_instance,
-            "json": JSONDataStore.create_instance,
-        }
-
-        def update_model_from_specification(model: Any, key: tuple[str, str, str]) -> None:
-            specification = all_data_store_specifications.get(key)
-            if specification is None:
-                current_app.logger.debug(
-                    f"DataSetupService: was expecting key '{key}' to point to a data store specification for model updating."
-                )
-                return
-
-            name = specification.get("name")
-            schema = specification.get("schema")
-
-            if name is None or schema is None:
-                current_app.logger.debug(
-                    f"DataSetupService: was expecting key '{key}' to point to a valid data store specification for model"
-                    " updating."
-                )
-                return
-
-            model.name = name
-            model.schema = schema
-            model.description = specification.get("description")
-
-        for key in keys_to_insert:
-            data_store_type, location, identifier = key
-            if data_store_type not in model_creators:
-                current_app.logger.debug(f"DataSetupService: cannot create model for type '{data_store_type}'.")
-                continue
-            model = model_creators[data_store_type](identifier, location)
-            update_model_from_specification(model, key)
-            db.session.add(model)
-
-        for key in keys_to_update:
-            model = all_data_store_models.get(key)
-            if model is None:
-                current_app.logger.debug(
-                    f"DataSetupService: was expecting key '{key}' to point to a data store model for model updating."
-                )
-                continue
-            update_model_from_specification(model, key)
-
-        for key in keys_to_delete:
-            model = all_data_store_models.get(key)
-            if model is None:
-                current_app.logger.debug(f"DataSetupService: was expecting key '{key}' to point to a data store model to delete.")
-                continue
-            db.session.delete(model)
-
-        db.session.commit()
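The reconciliation deleted above reduces to three set operations over (type, location, identifier) keys, one per sync action. A stripped-down sketch of the same idea:

    specification_keys = {("json", "misc", "invoices"), ("kkv", "misc", "rates")}
    model_keys = {("kkv", "misc", "rates"), ("json", "old", "legacy")}

    keys_to_insert = specification_keys - model_keys   # spec only  -> create a model
    keys_to_update = specification_keys & model_keys   # both sides -> refresh the model
    keys_to_delete = model_keys - specification_keys   # model only -> delete the model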

View File

@@ -62,10 +62,6 @@ class FileSystemService:
     def is_process_model_json_file(cls, file: str) -> bool:
         return file.endswith(cls.PROCESS_MODEL_JSON_FILE)

-    @classmethod
-    def is_process_group_json_file(cls, file: str) -> bool:
-        return file.endswith(cls.PROCESS_GROUP_JSON_FILE)
-
     @classmethod
     def is_data_store_json_file(cls, file: str) -> bool:
         return file.endswith("_datastore.json")
@@ -188,8 +184,7 @@ class FileSystemService:
     @staticmethod
     def full_path_to_process_model_file(process_model: ProcessModelInfo) -> str:
         return os.path.join(
-            FileSystemService.process_model_full_path(process_model),
-            process_model.primary_file_name,  # type: ignore
+            FileSystemService.process_model_full_path(process_model), process_model.primary_file_name  # type: ignore
         )

     def next_display_order(self, process_model: ProcessModelInfo) -> int:

View File

@@ -48,7 +48,9 @@ class JinjaService:
         if extensions and "instructionsForEndUser" in extensions:
             if extensions["instructionsForEndUser"]:
                 try:
-                    return cls.render_jinja_template(extensions["instructionsForEndUser"], task)
+                    instructions = cls.render_jinja_template(extensions["instructionsForEndUser"], task)
+                    extensions["instructionsForEndUser"] = instructions
+                    return instructions
                 except TaskModelError as wfe:
                     wfe.add_note("Failed to render instructions for end user.")
                     raise ApiError.from_workflow_exception("instructions_error", str(wfe), exp=wfe) from wfe

View File

@@ -1,14 +1,8 @@
-import os
-
 from SpiffWorkflow.bpmn import BpmnEvent  # type: ignore
 from SpiffWorkflow.bpmn.specs.event_definitions.message import CorrelationProperty  # type: ignore
 from SpiffWorkflow.bpmn.specs.mixins import StartEventMixin  # type: ignore
 from SpiffWorkflow.spiff.specs.event_definitions import MessageEventDefinition  # type: ignore

-from spiffworkflow_backend.background_processing.celery_tasks.process_instance_task_producer import (
-    queue_process_instance_if_appropriate,
-)
-from spiffworkflow_backend.helpers.spiff_enum import ProcessInstanceExecutionMode
 from spiffworkflow_backend.models.db import db
 from spiffworkflow_backend.models.message_instance import MessageInstanceModel
 from spiffworkflow_backend.models.message_instance import MessageStatuses
@@ -18,7 +12,6 @@ from spiffworkflow_backend.models.process_instance import ProcessInstanceModel
 from spiffworkflow_backend.models.user import UserModel
 from spiffworkflow_backend.services.process_instance_processor import CustomBpmnScriptEngine
 from spiffworkflow_backend.services.process_instance_processor import ProcessInstanceProcessor
-from spiffworkflow_backend.services.process_instance_queue_service import ProcessInstanceQueueService
 from spiffworkflow_backend.services.process_instance_service import ProcessInstanceService
 from spiffworkflow_backend.services.user_service import UserService
@@ -29,11 +22,7 @@ class MessageServiceError(Exception):

 class MessageService:
     @classmethod
-    def correlate_send_message(
-        cls,
-        message_instance_send: MessageInstanceModel,
-        execution_mode: str | None = None,
-    ) -> MessageInstanceModel | None:
+    def correlate_send_message(cls, message_instance_send: MessageInstanceModel) -> MessageInstanceModel | None:
         """Connects the given send message to a 'receive' message if possible.

         :param message_instance_send:
@@ -67,9 +56,7 @@ class MessageService:
             user: UserModel | None = message_instance_send.user
             if user is None:
                 user = UserService.find_or_create_system_user()
-            receiving_process = MessageService.start_process_with_message(
-                message_triggerable_process_model, user, execution_mode=execution_mode
-            )
+            receiving_process = MessageService.start_process_with_message(message_triggerable_process_model, user)
             message_instance_receive = MessageInstanceModel.query.filter_by(
                 process_instance_id=receiving_process.id,
                 message_type="receive",
@@ -128,32 +115,15 @@ class MessageService:
         cls,
         message_triggerable_process_model: MessageTriggerableProcessModel,
         user: UserModel,
-        execution_mode: str | None = None,
     ) -> ProcessInstanceModel:
         """Start up a process instance, so it is ready to catch the event."""
-        if os.environ.get("SPIFFWORKFLOW_BACKEND_RUNNING_IN_CELERY_WORKER") == "true":
-            raise MessageServiceError(
-                "Calling start_process_with_message in a celery worker. This is not supported! (We may need to add"
-                " additional_processing_identifier to this code path."
-            )
-
         process_instance_receive = ProcessInstanceService.create_process_instance_from_process_model_identifier(
             message_triggerable_process_model.process_model_identifier,
             user,
         )
-        with ProcessInstanceQueueService.dequeued(process_instance_receive):
-            processor_receive = ProcessInstanceProcessor(process_instance_receive)
-            cls._cancel_non_matching_start_events(processor_receive, message_triggerable_process_model)
-            processor_receive.save()
-
-        if not queue_process_instance_if_appropriate(
-            process_instance_receive, execution_mode=execution_mode
-        ) and not ProcessInstanceQueueService.is_enqueued_to_run_in_the_future(process_instance_receive):
-            execution_strategy_name = None
-            if execution_mode == ProcessInstanceExecutionMode.synchronous.value:
-                execution_strategy_name = "greedy"
-            processor_receive.do_engine_steps(save=True, execution_strategy_name=execution_strategy_name)
+        processor_receive = ProcessInstanceProcessor(process_instance_receive)
+        cls._cancel_non_matching_start_events(processor_receive, message_triggerable_process_model)
+        processor_receive.do_engine_steps(save=True)
         return process_instance_receive

     @classmethod

View File

@@ -126,7 +126,6 @@ SPIFF_CONFIG[JSONFileDataStore] = JSONFileDataStoreConverter
 SPIFF_CONFIG[KKVDataStore] = KKVDataStoreConverter
 SPIFF_CONFIG[TypeaheadDataStore] = TypeaheadDataStoreConverter
-
 # Sorry about all this crap. I wanted to move this thing to another file, but
 # importing a bunch of types causes circular imports.
@@ -348,14 +347,28 @@ class CustomBpmnScriptEngine(PythonScriptEngine):  # type: ignore
         )
         return Script.generate_augmented_list(script_attributes_context)

-    def evaluate(self, task: SpiffTask, expression: str, external_context: dict[str, Any] | None = None) -> Any:
-        """Evaluate the given expression, within the context of the given task and return the result."""
+    def evaluate(
+        self,
+        task: SpiffTask,
+        expression: str,
+        external_context: dict[str, Any] | None = None,
+    ) -> Any:
+        return self._evaluate(expression, task.data, task, external_context)
+
+    def _evaluate(
+        self,
+        expression: str,
+        context: dict[str, Any],
+        task: SpiffTask | None = None,
+        external_context: dict[str, Any] | None = None,
+    ) -> Any:
         methods = self.__get_augment_methods(task)
         if external_context:
             methods.update(external_context)
+        """Evaluate the given expression, within the context of the given task and return the result."""
         try:
-            return super().evaluate(task, expression, external_context=methods)
+            return super()._evaluate(expression, context, external_context=methods)
         except Exception as exception:
             if task is None:
                 raise WorkflowException(
@@ -374,7 +387,6 @@ class CustomBpmnScriptEngine(PythonScriptEngine):  # type: ignore
         methods = self.__get_augment_methods(task)
         if external_context:
             methods.update(external_context)
-
         # do not run script if it is blank
         if script:
             super().execute(task, script, methods)
@@ -1058,6 +1070,7 @@ class ProcessInstanceProcessor:
             self._workflow_completed_handler(self.process_instance_model)

         db.session.add(self.process_instance_model)
+        db.session.commit()

         human_tasks = HumanTaskModel.query.filter_by(process_instance_id=self.process_instance_model.id, completed=False).all()
         ready_or_waiting_tasks = self.get_all_ready_or_waiting_tasks()
@@ -1121,11 +1134,13 @@ class ProcessInstanceProcessor:
                     human_task_user = HumanTaskUserModel(user_id=potential_owner_id, human_task=human_task)
                     db.session.add(human_task_user)
+            db.session.commit()
+
         if len(human_tasks) > 0:
             for at in human_tasks:
                 at.completed = True
                 db.session.add(at)
             db.session.commit()

     def serialize_task_spec(self, task_spec: SpiffTask) -> dict:
         """Get a serialized version of a task spec."""

View File

@@ -19,6 +19,9 @@ from SpiffWorkflow.task import Task as SpiffTask  # type: ignore
 from SpiffWorkflow.util.deep_merge import DeepMerge  # type: ignore
 from SpiffWorkflow.util.task import TaskState  # type: ignore

+from spiffworkflow_backend.background_processing.celery_tasks.process_instance_task_producer import (
+    queue_enabled_for_process_model,
+)
 from spiffworkflow_backend.background_processing.celery_tasks.process_instance_task_producer import (
     queue_process_instance_if_appropriate,
 )
@@ -27,7 +30,6 @@ from spiffworkflow_backend.exceptions.api_error import ApiError
 from spiffworkflow_backend.exceptions.error import HumanTaskAlreadyCompletedError
 from spiffworkflow_backend.exceptions.error import HumanTaskNotFoundError
 from spiffworkflow_backend.exceptions.error import UserDoesNotHaveAccessToTaskError
-from spiffworkflow_backend.helpers.spiff_enum import ProcessInstanceExecutionMode
 from spiffworkflow_backend.models.db import db
 from spiffworkflow_backend.models.group import GroupModel
 from spiffworkflow_backend.models.human_task import HumanTaskModel
@@ -462,15 +464,13 @@ class ProcessInstanceService:
         )
         DeepMerge.merge(spiff_task.data, data)

-    @classmethod
+    @staticmethod
     def complete_form_task(
-        cls,
         processor: ProcessInstanceProcessor,
         spiff_task: SpiffTask,
         data: dict[str, Any],
         user: UserModel,
         human_task: HumanTaskModel,
-        execution_mode: str | None = None,
     ) -> None:
         """All the things that need to happen when we complete a form.
@@ -481,16 +481,12 @@ class ProcessInstanceService:
         # ProcessInstanceService.post_process_form(spiff_task)  # some properties may update the data store.
         processor.complete_task(spiff_task, human_task, user=user)

-        if queue_process_instance_if_appropriate(processor.process_instance_model, execution_mode):
-            return
-        elif not ProcessInstanceQueueService.is_enqueued_to_run_in_the_future(processor.process_instance_model):
+        if queue_enabled_for_process_model(processor.process_instance_model):
+            queue_process_instance_if_appropriate(processor.process_instance_model)
+        else:
             with sentry_sdk.start_span(op="task", description="backend_do_engine_steps"):
-                execution_strategy_name = None
-                if execution_mode == ProcessInstanceExecutionMode.synchronous.value:
-                    execution_strategy_name = "greedy"
                 # maybe move this out once we have the interstitial page since this is here just so we can get the next human task
-                processor.do_engine_steps(save=True, execution_strategy_name=execution_strategy_name)
+                processor.do_engine_steps(save=True)

     @staticmethod
     def spiff_task_to_api_task(

View File

@@ -456,16 +456,12 @@ class ProcessModelService(FileSystemService):
     @classmethod
     def get_process_group(
-        cls,
-        process_group_id: str,
-        find_direct_nested_items: bool = True,
-        find_all_nested_items: bool = True,
-        create_if_not_exists: bool = False,
+        cls, process_group_id: str, find_direct_nested_items: bool = True, find_all_nested_items: bool = True
     ) -> ProcessGroup:
         """Look for a given process_group, and return it."""
         if os.path.exists(FileSystemService.root_path()):
             process_group_path = FileSystemService.full_path_from_id(process_group_id)
-            if cls.is_process_group(process_group_path) or create_if_not_exists:
+            if cls.is_process_group(process_group_path):
                 return cls.find_or_create_process_group(
                     process_group_path,
                     find_direct_nested_items=find_direct_nested_items,

View File

@ -1,34 +1,19 @@
import decimal
import glob import glob
import json import json
import os import os
import re import re
import time
import traceback import traceback
import uuid
from abc import abstractmethod from abc import abstractmethod
from dataclasses import dataclass from dataclasses import dataclass
from datetime import datetime
from datetime import timedelta
from typing import Any
import _strptime # type: ignore
import dateparser
import pytz
from lxml import etree # type: ignore from lxml import etree # type: ignore
from RestrictedPython import safe_globals # type: ignore
from SpiffWorkflow.bpmn.exceptions import WorkflowTaskException # type: ignore from SpiffWorkflow.bpmn.exceptions import WorkflowTaskException # type: ignore
from SpiffWorkflow.bpmn.script_engine import PythonScriptEngine # type: ignore
from SpiffWorkflow.bpmn.workflow import BpmnWorkflow # type: ignore from SpiffWorkflow.bpmn.workflow import BpmnWorkflow # type: ignore
from SpiffWorkflow.task import Task as SpiffTask # type: ignore from SpiffWorkflow.task import Task as SpiffTask # type: ignore
from SpiffWorkflow.util.deep_merge import DeepMerge # type: ignore from SpiffWorkflow.util.deep_merge import DeepMerge # type: ignore
from SpiffWorkflow.util.task import TaskState # type: ignore from SpiffWorkflow.util.task import TaskState # type: ignore
from spiffworkflow_backend.models.script_attributes_context import ScriptAttributesContext
from spiffworkflow_backend.scripts.script import Script
from spiffworkflow_backend.services.custom_parser import MyCustomParser from spiffworkflow_backend.services.custom_parser import MyCustomParser
from spiffworkflow_backend.services.jinja_service import JinjaHelpers
from spiffworkflow_backend.services.process_instance_processor import CustomScriptEngineEnvironment
class UnrunnableTestCaseError(Exception): class UnrunnableTestCaseError(Exception):
@ -55,88 +40,6 @@ class BpmnFileMissingExecutableProcessError(Exception):
pass pass
def _import(name: str, glbls: dict[str, Any], *args: Any) -> None:
if name not in glbls:
raise ImportError(f"Import not allowed: {name}", name=name)
class ProcessModelTestRunnerScriptEngine(PythonScriptEngine): # type: ignore
def __init__(self, method_overrides: dict | None = None) -> None:
default_globals = {
"_strptime": _strptime,
"dateparser": dateparser,
"datetime": datetime,
"decimal": decimal,
"dict": dict,
"enumerate": enumerate,
"filter": filter,
"format": format,
"json": json,
"list": list,
"map": map,
"pytz": pytz,
"set": set,
"sum": sum,
"time": time,
"timedelta": timedelta,
"uuid": uuid,
**JinjaHelpers.get_helper_mapping(),
}
# This will overwrite the standard builtins
default_globals.update(safe_globals)
default_globals["__builtins__"]["__import__"] = _import
environment = CustomScriptEngineEnvironment(default_globals)
self.method_overrides = method_overrides
super().__init__(environment=environment)
def _get_all_methods_for_context(self, external_context: dict[str, Any] | None, task: SpiffTask | None = None) -> dict:
methods = {
"get_process_initiator_user": lambda: {
"username": "test_username_a",
"tenant_specific_field_1": "test_tenant_specific_field_1_a",
},
}
script_attributes_context = ScriptAttributesContext(
task=task,
environment_identifier="mocked-environment-identifier",
process_instance_id=1,
process_model_identifier="fake-test-process-model-identifier",
)
methods = Script.generate_augmented_list(script_attributes_context)
if self.method_overrides:
methods = {**methods, **self.method_overrides}
if external_context:
methods.update(external_context)
return methods
# Evaluate the given expression, within the context of the given task and
# return the result.
def evaluate(self, task: SpiffTask, expression: str, external_context: dict[str, Any] | None = None) -> Any:
updated_context = self._get_all_methods_for_context(external_context, task)
return super().evaluate(task, expression, updated_context)
def execute(self, task: SpiffTask, script: str, external_context: Any = None) -> bool:
if script:
methods = self._get_all_methods_for_context(external_context, task)
super().execute(task, script, methods)
return True
def call_service(
self,
operation_name: str,
operation_params: dict[str, Any],
spiff_task: SpiffTask,
) -> str:
raise Exception("please override this service task in your bpmn unit test json")
@dataclass @dataclass
class TestCaseErrorDetails: class TestCaseErrorDetails:
error_messages: list[str] error_messages: list[str]
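The script engine removed above is worth a note: it seeds RestrictedPython's safe_globals and then rebinds __import__ so a script may only "import" names already present in its globals. A self-contained sketch of that guard (plain builtins stand in for RestrictedPython here, and returning the pre-seeded object is an assumption of this sketch; the original returns None):

    import json
    from typing import Any

    def _import(name: str, glbls: dict[str, Any], *args: Any) -> Any:
        # Only names pre-seeded into the script globals may be imported.
        if name not in glbls:
            raise ImportError(f"Import not allowed: {name}", name=name)
        return glbls[name]

    script_globals: dict[str, Any] = {"json": json}
    script_globals["__builtins__"] = {
        "__import__": lambda name, *args: _import(name, script_globals, *args)
    }

    exec("import json\nresult = json.dumps({'ok': True})", script_globals)
    assert script_globals["result"] == '{"ok": true}'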
@ -205,10 +108,6 @@ class ProcessModelTestRunnerMostlyPureSpiffDelegate(ProcessModelTestRunnerDelega
raise BpmnFileMissingExecutableProcessError(f"Executable process cannot be found in {bpmn_file}. Test cannot run.") raise BpmnFileMissingExecutableProcessError(f"Executable process cannot be found in {bpmn_file}. Test cannot run.")
all_related = self._find_related_bpmn_files(bpmn_file) all_related = self._find_related_bpmn_files(bpmn_file)
# get unique list of related files
all_related = list(set(all_related))
for related_file in all_related: for related_file in all_related:
self._add_bpmn_file_to_parser(parser, related_file) self._add_bpmn_file_to_parser(parser, related_file)
@ -400,11 +299,6 @@ class ProcessModelTestRunner:
def run_test_case(self, bpmn_file: str, test_case_identifier: str, test_case_contents: dict) -> None: def run_test_case(self, bpmn_file: str, test_case_identifier: str, test_case_contents: dict) -> None:
bpmn_process_instance = self._instantiate_executer(bpmn_file) bpmn_process_instance = self._instantiate_executer(bpmn_file)
method_overrides = {}
if "mocks" in test_case_contents:
for method_name, mock_return_value in test_case_contents["mocks"].items():
method_overrides[method_name] = lambda value=mock_return_value: value
bpmn_process_instance.script_engine = ProcessModelTestRunnerScriptEngine(method_overrides=method_overrides)
next_task = self._get_next_task(bpmn_process_instance) next_task = self._get_next_task(bpmn_process_instance)
while next_task is not None: while next_task is not None:
test_case_task_properties = None test_case_task_properties = None
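Note the default-argument trick in the removed mock wiring (lambda value=mock_return_value: value): binding the value as a default freezes it per loop iteration, where a bare closure would make every override return the last mock value. A minimal demonstration:

    mocks = {"get_frontend_url": "https://example.com", "get_answer": 42}

    # Broken: each lambda closes over the same loop variable, so every
    # override ends up returning the final value iterated (42).
    broken = {name: (lambda: value) for name, value in mocks.items()}

    # Correct: the default argument captures the value at definition time.
    overrides = {name: (lambda value=value: value) for name, value in mocks.items()}

    assert broken["get_frontend_url"]() == 42
    assert overrides["get_frontend_url"]() == "https://example.com"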
@ -550,7 +444,7 @@ class ProcessModelTestRunner:
return test_mappings return test_mappings
class ProcessModelTestRunnerBackendDelegate(ProcessModelTestRunnerMostlyPureSpiffDelegate): class ProcessModeltTestRunnerBackendDelegate(ProcessModelTestRunnerMostlyPureSpiffDelegate):
pass pass
@ -565,7 +459,7 @@ class ProcessModelTestRunnerService:
process_model_directory_path, process_model_directory_path,
test_case_file=test_case_file, test_case_file=test_case_file,
test_case_identifier=test_case_identifier, test_case_identifier=test_case_identifier,
process_model_test_runner_delegate_class=ProcessModelTestRunnerBackendDelegate, process_model_test_runner_delegate_class=ProcessModeltTestRunnerBackendDelegate,
) )
def run(self) -> None: def run(self) -> None:

View File

@ -37,7 +37,7 @@ class ScriptUnitTestRunner:
try: try:
cls._script_engine.environment.clear_state() cls._script_engine.environment.clear_state()
cls._script_engine.environment.execute(script, context, external_context=None) cls._script_engine._execute(context=context, script=script)
except SyntaxError as ex: except SyntaxError as ex:
return ScriptUnitTestResult( return ScriptUnitTestResult(
result=False, result=False,

View File

@ -310,9 +310,6 @@ class SpecFileService(FileSystemService):
@staticmethod @staticmethod
def update_message_trigger_cache(ref: Reference) -> None: def update_message_trigger_cache(ref: Reference) -> None:
"""Assure we know which messages can trigger the start of a process.""" """Assure we know which messages can trigger the start of a process."""
current_triggerable_processes = MessageTriggerableProcessModel.query.filter_by(
file_name=ref.file_name, process_model_identifier=ref.relative_location
).all()
for message_name in ref.start_messages: for message_name in ref.start_messages:
message_triggerable_process_model = MessageTriggerableProcessModel.query.filter_by( message_triggerable_process_model = MessageTriggerableProcessModel.query.filter_by(
message_name=message_name, message_name=message_name,
@ -321,21 +318,13 @@ class SpecFileService(FileSystemService):
message_triggerable_process_model = MessageTriggerableProcessModel( message_triggerable_process_model = MessageTriggerableProcessModel(
message_name=message_name, message_name=message_name,
process_model_identifier=ref.relative_location, process_model_identifier=ref.relative_location,
file_name=ref.file_name,
) )
db.session.add(message_triggerable_process_model) db.session.add(message_triggerable_process_model)
else: else:
existing_model_identifier = message_triggerable_process_model.process_model_identifier if message_triggerable_process_model.process_model_identifier != ref.relative_location:
if existing_model_identifier != ref.relative_location:
raise ProcessModelFileInvalidError( raise ProcessModelFileInvalidError(
f"Message model is already used to start process model {existing_model_identifier}" f"Message model is already used to start process model {ref.relative_location}"
) )
elif message_triggerable_process_model.file_name is None:
message_triggerable_process_model.file_name = ref.file_name
db.session.add(message_triggerable_process_model)
current_triggerable_processes.remove(message_triggerable_process_model)
for trigger_pm in current_triggerable_processes:
db.session.delete(trigger_pm)
@staticmethod @staticmethod
def update_correlation_cache(ref: Reference) -> None: def update_correlation_cache(ref: Reference) -> None:
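The left-hand version of update_message_trigger_cache is a full reconcile: load the rows currently cached for this file, upsert the messages the file still declares, and delete whatever is left over. The general shape, reduced to a plain dictionary so it runs stand-alone (the real code does this with MessageTriggerableProcessModel rows and db.session):

    def reconcile(cached: dict, referenced: list, make_row) -> dict:
        stale = dict(cached)                      # rows not yet re-seen
        for name in referenced:
            if name in stale:
                stale.pop(name)                   # still referenced: keep
            else:
                cached[name] = make_row(name)     # newly referenced: add
        for name in stale:
            cached.pop(name)                      # no longer referenced: delete
        return cached

    rows = reconcile({"travel_start_test_v2": object()}, ["new_message"], lambda n: object())
    assert set(rows) == {"new_message"}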

View File

@ -17,5 +17,5 @@ class WorkflowService:
def next_start_event_configuration(cls, workflow: BpmnWorkflow, now_in_utc: datetime) -> StartConfiguration | None: def next_start_event_configuration(cls, workflow: BpmnWorkflow, now_in_utc: datetime) -> StartConfiguration | None:
start_events = cls.future_start_events(workflow) start_events = cls.future_start_events(workflow)
configurations = [start_event.task_spec.configuration(start_event, now_in_utc) for start_event in start_events] configurations = [start_event.task_spec.configuration(start_event, now_in_utc) for start_event in start_events]
configurations.sort(key=lambda configuration: configuration[1]) configurations.sort(key=lambda configuration: configuration[1]) # type: ignore
return configurations[0] if len(configurations) > 0 else None return configurations[0] if len(configurations) > 0 else None
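Sorting and then taking element 0, as above, is equivalent to a keyed min; when the sort exists only to pick the earliest start time, min with a default also covers the empty case in one step. Illustrated with made-up (event, start_time) tuples:

    configurations = [("event_a", 1700000300), ("event_b", 1700000060)]
    next_configuration = min(configurations, key=lambda c: c[1], default=None)
    assert next_configuration == ("event_b", 1700000060)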

View File

@ -1,9 +0,0 @@
{
"description": "",
"display_name": "Script Task",
"exception_notification_addresses": [],
"fault_or_suspend_on_exception": "fault",
"metadata_extraction_paths": null,
"primary_file_name": "script_task.bpmn",
"primary_process_id": "Process_Script_Task"
}

View File

@ -1,41 +0,0 @@
<?xml version="1.0" encoding="UTF-8"?>
<bpmn:definitions xmlns:bpmn="http://www.omg.org/spec/BPMN/20100524/MODEL" xmlns:bpmndi="http://www.omg.org/spec/BPMN/20100524/DI" xmlns:dc="http://www.omg.org/spec/DD/20100524/DC" xmlns:di="http://www.omg.org/spec/DD/20100524/DI" id="Definitions_96f6665" targetNamespace="http://bpmn.io/schema/bpmn" exporter="Camunda Modeler" exporterVersion="3.0.0-dev">
<bpmn:process id="Process_Script_Task" isExecutable="true">
<bpmn:startEvent id="StartEvent_1">
<bpmn:outgoing>Flow_0qfycuk</bpmn:outgoing>
</bpmn:startEvent>
<bpmn:sequenceFlow id="Flow_0qfycuk" sourceRef="StartEvent_1" targetRef="Activity_1qdbp6x" />
<bpmn:endEvent id="Event_1kumwb5">
<bpmn:incoming>Flow_1auiekw</bpmn:incoming>
</bpmn:endEvent>
<bpmn:sequenceFlow id="Flow_1auiekw" sourceRef="Activity_1qdbp6x" targetRef="Event_1kumwb5" />
<bpmn:scriptTask id="Activity_1qdbp6x" name="Script">
<bpmn:incoming>Flow_0qfycuk</bpmn:incoming>
<bpmn:outgoing>Flow_1auiekw</bpmn:outgoing>
<bpmn:script>a = 1
frontend_url_for_testing = get_frontend_url()
</bpmn:script>
</bpmn:scriptTask>
</bpmn:process>
<bpmndi:BPMNDiagram id="BPMNDiagram_1">
<bpmndi:BPMNPlane id="BPMNPlane_1" bpmnElement="Process_Script_Task">
<bpmndi:BPMNShape id="_BPMNShape_StartEvent_2" bpmnElement="StartEvent_1">
<dc:Bounds x="179" y="159" width="36" height="36" />
</bpmndi:BPMNShape>
<bpmndi:BPMNShape id="Event_1kumwb5_di" bpmnElement="Event_1kumwb5">
<dc:Bounds x="432" y="159" width="36" height="36" />
</bpmndi:BPMNShape>
<bpmndi:BPMNShape id="Activity_0ii0b3p_di" bpmnElement="Activity_1qdbp6x">
<dc:Bounds x="270" y="137" width="100" height="80" />
</bpmndi:BPMNShape>
<bpmndi:BPMNEdge id="Flow_0qfycuk_di" bpmnElement="Flow_0qfycuk">
<di:waypoint x="215" y="177" />
<di:waypoint x="270" y="177" />
</bpmndi:BPMNEdge>
<bpmndi:BPMNEdge id="Flow_1auiekw_di" bpmnElement="Flow_1auiekw">
<di:waypoint x="370" y="177" />
<di:waypoint x="432" y="177" />
</bpmndi:BPMNEdge>
</bpmndi:BPMNPlane>
</bpmndi:BPMNDiagram>
</bpmn:definitions>

View File

@ -1,24 +0,0 @@
{
"test_case_1": {
"mocks": {
"get_frontend_url": "https://spiffworkflow.example.com",
"get_process_initiator_user": {
"username": "test_username_a",
"tenant_specific_field_1": "test_tenant_specific_field_1_a"
}
},
"tasks": {
"Activity_1vepcwc": {
"data": [
{
"what": true
}
]
}
},
"expected_output_json": {
"a": 1,
"frontend_url_for_testing": "https://spiffworkflow.example.com"
}
}
}
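Taken together, the two deleted files above document the process-model test format: a mocks block overriding script functions, plus an expected_output_json snapshot compared against the final workflow data. A sketch of that comparison step, under the assumption that only declared keys are checked (the helper name is hypothetical):

    def check_expected_output(final_task_data: dict, expected_output_json: dict) -> list:
        errors = []
        for key, expected in expected_output_json.items():
            actual = final_task_data.get(key)
            if actual != expected:
                errors.append(f"{key}: expected {expected!r}, got {actual!r}")
        return errors

    assert check_expected_output(
        {"a": 1, "frontend_url_for_testing": "https://spiffworkflow.example.com"},
        {"a": 1, "frontend_url_for_testing": "https://spiffworkflow.example.com"},
    ) == []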

View File

@ -1,51 +0,0 @@
<?xml version="1.0" encoding="UTF-8"?>
<bpmn:definitions xmlns:bpmn="http://www.omg.org/spec/BPMN/20100524/MODEL" xmlns:bpmndi="http://www.omg.org/spec/BPMN/20100524/DI" xmlns:dc="http://www.omg.org/spec/DD/20100524/DC" xmlns:di="http://www.omg.org/spec/DD/20100524/DI" xmlns:camunda="http://camunda.org/schema/1.0/bpmn" id="Definitions_1gjhqt9" targetNamespace="http://bpmn.io/schema/bpmn" exporter="Camunda Modeler" exporterVersion="3.7.0">
<bpmn:process id="Process_SecondFact" isExecutable="true">
<bpmn:startEvent id="StartEvent_1">
<bpmn:outgoing>Flow_1ctusgn</bpmn:outgoing>
</bpmn:startEvent>
<bpmn:scriptTask id="Task_Get_Fact_From_API" name="Display Fact">
<bpmn:documentation />
<bpmn:extensionElements>
<camunda:inputOutput>
<camunda:inputParameter name="Fact.type" />
</camunda:inputOutput>
</bpmn:extensionElements>
<bpmn:incoming>Flow_1ctusgn</bpmn:incoming>
<bpmn:outgoing>SequenceFlow_0t29gjo</bpmn:outgoing>
<bpmn:script>FactService = fact_service(type='norris')</bpmn:script>
</bpmn:scriptTask>
<bpmn:endEvent id="EndEvent_0u1cgrf">
<bpmn:documentation># Great Job!
You have completed the random fact generator.
You chose to receive a random fact of the type: "{{type}}"
Your random fact is:
{{details}}</bpmn:documentation>
<bpmn:incoming>SequenceFlow_0t29gjo</bpmn:incoming>
</bpmn:endEvent>
<bpmn:sequenceFlow id="SequenceFlow_0t29gjo" sourceRef="Task_Get_Fact_From_API" targetRef="EndEvent_0u1cgrf" />
<bpmn:sequenceFlow id="Flow_1ctusgn" sourceRef="StartEvent_1" targetRef="Task_Get_Fact_From_API" />
</bpmn:process>
<bpmndi:BPMNDiagram id="BPMNDiagram_1">
<bpmndi:BPMNPlane id="BPMNPlane_1" bpmnElement="Process_SecondFact">
<bpmndi:BPMNShape id="_BPMNShape_StartEvent_2" bpmnElement="StartEvent_1">
<dc:Bounds x="152" y="232" width="36" height="36" />
</bpmndi:BPMNShape>
<bpmndi:BPMNShape id="ScriptTask_10keafb_di" bpmnElement="Task_Get_Fact_From_API">
<dc:Bounds x="350" y="210" width="100" height="80" />
</bpmndi:BPMNShape>
<bpmndi:BPMNShape id="EndEvent_0u1cgrf_di" bpmnElement="EndEvent_0u1cgrf">
<dc:Bounds x="582" y="232" width="36" height="36" />
</bpmndi:BPMNShape>
<bpmndi:BPMNEdge id="SequenceFlow_0t29gjo_di" bpmnElement="SequenceFlow_0t29gjo">
<di:waypoint x="450" y="250" />
<di:waypoint x="582" y="250" />
</bpmndi:BPMNEdge>
<bpmndi:BPMNEdge id="Flow_1ctusgn_di" bpmnElement="Flow_1ctusgn">
<di:waypoint x="188" y="250" />
<di:waypoint x="350" y="250" />
</bpmndi:BPMNEdge>
</bpmndi:BPMNPlane>
</bpmndi:BPMNDiagram>
</bpmn:definitions>

View File

@ -7,7 +7,7 @@ from tests.spiffworkflow_backend.helpers.base_test import BaseTest
from tests.spiffworkflow_backend.helpers.test_data import load_test_spec from tests.spiffworkflow_backend.helpers.test_data import load_test_spec
class TestJsonFileDataStore(BaseTest): class TestJSONFileDataStore(BaseTest):
def test_can_execute_diagram( def test_can_execute_diagram(
self, self,
app: Flask, app: Flask,

View File

@ -1378,7 +1378,6 @@ class TestProcessApi(BaseTest):
) )
assert show_response.json is not None assert show_response.json is not None
assert show_response.status_code == 200 assert show_response.status_code == 200
assert show_response.json["bpmn_xml_file_contents_retrieval_error"] is None
file_system_root = FileSystemService.root_path() file_system_root = FileSystemService.root_path()
file_path = f"{file_system_root}/{process_model.id}/{process_model_id}.bpmn" file_path = f"{file_system_root}/{process_model.id}/{process_model_id}.bpmn"
with open(file_path) as f_open: with open(file_path) as f_open:
@ -1419,7 +1418,6 @@ class TestProcessApi(BaseTest):
) )
assert show_response.json is not None assert show_response.json is not None
assert show_response.status_code == 200 assert show_response.status_code == 200
assert show_response.json["bpmn_xml_file_contents_retrieval_error"] is None
file_system_root = FileSystemService.root_path() file_system_root = FileSystemService.root_path()
process_instance_file_path = f"{file_system_root}/{process_model.id}/{process_model_id}.bpmn" process_instance_file_path = f"{file_system_root}/{process_model.id}/{process_model_id}.bpmn"
with open(process_instance_file_path) as f_open: with open(process_instance_file_path) as f_open:

View File

@ -1,14 +0,0 @@
from flask.app import Flask
from flask.testing import FlaskClient
from tests.spiffworkflow_backend.helpers.base_test import BaseTest
class TestSwaggerDocs(BaseTest):
def test_can_retrieve_swagger_docs_without_auth(
self,
app: Flask,
client: FlaskClient,
) -> None:
response = client.get("/v1.0/ui/")
assert response.status_code == 200

View File

@ -458,8 +458,6 @@ class TestAuthorizationService(BaseTest):
("/process-models", "read"), ("/process-models", "read"),
("/processes", "read"), ("/processes", "read"),
("/processes/callers/*", "read"), ("/processes/callers/*", "read"),
("/script-assist/enabled", "read"),
("/script-assist/process-message", "create"),
("/service-tasks", "read"), ("/service-tasks", "read"),
("/tasks/*", "create"), ("/tasks/*", "create"),
("/tasks/*", "delete"), ("/tasks/*", "delete"),

View File

@ -8,7 +8,6 @@ from spiffworkflow_backend.models.process_instance import ProcessInstanceStatus
from spiffworkflow_backend.services.message_service import MessageService from spiffworkflow_backend.services.message_service import MessageService
from spiffworkflow_backend.services.process_instance_processor import ProcessInstanceProcessor from spiffworkflow_backend.services.process_instance_processor import ProcessInstanceProcessor
from spiffworkflow_backend.services.process_instance_service import ProcessInstanceService from spiffworkflow_backend.services.process_instance_service import ProcessInstanceService
from spiffworkflow_backend.services.spec_file_service import SpecFileService
from tests.spiffworkflow_backend.helpers.base_test import BaseTest from tests.spiffworkflow_backend.helpers.base_test import BaseTest
from tests.spiffworkflow_backend.helpers.test_data import load_test_spec from tests.spiffworkflow_backend.helpers.test_data import load_test_spec
@ -253,39 +252,3 @@ class TestMessageService(BaseTest):
assert len(message_instances) == 2 assert len(message_instances) == 2
mi_statuses = [mi.status for mi in message_instances] mi_statuses = [mi.status for mi in message_instances]
assert mi_statuses == ["completed", "completed"] assert mi_statuses == ["completed", "completed"]
def test_can_delete_message_start_events_from_database_if_model_no_longer_references_it(
self,
app: Flask,
with_db_and_bpmn_file_cleanup: None,
) -> None:
process_model_without_message_start_event = load_test_spec(
"test_group/sample",
process_model_source_directory="sample",
)
old_message_triggerable_process = MessageTriggerableProcessModel(
message_name="travel_start_test_v2",
process_model_identifier=process_model_without_message_start_event.id,
file_name=process_model_without_message_start_event.primary_file_name,
)
db.session.add(old_message_triggerable_process)
db.session.commit()
message_triggerable_process_model = MessageTriggerableProcessModel.query.filter_by(
message_name="travel_start_test_v2"
).first()
assert message_triggerable_process_model is not None
assert message_triggerable_process_model.process_model_identifier == process_model_without_message_start_event.id
assert process_model_without_message_start_event.primary_file_name is not None
primary_file_contents = SpecFileService.get_data(
process_model_without_message_start_event, process_model_without_message_start_event.primary_file_name
)
SpecFileService.update_file(
process_model_without_message_start_event,
process_model_without_message_start_event.primary_file_name,
primary_file_contents,
)
message_triggerable_process_model = MessageTriggerableProcessModel.query.filter_by(
message_name="travel_start_test_v2"
).first()
assert message_triggerable_process_model is None

View File

@ -27,28 +27,33 @@ from tests.spiffworkflow_backend.helpers.test_data import load_test_spec
class TestProcessInstanceProcessor(BaseTest): class TestProcessInstanceProcessor(BaseTest):
# it's not totally obvious we want to keep this test/file
def test_script_engine_takes_data_and_returns_expected_results(
self,
app: Flask,
with_db_and_bpmn_file_cleanup: None,
) -> None:
app.config["THREAD_LOCAL_DATA"].process_model_identifier = "hey"
app.config["THREAD_LOCAL_DATA"].process_instance_id = 0
script_engine = ProcessInstanceProcessor._default_script_engine
result = script_engine._evaluate("a", {"a": 1})
assert result == 1
app.config["THREAD_LOCAL_DATA"].process_model_identifier = None
app.config["THREAD_LOCAL_DATA"].process_instance_id = None
def test_script_engine_can_use_custom_scripts( def test_script_engine_can_use_custom_scripts(
self, self,
app: Flask, app: Flask,
with_db_and_bpmn_file_cleanup: None, with_db_and_bpmn_file_cleanup: None,
) -> None: ) -> None:
process_model = load_test_spec( app.config["THREAD_LOCAL_DATA"].process_model_identifier = "hey"
process_model_id="test_group/random_fact", app.config["THREAD_LOCAL_DATA"].process_instance_id = 0
bpmn_file_name="random_fact_set.bpmn", script_engine = ProcessInstanceProcessor._default_script_engine
process_model_source_directory="random_fact", result = script_engine._evaluate("fact_service(type='norris')", {})
) assert result == "Chuck Norris doesnt read books. He stares them down until he gets the information he wants."
process_instance = self.create_process_instance_from_process_model(process_model=process_model) app.config["THREAD_LOCAL_DATA"].process_model_identifier = None
processor = ProcessInstanceProcessor(process_instance) app.config["THREAD_LOCAL_DATA"].process_instance_id = None
processor.do_engine_steps(save=True)
assert process_instance.status == ProcessInstanceStatus.complete.value
process_data = processor.get_data()
assert process_data is not None
assert "FactService" in process_data
assert (
process_data["FactService"]
== "Chuck Norris doesnt read books. He stares them down until he gets the information he wants."
)
def test_sets_permission_correctly_on_human_task( def test_sets_permission_correctly_on_human_task(
self, self,
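The removed test above hand-rolls thread-local setup and teardown around the default script engine. That pattern generalizes to a small context helper; every name below (_tld, with_process_context) is a hypothetical stand-in for app.config["THREAD_LOCAL_DATA"] and the surrounding test code:

    import threading

    _tld = threading.local()

    def with_process_context(model_identifier, instance_id, fn):
        # Attach process context for the duration of fn, then always clear it,
        # mirroring the manual setup/teardown in the removed test.
        _tld.process_model_identifier = model_identifier
        _tld.process_instance_id = instance_id
        try:
            return fn()
        finally:
            _tld.process_model_identifier = None
            _tld.process_instance_id = None

    assert with_process_context("hey", 0, lambda: 1 + 1) == 2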

View File

@ -11,18 +11,6 @@ from tests.spiffworkflow_backend.helpers.base_test import BaseTest
@pytest.fixture() @pytest.fixture()
def with_loaded_reference_cache(app: Flask, with_db_and_bpmn_file_cleanup: None) -> Generator[None, None, None]: def with_loaded_reference_cache(app: Flask, with_db_and_bpmn_file_cleanup: None) -> Generator[None, None, None]:
reference_objects: dict[str, ReferenceCacheModel] = {} reference_objects: dict[str, ReferenceCacheModel] = {}
ReferenceCacheService.add_unique_reference_cache_object(
reference_objects,
ReferenceCacheModel.from_params(
"contacts_datastore_root",
"Contacts Datastore Root",
"data_store",
"contacts_datastore.bpmn",
"",
None,
False,
),
)
ReferenceCacheService.add_unique_reference_cache_object( ReferenceCacheService.add_unique_reference_cache_object(
reference_objects, reference_objects,
ReferenceCacheModel.from_params( ReferenceCacheModel.from_params(
@ -68,9 +56,3 @@ class TestReferenceCacheService(BaseTest):
def test_does_not_find_data_store_in_non_upsearched_location(self, with_loaded_reference_cache: None) -> None: def test_does_not_find_data_store_in_non_upsearched_location(self, with_loaded_reference_cache: None) -> None:
location = ReferenceCacheService.upsearch("some/other/place", "contacts_datastore", "data_store") location = ReferenceCacheService.upsearch("some/other/place", "contacts_datastore", "data_store")
assert location is None assert location is None
def test_can_find_data_store_in_upsearched_root_location(self, with_loaded_reference_cache: None) -> None:
location = ReferenceCacheService.upsearch(
"misc/jonjon/generic-data-store-area/test-level-2", "contacts_datastore_root", "data_store"
)
assert location == ""
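For reference, "upsearch" in these tests means walking from a nested location up toward the root until a cached reference matches, with the empty string denoting the root itself (the case the removed test pinned down). A compact sketch against a hypothetical set-based cache:

    def upsearch(cache: set, location: str, identifier: str, ref_type: str):
        # Try the location itself, then each parent, finally the root ("").
        parts = location.split("/")
        while True:
            candidate = "/".join(parts)
            if (candidate, identifier, ref_type) in cache:
                return candidate
            if not parts:
                return None
            parts.pop()

    cache = {("", "contacts_datastore_root", "data_store")}
    assert upsearch(cache, "misc/jonjon/generic-data-store-area/test-level-2",
                    "contacts_datastore_root", "data_store") == ""
    assert upsearch(cache, "some/other/place", "contacts_datastore", "data_store") is None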