Merge remote-tracking branch 'origin/main' into feature/interstitial_process_instance_show

This commit is contained in:
danfunk 2023-05-24 15:32:43 -04:00
commit 77e375a86a
121 changed files with 4986 additions and 3617 deletions

View File

@ -75,7 +75,7 @@ jobs:
database: "sqlite",
}
- { python: "3.11", os: "ubuntu-latest", session: "xdoctest" }
- { python: "3.11", os: "ubuntu-latest", session: "docs-build" }
# - { python: "3.11", os: "ubuntu-latest", session: "docs-build" }
env:
FLASK_SESSION_SECRET_KEY: super_secret_key
@ -84,6 +84,7 @@ jobs:
PRE_COMMIT_COLOR: "always"
SPIFFWORKFLOW_BACKEND_DATABASE_PASSWORD: password
SPIFFWORKFLOW_BACKEND_DATABASE_TYPE: ${{ matrix.database }}
SPIFFWORKFLOW_BACKEND_RUNNING_IN_CI: 'true'
steps:
- name: Check out the repository
@ -119,20 +120,23 @@ jobs:
pipx inject --pip-args=--constraint=.github/workflows/constraints.txt nox nox-poetry
nox --version
- name: Checkout Samples
if: matrix.database == 'sqlite'
uses: actions/checkout@v3
with:
repository: sartography/sample-process-models
path: sample-process-models
- name: Poetry Install
if: matrix.database == 'sqlite'
run: poetry install
- name: Setup sqlite
if: matrix.database == 'sqlite'
env:
SPIFFWORKFLOW_BACKEND_BPMN_SPEC_ABSOLUTE_DIR: "${GITHUB_WORKSPACE}/sample-process-models"
run: ./bin/recreate_db clean rmall
# when we get an incompatible sqlite migration again and need to combine all migrations into one for the benefit of sqlite
# see if we can get the sqlite-specific block in the noxfile.py to work instead of this block in the github workflow,
# which annoyingly runs python setup outside of the nox environment (which seems to be flakier on poetry install).
# - name: Checkout Samples
# if: matrix.database == 'sqlite'
# uses: actions/checkout@v3
# with:
# repository: sartography/sample-process-models
# path: sample-process-models
# - name: Poetry Install
# if: matrix.database == 'sqlite'
# run: poetry install
# - name: Setup sqlite
# if: matrix.database == 'sqlite'
# env:
# SPIFFWORKFLOW_BACKEND_BPMN_SPEC_ABSOLUTE_DIR: "${GITHUB_WORKSPACE}/sample-process-models"
# run: ./bin/recreate_db clean rmall
- name: Setup Mysql
uses: mirromutth/mysql-action@v1.1
@ -162,13 +166,13 @@ jobs:
name: coverage-data
path: "spiffworkflow-backend/.coverage.*"
- name: Upload documentation
if: matrix.session == 'docs-build'
uses: actions/upload-artifact@v3
with:
name: docs
path: docs/_build
# - name: Upload documentation
# if: matrix.session == 'docs-build'
# uses: actions/upload-artifact@v3
# with:
# name: docs
# path: docs/_build
#
- name: Upload logs
if: failure() && matrix.session == 'tests'
uses: "actions/upload-artifact@v3"

View File

@ -91,6 +91,11 @@ jobs:
- name: wait_for_keycloak
working-directory: ./spiffworkflow-backend
run: ./keycloak/bin/wait_for_keycloak 5
- name: Dump GitHub context
env:
GITHUB_CONTEXT: ${{ toJson(github) }}
run: |
echo "$GITHUB_CONTEXT"
- name: Cypress run
uses: cypress-io/github-action@v5
with:
@ -99,7 +104,11 @@ jobs:
# only record on push, not pull_request, since we do not have secrets for PRs,
# so the required CYPRESS_RECORD_KEY will not be available.
# we have limited runs in cypress cloud, so only record main builds
record: ${{ github.ref_name == 'main' && github.event_name == 'push' }}
# the direct check for github.event_name == 'push' is for if we want to go back to triggering this workflow
# directly, rather than when Backend Tests complete.
# note that github.event.workflow_run is referring to the Backend Tests workflow and another option
# for github.event.workflow_run.event is 'pull_request', which we want to ignore.
record: ${{ github.ref_name == 'main' && ((github.event_name == 'workflow_run' && github.event.workflow_run.event == 'push') || (github.event_name == 'push')) }}
env:
# pass the Dashboard record key as an environment variable
CYPRESS_RECORD_KEY: ${{ secrets.CYPRESS_RECORD_KEY }}

4
.gitignore vendored
View File

@ -3,6 +3,6 @@ pyrightconfig.json
t
*~
.dccache
*~
version_info.json
.coverage*
.coverage*
UNKNOWN.egg-info/

View File

@ -15,14 +15,23 @@ If you need to push back from the monorepo to one of the individual repos, here'
git subtree push --prefix=spiffworkflow-frontend git@github.com:sartography/spiffworkflow-frontend.git add_md_file
Setup
-----
## Backend Setup
First install python and poetry, and then:
cd spiffworkflow-backend
poetry install
./bin/run_server_locally
Run tests
---------
## Frontend Setup
First install nodejs, ideally the version in .tool-versions (but likely other versions will work). Then:
cd spiffworkflow-frontend
npm install
npm start
## Run tests
./bin/run_pyl
Requires at root:
@ -31,26 +40,19 @@ Requires at root:
- .pre-commit-config.yaml
- pyproject.toml
Run cypress automated browser tests
-----------------------------------
## Run cypress automated browser tests
Get the app running so you can access the frontend at http://localhost:7001 in your browser.
Get the app running so you can access the frontend at http://localhost:7001 in your browser by following the frontend and backend setup steps above, and then:
First install nodejs, ideally the version in .tool-versions (but likely other versions will work).
Then:
cd spiffworkflow-frontend
npm install
./bin/run_cypress_tests_locally
License
-------
## License
SpiffArena's main components are published under the terms of the
[GNU Lesser General Public License (LGPL) Version 3](https://www.gnu.org/licenses/lgpl-3.0.txt).
Support
-------
## Support
You can find us on [our Discord Channel](https://discord.gg/BYHcc7PpUC).
Commercial support for SpiffWorkflow is available from [Sartography](https://sartography.com).

5
docs/.markdownlint.jsonc Normal file
View File

@ -0,0 +1,5 @@
{
"default": true,
"MD013": false,
"whitespace": false
}

View File

@ -10,7 +10,10 @@ BUILDDIR = _build
# Put it first so that "make" without argument is like "make help".
help:
@$(SPHINXBUILD) -M help "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O)
@$(SPHINXBUILD) -M help "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O) && echo " \033[0;34mlint\033[0m runs markdownlint on all markdown files (this was added to the Makefile manually. pardon formatting)"
lint:
markdownlint **/*.md
.PHONY: help Makefile

View File

@ -2,56 +2,61 @@
This documentation is currently hosted live at [Spiff-Arena's ReadTheDocs](https://spiff-arena.readthedocs.io/en/latest/)
Please set aside a couple of hours to work through this, as getting this setup correctly once is 10,000 times better than having problems every day for the rest of your life.
## Our Methodology
The methodology we are following is knowns as ["Docs as Code"](https://www.writethedocs.org/guide/docs-as-code/)
The methodology we are following is known as ["Docs as Code"](https://www.writethedocs.org/guide/docs-as-code/).
This means using the same tools and processes that software developers use for writing code to write the documenation for code.
In following this methodoloy, you will have to pick up some tools you haven't had to use before (Git, Sphinx).
Why would a technical writer need to learn these software engineering tools?
This means using the same tools and processes that software developers use for writing code to write the documentation for code.
In following this methodology, you will have to pick up some tools you haven't had to use before (Git, Sphinx).
Why would a technical writer need to learn these software engineering tools?
I'll never make the case as well as an article by [Tom Johnson](https://idratherbewriting.com/trends/trends-to-follow-or-forget-docs-as-code.html).
You might notice, when looking at the markdown files, that every sentence starts on a new line.
Like this one.
Unless there is a blank line between sentences, Markdown will still render this as a paragraph.
This is called [Ventilated Code](https://vanemden.wordpress.com/2009/01/01/ventilated-prose/) and can be very helpful when working in Markdown.
This is called [Ventilated Prose](https://vanemden.wordpress.com/2009/01/01/ventilated-prose/) and can be very helpful when working in Markdown.
## Our Tools
[Markdown](https://www.markdownguide.org/getting-started/) is a "markup language that you can use to add formatting elements to plain text documents.
You won't be writing the documentation in a word processor, but in simple plain text, and using some special syntax that will consistently and professionally format that text.
[Markdown](https://www.markdownguide.org/getting-started/) is a "markup language that you can use to add formatting elements to plain text documents."
You won't be writing the documentation in a word processor, but in simple plain text, using some special syntax that will consistently and professionally format that text.
The basic Markdown syntax is very simple. Here are some [quick examples](https://commonmark.org/help/). And here is a great [10 minute tutorial](https://commonmark.org/help/tutorial/).
The basic Markdown syntax is very simple.
Here are some [quick examples](https://commonmark.org/help/). And here is a great [10 minute tutorial](https://commonmark.org/help/tutorial/).
This will cover a lot of the basics, like bolding text, italics, paragraphs, lists and other common formatting techniques.
![Markdown screenshot](./images/markdown.png "Markdown example")
### MyST
Markdown doesn't support some really useful things.
Markdown doesn't support some really useful formatting options.
You can't add footnotes, or create an "aside" comment or build a table.
Because of this there are many extensions typically referened to as Markdown "Flavors".
Because of this, there are many extensions, and these are typically referred to as Markdown "Flavors."
The flavor we are using is MyST.
There is [excellent documentation on MyST](https://myst-parser.readthedocs.io/en/v0.13.5/using/syntax.html) that you should definitely review, so you know everthing that is available to you.
There is [excellent documentation on MyST](https://myst-parser.readthedocs.io/en/v0.13.5/using/syntax.html) that you should definitely review, so you know everything that is available to you.
### Sphinx
This is a large documenation effort. Many different Markdown pages will together make up the full website.
This is a large documentation effort.
Many different Markdown pages will together make up the full website.
You will mostly use Sphinx in the background - you won't be aware of it.
But if you decide that you want to alter the theme (the colors, styles, etc...) of the final website, Sphinx controls this and offers [themes](https://sphinx-themes.org/) and the ability to change styles / colors and formatting through the site.
But if you decide that you want to alter the theme (the colors, styles, etc...) of the final website, Sphinx controls this and offers [themes](https://sphinx-themes.org/) and the ability to change styles / colors and formatting through the site.
You just need to learn a little CSS to control it.
### GitHub
Our project is managed by a version control system called Git.
You can use GIT to submit changes to the documenation, in the same we use to submit changes to our code.
It is avilable on GitHub as the [spiff-arena project](https://github.com/sartography/spiff-arena). Git also manages versions of the code, and handles running tests, and causing our documenation to be built and deployed.
It will take a bit to get comfortable with Git, but when you do, you will come to love it (or maybe hate it, but with a lot of respect)
You can use Git to submit changes to the documentation, in the same way we use it to submit changes to our code.
It is available on GitHub as the [spiff-arena project](https://github.com/sartography/spiff-arena).
GitHub also manages versions of the code and handles running tests.
Readthedocs observes changes in git and manages an automated process that causes our documentation to be built and deployed.
It will take a bit to get comfortable with Git, but when you do, you will come to love it (or maybe hate it, but with a lot of respect).
## Setup
@ -60,72 +65,96 @@ But you will find that most of it just works - and that once you get into a regu
### Step 1: Pre-Requisites
Assure you have been granted write access to our repository.
Make sure you have an account on GitHub and then contact dan@sartography.com and ask him to add you as a contributor.
Assure you have been granted write access to our git repository.
Make sure you have an account on GitHub and then contact `dan@sartography.com` and ask him to add you as a contributor.
### Step 2: Install VSCode
[Download VSCode](https://code.visualstudio.com/) and install it on your computer.
### Step 3: Install Python
We need python in order to build the website locally so we can really see what our content is going to look like once we publish. It's going to be handy for other reasons as well. We'll want python to be properly set up inside of VS Code. Follow [these directions and brief tutorial](https://code.visualstudio.com/docs/python/python-tutorial
) to assure this is set up.
We need python in order to build the website locally so we can really see what our content is going to look like once we publish.
It's going to be handy for other reasons as well.
We'll want python to be properly set up inside of VS Code.
Follow [these directions and brief tutorial](https://code.visualstudio.com/docs/python/python-tutorial) to assure this is set up.
### Step 3: Connect VSCode to Git
VSCode comes with Git built in.
So you can use VSCode to "pull" changes from others down to your local computer and "push" changes back up to share with others (and to cause our docs site to rebuild)
Here are directions for how to [clone Spiff-Arena](https://learn.microsoft.com/en-us/azure/developer/javascript/how-to/with-visual-studio-code/clone-github-repository?tabs=create-repo-command-palette%2Cinitialize-repo-activity-bar%2Ccreate-branch-command-palette%2Ccommit-changes-command-palette%2Cpush-command-palette#clone-repository). **IMPORTANT**: Follow those directions, but be sure to checkout https://github.com/sartography/spiff-arena instead of the project they are using!
VSCode comes with Git built in.
So you can use VSCode to "pull" changes from others down to your local computer and "push" changes back up to share with others (and to cause our docs site to rebuild).
Here are directions for how to [clone Spiff-Arena](https://learn.microsoft.com/en-us/azure/developer/javascript/how-to/with-visual-studio-code/clone-github-repository?tabs=create-repo-command-palette%2Cinitialize-repo-activity-bar%2Ccreate-branch-command-palette%2Ccommit-changes-command-palette%2Cpush-command-palette#clone-repository).
**IMPORTANT**: Follow those directions, but be sure to checkout `https://github.com/sartography/spiff-arena` instead of the project they are using!
You can save the project to any directory on your computer.
We strongly suggest you create a sub-folder called "projects" in your "home" or "Desktop" folder and checkout the code into this directory.
### Step 4: Open just the Docs Folder
We've checked out the whole spiff-arena project, but we are only going to be working inside of the docs directory. So let's open just that folder in VSCode.
We've checked out the whole spiff-arena project, but we are only going to be working inside of the docs directory.
So let's open just that folder in VSCode.
* Go to File -> Open Folder
* Select the "docs" folder inside of spiff-arena.
Now clikc on the two pieces of paper at the top corner of your screen, and you should see a project that looks like this:
Now click on the two pieces of paper at the top corner of your screen, and you should see a project that looks like this without all the rest of the code in your way:
![Docs Directory](./images/docs_dir.png "Docs Directory")
Without all the rest of the code in your way.
### Step 4: Add some extensions
* Inside VSCode, go to File -> Preferences -> Extensions
* Search for "myst"
* click the "install" button.
* Repeat, this time doing it for "python extension for VS Code"
* Inside VSCode, go to File -> Preferences -> Extensions
* Search for "myst"
* click the "install" button.
* Repeat, this time installing the "Python" extension for VS Code (from Microsoft)
![Myst Extension](./images/myst.png "Search or MyST in extensions")
### Step 5: Install Python Dependencies
This project requires a few Python dependencies to work correctly. We are going to set up a Virtual Evironment for Python to keep us sane later on. You can do that by following these steps:
This project requires a few Python dependencies to work correctly.
We are going to set up a Virtual Environment for Python to keep us sane later on.
You can do that by following these steps:
1. Open the Command Palette (Ctrl+Shift+P), start typing the **Python: Create Environment** command to search, and then select the command.
1. Select **Venv**
1. Select Python 3.11 from the list of options if there is nore than one thing to select.
1. Be sure the the checkbox next to "requirements.txt" is selected.
1. Select Python 3.11 from the list of options if there is more than one thing to select.
1. Be sure the checkbox next to "requirements.txt" is selected.
1. Click OK.
### Step 6: Fire up the website
1. Go to Terminial -> New Terminal
1. Go to Terminal -> New Terminal
1. type: **sphinx-autobuild . _build/html** at the prompt and hit enter.
1. Open your browser and go to http://127.0.0.1:8000
1. Open your browser and go to [http://127.0.0.1:8000](http://127.0.0.1:8000).
### Step 7: Make a chance
### Step 7: Make a change
1. Open up a markdown file, and make a change.
### Step 8: Commit your changes and push them up for everyone.
1. Select the "git" button on the left hand side of the toolbar (cricles with lines between them) ![Git button](./images/git.png "Git button")
### Step 8: Commit your changes and push them up for everyone
1. Select the "git" button on the left hand side of the toolbar (circles with lines between them) ![Git button](./images/git.png "Git button")
2. Press the blue "Commit" button.
3. Any changes you pushed up, should be live on our website within 5 to 10 minutes.
3. Any changes you pushed up should be live on our website within 5 to 10 minutes.
## Linting
```{admonition} Linting is just an idea
:class: warning
Documentation people: please ignore this for now.
```
We may decide to check the documentation with a "linter" which is designed to keep the documentation consistent and standardized.
One option is [markdownlint-cli](https://github.com/igorshubovych/markdownlint-cli), which uses David Anson's [NodeJS-based markdownlint](https://github.com/DavidAnson/markdownlint), which these days seems to be more popular than the [ruby-based markdownlint](https://github.com/markdownlint/markdownlint).
A `.markdownlint.jsonc` file has been added that configures the same markdownlint program (basically to ignore the rule about long lines, since we are using ventilated prose).

View File

@ -0,0 +1,48 @@
# BPMN Unit Tests
Software Engineers test their code.
With this feature, BPMN authors can test their creations, too.
These tests can provide you with faster feedback than you would get by simply running your process model, and they allow you to mock out form input and service task connections as well as provide specific input to exercise different branches of your process model.
BPMN unit tests are designed to give you greater confidence that your process models will work as designed when they are run in the wild, both the first time they are used by real users and also after you make changes to them.
## Creating BPMN Unit Tests
First, create a process model that you want to test.
Navigate to the process model and add a JSON file based on the name of one of the BPMN files.
For example, if you have a process model that includes a file called `awesome_script_task.bpmn`, your test JSON file would be called `test_awesome_script_task.json`.
If you have multiple BPMN files you want to test, you can have multiple test JSON files.
The BPMN files you test do not have to be marked as the primary file for the process model in question.
The structure of your json should be as follows:
{
"test_case_1": {
"tasks": {
"ServiceTaskProcess:service_task_one": {
"data": [{ "the_result": "result_from_service" }]
}
},
"expected_output_json": { "the_result": "result_from_service" }
}
}
The top-level keys should be names of unit tests.
In this example, the unit test is named "test_case_1."
Under that, you can specify "tasks" and "expected_output_json."
Under "tasks," each key is the BPMN id of a specific task.
If you are testing a file that uses Call Activities and therefore calls other processes, there can be conflicting BPMN ids.
In this case, you can specify the unique activity by prepending the Process id (in the above example, that is "ServiceTaskProcess").
For simple processes, "service_task_one" (for example) would be sufficient as the BPMN id.
For User Tasks, the "data" (under a specific task) represents the data that will be entered by the user in the form.
For Service Tasks, the data represents the data that will be returned by the service.
Note that all User Tasks and Service Tasks must have their BPMN ids mentioned in the JSON file (with mock task data as desired), since otherwise we won't know what to do when the flow arrives at these types of tasks.
The "expected_output_json" represents the state of the task data that you expect when the process completes.
When the test is run, if the actual task data differs from this expectation, the test will fail.
The test will also fail if the process never completes or if an error occurs.
## Running BPMN Unit Tests
Go to a process model and either click “Run Unit Tests” to run all tests for the process model or click on the “play icon” next to a "test_something.json" file.
Then you will get a green check mark or a red x.
You can click on these colored icons to get more details about the passing or failing test.

View File

@ -1,17 +1,16 @@
Welcome to SpiffWorkflow's documentation!
=======================================
# Welcome to SpiffWorkflow's documentation
```{toctree}
:maxdepth: 2
:caption: Contents
quick_start/quick_start.md
documentation/documentation.md
how_to/bpmn_unit_tests.md
```
This is great!
Indices and tables
==================
## Indices and tables
* [](genindex)
* [](modindex)

View File

@ -17,6 +17,7 @@ To access SpiffWorkflow, simply sign in using your Keycloak account. Once you ha
:alt: Login Page
:width: 45%
```
```{image} images/Untitled_1.png
:alt: Home Page
:width: 45%
@ -60,6 +61,7 @@ The process section provides a comprehensive view of the process ecosystem by sh
:class: info
💡 A **process group** is a way of grouping a bunch of **process models.** A **process model** contains all the files necessary to execute a specific process.
```
--
![Untitled](images/Untitled_4.png)
@ -142,7 +144,7 @@ After starting a process, it's important to stay informed about its progress. Ev
Here's how you can view the steps of the process you just started.
### Step 1: Navigate to the “Home” or “Process Instance” section.
### Step 1: Navigate to the “Home” or “Process Instance” section
There are 2 ways of finding your process instances.
@ -170,7 +172,7 @@ The Process-defined **metadata can provide valuable insights into its history, c
To check the metadata of a process instance, follow these steps.
### Step 1: Navigate to the “Home” or “Process Instance” section.
### Step 1: Navigate to the “Home” or “Process Instance” section as before
Once you're signed in, navigate to the home section. Here you will find a list of all the process instances you've initiated under **“Started by me”**.
@ -231,7 +233,7 @@ To filter the list, click on the "Filter" option. This will expand the filter se
![Untitled](images/Untitled_20.png)
### Step 3: Apply Filters:
### Step 3: Apply Filters
Once you have entered all the relevant filter details, click on the "**Apply**" button to apply the filters. The system will then display all the process instances matching the input details.
@ -303,4 +305,4 @@ Ensure that all required details have been included such as Process name, Proces
![Untitled](images/Untitled_32.png)
By following these steps, you can request the special permissions needed to carry out your tasks effectively.
By following these steps, you can request the special permissions needed to carry out your tasks effectively.

View File

@ -7,5 +7,20 @@ function error_handler() {
trap 'error_handler ${LINENO} $?' ERR
set -o errtrace -o errexit -o nounset -o pipefail
set -x
mysql -uroot spiffworkflow_backend_development -e 'select pa.id, g.identifier group_identifier, pt.uri, permission from permission_assignment pa join principal p on p.id = pa.principal_id join `group` g on g.id = p.group_id join permission_target pt on pt.id = pa.permission_target_id;'
database=spiffworkflow_backend_local_development
if [[ "${1:-}" == "test" ]]; then
database=spiffworkflow_backend_unit_testing
fi
# shellcheck disable=2016
mysql -uroot "$database" -e '
select u.username user, g.identifier group
FROM `user` u
JOIN `user_group_assignment` uga on uga.user_id = u.id
JOIN `group` g on g.id = uga.group_id;
select pa.id, g.identifier group_identifier, pt.uri, permission from permission_assignment pa
join principal p on p.id = pa.principal_id
join `group` g on g.id = p.group_id
join permission_target pt on pt.id = pa.permission_target_id;
'

View File

@ -23,6 +23,11 @@ if [[ -z "${SPIFFWORKFLOW_BACKEND_BPMN_SPEC_ABSOLUTE_DIR:-}" ]]; then
export SPIFFWORKFLOW_BACKEND_BPMN_SPEC_ABSOLUTE_DIR
fi
database_host="localhost"
if [[ -n "${SPIFFWORKFLOW_BACKEND_DATABASE_URI:-}" ]]; then
database_host=$(grep -oP "^[^:]+://.*@\K(.+?)[:/]" <<<"$SPIFFWORKFLOW_BACKEND_DATABASE_URI" | sed -E 's/[:\/]$//')
fi
tasks=""
if [[ "${1:-}" == "clean" ]]; then
subcommand="${2:-}"
@ -37,8 +42,8 @@ if [[ "${1:-}" == "clean" ]]; then
if [[ "${SPIFFWORKFLOW_BACKEND_DATABASE_TYPE:-mysql}" != "mysql" ]]; then
rm -f ./src/instance/*.sqlite3
else
mysql -uroot -e "DROP DATABASE IF EXISTS spiffworkflow_backend_local_development"
mysql -uroot -e "DROP DATABASE IF EXISTS spiffworkflow_backend_unit_testing"
mysql -h "$database_host" -uroot -e "DROP DATABASE IF EXISTS spiffworkflow_backend_local_development"
mysql -h "$database_host" -uroot -e "DROP DATABASE IF EXISTS spiffworkflow_backend_unit_testing"
fi
# TODO: check to see if the db already exists and we can connect to it. also actually clean it up.
@ -74,8 +79,8 @@ else
fi
if [[ "${SPIFFWORKFLOW_BACKEND_DATABASE_TYPE:-mysql}" == "mysql" ]]; then
mysql -uroot -e "CREATE DATABASE IF NOT EXISTS spiffworkflow_backend_local_development"
mysql -uroot -e "CREATE DATABASE IF NOT EXISTS spiffworkflow_backend_unit_testing"
mysql -h "$database_host" -uroot -e "CREATE DATABASE IF NOT EXISTS spiffworkflow_backend_local_development"
mysql -h "$database_host" -uroot -e "CREATE DATABASE IF NOT EXISTS spiffworkflow_backend_unit_testing"
fi
for task in $tasks; do
@ -85,7 +90,7 @@ done
SPIFFWORKFLOW_BACKEND_ENV=unit_testing FLASK_APP=src/spiffworkflow_backend poetry run flask db upgrade
if [[ -n "${SPIFFWORKFLOW_BACKEND_ENV:-}" ]] && ! grep -Eq '^(local_development|unit_testing)$' <<< "$SPIFFWORKFLOW_BACKEND_ENV"; then
if [[ "${SPIFFWORKFLOW_BACKEND_DATABASE_TYPE:-mysql}" == "mysql" ]]; then
mysql -uroot -e "CREATE DATABASE IF NOT EXISTS spiffworkflow_backend_$SPIFFWORKFLOW_BACKEND_ENV"
mysql -h "$database_host" -uroot -e "CREATE DATABASE IF NOT EXISTS spiffworkflow_backend_$SPIFFWORKFLOW_BACKEND_ENV"
fi
FLASK_APP=src/spiffworkflow_backend poetry run flask db upgrade
fi

View File

@ -1,22 +1,15 @@
"""Conftest."""
# noqa
import os
import shutil
import pytest
from flask.app import Flask
from flask.testing import FlaskClient
from tests.spiffworkflow_backend.helpers.base_test import BaseTest
from spiffworkflow_backend.models.bpmn_process import BpmnProcessModel
from spiffworkflow_backend.models.db import db
from spiffworkflow_backend.models.process_instance import ProcessInstanceModel
from spiffworkflow_backend.models.user import UserModel
from spiffworkflow_backend.services.process_instance_processor import (
ProcessInstanceProcessor,
)
from spiffworkflow_backend.services.process_instance_service import (
ProcessInstanceService,
)
from spiffworkflow_backend.services.authorization_service import AuthorizationService
from spiffworkflow_backend.services.process_model_service import ProcessModelService
@ -32,8 +25,7 @@ from spiffworkflow_backend import create_app # noqa: E402
@pytest.fixture(scope="session")
def app() -> Flask:
"""App."""
def app() -> Flask: # noqa
os.environ["SPIFFWORKFLOW_BACKEND_ENV"] = "unit_testing"
os.environ["FLASK_SESSION_SECRET_KEY"] = "e7711a3ba96c46c68e084a86952de16f"
app = create_app()
@ -55,72 +47,17 @@ def with_db_and_bpmn_file_cleanup() -> None:
try:
yield
finally:
process_model_service = ProcessModelService()
if os.path.exists(process_model_service.root_path()):
shutil.rmtree(process_model_service.root_path())
if os.path.exists(ProcessModelService.root_path()):
shutil.rmtree(ProcessModelService.root_path())
@pytest.fixture()
def with_super_admin_user() -> UserModel:
"""With_super_admin_user."""
return BaseTest.create_user_with_permission("super_admin")
@pytest.fixture()
def setup_process_instances_for_reports(
client: FlaskClient, with_super_admin_user: UserModel
) -> list[ProcessInstanceModel]:
"""Setup_process_instances_for_reports."""
user = with_super_admin_user
process_group_id = "runs_without_input"
process_model_id = "sample"
# bpmn_file_name = "sample.bpmn"
bpmn_file_location = "sample"
process_model_identifier = BaseTest().create_group_and_model_with_bpmn(
client,
with_super_admin_user,
process_group_id=process_group_id,
process_model_id=process_model_id,
# bpmn_file_name=bpmn_file_name,
bpmn_file_location=bpmn_file_location,
)
# BaseTest().create_process_group(
# client=client, user=user, process_group_id=process_group_id, display_name=process_group_id
# )
# process_model_id = "runs_without_input/sample"
# load_test_spec(
# process_model_id=f"{process_group_id}/{process_model_id}",
# process_model_source_directory="sample"
# )
process_instances = []
for data in [kay(), ray(), jay()]:
process_instance = ProcessInstanceService.create_process_instance_from_process_model_identifier(
# process_group_identifier=process_group_id,
process_model_identifier=process_model_identifier,
user=user,
)
processor = ProcessInstanceProcessor(process_instance)
processor.slam_in_data(data)
process_instance.status = "complete"
db.session.add(process_instance)
db.session.commit()
process_instances.append(process_instance)
return process_instances
def kay() -> dict:
"""Kay."""
return {"name": "kay", "grade_level": 2, "test_score": 10}
def ray() -> dict:
"""Ray."""
return {"name": "ray", "grade_level": 1, "test_score": 9}
def jay() -> dict:
"""Jay."""
return {"name": "jay", "grade_level": 2, "test_score": 8}
def with_super_admin_user() -> UserModel: # noqa
# this loads all permissions from yaml every time this function is called, which is slow
# so default to just setting a simple super admin and only run with the "real" permissions in ci
if os.environ.get("SPIFFWORKFLOW_BACKEND_RUNNING_IN_CI") == "true":
user = BaseTest.find_or_create_user(username="testadmin1")
AuthorizationService.import_permissions_from_yaml_file(user)
else:
user = BaseTest.create_user_with_permission("super_admin")
return user

View File

@ -41,6 +41,18 @@ def setup_database(session: Session) -> None:
session.env[flask_env_key] = "e7711a3ba96c46c68e084a86952de16f"
session.env["FLASK_APP"] = "src/spiffworkflow_backend"
session.env["SPIFFWORKFLOW_BACKEND_ENV"] = "unit_testing"
if os.environ.get("SPIFFWORKFLOW_BACKEND_DATABASE_TYPE") == "sqlite":
# maybe replace this sqlite-specific block with ./bin/recreate_db clean rmall
# (if we can make it work, since it uses poetry),
# which would also remove the migrations folder and re-create things as a single migration
if os.path.exists("migrations"):
import shutil
shutil.rmtree("migrations")
for task in ["init", "migrate"]:
session.run("flask", "db", task)
session.run("flask", "db", "upgrade")

File diff suppressed because it is too large Load Diff

View File

@ -31,7 +31,7 @@ SpiffWorkflow = {git = "https://github.com/sartography/SpiffWorkflow", rev = "ma
# SpiffWorkflow = {git = "https://github.com/sartography/SpiffWorkflow", rev = "6cad2981712bb61eca23af1adfafce02d3277cb9"}
# SpiffWorkflow = {develop = true, path = "../../spiffworkflow/" }
sentry-sdk = "^1.10"
sphinx-autoapi = "^2.0"
# sphinx-autoapi = "^2.0"
mysql-connector-python = "*"
pytest-flask = "^1.2.0"
pytest-flask-sqlalchemy = "^1.1.0"
@ -93,8 +93,8 @@ safety = "^2.3.1"
mypy = ">=0.961"
typeguard = "^3"
xdoctest = {extras = ["colors"], version = "^1.0.1"}
sphinx = "^5.0.2"
sphinx-autobuild = ">=2021.3.14"
# sphinx = "^5.0.2"
# sphinx-autobuild = ">=2021.3.14"
pre-commit = "^2.20.0"
flake8 = "^4.0.1"
black = ">=21.10b0"
@ -111,10 +111,10 @@ pep8-naming = "^0.13.2"
darglint = "^1.8.1"
reorder-python-imports = "^3.9.0"
pre-commit-hooks = "^4.0.1"
sphinx-click = "^4.3.0"
# sphinx-click = "^4.3.0"
Pygments = "^2.10.0"
pyupgrade = "^3.1.0"
furo = ">=2021.11.12"
# furo = ">=2021.11.12"
[tool.poetry.scripts]
spiffworkflow-backend = "spiffworkflow_backend.__main__:main"

View File

@ -70,8 +70,11 @@ def start_scheduler(app: flask.app.Flask, scheduler_class: BaseScheduler = Backg
"""Start_scheduler."""
scheduler = scheduler_class()
# TODO: polling intervals for different jobs
# TODO: polling intervals for messages job
polling_interval_in_seconds = app.config["SPIFFWORKFLOW_BACKEND_BACKGROUND_SCHEDULER_POLLING_INTERVAL_IN_SECONDS"]
not_started_polling_interval_in_seconds = app.config[
"SPIFFWORKFLOW_BACKEND_BACKGROUND_SCHEDULER_NOT_STARTED_POLLING_INTERVAL_IN_SECONDS"
]
user_input_required_polling_interval_in_seconds = app.config[
"SPIFFWORKFLOW_BACKEND_BACKGROUND_SCHEDULER_USER_INPUT_REQUIRED_POLLING_INTERVAL_IN_SECONDS"
]
@ -84,6 +87,11 @@ def start_scheduler(app: flask.app.Flask, scheduler_class: BaseScheduler = Backg
"interval",
seconds=10,
)
scheduler.add_job(
BackgroundProcessingService(app).process_not_started_process_instances,
"interval",
seconds=not_started_polling_interval_in_seconds,
)
scheduler.add_job(
BackgroundProcessingService(app).process_waiting_process_instances,
"interval",

View File

@ -149,7 +149,7 @@ paths:
$ref: "#/components/schemas/OkTrue"
/debug/test-raise-error:
get:
post:
operationId: spiffworkflow_backend.routes.debug_controller.test_raise_error
summary: Returns an unhandled exception that should notify sentry, if sentry is configured
tags:
@ -184,7 +184,7 @@ paths:
description: The identifier for the last visited page for the user.
schema:
type: string
get:
post:
tags:
- Active User
operationId: spiffworkflow_backend.routes.active_users_controller.active_user_updates
@ -207,7 +207,7 @@ paths:
description: The identifier for the last visited page for the user.
schema:
type: string
get:
post:
tags:
- Active User
operationId: spiffworkflow_backend.routes.active_users_controller.active_user_unregister
@ -425,7 +425,7 @@ paths:
schema:
$ref: "#/components/schemas/ProcessModel"
/process-models-natural-language/{modified_process_group_id}:
/process-model-natural-language/{modified_process_group_id}:
parameters:
- name: modified_process_group_id
in: path
@ -451,6 +451,48 @@ paths:
schema:
$ref: "#/components/schemas/ProcessModel"
/process-model-tests/{modified_process_model_identifier}:
parameters:
- name: modified_process_model_identifier
in: path
required: true
description: The process_model_id, modified to replace slashes (/)
schema:
type: string
- name: test_case_file
in: query
required: false
description: The name of the test case file to run
schema:
type: string
- name: test_case_identifier
in: query
required: false
description: The name of the test case file to run
schema:
type: string
post:
operationId: spiffworkflow_backend.routes.process_models_controller.process_model_test_run
summary: Run a test for a process model
tags:
- Process Model Tests
requestBody:
content:
multipart/form-data:
schema:
type: object
properties:
file:
type: string
format: binary
responses:
"201":
description: Metadata about the uploaded file, but not the file content.
content:
application/json:
schema:
$ref: "#/components/schemas/File"
/process-models/{modified_process_model_identifier}/files:
parameters:
- name: modified_process_model_identifier
@ -564,7 +606,7 @@ paths:
schema:
$ref: "#/components/schemas/ProcessModel"
/process-models/{modified_process_model_identifier}/publish:
/process-model-publish/{modified_process_model_identifier}:
parameters:
- name: modified_process_model_identifier
in: path
@ -1122,7 +1164,7 @@ paths:
- Process Instances
responses:
"200":
description: Empty ok true response on successful resume.
description: Empty ok true response on successful reset.
content:
application/json:
schema:

View File

@ -10,7 +10,7 @@ from spiffworkflow_backend.services.logging_service import setup_logger
class ConfigurationError(Exception):
"""ConfigurationError."""
pass
def setup_database_configs(app: Flask) -> None:

View File

@ -27,6 +27,12 @@ SPIFFWORKFLOW_BACKEND_BACKGROUND_SCHEDULER_POLLING_INTERVAL_IN_SECONDS = int(
default="10",
)
)
SPIFFWORKFLOW_BACKEND_BACKGROUND_SCHEDULER_NOT_STARTED_POLLING_INTERVAL_IN_SECONDS = int(
environ.get(
"SPIFFWORKFLOW_BACKEND_BACKGROUND_SCHEDULER_NOT_STARTED_POLLING_INTERVAL_IN_SECONDS",
default="30",
)
)
SPIFFWORKFLOW_BACKEND_BACKGROUND_SCHEDULER_USER_INPUT_REQUIRED_POLLING_INTERVAL_IN_SECONDS = int(
environ.get(
"SPIFFWORKFLOW_BACKEND_BACKGROUND_SCHEDULER_USER_INPUT_REQUIRED_POLLING_INTERVAL_IN_SECONDS",
@ -143,6 +149,7 @@ SPIFFWORKFLOW_BACKEND_ALLOW_CONFISCATING_LOCK_AFTER_SECONDS = int(
environ.get("SPIFFWORKFLOW_BACKEND_ALLOW_CONFISCATING_LOCK_AFTER_SECONDS", default="600")
)
# FIXME: do not default this but we will need to coordinate release of it since it is a breaking change
SPIFFWORKFLOW_BACKEND_DEFAULT_USER_GROUP = environ.get("SPIFFWORKFLOW_BACKEND_DEFAULT_USER_GROUP", default="everybody")
SPIFFWORKFLOW_BACKEND_ENGINE_STEP_DEFAULT_STRATEGY_BACKGROUND = environ.get(

View File

@ -12,6 +12,5 @@ groups:
permissions:
admin:
groups: [admin]
users: []
allowed_permissions: [create, read, update, delete]
uri: /*

View File

@ -1,5 +1,3 @@
default_group: everybody
groups:
admin:
users:
@ -19,6 +17,5 @@ groups:
permissions:
admin:
groups: [admin, tech_writers]
users: []
allowed_permissions: [create, read, update, delete]
uri: /*

View File

@ -1,4 +1,3 @@
default_group: everybody
users:
admin:
@ -41,52 +40,43 @@ permissions:
# Admins have access to everything.
admin:
groups: [admin]
users: []
allowed_permissions: [create, read, update, delete]
uri: /*
# Everybody can participate in tasks assigned to them.
tasks-crud:
groups: [everybody]
users: []
allowed_permissions: [create, read, update, delete]
uri: /tasks/*
# Everybody can start all intstances
create-test-instances:
groups: [ everybody ]
users: [ ]
allowed_permissions: [ create ]
uri: /process-instances/*
# Everyone can see everything (all groups, and processes are visible)
read-all-process-groups:
groups: [ everybody ]
users: [ ]
allowed_permissions: [ read ]
uri: /process-groups/*
read-all-process-models:
groups: [ everybody ]
users: [ ]
allowed_permissions: [ read ]
uri: /process-models/*
read-all-process-instance:
groups: [ everybody ]
users: [ ]
allowed_permissions: [ read ]
uri: /process-instances/*
read-process-instance-reports:
groups: [ everybody ]
users: [ ]
allowed_permissions: [ read ]
uri: /process-instances/reports/*
processes-read:
groups: [ everybody ]
users: [ ]
allowed_permissions: [ read ]
uri: /processes
groups-everybody:
groups: [everybody]
users: []
allowed_permissions: [create, read]
uri: /v1.0/user-groups/for-current-user

View File

@ -0,0 +1,17 @@
groups:
admin:
users: [admin@spiffworkflow.org]
permissions:
process-groups-ro:
groups: [admin]
allowed_permissions: [read]
uri: PG:ALL
basic:
groups: [admin]
allowed_permissions: [all]
uri: BASIC
elevated-operations:
groups: [admin]
allowed_permissions: [all]
uri: ELEVATED

View File

@ -0,0 +1,21 @@
groups:
admin:
users: [admin@spiffworkflow.org]
permissions:
process-groups-ro:
groups: [admin]
allowed_permissions: [read]
uri: PG:ALL
basic:
groups: [admin]
allowed_permissions: [all]
uri: BASIC
elevated-operations:
groups: [admin]
allowed_permissions: [all]
uri: ELEVATED
process-model-publish:
groups: [admin]
allowed_permissions: [create]
uri: /process-model-publish/*

View File

@ -1,84 +0,0 @@
default_group: everybody
groups:
admin:
users: [admin@spiffworkflow.org]
permissions:
admin:
groups: [admin]
users: []
allowed_permissions: [read]
uri: /*
tasks-crud:
groups: [admin]
users: []
allowed_permissions: [create, update, delete]
uri: /tasks/*
process-instances-crud:
groups: [ admin ]
users: [ ]
allowed_permissions: [create, update, delete]
uri: /process-instances/*
suspend:
groups: [admin]
users: []
allowed_permissions: [create]
uri: /v1.0/process-instance-suspend
terminate:
groups: [admin]
users: []
allowed_permissions: [create]
uri: /v1.0/process-instance-terminate
resume:
groups: [admin]
users: []
allowed_permissions: [create]
uri: /v1.0/process-instance-resume
reset:
groups: [admin]
users: []
allowed_permissions: [create]
uri: /v1.0/process-instance-reset
users-exist:
groups: [admin]
users: []
allowed_permissions: [create]
uri: /v1.0/users/exists/by-username
send-event:
groups: [admin]
users: []
allowed_permissions: [create]
uri: /v1.0/send-event/*
task-complete:
groups: [admin]
users: []
allowed_permissions: [create]
uri: /v1.0/task-complete/*
messages:
groups: [admin]
users: []
allowed_permissions: [create]
uri: /v1.0/messages/*
secrets:
groups: [admin]
users: []
allowed_permissions: [create, update, delete]
uri: /v1.0/secrets/*
task-data:
groups: [admin]
users: []
allowed_permissions: [update]
uri: /v1.0/task-data/*

View File

@ -1,4 +1,3 @@
default_group: everybody
groups:
admin:
@ -11,6 +10,5 @@ groups:
permissions:
admin:
groups: [admin, group1, group2]
users: []
allowed_permissions: [create, read, update, delete]
uri: /*

View File

@ -1,4 +1,3 @@
default_group: everybody
groups:
admin:
@ -7,6 +6,5 @@ groups:
permissions:
admin:
groups: [admin]
users: []
allowed_permissions: [create, read, update, delete]
uri: /*

View File

@ -1,5 +1,3 @@
default_group: everybody
users:
testadmin1:
service: https://testing/openid/thing
@ -18,51 +16,50 @@ groups:
users: [testuser2, testuser3, testuser4]
permissions:
admin:
process-groups-all:
groups: [admin]
users: []
allowed_permissions: [create, read, update, delete]
uri: /*
allowed_permissions: [all]
uri: PG:ALL
basic:
groups: [admin]
allowed_permissions: [all]
uri: BASIC
elevated-operations:
groups: [admin]
allowed_permissions: [all]
uri: ELEVATED
read-all:
groups: ["Finance Team", hr, admin]
users: []
allowed_permissions: [read]
uri: /*
process-instances-find-by-id:
groups: [everybody]
users: []
allowed_permissions: [read]
uri: /process-instances/find-by-id/*
tasks-crud:
groups: [everybody]
users: []
allowed_permissions: [create, read, update, delete]
uri: /tasks/*
# TODO: all uris should really have the same structure
finance-admin-group:
groups: ["Finance Team"]
users: [testuser4]
allowed_permissions: [create, read, update, delete]
uri: /process-groups/finance/*
allowed_permissions: [all]
uri: PG:finance
finance-admin-model:
groups: ["Finance Team"]
users: [testuser4]
allowed_permissions: [create, read, update, delete]
uri: /process-models/finance/*
finance-hr-start:
groups: ["hr"]
allowed_permissions: [start]
uri: PG:finance
finance-admin-model-lanes:
groups: ["Finance Team"]
users: [testuser4]
allowed_permissions: [create, read, update, delete]
uri: /process-models/finance:model_with_lanes/*
finance-admin-instance-run:
groups: ["Finance Team"]
users: [testuser4]
allowed_permissions: [create, read, update, delete]
uri: /process-instances/*

View File

@ -53,7 +53,7 @@ def process_group_delete(modified_process_group_id: str) -> flask.wrappers.Respo
process_group_id = _un_modify_modified_process_model_id(modified_process_group_id)
try:
ProcessModelService().process_group_delete(process_group_id)
ProcessModelService.process_group_delete(process_group_id)
except ProcessModelWithInstancesNotDeletableError as exception:
raise ApiError(
error_code="existing_instances",
@ -88,7 +88,7 @@ def process_group_list(
process_group_identifier: Optional[str] = None, page: int = 1, per_page: int = 100
) -> flask.wrappers.Response:
process_groups = ProcessModelService.get_process_groups_for_api(process_group_identifier)
batch = ProcessModelService().get_batch(items=process_groups, page=page, per_page=per_page)
batch = ProcessModelService.get_batch(items=process_groups, page=page, per_page=per_page)
pages = len(process_groups) // per_page
remainder = len(process_groups) % per_page
if remainder > 0:
@ -128,7 +128,7 @@ def process_group_show(
def process_group_move(modified_process_group_identifier: str, new_location: str) -> flask.wrappers.Response:
"""Process_group_move."""
original_process_group_id = _un_modify_modified_process_model_id(modified_process_group_identifier)
new_process_group = ProcessModelService().process_group_move(original_process_group_id, new_location)
new_process_group = ProcessModelService.process_group_move(original_process_group_id, new_location)
_commit_and_push_to_git(
f"User: {g.user.username} moved process group {original_process_group_id} to {new_process_group.id}"
)

View File

@ -124,7 +124,8 @@ def process_instance_run(
processor = None
try:
processor = ProcessInstanceService.run_process_instance_with_processor(process_instance)
if not ProcessInstanceQueueService.is_enqueued_to_run_in_the_future(process_instance):
processor = ProcessInstanceService.run_process_instance_with_processor(process_instance)
except (
ApiError,
ProcessInstanceIsNotEnqueuedError,

View File

@ -30,6 +30,7 @@ from spiffworkflow_backend.routes.process_api_blueprint import _get_process_mode
from spiffworkflow_backend.routes.process_api_blueprint import (
_un_modify_modified_process_model_id,
)
from spiffworkflow_backend.services.file_system_service import FileSystemService
from spiffworkflow_backend.services.git_service import GitCommandError
from spiffworkflow_backend.services.git_service import GitService
from spiffworkflow_backend.services.git_service import MissingGitConfigsError
@ -43,6 +44,7 @@ from spiffworkflow_backend.services.process_model_service import ProcessModelSer
from spiffworkflow_backend.services.process_model_service import (
ProcessModelWithInstancesNotDeletableError,
)
from spiffworkflow_backend.services.process_model_test_runner_service import ProcessModelTestRunner
from spiffworkflow_backend.services.spec_file_service import (
ProcessModelFileInvalidError,
)
@ -104,7 +106,7 @@ def process_model_delete(
"""Process_model_delete."""
process_model_identifier = modified_process_model_identifier.replace(":", "/")
try:
ProcessModelService().process_model_delete(process_model_identifier)
ProcessModelService.process_model_delete(process_model_identifier)
except ProcessModelWithInstancesNotDeletableError as exception:
raise ApiError(
error_code="existing_instances",
@ -182,7 +184,7 @@ def process_model_show(modified_process_model_identifier: str, include_file_refe
def process_model_move(modified_process_model_identifier: str, new_location: str) -> flask.wrappers.Response:
"""Process_model_move."""
original_process_model_id = _un_modify_modified_process_model_id(modified_process_model_identifier)
new_process_model = ProcessModelService().process_model_move(original_process_model_id, new_location)
new_process_model = ProcessModelService.process_model_move(original_process_model_id, new_location)
_commit_and_push_to_git(
f"User: {g.user.username} moved process model {original_process_model_id} to {new_process_model.id}"
)
@ -219,7 +221,7 @@ def process_model_list(
recursive=recursive,
filter_runnable_by_user=filter_runnable_by_user,
)
process_models_to_return = ProcessModelService().get_batch(process_models, page=page, per_page=per_page)
process_models_to_return = ProcessModelService.get_batch(process_models, page=page, per_page=per_page)
if include_parent_groups:
process_group_cache = IdToProcessGroupMapping({})
@ -314,6 +316,29 @@ def process_model_file_show(modified_process_model_identifier: str, file_name: s
return make_response(jsonify(file), 200)
def process_model_test_run(
modified_process_model_identifier: str,
test_case_file: Optional[str] = None,
test_case_identifier: Optional[str] = None,
) -> flask.wrappers.Response:
process_model_identifier = modified_process_model_identifier.replace(":", "/")
process_model = _get_process_model(process_model_identifier)
process_model_test_runner = ProcessModelTestRunner(
process_model_directory_path=FileSystemService.root_path(),
process_model_directory_for_test_discovery=FileSystemService.full_path_from_id(process_model.id),
test_case_file=test_case_file,
test_case_identifier=test_case_identifier,
)
process_model_test_runner.run()
response_json = {
"all_passed": process_model_test_runner.all_test_cases_passed(),
"passing": process_model_test_runner.passing_tests(),
"failing": process_model_test_runner.failing_tests(),
}
return make_response(jsonify(response_json), 200)
# {
# "natural_language_text": "Create a bug tracker process model \
# with a bug-details form that collects summary, description, and priority"

View File

@ -257,7 +257,7 @@ def manual_complete_task(
process_instance = ProcessInstanceModel.query.filter(ProcessInstanceModel.id == int(process_instance_id)).first()
if process_instance:
processor = ProcessInstanceProcessor(process_instance)
processor.manual_complete_task(task_guid, execute)
processor.manual_complete_task(task_guid, execute, g.user)
else:
raise ApiError(
error_code="complete_task",
@ -468,8 +468,12 @@ def get_ready_engine_step_count(bpmn_process_instance: BpmnWorkflow) -> int:
def _dequeued_interstitial_stream(process_instance_id: int) -> Generator[Optional[str], Optional[str], None]:
process_instance = _find_process_instance_by_id_or_raise(process_instance_id)
with ProcessInstanceQueueService.dequeued(process_instance):
yield from _interstitial_stream(process_instance)
# TODO: currently this just redirects back to home if the process has not been started
# need something better to show?
if not ProcessInstanceQueueService.is_enqueued_to_run_in_the_future(process_instance):
with ProcessInstanceQueueService.dequeued(process_instance):
yield from _interstitial_stream(process_instance)
def interstitial(process_instance_id: int) -> Response:

View File

@ -34,6 +34,5 @@ class RefreshPermissions(Script):
*args: Any,
**kwargs: Any,
) -> Any:
"""Run."""
group_info = args[0]
AuthorizationService.refresh_permissions(group_info)

View File

@ -1,43 +0,0 @@
"""Save process instance metadata."""
from typing import Any
from spiffworkflow_backend.models.db import db
from spiffworkflow_backend.models.process_instance_metadata import (
ProcessInstanceMetadataModel,
)
from spiffworkflow_backend.models.script_attributes_context import (
ScriptAttributesContext,
)
from spiffworkflow_backend.scripts.script import Script
class SaveProcessInstanceMetadata(Script):
"""SaveProcessInstanceMetadata."""
def get_description(self) -> str:
"""Get_description."""
return """Save a given dict as process instance metadata (useful for creating reports)."""
def run(
self,
script_attributes_context: ScriptAttributesContext,
*args: Any,
**kwargs: Any,
) -> Any:
"""Run."""
metadata_dict = args[0]
if script_attributes_context.process_instance_id is None:
raise self.get_proces_instance_id_is_missing_error("save_process_instance_metadata")
for key, value in metadata_dict.items():
pim = ProcessInstanceMetadataModel.query.filter_by(
process_instance_id=script_attributes_context.process_instance_id,
key=key,
).first()
if pim is None:
pim = ProcessInstanceMetadataModel(
process_instance_id=script_attributes_context.process_instance_id,
key=key,
)
pim.value = value
db.session.add(pim)
db.session.commit()

View File

@ -1,11 +1,9 @@
"""Authorization_service."""
import inspect
import re
from dataclasses import dataclass
from hashlib import sha256
from hmac import compare_digest
from hmac import HMAC
from typing import Any
from typing import Optional
from typing import Set
from typing import TypedDict
@ -29,7 +27,6 @@ from spiffworkflow_backend.models.permission_target import PermissionTargetModel
from spiffworkflow_backend.models.principal import MissingPrincipalError
from spiffworkflow_backend.models.principal import PrincipalModel
from spiffworkflow_backend.models.user import UserModel
from spiffworkflow_backend.models.user import UserNotFoundError
from spiffworkflow_backend.models.user_group_assignment import UserGroupAssignmentModel
from spiffworkflow_backend.routes.openid_blueprint import openid_blueprint
from spiffworkflow_backend.services.authentication_service import NotAuthorizedError
@ -42,25 +39,23 @@ from spiffworkflow_backend.services.user_service import UserService
class PermissionsFileNotSetError(Exception):
"""PermissionsFileNotSetError."""
pass
class HumanTaskNotFoundError(Exception):
"""HumanTaskNotFoundError."""
pass
class UserDoesNotHaveAccessToTaskError(Exception):
"""UserDoesNotHaveAccessToTaskError."""
pass
class InvalidPermissionError(Exception):
"""InvalidPermissionError."""
pass
@dataclass
class PermissionToAssign:
"""PermissionToAssign."""
permission: str
target_uri: str
@ -80,10 +75,12 @@ PATH_SEGMENTS_FOR_PERMISSION_ALL = [
"path": "/process-instances",
"relevant_permissions": ["create", "read", "delete"],
},
{"path": "/process-instance-suspend", "relevant_permissions": ["create"]},
{"path": "/process-instance-terminate", "relevant_permissions": ["create"]},
{"path": "/process-data", "relevant_permissions": ["read"]},
{"path": "/process-data-file-download", "relevant_permissions": ["read"]},
{"path": "/process-instance-suspend", "relevant_permissions": ["create"]},
{"path": "/process-instance-terminate", "relevant_permissions": ["create"]},
{"path": "/process-model-natural-language", "relevant_permissions": ["create"]},
{"path": "/process-model-publish", "relevant_permissions": ["create"]},
{"path": "/task-data", "relevant_permissions": ["read", "update"]},
]
@ -93,21 +90,29 @@ class UserToGroupDict(TypedDict):
group_identifier: str
class DesiredPermissionDict(TypedDict):
"""DesiredPermissionDict."""
class AddedPermissionDict(TypedDict):
group_identifiers: Set[str]
permission_assignments: list[PermissionAssignmentModel]
user_to_group_identifiers: list[UserToGroupDict]
class DesiredGroupPermissionDict(TypedDict):
actions: list[str]
uri: str
class GroupPermissionsDict(TypedDict):
users: list[str]
name: str
permissions: list[DesiredGroupPermissionDict]
class AuthorizationService:
"""Determine whether a user has permission to perform their request."""
# https://stackoverflow.com/a/71320673/6090676
@classmethod
def verify_sha256_token(cls, auth_header: Optional[str]) -> None:
"""Verify_sha256_token."""
if auth_header is None:
raise TokenNotProvidedError(
"unauthorized",
@ -123,7 +128,6 @@ class AuthorizationService:
@classmethod
def has_permission(cls, principals: list[PrincipalModel], permission: str, target_uri: str) -> bool:
"""Has_permission."""
principal_ids = [p.id for p in principals]
target_uri_normalized = target_uri.removeprefix(V1_API_PATH_PREFIX)
@ -153,7 +157,6 @@ class AuthorizationService:
@classmethod
def user_has_permission(cls, user: UserModel, permission: str, target_uri: str) -> bool:
"""User_has_permission."""
if user.principal is None:
raise MissingPrincipalError(f"Missing principal for user with id: {user.id}")
@ -179,7 +182,6 @@ class AuthorizationService:
@classmethod
def associate_user_with_group(cls, user: UserModel, group: GroupModel) -> None:
"""Associate_user_with_group."""
user_group_assignemnt = UserGroupAssignmentModel.query.filter_by(user_id=user.id, group_id=group.id).first()
if user_group_assignemnt is None:
user_group_assignemnt = UserGroupAssignmentModel(user_id=user.id, group_id=group.id)
@ -187,88 +189,13 @@ class AuthorizationService:
db.session.commit()
@classmethod
def import_permissions_from_yaml_file(cls, raise_if_missing_user: bool = False) -> DesiredPermissionDict:
"""Import_permissions_from_yaml_file."""
if current_app.config["SPIFFWORKFLOW_BACKEND_PERMISSIONS_FILE_NAME"] is None:
raise (
PermissionsFileNotSetError(
"SPIFFWORKFLOW_BACKEND_PERMISSIONS_FILE_NAME needs to be set in order to import permissions"
)
)
permission_configs = None
with open(current_app.config["SPIFFWORKFLOW_BACKEND_PERMISSIONS_FILE_ABSOLUTE_PATH"]) as file:
permission_configs = yaml.safe_load(file)
default_group = None
unique_user_group_identifiers: Set[str] = set()
user_to_group_identifiers: list[UserToGroupDict] = []
if "default_group" in permission_configs:
default_group_identifier = permission_configs["default_group"]
default_group = GroupService.find_or_create_group(default_group_identifier)
unique_user_group_identifiers.add(default_group_identifier)
if "groups" in permission_configs:
for group_identifier, group_config in permission_configs["groups"].items():
group = GroupService.find_or_create_group(group_identifier)
unique_user_group_identifiers.add(group_identifier)
for username in group_config["users"]:
user = UserModel.query.filter_by(username=username).first()
if user is None:
if raise_if_missing_user:
raise (UserNotFoundError(f"Could not find a user with name: {username}"))
continue
user_to_group_dict: UserToGroupDict = {
"username": user.username,
"group_identifier": group_identifier,
}
user_to_group_identifiers.append(user_to_group_dict)
cls.associate_user_with_group(user, group)
permission_assignments = []
if "permissions" in permission_configs:
for _permission_identifier, permission_config in permission_configs["permissions"].items():
uri = permission_config["uri"]
permission_target = cls.find_or_create_permission_target(uri)
for allowed_permission in permission_config["allowed_permissions"]:
if "groups" in permission_config:
for group_identifier in permission_config["groups"]:
group = GroupService.find_or_create_group(group_identifier)
unique_user_group_identifiers.add(group_identifier)
permission_assignments.append(
cls.create_permission_for_principal(
group.principal,
permission_target,
allowed_permission,
)
)
if "users" in permission_config:
for username in permission_config["users"]:
user = UserModel.query.filter_by(username=username).first()
if user is not None:
principal = (
PrincipalModel.query.join(UserModel).filter(UserModel.username == username).first()
)
permission_assignments.append(
cls.create_permission_for_principal(
principal, permission_target, allowed_permission
)
)
if default_group is not None:
for user in UserModel.query.all():
cls.associate_user_with_group(user, default_group)
return {
"group_identifiers": unique_user_group_identifiers,
"permission_assignments": permission_assignments,
"user_to_group_identifiers": user_to_group_identifiers,
}
def import_permissions_from_yaml_file(cls, user_model: Optional[UserModel] = None) -> AddedPermissionDict:
group_permissions = cls.parse_permissions_yaml_into_group_info()
result = cls.add_permissions_from_group_permissions(group_permissions, user_model)
return result
@classmethod
def find_or_create_permission_target(cls, uri: str) -> PermissionTargetModel:
"""Find_or_create_permission_target."""
uri_with_percent = re.sub(r"\*", "%", uri)
target_uri_normalized = uri_with_percent.removeprefix(V1_API_PATH_PREFIX)
permission_target: Optional[PermissionTargetModel] = PermissionTargetModel.query.filter_by(
@ -287,7 +214,6 @@ class AuthorizationService:
permission_target: PermissionTargetModel,
permission: str,
) -> PermissionAssignmentModel:
"""Create_permission_for_principal."""
permission_assignment: Optional[PermissionAssignmentModel] = PermissionAssignmentModel.query.filter_by(
principal_id=principal.id,
permission_target_id=permission_target.id,
@ -306,7 +232,6 @@ class AuthorizationService:
@classmethod
def should_disable_auth_for_request(cls) -> bool:
"""Should_disable_auth_for_request."""
swagger_functions = ["get_json_spec"]
authentication_exclusion_list = [
"status",
@ -344,7 +269,6 @@ class AuthorizationService:
@classmethod
def get_permission_from_http_method(cls, http_method: str) -> Optional[str]:
"""Get_permission_from_request_method."""
request_method_mapper = {
"POST": "create",
"GET": "read",
@ -363,7 +287,6 @@ class AuthorizationService:
@classmethod
def check_for_permission(cls) -> None:
"""Check_for_permission."""
if cls.should_disable_auth_for_request():
return None
@ -397,11 +320,6 @@ class AuthorizationService:
@staticmethod
def decode_auth_token(auth_token: str) -> dict[str, Union[str, None]]:
"""Decode the auth token.
:param auth_token:
:return: integer|string
"""
secret_key = current_app.config.get("SECRET_KEY")
if secret_key is None:
raise KeyError("we need current_app.config to have a SECRET_KEY")
@ -445,10 +363,11 @@ class AuthorizationService:
@classmethod
def create_user_from_sign_in(cls, user_info: dict) -> UserModel:
"""Create_user_from_sign_in."""
"""Name, family_name, given_name, middle_name, nickname, preferred_username,"""
"""Profile, picture, website, gender, birthdate, zoneinfo, locale, and updated_at. """
"""Email."""
"""Fields from user_info.
name, family_name, given_name, middle_name, nickname, preferred_username,
profile, picture, website, gender, birthdate, zoneinfo, locale,updated_at, email.
"""
is_new_user = False
user_attributes = {}
@ -506,7 +425,7 @@ class AuthorizationService:
# we are also a little apprehensive about pre-creating users
# before the user signs in, because we won't know things like
# the external service user identifier.
cls.import_permissions_from_yaml_file()
cls.import_permissions_from_yaml_file(user_model)
if is_new_user:
UserService.add_user_to_human_tasks_if_appropriate(user_model)
@ -521,11 +440,6 @@ class AuthorizationService:
process_related_path_segment: str,
target_uris: list[str],
) -> list[PermissionToAssign]:
"""Get_permissions_to_assign."""
permissions = permission_set.split(",")
if permission_set == "all":
permissions = ["create", "read", "update", "delete"]
permissions_to_assign: list[PermissionToAssign] = []
# we were thinking that if you can start an instance, you ought to be able to:
@ -556,7 +470,9 @@ class AuthorizationService:
]:
permissions_to_assign.append(PermissionToAssign(permission="read", target_uri=target_uri))
else:
permissions = permission_set.split(",")
if permission_set == "all":
permissions = ["create", "read", "update", "delete"]
for path_segment_dict in PATH_SEGMENTS_FOR_PERMISSION_ALL:
target_uri = f"{path_segment_dict['path']}/{process_related_path_segment}"
relevant_permissions = path_segment_dict["relevant_permissions"]
@ -571,13 +487,11 @@ class AuthorizationService:
@classmethod
def set_basic_permissions(cls) -> list[PermissionToAssign]:
"""Set_basic_permissions."""
permissions_to_assign: list[PermissionToAssign] = []
permissions_to_assign.append(PermissionToAssign(permission="create", target_uri="/active-users/*"))
permissions_to_assign.append(PermissionToAssign(permission="create", target_uri="/process-instances/for-me"))
permissions_to_assign.append(
PermissionToAssign(permission="read", target_uri="/process-instances/report-metadata")
)
permissions_to_assign.append(PermissionToAssign(permission="read", target_uri="/active-users/*"))
permissions_to_assign.append(PermissionToAssign(permission="create", target_uri="/users/exists/by-username"))
permissions_to_assign.append(PermissionToAssign(permission="read", target_uri="/connector-proxy/typeahead/*"))
permissions_to_assign.append(PermissionToAssign(permission="read", target_uri="/debug/version-info"))
permissions_to_assign.append(PermissionToAssign(permission="read", target_uri="/process-groups"))
permissions_to_assign.append(PermissionToAssign(permission="read", target_uri="/process-models"))
@ -585,7 +499,11 @@ class AuthorizationService:
permissions_to_assign.append(PermissionToAssign(permission="read", target_uri="/processes/callers"))
permissions_to_assign.append(PermissionToAssign(permission="read", target_uri="/service-tasks"))
permissions_to_assign.append(PermissionToAssign(permission="read", target_uri="/user-groups/for-current-user"))
permissions_to_assign.append(PermissionToAssign(permission="create", target_uri="/users/exists/by-username"))
permissions_to_assign.append(PermissionToAssign(permission="read", target_uri="/users/search"))
permissions_to_assign.append(
PermissionToAssign(permission="read", target_uri="/process-instances/report-metadata")
)
permissions_to_assign.append(
PermissionToAssign(permission="read", target_uri="/process-instances/find-by-id/*")
)
@ -597,9 +515,37 @@ class AuthorizationService:
permissions_to_assign.append(PermissionToAssign(permission=permission, target_uri="/tasks/*"))
return permissions_to_assign
@classmethod
def set_elevated_permissions(cls) -> list[PermissionToAssign]:
permissions_to_assign: list[PermissionToAssign] = []
for process_instance_action in ["resume", "terminate", "suspend", "reset"]:
permissions_to_assign.append(
PermissionToAssign(permission="create", target_uri=f"/process-instance-{process_instance_action}/*")
)
# FIXME: we need to fix so that user that can start a process-model
# can also start through messages as well
permissions_to_assign.append(PermissionToAssign(permission="create", target_uri="/messages/*"))
permissions_to_assign.append(PermissionToAssign(permission="read", target_uri="/messages"))
permissions_to_assign.append(PermissionToAssign(permission="read", target_uri="/authentications"))
permissions_to_assign.append(
PermissionToAssign(permission="create", target_uri="/can-run-privileged-script/*")
)
permissions_to_assign.append(PermissionToAssign(permission="create", target_uri="/debug/*"))
permissions_to_assign.append(PermissionToAssign(permission="create", target_uri="/send-event/*"))
permissions_to_assign.append(PermissionToAssign(permission="create", target_uri="/task-complete/*"))
# read comes from PG and PM permissions
permissions_to_assign.append(PermissionToAssign(permission="update", target_uri="/task-data/*"))
for permission in ["create", "read", "update", "delete"]:
permissions_to_assign.append(PermissionToAssign(permission=permission, target_uri="/process-instances/*"))
permissions_to_assign.append(PermissionToAssign(permission=permission, target_uri="/secrets/*"))
return permissions_to_assign
@classmethod
def set_process_group_permissions(cls, target: str, permission_set: str) -> list[PermissionToAssign]:
"""Set_process_group_permissions."""
permissions_to_assign: list[PermissionToAssign] = []
process_group_identifier = target.removeprefix("PG:").replace("/", ":").removeprefix(":")
process_related_path_segment = f"{process_group_identifier}:*"
@ -616,7 +562,6 @@ class AuthorizationService:
@classmethod
def set_process_model_permissions(cls, target: str, permission_set: str) -> list[PermissionToAssign]:
"""Set_process_model_permissions."""
permissions_to_assign: list[PermissionToAssign] = []
process_model_identifier = target.removeprefix("PM:").replace("/", ":").removeprefix(":")
process_related_path_segment = f"{process_model_identifier}/*"
@ -644,6 +589,8 @@ class AuthorizationService:
* affects given process-model
BASIC
* Basic access to complete tasks and use the site
ELEVATED
* Operations that require elevated permissions
Permission Macros:
all
@ -666,6 +613,8 @@ class AuthorizationService:
elif target.startswith("BASIC"):
permissions_to_assign += cls.set_basic_permissions()
elif target.startswith("ELEVATED"):
permissions_to_assign += cls.set_elevated_permissions()
elif target == "ALL":
for permission in permissions:
permissions_to_assign.append(PermissionToAssign(permission=permission, target_uri="/*"))
@ -685,7 +634,6 @@ class AuthorizationService:
def add_permission_from_uri_or_macro(
cls, group_identifier: str, permission: str, target: str
) -> list[PermissionAssignmentModel]:
"""Add_permission_from_uri_or_macro."""
group = GroupService.find_or_create_group(group_identifier)
permissions_to_assign = cls.explode_permissions(permission, target)
permission_assignments = []
@ -699,38 +647,112 @@ class AuthorizationService:
return permission_assignments
@classmethod
def refresh_permissions(cls, group_info: list[dict[str, Any]]) -> None:
"""Adds new permission assignments and deletes old ones."""
initial_permission_assignments = PermissionAssignmentModel.query.all()
initial_user_to_group_assignments = UserGroupAssignmentModel.query.all()
result = cls.import_permissions_from_yaml_file()
desired_permission_assignments = result["permission_assignments"]
desired_group_identifiers = result["group_identifiers"]
desired_user_to_group_identifiers = result["user_to_group_identifiers"]
def parse_permissions_yaml_into_group_info(cls) -> list[GroupPermissionsDict]:
if current_app.config["SPIFFWORKFLOW_BACKEND_PERMISSIONS_FILE_NAME"] is None:
raise (
PermissionsFileNotSetError(
"SPIFFWORKFLOW_BACKEND_PERMISSIONS_FILE_NAME needs to be set in order to import permissions"
)
)
for group in group_info:
permission_configs = None
with open(current_app.config["SPIFFWORKFLOW_BACKEND_PERMISSIONS_FILE_ABSOLUTE_PATH"]) as file:
permission_configs = yaml.safe_load(file)
group_permissions_by_group: dict[str, GroupPermissionsDict] = {}
if current_app.config["SPIFFWORKFLOW_BACKEND_DEFAULT_USER_GROUP"]:
default_group_identifier = current_app.config["SPIFFWORKFLOW_BACKEND_DEFAULT_USER_GROUP"]
group_permissions_by_group[default_group_identifier] = {
"name": default_group_identifier,
"users": [],
"permissions": [],
}
if "groups" in permission_configs:
for group_identifier, group_config in permission_configs["groups"].items():
group_info: GroupPermissionsDict = {"name": group_identifier, "users": [], "permissions": []}
for username in group_config["users"]:
group_info["users"].append(username)
group_permissions_by_group[group_identifier] = group_info
if "permissions" in permission_configs:
for _permission_identifier, permission_config in permission_configs["permissions"].items():
uri = permission_config["uri"]
for group_identifier in permission_config["groups"]:
group_permissions_by_group[group_identifier]["permissions"].append(
{"actions": permission_config["allowed_permissions"], "uri": uri}
)
return list(group_permissions_by_group.values())
@classmethod
def add_permissions_from_group_permissions(
cls, group_permissions: list[GroupPermissionsDict], user_model: Optional[UserModel] = None
) -> AddedPermissionDict:
unique_user_group_identifiers: Set[str] = set()
user_to_group_identifiers: list[UserToGroupDict] = []
permission_assignments = []
default_group = None
default_group_identifier = current_app.config["SPIFFWORKFLOW_BACKEND_DEFAULT_USER_GROUP"]
if default_group_identifier:
default_group = GroupService.find_or_create_group(default_group_identifier)
unique_user_group_identifiers.add(default_group_identifier)
for group in group_permissions:
group_identifier = group["name"]
GroupService.find_or_create_group(group_identifier)
for username in group["users"]:
if user_model and username != user_model.username:
continue
user_to_group_dict: UserToGroupDict = {
"username": username,
"group_identifier": group_identifier,
}
desired_user_to_group_identifiers.append(user_to_group_dict)
user_to_group_identifiers.append(user_to_group_dict)
GroupService.add_user_to_group_or_add_to_waiting(username, group_identifier)
desired_group_identifiers.add(group_identifier)
unique_user_group_identifiers.add(group_identifier)
for group in group_permissions:
group_identifier = group["name"]
if user_model and group_identifier not in unique_user_group_identifiers:
continue
for permission in group["permissions"]:
for crud_op in permission["actions"]:
desired_permission_assignments.extend(
permission_assignments.extend(
cls.add_permission_from_uri_or_macro(
group_identifier=group_identifier,
target=permission["uri"],
permission=crud_op,
)
)
desired_group_identifiers.add(group_identifier)
unique_user_group_identifiers.add(group_identifier)
if default_group is not None:
if user_model:
cls.associate_user_with_group(user_model, default_group)
else:
for user in UserModel.query.all():
cls.associate_user_with_group(user, default_group)
return {
"group_identifiers": unique_user_group_identifiers,
"permission_assignments": permission_assignments,
"user_to_group_identifiers": user_to_group_identifiers,
}
@classmethod
def remove_old_permissions_from_added_permissions(
cls,
added_permissions: AddedPermissionDict,
initial_permission_assignments: list[PermissionAssignmentModel],
initial_user_to_group_assignments: list[UserGroupAssignmentModel],
) -> None:
added_permission_assignments = added_permissions["permission_assignments"]
added_group_identifiers = added_permissions["group_identifiers"]
added_user_to_group_identifiers = added_permissions["user_to_group_identifiers"]
for ipa in initial_permission_assignments:
if ipa not in desired_permission_assignments:
if ipa not in added_permission_assignments:
db.session.delete(ipa)
for iutga in initial_user_to_group_assignments:
@ -743,19 +765,23 @@ class AuthorizationService:
"username": iutga.user.username,
"group_identifier": iutga.group.identifier,
}
if current_user_dict not in desired_user_to_group_identifiers:
if current_user_dict not in added_user_to_group_identifiers:
db.session.delete(iutga)
# do not remove the default user group
desired_group_identifiers.add(current_app.config["SPIFFWORKFLOW_BACKEND_DEFAULT_USER_GROUP"])
groups_to_delete = GroupModel.query.filter(GroupModel.identifier.not_in(desired_group_identifiers)).all()
added_group_identifiers.add(current_app.config["SPIFFWORKFLOW_BACKEND_DEFAULT_USER_GROUP"])
groups_to_delete = GroupModel.query.filter(GroupModel.identifier.not_in(added_group_identifiers)).all()
for gtd in groups_to_delete:
db.session.delete(gtd)
db.session.commit()
class KeycloakAuthorization:
"""Interface with Keycloak server."""
# class KeycloakClient:
@classmethod
def refresh_permissions(cls, group_permissions: list[GroupPermissionsDict]) -> None:
"""Adds new permission assignments and deletes old ones."""
initial_permission_assignments = PermissionAssignmentModel.query.all()
initial_user_to_group_assignments = UserGroupAssignmentModel.query.all()
group_permissions = group_permissions + cls.parse_permissions_yaml_into_group_info()
added_permissions = cls.add_permissions_from_group_permissions(group_permissions)
cls.remove_old_permissions_from_added_permissions(
added_permissions, initial_permission_assignments, initial_user_to_group_assignments
)

View File

@ -18,11 +18,17 @@ class BackgroundProcessingService:
"""__init__."""
self.app = app
def process_not_started_process_instances(self) -> None:
"""Since this runs in a scheduler, we need to specify the app context as well."""
with self.app.app_context():
ProcessInstanceLockService.set_thread_local_locking_context("bg:notstarted")
ProcessInstanceService.do_waiting(ProcessInstanceStatus.not_started.value)
def process_waiting_process_instances(self) -> None:
"""Since this runs in a scheduler, we need to specify the app context as well."""
with self.app.app_context():
ProcessInstanceLockService.set_thread_local_locking_context("bg:waiting")
ProcessInstanceService.do_waiting()
ProcessInstanceService.do_waiting(ProcessInstanceStatus.waiting.value)
def process_user_input_required_process_instances(self) -> None:
"""Since this runs in a scheduler, we need to specify the app context as well."""

View File

@ -49,13 +49,12 @@ class FileSystemService:
"""Id_string_to_relative_path."""
return id_string.replace("/", os.sep)
@staticmethod
def process_group_path(name: str) -> str:
"""Category_path."""
@classmethod
def full_path_from_id(cls, id: str) -> str:
return os.path.abspath(
os.path.join(
FileSystemService.root_path(),
FileSystemService.id_string_to_relative_path(name),
cls.root_path(),
cls.id_string_to_relative_path(id),
)
)
@ -65,36 +64,35 @@ class FileSystemService:
return os.path.join(FileSystemService.root_path(), relative_path)
@staticmethod
def process_model_relative_path(spec: ProcessModelInfo) -> str:
def process_model_relative_path(process_model: ProcessModelInfo) -> str:
"""Get the file path to a process model relative to SPIFFWORKFLOW_BACKEND_BPMN_SPEC_ABSOLUTE_DIR.
If the full path is /path/to/process-group-a/group-b/process-model-a, it will return:
process-group-a/group-b/process-model-a
"""
workflow_path = FileSystemService.workflow_path(spec)
workflow_path = FileSystemService.process_model_full_path(process_model)
return os.path.relpath(workflow_path, start=FileSystemService.root_path())
@staticmethod
def process_group_path_for_spec(spec: ProcessModelInfo) -> str:
"""Category_path_for_spec."""
def process_group_path_for_spec(process_model: ProcessModelInfo) -> str:
# os.path.split apparently returns 2 element tulple like: (first/path, last_item)
process_group_id, _ = os.path.split(spec.id_for_file_path())
return FileSystemService.process_group_path(process_group_id)
process_group_id, _ = os.path.split(process_model.id_for_file_path())
return FileSystemService.full_path_from_id(process_group_id)
@classmethod
def process_model_full_path(cls, process_model: ProcessModelInfo) -> str:
return cls.full_path_from_id(process_model.id)
@staticmethod
def workflow_path(spec: ProcessModelInfo) -> str:
"""Workflow_path."""
process_model_path = os.path.join(FileSystemService.root_path(), spec.id_for_file_path())
return process_model_path
@staticmethod
def full_path_to_process_model_file(spec: ProcessModelInfo) -> str:
def full_path_to_process_model_file(process_model: ProcessModelInfo) -> str:
"""Full_path_to_process_model_file."""
return os.path.join(FileSystemService.workflow_path(spec), spec.primary_file_name) # type: ignore
return os.path.join(
FileSystemService.process_model_full_path(process_model), process_model.primary_file_name # type: ignore
)
def next_display_order(self, spec: ProcessModelInfo) -> int:
def next_display_order(self, process_model: ProcessModelInfo) -> int:
"""Next_display_order."""
path = self.process_group_path_for_spec(spec)
path = self.process_group_path_for_spec(process_model)
if os.path.exists(path):
return len(next(os.walk(path))[1])
else:

View File

@ -1,4 +1,3 @@
"""Group_service."""
from typing import Optional
from spiffworkflow_backend.models.db import db
@ -8,11 +7,8 @@ from spiffworkflow_backend.services.user_service import UserService
class GroupService:
"""GroupService."""
@classmethod
def find_or_create_group(cls, group_identifier: str) -> GroupModel:
"""Find_or_create_group."""
group: Optional[GroupModel] = GroupModel.query.filter_by(identifier=group_identifier).first()
if group is None:
group = GroupModel(identifier=group_identifier)
@ -23,7 +19,6 @@ class GroupService:
@classmethod
def add_user_to_group_or_add_to_waiting(cls, username: str, group_identifier: str) -> None:
"""Add_user_to_group_or_add_to_waiting."""
group = cls.find_or_create_group(group_identifier)
user = UserModel.query.filter_by(username=username).first()
if user:

View File

@ -25,7 +25,6 @@ from uuid import UUID
import dateparser
import pytz
from flask import current_app
from flask import g
from lxml import etree # type: ignore
from lxml.etree import XMLSyntaxError # type: ignore
from RestrictedPython import safe_globals # type: ignore
@ -292,6 +291,7 @@ class CustomBpmnScriptEngine(PythonScriptEngine): # type: ignore
"datetime": datetime,
"decimal": decimal,
"enumerate": enumerate,
"filter": filter,
"format": format,
"list": list,
"dict": dict,
@ -420,7 +420,6 @@ class ProcessInstanceProcessor:
)
self.process_instance_model = process_instance_model
self.process_model_service = ProcessModelService()
bpmn_process_spec = None
self.full_bpmn_process_dict: dict = {}
@ -1018,7 +1017,7 @@ class ProcessInstanceProcessor:
ready_or_waiting_tasks = self.get_all_ready_or_waiting_tasks()
process_model_display_name = ""
process_model_info = self.process_model_service.get_process_model(
process_model_info = ProcessModelService.get_process_model(
self.process_instance_model.process_model_identifier
)
if process_model_info is not None:
@ -1109,7 +1108,7 @@ class ProcessInstanceProcessor:
# TODO: do_engine_steps without a lock
self.do_engine_steps(save=True)
def manual_complete_task(self, task_id: str, execute: bool) -> None:
def manual_complete_task(self, task_id: str, execute: bool, user: UserModel) -> None:
"""Mark the task complete optionally executing it."""
spiff_task = self.bpmn_process_instance.get_task_from_id(UUID(task_id))
event_type = ProcessInstanceEventType.task_skipped.value
@ -1122,7 +1121,7 @@ class ProcessInstanceProcessor:
f" instance {self.process_instance_model.id}"
)
human_task = HumanTaskModel.query.filter_by(task_id=task_id).first()
self.complete_task(spiff_task, human_task=human_task, user=g.user)
self.complete_task(spiff_task, human_task=human_task, user=user)
elif execute:
current_app.logger.info(
f"Manually executing Task {spiff_task.task_spec.name} of process"

View File

@ -31,8 +31,6 @@ class ProcessInstanceQueueService:
def _configure_and_save_queue_entry(
cls, process_instance: ProcessInstanceModel, queue_entry: ProcessInstanceQueueModel
) -> None:
# TODO: configurable params (priority/run_at)
queue_entry.run_at_in_seconds = round(time.time())
queue_entry.priority = 2
queue_entry.status = process_instance.status
queue_entry.locked_by = None
@ -42,13 +40,18 @@ class ProcessInstanceQueueService:
db.session.commit()
@classmethod
def enqueue_new_process_instance(cls, process_instance: ProcessInstanceModel) -> None:
queue_entry = ProcessInstanceQueueModel(process_instance_id=process_instance.id)
def enqueue_new_process_instance(cls, process_instance: ProcessInstanceModel, run_at_in_seconds: int) -> None:
queue_entry = ProcessInstanceQueueModel(
process_instance_id=process_instance.id, run_at_in_seconds=run_at_in_seconds
)
cls._configure_and_save_queue_entry(process_instance, queue_entry)
@classmethod
def _enqueue(cls, process_instance: ProcessInstanceModel) -> None:
queue_entry = ProcessInstanceLockService.unlock(process_instance.id)
current_time = round(time.time())
if current_time > queue_entry.run_at_in_seconds:
queue_entry.run_at_in_seconds = current_time
cls._configure_and_save_queue_entry(process_instance, queue_entry)
@classmethod
@ -115,14 +118,16 @@ class ProcessInstanceQueueService:
@classmethod
def entries_with_status(
cls,
status_value: str = ProcessInstanceStatus.waiting.value,
locked_by: Optional[str] = None,
status_value: str,
locked_by: Optional[str],
run_at_in_seconds_threshold: int,
) -> List[ProcessInstanceQueueModel]:
return (
db.session.query(ProcessInstanceQueueModel)
.filter(
ProcessInstanceQueueModel.status == status_value,
ProcessInstanceQueueModel.locked_by == locked_by,
ProcessInstanceQueueModel.run_at_in_seconds <= run_at_in_seconds_threshold,
)
.all()
)
@ -130,8 +135,23 @@ class ProcessInstanceQueueService:
@classmethod
def peek_many(
cls,
status_value: str = ProcessInstanceStatus.waiting.value,
status_value: str,
run_at_in_seconds_threshold: int,
) -> List[int]:
queue_entries = cls.entries_with_status(status_value, None)
queue_entries = cls.entries_with_status(status_value, None, run_at_in_seconds_threshold)
ids_with_status = [entry.process_instance_id for entry in queue_entries]
return ids_with_status
@staticmethod
def is_enqueued_to_run_in_the_future(process_instance: ProcessInstanceModel) -> bool:
queue_entry = (
db.session.query(ProcessInstanceQueueModel)
.filter(ProcessInstanceQueueModel.process_instance_id == process_instance.id)
.first()
)
if queue_entry is None:
return False
current_time = round(time.time())
return queue_entry.run_at_in_seconds > current_time

View File

@ -77,7 +77,8 @@ class ProcessInstanceService:
)
db.session.add(process_instance_model)
db.session.commit()
ProcessInstanceQueueService.enqueue_new_process_instance(process_instance_model)
run_at_in_seconds = round(time.time())
ProcessInstanceQueueService.enqueue_new_process_instance(process_instance_model, run_at_in_seconds)
return process_instance_model
@classmethod
@ -134,9 +135,12 @@ class ProcessInstanceService:
return False
@classmethod
def do_waiting(cls, status_value: str = ProcessInstanceStatus.waiting.value) -> None:
def do_waiting(cls, status_value: str) -> None:
"""Do_waiting."""
process_instance_ids_to_check = ProcessInstanceQueueService.peek_many(status_value)
run_at_in_seconds_threshold = round(time.time())
process_instance_ids_to_check = ProcessInstanceQueueService.peek_many(
status_value, run_at_in_seconds_threshold
)
if len(process_instance_ids_to_check) == 0:
return
@ -192,8 +196,7 @@ class ProcessInstanceService:
"""
# navigation = processor.bpmn_process_instance.get_deep_nav_list()
# ProcessInstanceService.update_navigation(navigation, processor)
process_model_service = ProcessModelService()
process_model_service.get_process_model(processor.process_model_identifier)
ProcessModelService.get_process_model(processor.process_model_identifier)
process_instance_api = ProcessInstanceApi(
id=processor.get_process_instance_id(),
status=processor.get_status(),

View File

@ -60,12 +60,7 @@ class ProcessModelService(FileSystemService):
def is_process_group_identifier(cls, process_group_identifier: str) -> bool:
"""Is_process_group_identifier."""
if os.path.exists(FileSystemService.root_path()):
process_group_path = os.path.abspath(
os.path.join(
FileSystemService.root_path(),
FileSystemService.id_string_to_relative_path(process_group_identifier),
)
)
process_group_path = FileSystemService.full_path_from_id(process_group_identifier)
return cls.is_process_group(process_group_path)
return False
@ -82,12 +77,7 @@ class ProcessModelService(FileSystemService):
def is_process_model_identifier(cls, process_model_identifier: str) -> bool:
"""Is_process_model_identifier."""
if os.path.exists(FileSystemService.root_path()):
process_model_path = os.path.abspath(
os.path.join(
FileSystemService.root_path(),
FileSystemService.id_string_to_relative_path(process_model_identifier),
)
)
process_model_path = FileSystemService.full_path_from_id(process_model_identifier)
return cls.is_process_model(process_model_path)
return False
@ -104,7 +94,6 @@ class ProcessModelService(FileSystemService):
page: int = 1,
per_page: int = 10,
) -> list[T]:
"""Get_batch."""
start = (page - 1) * per_page
end = start + per_page
return items[start:end]
@ -139,8 +128,8 @@ class ProcessModelService(FileSystemService):
cls.write_json_file(json_path, json_data)
process_model.id = process_model_id
def process_model_delete(self, process_model_id: str) -> None:
"""Delete Procecss Model."""
@classmethod
def process_model_delete(cls, process_model_id: str) -> None:
instances = ProcessInstanceModel.query.filter(
ProcessInstanceModel.process_model_identifier == process_model_id
).all()
@ -148,19 +137,19 @@ class ProcessModelService(FileSystemService):
raise ProcessModelWithInstancesNotDeletableError(
f"We cannot delete the model `{process_model_id}`, there are existing instances that depend on it."
)
process_model = self.get_process_model(process_model_id)
path = self.workflow_path(process_model)
process_model = cls.get_process_model(process_model_id)
path = cls.process_model_full_path(process_model)
shutil.rmtree(path)
def process_model_move(self, original_process_model_id: str, new_location: str) -> ProcessModelInfo:
"""Process_model_move."""
process_model = self.get_process_model(original_process_model_id)
original_model_path = self.workflow_path(process_model)
@classmethod
def process_model_move(cls, original_process_model_id: str, new_location: str) -> ProcessModelInfo:
process_model = cls.get_process_model(original_process_model_id)
original_model_path = cls.process_model_full_path(process_model)
_, model_id = os.path.split(original_model_path)
new_relative_path = os.path.join(new_location, model_id)
new_model_path = os.path.abspath(os.path.join(FileSystemService.root_path(), new_relative_path))
shutil.move(original_model_path, new_model_path)
new_process_model = self.get_process_model(new_relative_path)
new_process_model = cls.get_process_model(new_relative_path)
return new_process_model
@classmethod
@ -314,12 +303,7 @@ class ProcessModelService(FileSystemService):
def get_process_group(cls, process_group_id: str, find_direct_nested_items: bool = True) -> ProcessGroup:
"""Look for a given process_group, and return it."""
if os.path.exists(FileSystemService.root_path()):
process_group_path = os.path.abspath(
os.path.join(
FileSystemService.root_path(),
FileSystemService.id_string_to_relative_path(process_group_id),
)
)
process_group_path = FileSystemService.full_path_from_id(process_group_id)
if cls.is_process_group(process_group_path):
return cls.find_or_create_process_group(
process_group_path,
@ -330,13 +314,11 @@ class ProcessModelService(FileSystemService):
@classmethod
def add_process_group(cls, process_group: ProcessGroup) -> ProcessGroup:
"""Add_process_group."""
return cls.update_process_group(process_group)
@classmethod
def update_process_group(cls, process_group: ProcessGroup) -> ProcessGroup:
"""Update_process_group."""
cat_path = cls.process_group_path(process_group.id)
cat_path = cls.full_path_from_id(process_group.id)
os.makedirs(cat_path, exist_ok=True)
json_path = os.path.join(cat_path, cls.PROCESS_GROUP_JSON_FILE)
serialized_process_group = process_group.serialized
@ -346,33 +328,33 @@ class ProcessModelService(FileSystemService):
cls.write_json_file(json_path, serialized_process_group)
return process_group
def process_group_move(self, original_process_group_id: str, new_location: str) -> ProcessGroup:
"""Process_group_move."""
original_group_path = self.process_group_path(original_process_group_id)
@classmethod
def process_group_move(cls, original_process_group_id: str, new_location: str) -> ProcessGroup:
original_group_path = cls.full_path_from_id(original_process_group_id)
_, original_group_id = os.path.split(original_group_path)
new_root = os.path.join(FileSystemService.root_path(), new_location)
new_group_path = os.path.abspath(os.path.join(FileSystemService.root_path(), new_root, original_group_id))
destination = shutil.move(original_group_path, new_group_path)
new_process_group = self.get_process_group(destination)
new_process_group = cls.get_process_group(destination)
return new_process_group
def __get_all_nested_models(self, group_path: str) -> list:
"""__get_all_nested_models."""
@classmethod
def __get_all_nested_models(cls, group_path: str) -> list:
all_nested_models = []
for _root, dirs, _files in os.walk(group_path):
for dir in dirs:
model_dir = os.path.join(group_path, dir)
if ProcessModelService.is_process_model(model_dir):
process_model = self.get_process_model(model_dir)
process_model = cls.get_process_model(model_dir)
all_nested_models.append(process_model)
return all_nested_models
def process_group_delete(self, process_group_id: str) -> None:
"""Delete_process_group."""
@classmethod
def process_group_delete(cls, process_group_id: str) -> None:
problem_models = []
path = self.process_group_path(process_group_id)
path = cls.full_path_from_id(process_group_id)
if os.path.exists(path):
nested_models = self.__get_all_nested_models(path)
nested_models = cls.__get_all_nested_models(path)
for process_model in nested_models:
instances = ProcessInstanceModel.query.filter(
ProcessInstanceModel.process_model_identifier == process_model.id
@ -386,15 +368,15 @@ class ProcessModelService(FileSystemService):
f" {problem_models}"
)
shutil.rmtree(path)
self.cleanup_process_group_display_order()
cls._cleanup_process_group_display_order()
def cleanup_process_group_display_order(self) -> List[Any]:
"""Cleanup_process_group_display_order."""
process_groups = self.get_process_groups() # Returns an ordered list
@classmethod
def _cleanup_process_group_display_order(cls) -> List[Any]:
process_groups = cls.get_process_groups() # Returns an ordered list
index = 0
for process_group in process_groups:
process_group.display_order = index
self.update_process_group(process_group)
cls.update_process_group(process_group)
index += 1
return process_groups

View File

@ -0,0 +1,446 @@
import glob
import json
import os
import re
import traceback
from abc import abstractmethod
from dataclasses import dataclass
from typing import Optional
from typing import Type
from typing import Union
from lxml import etree # type: ignore
from SpiffWorkflow.bpmn.exceptions import WorkflowTaskException # type: ignore
from SpiffWorkflow.bpmn.workflow import BpmnWorkflow # type: ignore
from SpiffWorkflow.task import Task as SpiffTask # type: ignore
from SpiffWorkflow.task import TaskState
from spiffworkflow_backend.services.custom_parser import MyCustomParser
class UnrunnableTestCaseError(Exception):
pass
class MissingBpmnFileForTestCaseError(Exception):
pass
class NoTestCasesFoundError(Exception):
pass
class MissingInputTaskData(Exception):
pass
class UnsupporterRunnerDelegateGiven(Exception):
pass
class BpmnFileMissingExecutableProcessError(Exception):
pass
@dataclass
class TestCaseErrorDetails:
error_messages: list[str]
task_error_line: Optional[str] = None
task_trace: Optional[list[str]] = None
task_bpmn_identifier: Optional[str] = None
task_bpmn_name: Optional[str] = None
task_line_number: Optional[int] = None
stacktrace: Optional[list[str]] = None
@dataclass
class TestCaseResult:
passed: bool
bpmn_file: str
test_case_identifier: str
test_case_error_details: Optional[TestCaseErrorDetails] = None
class ProcessModelTestRunnerDelegate:
"""Abstract class for the process model test runner delegate.
All delegates MUST inherit from this class.
"""
def __init__(
self,
process_model_directory_path: str,
) -> None:
self.process_model_directory_path = process_model_directory_path
@abstractmethod
def instantiate_executer(self, bpmn_file: str) -> BpmnWorkflow:
raise NotImplementedError("method instantiate_executer must be implemented")
@abstractmethod
def execute_task(self, spiff_task: SpiffTask, task_data_for_submit: Optional[dict] = None) -> None:
raise NotImplementedError("method execute_task must be implemented")
@abstractmethod
def get_next_task(self, bpmn_process_instance: BpmnWorkflow) -> Optional[SpiffTask]:
raise NotImplementedError("method get_next_task must be implemented")
class ProcessModelTestRunnerMostlyPureSpiffDelegate(ProcessModelTestRunnerDelegate):
    """Delegate that runs bpmn files directly with SpiffWorkflow, without backend services.

    On construction it scans the process model directory so call activities can later be
    resolved to the bpmn files that define their called processes.
    """

    def __init__(
        self,
        process_model_directory_path: str,
    ) -> None:
        super().__init__(process_model_directory_path)
        # maps a bpmn process identifier to the bpmn file that defines it
        self.bpmn_processes_to_file_mappings: dict[str, str] = {}
        # maps a bpmn file to the calledElement identifiers of its call activities
        self.bpmn_files_to_called_element_mappings: dict[str, list[str]] = {}
        self._discover_process_model_processes()

    def instantiate_executer(self, bpmn_file: str) -> BpmnWorkflow:
        """Parse the given bpmn file (plus related called files) and return a BpmnWorkflow.

        Raises BpmnFileMissingExecutableProcessError when no executable process is found.
        """
        parser = MyCustomParser()
        bpmn_file_etree = self._get_etree_from_bpmn_file(bpmn_file)
        parser.add_bpmn_xml(bpmn_file_etree, filename=os.path.basename(bpmn_file))
        for related_file in self._find_related_bpmn_files(bpmn_file):
            related_file_etree = self._get_etree_from_bpmn_file(related_file)
            parser.add_bpmn_xml(related_file_etree, filename=os.path.basename(related_file))
        executable_process = None
        # if multiple executable processes exist, the last one found wins (matches prior behavior)
        for sub_parser in parser.process_parsers.values():
            if sub_parser.process_executable:
                executable_process = sub_parser.bpmn_id
        if executable_process is None:
            raise BpmnFileMissingExecutableProcessError(
                f"Executable process cannot be found in {bpmn_file}. Test cannot run."
            )
        bpmn_process_spec = parser.get_spec(executable_process)
        return BpmnWorkflow(bpmn_process_spec)

    def execute_task(self, spiff_task: SpiffTask, task_data_for_submit: Optional[dict] = None) -> None:
        """Run one spiff task, submitting task data first when given or when the task is manual."""
        if task_data_for_submit is not None or spiff_task.task_spec.manual:
            if task_data_for_submit is not None:
                spiff_task.update_data(task_data_for_submit)
            spiff_task.complete()
        else:
            spiff_task.run()

    def get_next_task(self, bpmn_process_instance: BpmnWorkflow) -> Optional[SpiffTask]:
        """Return the first READY task of the instance, or None when nothing is ready."""
        # get_tasks already returns a sequence; the original wrapped it in list([...]) needlessly
        ready_tasks = bpmn_process_instance.get_tasks(TaskState.READY)
        if len(ready_tasks) > 0:
            return ready_tasks[0]
        return None

    def _get_etree_from_bpmn_file(self, bpmn_file: str) -> etree._Element:
        """Read and parse a bpmn file. Entity resolution is disabled (XXE hardening)."""
        with open(bpmn_file, "rb") as f_handle:
            data = f_handle.read()
        etree_xml_parser = etree.XMLParser(resolve_entities=False)
        return etree.fromstring(data, parser=etree_xml_parser)

    def _find_related_bpmn_files(self, bpmn_file: str) -> list[str]:
        """Recursively collect bpmn files defining processes called (via call activities) from bpmn_file."""
        related_bpmn_files = []
        if bpmn_file in self.bpmn_files_to_called_element_mappings:
            for bpmn_process_identifier in self.bpmn_files_to_called_element_mappings[bpmn_file]:
                if bpmn_process_identifier in self.bpmn_processes_to_file_mappings:
                    new_file = self.bpmn_processes_to_file_mappings[bpmn_process_identifier]
                    related_bpmn_files.append(new_file)
                    related_bpmn_files.extend(self._find_related_bpmn_files(new_file))
        return related_bpmn_files

    def _discover_process_model_processes(
        self,
    ) -> None:
        """Populate the process/file mappings by scanning every *.bpmn under the model directory."""
        process_model_bpmn_file_glob = os.path.join(self.process_model_directory_path, "**", "*.bpmn")
        # the parser is loop-invariant; build it once. Entity resolution disabled (XXE hardening).
        etree_xml_parser = etree.XMLParser(resolve_entities=False)
        for file in glob.glob(process_model_bpmn_file_glob, recursive=True):
            file_norm = os.path.normpath(file)
            if file_norm not in self.bpmn_files_to_called_element_mappings:
                self.bpmn_files_to_called_element_mappings[file_norm] = []
            with open(file_norm, "rb") as f:
                file_contents = f.read()
            # if we cannot load process model then ignore it since it can cause errors unrelated
            # to the test and if it is related, it will most likely be caught further along the test
            try:
                root = etree.fromstring(file_contents, parser=etree_xml_parser)
            except etree.XMLSyntaxError:
                continue
            for call_activity in root.findall(".//bpmn:callActivity", namespaces=DEFAULT_NSMAP):
                if "calledElement" in call_activity.attrib:
                    called_element = call_activity.attrib["calledElement"]
                    self.bpmn_files_to_called_element_mappings[file_norm].append(called_element)
            bpmn_process_element = root.find('.//bpmn:process[@isExecutable="true"]', namespaces=DEFAULT_NSMAP)
            if bpmn_process_element is not None:
                bpmn_process_identifier = bpmn_process_element.attrib["id"]
                self.bpmn_processes_to_file_mappings[bpmn_process_identifier] = file_norm
# Default XML namespace map used for findall/find queries against bpmn files.
DEFAULT_NSMAP = {
    "bpmn": "http://www.omg.org/spec/BPMN/20100524/MODEL",
    "bpmndi": "http://www.omg.org/spec/BPMN/20100524/DI",
    "dc": "http://www.omg.org/spec/DD/20100524/DC",
}
"""
JSON file name:
The name should be in format "test_BPMN_FILE_NAME_IT_TESTS.json".
BPMN_TASK_IDENTIFIER:
can be either task bpmn identifier or in format:
[BPMN_PROCESS_ID]:[TASK_BPMN_IDENTIFIER]
example: 'BasicServiceTaskProcess:service_task_one'
this allows for tasks to share bpmn identifiers across models
which is useful for call activities
DATA for tasks:
This is an array of task data. This allows for the task to
be called multiple times and given different data each time.
This is useful for testing loops where each iteration needs
different input. The test will fail if the task is called
multiple times without task data input for each call.
JSON file format:
{
TEST_CASE_NAME: {
"tasks": {
BPMN_TASK_IDENTIFIER: {
"data": [DATA]
}
},
"expected_output_json": DATA
}
}
"""
class ProcessModelTestRunner:
    """Runs the test case json files for a given process model directory.

    It searches for test case files recursively and will run all it finds by default.
    """

    def __init__(
        self,
        process_model_directory_path: str,
        process_model_test_runner_delegate_class: Type = ProcessModelTestRunnerMostlyPureSpiffDelegate,
        process_model_directory_for_test_discovery: Optional[str] = None,
        test_case_file: Optional[str] = None,
        test_case_identifier: Optional[str] = None,
    ) -> None:
        """Discover test cases and instantiate the delegate.

        Raises UnsupporterRunnerDelegateGiven when the delegate class does not
        inherit from ProcessModelTestRunnerDelegate.
        """
        self.process_model_directory_path = process_model_directory_path
        self.process_model_directory_for_test_discovery = (
            process_model_directory_for_test_discovery or process_model_directory_path
        )
        self.test_case_file = test_case_file
        self.test_case_identifier = test_case_identifier

        if not issubclass(process_model_test_runner_delegate_class, ProcessModelTestRunnerDelegate):
            raise UnsupporterRunnerDelegateGiven(
                "Process model test runner delegate must inherit from ProcessModelTestRunnerDelegate. Given"
                f" class '{process_model_test_runner_delegate_class}' does not"
            )
        self.process_model_test_runner_delegate = process_model_test_runner_delegate_class(
            process_model_directory_path
        )

        # maps json test case files to the bpmn files they exercise
        self.test_mappings = self._discover_process_model_test_cases()
        self.test_case_results: list[TestCaseResult] = []

        # keep track of the current task data index
        self.task_data_index: dict[str, int] = {}

    def all_test_cases_passed(self) -> bool:
        """Return True when no executed test case has failed."""
        failed_tests = self.failing_tests()
        return len(failed_tests) < 1

    def failing_tests(self) -> list[TestCaseResult]:
        """Return results for test cases that failed."""
        return [t for t in self.test_case_results if t.passed is False]

    def passing_tests(self) -> list[TestCaseResult]:
        """Return results for test cases that passed."""
        return [t for t in self.test_case_results if t.passed is True]

    def failing_tests_formatted(self) -> str:
        """Return a human-readable, tab-indented summary of all failing test cases."""
        formatted_tests = ["FAILING TESTS:"]
        for failing_test in self.failing_tests():
            msg = ""
            if failing_test.test_case_error_details is not None:
                msg = "\n\t\t".join(failing_test.test_case_error_details.error_messages)
            formatted_tests.append(f"\t{failing_test.bpmn_file}: {failing_test.test_case_identifier}: {msg}")
        return "\n".join(formatted_tests)

    def run(self) -> None:
        """Run every discovered test case, recording a TestCaseResult for each.

        Raises NoTestCasesFoundError when discovery found nothing to run.
        Exceptions raised by an individual test case are captured as failed results.
        """
        if len(self.test_mappings) < 1:
            raise NoTestCasesFoundError(
                f"Could not find any test cases in given directory: {self.process_model_directory_for_test_discovery}"
            )
        for json_test_case_file, bpmn_file in self.test_mappings.items():
            with open(json_test_case_file) as f:
                # json.load reads directly from the file handle (was json.loads(f.read()))
                json_file_contents = json.load(f)

            for test_case_identifier, test_case_contents in json_file_contents.items():
                if self.test_case_identifier is None or test_case_identifier == self.test_case_identifier:
                    # reset per test case so repeated task calls index their data from 0
                    self.task_data_index = {}
                    try:
                        self.run_test_case(bpmn_file, test_case_identifier, test_case_contents)
                    except Exception as ex:
                        self._add_test_result(False, bpmn_file, test_case_identifier, exception=ex)

    def run_test_case(self, bpmn_file: str, test_case_identifier: str, test_case_contents: dict) -> None:
        """Execute a single test case to process completion and record its result."""
        bpmn_process_instance = self._instantiate_executer(bpmn_file)
        next_task = self._get_next_task(bpmn_process_instance)
        while next_task is not None:
            test_case_task_properties = None
            test_case_task_key = next_task.task_spec.bpmn_id
            if "tasks" in test_case_contents:
                if test_case_task_key not in test_case_contents["tasks"]:
                    # we may need to go to the top level workflow of a given bpmn file
                    test_case_task_key = f"{next_task.workflow.spec.name}:{next_task.task_spec.bpmn_id}"
                if test_case_task_key in test_case_contents["tasks"]:
                    test_case_task_properties = test_case_contents["tasks"][test_case_task_key]

            # these task types cannot be auto-run; the test json must supply their data
            task_type = next_task.task_spec.__class__.__name__
            if task_type in ["ServiceTask", "UserTask", "CallActivity"] and (
                test_case_task_properties is None or "data" not in test_case_task_properties
            ):
                raise UnrunnableTestCaseError(
                    f"Cannot run test case '{test_case_identifier}'. It requires task data for"
                    f" {next_task.task_spec.bpmn_id} because it is of type '{task_type}'"
                )
            self._execute_task(next_task, test_case_task_key, test_case_task_properties)
            next_task = self._get_next_task(bpmn_process_instance)

        error_message = None
        if bpmn_process_instance.is_completed() is False:
            error_message = [
                "Expected process instance to complete but it did not.",
                f"Final data was: {bpmn_process_instance.last_task.data}",
                f"Last task bpmn id: {bpmn_process_instance.last_task.task_spec.bpmn_id}",
                f"Last task type: {bpmn_process_instance.last_task.task_spec.__class__.__name__}",
            ]
        elif bpmn_process_instance.success is False:
            error_message = [
                "Expected process instance to succeed but it did not.",
                f"Final data was: {bpmn_process_instance.data}",
            ]
        elif test_case_contents["expected_output_json"] != bpmn_process_instance.data:
            error_message = [
                "Expected output did not match actual output:",
                f"expected: {test_case_contents['expected_output_json']}",
                f"actual: {bpmn_process_instance.data}",
            ]
        self._add_test_result(error_message is None, bpmn_file, test_case_identifier, error_message)

    def _execute_task(
        self, spiff_task: SpiffTask, test_case_task_key: Optional[str], test_case_task_properties: Optional[dict]
    ) -> None:
        """Run one task via the delegate, selecting the next chunk of json task data if any.

        Raises MissingInputTaskData when the task runs more times than data entries exist.
        """
        task_data_for_submit = None
        if test_case_task_key and test_case_task_properties and "data" in test_case_task_properties:
            if test_case_task_key not in self.task_data_index:
                self.task_data_index[test_case_task_key] = 0
            task_data_length = len(test_case_task_properties["data"])
            test_case_index = self.task_data_index[test_case_task_key]
            if task_data_length <= test_case_index:
                raise MissingInputTaskData(
                    f"Missing input task data for task: {test_case_task_key}. "
                    f"Only {task_data_length} given in the json but task was called {test_case_index + 1} times"
                )
            task_data_for_submit = test_case_task_properties["data"][test_case_index]
            self.task_data_index[test_case_task_key] += 1
        self.process_model_test_runner_delegate.execute_task(spiff_task, task_data_for_submit)

    def _get_next_task(self, bpmn_process_instance: BpmnWorkflow) -> Optional[SpiffTask]:
        """Delegate: next runnable task or None."""
        return self.process_model_test_runner_delegate.get_next_task(bpmn_process_instance)

    def _instantiate_executer(self, bpmn_file: str) -> BpmnWorkflow:
        """Delegate: build a workflow instance for the given bpmn file."""
        return self.process_model_test_runner_delegate.instantiate_executer(bpmn_file)

    def _get_relative_path_of_bpmn_file(self, bpmn_file: str) -> str:
        """Return bpmn_file relative to the process model directory (for reporting)."""
        return os.path.relpath(bpmn_file, start=self.process_model_directory_path)

    def _exception_to_test_case_error_details(
        self, exception: Union[Exception, WorkflowTaskException]
    ) -> TestCaseErrorDetails:
        """Convert an exception into TestCaseErrorDetails, with task context when available."""
        error_messages = str(exception).split("\n")
        test_case_error_details = TestCaseErrorDetails(error_messages=error_messages)
        if isinstance(exception, WorkflowTaskException):
            # workflow exceptions carry bpmn task context worth surfacing
            test_case_error_details.task_error_line = exception.error_line
            test_case_error_details.task_trace = exception.task_trace
            test_case_error_details.task_line_number = exception.line_number
            test_case_error_details.task_bpmn_identifier = exception.task_spec.bpmn_id
            test_case_error_details.task_bpmn_name = exception.task_spec.bpmn_name
        else:
            test_case_error_details.stacktrace = traceback.format_exc().split("\n")
        return test_case_error_details

    def _add_test_result(
        self,
        passed: bool,
        bpmn_file: str,
        test_case_identifier: str,
        error_messages: Optional[list[str]] = None,
        exception: Optional[Exception] = None,
    ) -> None:
        """Append a TestCaseResult; error details come from the exception when given,
        otherwise from error_messages."""
        test_case_error_details = None
        if exception is not None:
            test_case_error_details = self._exception_to_test_case_error_details(exception)
        elif error_messages:
            test_case_error_details = TestCaseErrorDetails(error_messages=error_messages)

        bpmn_file_relative = self._get_relative_path_of_bpmn_file(bpmn_file)
        test_result = TestCaseResult(
            passed=passed,
            bpmn_file=bpmn_file_relative,
            test_case_identifier=test_case_identifier,
            test_case_error_details=test_case_error_details,
        )
        self.test_case_results.append(test_result)

    def _discover_process_model_test_cases(
        self,
    ) -> dict[str, str]:
        """Find test_*.json files and map each to its matching bpmn file.

        Raises MissingBpmnFileForTestCaseError when a test json has no matching bpmn file.
        """
        test_mappings = {}
        json_test_file_glob = os.path.join(self.process_model_directory_for_test_discovery, "**", "test_*.json")

        for file in glob.glob(json_test_file_glob, recursive=True):
            file_norm = os.path.normpath(file)
            file_dir = os.path.dirname(file_norm)
            json_file_name = os.path.basename(file_norm)
            if self.test_case_file is None or json_file_name == self.test_case_file:
                # "test_FOO.json" exercises "FOO.bpmn" in the same directory
                bpmn_file_name = re.sub(r"^test_(.*)\.json", r"\1.bpmn", json_file_name)
                bpmn_file_path = os.path.join(file_dir, bpmn_file_name)
                if os.path.isfile(bpmn_file_path):
                    test_mappings[file_norm] = bpmn_file_path
                else:
                    raise MissingBpmnFileForTestCaseError(
                        f"Cannot find a matching bpmn file for test case json file: '{file_norm}'"
                    )
        return test_mappings
class ProcessModeltTestRunnerBackendDelegate(ProcessModelTestRunnerMostlyPureSpiffDelegate):
    """Backend-specific delegate; currently identical to the pure-spiff delegate.

    NOTE(review): the class name contains a typo ("Modelt") — kept as-is since
    callers reference this name.
    """

    pass
class ProcessModelTestRunnerService:
    """Thin facade that wires a ProcessModelTestRunner to the backend delegate class."""

    def __init__(
        self,
        process_model_directory_path: str,
        test_case_file: Optional[str] = None,
        test_case_identifier: Optional[str] = None,
    ) -> None:
        """Build the underlying runner for the given directory, optionally scoped to
        a single test case file and/or test case identifier."""
        runner = ProcessModelTestRunner(
            process_model_directory_path,
            process_model_test_runner_delegate_class=ProcessModeltTestRunnerBackendDelegate,
            test_case_file=test_case_file,
            test_case_identifier=test_case_identifier,
        )
        self.process_model_test_runner = runner

    def run(self) -> None:
        """Execute all discovered test cases via the underlying runner."""
        self.process_model_test_runner.run()

View File

@ -221,37 +221,37 @@ class SpecFileService(FileSystemService):
return spec_file_data
@staticmethod
def full_file_path(spec: ProcessModelInfo, file_name: str) -> str:
def full_file_path(process_model: ProcessModelInfo, file_name: str) -> str:
"""File_path."""
return os.path.abspath(os.path.join(SpecFileService.workflow_path(spec), file_name))
return os.path.abspath(os.path.join(SpecFileService.process_model_full_path(process_model), file_name))
@staticmethod
def last_modified(spec: ProcessModelInfo, file_name: str) -> datetime:
def last_modified(process_model: ProcessModelInfo, file_name: str) -> datetime:
"""Last_modified."""
full_file_path = SpecFileService.full_file_path(spec, file_name)
full_file_path = SpecFileService.full_file_path(process_model, file_name)
return FileSystemService._last_modified(full_file_path)
@staticmethod
def timestamp(spec: ProcessModelInfo, file_name: str) -> float:
def timestamp(process_model: ProcessModelInfo, file_name: str) -> float:
"""Timestamp."""
full_file_path = SpecFileService.full_file_path(spec, file_name)
full_file_path = SpecFileService.full_file_path(process_model, file_name)
return FileSystemService._timestamp(full_file_path)
@staticmethod
def delete_file(spec: ProcessModelInfo, file_name: str) -> None:
def delete_file(process_model: ProcessModelInfo, file_name: str) -> None:
"""Delete_file."""
# Fixme: Remember to remove the lookup files when the spec file is removed.
# Fixme: Remember to remove the lookup files when the process_model file is removed.
# lookup_files = session.query(LookupFileModel).filter_by(file_model_id=file_id).all()
# for lf in lookup_files:
# session.query(LookupDataModel).filter_by(lookup_file_model_id=lf.id).delete()
# session.query(LookupFileModel).filter_by(id=lf.id).delete()
full_file_path = SpecFileService.full_file_path(spec, file_name)
full_file_path = SpecFileService.full_file_path(process_model, file_name)
os.remove(full_file_path)
@staticmethod
def delete_all_files(spec: ProcessModelInfo) -> None:
def delete_all_files(process_model: ProcessModelInfo) -> None:
"""Delete_all_files."""
dir_path = SpecFileService.workflow_path(spec)
dir_path = SpecFileService.process_model_full_path(process_model)
if os.path.exists(dir_path):
shutil.rmtree(dir_path)

View File

@ -0,0 +1,41 @@
<?xml version="1.0" encoding="UTF-8"?>
<bpmn:definitions xmlns:bpmn="http://www.omg.org/spec/BPMN/20100524/MODEL" xmlns:bpmndi="http://www.omg.org/spec/BPMN/20100524/DI" xmlns:dc="http://www.omg.org/spec/DD/20100524/DC" xmlns:di="http://www.omg.org/spec/DD/20100524/DI" id="Definitions_96f6665" targetNamespace="http://bpmn.io/schema/bpmn" exporter="Camunda Modeler" exporterVersion="3.0.0-dev">
<bpmn:process id="Process_FailingProcess" name="Failing Process" isExecutable="true">
<bpmn:startEvent id="StartEvent_1">
<bpmn:outgoing>Flow_1xkc1ru</bpmn:outgoing>
</bpmn:startEvent>
<bpmn:sequenceFlow id="Flow_1xkc1ru" sourceRef="StartEvent_1" targetRef="Activity_FailingTask" />
<bpmn:endEvent id="Event_00iauxo">
<bpmn:incoming>Flow_0tkkq9s</bpmn:incoming>
</bpmn:endEvent>
<bpmn:sequenceFlow id="Flow_0tkkq9s" sourceRef="Activity_FailingTask" targetRef="Event_00iauxo" />
<bpmn:scriptTask id="Activity_FailingTask" name="Failing Task">
<bpmn:incoming>Flow_1xkc1ru</bpmn:incoming>
<bpmn:outgoing>Flow_0tkkq9s</bpmn:outgoing>
<bpmn:script>a = 1
b = a + 'two'</bpmn:script>
</bpmn:scriptTask>
</bpmn:process>
<bpmndi:BPMNDiagram id="BPMNDiagram_1">
<bpmndi:BPMNPlane id="BPMNPlane_1" bpmnElement="Process_FailingProcess">
<bpmndi:BPMNShape id="_BPMNShape_StartEvent_2" bpmnElement="StartEvent_1">
<dc:Bounds x="179" y="159" width="36" height="36" />
</bpmndi:BPMNShape>
<bpmndi:BPMNShape id="Event_00iauxo_di" bpmnElement="Event_00iauxo">
<dc:Bounds x="432" y="159" width="36" height="36" />
</bpmndi:BPMNShape>
<bpmndi:BPMNShape id="Activity_0ecfxz2_di" bpmnElement="Activity_FailingTask">
<dc:Bounds x="270" y="137" width="100" height="80" />
<bpmndi:BPMNLabel />
</bpmndi:BPMNShape>
<bpmndi:BPMNEdge id="Flow_1xkc1ru_di" bpmnElement="Flow_1xkc1ru">
<di:waypoint x="215" y="177" />
<di:waypoint x="270" y="177" />
</bpmndi:BPMNEdge>
<bpmndi:BPMNEdge id="Flow_0tkkq9s_di" bpmnElement="Flow_0tkkq9s">
<di:waypoint x="370" y="177" />
<di:waypoint x="432" y="177" />
</bpmndi:BPMNEdge>
</bpmndi:BPMNPlane>
</bpmndi:BPMNDiagram>
</bpmn:definitions>

View File

@ -0,0 +1,11 @@
{
"description": "Process that raises an exception",
"display_name": "Failing Process",
"display_order": 0,
"exception_notification_addresses": [],
"fault_or_suspend_on_exception": "fault",
"files": [],
"metadata_extraction_paths": null,
"primary_file_name": "failing_task.bpmn",
"primary_process_id": "Process_FailingProcess"
}

View File

@ -0,0 +1,39 @@
<?xml version="1.0" encoding="UTF-8"?>
<bpmn:definitions xmlns:bpmn="http://www.omg.org/spec/BPMN/20100524/MODEL" xmlns:bpmndi="http://www.omg.org/spec/BPMN/20100524/DI" xmlns:dc="http://www.omg.org/spec/DD/20100524/DC" xmlns:di="http://www.omg.org/spec/DD/20100524/DI" id="Definitions_96f6665" targetNamespace="http://bpmn.io/schema/bpmn" exporter="Camunda Modeler" exporterVersion="3.0.0-dev">
<bpmn:process id="CallActivityProcess" isExecutable="true">
<bpmn:startEvent id="StartEvent_1">
<bpmn:outgoing>Flow_0ext5lt</bpmn:outgoing>
</bpmn:startEvent>
<bpmn:sequenceFlow id="Flow_0ext5lt" sourceRef="StartEvent_1" targetRef="Activity_0irfg4l" />
<bpmn:endEvent id="Event_0bz40ol">
<bpmn:incoming>Flow_1hzwssi</bpmn:incoming>
</bpmn:endEvent>
<bpmn:sequenceFlow id="Flow_1hzwssi" sourceRef="Activity_0irfg4l" targetRef="Event_0bz40ol" />
<bpmn:callActivity id="Activity_0irfg4l" name="Call Activity" calledElement="ManualTaskProcess">
<bpmn:incoming>Flow_0ext5lt</bpmn:incoming>
<bpmn:outgoing>Flow_1hzwssi</bpmn:outgoing>
</bpmn:callActivity>
</bpmn:process>
<bpmndi:BPMNDiagram id="BPMNDiagram_1">
<bpmndi:BPMNPlane id="BPMNPlane_1" bpmnElement="CallActivityProcess">
<bpmndi:BPMNShape id="_BPMNShape_StartEvent_2" bpmnElement="StartEvent_1">
<dc:Bounds x="179" y="159" width="36" height="36" />
</bpmndi:BPMNShape>
<bpmndi:BPMNShape id="Event_0bz40ol_di" bpmnElement="Event_0bz40ol">
<dc:Bounds x="422" y="159" width="36" height="36" />
</bpmndi:BPMNShape>
<bpmndi:BPMNShape id="Activity_0148g78_di" bpmnElement="Activity_0irfg4l">
<dc:Bounds x="270" y="137" width="100" height="80" />
<bpmndi:BPMNLabel />
</bpmndi:BPMNShape>
<bpmndi:BPMNEdge id="Flow_0ext5lt_di" bpmnElement="Flow_0ext5lt">
<di:waypoint x="215" y="177" />
<di:waypoint x="270" y="177" />
</bpmndi:BPMNEdge>
<bpmndi:BPMNEdge id="Flow_1hzwssi_di" bpmnElement="Flow_1hzwssi">
<di:waypoint x="370" y="177" />
<di:waypoint x="422" y="177" />
</bpmndi:BPMNEdge>
</bpmndi:BPMNPlane>
</bpmndi:BPMNDiagram>
</bpmn:definitions>

View File

@ -0,0 +1,9 @@
{
"description": "",
"display_name": "Call Activity",
"exception_notification_addresses": [],
"fault_or_suspend_on_exception": "fault",
"files": [],
"primary_file_name": "call_activity.bpmn",
"primary_process_id": "CallActivityProcess"
}

View File

@ -0,0 +1,5 @@
{
"test_case_1": {
"expected_output_json": {}
}
}

View File

@ -0,0 +1,11 @@
{
"title": "Choose Your Branch",
"description": "",
"properties": {
"branch": {
"type": "string",
"title": "branch"
}
},
"required": []
}

View File

@ -0,0 +1,98 @@
<?xml version="1.0" encoding="UTF-8"?>
<bpmn:definitions xmlns:bpmn="http://www.omg.org/spec/BPMN/20100524/MODEL" xmlns:bpmndi="http://www.omg.org/spec/BPMN/20100524/DI" xmlns:dc="http://www.omg.org/spec/DD/20100524/DC" xmlns:di="http://www.omg.org/spec/DD/20100524/DI" xmlns:spiffworkflow="http://spiffworkflow.org/bpmn/schema/1.0/core" id="Definitions_96f6665" targetNamespace="http://bpmn.io/schema/bpmn" exporter="Camunda Modeler" exporterVersion="3.0.0-dev">
<bpmn:process id="exclusive_gateway_based_on_user_task_process" name="ExclusiveGatewayBasedOnUserTaskProcess" isExecutable="true">
<bpmn:startEvent id="StartEvent_1">
<bpmn:outgoing>Flow_19j3jcx</bpmn:outgoing>
</bpmn:startEvent>
<bpmn:sequenceFlow id="Flow_19j3jcx" sourceRef="StartEvent_1" targetRef="user_task_choose_branch" />
<bpmn:exclusiveGateway id="Gateway_0xwvfep" default="Flow_10m4g0q">
<bpmn:incoming>Flow_0qa66xz</bpmn:incoming>
<bpmn:outgoing>Flow_1ww41l3</bpmn:outgoing>
<bpmn:outgoing>Flow_10m4g0q</bpmn:outgoing>
</bpmn:exclusiveGateway>
<bpmn:sequenceFlow id="Flow_0qa66xz" sourceRef="user_task_choose_branch" targetRef="Gateway_0xwvfep" />
<bpmn:sequenceFlow id="Flow_1ww41l3" sourceRef="Gateway_0xwvfep" targetRef="script_task_branch_a">
<bpmn:conditionExpression>branch == 'a'</bpmn:conditionExpression>
</bpmn:sequenceFlow>
<bpmn:sequenceFlow id="Flow_10m4g0q" sourceRef="Gateway_0xwvfep" targetRef="script_task_branch_b" />
<bpmn:endEvent id="Event_05ovp79">
<bpmn:incoming>Flow_1oxbb75</bpmn:incoming>
<bpmn:incoming>Flow_1ck9lfk</bpmn:incoming>
</bpmn:endEvent>
<bpmn:sequenceFlow id="Flow_1oxbb75" sourceRef="script_task_branch_b" targetRef="Event_05ovp79" />
<bpmn:sequenceFlow id="Flow_1ck9lfk" sourceRef="script_task_branch_a" targetRef="Event_05ovp79" />
<bpmn:userTask id="user_task_choose_branch" name="User Task Choose Branch">
<bpmn:extensionElements>
<spiffworkflow:properties>
<spiffworkflow:property name="formJsonSchemaFilename" value="choose-your-branch-schema.json" />
<spiffworkflow:property name="formUiSchemaFilename" value="choose-your-branch-uischema.json" />
</spiffworkflow:properties>
</bpmn:extensionElements>
<bpmn:incoming>Flow_19j3jcx</bpmn:incoming>
<bpmn:outgoing>Flow_0qa66xz</bpmn:outgoing>
</bpmn:userTask>
<bpmn:scriptTask id="script_task_branch_a" name="Script Task Branch A">
<bpmn:incoming>Flow_1ww41l3</bpmn:incoming>
<bpmn:outgoing>Flow_1ck9lfk</bpmn:outgoing>
<bpmn:script>chosen_branch = 'A'</bpmn:script>
</bpmn:scriptTask>
<bpmn:scriptTask id="script_task_branch_b" name="Script Task Branch B">
<bpmn:incoming>Flow_10m4g0q</bpmn:incoming>
<bpmn:outgoing>Flow_1oxbb75</bpmn:outgoing>
<bpmn:script>chosen_branch = 'B'</bpmn:script>
</bpmn:scriptTask>
</bpmn:process>
<bpmndi:BPMNDiagram id="BPMNDiagram_1">
<bpmndi:BPMNPlane id="BPMNPlane_1" bpmnElement="exclusive_gateway_based_on_user_task_process">
<bpmndi:BPMNShape id="_BPMNShape_StartEvent_2" bpmnElement="StartEvent_1">
<dc:Bounds x="179" y="159" width="36" height="36" />
</bpmndi:BPMNShape>
<bpmndi:BPMNShape id="Gateway_0xwvfep_di" bpmnElement="Gateway_0xwvfep" isMarkerVisible="true">
<dc:Bounds x="425" y="152" width="50" height="50" />
</bpmndi:BPMNShape>
<bpmndi:BPMNShape id="Event_05ovp79_di" bpmnElement="Event_05ovp79">
<dc:Bounds x="562" y="159" width="36" height="36" />
</bpmndi:BPMNShape>
<bpmndi:BPMNShape id="Activity_15rk06j_di" bpmnElement="user_task_choose_branch">
<dc:Bounds x="270" y="137" width="100" height="80" />
<bpmndi:BPMNLabel />
</bpmndi:BPMNShape>
<bpmndi:BPMNShape id="Activity_0oy60uv_di" bpmnElement="script_task_branch_a">
<dc:Bounds x="500" y="20" width="100" height="80" />
</bpmndi:BPMNShape>
<bpmndi:BPMNShape id="Activity_02hkehe_di" bpmnElement="script_task_branch_b">
<dc:Bounds x="500" y="260" width="100" height="80" />
</bpmndi:BPMNShape>
<bpmndi:BPMNEdge id="Flow_19j3jcx_di" bpmnElement="Flow_19j3jcx">
<di:waypoint x="215" y="177" />
<di:waypoint x="270" y="177" />
</bpmndi:BPMNEdge>
<bpmndi:BPMNEdge id="Flow_0qa66xz_di" bpmnElement="Flow_0qa66xz">
<di:waypoint x="370" y="177" />
<di:waypoint x="425" y="177" />
</bpmndi:BPMNEdge>
<bpmndi:BPMNEdge id="Flow_1ww41l3_di" bpmnElement="Flow_1ww41l3">
<di:waypoint x="450" y="152" />
<di:waypoint x="450" y="60" />
<di:waypoint x="500" y="60" />
</bpmndi:BPMNEdge>
<bpmndi:BPMNEdge id="Flow_10m4g0q_di" bpmnElement="Flow_10m4g0q">
<di:waypoint x="450" y="202" />
<di:waypoint x="450" y="300" />
<di:waypoint x="500" y="300" />
</bpmndi:BPMNEdge>
<bpmndi:BPMNEdge id="Flow_1oxbb75_di" bpmnElement="Flow_1oxbb75">
<di:waypoint x="550" y="260" />
<di:waypoint x="550" y="233" />
<di:waypoint x="580" y="233" />
<di:waypoint x="580" y="195" />
</bpmndi:BPMNEdge>
<bpmndi:BPMNEdge id="Flow_1ck9lfk_di" bpmnElement="Flow_1ck9lfk">
<di:waypoint x="550" y="100" />
<di:waypoint x="550" y="130" />
<di:waypoint x="580" y="130" />
<di:waypoint x="580" y="159" />
</bpmndi:BPMNEdge>
</bpmndi:BPMNPlane>
</bpmndi:BPMNDiagram>
</bpmn:definitions>

View File

@ -0,0 +1,11 @@
{
"description": "",
"display_name": "Exclusive Gateway Based on User Task",
"display_order": 0,
"exception_notification_addresses": [],
"fault_or_suspend_on_exception": "fault",
"files": [],
"metadata_extraction_paths": null,
"primary_file_name": "exclusive_gateway_based_on_user_task.bpmn",
"primary_process_id": "exclusive_gateway_based_on_user_task_process"
}

View File

@ -0,0 +1,22 @@
{
"test_case_one": {
"tasks": {
"user_task_choose_branch": {
"data": [
{ "branch": "a" }
]
}
},
"expected_output_json": { "branch": "a", "chosen_branch": "A"}
},
"test_case_two": {
"tasks": {
"user_task_choose_branch": {
"data": [
{ "branch": "b" }
]
}
},
"expected_output_json": { "branch": "b", "chosen_branch": "B"}
}
}

View File

@ -0,0 +1,110 @@
<?xml version="1.0" encoding="UTF-8"?>
<bpmn:definitions xmlns:bpmn="http://www.omg.org/spec/BPMN/20100524/MODEL" xmlns:bpmndi="http://www.omg.org/spec/BPMN/20100524/DI" xmlns:dc="http://www.omg.org/spec/DD/20100524/DC" xmlns:di="http://www.omg.org/spec/DD/20100524/DI" xmlns:spiffworkflow="http://spiffworkflow.org/bpmn/schema/1.0/core" id="Definitions_96f6665" targetNamespace="http://bpmn.io/schema/bpmn" exporter="Camunda Modeler" exporterVersion="3.0.0-dev">
<bpmn:process id="loopback_to_user_task_process" name="Loopback To User Task Process" isExecutable="true">
<bpmn:startEvent id="StartEvent_1">
<bpmn:outgoing>Flow_12xxe7w</bpmn:outgoing>
</bpmn:startEvent>
<bpmn:sequenceFlow id="Flow_12xxe7w" sourceRef="StartEvent_1" targetRef="set_variable" />
<bpmn:exclusiveGateway id="Gateway_1gap20a" default="Flow_1sg0c65">
<bpmn:incoming>Flow_1s3znr2</bpmn:incoming>
<bpmn:outgoing>Flow_0utss6p</bpmn:outgoing>
<bpmn:outgoing>Flow_1sg0c65</bpmn:outgoing>
</bpmn:exclusiveGateway>
<bpmn:sequenceFlow id="Flow_08tc3r7" sourceRef="set_variable" targetRef="user_task_enter_increment" />
<bpmn:endEvent id="Event_1il3y5o">
<bpmn:incoming>Flow_0utss6p</bpmn:incoming>
</bpmn:endEvent>
<bpmn:sequenceFlow id="Flow_0utss6p" sourceRef="Gateway_1gap20a" targetRef="Event_1il3y5o">
<bpmn:conditionExpression>counter == 3</bpmn:conditionExpression>
</bpmn:sequenceFlow>
<bpmn:scriptTask id="set_variable" name="Set Variable">
<bpmn:incoming>Flow_12xxe7w</bpmn:incoming>
<bpmn:outgoing>Flow_08tc3r7</bpmn:outgoing>
<bpmn:script>counter = 1
the_var = 0</bpmn:script>
</bpmn:scriptTask>
<bpmn:sequenceFlow id="Flow_0wnc5ju" sourceRef="user_task_enter_increment" targetRef="add_user_input_to_variable" />
<bpmn:sequenceFlow id="Flow_1sg0c65" sourceRef="Gateway_1gap20a" targetRef="user_task_enter_increment" />
<bpmn:userTask id="user_task_enter_increment" name="User Task Enter Increment">
<bpmn:extensionElements>
<spiffworkflow:instructionsForEndUser />
<spiffworkflow:properties>
<spiffworkflow:property name="formJsonSchemaFilename" value="user-input-schema.json" />
<spiffworkflow:property name="formUiSchemaFilename" value="user-input-uischema.json" />
</spiffworkflow:properties>
</bpmn:extensionElements>
<bpmn:incoming>Flow_08tc3r7</bpmn:incoming>
<bpmn:incoming>Flow_1sg0c65</bpmn:incoming>
<bpmn:outgoing>Flow_0wnc5ju</bpmn:outgoing>
</bpmn:userTask>
<bpmn:sequenceFlow id="Flow_1s3znr2" sourceRef="add_user_input_to_variable" targetRef="Gateway_1gap20a" />
<bpmn:scriptTask id="add_user_input_to_variable" name="Add User Input to Variable">
<bpmn:incoming>Flow_0wnc5ju</bpmn:incoming>
<bpmn:outgoing>Flow_1s3znr2</bpmn:outgoing>
<bpmn:script>the_var = user_input_variable + the_var
counter += 1</bpmn:script>
</bpmn:scriptTask>
<bpmn:textAnnotation id="TextAnnotation_09y70ug">
<bpmn:text>loop back if counter &lt; 3</bpmn:text>
</bpmn:textAnnotation>
<bpmn:association id="Association_0470wt9" sourceRef="Flow_1sg0c65" targetRef="TextAnnotation_09y70ug" />
</bpmn:process>
<bpmndi:BPMNDiagram id="BPMNDiagram_1">
<bpmndi:BPMNPlane id="BPMNPlane_1" bpmnElement="loopback_to_user_task_process">
<bpmndi:BPMNShape id="_BPMNShape_StartEvent_2" bpmnElement="StartEvent_1">
<dc:Bounds x="179" y="159" width="36" height="36" />
</bpmndi:BPMNShape>
<bpmndi:BPMNShape id="Gateway_1gap20a_di" bpmnElement="Gateway_1gap20a" isMarkerVisible="true">
<dc:Bounds x="625" y="152" width="50" height="50" />
</bpmndi:BPMNShape>
<bpmndi:BPMNShape id="Event_1il3y5o_di" bpmnElement="Event_1il3y5o">
<dc:Bounds x="712" y="159" width="36" height="36" />
</bpmndi:BPMNShape>
<bpmndi:BPMNShape id="Activity_0hrsdn8_di" bpmnElement="set_variable">
<dc:Bounds x="250" y="137" width="100" height="80" />
<bpmndi:BPMNLabel />
</bpmndi:BPMNShape>
<bpmndi:BPMNShape id="Activity_0katfaf_di" bpmnElement="user_task_enter_increment">
<dc:Bounds x="380" y="137" width="100" height="80" />
<bpmndi:BPMNLabel />
</bpmndi:BPMNShape>
<bpmndi:BPMNShape id="Activity_0a6owe7_di" bpmnElement="add_user_input_to_variable">
<dc:Bounds x="500" y="137" width="100" height="80" />
<bpmndi:BPMNLabel />
</bpmndi:BPMNShape>
<bpmndi:BPMNShape id="TextAnnotation_09y70ug_di" bpmnElement="TextAnnotation_09y70ug">
<dc:Bounds x="610" y="55" width="130" height="30" />
<bpmndi:BPMNLabel />
</bpmndi:BPMNShape>
<bpmndi:BPMNEdge id="Flow_12xxe7w_di" bpmnElement="Flow_12xxe7w">
<di:waypoint x="215" y="177" />
<di:waypoint x="250" y="177" />
</bpmndi:BPMNEdge>
<bpmndi:BPMNEdge id="Flow_08tc3r7_di" bpmnElement="Flow_08tc3r7">
<di:waypoint x="350" y="177" />
<di:waypoint x="380" y="177" />
</bpmndi:BPMNEdge>
<bpmndi:BPMNEdge id="Flow_0utss6p_di" bpmnElement="Flow_0utss6p">
<di:waypoint x="675" y="177" />
<di:waypoint x="712" y="177" />
</bpmndi:BPMNEdge>
<bpmndi:BPMNEdge id="Flow_0wnc5ju_di" bpmnElement="Flow_0wnc5ju">
<di:waypoint x="480" y="177" />
<di:waypoint x="500" y="177" />
</bpmndi:BPMNEdge>
<bpmndi:BPMNEdge id="Flow_1sg0c65_di" bpmnElement="Flow_1sg0c65">
<di:waypoint x="650" y="150" />
<di:waypoint x="550" y="70" />
<di:waypoint x="475" y="139" />
</bpmndi:BPMNEdge>
<bpmndi:BPMNEdge id="Flow_1s3znr2_di" bpmnElement="Flow_1s3znr2">
<di:waypoint x="600" y="177" />
<di:waypoint x="625" y="177" />
</bpmndi:BPMNEdge>
<bpmndi:BPMNEdge id="Association_0470wt9_di" bpmnElement="Association_0470wt9">
<di:waypoint x="579" y="93" />
<di:waypoint x="610" y="81" />
</bpmndi:BPMNEdge>
</bpmndi:BPMNPlane>
</bpmndi:BPMNDiagram>
</bpmn:definitions>

View File

@ -0,0 +1,11 @@
{
"description": "",
"display_name": "Loopback to User Task",
"display_order": 0,
"exception_notification_addresses": [],
"fault_or_suspend_on_exception": "fault",
"files": [],
"metadata_extraction_paths": null,
"primary_file_name": "loopback_to_user_task.bpmn",
"primary_process_id": "loopback_to_user_task_process"
}

View File

@ -0,0 +1,13 @@
{
"test_case_one": {
"tasks": {
"user_task_enter_increment": {
"data": [
{ "user_input_variable": 7 },
{ "user_input_variable": 8 }
]
}
},
"expected_output_json": { "the_var": 15, "counter": 3, "user_input_variable": 8 }
}
}

View File

@ -0,0 +1,11 @@
{
"title": "User Input",
"description": "",
"properties": {
"user_input_variable": {
"type": "integer",
"title": "user_input_variable"
}
},
"required": []
}

View File

@ -0,0 +1,5 @@
{
"ui:order": [
"user_input_variable"
]
}

View File

@ -0,0 +1,92 @@
<?xml version="1.0" encoding="UTF-8"?>
<!-- Loopback Process: "Set Variable" runs a = 1, then "Increment Variable"
     runs a += 1; the exclusive gateway exits via Flow_0utss6p when a == 3
     and otherwise takes its default flow (Flow_1sg0c65) back to the
     increment task. Expected final data: a == 3 (see the sibling
     expected-to-pass JSON fixture). -->
<bpmn:definitions xmlns:bpmn="http://www.omg.org/spec/BPMN/20100524/MODEL" xmlns:bpmndi="http://www.omg.org/spec/BPMN/20100524/DI" xmlns:dc="http://www.omg.org/spec/DD/20100524/DC" xmlns:di="http://www.omg.org/spec/DD/20100524/DI" xmlns:spiffworkflow="http://spiffworkflow.org/bpmn/schema/1.0/core" id="Definitions_96f6665" targetNamespace="http://bpmn.io/schema/bpmn" exporter="Camunda Modeler" exporterVersion="3.0.0-dev">
  <bpmn:process id="loopback_process" name="Loopback Process" isExecutable="true">
    <bpmn:startEvent id="StartEvent_1">
      <bpmn:outgoing>Flow_12xxe7w</bpmn:outgoing>
    </bpmn:startEvent>
    <bpmn:sequenceFlow id="Flow_12xxe7w" sourceRef="StartEvent_1" targetRef="set_variable" />
    <!-- Loop decision point: default flow loops back, conditional flow exits. -->
    <bpmn:exclusiveGateway id="Gateway_1gap20a" default="Flow_1sg0c65">
      <bpmn:incoming>Flow_0wnc5ju</bpmn:incoming>
      <bpmn:outgoing>Flow_0utss6p</bpmn:outgoing>
      <bpmn:outgoing>Flow_1sg0c65</bpmn:outgoing>
    </bpmn:exclusiveGateway>
    <bpmn:sequenceFlow id="Flow_08tc3r7" sourceRef="set_variable" targetRef="increment_variable" />
    <bpmn:endEvent id="Event_1il3y5o">
      <bpmn:incoming>Flow_0utss6p</bpmn:incoming>
    </bpmn:endEvent>
    <!-- Exit condition: the loop terminates once a reaches 3. -->
    <bpmn:sequenceFlow id="Flow_0utss6p" sourceRef="Gateway_1gap20a" targetRef="Event_1il3y5o">
      <bpmn:conditionExpression>a == 3</bpmn:conditionExpression>
    </bpmn:sequenceFlow>
    <!-- Initializes the loop counter. -->
    <bpmn:scriptTask id="set_variable" name="Set Variable">
      <bpmn:incoming>Flow_12xxe7w</bpmn:incoming>
      <bpmn:outgoing>Flow_08tc3r7</bpmn:outgoing>
      <bpmn:script>a = 1</bpmn:script>
    </bpmn:scriptTask>
    <bpmn:sequenceFlow id="Flow_0wnc5ju" sourceRef="increment_variable" targetRef="Gateway_1gap20a" />
    <!-- Loop body: entered once from set_variable and again on each loopback. -->
    <bpmn:scriptTask id="increment_variable" name="Increment Variable">
      <bpmn:extensionElements>
        <spiffworkflow:instructionsForEndUser />
      </bpmn:extensionElements>
      <bpmn:incoming>Flow_08tc3r7</bpmn:incoming>
      <bpmn:incoming>Flow_1sg0c65</bpmn:incoming>
      <bpmn:outgoing>Flow_0wnc5ju</bpmn:outgoing>
      <bpmn:script>a += 1</bpmn:script>
    </bpmn:scriptTask>
    <bpmn:sequenceFlow id="Flow_1sg0c65" sourceRef="Gateway_1gap20a" targetRef="increment_variable" />
    <bpmn:textAnnotation id="TextAnnotation_09y70ug">
      <bpmn:text>loop back if a &lt; 3</bpmn:text>
    </bpmn:textAnnotation>
    <bpmn:association id="Association_0470wt9" sourceRef="Flow_1sg0c65" targetRef="TextAnnotation_09y70ug" />
  </bpmn:process>
  <!-- Diagram interchange (layout only; no runtime semantics). -->
  <bpmndi:BPMNDiagram id="BPMNDiagram_1">
    <bpmndi:BPMNPlane id="BPMNPlane_1" bpmnElement="loopback_process">
      <bpmndi:BPMNShape id="TextAnnotation_09y70ug_di" bpmnElement="TextAnnotation_09y70ug">
        <dc:Bounds x="610" y="55" width="130" height="30" />
        <bpmndi:BPMNLabel />
      </bpmndi:BPMNShape>
      <bpmndi:BPMNShape id="_BPMNShape_StartEvent_2" bpmnElement="StartEvent_1">
        <dc:Bounds x="179" y="159" width="36" height="36" />
      </bpmndi:BPMNShape>
      <bpmndi:BPMNShape id="Gateway_1gap20a_di" bpmnElement="Gateway_1gap20a" isMarkerVisible="true">
        <dc:Bounds x="535" y="152" width="50" height="50" />
      </bpmndi:BPMNShape>
      <bpmndi:BPMNShape id="Event_1il3y5o_di" bpmnElement="Event_1il3y5o">
        <dc:Bounds x="632" y="159" width="36" height="36" />
      </bpmndi:BPMNShape>
      <bpmndi:BPMNShape id="Activity_0hrsdn8_di" bpmnElement="set_variable">
        <dc:Bounds x="270" y="137" width="100" height="80" />
        <bpmndi:BPMNLabel />
      </bpmndi:BPMNShape>
      <bpmndi:BPMNShape id="Activity_1g5b8wo_di" bpmnElement="increment_variable">
        <dc:Bounds x="400" y="137" width="100" height="80" />
        <bpmndi:BPMNLabel />
      </bpmndi:BPMNShape>
      <bpmndi:BPMNEdge id="Association_0470wt9_di" bpmnElement="Association_0470wt9">
        <di:waypoint x="567.1081954098089" y="89.9595613114437" />
        <di:waypoint x="610" y="81" />
      </bpmndi:BPMNEdge>
      <bpmndi:BPMNEdge id="Flow_12xxe7w_di" bpmnElement="Flow_12xxe7w">
        <di:waypoint x="215" y="177" />
        <di:waypoint x="270" y="177" />
      </bpmndi:BPMNEdge>
      <bpmndi:BPMNEdge id="Flow_08tc3r7_di" bpmnElement="Flow_08tc3r7">
        <di:waypoint x="370" y="177" />
        <di:waypoint x="400" y="177" />
      </bpmndi:BPMNEdge>
      <bpmndi:BPMNEdge id="Flow_0utss6p_di" bpmnElement="Flow_0utss6p">
        <di:waypoint x="585" y="177" />
        <di:waypoint x="632" y="177" />
      </bpmndi:BPMNEdge>
      <bpmndi:BPMNEdge id="Flow_0wnc5ju_di" bpmnElement="Flow_0wnc5ju">
        <di:waypoint x="500" y="177" />
        <di:waypoint x="535" y="177" />
      </bpmndi:BPMNEdge>
      <bpmndi:BPMNEdge id="Flow_1sg0c65_di" bpmnElement="Flow_1sg0c65">
        <di:waypoint x="560" y="150" />
        <di:waypoint x="610" y="140" />
        <di:waypoint x="550" y="70" />
        <di:waypoint x="489" y="137" />
      </bpmndi:BPMNEdge>
    </bpmndi:BPMNPlane>
  </bpmndi:BPMNDiagram>
</bpmn:definitions>

View File

@ -0,0 +1,11 @@
{
"description": "",
"display_name": "Loopback",
"display_order": 0,
"exception_notification_addresses": [],
"fault_or_suspend_on_exception": "fault",
"files": [],
"metadata_extraction_paths": null,
"primary_file_name": "loopback.bpmn",
"primary_process_id": "loopback_process"
}

View File

@ -0,0 +1,5 @@
{
"test_case_1": {
"expected_output_json": { "a": 3 }
}
}

View File

@ -0,0 +1,39 @@
<?xml version="1.0" encoding="UTF-8"?>
<!-- ManualTaskProcess: start -> manual_task_one (manual task) -> end. -->
<bpmn:definitions xmlns:bpmn="http://www.omg.org/spec/BPMN/20100524/MODEL" xmlns:bpmndi="http://www.omg.org/spec/BPMN/20100524/DI" xmlns:dc="http://www.omg.org/spec/DD/20100524/DC" xmlns:di="http://www.omg.org/spec/DD/20100524/DI" id="Definitions_96f6665" targetNamespace="http://bpmn.io/schema/bpmn" exporter="Camunda Modeler" exporterVersion="3.0.0-dev">
  <bpmn:process id="ManualTaskProcess" isExecutable="true">
    <bpmn:startEvent id="StartEvent_1">
      <bpmn:outgoing>Flow_0gz6i84</bpmn:outgoing>
    </bpmn:startEvent>
    <bpmn:sequenceFlow id="Flow_0gz6i84" sourceRef="StartEvent_1" targetRef="manual_task_one" />
    <bpmn:endEvent id="Event_0ynlmo7">
      <bpmn:incoming>Flow_0ikklg6</bpmn:incoming>
    </bpmn:endEvent>
    <bpmn:sequenceFlow id="Flow_0ikklg6" sourceRef="manual_task_one" targetRef="Event_0ynlmo7" />
    <bpmn:manualTask id="manual_task_one" name="Manual">
      <bpmn:incoming>Flow_0gz6i84</bpmn:incoming>
      <bpmn:outgoing>Flow_0ikklg6</bpmn:outgoing>
    </bpmn:manualTask>
  </bpmn:process>
  <bpmndi:BPMNDiagram id="BPMNDiagram_1">
    <bpmndi:BPMNPlane id="BPMNPlane_1" bpmnElement="ManualTaskProcess">
      <bpmndi:BPMNShape id="_BPMNShape_StartEvent_2" bpmnElement="StartEvent_1">
        <dc:Bounds x="179" y="159" width="36" height="36" />
      </bpmndi:BPMNShape>
      <bpmndi:BPMNShape id="Event_0ynlmo7_di" bpmnElement="Event_0ynlmo7">
        <dc:Bounds x="432" y="159" width="36" height="36" />
      </bpmndi:BPMNShape>
      <!-- Fixed: bpmnElement was "manualt_task_one" (typo), which matched no
           element id in the process; shapes must reference "manual_task_one". -->
      <bpmndi:BPMNShape id="Activity_0sji2rz_di" bpmnElement="manual_task_one">
        <dc:Bounds x="270" y="137" width="100" height="80" />
        <bpmndi:BPMNLabel />
      </bpmndi:BPMNShape>
      <bpmndi:BPMNEdge id="Flow_0gz6i84_di" bpmnElement="Flow_0gz6i84">
        <di:waypoint x="215" y="177" />
        <di:waypoint x="270" y="177" />
      </bpmndi:BPMNEdge>
      <bpmndi:BPMNEdge id="Flow_0ikklg6_di" bpmnElement="Flow_0ikklg6">
        <di:waypoint x="370" y="177" />
        <di:waypoint x="432" y="177" />
      </bpmndi:BPMNEdge>
    </bpmndi:BPMNPlane>
  </bpmndi:BPMNDiagram>
</bpmn:definitions>

View File

@ -0,0 +1,9 @@
{
"description": "Manual Task",
"display_name": "Manual Task",
"exception_notification_addresses": [],
"fault_or_suspend_on_exception": "fault",
"files": [],
"primary_file_name": "manual_task.bpmn",
"primary_process_id": "ManualTaskProcess"
}

View File

@ -0,0 +1,10 @@
{
"test_case_1": {
"tasks": {
"manual_task_one": {
"data": [{}]
}
},
"expected_output_json": {}
}
}

View File

@ -0,0 +1,39 @@
<?xml version="1.0" encoding="UTF-8"?>
<!-- ProcessA: start -> script task (a = 1) -> end.
     Expected final data: a == 1 (see the sibling test JSON fixture). -->
<bpmn:definitions xmlns:bpmn="http://www.omg.org/spec/BPMN/20100524/MODEL" xmlns:bpmndi="http://www.omg.org/spec/BPMN/20100524/DI" xmlns:dc="http://www.omg.org/spec/DD/20100524/DC" xmlns:di="http://www.omg.org/spec/DD/20100524/DI" id="Definitions_96f6665" targetNamespace="http://bpmn.io/schema/bpmn" exporter="Camunda Modeler" exporterVersion="3.0.0-dev">
  <bpmn:process id="ProcessA" name="ProcessA" isExecutable="true">
    <bpmn:startEvent id="StartEvent_1">
      <bpmn:outgoing>Flow_0jk46kf</bpmn:outgoing>
    </bpmn:startEvent>
    <bpmn:sequenceFlow id="Flow_0jk46kf" sourceRef="StartEvent_1" targetRef="Activity_0e9rl60" />
    <bpmn:endEvent id="Event_1srknca">
      <bpmn:incoming>Flow_0pw6euz</bpmn:incoming>
    </bpmn:endEvent>
    <bpmn:sequenceFlow id="Flow_0pw6euz" sourceRef="Activity_0e9rl60" targetRef="Event_1srknca" />
    <bpmn:scriptTask id="Activity_0e9rl60">
      <bpmn:incoming>Flow_0jk46kf</bpmn:incoming>
      <bpmn:outgoing>Flow_0pw6euz</bpmn:outgoing>
      <bpmn:script>a = 1</bpmn:script>
    </bpmn:scriptTask>
  </bpmn:process>
  <!-- Diagram interchange (layout only; no runtime semantics). -->
  <bpmndi:BPMNDiagram id="BPMNDiagram_1">
    <bpmndi:BPMNPlane id="BPMNPlane_1" bpmnElement="ProcessA">
      <bpmndi:BPMNShape id="_BPMNShape_StartEvent_2" bpmnElement="StartEvent_1">
        <dc:Bounds x="179" y="159" width="36" height="36" />
      </bpmndi:BPMNShape>
      <bpmndi:BPMNShape id="Event_1srknca_di" bpmnElement="Event_1srknca">
        <dc:Bounds x="432" y="159" width="36" height="36" />
      </bpmndi:BPMNShape>
      <bpmndi:BPMNShape id="Activity_0yxs81w_di" bpmnElement="Activity_0e9rl60">
        <dc:Bounds x="270" y="137" width="100" height="80" />
      </bpmndi:BPMNShape>
      <bpmndi:BPMNEdge id="Flow_0jk46kf_di" bpmnElement="Flow_0jk46kf">
        <di:waypoint x="215" y="177" />
        <di:waypoint x="270" y="177" />
      </bpmndi:BPMNEdge>
      <bpmndi:BPMNEdge id="Flow_0pw6euz_di" bpmnElement="Flow_0pw6euz">
        <di:waypoint x="370" y="177" />
        <di:waypoint x="432" y="177" />
      </bpmndi:BPMNEdge>
    </bpmndi:BPMNPlane>
  </bpmndi:BPMNDiagram>
</bpmn:definitions>

View File

@ -0,0 +1,42 @@
<?xml version="1.0" encoding="UTF-8"?>
<!-- Process_39edgqg ("b" model): start -> script task (b = 1) -> end.
     NOTE(review): the END EVENT is the element named/id'd "ProcessB" — the
     process id itself is Process_39edgqg; confirm callers expect this. -->
<bpmn:definitions xmlns:bpmn="http://www.omg.org/spec/BPMN/20100524/MODEL" xmlns:bpmndi="http://www.omg.org/spec/BPMN/20100524/DI" xmlns:dc="http://www.omg.org/spec/DD/20100524/DC" xmlns:di="http://www.omg.org/spec/DD/20100524/DI" id="Definitions_96f6665" targetNamespace="http://bpmn.io/schema/bpmn" exporter="Camunda Modeler" exporterVersion="3.0.0-dev">
  <bpmn:process id="Process_39edgqg" isExecutable="true">
    <bpmn:startEvent id="StartEvent_1">
      <bpmn:outgoing>Flow_1qgv480</bpmn:outgoing>
    </bpmn:startEvent>
    <bpmn:sequenceFlow id="Flow_1qgv480" sourceRef="StartEvent_1" targetRef="Activity_1kral0x" />
    <bpmn:endEvent id="ProcessB" name="ProcessB">
      <bpmn:incoming>Flow_1sbj39z</bpmn:incoming>
    </bpmn:endEvent>
    <bpmn:sequenceFlow id="Flow_1sbj39z" sourceRef="Activity_1kral0x" targetRef="ProcessB" />
    <bpmn:scriptTask id="Activity_1kral0x">
      <bpmn:incoming>Flow_1qgv480</bpmn:incoming>
      <bpmn:outgoing>Flow_1sbj39z</bpmn:outgoing>
      <bpmn:script>b = 1</bpmn:script>
    </bpmn:scriptTask>
  </bpmn:process>
  <!-- Diagram interchange (layout only; no runtime semantics). -->
  <bpmndi:BPMNDiagram id="BPMNDiagram_1">
    <bpmndi:BPMNPlane id="BPMNPlane_1" bpmnElement="Process_39edgqg">
      <bpmndi:BPMNShape id="_BPMNShape_StartEvent_2" bpmnElement="StartEvent_1">
        <dc:Bounds x="179" y="159" width="36" height="36" />
      </bpmndi:BPMNShape>
      <bpmndi:BPMNShape id="Event_12lq7sg_di" bpmnElement="ProcessB">
        <dc:Bounds x="432" y="159" width="36" height="36" />
        <bpmndi:BPMNLabel>
          <dc:Bounds x="427" y="202" width="48" height="14" />
        </bpmndi:BPMNLabel>
      </bpmndi:BPMNShape>
      <bpmndi:BPMNShape id="Activity_0pkm1sr_di" bpmnElement="Activity_1kral0x">
        <dc:Bounds x="270" y="137" width="100" height="80" />
      </bpmndi:BPMNShape>
      <bpmndi:BPMNEdge id="Flow_1qgv480_di" bpmnElement="Flow_1qgv480">
        <di:waypoint x="215" y="177" />
        <di:waypoint x="270" y="177" />
      </bpmndi:BPMNEdge>
      <bpmndi:BPMNEdge id="Flow_1sbj39z_di" bpmnElement="Flow_1sbj39z">
        <di:waypoint x="370" y="177" />
        <di:waypoint x="432" y="177" />
      </bpmndi:BPMNEdge>
    </bpmndi:BPMNPlane>
  </bpmndi:BPMNDiagram>
</bpmn:definitions>

View File

@ -0,0 +1,11 @@
{
"description": "",
"display_name": "Multiple Test Files",
"display_order": 0,
"exception_notification_addresses": [],
"fault_or_suspend_on_exception": "fault",
"files": [],
"metadata_extraction_paths": null,
"primary_file_name": "a.bpmn",
"primary_process_id": "ProcessA"
}

View File

@ -0,0 +1,5 @@
{
"test_case_1": {
"expected_output_json": { "a": 1 }
}
}

View File

@ -0,0 +1,8 @@
{
"test_case_1": {
"expected_output_json": { "b": 1 }
},
"test_case_2": {
"expected_output_json": { "b": 1 }
}
}

View File

@ -0,0 +1,9 @@
{
"admin": false,
"description": "",
"display_name": "Expected To Pass",
"display_order": 0,
"parent_groups": null,
"process_groups": [],
"process_models": []
}

View File

@ -0,0 +1,11 @@
{
"description": "",
"display_name": "Script Task",
"display_order": 0,
"exception_notification_addresses": [],
"fault_or_suspend_on_exception": "fault",
"files": [],
"metadata_extraction_paths": null,
"primary_file_name": "Script.bpmn",
"primary_process_id": "Process_Script_Task"
}

View File

@ -0,0 +1,39 @@
<?xml version="1.0" encoding="UTF-8"?>
<!-- Process_Script_Task: start -> script task "Script" (a = 1) -> end.
     Expected final data: a == 1 (see the sibling test JSON fixture). -->
<bpmn:definitions xmlns:bpmn="http://www.omg.org/spec/BPMN/20100524/MODEL" xmlns:bpmndi="http://www.omg.org/spec/BPMN/20100524/DI" xmlns:dc="http://www.omg.org/spec/DD/20100524/DC" xmlns:di="http://www.omg.org/spec/DD/20100524/DI" id="Definitions_96f6665" targetNamespace="http://bpmn.io/schema/bpmn" exporter="Camunda Modeler" exporterVersion="3.0.0-dev">
  <bpmn:process id="Process_Script_Task" isExecutable="true">
    <bpmn:startEvent id="StartEvent_1">
      <bpmn:outgoing>Flow_0qfycuk</bpmn:outgoing>
    </bpmn:startEvent>
    <bpmn:sequenceFlow id="Flow_0qfycuk" sourceRef="StartEvent_1" targetRef="Activity_1qdbp6x" />
    <bpmn:endEvent id="Event_1kumwb5">
      <bpmn:incoming>Flow_1auiekw</bpmn:incoming>
    </bpmn:endEvent>
    <bpmn:sequenceFlow id="Flow_1auiekw" sourceRef="Activity_1qdbp6x" targetRef="Event_1kumwb5" />
    <bpmn:scriptTask id="Activity_1qdbp6x" name="Script">
      <bpmn:incoming>Flow_0qfycuk</bpmn:incoming>
      <bpmn:outgoing>Flow_1auiekw</bpmn:outgoing>
      <bpmn:script>a = 1</bpmn:script>
    </bpmn:scriptTask>
  </bpmn:process>
  <!-- Diagram interchange (layout only; no runtime semantics). -->
  <bpmndi:BPMNDiagram id="BPMNDiagram_1">
    <bpmndi:BPMNPlane id="BPMNPlane_1" bpmnElement="Process_Script_Task">
      <bpmndi:BPMNShape id="_BPMNShape_StartEvent_2" bpmnElement="StartEvent_1">
        <dc:Bounds x="179" y="159" width="36" height="36" />
      </bpmndi:BPMNShape>
      <bpmndi:BPMNShape id="Event_1kumwb5_di" bpmnElement="Event_1kumwb5">
        <dc:Bounds x="432" y="159" width="36" height="36" />
      </bpmndi:BPMNShape>
      <bpmndi:BPMNShape id="Activity_0ii0b3p_di" bpmnElement="Activity_1qdbp6x">
        <dc:Bounds x="270" y="137" width="100" height="80" />
      </bpmndi:BPMNShape>
      <bpmndi:BPMNEdge id="Flow_0qfycuk_di" bpmnElement="Flow_0qfycuk">
        <di:waypoint x="215" y="177" />
        <di:waypoint x="270" y="177" />
      </bpmndi:BPMNEdge>
      <bpmndi:BPMNEdge id="Flow_1auiekw_di" bpmnElement="Flow_1auiekw">
        <di:waypoint x="370" y="177" />
        <di:waypoint x="432" y="177" />
      </bpmndi:BPMNEdge>
    </bpmndi:BPMNPlane>
  </bpmndi:BPMNDiagram>
</bpmn:definitions>

View File

@ -0,0 +1,5 @@
{
"test_case_1": {
"expected_output_json": { "a": 1 }
}
}

View File

@ -0,0 +1,10 @@
{
"description": "A.1.0.2",
"display_name": "A.1.0.2 - Service Task",
"display_order": 13,
"exception_notification_addresses": [],
"fault_or_suspend_on_exception": "fault",
"files": [],
"primary_file_name": "A.1.0.2.bpmn",
"primary_process_id": "Process_test_a102_A_1_0_2_bd2e724"
}

View File

@ -0,0 +1,56 @@
<?xml version="1.0" encoding="UTF-8"?>
<!-- ServiceTaskProcess: start -> service_task_one (http/GetRequest) -> end. -->
<bpmn:definitions xmlns:bpmn="http://www.omg.org/spec/BPMN/20100524/MODEL" xmlns:bpmndi="http://www.omg.org/spec/BPMN/20100524/DI" xmlns:dc="http://www.omg.org/spec/DD/20100524/DC" xmlns:di="http://www.omg.org/spec/DD/20100524/DI" xmlns:spiffworkflow="http://spiffworkflow.org/bpmn/schema/1.0/core" id="Definitions_96f6665" targetNamespace="http://bpmn.io/schema/bpmn" exporter="Camunda Modeler" exporterVersion="3.0.0-dev">
  <bpmn:process id="ServiceTaskProcess" isExecutable="true">
    <bpmn:startEvent id="StartEvent_1">
      <bpmn:outgoing>Flow_19ephzh</bpmn:outgoing>
    </bpmn:startEvent>
    <bpmn:sequenceFlow id="Flow_19ephzh" sourceRef="StartEvent_1" targetRef="service_task_one" />
    <bpmn:endEvent id="Event_132m0z7">
      <bpmn:incoming>Flow_1dsxn78</bpmn:incoming>
    </bpmn:endEvent>
    <bpmn:sequenceFlow id="Flow_1dsxn78" sourceRef="service_task_one" targetRef="Event_132m0z7" />
    <bpmn:serviceTask id="service_task_one" name="Task 2">
      <bpmn:extensionElements>
        <spiffworkflow:serviceTaskOperator id="http/GetRequest" resultVariable="bamboo_get_employee">
          <spiffworkflow:parameters>
            <spiffworkflow:parameter id="basic_auth_password" type="str" value="&#34;x&#34;" />
            <spiffworkflow:parameter id="basic_auth_username" type="str" value="&#34;x&#34;" />
            <spiffworkflow:parameter id="headers" type="any" value="{&#34;Accept&#34;: &#34;application/json&#34;}" />
            <spiffworkflow:parameter id="params" type="any" value="{}" />
            <spiffworkflow:parameter id="url" type="str" value="f&#34;https://example.com/api/user&#34;" />
          </spiffworkflow:parameters>
        </spiffworkflow:serviceTaskOperator>
        <spiffworkflow:instructionsForEndUser>This is the Service Task Unit Test Screen.</spiffworkflow:instructionsForEndUser>
        <spiffworkflow:postScript />
      </bpmn:extensionElements>
      <!-- Fixed: incoming was Flow_0xx2kop, which is not defined as a
           sequenceFlow anywhere in this process; the flow that actually
           targets service_task_one is Flow_19ephzh. -->
      <bpmn:incoming>Flow_19ephzh</bpmn:incoming>
      <bpmn:outgoing>Flow_1dsxn78</bpmn:outgoing>
    </bpmn:serviceTask>
  </bpmn:process>
  <bpmndi:BPMNDiagram id="BPMNDiagram_1">
    <bpmndi:BPMNPlane id="BPMNPlane_1" bpmnElement="ServiceTaskProcess">
      <!-- Fixed: removed orphan edge Flow_0xx2kop_di (its bpmnElement did not
           exist) and extended this edge to reach the task shape at x=430. -->
      <bpmndi:BPMNEdge id="Flow_19ephzh_di" bpmnElement="Flow_19ephzh">
        <di:waypoint x="215" y="177" />
        <di:waypoint x="430" y="177" />
      </bpmndi:BPMNEdge>
      <bpmndi:BPMNEdge id="Flow_1dsxn78_di" bpmnElement="Flow_1dsxn78">
        <di:waypoint x="530" y="177" />
        <di:waypoint x="592" y="177" />
      </bpmndi:BPMNEdge>
      <bpmndi:BPMNShape id="_BPMNShape_StartEvent_2" bpmnElement="StartEvent_1">
        <dc:Bounds x="179" y="159" width="36" height="36" />
      </bpmndi:BPMNShape>
      <bpmndi:BPMNShape id="Event_132m0z7_di" bpmnElement="Event_132m0z7">
        <dc:Bounds x="592" y="159" width="36" height="36" />
      </bpmndi:BPMNShape>
      <bpmndi:BPMNShape id="Activity_1nlg9cc_di" bpmnElement="service_task_one">
        <dc:Bounds x="430" y="137" width="100" height="80" />
        <bpmndi:BPMNLabel />
      </bpmndi:BPMNShape>
    </bpmndi:BPMNPlane>
  </bpmndi:BPMNDiagram>
</bpmn:definitions>

View File

@ -0,0 +1,10 @@
{
"test_case_1": {
"tasks": {
"ServiceTaskProcess:service_task_one": {
"data": [{ "the_result": "result_from_service" }]
}
},
"expected_output_json": { "the_result": "result_from_service" }
}
}

View File

@ -11,13 +11,13 @@
<bpmn:scriptTask id="save_key1">
<bpmn:incoming>Flow_1j4jzft</bpmn:incoming>
<bpmn:outgoing>Flow_10xyk22</bpmn:outgoing>
<bpmn:script>save_process_instance_metadata({"key1": "value1"})</bpmn:script>
<bpmn:script>key1 = "value1"</bpmn:script>
</bpmn:scriptTask>
<bpmn:sequenceFlow id="Flow_10xyk22" sourceRef="save_key1" targetRef="save_key2" />
<bpmn:scriptTask id="save_key2">
<bpmn:incoming>Flow_10xyk22</bpmn:incoming>
<bpmn:outgoing>Flow_01xr2ac</bpmn:outgoing>
<bpmn:script>save_process_instance_metadata({"key2": "value2", "key3": "value3"})</bpmn:script>
<bpmn:script>key2 = "value2"; key3 = "value3"</bpmn:script>
</bpmn:scriptTask>
<bpmn:sequenceFlow id="Flow_01xr2ac" sourceRef="save_key2" targetRef="Event_1s123jg" />
</bpmn:process>

View File

@ -14,6 +14,7 @@ from werkzeug.test import TestResponse # type: ignore
from spiffworkflow_backend.exceptions.api_error import ApiError
from spiffworkflow_backend.models.db import db
from spiffworkflow_backend.models.message_instance import MessageInstanceModel
from spiffworkflow_backend.models.permission_assignment import Permission
from spiffworkflow_backend.models.permission_target import PermissionTargetModel
from spiffworkflow_backend.models.process_group import ProcessGroup
@ -26,9 +27,11 @@ from spiffworkflow_backend.models.process_model import ProcessModelInfoSchema
from spiffworkflow_backend.models.user import UserModel
from spiffworkflow_backend.services.authorization_service import AuthorizationService
from spiffworkflow_backend.services.file_system_service import FileSystemService
from spiffworkflow_backend.services.process_instance_processor import ProcessInstanceProcessor
from spiffworkflow_backend.services.process_instance_queue_service import (
ProcessInstanceQueueService,
)
from spiffworkflow_backend.services.process_instance_service import ProcessInstanceService
from spiffworkflow_backend.services.process_model_service import ProcessModelService
from spiffworkflow_backend.services.user_service import UserService
@ -56,7 +59,6 @@ class BaseTest:
@staticmethod
def logged_in_headers(user: UserModel, _redirect_url: str = "http://some/frontend/url") -> Dict[str, str]:
"""Logged_in_headers."""
return dict(Authorization="Bearer " + user.encode_auth_token())
def create_group_and_model_with_bpmn(
@ -303,7 +305,8 @@ class BaseTest:
db.session.add(process_instance)
db.session.commit()
ProcessInstanceQueueService.enqueue_new_process_instance(process_instance)
run_at_in_seconds = round(time.time())
ProcessInstanceQueueService.enqueue_new_process_instance(process_instance, run_at_in_seconds)
return process_instance
@ -314,7 +317,6 @@ class BaseTest:
target_uri: str = PermissionTargetModel.URI_ALL,
permission_names: Optional[list[str]] = None,
) -> UserModel:
"""Create_user_with_permission."""
user = BaseTest.find_or_create_user(username=username)
return cls.add_permissions_to_user(user, target_uri=target_uri, permission_names=permission_names)
@ -325,7 +327,6 @@ class BaseTest:
target_uri: str = PermissionTargetModel.URI_ALL,
permission_names: Optional[list[str]] = None,
) -> UserModel:
"""Add_permissions_to_user."""
permission_target = AuthorizationService.find_or_create_permission_target(target_uri)
if permission_names is None:
@ -401,3 +402,65 @@ class BaseTest:
def empty_report_metadata_body(self) -> ReportMetadata:
return {"filter_by": [], "columns": [], "order_by": []}
def start_sender_process(
self,
client: FlaskClient,
payload: dict,
group_name: str = "test_group",
) -> ProcessInstanceModel:
process_model = load_test_spec(
"test_group/message",
process_model_source_directory="message_send_one_conversation",
bpmn_file_name="message_sender.bpmn", # Slightly misnamed, it sends and receives
)
process_instance = self.create_process_instance_from_process_model(process_model)
processor_send_receive = ProcessInstanceProcessor(process_instance)
processor_send_receive.do_engine_steps(save=True)
task = processor_send_receive.get_all_user_tasks()[0]
human_task = process_instance.active_human_tasks[0]
ProcessInstanceService.complete_form_task(
processor_send_receive,
task,
payload,
process_instance.process_initiator,
human_task,
)
processor_send_receive.save()
return process_instance
def assure_a_message_was_sent(self, process_instance: ProcessInstanceModel, payload: dict) -> None:
# There should be one new send message for the given process instance.
send_messages = (
MessageInstanceModel.query.filter_by(message_type="send")
.filter_by(process_instance_id=process_instance.id)
.order_by(MessageInstanceModel.id)
.all()
)
assert len(send_messages) == 1
send_message = send_messages[0]
assert send_message.payload == payload, "The send message should match up with the payload"
assert send_message.name == "Request Approval"
assert send_message.status == "ready"
def assure_there_is_a_process_waiting_on_a_message(self, process_instance: ProcessInstanceModel) -> None:
# There should be one new send message for the given process instance.
waiting_messages = (
MessageInstanceModel.query.filter_by(message_type="receive")
.filter_by(status="ready")
.filter_by(process_instance_id=process_instance.id)
.order_by(MessageInstanceModel.id)
.all()
)
assert len(waiting_messages) == 1
waiting_message = waiting_messages[0]
self.assure_correlation_properties_are_right(waiting_message)
def assure_correlation_properties_are_right(self, message: MessageInstanceModel) -> None:
# Correlation Properties should match up
po_curr = next(c for c in message.correlation_rules if c.name == "po_number")
customer_curr = next(c for c in message.correlation_rules if c.name == "customer_id")
assert po_curr is not None
assert customer_curr is not None

View File

@ -12,12 +12,10 @@ from spiffworkflow_backend.services.process_model_service import ProcessModelSer
def assure_process_group_exists(process_group_id: Optional[str] = None) -> ProcessGroup:
"""Assure_process_group_exists."""
process_group = None
process_model_service = ProcessModelService()
if process_group_id is not None:
try:
process_group = process_model_service.get_process_group(process_group_id)
process_group = ProcessModelService.get_process_group(process_group_id)
except ProcessEntityNotFoundError:
process_group = None
@ -31,7 +29,7 @@ def assure_process_group_exists(process_group_id: Optional[str] = None) -> Proce
admin=False,
display_order=0,
)
process_model_service.add_process_group(process_group)
ProcessModelService.add_process_group(process_group)
return process_group

View File

@ -10,7 +10,7 @@ class TestDebugController(BaseTest):
client: FlaskClient,
with_db_and_bpmn_file_cleanup: None,
) -> None:
response = client.get(
response = client.post(
"/v1.0/debug/test-raise-error",
)
assert response.status_code == 500

View File

@ -25,14 +25,12 @@ class TestLoggingService(BaseTest):
with_db_and_bpmn_file_cleanup: None,
with_super_admin_user: UserModel,
) -> None:
self.create_process_group_with_api(client, with_super_admin_user, "test_group", "test_group")
initiator_user = self.find_or_create_user("initiator_user")
assert initiator_user.principal is not None
AuthorizationService.import_permissions_from_yaml_file()
process_model = load_test_spec(
process_model_id="misc/category_number_one/simple_form",
# bpmn_file_name="simp.bpmn",
process_model_source_directory="simple_form",
)
process_instance = self.create_process_instance_from_process_model(
@ -85,14 +83,12 @@ class TestLoggingService(BaseTest):
with_db_and_bpmn_file_cleanup: None,
with_super_admin_user: UserModel,
) -> None:
self.create_process_group_with_api(client, with_super_admin_user, "test_group", "test_group")
initiator_user = self.find_or_create_user("initiator_user")
assert initiator_user.principal is not None
AuthorizationService.import_permissions_from_yaml_file()
process_model = load_test_spec(
process_model_id="misc/category_number_one/simple_form",
# bpmn_file_name="simp.bpmn",
process_model_source_directory="simple_form",
)
process_instance = self.create_process_instance_from_process_model(

View File

@ -0,0 +1,63 @@
import pytest
from flask import Flask
from flask import g
from flask.testing import FlaskClient
from tests.spiffworkflow_backend.helpers.base_test import BaseTest
from spiffworkflow_backend.exceptions.api_error import ApiError
from spiffworkflow_backend.models.message_instance import MessageInstanceModel
from spiffworkflow_backend.routes.messages_controller import message_send
class TestMessages(BaseTest):
    """Tests for delivering API messages into running process instances."""
    def test_message_from_api_into_running_process(
        self,
        app: Flask,
        client: FlaskClient,
        with_db_and_bpmn_file_cleanup: None,
    ) -> None:
        """Test sending a message to a running process via the API.
        This example workflow will send a message called 'request_approval' and then wait for a response message
        of 'approval_result'. This test assures that it will fire the message with the correct correlation properties
        and will respond only to a message called 'approval_result' that has the matching correlation properties,
        as sent by an API Call.
        """
        # Payload fields po_number and customer_id double as the message's
        # correlation properties (see assure_correlation_properties_are_right).
        payload = {
            "customer_id": "Sartography",
            "po_number": 1001,
            "description": "We built a new feature for messages!",
            "amount": "100.00",
        }
        # Starts the sender model, completes its first user task with the
        # payload, and leaves the instance waiting on 'approval_result'.
        process_instance = self.start_sender_process(client, payload, "test_from_api")
        self.assure_a_message_was_sent(process_instance, payload)
        self.assure_there_is_a_process_waiting_on_a_message(process_instance)
        # NOTE(review): message_send appears to read g.user for the acting
        # user — confirm against messages_controller before relying on this.
        g.user = process_instance.process_initiator
        # Make an API call to the service endpoint, but use the wrong po number
        with pytest.raises(ApiError):
            message_send("Approval Result", {"payload": {"po_number": 5001}})
        # Should return an error when making an API call for right po number, wrong client
        with pytest.raises(ApiError):
            message_send(
                "Approval Result",
                {"payload": {"po_number": 1001, "customer_id": "jon"}},
            )
        # No error when calling with the correct parameters
        message_send(
            "Approval Result",
            {"payload": {"po_number": 1001, "customer_id": "Sartography"}},
        )
        # There is no longer a waiting message
        waiting_messages = (
            MessageInstanceModel.query.filter_by(message_type="receive")
            .filter_by(status="ready")
            .filter_by(process_instance_id=process_instance.id)
            .all()
        )
        assert len(waiting_messages) == 0
        # The process has completed
        assert process_instance.status == "complete"

View File

@ -182,7 +182,7 @@ class TestProcessApi(BaseTest):
user=with_super_admin_user,
)
response = client.post(
f"/v1.0/process-models-natural-language/{process_group_id}",
f"/v1.0/process-model-natural-language/{process_group_id}",
content_type="application/json",
data=json.dumps(body),
headers=self.logged_in_headers(with_super_admin_user),
@ -238,9 +238,6 @@ class TestProcessApi(BaseTest):
process_model_identifier = f"{process_group_id}/{process_model_id}"
initial_primary_process_id = "sample"
terminal_primary_process_id = "new_process_id"
self.create_process_group_with_api(
client=client, user=with_super_admin_user, process_group_id=process_group_id
)
bpmn_file_name = f"{process_model_id}.bpmn"
bpmn_file_source_directory = process_model_id
@ -282,14 +279,11 @@ class TestProcessApi(BaseTest):
) -> None:
"""Test_process_model_delete."""
process_group_id = "test_process_group"
process_group_description = "Test Process Group"
process_model_id = "sample"
process_model_identifier = f"{process_group_id}/{process_model_id}"
self.create_process_group_with_api(client, with_super_admin_user, process_group_id, process_group_description)
self.create_process_model_with_api(
client,
process_model = load_test_spec(
process_model_id=process_model_identifier,
user=with_super_admin_user,
process_model_source_directory=process_model_id,
)
# assert we have a model
@ -2349,7 +2343,6 @@ class TestProcessApi(BaseTest):
with_db_and_bpmn_file_cleanup: None,
with_super_admin_user: UserModel,
) -> None:
"""Test_correct_user_can_get_and_update_a_task."""
initiator_user = self.find_or_create_user("testuser4")
finance_user = self.find_or_create_user("testuser2")
assert initiator_user.principal is not None
@ -2372,15 +2365,8 @@ class TestProcessApi(BaseTest):
bpmn_file_location=bpmn_file_location,
)
# process_model = load_test_spec(
# process_model_id="model_with_lanes",
# bpmn_file_name="lanes.bpmn",
# process_group_id="finance",
# )
response = self.create_process_instance_from_process_model_id_with_api(
client,
# process_model.process_group_id,
process_model_identifier,
headers=self.logged_in_headers(initiator_user),
)
@ -3041,7 +3027,7 @@ class TestProcessApi(BaseTest):
#
# # modified_process_model_id = process_model_identifier.replace("/", ":")
# # response = client.post(
# # f"/v1.0/process-models/{modified_process_model_id}/publish?branch_to_update=staging",
# # f"/v1.0/process-model-publish/{modified_process_model_id}?branch_to_update=staging",
# # headers=self.logged_in_headers(with_super_admin_user),
# # )
#
@ -3059,6 +3045,17 @@ class TestProcessApi(BaseTest):
bpmn_file_name="save_process_instance_metadata.bpmn",
process_model_source_directory="save_process_instance_metadata",
)
ProcessModelService.update_process_model(
process_model,
{
"metadata_extraction_paths": [
{"key": "key1", "path": "key1"},
{"key": "key2", "path": "key2"},
{"key": "key3", "path": "key3"},
]
},
)
process_instance = self.create_process_instance_from_process_model(
process_model=process_model, user=with_super_admin_user
)
@ -3196,6 +3193,16 @@ class TestProcessApi(BaseTest):
bpmn_file_name="save_process_instance_metadata.bpmn",
process_model_source_directory="save_process_instance_metadata",
)
ProcessModelService.update_process_model(
process_model,
{
"metadata_extraction_paths": [
{"key": "key1", "path": "key1"},
{"key": "key2", "path": "key2"},
{"key": "key3", "path": "key3"},
]
},
)
process_instance = self.create_process_instance_from_process_model(
process_model=process_model, user=with_super_admin_user
)
@ -3270,7 +3277,6 @@ class TestProcessApi(BaseTest):
with_super_admin_user: UserModel,
) -> None:
"""Test_process_instance_list_can_order_by_metadata."""
self.create_process_group_with_api(client, with_super_admin_user, "test_group", "test_group")
process_model = load_test_spec(
"test_group/hello_world",
process_model_source_directory="nested-task-data-structure",

View File

@ -1,31 +1,21 @@
"""Test_get_localtime."""
from operator import itemgetter
from flask.app import Flask
from flask.testing import FlaskClient
from tests.spiffworkflow_backend.helpers.base_test import BaseTest
from spiffworkflow_backend.models.script_attributes_context import (
ScriptAttributesContext,
)
from spiffworkflow_backend.models.user import UserModel
from spiffworkflow_backend.scripts.get_all_permissions import GetAllPermissions
from spiffworkflow_backend.services.authorization_service import AuthorizationService
class TestGetAllPermissions(BaseTest):
"""TestGetAllPermissions."""
def test_can_get_all_permissions(
self,
app: Flask,
client: FlaskClient,
with_db_and_bpmn_file_cleanup: None,
with_super_admin_user: UserModel,
) -> None:
"""Test_can_get_all_permissions."""
self.find_or_create_user("test_user")
# now that we have everything, try to clear it out...
script_attributes_context = ScriptAttributesContext(
task=None,

View File

@ -3,14 +3,12 @@ import json
from flask import g
from flask.app import Flask
from flask.testing import FlaskClient
from tests.spiffworkflow_backend.helpers.base_test import BaseTest
from spiffworkflow_backend.models.db import db
from spiffworkflow_backend.models.script_attributes_context import (
ScriptAttributesContext,
)
from spiffworkflow_backend.models.user import UserModel
from spiffworkflow_backend.scripts.get_current_user import GetCurrentUser
@ -18,11 +16,8 @@ class TestGetCurrentUser(BaseTest):
def test_get_current_user(
self,
app: Flask,
client: FlaskClient,
with_db_and_bpmn_file_cleanup: None,
with_super_admin_user: UserModel,
) -> None:
"""Test_can_get_members_of_a_group."""
testuser1 = self.find_or_create_user("testuser1")
testuser1.tenant_specific_field_1 = "456"
db.session.add(testuser1)

View File

@ -1,12 +1,9 @@
"""Test_get_localtime."""
from flask.app import Flask
from flask.testing import FlaskClient
from tests.spiffworkflow_backend.helpers.base_test import BaseTest
from tests.spiffworkflow_backend.helpers.test_data import load_test_spec
from spiffworkflow_backend.models.db import db
from spiffworkflow_backend.models.group import GroupModel
from spiffworkflow_backend.models.user import UserModel
from spiffworkflow_backend.services.process_instance_processor import (
ProcessInstanceProcessor,
)
@ -14,16 +11,11 @@ from spiffworkflow_backend.services.user_service import UserService
class TestGetGroupMembers(BaseTest):
"""TestGetGroupMembers."""
def test_can_get_members_of_a_group(
self,
app: Flask,
client: FlaskClient,
with_db_and_bpmn_file_cleanup: None,
with_super_admin_user: UserModel,
) -> None:
"""Test_can_get_members_of_a_group."""
initiator_user = self.find_or_create_user("initiator_user")
testuser1 = self.find_or_create_user("testuser1")
testuser2 = self.find_or_create_user("testuser2")
@ -38,7 +30,6 @@ class TestGetGroupMembers(BaseTest):
UserService.add_user_to_group(testuser2, group_a)
UserService.add_user_to_group(testuser3, group_b)
self.create_process_group_with_api(client, with_super_admin_user, "test_group", "test_group")
process_model = load_test_spec(
process_model_id="test_group/get_group_members",
bpmn_file_name="get_group_members.bpmn",

View File

@ -1,10 +1,7 @@
"""Test_get_localtime."""
from flask.app import Flask
from flask.testing import FlaskClient
from tests.spiffworkflow_backend.helpers.base_test import BaseTest
from tests.spiffworkflow_backend.helpers.test_data import load_test_spec
from spiffworkflow_backend.models.user import UserModel
from spiffworkflow_backend.services.authorization_service import AuthorizationService
from spiffworkflow_backend.services.process_instance_processor import (
ProcessInstanceProcessor,
@ -18,12 +15,8 @@ class TestGetLastUserCompletingTask(BaseTest):
def test_get_last_user_completing_task_script_works(
self,
app: Flask,
client: FlaskClient,
with_db_and_bpmn_file_cleanup: None,
with_super_admin_user: UserModel,
) -> None:
"""Test_sets_permission_correctly_on_human_task."""
self.create_process_group_with_api(client, with_super_admin_user, "test_group", "test_group")
initiator_user = self.find_or_create_user("initiator_user")
assert initiator_user.principal is not None
AuthorizationService.import_permissions_from_yaml_file()

View File

@ -1,9 +1,7 @@
"""Test_get_localtime."""
import datetime
import pytz
from flask.app import Flask
from flask.testing import FlaskClient
from tests.spiffworkflow_backend.helpers.base_test import BaseTest
from tests.spiffworkflow_backend.helpers.test_data import load_test_spec
@ -20,10 +18,7 @@ from spiffworkflow_backend.services.process_instance_service import (
class TestGetLocaltime(BaseTest):
"""TestProcessAPi."""
def test_get_localtime_script_directly(self) -> None:
"""Test_get_localtime_script_directly."""
current_time = datetime.datetime.now()
timezone = "US/Pacific"
process_model_identifier = "test_process_model"
@ -44,17 +39,14 @@ class TestGetLocaltime(BaseTest):
def test_get_localtime_script_through_bpmn(
self,
app: Flask,
client: FlaskClient,
with_db_and_bpmn_file_cleanup: None,
) -> None:
"""Test_process_instance_run."""
initiator_user = self.find_or_create_user("initiator_user")
self.add_permissions_to_user(
initiator_user,
target_uri="/v1.0/process-groups",
permission_names=["read", "create"],
)
self.create_process_group_with_api(client=client, user=initiator_user, process_group_id="test_group")
process_model = load_test_spec(
process_model_id="test_group/get_localtime",
bpmn_file_name="get_localtime.bpmn",

View File

@ -1,10 +1,8 @@
"""Test_get_localtime."""
from flask.app import Flask
from flask.testing import FlaskClient
from tests.spiffworkflow_backend.helpers.base_test import BaseTest
from tests.spiffworkflow_backend.helpers.test_data import load_test_spec
from spiffworkflow_backend.models.user import UserModel
from spiffworkflow_backend.services.authorization_service import AuthorizationService
from spiffworkflow_backend.services.process_instance_processor import (
ProcessInstanceProcessor,
@ -18,19 +16,15 @@ class TestGetProcessInitiatorUser(BaseTest):
def test_get_process_initiator_user(
self,
app: Flask,
client: FlaskClient,
with_db_and_bpmn_file_cleanup: None,
with_super_admin_user: UserModel,
) -> None:
"""Test_sets_permission_correctly_on_human_task."""
self.create_process_group_with_api(client, with_super_admin_user, "test_group", "test_group")
initiator_user = self.find_or_create_user("initiator_user")
assert initiator_user.principal is not None
AuthorizationService.import_permissions_from_yaml_file()
process_model = load_test_spec(
process_model_id="misc/category_number_one/simple_form",
# bpmn_file_name="simp.bpmn",
process_model_source_directory="simple_form",
)
process_instance = self.create_process_instance_from_process_model(

View File

@ -1,7 +1,6 @@
"""Test_get_localtime."""
import pytest
from flask.app import Flask
from flask.testing import FlaskClient
from tests.spiffworkflow_backend.helpers.base_test import BaseTest
from tests.spiffworkflow_backend.helpers.test_data import load_test_spec
@ -15,7 +14,6 @@ class TestRefreshPermissions(BaseTest):
def test_refresh_permissions_requires_elevated_permission(
self,
app: Flask,
client: FlaskClient,
with_db_and_bpmn_file_cleanup: None,
) -> None:
basic_user = self.find_or_create_user("basic_user")

View File

@ -1,42 +0,0 @@
"""Test_get_localtime."""
from flask.app import Flask
from flask.testing import FlaskClient
from tests.spiffworkflow_backend.helpers.base_test import BaseTest
from tests.spiffworkflow_backend.helpers.test_data import load_test_spec
from spiffworkflow_backend.models.process_instance_metadata import (
ProcessInstanceMetadataModel,
)
from spiffworkflow_backend.models.user import UserModel
from spiffworkflow_backend.services.process_instance_processor import (
ProcessInstanceProcessor,
)
class TestSaveProcessInstanceMetadata(BaseTest):
"""TestSaveProcessInstanceMetadata."""
def test_can_save_process_instance_metadata(
self,
app: Flask,
client: FlaskClient,
with_db_and_bpmn_file_cleanup: None,
with_super_admin_user: UserModel,
) -> None:
"""Test_can_save_process_instance_metadata."""
self.create_process_group_with_api(client, with_super_admin_user, "test_group", "test_group")
process_model = load_test_spec(
process_model_id="save_process_instance_metadata/save_process_instance_metadata",
bpmn_file_name="save_process_instance_metadata.bpmn",
process_model_source_directory="save_process_instance_metadata",
)
process_instance = self.create_process_instance_from_process_model(
process_model=process_model, user=with_super_admin_user
)
processor = ProcessInstanceProcessor(process_instance)
processor.do_engine_steps(save=True)
process_instance_metadata = ProcessInstanceMetadataModel.query.filter_by(
process_instance_id=process_instance.id
).all()
assert len(process_instance_metadata) == 3

View File

@ -3,11 +3,11 @@ import pytest
from flask import Flask
from flask.testing import FlaskClient
from tests.spiffworkflow_backend.helpers.base_test import BaseTest
from tests.spiffworkflow_backend.helpers.test_data import load_test_spec
from spiffworkflow_backend.models.group import GroupModel
from spiffworkflow_backend.models.user import UserModel
from spiffworkflow_backend.models.user import UserNotFoundError
from spiffworkflow_backend.services.authorization_service import AuthorizationService
from spiffworkflow_backend.services.authorization_service import GroupPermissionsDict
from spiffworkflow_backend.services.authorization_service import InvalidPermissionError
from spiffworkflow_backend.services.group_service import GroupService
from spiffworkflow_backend.services.process_instance_processor import (
@ -16,24 +16,14 @@ from spiffworkflow_backend.services.process_instance_processor import (
from spiffworkflow_backend.services.process_instance_service import (
ProcessInstanceService,
)
from spiffworkflow_backend.services.process_model_service import ProcessModelService
from spiffworkflow_backend.services.user_service import UserService
class TestAuthorizationService(BaseTest):
"""TestAuthorizationService."""
def test_can_raise_if_missing_user(self, app: Flask, with_db_and_bpmn_file_cleanup: None) -> None:
"""Test_can_raise_if_missing_user."""
with pytest.raises(UserNotFoundError):
AuthorizationService.import_permissions_from_yaml_file(raise_if_missing_user=True)
def test_does_not_fail_if_user_not_created(self, app: Flask, with_db_and_bpmn_file_cleanup: None) -> None:
"""Test_does_not_fail_if_user_not_created."""
AuthorizationService.import_permissions_from_yaml_file()
def test_can_import_permissions_from_yaml(self, app: Flask, with_db_and_bpmn_file_cleanup: None) -> None:
"""Test_can_import_permissions_from_yaml."""
usernames = [
"testadmin1",
"testadmin2",
@ -56,22 +46,19 @@ class TestAuthorizationService(BaseTest):
assert testuser1_group_identifiers == ["Finance Team", "everybody"]
assert len(users["testuser2"].groups) == 3
self.assert_user_has_permission(users["testuser1"], "update", "/v1.0/process-groups/finance/model1")
self.assert_user_has_permission(users["testuser1"], "update", "/v1.0/process-groups/finance/")
self.assert_user_has_permission(users["testuser1"], "update", "/v1.0/process-groups/finance:model1")
self.assert_user_has_permission(users["testuser1"], "update", "/v1.0/process-groups/finance")
self.assert_user_has_permission(users["testuser1"], "update", "/v1.0/process-groups/", expected_result=False)
self.assert_user_has_permission(users["testuser4"], "update", "/v1.0/process-groups/finance/model1")
# via the user, not the group
self.assert_user_has_permission(users["testuser4"], "read", "/v1.0/process-groups/finance/model1")
self.assert_user_has_permission(users["testuser2"], "update", "/v1.0/process-groups/finance/model1")
self.assert_user_has_permission(users["testuser2"], "update", "/v1.0/process-groups/", expected_result=False)
self.assert_user_has_permission(users["testuser2"], "read", "/v1.0/process-groups/")
self.assert_user_has_permission(users["testuser4"], "read", "/v1.0/process-groups/finance:model1")
self.assert_user_has_permission(users["testuser2"], "update", "/v1.0/process-groups/finance:model1")
self.assert_user_has_permission(users["testuser2"], "update", "/v1.0/process-groups", expected_result=False)
self.assert_user_has_permission(users["testuser2"], "read", "/v1.0/process-groups")
def test_user_can_be_added_to_human_task_on_first_login(
self,
app: Flask,
client: FlaskClient,
with_db_and_bpmn_file_cleanup: None,
with_super_admin_user: UserModel,
) -> None:
"""Test_user_can_be_added_to_human_task_on_first_login."""
initiator_user = self.find_or_create_user("initiator_user")
@ -80,16 +67,12 @@ class TestAuthorizationService(BaseTest):
self.find_or_create_user("testuser1")
AuthorizationService.import_permissions_from_yaml_file()
process_model_identifier = self.create_group_and_model_with_bpmn(
client=client,
user=with_super_admin_user,
process_group_id="test_group",
process_model_id="model_with_lanes",
process_model = load_test_spec(
process_model_id="test_group/model_with_lanes",
bpmn_file_name="lanes.bpmn",
bpmn_file_location="model_with_lanes",
process_model_source_directory="model_with_lanes",
)
process_model = ProcessModelService.get_process_model(process_model_id=process_model_identifier)
process_instance = self.create_process_instance_from_process_model(
process_model=process_model, user=initiator_user
)
@ -121,7 +104,6 @@ class TestAuthorizationService(BaseTest):
client: FlaskClient,
with_db_and_bpmn_file_cleanup: None,
) -> None:
"""Test_explode_permissions_all_on_process_group."""
expected_permissions = sorted(
[
("/event-error-details/some-process-group:some-process-model:*", "read"),
@ -153,6 +135,8 @@ class TestAuthorizationService(BaseTest):
"delete",
),
("/process-instances/some-process-group:some-process-model:*", "read"),
("/process-model-natural-language/some-process-group:some-process-model:*", "create"),
("/process-model-publish/some-process-group:some-process-model:*", "create"),
("/process-models/some-process-group:some-process-model:*", "create"),
("/process-models/some-process-group:some-process-model:*", "delete"),
("/process-models/some-process-group:some-process-model:*", "read"),
@ -174,26 +158,28 @@ class TestAuthorizationService(BaseTest):
with_db_and_bpmn_file_cleanup: None,
) -> None:
"""Test_explode_permissions_start_on_process_group."""
expected_permissions = [
("/event-error-details/some-process-group:some-process-model:*", "read"),
(
"/logs/some-process-group:some-process-model:*",
"read",
),
(
"/logs/typeahead-filter-values/some-process-group:some-process-model:*",
"read",
),
(
"/process-data-file-download/some-process-group:some-process-model:*",
"read",
),
(
"/process-instances/for-me/some-process-group:some-process-model:*",
"read",
),
("/process-instances/some-process-group:some-process-model:*", "create"),
]
expected_permissions = sorted(
[
("/event-error-details/some-process-group:some-process-model:*", "read"),
(
"/logs/some-process-group:some-process-model:*",
"read",
),
(
"/logs/typeahead-filter-values/some-process-group:some-process-model:*",
"read",
),
(
"/process-data-file-download/some-process-group:some-process-model:*",
"read",
),
(
"/process-instances/for-me/some-process-group:some-process-model:*",
"read",
),
("/process-instances/some-process-group:some-process-model:*", "create"),
]
)
permissions_to_assign = AuthorizationService.explode_permissions(
"start", "PG:/some-process-group/some-process-model"
)
@ -206,7 +192,6 @@ class TestAuthorizationService(BaseTest):
client: FlaskClient,
with_db_and_bpmn_file_cleanup: None,
) -> None:
"""Test_explode_permissions_all_on_process_model."""
expected_permissions = sorted(
[
("/event-error-details/some-process-group:some-process-model/*", "read"),
@ -234,6 +219,8 @@ class TestAuthorizationService(BaseTest):
"delete",
),
("/process-instances/some-process-group:some-process-model/*", "read"),
("/process-model-natural-language/some-process-group:some-process-model/*", "create"),
("/process-model-publish/some-process-group:some-process-model/*", "create"),
("/process-models/some-process-group:some-process-model/*", "create"),
("/process-models/some-process-group:some-process-model/*", "delete"),
("/process-models/some-process-group:some-process-model/*", "read"),
@ -255,26 +242,28 @@ class TestAuthorizationService(BaseTest):
with_db_and_bpmn_file_cleanup: None,
) -> None:
"""Test_explode_permissions_start_on_process_model."""
expected_permissions = [
(
"/event-error-details/some-process-group:some-process-model/*",
"read",
),
(
"/logs/some-process-group:some-process-model/*",
"read",
),
("/logs/typeahead-filter-values/some-process-group:some-process-model/*", "read"),
(
"/process-data-file-download/some-process-group:some-process-model/*",
"read",
),
(
"/process-instances/for-me/some-process-group:some-process-model/*",
"read",
),
("/process-instances/some-process-group:some-process-model/*", "create"),
]
expected_permissions = sorted(
[
(
"/event-error-details/some-process-group:some-process-model/*",
"read",
),
(
"/logs/some-process-group:some-process-model/*",
"read",
),
("/logs/typeahead-filter-values/some-process-group:some-process-model/*", "read"),
(
"/process-data-file-download/some-process-group:some-process-model/*",
"read",
),
(
"/process-instances/for-me/some-process-group:some-process-model/*",
"read",
),
("/process-instances/some-process-group:some-process-model/*", "create"),
]
)
permissions_to_assign = AuthorizationService.explode_permissions(
"start", "PM:/some-process-group/some-process-model"
)
@ -287,32 +276,70 @@ class TestAuthorizationService(BaseTest):
client: FlaskClient,
with_db_and_bpmn_file_cleanup: None,
) -> None:
expected_permissions = [
("/active-users/*", "read"),
("/debug/version-info", "read"),
("/process-groups", "read"),
("/process-instances/find-by-id/*", "read"),
("/process-instances/for-me", "create"),
("/process-instances/report-metadata", "read"),
("/process-instances/reports/*", "create"),
("/process-instances/reports/*", "delete"),
("/process-instances/reports/*", "read"),
("/process-instances/reports/*", "update"),
("/process-models", "read"),
("/processes", "read"),
("/processes/callers", "read"),
("/service-tasks", "read"),
("/tasks/*", "create"),
("/tasks/*", "delete"),
("/tasks/*", "read"),
("/tasks/*", "update"),
("/user-groups/for-current-user", "read"),
("/users/exists/by-username", "create"),
]
expected_permissions = sorted(
[
("/active-users/*", "create"),
("/connector-proxy/typeahead/*", "read"),
("/debug/version-info", "read"),
("/process-groups", "read"),
("/process-instances/find-by-id/*", "read"),
("/process-instances/for-me", "create"),
("/process-instances/report-metadata", "read"),
("/process-instances/reports/*", "create"),
("/process-instances/reports/*", "delete"),
("/process-instances/reports/*", "read"),
("/process-instances/reports/*", "update"),
("/process-models", "read"),
("/processes", "read"),
("/processes/callers", "read"),
("/service-tasks", "read"),
("/tasks/*", "create"),
("/tasks/*", "delete"),
("/tasks/*", "read"),
("/tasks/*", "update"),
("/user-groups/for-current-user", "read"),
("/users/exists/by-username", "create"),
("/users/search", "read"),
]
)
permissions_to_assign = AuthorizationService.explode_permissions("all", "BASIC")
permissions_to_assign_tuples = sorted([(p.target_uri, p.permission) for p in permissions_to_assign])
assert permissions_to_assign_tuples == expected_permissions
def test_explode_permissions_elevated(
self,
app: Flask,
client: FlaskClient,
with_db_and_bpmn_file_cleanup: None,
) -> None:
expected_permissions = sorted(
[
("/authentications", "read"),
("/can-run-privileged-script/*", "create"),
("/debug/*", "create"),
("/messages", "read"),
("/messages/*", "create"),
("/process-instance-reset/*", "create"),
("/process-instance-resume/*", "create"),
("/process-instance-suspend/*", "create"),
("/process-instance-terminate/*", "create"),
("/process-instances/*", "create"),
("/process-instances/*", "delete"),
("/process-instances/*", "read"),
("/process-instances/*", "update"),
("/secrets/*", "create"),
("/secrets/*", "delete"),
("/secrets/*", "read"),
("/secrets/*", "update"),
("/send-event/*", "create"),
("/task-complete/*", "create"),
("/task-data/*", "update"),
]
)
permissions_to_assign = AuthorizationService.explode_permissions("all", "ELEVATED")
permissions_to_assign_tuples = sorted([(p.target_uri, p.permission) for p in permissions_to_assign])
assert permissions_to_assign_tuples == expected_permissions
def test_explode_permissions_all(
self,
app: Flask,
@ -387,7 +414,6 @@ class TestAuthorizationService(BaseTest):
client: FlaskClient,
with_db_and_bpmn_file_cleanup: None,
) -> None:
"""Test_can_refresh_permissions."""
user = self.find_or_create_user(username="user_one")
user_two = self.find_or_create_user(username="user_two")
admin_user = self.find_or_create_user(username="testadmin1")
@ -399,7 +425,7 @@ class TestAuthorizationService(BaseTest):
GroupService.find_or_create_group("group_three")
assert GroupModel.query.filter_by(identifier="group_three").first() is not None
group_info = [
group_info: list[GroupPermissionsDict] = [
{
"users": ["user_one", "user_two"],
"name": "group_one",
@ -410,14 +436,20 @@ class TestAuthorizationService(BaseTest):
"name": "group_three",
"permissions": [{"actions": ["create", "read"], "uri": "PG:hey2"}],
},
{
"users": [],
"name": "everybody",
"permissions": [{"actions": ["read"], "uri": "PG:hey2everybody"}],
},
]
AuthorizationService.refresh_permissions(group_info)
assert GroupModel.query.filter_by(identifier="group_two").first() is None
assert GroupModel.query.filter_by(identifier="group_one").first() is not None
self.assert_user_has_permission(admin_user, "create", "/anything-they-want")
self.assert_user_has_permission(admin_user, "create", "/v1.0/process-groups/whatever")
self.assert_user_has_permission(user, "read", "/v1.0/process-groups/hey")
self.assert_user_has_permission(user, "read", "/v1.0/process-groups/hey:yo")
self.assert_user_has_permission(user, "create", "/v1.0/process-groups/hey:yo")
self.assert_user_has_permission(user, "read", "/v1.0/process-groups/hey2everybody:yo")
self.assert_user_has_permission(user_two, "read", "/v1.0/process-groups/hey")
self.assert_user_has_permission(user_two, "read", "/v1.0/process-groups/hey:yo")
@ -445,7 +477,7 @@ class TestAuthorizationService(BaseTest):
self.assert_user_has_permission(user, "read", "/v1.0/process-groups/hey")
self.assert_user_has_permission(user, "read", "/v1.0/process-groups/hey:yo")
self.assert_user_has_permission(user, "create", "/v1.0/process-groups/hey:yo", expected_result=False)
self.assert_user_has_permission(admin_user, "create", "/anything-they-want")
self.assert_user_has_permission(admin_user, "create", "/v1.0/process-groups/whatever")
self.assert_user_has_permission(user_two, "read", "/v1.0/process-groups/hey", expected_result=False)
assert GroupModel.query.filter_by(identifier="group_three").first() is not None

View File

@ -2,8 +2,8 @@
from flask.app import Flask
from flask.testing import FlaskClient
from tests.spiffworkflow_backend.helpers.base_test import BaseTest
from tests.spiffworkflow_backend.helpers.test_data import load_test_spec
from spiffworkflow_backend.models.user import UserModel
from spiffworkflow_backend.services.process_instance_processor import (
ProcessInstanceProcessor,
)
@ -18,29 +18,17 @@ class TestDotNotation(BaseTest):
app: Flask,
client: FlaskClient,
with_db_and_bpmn_file_cleanup: None,
with_super_admin_user: UserModel,
) -> None:
"""Test_form_data_conversion_to_dot_dict."""
process_group_id = "dot_notation_group"
process_model_id = "test_dot_notation"
process_model_id = "dot_notation_group/test_dot_notation"
bpmn_file_name = "diagram.bpmn"
bpmn_file_location = "dot_notation"
process_model_identifier = self.create_group_and_model_with_bpmn(
client,
with_super_admin_user,
process_group_id=process_group_id,
process_model = load_test_spec(
process_model_id=process_model_id,
bpmn_file_name=bpmn_file_name,
bpmn_file_location=bpmn_file_location,
process_model_source_directory=bpmn_file_location,
)
headers = self.logged_in_headers(with_super_admin_user)
response = self.create_process_instance_from_process_model_id_with_api(
client, process_model_identifier, headers
)
process_instance_id = response.json["id"]
process_instance = ProcessInstanceService().get_process_instance(process_instance_id)
process_instance = self.create_process_instance_from_process_model(process_model)
processor = ProcessInstanceProcessor(process_instance)
processor.do_engine_steps(save=True)
human_task = process_instance.human_tasks[0]
@ -53,7 +41,9 @@ class TestDotNotation(BaseTest):
"invoice.invoiceAmount": "1000.00",
"invoice.dueDate": "09/30/2022",
}
ProcessInstanceService.complete_form_task(processor, user_task, form_data, with_super_admin_user, human_task)
ProcessInstanceService.complete_form_task(
processor, user_task, form_data, process_instance.process_initiator, human_task
)
expected = {
"contibutorName": "Elizabeth",

View File

@ -9,14 +9,10 @@ from spiffworkflow_backend.models.message_instance import MessageInstanceModel
from spiffworkflow_backend.models.process_instance import ProcessInstanceModel
from spiffworkflow_backend.models.process_instance import ProcessInstanceStatus
from spiffworkflow_backend.models.process_model import ProcessModelInfo
from spiffworkflow_backend.models.user import UserModel
from spiffworkflow_backend.services.error_handling_service import ErrorHandlingService
from spiffworkflow_backend.services.process_instance_processor import (
ProcessInstanceProcessor,
)
from spiffworkflow_backend.services.process_instance_service import (
ProcessInstanceService,
)
from spiffworkflow_backend.services.process_model_service import ProcessModelService
from spiffworkflow_backend.services.workflow_execution_service import WorkflowExecutionServiceError
@ -27,12 +23,8 @@ class TestErrorHandlingService(BaseTest):
Like it can fire off BPMN messages in case a BPMN Task is waiting for that message.
"""
def run_process_model_and_handle_error(
self, process_model: ProcessModelInfo, user: UserModel
) -> ProcessInstanceModel:
process_instance = ProcessInstanceService.create_process_instance_from_process_model_identifier(
process_model.id, user
)
def run_process_model_and_handle_error(self, process_model: ProcessModelInfo) -> ProcessInstanceModel:
process_instance = self.create_process_instance_from_process_model(process_model)
pip = ProcessInstanceProcessor(process_instance)
with pytest.raises(WorkflowExecutionServiceError) as e:
pip.do_engine_steps(save=True)
@ -44,7 +36,6 @@ class TestErrorHandlingService(BaseTest):
app: Flask,
client: FlaskClient,
with_db_and_bpmn_file_cleanup: None,
with_super_admin_user: UserModel,
) -> None:
"""Process Model in DB marked as suspended when error occurs."""
process_model = load_test_spec(
@ -54,13 +45,13 @@ class TestErrorHandlingService(BaseTest):
)
# Process instance should be marked as errored by default.
process_instance = self.run_process_model_and_handle_error(process_model, with_super_admin_user)
process_instance = self.run_process_model_and_handle_error(process_model)
assert ProcessInstanceStatus.error.value == process_instance.status
# If process model should be suspended on error, then that is what should happen.
process_model.fault_or_suspend_on_exception = "suspend"
ProcessModelService.save_process_model(process_model)
process_instance = self.run_process_model_and_handle_error(process_model, with_super_admin_user)
process_instance = self.run_process_model_and_handle_error(process_model)
assert ProcessInstanceStatus.suspended.value == process_instance.status
def test_error_sends_bpmn_message(
@ -68,7 +59,6 @@ class TestErrorHandlingService(BaseTest):
app: Flask,
client: FlaskClient,
with_db_and_bpmn_file_cleanup: None,
with_super_admin_user: UserModel,
) -> None:
"""Real BPMN Messages should get generated and processes should fire off and complete."""
process_model = load_test_spec(
@ -85,7 +75,7 @@ class TestErrorHandlingService(BaseTest):
process_model.exception_notification_addresses = ["dan@ILoveToReadErrorsInMyEmails.com"]
ProcessModelService.save_process_model(process_model)
# kick off the process and assure it got marked as an error.
process_instance = self.run_process_model_and_handle_error(process_model, with_super_admin_user)
process_instance = self.run_process_model_and_handle_error(process_model)
assert ProcessInstanceStatus.error.value == process_instance.status
# Both send and receive messages should be generated, matched

View File

@ -3,44 +3,33 @@ import pytest
from flask import Flask
from flask.testing import FlaskClient
from tests.spiffworkflow_backend.helpers.base_test import BaseTest
from tests.spiffworkflow_backend.helpers.test_data import load_test_spec
from spiffworkflow_backend.models.db import db
from spiffworkflow_backend.models.message_instance import MessageInstanceModel
from spiffworkflow_backend.models.user import UserModel
from spiffworkflow_backend.services.process_model_service import ProcessModelService
from spiffworkflow_backend.models.process_model import ProcessModelInfo
class TestMessageInstance(BaseTest):
"""TestMessageInstance."""
def setup_message_tests(self, client: FlaskClient, user: UserModel) -> str:
"""Setup_message_tests."""
process_group_id = "test_group"
process_model_id = "hello_world"
def setup_message_tests(self, client: FlaskClient) -> ProcessModelInfo:
process_model_id = "testk_group/hello_world"
bpmn_file_name = "hello_world.bpmn"
bpmn_file_location = "hello_world"
process_model_identifier = self.create_group_and_model_with_bpmn(
client,
user,
process_group_id=process_group_id,
process_model = load_test_spec(
process_model_id=process_model_id,
bpmn_file_name=bpmn_file_name,
bpmn_file_location=bpmn_file_location,
process_model_source_directory=bpmn_file_location,
)
return process_model_identifier
return process_model
def test_can_create_message_instance(
self,
app: Flask,
client: FlaskClient,
with_db_and_bpmn_file_cleanup: None,
with_super_admin_user: UserModel,
) -> None:
"""Test_can_create_message_instance."""
message_name = "Message Model One"
process_model_identifier = self.setup_message_tests(client, with_super_admin_user)
process_model = ProcessModelService.get_process_model(process_model_id=process_model_identifier)
process_model = self.setup_message_tests(client)
process_instance = self.create_process_instance_from_process_model(process_model, "waiting")
queued_message = MessageInstanceModel(
@ -64,12 +53,9 @@ class TestMessageInstance(BaseTest):
app: Flask,
client: FlaskClient,
with_db_and_bpmn_file_cleanup: None,
with_super_admin_user: UserModel,
) -> None:
"""Test_cannot_set_invalid_status."""
message_name = "message_model_one"
process_model_identifier = self.setup_message_tests(client, with_super_admin_user)
process_model = ProcessModelService.get_process_model(process_model_id=process_model_identifier)
process_model = self.setup_message_tests(client)
process_instance = self.create_process_instance_from_process_model(process_model, "waiting")
with pytest.raises(ValueError) as exception:
@ -100,13 +86,9 @@ class TestMessageInstance(BaseTest):
app: Flask,
client: FlaskClient,
with_db_and_bpmn_file_cleanup: None,
with_super_admin_user: UserModel,
) -> None:
"""Test_cannot_set_invalid_message_type."""
message_name = "message_model_one"
process_model_identifier = self.setup_message_tests(client, with_super_admin_user)
process_model = ProcessModelService.get_process_model(process_model_id=process_model_identifier)
process_model = self.setup_message_tests(client)
process_instance = self.create_process_instance_from_process_model(process_model, "waiting")
with pytest.raises(ValueError) as exception:
@ -136,13 +118,9 @@ class TestMessageInstance(BaseTest):
app: Flask,
client: FlaskClient,
with_db_and_bpmn_file_cleanup: None,
with_super_admin_user: UserModel,
) -> None:
"""Test_force_failure_cause_if_status_is_failure."""
message_name = "message_model_one"
process_model_identifier = self.setup_message_tests(client, with_super_admin_user)
process_model = ProcessModelService.get_process_model(process_model_id=process_model_identifier)
process_model = self.setup_message_tests(client)
process_instance = self.create_process_instance_from_process_model(process_model, "waiting")
queued_message = MessageInstanceModel(

View File

@ -1,15 +1,10 @@
"""Test_message_service."""
import pytest
from flask import Flask
from flask.testing import FlaskClient
from tests.spiffworkflow_backend.helpers.base_test import BaseTest
from tests.spiffworkflow_backend.helpers.test_data import load_test_spec
from spiffworkflow_backend.exceptions.api_error import ApiError
from spiffworkflow_backend.models.message_instance import MessageInstanceModel
from spiffworkflow_backend.models.process_instance import ProcessInstanceModel
from spiffworkflow_backend.models.user import UserModel
from spiffworkflow_backend.routes.messages_controller import message_send
from spiffworkflow_backend.services.message_service import MessageService
from spiffworkflow_backend.services.process_instance_processor import (
ProcessInstanceProcessor,
@ -20,66 +15,11 @@ from spiffworkflow_backend.services.process_instance_service import (
class TestMessageService(BaseTest):
"""TestMessageService."""
def test_message_from_api_into_running_process(
self,
app: Flask,
client: FlaskClient,
with_db_and_bpmn_file_cleanup: None,
with_super_admin_user: UserModel,
) -> None:
"""Test sending a message to a running process via the API.
This example workflow will send a message called 'request_approval' and then wait for a response message
of 'approval_result'. This test assures that it will fire the message with the correct correlation properties
and will respond only to a message called 'approval_result' that has the matching correlation properties,
as sent by an API Call.
"""
self.payload = {
"customer_id": "Sartography",
"po_number": 1001,
"description": "We built a new feature for messages!",
"amount": "100.00",
}
self.start_sender_process(client, with_super_admin_user, "test_from_api")
self.assure_a_message_was_sent()
self.assure_there_is_a_process_waiting_on_a_message()
# Make an API call to the service endpoint, but use the wrong po number
with pytest.raises(ApiError):
message_send("Approval Result", {"payload": {"po_number": 5001}})
# Should return an error when making an API call for right po number, wrong client
with pytest.raises(ApiError):
message_send(
"Approval Result",
{"payload": {"po_number": 1001, "customer_id": "jon"}},
)
# No error when calling with the correct parameters
message_send(
"Approval Result",
{"payload": {"po_number": 1001, "customer_id": "Sartography"}},
)
# There is no longer a waiting message
waiting_messages = (
MessageInstanceModel.query.filter_by(message_type="receive")
.filter_by(status="ready")
.filter_by(process_instance_id=self.process_instance.id)
.all()
)
assert len(waiting_messages) == 0
# The process has completed
assert self.process_instance.status == "complete"
def test_single_conversation_between_two_processes(
self,
app: Flask,
client: FlaskClient,
with_db_and_bpmn_file_cleanup: None,
with_super_admin_user: UserModel,
) -> None:
"""Test messages between two different running processes using a single conversation.
@ -87,7 +27,7 @@ class TestMessageService(BaseTest):
we have two process instances that are communicating with each other using one conversation about an
Invoice whose details are defined in the following message payload
"""
self.payload = {
payload = {
"customer_id": "Sartography",
"po_number": 1001,
"description": "We built a new feature for messages!",
@ -104,8 +44,8 @@ class TestMessageService(BaseTest):
)
# Now start the main process
self.start_sender_process(client, with_super_admin_user, "test_between_processes")
self.assure_a_message_was_sent()
process_instance = self.start_sender_process(client, payload, "test_between_processes")
self.assure_a_message_was_sent(process_instance, payload)
# This is typically called in a background cron process, so we will manually call it
# here in the tests
@ -113,7 +53,7 @@ class TestMessageService(BaseTest):
MessageService.correlate_all_message_instances()
# The sender process should still be waiting on a message to be returned to it ...
self.assure_there_is_a_process_waiting_on_a_message()
self.assure_there_is_a_process_waiting_on_a_message(process_instance)
# The second time we call ths process_message_isntances (again it would typically be running on cron)
# it will deliver the message that was sent from the receiver back to the original sender.
@ -125,7 +65,7 @@ class TestMessageService(BaseTest):
waiting_messages = (
MessageInstanceModel.query.filter_by(message_type="receive")
.filter_by(status="ready")
.filter_by(process_instance_id=self.process_instance.id)
.filter_by(process_instance_id=process_instance.id)
.order_by(MessageInstanceModel.id)
.all()
)
@ -136,7 +76,7 @@ class TestMessageService(BaseTest):
assert len(waiting_messages) == 0
# The message sender process is complete
assert self.process_instance.status == "complete"
assert process_instance.status == "complete"
# The message receiver process is also complete
message_receiver_process = (
@ -146,83 +86,14 @@ class TestMessageService(BaseTest):
)
assert message_receiver_process.status == "complete"
def start_sender_process(
self,
client: FlaskClient,
with_super_admin_user: UserModel,
group_name: str = "test_group",
) -> None:
process_group_id = group_name
self.create_process_group_with_api(client, with_super_admin_user, process_group_id, process_group_id)
process_model = load_test_spec(
"test_group/message",
process_model_source_directory="message_send_one_conversation",
bpmn_file_name="message_sender.bpmn", # Slightly misnamed, it sends and receives
)
self.process_instance = ProcessInstanceService.create_process_instance_from_process_model_identifier(
process_model.id,
with_super_admin_user,
)
processor_send_receive = ProcessInstanceProcessor(self.process_instance)
processor_send_receive.do_engine_steps(save=True)
task = processor_send_receive.get_all_user_tasks()[0]
human_task = self.process_instance.active_human_tasks[0]
ProcessInstanceService.complete_form_task(
processor_send_receive,
task,
self.payload,
with_super_admin_user,
human_task,
)
processor_send_receive.save()
def assure_a_message_was_sent(self) -> None:
# There should be one new send message for the given process instance.
send_messages = (
MessageInstanceModel.query.filter_by(message_type="send")
.filter_by(process_instance_id=self.process_instance.id)
.order_by(MessageInstanceModel.id)
.all()
)
assert len(send_messages) == 1
send_message = send_messages[0]
assert send_message.payload == self.payload, "The send message should match up with the payload"
assert send_message.name == "Request Approval"
assert send_message.status == "ready"
def assure_there_is_a_process_waiting_on_a_message(self) -> None:
# There should be one new send message for the given process instance.
waiting_messages = (
MessageInstanceModel.query.filter_by(message_type="receive")
.filter_by(status="ready")
.filter_by(process_instance_id=self.process_instance.id)
.order_by(MessageInstanceModel.id)
.all()
)
assert len(waiting_messages) == 1
waiting_message = waiting_messages[0]
self.assure_correlation_properties_are_right(waiting_message)
def assure_correlation_properties_are_right(self, message: MessageInstanceModel) -> None:
# Correlation Properties should match up
po_curr = next(c for c in message.correlation_rules if c.name == "po_number")
customer_curr = next(c for c in message.correlation_rules if c.name == "customer_id")
assert po_curr is not None
assert customer_curr is not None
def test_can_send_message_to_multiple_process_models(
self,
app: Flask,
client: FlaskClient,
with_db_and_bpmn_file_cleanup: None,
with_super_admin_user: UserModel,
) -> None:
"""Test_can_send_message_to_multiple_process_models."""
process_group_id = "test_group_multi"
self.create_process_group_with_api(client, with_super_admin_user, process_group_id, process_group_id)
# self.create_process_group_with_api(client, with_super_admin_user, process_group_id, process_group_id)
process_model_sender = load_test_spec(
"test_group/message_sender",

View File

@ -1,4 +1,3 @@
"""Test Permissions."""
from flask.app import Flask
from flask.testing import FlaskClient
from tests.spiffworkflow_backend.helpers.base_test import BaseTest
@ -9,7 +8,6 @@ from spiffworkflow_backend.models.group import GroupModel
from spiffworkflow_backend.models.permission_assignment import PermissionAssignmentModel
from spiffworkflow_backend.models.permission_target import PermissionTargetModel
from spiffworkflow_backend.models.principal import PrincipalModel
from spiffworkflow_backend.models.user import UserModel
from spiffworkflow_backend.services.user_service import UserService
@ -22,18 +20,13 @@ from spiffworkflow_backend.services.user_service import UserService
# * super-admins users maybe conventionally get the user role as well
# finance-admin role allows create, update, and delete of all models under the finance group
class TestPermissions(BaseTest):
"""TestPermissions."""
def test_user_can_be_given_permission_to_administer_process_group(
self,
app: Flask,
client: FlaskClient,
with_db_and_bpmn_file_cleanup: None,
with_super_admin_user: UserModel,
) -> None:
"""Test_user_can_be_given_permission_to_administer_process_group."""
process_group_id = "group-a"
self.create_process_group_with_api(client, with_super_admin_user, process_group_id, process_group_id)
load_test_spec(
"group-a/timers_intermediate_catch_event",
bpmn_file_name="timers_intermediate_catch_event.bpmn",
@ -58,7 +51,6 @@ class TestPermissions(BaseTest):
def test_group_a_admin_needs_to_stay_away_from_group_b(
self, app: Flask, with_db_and_bpmn_file_cleanup: None
) -> None:
"""Test_group_a_admin_needs_to_stay_away_from_group_b."""
process_group_ids = ["group-a", "group-b"]
process_group_a_id = process_group_ids[0]
process_group_b_id = process_group_ids[1]
@ -87,7 +79,6 @@ class TestPermissions(BaseTest):
self.assert_user_has_permission(group_a_admin, "update", f"/{process_group_b_id}", expected_result=False)
def test_user_can_be_granted_access_through_a_group(self, app: Flask, with_db_and_bpmn_file_cleanup: None) -> None:
"""Test_user_can_be_granted_access_through_a_group."""
process_group_ids = ["group-a", "group-b"]
process_group_a_id = process_group_ids[0]
for process_group_id in process_group_ids:
@ -125,7 +116,6 @@ class TestPermissions(BaseTest):
def test_user_can_be_read_models_with_global_permission(
self, app: Flask, with_db_and_bpmn_file_cleanup: None
) -> None:
"""Test_user_can_be_read_models_with_global_permission."""
process_group_ids = ["group-a", "group-b"]
process_group_a_id = process_group_ids[0]
process_group_b_id = process_group_ids[1]
@ -156,7 +146,6 @@ class TestPermissions(BaseTest):
def test_user_can_access_base_path_when_given_wildcard_permission(
self, app: Flask, with_db_and_bpmn_file_cleanup: None
) -> None:
"""Test_user_can_access_base_path_when_given_wildcard_permission."""
group_a_admin = self.find_or_create_user()
permission_target = PermissionTargetModel(uri="/process-models/%")

Some files were not shown because too many files have changed in this diff Show More