Merge commit '883e65384f1e36a0310f4fdcff57ac486890cd5e' as 'spiffworkflow-backend'

Jon Herron 2022-10-12 10:22:22 -04:00
commit 66e3f52c24
212 changed files with 40908 additions and 0 deletions

View File

@@ -0,0 +1,12 @@
{
  "_template": "gh:cjolowicz/cookiecutter-hypermodern-python",
  "author": "Sartography",
  "development_status": "Development Status :: 1 - Planning",
  "email": "sartography@users.noreply.github.com",
  "friendly_name": "Spiffworkflow Backend",
  "github_user": "sartography",
  "license": "MIT",
  "package_name": "spiffworkflow_backend",
  "project_name": "spiffworkflow-backend",
  "version": "0.0.1"
}
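This file is cookiecutter's record of the answers used to generate the project. Purely as an illustration, regenerating a project from the same template would look something like the following (the command line is an assumption based on cookiecutter's documented CLI, not part of this commit):

$ pipx install cookiecutter
$ cookiecutter --no-input gh:cjolowicz/cookiecutter-hypermodern-python \
    project_name=spiffworkflow-backend \
    package_name=spiffworkflow_backend \
    friendly_name="Spiffworkflow Backend" \
    author=Sartography \
    license=MIT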

View File

@@ -0,0 +1,2 @@
[darglint]
strictness = long

View File

@@ -0,0 +1,29 @@
[flake8]
select = B,B9,C,D,DAR,E,F,N,RST,S,W
ignore = E203,E501,RST201,RST203,RST301,W503,S410,S320
max-line-length = 120
max-complexity = 30
docstring-convention = google
rst-roles = class,const,func,meth,mod,ref
rst-directives = deprecated
per-file-ignores =
    # prefer naming tests descriptively rather than forcing comments
    tests/*:S101,D103
    bin/keycloak_test_server.py:B950,D
    conftest.py:S105
    wsgi.py:S104
    # allow writing to /tmp for throwaway script output
    bin/get_bpmn_json_for_process_instance:S108
    # the exclude=./migrations option doesn't seem to work with pre-commit
    # migrations are autogenerated from "flask db migration" so ignore them
    migrations/*:D
    src/spiffworkflow_backend/config/testing.py:S105
    src/spiffworkflow_backend/load_database_models.py:F401
    # this file overwrites methods from the logging library so we can't change them,
    # and ignore long comment lines
    src/spiffworkflow_backend/services/logging_service.py:N802,B950

1 spiffworkflow-backend/.gitattributes vendored Normal file
View File

@@ -0,0 +1 @@
* text=auto eol=lf

View File

@@ -0,0 +1,18 @@
version: 2
updates:
  - package-ecosystem: github-actions
    directory: "/"
    schedule:
      interval: daily
  - package-ecosystem: pip
    directory: "/.github/workflows"
    schedule:
      interval: daily
  - package-ecosystem: pip
    directory: "/docs"
    schedule:
      interval: daily
  - package-ecosystem: pip
    directory: "/"
    schedule:
      interval: daily

View File

@@ -0,0 +1,66 @@
---
# Label names are important as they are used by Release Drafter to decide
# where to record them in the changelog or whether to skip them.
#
# The repository labels will be automatically configured using this file and
# the GitHub Action https://github.com/marketplace/actions/github-labeler.
- name: breaking
  description: Breaking Changes
  color: bfd4f2
- name: bug
  description: Something isn't working
  color: d73a4a
- name: build
  description: Build System and Dependencies
  color: bfdadc
- name: ci
  description: Continuous Integration
  color: 4a97d6
- name: dependencies
  description: Pull requests that update a dependency file
  color: 0366d6
- name: documentation
  description: Improvements or additions to documentation
  color: 0075ca
- name: duplicate
  description: This issue or pull request already exists
  color: cfd3d7
- name: enhancement
  description: New feature or request
  color: a2eeef
- name: github_actions
  description: Pull requests that update Github_actions code
  color: "000000"
- name: good first issue
  description: Good for newcomers
  color: 7057ff
- name: help wanted
  description: Extra attention is needed
  color: 008672
- name: invalid
  description: This doesn't seem right
  color: e4e669
- name: performance
  description: Performance
  color: "016175"
- name: python
  description: Pull requests that update Python code
  color: 2b67c6
- name: question
  description: Further information is requested
  color: d876e3
- name: refactoring
  description: Refactoring
  color: ef67c4
- name: removal
  description: Removals and Deprecations
  color: 9ae7ea
- name: style
  description: Style
  color: c120e5
- name: testing
  description: Testing
  color: b1fc6f
- name: wontfix
  description: This will not be worked on
  color: ffffff

View File

@@ -0,0 +1,29 @@
categories:
  - title: ":boom: Breaking Changes"
    label: "breaking"
  - title: ":rocket: Features"
    label: "enhancement"
  - title: ":fire: Removals and Deprecations"
    label: "removal"
  - title: ":beetle: Fixes"
    label: "bug"
  - title: ":racehorse: Performance"
    label: "performance"
  - title: ":rotating_light: Testing"
    label: "testing"
  - title: ":construction_worker: Continuous Integration"
    label: "ci"
  - title: ":books: Documentation"
    label: "documentation"
  - title: ":hammer: Refactoring"
    label: "refactoring"
  - title: ":lipstick: Style"
    label: "style"
  - title: ":package: Dependencies"
    labels:
      - "dependencies"
      - "build"
template: |
  ## Changes

  $CHANGES

View File

@@ -0,0 +1,72 @@
name: Dependabot auto-merge
on:
  workflow_run:
    workflows: ["Tests"]
    # "completed" does not mean the Tests workflow succeeded; see the check on
    # github.event.workflow_run.conclusion below
    types:
      - completed
# workflow_call is used to indicate that a workflow can be called by another workflow.
# When a workflow is triggered with the workflow_call event, the event payload in the
# called workflow is the same event payload from the calling workflow. For more
# information, see "Reusing workflows."
# https://docs.github.com/en/actions/using-workflows/events-that-trigger-workflows#pull_request
# maybe hook into this instead of workflow_run:
# on:
#   pull_request:
#   pull_request_target:
#     types: [labeled]
permissions:
  contents: write
jobs:
  # print the context for debugging in case a job gets skipped
  printJob:
    name: Print event
    runs-on: ubuntu-latest
    steps:
      - name: Dump GitHub context
        env:
          GITHUB_CONTEXT: ${{ toJson(github) }}
        run: |
          echo "$GITHUB_CONTEXT"
  dependabot:
    runs-on: ubuntu-latest
    if: ${{ github.actor == 'dependabot[bot]' && github.event.workflow_run.event == 'pull_request' && github.event.workflow_run.conclusion == 'success' }}
    steps:
      - name: Development Code
        uses: actions/checkout@v3
      ###### GET PR NUMBER
      # we saved the pr_number in tests.yml. fetch it so we can merge the correct PR.
      # https://docs.github.com/en/actions/using-workflows/events-that-trigger-workflows#workflow_run
      - name: "Download artifact"
        uses: actions/github-script@v6
        with:
          script: |
            let allArtifacts = await github.rest.actions.listWorkflowRunArtifacts({
              owner: context.repo.owner,
              repo: context.repo.repo,
              run_id: context.payload.workflow_run.id,
            });
            let matchArtifact = allArtifacts.data.artifacts.filter((artifact) => {
              return artifact.name == "pr_number"
            })[0];
            let download = await github.rest.actions.downloadArtifact({
              owner: context.repo.owner,
              repo: context.repo.repo,
              artifact_id: matchArtifact.id,
              archive_format: 'zip',
            });
            let fs = require('fs');
            fs.writeFileSync(`${process.env.GITHUB_WORKSPACE}/pr_number.zip`, Buffer.from(download.data));
      - name: "Unzip artifact"
        run: unzip pr_number.zip
      ###########
      - name: print pr number
        run: cat pr_number
      - name: actually merge it
        run: gh pr merge --auto --merge "$(cat pr_number)"
        env:
          GITHUB_TOKEN: ${{secrets.GITHUB_TOKEN}}

View File

@@ -0,0 +1,5 @@
pip==22.2.2
nox==2022.8.7
nox-poetry==1.0.1
poetry==1.2.1
virtualenv==20.16.5

View File

@@ -0,0 +1,18 @@
name: Labeler
on:
  push:
    branches:
      - main
jobs:
  labeler:
    runs-on: ubuntu-latest
    steps:
      - name: Check out the repository
        uses: actions/checkout@v3.0.2
      - name: Run Labeler
        uses: crazy-max/ghaction-github-labeler@v3.1.1
        with:
          skip-delete: true

View File

@@ -0,0 +1,260 @@
name: Tests
on:
  - push
  - pull_request
jobs:
  tests:
    name: ${{ matrix.session }} ${{ matrix.python }} / ${{ matrix.os }} ${{ matrix.database }}
    runs-on: ${{ matrix.os }}
    strategy:
      fail-fast: false
      matrix:
        include:
          - { python: "3.10", os: "ubuntu-latest", session: "pre-commit" }
          - { python: "3.10", os: "ubuntu-latest", session: "safety" }
          - { python: "3.10", os: "ubuntu-latest", session: "mypy" }
          - { python: "3.9", os: "ubuntu-latest", session: "mypy" }
          - {
              python: "3.10",
              os: "ubuntu-latest",
              session: "tests",
              database: "mysql",
            }
          - {
              python: "3.10",
              os: "ubuntu-latest",
              session: "tests",
              database: "postgres",
            }
          - {
              python: "3.10",
              os: "ubuntu-latest",
              session: "tests",
              database: "sqlite",
            }
          - {
              python: "3.9",
              os: "ubuntu-latest",
              session: "tests",
              database: "sqlite",
            }
          - {
              python: "3.10",
              os: "windows-latest",
              session: "tests",
              database: "sqlite",
            }
          - {
              python: "3.10",
              os: "macos-latest",
              session: "tests",
              database: "sqlite",
            }
          - {
              # typeguard 2.13.3 is broken with TypedDict in 3.10.
              # probably the next release fixes it.
              # https://github.com/agronholm/typeguard/issues/242
              python: "3.9",
              os: "ubuntu-latest",
              session: "typeguard",
              database: "sqlite",
            }
          - { python: "3.10", os: "ubuntu-latest", session: "xdoctest" }
          - { python: "3.10", os: "ubuntu-latest", session: "docs-build" }
    env:
      NOXSESSION: ${{ matrix.session }}
      SPIFF_DATABASE_TYPE: ${{ matrix.database }}
      FORCE_COLOR: "1"
      PRE_COMMIT_COLOR: "always"
      DB_PASSWORD: password
      FLASK_SESSION_SECRET_KEY: super_secret_key
    steps:
      - name: Check out the repository
        uses: actions/checkout@v3.0.2
      - name: Set up Python ${{ matrix.python }}
        uses: actions/setup-python@v4.2.0
        with:
          python-version: ${{ matrix.python }}
      - name: Upgrade pip
        run: |
          pip install --constraint=.github/workflows/constraints.txt pip
          pip --version
      - name: Upgrade pip in virtual environments
        shell: python
        run: |
          import os
          import pip
          with open(os.environ["GITHUB_ENV"], mode="a") as io:
              print(f"VIRTUALENV_PIP={pip.__version__}", file=io)
      - name: Install Poetry
        run: |
          pipx install --pip-args=--constraint=.github/workflows/constraints.txt poetry
          poetry --version
      - name: Install Nox
        run: |
          pipx install --pip-args=--constraint=.github/workflows/constraints.txt nox
          pipx inject --pip-args=--constraint=.github/workflows/constraints.txt nox nox-poetry
          nox --version
      - name: Compute pre-commit cache key
        if: matrix.session == 'pre-commit'
        id: pre-commit-cache
        shell: python
        run: |
          import hashlib
          import sys
          python = "py{}.{}".format(*sys.version_info[:2])
          payload = sys.version.encode() + sys.executable.encode()
          digest = hashlib.sha256(payload).hexdigest()
          result = "${{ runner.os }}-{}-{}-pre-commit".format(python, digest[:8])
          print("::set-output name=result::{}".format(result))
      - name: Restore pre-commit cache
        uses: actions/cache@v3.0.10
        if: matrix.session == 'pre-commit'
        with:
          path: ~/.cache/pre-commit
          key: ${{ steps.pre-commit-cache.outputs.result }}-${{ hashFiles('.pre-commit-config.yaml') }}
          restore-keys: |
            ${{ steps.pre-commit-cache.outputs.result }}-
      - name: Setup Mysql
        uses: mirromutth/mysql-action@v1.1
        with:
          host port: 3306
          container port: 3306
          mysql version: "8.0"
          mysql database: "spiffworkflow_backend_testing"
          mysql root password: password
        if: matrix.database == 'mysql'
      - name: Setup Postgres
        run: docker run --name postgres-spiff -p 5432:5432 -e POSTGRES_PASSWORD=spiffworkflow_backend -e POSTGRES_USER=spiffworkflow_backend -e POSTGRES_DB=spiffworkflow_backend_testing -d postgres
        if: matrix.database == 'postgres'
      - name: Run Nox
        run: |
          nox --force-color --python=${{ matrix.python }}
      - name: Upload coverage data
        # pin to upload coverage from only one matrix entry, otherwise coverage gets confused later
        if: always() && matrix.session == 'tests' && matrix.python == '3.10' && matrix.os == 'ubuntu-latest'
        uses: "actions/upload-artifact@v3.0.0"
        with:
          name: coverage-data
          path: ".coverage.*"
      - name: Upload documentation
        if: matrix.session == 'docs-build'
        uses: actions/upload-artifact@v3.0.0
        with:
          name: docs
          path: docs/_build
      - name: Upload logs
        if: failure() && matrix.session == 'tests'
        uses: "actions/upload-artifact@v3.0.0"
        with:
          name: logs-${{matrix.python}}-${{matrix.os}}-${{matrix.database}}
          path: "./log/*.log"
  check_docker_start_script:
    runs-on: ubuntu-latest
    steps:
      - name: Check out the repository
        uses: actions/checkout@v3.0.2
        with:
          # Disabling shallow clone is recommended for improving relevancy of reporting in sonarcloud
          fetch-depth: 0
      - name: start_backend
        run: ./bin/build_and_run_with_docker_compose
        timeout-minutes: 20
        env:
          SPIFFWORKFLOW_BACKEND_LOAD_FIXTURE_DATA: "true"
      - name: wait_for_backend
        run: ./bin/wait_for_server_to_be_up 5
  coverage:
    runs-on: ubuntu-latest
    needs: tests
    steps:
      - name: Check out the repository
        uses: actions/checkout@v3.0.2
        with:
          # Disabling shallow clone is recommended for improving relevancy of reporting in sonarcloud
          fetch-depth: 0
      - name: Set up Python
        uses: actions/setup-python@v4.2.0
        with:
          python-version: "3.10"
      - name: Upgrade pip
        run: |
          pip install --constraint=.github/workflows/constraints.txt pip
          pip --version
      - name: Install Poetry
        run: |
          pipx install --pip-args=--constraint=.github/workflows/constraints.txt poetry
          poetry --version
      - name: Install Nox
        run: |
          pipx install --pip-args=--constraint=.github/workflows/constraints.txt nox
          pipx inject --pip-args=--constraint=.github/workflows/constraints.txt nox nox-poetry
          nox --version
      - name: Download coverage data
        uses: actions/download-artifact@v3.0.0
        with:
          name: coverage-data
      - name: Combine coverage data and display human readable report
        run: |
          find . -name \*.pyc -delete
          nox --force-color --session=coverage
      - name: Create coverage report
        run: |
          nox --force-color --session=coverage -- xml
      - name: Upload coverage report
        uses: codecov/codecov-action@v3.1.0
      - name: SonarCloud Scan
        uses: sonarsource/sonarcloud-github-action@master
        # thought about just skipping dependabot
        # if: ${{ github.actor != 'dependabot[bot]' }}
        # but figured skipping all pull requests seems better, since none of them will have access to sonarcloud.
        # however, when only pull requests are skipped, the build associated with "Triggered via push" is also
        # associated with the pull request and also fails hitting sonarcloud
        # if: ${{ github.event_name != 'pull_request' }}
        # so just skip everything but main
        if: github.ref_name == 'main'
        env:
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
          SONAR_TOKEN: ${{ secrets.SONAR_TOKEN }}
      # the part about saving the PR number and then using it from auto-merge-dependabot-prs is from:
      # https://docs.github.com/en/actions/using-workflows/events-that-trigger-workflows#workflow_run
      - name: Save PR number
        if: ${{ github.event_name == 'pull_request' }}
        env:
          PR_NUMBER: ${{ github.event.number }}
        run: |
          mkdir -p ./pr
          echo "$PR_NUMBER" > ./pr/pr_number
      - uses: actions/upload-artifact@v3
        with:
          name: pr_number
          path: pr/

21 spiffworkflow-backend/.gitignore vendored Normal file
View File

@@ -0,0 +1,21 @@
.mypy_cache/
/.idea/
/.coverage
/.coverage.*
/.nox/
/.python-version
/.pytype/
/dist/
/docs/_build/
/src/*.egg-info/
/src/instance
__pycache__/
*.sqlite3
node_modules
/pyrightconfig.json
/tests/files/tickets.csv
/log/*.log
/tests/spiffworkflow_backend/files
/bin/import_secrets.py
/src/spiffworkflow_backend/config/secrets.py
_null-ls_*

View File

@@ -0,0 +1,63 @@
repos:
  - repo: local
    hooks:
      - id: black
        name: black
        entry: black
        language: system
        types: [python]
        require_serial: true
        exclude: ^migrations/
      - id: check-added-large-files
        name: Check for added large files
        entry: check-added-large-files
        language: system
      - id: check-toml
        name: Check Toml
        entry: check-toml
        language: system
        types: [toml]
      - id: check-yaml
        name: Check Yaml
        entry: check-yaml
        language: system
        types: [yaml]
      - id: end-of-file-fixer
        name: Fix End of Files
        entry: end-of-file-fixer
        language: system
        types: [text]
        stages: [commit, push, manual]
      - id: flake8
        name: flake8
        entry: flake8
        language: system
        types: [python]
        require_serial: true
        exclude: ^migrations/
      - id: pyupgrade
        name: pyupgrade
        description: Automatically upgrade syntax for newer versions.
        entry: pyupgrade
        language: system
        types: [python]
        args: [--py37-plus]
      - id: reorder-python-imports
        name: Reorder python imports
        entry: reorder-python-imports
        language: system
        types: [python]
        args: [--application-directories=src]
        exclude: "(^migrations/|load_database_models)"
      - id: trailing-whitespace
        name: Trim Trailing Whitespace
        entry: trailing-whitespace-fixer
        language: system
        types: [text]
        stages: [commit, push, manual]
        exclude: ^migrations/
  - repo: https://github.com/pre-commit/mirrors-prettier
    rev: v2.4.1
    hooks:
      - id: prettier
        exclude_types: [html]

View File

@@ -0,0 +1,12 @@
version: 2
build:
  os: ubuntu-20.04
  tools:
    python: "3.10"
sphinx:
  configuration: docs/conf.py
formats: all
python:
  install:
    - requirements: docs/requirements.txt
    - path: .

View File

@@ -0,0 +1 @@
python 3.10.4

View File

@@ -0,0 +1,105 @@
Contributor Covenant Code of Conduct
====================================
Our Pledge
----------
We as members, contributors, and leaders pledge to make participation in our community a harassment-free experience for everyone, regardless of age, body size, visible or invisible disability, ethnicity, sex characteristics, gender identity and expression, level of experience, education, socio-economic status, nationality, personal appearance, race, religion, or sexual identity and orientation.
We pledge to act and interact in ways that contribute to an open, welcoming, diverse, inclusive, and healthy community.
Our Standards
-------------
Examples of behavior that contributes to a positive environment for our community include:
- Demonstrating empathy and kindness toward other people
- Being respectful of differing opinions, viewpoints, and experiences
- Giving and gracefully accepting constructive feedback
- Accepting responsibility and apologizing to those affected by our mistakes, and learning from the experience
- Focusing on what is best not just for us as individuals, but for the overall community
Examples of unacceptable behavior include:
- The use of sexualized language or imagery, and sexual attention or
advances of any kind
- Trolling, insulting or derogatory comments, and personal or political attacks
- Public or private harassment
- Publishing others' private information, such as a physical or email
address, without their explicit permission
- Other conduct which could reasonably be considered inappropriate in a
professional setting
Enforcement Responsibilities
----------------------------
Community leaders are responsible for clarifying and enforcing our standards of acceptable behavior and will take appropriate and fair corrective action in response to any behavior that they deem inappropriate, threatening, offensive, or harmful.
Community leaders have the right and responsibility to remove, edit, or reject comments, commits, code, wiki edits, issues, and other contributions that are not aligned to this Code of Conduct, and will communicate reasons for moderation decisions when appropriate.
Scope
-----
This Code of Conduct applies within all community spaces, and also applies when an individual is officially representing the community in public spaces. Examples of representing our community include using an official e-mail address, posting via an official social media account, or acting as an appointed representative at an online or offline event.
Enforcement
-----------
Instances of abusive, harassing, or otherwise unacceptable behavior may be reported to the community leaders responsible for enforcement at sartography@users.noreply.github.com. All complaints will be reviewed and investigated promptly and fairly.
All community leaders are obligated to respect the privacy and security of the reporter of any incident.
Enforcement Guidelines
----------------------
Community leaders will follow these Community Impact Guidelines in determining the consequences for any action they deem in violation of this Code of Conduct:
1. Correction
~~~~~~~~~~~~~
**Community Impact**: Use of inappropriate language or other behavior deemed unprofessional or unwelcome in the community.
**Consequence**: A private, written warning from community leaders, providing clarity around the nature of the violation and an explanation of why the behavior was inappropriate. A public apology may be requested.
2. Warning
~~~~~~~~~~
**Community Impact**: A violation through a single incident or series of actions.
**Consequence**: A warning with consequences for continued behavior. No interaction with the people involved, including unsolicited interaction with those enforcing the Code of Conduct, for a specified period of time. This includes avoiding interactions in community spaces as well as external channels like social media. Violating these terms may lead to a temporary or permanent ban.
3. Temporary Ban
~~~~~~~~~~~~~~~~
**Community Impact**: A serious violation of community standards, including sustained inappropriate behavior.
**Consequence**: A temporary ban from any sort of interaction or public communication with the community for a specified period of time. No public or private interaction with the people involved, including unsolicited interaction with those enforcing the Code of Conduct, is allowed during this period. Violating these terms may lead to a permanent ban.
4. Permanent Ban
~~~~~~~~~~~~~~~~
**Community Impact**: Demonstrating a pattern of violation of community standards, including sustained inappropriate behavior, harassment of an individual, or aggression toward or disparagement of classes of individuals.
**Consequence**: A permanent ban from any sort of public interaction within the community.
Attribution
-----------
This Code of Conduct is adapted from the `Contributor Covenant <homepage_>`__, version 2.0,
available at https://www.contributor-covenant.org/version/2/0/code_of_conduct.html.
Community Impact Guidelines were inspired by `Mozilla's code of conduct enforcement ladder <https://github.com/mozilla/diversity>`__.
.. _homepage: https://www.contributor-covenant.org
For answers to common questions about this code of conduct, see the FAQ at
https://www.contributor-covenant.org/faq. Translations are available at https://www.contributor-covenant.org/translations.

View File

@@ -0,0 +1,123 @@
Contributor Guide
=================
Thank you for your interest in improving this project.
This project is open-source under the `MIT license`_ and
welcomes contributions in the form of bug reports, feature requests, and pull requests.
Here is a list of important resources for contributors:
- `Source Code`_
- `Documentation`_
- `Issue Tracker`_
- `Code of Conduct`_
.. _MIT license: https://opensource.org/licenses/MIT
.. _Source Code: https://github.com/sartography/spiffworkflow-backend
.. _Documentation: https://spiffworkflow-backend.readthedocs.io/
.. _Issue Tracker: https://github.com/sartography/spiffworkflow-backend/issues
How to report a bug
-------------------
Report bugs on the `Issue Tracker`_.
When filing an issue, make sure to answer these questions:
- Which operating system and Python version are you using?
- Which version of this project are you using?
- What did you do?
- What did you expect to see?
- What did you see instead?
The best way to get your bug fixed is to provide a test case,
and/or steps to reproduce the issue.
How to request a feature
------------------------
Request features on the `Issue Tracker`_.
How to set up your development environment
------------------------------------------
You need Python 3.9+ and the following tools:
- Poetry_
- Nox_
- nox-poetry_
Install the package with development requirements:
.. code:: console

   $ poetry install
You can now run an interactive Python session,
or the command-line interface:
.. code:: console

   $ poetry run python
   $ poetry run spiffworkflow-backend
.. _Poetry: https://python-poetry.org/
.. _Nox: https://nox.thea.codes/
.. _nox-poetry: https://nox-poetry.readthedocs.io/
How to test the project
-----------------------
Run the full test suite:
.. code:: console

   $ nox
List the available Nox sessions:
.. code:: console

   $ nox --list-sessions
You can also run a specific Nox session.
For example, invoke the unit test suite like this:
.. code:: console

   $ nox --session=tests
Unit tests are located in the ``tests`` directory,
and are written using the pytest_ testing framework.
.. _pytest: https://pytest.readthedocs.io/
How to submit changes
---------------------
Open a `pull request`_ to submit changes to this project.
Your pull request needs to meet the following guidelines for acceptance:
- The Nox test suite must pass without errors and warnings.
- Include unit tests. This project maintains 100% code coverage.
- If your changes add functionality, update the documentation accordingly.
Feel free to submit early, though—we can always iterate on this.
To run linting and code formatting checks before committing your change, you can install pre-commit as a Git hook by running the following command:
.. code:: console

   $ nox --session=pre-commit -- install
It is recommended to open an issue before starting work on anything.
This will allow a chance to talk it over with the owners and validate your approach.
.. _pull request: https://github.com/sartography/spiffworkflow-backend/pulls
.. github-only
.. _Code of Conduct: CODE_OF_CONDUCT.rst

View File

@@ -0,0 +1,28 @@
FROM ghcr.io/sartography/python:3.10
RUN pip install poetry
RUN useradd _gunicorn --no-create-home --user-group
RUN apt-get update && \
  apt-get install -y -q \
    gcc libssl-dev \
    curl git-core libpq-dev \
    gunicorn3 default-mysql-client
WORKDIR /app
COPY pyproject.toml poetry.lock /app/
RUN poetry install
RUN set -xe \
  && apt-get remove -y gcc python3-dev libssl-dev \
  && apt-get autoremove -y \
  && apt-get clean -y \
  && rm -rf /var/lib/apt/lists/*
COPY . /app/
# run poetry install again AFTER copying the app into the image
# otherwise it does not know what the main app module is
RUN poetry install
CMD ./bin/boot_server_in_docker
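A minimal sketch of building and running this image locally (the image tag is made up, and port 7000 is an assumption taken from the SPIFFWORKFLOW_BACKEND_PORT default in bin/boot_server_in_docker):

$ docker build -t spiffworkflow-backend .
$ docker run -p 7000:7000 -e FLASK_SESSION_SECRET_KEY=some_secret spiffworkflow-backend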

View File

@@ -0,0 +1,504 @@
GNU LESSER GENERAL PUBLIC LICENSE
Version 2.1, February 1999
Copyright (C) 1991, 1999 Free Software Foundation, Inc.
51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
Everyone is permitted to copy and distribute verbatim copies
of this license document, but changing it is not allowed.
[This is the first released version of the Lesser GPL. It also counts
as the successor of the GNU Library Public License, version 2, hence
the version number 2.1.]
Preamble
The licenses for most software are designed to take away your
freedom to share and change it. By contrast, the GNU General Public
Licenses are intended to guarantee your freedom to share and change
free software--to make sure the software is free for all its users.
This license, the Lesser General Public License, applies to some
specially designated software packages--typically libraries--of the
Free Software Foundation and other authors who decide to use it. You
can use it too, but we suggest you first think carefully about whether
this license or the ordinary General Public License is the better
strategy to use in any particular case, based on the explanations below.
When we speak of free software, we are referring to freedom of use,
not price. Our General Public Licenses are designed to make sure that
you have the freedom to distribute copies of free software (and charge
for this service if you wish); that you receive source code or can get
it if you want it; that you can change the software and use pieces of
it in new free programs; and that you are informed that you can do
these things.
To protect your rights, we need to make restrictions that forbid
distributors to deny you these rights or to ask you to surrender these
rights. These restrictions translate to certain responsibilities for
you if you distribute copies of the library or if you modify it.
For example, if you distribute copies of the library, whether gratis
or for a fee, you must give the recipients all the rights that we gave
you. You must make sure that they, too, receive or can get the source
code. If you link other code with the library, you must provide
complete object files to the recipients, so that they can relink them
with the library after making changes to the library and recompiling
it. And you must show them these terms so they know their rights.
We protect your rights with a two-step method: (1) we copyright the
library, and (2) we offer you this license, which gives you legal
permission to copy, distribute and/or modify the library.
To protect each distributor, we want to make it very clear that
there is no warranty for the free library. Also, if the library is
modified by someone else and passed on, the recipients should know
that what they have is not the original version, so that the original
author's reputation will not be affected by problems that might be
introduced by others.
Finally, software patents pose a constant threat to the existence of
any free program. We wish to make sure that a company cannot
effectively restrict the users of a free program by obtaining a
restrictive license from a patent holder. Therefore, we insist that
any patent license obtained for a version of the library must be
consistent with the full freedom of use specified in this license.
Most GNU software, including some libraries, is covered by the
ordinary GNU General Public License. This license, the GNU Lesser
General Public License, applies to certain designated libraries, and
is quite different from the ordinary General Public License. We use
this license for certain libraries in order to permit linking those
libraries into non-free programs.
When a program is linked with a library, whether statically or using
a shared library, the combination of the two is legally speaking a
combined work, a derivative of the original library. The ordinary
General Public License therefore permits such linking only if the
entire combination fits its criteria of freedom. The Lesser General
Public License permits more lax criteria for linking other code with
the library.
We call this license the "Lesser" General Public License because it
does Less to protect the user's freedom than the ordinary General
Public License. It also provides other free software developers Less
of an advantage over competing non-free programs. These disadvantages
are the reason we use the ordinary General Public License for many
libraries. However, the Lesser license provides advantages in certain
special circumstances.
For example, on rare occasions, there may be a special need to
encourage the widest possible use of a certain library, so that it becomes
a de-facto standard. To achieve this, non-free programs must be
allowed to use the library. A more frequent case is that a free
library does the same job as widely used non-free libraries. In this
case, there is little to gain by limiting the free library to free
software only, so we use the Lesser General Public License.
In other cases, permission to use a particular library in non-free
programs enables a greater number of people to use a large body of
free software. For example, permission to use the GNU C Library in
non-free programs enables many more people to use the whole GNU
operating system, as well as its variant, the GNU/Linux operating
system.
Although the Lesser General Public License is Less protective of the
users' freedom, it does ensure that the user of a program that is
linked with the Library has the freedom and the wherewithal to run
that program using a modified version of the Library.
The precise terms and conditions for copying, distribution and
modification follow. Pay close attention to the difference between a
"work based on the library" and a "work that uses the library". The
former contains code derived from the library, whereas the latter must
be combined with the library in order to run.
GNU LESSER GENERAL PUBLIC LICENSE
TERMS AND CONDITIONS FOR COPYING, DISTRIBUTION AND MODIFICATION
0. This License Agreement applies to any software library or other
program which contains a notice placed by the copyright holder or
other authorized party saying it may be distributed under the terms of
this Lesser General Public License (also called "this License").
Each licensee is addressed as "you".
A "library" means a collection of software functions and/or data
prepared so as to be conveniently linked with application programs
(which use some of those functions and data) to form executables.
The "Library", below, refers to any such software library or work
which has been distributed under these terms. A "work based on the
Library" means either the Library or any derivative work under
copyright law: that is to say, a work containing the Library or a
portion of it, either verbatim or with modifications and/or translated
straightforwardly into another language. (Hereinafter, translation is
included without limitation in the term "modification".)
"Source code" for a work means the preferred form of the work for
making modifications to it. For a library, complete source code means
all the source code for all modules it contains, plus any associated
interface definition files, plus the scripts used to control compilation
and installation of the library.
Activities other than copying, distribution and modification are not
covered by this License; they are outside its scope. The act of
running a program using the Library is not restricted, and output from
such a program is covered only if its contents constitute a work based
on the Library (independent of the use of the Library in a tool for
writing it). Whether that is true depends on what the Library does
and what the program that uses the Library does.
1. You may copy and distribute verbatim copies of the Library's
complete source code as you receive it, in any medium, provided that
you conspicuously and appropriately publish on each copy an
appropriate copyright notice and disclaimer of warranty; keep intact
all the notices that refer to this License and to the absence of any
warranty; and distribute a copy of this License along with the
Library.
You may charge a fee for the physical act of transferring a copy,
and you may at your option offer warranty protection in exchange for a
fee.
2. You may modify your copy or copies of the Library or any portion
of it, thus forming a work based on the Library, and copy and
distribute such modifications or work under the terms of Section 1
above, provided that you also meet all of these conditions:
a) The modified work must itself be a software library.
b) You must cause the files modified to carry prominent notices
stating that you changed the files and the date of any change.
c) You must cause the whole of the work to be licensed at no
charge to all third parties under the terms of this License.
d) If a facility in the modified Library refers to a function or a
table of data to be supplied by an application program that uses
the facility, other than as an argument passed when the facility
is invoked, then you must make a good faith effort to ensure that,
in the event an application does not supply such function or
table, the facility still operates, and performs whatever part of
its purpose remains meaningful.
(For example, a function in a library to compute square roots has
a purpose that is entirely well-defined independent of the
application. Therefore, Subsection 2d requires that any
application-supplied function or table used by this function must
be optional: if the application does not supply it, the square
root function must still compute square roots.)
These requirements apply to the modified work as a whole. If
identifiable sections of that work are not derived from the Library,
and can be reasonably considered independent and separate works in
themselves, then this License, and its terms, do not apply to those
sections when you distribute them as separate works. But when you
distribute the same sections as part of a whole which is a work based
on the Library, the distribution of the whole must be on the terms of
this License, whose permissions for other licensees extend to the
entire whole, and thus to each and every part regardless of who wrote
it.
Thus, it is not the intent of this section to claim rights or contest
your rights to work written entirely by you; rather, the intent is to
exercise the right to control the distribution of derivative or
collective works based on the Library.
In addition, mere aggregation of another work not based on the Library
with the Library (or with a work based on the Library) on a volume of
a storage or distribution medium does not bring the other work under
the scope of this License.
3. You may opt to apply the terms of the ordinary GNU General Public
License instead of this License to a given copy of the Library. To do
this, you must alter all the notices that refer to this License, so
that they refer to the ordinary GNU General Public License, version 2,
instead of to this License. (If a newer version than version 2 of the
ordinary GNU General Public License has appeared, then you can specify
that version instead if you wish.) Do not make any other change in
these notices.
Once this change is made in a given copy, it is irreversible for
that copy, so the ordinary GNU General Public License applies to all
subsequent copies and derivative works made from that copy.
This option is useful when you wish to copy part of the code of
the Library into a program that is not a library.
4. You may copy and distribute the Library (or a portion or
derivative of it, under Section 2) in object code or executable form
under the terms of Sections 1 and 2 above provided that you accompany
it with the complete corresponding machine-readable source code, which
must be distributed under the terms of Sections 1 and 2 above on a
medium customarily used for software interchange.
If distribution of object code is made by offering access to copy
from a designated place, then offering equivalent access to copy the
source code from the same place satisfies the requirement to
distribute the source code, even though third parties are not
compelled to copy the source along with the object code.
5. A program that contains no derivative of any portion of the
Library, but is designed to work with the Library by being compiled or
linked with it, is called a "work that uses the Library". Such a
work, in isolation, is not a derivative work of the Library, and
therefore falls outside the scope of this License.
However, linking a "work that uses the Library" with the Library
creates an executable that is a derivative of the Library (because it
contains portions of the Library), rather than a "work that uses the
library". The executable is therefore covered by this License.
Section 6 states terms for distribution of such executables.
When a "work that uses the Library" uses material from a header file
that is part of the Library, the object code for the work may be a
derivative work of the Library even though the source code is not.
Whether this is true is especially significant if the work can be
linked without the Library, or if the work is itself a library. The
threshold for this to be true is not precisely defined by law.
If such an object file uses only numerical parameters, data
structure layouts and accessors, and small macros and small inline
functions (ten lines or less in length), then the use of the object
file is unrestricted, regardless of whether it is legally a derivative
work. (Executables containing this object code plus portions of the
Library will still fall under Section 6.)
Otherwise, if the work is a derivative of the Library, you may
distribute the object code for the work under the terms of Section 6.
Any executables containing that work also fall under Section 6,
whether or not they are linked directly with the Library itself.
6. As an exception to the Sections above, you may also combine or
link a "work that uses the Library" with the Library to produce a
work containing portions of the Library, and distribute that work
under terms of your choice, provided that the terms permit
modification of the work for the customer's own use and reverse
engineering for debugging such modifications.
You must give prominent notice with each copy of the work that the
Library is used in it and that the Library and its use are covered by
this License. You must supply a copy of this License. If the work
during execution displays copyright notices, you must include the
copyright notice for the Library among them, as well as a reference
directing the user to the copy of this License. Also, you must do one
of these things:
a) Accompany the work with the complete corresponding
machine-readable source code for the Library including whatever
changes were used in the work (which must be distributed under
Sections 1 and 2 above); and, if the work is an executable linked
with the Library, with the complete machine-readable "work that
uses the Library", as object code and/or source code, so that the
user can modify the Library and then relink to produce a modified
executable containing the modified Library. (It is understood
that the user who changes the contents of definitions files in the
Library will not necessarily be able to recompile the application
to use the modified definitions.)
b) Use a suitable shared library mechanism for linking with the
Library. A suitable mechanism is one that (1) uses at run time a
copy of the library already present on the user's computer system,
rather than copying library functions into the executable, and (2)
will operate properly with a modified version of the library, if
the user installs one, as long as the modified version is
interface-compatible with the version that the work was made with.
c) Accompany the work with a written offer, valid for at
least three years, to give the same user the materials
specified in Subsection 6a, above, for a charge no more
than the cost of performing this distribution.
d) If distribution of the work is made by offering access to copy
from a designated place, offer equivalent access to copy the above
specified materials from the same place.
e) Verify that the user has already received a copy of these
materials or that you have already sent this user a copy.
For an executable, the required form of the "work that uses the
Library" must include any data and utility programs needed for
reproducing the executable from it. However, as a special exception,
the materials to be distributed need not include anything that is
normally distributed (in either source or binary form) with the major
components (compiler, kernel, and so on) of the operating system on
which the executable runs, unless that component itself accompanies
the executable.
It may happen that this requirement contradicts the license
restrictions of other proprietary libraries that do not normally
accompany the operating system. Such a contradiction means you cannot
use both them and the Library together in an executable that you
distribute.
7. You may place library facilities that are a work based on the
Library side-by-side in a single library together with other library
facilities not covered by this License, and distribute such a combined
library, provided that the separate distribution of the work based on
the Library and of the other library facilities is otherwise
permitted, and provided that you do these two things:
a) Accompany the combined library with a copy of the same work
based on the Library, uncombined with any other library
facilities. This must be distributed under the terms of the
Sections above.
b) Give prominent notice with the combined library of the fact
that part of it is a work based on the Library, and explaining
where to find the accompanying uncombined form of the same work.
8. You may not copy, modify, sublicense, link with, or distribute
the Library except as expressly provided under this License. Any
attempt otherwise to copy, modify, sublicense, link with, or
distribute the Library is void, and will automatically terminate your
rights under this License. However, parties who have received copies,
or rights, from you under this License will not have their licenses
terminated so long as such parties remain in full compliance.
9. You are not required to accept this License, since you have not
signed it. However, nothing else grants you permission to modify or
distribute the Library or its derivative works. These actions are
prohibited by law if you do not accept this License. Therefore, by
modifying or distributing the Library (or any work based on the
Library), you indicate your acceptance of this License to do so, and
all its terms and conditions for copying, distributing or modifying
the Library or works based on it.
10. Each time you redistribute the Library (or any work based on the
Library), the recipient automatically receives a license from the
original licensor to copy, distribute, link with or modify the Library
subject to these terms and conditions. You may not impose any further
restrictions on the recipients' exercise of the rights granted herein.
You are not responsible for enforcing compliance by third parties with
this License.
11. If, as a consequence of a court judgment or allegation of patent
infringement or for any other reason (not limited to patent issues),
conditions are imposed on you (whether by court order, agreement or
otherwise) that contradict the conditions of this License, they do not
excuse you from the conditions of this License. If you cannot
distribute so as to satisfy simultaneously your obligations under this
License and any other pertinent obligations, then as a consequence you
may not distribute the Library at all. For example, if a patent
license would not permit royalty-free redistribution of the Library by
all those who receive copies directly or indirectly through you, then
the only way you could satisfy both it and this License would be to
refrain entirely from distribution of the Library.
If any portion of this section is held invalid or unenforceable under any
particular circumstance, the balance of the section is intended to apply,
and the section as a whole is intended to apply in other circumstances.
It is not the purpose of this section to induce you to infringe any
patents or other property right claims or to contest validity of any
such claims; this section has the sole purpose of protecting the
integrity of the free software distribution system which is
implemented by public license practices. Many people have made
generous contributions to the wide range of software distributed
through that system in reliance on consistent application of that
system; it is up to the author/donor to decide if he or she is willing
to distribute software through any other system and a licensee cannot
impose that choice.
This section is intended to make thoroughly clear what is believed to
be a consequence of the rest of this License.
12. If the distribution and/or use of the Library is restricted in
certain countries either by patents or by copyrighted interfaces, the
original copyright holder who places the Library under this License may add
an explicit geographical distribution limitation excluding those countries,
so that distribution is permitted only in or among countries not thus
excluded. In such case, this License incorporates the limitation as if
written in the body of this License.
13. The Free Software Foundation may publish revised and/or new
versions of the Lesser General Public License from time to time.
Such new versions will be similar in spirit to the present version,
but may differ in detail to address new problems or concerns.
Each version is given a distinguishing version number. If the Library
specifies a version number of this License which applies to it and
"any later version", you have the option of following the terms and
conditions either of that version or of any later version published by
the Free Software Foundation. If the Library does not specify a
license version number, you may choose any version ever published by
the Free Software Foundation.
14. If you wish to incorporate parts of the Library into other free
programs whose distribution conditions are incompatible with these,
write to the author to ask for permission. For software which is
copyrighted by the Free Software Foundation, write to the Free
Software Foundation; we sometimes make exceptions for this. Our
decision will be guided by the two goals of preserving the free status
of all derivatives of our free software and of promoting the sharing
and reuse of software generally.
NO WARRANTY
15. BECAUSE THE LIBRARY IS LICENSED FREE OF CHARGE, THERE IS NO
WARRANTY FOR THE LIBRARY, TO THE EXTENT PERMITTED BY APPLICABLE LAW.
EXCEPT WHEN OTHERWISE STATED IN WRITING THE COPYRIGHT HOLDERS AND/OR
OTHER PARTIES PROVIDE THE LIBRARY "AS IS" WITHOUT WARRANTY OF ANY
KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING, BUT NOT LIMITED TO, THE
IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
PURPOSE. THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE
LIBRARY IS WITH YOU. SHOULD THE LIBRARY PROVE DEFECTIVE, YOU ASSUME
THE COST OF ALL NECESSARY SERVICING, REPAIR OR CORRECTION.
16. IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN
WRITING WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MAY MODIFY
AND/OR REDISTRIBUTE THE LIBRARY AS PERMITTED ABOVE, BE LIABLE TO YOU
FOR DAMAGES, INCLUDING ANY GENERAL, SPECIAL, INCIDENTAL OR
CONSEQUENTIAL DAMAGES ARISING OUT OF THE USE OR INABILITY TO USE THE
LIBRARY (INCLUDING BUT NOT LIMITED TO LOSS OF DATA OR DATA BEING
RENDERED INACCURATE OR LOSSES SUSTAINED BY YOU OR THIRD PARTIES OR A
FAILURE OF THE LIBRARY TO OPERATE WITH ANY OTHER SOFTWARE), EVEN IF
SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE POSSIBILITY OF SUCH
DAMAGES.
END OF TERMS AND CONDITIONS
How to Apply These Terms to Your New Libraries
If you develop a new library, and you want it to be of the greatest
possible use to the public, we recommend making it free software that
everyone can redistribute and change. You can do so by permitting
redistribution under these terms (or, alternatively, under the terms of the
ordinary General Public License).
To apply these terms, attach the following notices to the library. It is
safest to attach them to the start of each source file to most effectively
convey the exclusion of warranty; and each file should have at least the
"copyright" line and a pointer to where the full notice is found.
<one line to give the library's name and a brief idea of what it does.>
Copyright (C) <year> <name of author>
This library is free software; you can redistribute it and/or
modify it under the terms of the GNU Lesser General Public
License as published by the Free Software Foundation; either
version 2.1 of the License, or (at your option) any later version.
This library is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
Lesser General Public License for more details.
You should have received a copy of the GNU Lesser General Public
License along with this library; if not, write to the Free Software
Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301
USA
Also add information on how to contact you by electronic and paper mail.
You should also get your employer (if you work as a programmer) or your
school, if any, to sign a "copyright disclaimer" for the library, if
necessary. Here is a sample; alter the names:
Yoyodyne, Inc., hereby disclaims all copyright interest in the
library `Frob' (a library for tweaking knobs) written by James Random
Hacker.
<signature of Ty Coon>, 1 April 1990
Ty Coon, President of Vice
That's all there is to it!

View File

@@ -0,0 +1,92 @@
Spiffworkflow Backend
=====================
|Tests| |Codecov|
|pre-commit| |Black|
.. |Tests| image:: https://github.com/sartography/spiffworkflow-backend/workflows/Tests/badge.svg
   :target: https://github.com/sartography/spiffworkflow-backend/actions?workflow=Tests
   :alt: Tests
.. |Codecov| image:: https://codecov.io/gh/sartography/spiffworkflow-backend/branch/main/graph/badge.svg
   :target: https://codecov.io/gh/sartography/spiffworkflow-backend
   :alt: Codecov
.. |pre-commit| image:: https://img.shields.io/badge/pre--commit-enabled-brightgreen?logo=pre-commit&logoColor=white
   :target: https://github.com/pre-commit/pre-commit
   :alt: pre-commit
.. |Black| image:: https://img.shields.io/badge/code%20style-black-000000.svg
   :target: https://github.com/psf/black
   :alt: Black
Features
--------
* Backend API portion of the spiffworkflow engine webapp
Running Locally
---------------
* Install libraries using poetry:

  .. code:: console

     $ poetry install

* Set up the database (uses MySQL and assumes the server is running by default):

  .. code:: console

     $ ./bin/recreate_db clean

* Run the server:

  .. code:: console

     $ ./bin/run_server_locally
Requirements
------------
* Python 3.9+
* Poetry
Contributing
------------
Contributions are very welcome.
To learn more, see the `Contributor Guide`_.
License
-------
Distributed under the terms of the `MIT license`_,
*Spiffworkflow Backend* is free and open source software.
Issues
------
If you encounter any problems,
please `file an issue`_ along with a detailed description.
Credits
-------
This project was generated from `@cjolowicz`_'s `Hypermodern Python Cookiecutter`_ template.
.. _@cjolowicz: https://github.com/cjolowicz
.. _Cookiecutter: https://github.com/audreyr/cookiecutter
.. _MIT license: https://opensource.org/licenses/MIT
.. _PyPI: https://pypi.org/
.. _Hypermodern Python Cookiecutter: https://github.com/cjolowicz/cookiecutter-hypermodern-python
.. _file an issue: https://github.com/sartography/spiffworkflow-backend/issues
.. _pip: https://pip.pypa.io/
.. github-only
.. _Contributor Guide: CONTRIBUTING.rst
.. _Usage: https://spiffworkflow-backend.readthedocs.io/en/latest/usage.html

View File

@@ -0,0 +1,14 @@
#!/usr/bin/env bash
function error_handler() {
  >&2 echo "Exited with BAD EXIT CODE '${2}' in ${0} script at line: ${1}."
  exit "$2"
}
trap 'error_handler ${LINENO} $?' ERR
set -o errtrace -o errexit -o nounset -o pipefail
if [[ ! -f /app/log/db_development.log ]]; then
  touch /app/log/db_development.log
fi
tail -f /app/log/db_development.log

View File

@@ -0,0 +1,45 @@
#!/usr/bin/env bash
function error_handler() {
  >&2 echo "Exited with BAD EXIT CODE '${2}' in ${0} script at line: ${1}."
  exit "$2"
}
trap 'error_handler ${LINENO} $?' ERR
set -o errtrace -o errexit -o nounset -o pipefail
# run migrations
export FLASK_APP=/app/src/spiffworkflow_backend
if [[ "${DOWNGRADE_DB:-}" == "true" ]]; then
  echo 'Downgrading database...'
  poetry run flask db downgrade
fi
if [[ "${SPIFFWORKFLOW_BACKEND_UPGRADE_DB:-}" == "true" ]]; then
  echo 'Upgrading database...'
  poetry run flask db upgrade
fi
port="${SPIFFWORKFLOW_BACKEND_PORT:-}"
if [[ -z "$port" ]]; then
  port=7000
fi
additional_args=""
if [[ "${APPLICATION_ROOT:-}" != "/" ]]; then
  additional_args="${additional_args} -e SCRIPT_NAME=${APPLICATION_ROOT}"
fi
# HACK: when loading fixtures for acceptance tests we do not need multiple workers,
# and multiple workers cause issues by attempting to add duplicate data to the db
workers=3
if [[ "${SPIFFWORKFLOW_BACKEND_LOAD_FIXTURE_DATA:-}" == "true" ]]; then
  workers=1
fi
export IS_GUNICORN="true"
export PROCESS_WAITING_MESSAGES="true"
# THIS MUST BE THE LAST COMMAND!
exec poetry run gunicorn ${additional_args} --bind "0.0.0.0:$port" --workers="$workers" --timeout 90 --capture-output --access-logfile '-' --log-level debug wsgi:app

View File

@@ -0,0 +1,42 @@
#!/usr/bin/env bash
function error_handler() {
  >&2 echo "Exited with BAD EXIT CODE '${2}' in ${0} script at line: ${1}."
  exit "$2"
}
trap 'error_handler ${LINENO} $?' ERR
set -o errtrace -o errexit -o nounset -o pipefail
if [[ -z "${BPMN_SPEC_ABSOLUTE_DIR:-}" ]]; then
  script_dir="$( cd -- "$(dirname "$0")" >/dev/null 2>&1 ; pwd -P )"
  export BPMN_SPEC_ABSOLUTE_DIR="$script_dir/../../sample-process-models"
fi
if [[ -z "${SPIFFWORKFLOW_BACKEND_DOCKER_COMPOSE_PROFILE:-}" ]]; then
  export SPIFFWORKFLOW_BACKEND_DOCKER_COMPOSE_PROFILE=run
fi
additional_args=""
if [[ "${RUN_WITH_DAEMON:-}" != "false" ]]; then
  additional_args="${additional_args} -d"
fi
docker compose --profile "$SPIFFWORKFLOW_BACKEND_DOCKER_COMPOSE_PROFILE" build
docker compose --profile "$SPIFFWORKFLOW_BACKEND_DOCKER_COMPOSE_PROFILE" stop
if [[ "${SPIFFWORKFLOW_BACKEND_RECREATE_DATABASE:-}" == "true" ]]; then
  docker stop db
  docker rm db
  docker volume rm spiffworkflow-backend_spiffworkflow_backend
  # we observed a case locally where the db had a stale sqlalchemy revision, which
  # caused the backend to exit. when docker compose up was running with --wait, it
  # then waited forever (like we have seen in CI). removing the volume works around
  # that case if the volumes are not cleaned up in CI, and removing the wait keeps it
  # from hanging forever when the backend crashes, so we just wait for the timeout in
  # the bin/wait_for_server_to_be_up script instead.
  docker volume rm spiffworkflow-backend_spiffworkflow_backend || echo 'docker volume not found'
fi
docker compose --profile "$SPIFFWORKFLOW_BACKEND_DOCKER_COMPOSE_PROFILE" up --wait $additional_args
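Hypothetical invocation showing the knobs this script reads from the environment (the values are only illustrations):

$ SPIFFWORKFLOW_BACKEND_RECREATE_DATABASE=true RUN_WITH_DAEMON=false ./bin/build_and_run_with_docker_compose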

View File

@@ -0,0 +1,48 @@
#!/usr/bin/env bash
function error_handler() {
  >&2 echo "Exited with BAD EXIT CODE '${2}' in ${0} script at line: ${1}."
  exit "$2"
}
trap 'error_handler ${LINENO} $?' ERR
set -o errtrace -o errexit -o nounset -o pipefail
if [[ -z "${SPIFFWORKFLOW_BACKEND_ENV:-}" ]]; then
  export SPIFFWORKFLOW_BACKEND_ENV=staging
fi
if [[ -z "${FLASK_SESSION_SECRET_KEY:-}" ]]; then
  export FLASK_SESSION_SECRET_KEY=staging_super_secret_key_dont_tell_anyone
fi
if [[ -z "${SPIFFWORKFLOW_BACKEND_MYSQL_ROOT_PASSWORD:-}" ]]; then
  export SPIFFWORKFLOW_BACKEND_MYSQL_ROOT_PASSWORD=St4g3Th1515
fi
if [[ -z "${SPIFFWORKFLOW_BACKEND_DATABASE_NAME:-}" ]]; then
  export SPIFFWORKFLOW_BACKEND_DATABASE_NAME=spiffworkflow_backend_staging
fi
if [[ -z "${SPIFFWORKFLOW_BACKEND_DATABASE_DOCKER_RESTART_POLICY:-}" ]]; then
  export SPIFFWORKFLOW_BACKEND_DATABASE_DOCKER_RESTART_POLICY=always
fi
if [[ -z "${SPIFFWORKFLOW_BACKEND_DOCKER_COMPOSE_PROFILE:-}" ]]; then
  export SPIFFWORKFLOW_BACKEND_DOCKER_COMPOSE_PROFILE=run
fi
if [[ -z "${SPIFFWORKFLOW_FRONTEND_URL:-}" ]]; then
  export SPIFFWORKFLOW_FRONTEND_URL='http://167.172.242.138:7001'
fi
if [[ -z "${SPIFFWORKFLOW_BACKEND_URL:-}" ]]; then
  export SPIFFWORKFLOW_BACKEND_URL='http://167.172.242.138:7000'
fi
if [[ -z "${OPEN_ID_SERVER_URL:-}" ]]; then
  export OPEN_ID_SERVER_URL='http://167.172.242.138:7002'
fi
git pull
./bin/build_and_run_with_docker_compose
./bin/wait_for_server_to_be_up

View File

@ -0,0 +1,25 @@
#!/usr/bin/env bash
function error_handler() {
>&2 echo "Exited with BAD EXIT CODE '${2}' in ${0} script at line: ${1}."
exit "$2"
}
trap 'error_handler ${LINENO} $?' ERR
set -o errtrace -o errexit -o nounset -o pipefail
realms="$*"
if [[ -z "$realms" ]]; then
realms="spiffworkflow-realm"
fi
docker_container_path=/tmp/hey
local_tmp_dir=$(mktemp -d -t ci-XXXXXXXXXX)
docker exec keycloak rm -rf "$docker_container_path"
docker exec keycloak /opt/keycloak/bin/kc.sh export --dir "${docker_container_path}" --users realm_file || echo ''
docker cp "keycloak:${docker_container_path}" "$local_tmp_dir"
for realm in $realms ; do
cp "${local_tmp_dir}/hey/${realm}.json" bin/
done
rm -rf "$local_tmp_dir"

File diff suppressed because it is too large

View File

@ -0,0 +1,35 @@
"""Get the bpmn process json for a given process instance id and store it in /tmp."""
#!/usr/bin/env python
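# usage (one way to run it): poetry run python <this script> <process_instance_id>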
import os
import sys
from spiffworkflow_backend import create_app
from spiffworkflow_backend.models.process_instance import ProcessInstanceModel
def main(process_instance_id: str):
"""Main."""
os.environ["SPIFFWORKFLOW_BACKEND_ENV"] = "development"
flask_env_key = "FLASK_SESSION_SECRET_KEY"
os.environ[flask_env_key] = "whatevs"
app = create_app()
with app.app_context():
process_instance = ProcessInstanceModel.query.filter_by(
id=process_instance_id
).first()
if not process_instance:
raise Exception(
f"Could not find a process instance with id: {process_instance_id}"
)
with open(
f"/tmp/{process_instance_id}_bpmn_json.json", "w", encoding="utf-8"
) as f:
f.write(process_instance.bpmn_json)
if len(sys.argv) < 2:
raise Exception("Process instance id not supplied")
main(sys.argv[1])

View File

@ -0,0 +1,10 @@
#!/usr/bin/env bash
function error_handler() {
>&2 echo "Exited with BAD EXIT CODE '${2}' in ${0} script at line: ${1}."
exit "$2"
}
trap 'error_handler ${LINENO} $?' ERR
set -o errtrace -o errexit -o nounset -o pipefail
docker compose logs "$@"

View File

@ -0,0 +1,103 @@
#!/usr/bin/env bash
function error_handler() {
>&2 echo "Exited with BAD EXIT CODE '${2}' in ${0} script at line: ${1}."
exit "$2"
}
trap 'error_handler ${LINENO} $?' ERR
set -o errtrace -o errexit -o nounset -o pipefail
# this tests that we can get a token from a public client and exchange it with a confidential client
# so we can see what resources that user has access to
# originally from https://medium.com/keycloak/keycloak-jwt-token-using-curl-post-72c9e791ba8c
# btw, meta config endpoint: http://localhost:7002/realms/spiffworkflow/.well-known/openid-configuration
# token exchange described at https://github.com/keycloak/keycloak-documentation/blob/main/securing_apps/topics/token-exchange/token-exchange.adoc
# some UMA stuff at https://github.com/keycloak/keycloak-documentation/blob/main/authorization_services/topics/service-authorization-obtaining-permission.adoc,
# though resource_set docs are elsewhere.
# ./bin/get_token # uses ciuser1 ciuser1
# ./bin/get_token ciadmin1 ciadmin1
# ./bin/get_token repeat_form_user_1 repeat_form_user_1 # actually has permissions to the resource in this script
# ./bin/get_token ciadmin1 ciadmin1 '%2Fprocess-models'
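# requires jq. assumes keycloak is already running locally (e.g. via ./bin/start_keycloak)
# and that the backend client secret below matches your local realm config.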
HOSTNAME=localhost:7002
REALM_NAME=spiffworkflow
USERNAME=${1-ciuser1}
PASSWORD=${2-ciuser1}
URI_TO_TEST_AGAINST=${3-'%2Fprocess-models%2Fcategory_number_one%2Fprocess-model-with-repeating-form'}
FRONTEND_CLIENT_ID=spiffworkflow-frontend
BACKEND_CLIENT_ID=spiffworkflow-backend
BACKEND_CLIENT_SECRET="JXeQExm0JhQPLumgHtIIqf52bDalHz0q" # noqa: S105
SECURE=false
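# basic auth for the confidential client: base64 of client_id:client_secret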
BACKEND_BASIC_AUTH=$(echo -n "${BACKEND_CLIENT_ID}:${BACKEND_CLIENT_SECRET}" | base64)
KEYCLOAK_URL=http://$HOSTNAME/realms/$REALM_NAME/protocol/openid-connect/token
echo "Using Keycloak: $KEYCLOAK_URL"
echo "realm: $REALM_NAME"
echo "client-id: $FRONTEND_CLIENT_ID"
echo "username: $USERNAME"
echo "password: $PASSWORD"
echo "secure: $SECURE"
if [[ "$SECURE" == 'true' ]]; then
INSECURE=
else
INSECURE=--insecure
fi
result=$(curl -s -X POST "$KEYCLOAK_URL" $INSECURE \
-H "Content-Type: application/x-www-form-urlencoded" \
-d "username=$USERNAME" \
-d "password=$PASSWORD" \
-d 'grant_type=password' \
-d "client_id=$FRONTEND_CLIENT_ID" \
)
frontend_token=$(jq -r '.access_token' <<< "$result")
result=$(curl -s -X POST "$KEYCLOAK_URL" $INSECURE \
-H "Content-Type: application/x-www-form-urlencoded" \
--data-urlencode 'grant_type=urn:ietf:params:oauth:grant-type:token-exchange' \
-d "client_id=$BACKEND_CLIENT_ID" \
-d "subject_token=${frontend_token}" \
-H "Authorization: Basic $BACKEND_BASIC_AUTH" \
-d "audience=${BACKEND_CLIENT_ID}" \
)
backend_token=$(jq -r '.access_token' <<< "$result")
if [[ "$backend_token" != 'null' ]]; then
echo "backend_token: $backend_token"
echo "Getting resource set"
# everything_resource_id='446bdcf4-a3bd-41c7-a0f8-67a225ba6b57'
resource_result=$(curl -s "http://${HOSTNAME}/realms/spiffworkflow/authz/protection/resource_set?matchingUri=true&deep=true&max=-1&exactName=false&uri=${URI_TO_TEST_AGAINST}" -H "Authorization: Bearer $backend_token")
# resource_result=$(curl -s "http://${HOSTNAME}/realms/spiffworkflow/authz/protection/resource_set?matchingUri=false&deep=true&max=-1&exactName=false&type=admin" -H "Authorization: Bearer $backend_token")
resource_id_name_pairs=$(jq -r '.[] | "\(._id):\(.name)"' <<<"$resource_result" || echo '')
if [[ -z "$resource_id_name_pairs" || "$resource_id_name_pairs" == "null" ]]; then
>&2 echo "ERROR: Could not find the resource id from the result: ${resource_result}"
exit 1
fi
echo $resource_id_name_pairs
echo "Getting permissions"
for resource_id_name_pair in $resource_id_name_pairs ; do
resource_id=$(awk -F ':' '{print $1}' <<<"$resource_id_name_pair")
resource_name=$(awk -F ':' '{print $2}' <<<"$resource_id_name_pair")
echo "Checking $resource_name"
curl -s -X POST "$KEYCLOAK_URL" $INSECURE \
-H "Content-Type: application/x-www-form-urlencoded" \
-H "Authorization: Basic $BACKEND_BASIC_AUTH" \
-d "audience=${BACKEND_CLIENT_ID}" \
--data-urlencode "grant_type=urn:ietf:params:oauth:grant-type:uma-ticket" \
-d "permission=${resource_id}" \
-d "subject_token=${backend_token}" \
| jq .
done
else
echo "Failed auth result: $result"
fi

View File

@ -0,0 +1,36 @@
#!/usr/bin/env bash
function error_handler() {
>&2 echo "Exited with BAD EXIT CODE '${2}' in ${0} script at line: ${1}."
exit "$2"
}
trap 'error_handler ${LINENO} $?' ERR
set -o errtrace -o errexit -o nounset -o pipefail
# HELP: git adds and commits the entire BPMN models directory, including all process groups
bpmn_models_absolute_dir="$1"
git_commit_message="$2"
git_commit_username="$3"
git_commit_email="$4"
if [[ -z "${2:-}" ]]; then
>&2 echo "usage: $(basename "$0") [bpmn_models_absolute_dir] [git_commit_message]"
exit 1
fi
cd "$bpmn_models_absolute_dir"
git add .
# https://unix.stackexchange.com/a/155077/456630
if [ -z "$(git status --porcelain)" ]; then
echo "No changes to commit"
else
if [[ -n "$git_commit_username" ]]; then
git config --local user.name "$git_commit_username"
fi
if [[ -n "$git_commit_email" ]]; then
git config --local user.email "$git_commit_email"
fi
git commit -m "$git_commit_message"
fi

View File

@ -0,0 +1,112 @@
"""Grabs tickets from csv and makes process instances."""
import csv
import os
from flask_bpmn.models.db import db
from spiffworkflow_backend import create_app
from spiffworkflow_backend.models.process_instance import ProcessInstanceModel
from spiffworkflow_backend.models.user import UserModel
from spiffworkflow_backend.services.process_instance_processor import (
ProcessInstanceProcessor,
)
from spiffworkflow_backend.services.process_instance_service import (
ProcessInstanceService,
)
def print_process_instance_count(process_model_identifier_ticket: str) -> None:
"""Print process instance count."""
process_instances = ProcessInstanceModel.query.filter_by(
process_model_identifier=process_model_identifier_ticket
).all()
process_instance_count = len(process_instances)
print(f"process_instance_count: {process_instance_count}")
def main():
"""Main."""
os.environ["SPIFFWORKFLOW_BACKEND_ENV"] = "development"
flask_env_key = "FLASK_SESSION_SECRET_KEY"
os.environ[flask_env_key] = "whatevs"
app = create_app()
with app.app_context():
process_model_identifier_ticket = "ticket"
db.session.query(ProcessInstanceModel).filter(
ProcessInstanceModel.process_model_identifier
== process_model_identifier_ticket
).delete()
db.session.commit()
"""Print process instance count."""
process_instances = ProcessInstanceModel.query.filter_by(
process_model_identifier=process_model_identifier_ticket
).all()
process_instance_count = len(process_instances)
print(f"process_instance_count: {process_instance_count}")
columns_to_data_key_mappings = {
"Month": "month",
"MS": "milestone",
"ID": "req_id",
"Dev Days": "dev_days",
"Feature": "feature",
"Priority": "priority",
}
columns_to_header_index_mappings = {}
user = UserModel.query.first()
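# the csv is expected to have a throwaway first row, then a header row whose
# column names match columns_to_data_key_mappings above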
with open("tests/files/tickets.csv") as infile:
reader = csv.reader(infile, delimiter=",")
# first row is garbage
next(reader)
header = next(reader)
for column_name in columns_to_data_key_mappings:
columns_to_header_index_mappings[column_name] = header.index(
column_name
)
id_index = header.index("ID")
priority_index = header.index("Priority")
print(f"header: {header}")
for row in reader:
ticket_identifier = row[id_index]
priority = row[priority_index]
print(f"ticket_identifier: {ticket_identifier}")
print(f"priority: {priority}")
process_instance = ProcessInstanceService.create_process_instance(
process_model_identifier_ticket,
user,
process_group_identifier="sartography-admin",
)
processor = ProcessInstanceProcessor(process_instance)
processor.do_engine_steps()
# processor.save()
for (
column_name,
desired_data_key,
) in columns_to_data_key_mappings.items():
appropriate_index = columns_to_header_index_mappings[column_name]
processor.bpmn_process_instance.data[desired_data_key] = row[
appropriate_index
]
print(f"datas: {processor.bpmn_process_instance.data}")
if processor.bpmn_process_instance.data["month"] == "":
continue
# you at least need a month, or else this row in the csv is considered garbage
# if processor.bpmn_process_instance.data["month"] is None:
# continue
processor.save()
if __name__ == "__main__":
main()

View File

@ -0,0 +1,110 @@
"""Import tickets, for use in script task."""
def main():
"""Use main to avoid global namespace."""
import csv
from flask_bpmn.models.db import db
from spiffworkflow_backend.models.process_instance import ProcessInstanceModel
from spiffworkflow_backend.models.user import UserModel
from spiffworkflow_backend.services.process_instance_processor import (
ProcessInstanceProcessor,
)
from spiffworkflow_backend.services.process_instance_service import (
ProcessInstanceService,
)
from spiffworkflow_backend.models.process_instance_report import (
ProcessInstanceReportModel,
)
process_model_identifier_ticket = "ticket"
db.session.query(ProcessInstanceModel).filter(
ProcessInstanceModel.process_model_identifier == process_model_identifier_ticket
).delete()
db.session.commit()
"""Print process instance count."""
process_instances = ProcessInstanceModel.query.filter_by(
process_model_identifier=process_model_identifier_ticket
).all()
process_instance_count = len(process_instances)
print(f"process_instance_count: {process_instance_count}")
columns_to_data_key_mappings = {
"Month": "month",
"MS": "milestone",
"Done?": "done",
"#": "notion_id",
"ID": "req_id",
"Dev Days": "dev_days",
"Feature": "feature",
"Feature description": "feature_description",
"Priority": "priority",
}
columns_to_header_index_mappings = {}
user = UserModel.query.first()
with open("tests/files/tickets.csv") as infile:
reader = csv.reader(infile, delimiter=",")
# first row is garbage
next(reader)
header = next(reader)
for column_name in columns_to_data_key_mappings:
columns_to_header_index_mappings[column_name] = header.index(column_name)
id_index = header.index("ID")
priority_index = header.index("Priority")
month_index = header.index("Month")
print(f"header: {header}")
for row in reader:
ticket_identifier = row[id_index]
priority = row[priority_index]
month = row[month_index]
print(f"ticket_identifier: {ticket_identifier}")
print(f"priority: {priority}")
# if there is no month, who cares about it.
if month:
process_instance = ProcessInstanceService.create_process_instance(
process_model_identifier=process_model_identifier_ticket,
user=user,
process_group_identifier="sartography-admin",
)
processor = ProcessInstanceProcessor(process_instance)
processor.do_engine_steps()
# processor.save()
for (
column_name,
desired_data_key,
) in columns_to_data_key_mappings.items():
appropriate_index = columns_to_header_index_mappings[column_name]
print(f"appropriate_index: {appropriate_index}")
processor.bpmn_process_instance.data[desired_data_key] = row[
appropriate_index
]
# you at least need a month, or else this row in the csv is considered garbage
month_value = processor.bpmn_process_instance.data["month"]
if month_value == "" or month_value is None:
db.session.delete(process_instance)
db.session.commit()
continue
processor.save()
process_instance_data = processor.get_data()
print(f"process_instance_data: {process_instance_data}")
ProcessInstanceReportModel.add_fixtures()
print("added report fixtures")
main()
# to avoid serialization issues
del main

View File

@ -0,0 +1,12 @@
{
"web": {
"issuer": "http://localhost:8080/realms/finance",
"auth_uri": "http://localhost:8080/realms/finance/protocol/openid-connect/auth",
"client_id": "myclient",
"client_secret": "OAh6rkjXIiPJDtPOz4459i3VtdlxGcce",
"redirect_uris": ["http://localhost:5005/*"],
"userinfo_uri": "http://localhost:8080/realms/finance/protocol/openid-connect/userinfo",
"token_uri": "http://localhost:8080/realms/finance/protocol/openid-connect/token",
"token_introspection_uri": "http://localhost:8080/realms/finance/protocol/openid-connect/token/introspect"
}
}

View File

@ -0,0 +1,104 @@
# type: ignore
"""keycloak_test_server."""
# ./bin/start_keycloak # starts keycloak on 8080
# pip install flask_oidc
# pip install itsdangerous==2.0.1
# python ./bin/keycloak_test_server.py # starts flask on 5005
import json
import logging
import requests
from flask import Flask
from flask import g
from flask_oidc import OpenIDConnect
logging.basicConfig(level=logging.DEBUG)
app = Flask(__name__)
app.config.update(
{
"SECRET_KEY": "SomethingNotEntirelySecret",
"TESTING": True,
"DEBUG": True,
"OIDC_CLIENT_SECRETS": "bin/keycloak_test_secrets.json",
"OIDC_ID_TOKEN_COOKIE_SECURE": False,
"OIDC_REQUIRE_VERIFIED_EMAIL": False,
"OIDC_USER_INFO_ENABLED": True,
"OIDC_OPENID_REALM": "flask-demo",
"OIDC_SCOPES": ["openid", "email", "profile"],
"OIDC_INTROSPECTION_AUTH_METHOD": "client_secret_post",
}
)
oidc = OpenIDConnect(app)
@app.route("/")
def hello_world():
"""Hello_world."""
if oidc.user_loggedin:
return (
'Hello, %s, <a href="/private">See private</a> '
'<a href="/logout">Log out</a>'
) % oidc.user_getfield("preferred_username")
else:
return 'Welcome anonymous, <a href="/private">Log in</a>'
@app.route("/private")
@oidc.require_login
def hello_me():
"""Example for protected endpoint that extracts private information from the OpenID Connect id_token.
Uses the accompanied access_token to access a backend service.
"""
info = oidc.user_getinfo(["preferred_username", "email", "sub"])
username = info.get("preferred_username")
email = info.get("email")
user_id = info.get("sub")
if user_id in oidc.credentials_store:
try:
from oauth2client.client import OAuth2Credentials
access_token = OAuth2Credentials.from_json(
oidc.credentials_store[user_id]
).access_token
print("access_token=<%s>" % access_token)
headers = {"Authorization": "Bearer %s" % (access_token)}
# YOLO
greeting = requests.get(
"http://localhost:8080/greeting", headers=headers
).text
except BaseException:
print("Could not access greeting-service")
greeting = "Hello %s" % username
return """{} your email is {} and your user_id is {}!
<ul>
<li><a href="/">Home</a></li>
<li><a href="//localhost:8080/auth/realms/finance/account?referrer=flask-app&referrer_uri=http://localhost:5005/private&">Account</a></li>
</ul>""".format(
greeting,
email,
user_id,
)
@app.route("/api", methods=["POST"])
@oidc.accept_token(require_token=True, scopes_required=["openid"])
def hello_api():
"""OAuth 2.0 protected API endpoint accessible via AccessToken."""
return json.dumps({"hello": "Welcome %s" % g.oidc_token_info["sub"]})
@app.route("/logout")
def logout():
"""Performs local logout by removing the session cookie."""
oidc.logout()
return 'Hi, you have been logged out! <a href="/">Return</a>'
if __name__ == "__main__":
app.run(port=5005)

File diff suppressed because it is too large

View File

@ -0,0 +1,49 @@
#!/usr/bin/env bash
function error_handler() {
>&2 echo "Exited with BAD EXIT CODE '${2}' in ${0} script at line: ${1}."
exit "$2"
}
trap 'error_handler ${LINENO} $?' ERR
set -o errtrace -o errexit -o nounset -o pipefail
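# usage: ./bin/recreate_db [clean [rmall]]
# "clean" drops and recreates the dev/testing databases; "rmall" also wipes migrations/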
export FLASK_SESSION_SECRET_KEY="this_is_recreate_db_secret_key"
tasks=""
if [[ "${1:-}" == "clean" ]]; then
subcommand="${2:-}"
if [[ "$subcommand" == "rmall" ]]; then
tasks="$tasks init migrate"
rm -rf migrations/
elif [[ -n "$subcommand" ]]; then
>&2 echo "ERROR: you passed a subcommand that was not rmall, and that is not supported: $subcommand"
exit 1
fi
rm -f ./src/instance/*.sqlite3
mysql -uroot -e "DROP DATABASE IF EXISTS spiffworkflow_backend_development"
mysql -uroot -e "DROP DATABASE IF EXISTS spiffworkflow_backend_testing"
# TODO: check to see if the db already exists and we can connect to it. also actually clean it up.
# start postgres in background with one db
if [[ "${SPIFF_DATABASE_TYPE:-}" == "postgres" ]]; then
if ! docker exec -it postgres-spiff psql -U spiffworkflow_backend spiffworkflow_backend_testing -c "select 1"; then
docker run --name postgres-spiff -p 5432:5432 -e POSTGRES_PASSWORD=spiffworkflow_backend -e POSTGRES_USER=spiffworkflow_backend -e POSTGRES_DB=spiffworkflow_backend_testing -d postgres
sleep 4 # classy
fi
if ! docker exec -it postgres-spiff psql -U spiffworkflow_backend spiffworkflow_backend_development -c "select 1"; then
# create other db. spiffworkflow_backend_testing came with the docker run.
docker exec -it postgres-spiff psql -U spiffworkflow_backend spiffworkflow_backend_testing -c "create database spiffworkflow_backend_development;"
fi
fi
fi
tasks="$tasks upgrade"
mysql -uroot -e "CREATE DATABASE IF NOT EXISTS spiffworkflow_backend_development"
mysql -uroot -e "CREATE DATABASE IF NOT EXISTS spiffworkflow_backend_testing"
for task in $tasks; do
SPIFFWORKFLOW_BACKEND_ENV=development FLASK_APP=src/spiffworkflow_backend poetry run flask db "$task"
done
SPIFFWORKFLOW_BACKEND_ENV=testing FLASK_APP=src/spiffworkflow_backend poetry run flask db upgrade

View File

@ -0,0 +1,111 @@
#!/usr/bin/env bash
function error_handler() {
>&2 echo "Exited with BAD EXIT CODE '${2}' in ${0} script at line: ${1}."
exit "$2"
}
trap 'error_handler ${LINENO} $?' ERR
set -o errtrace -o errexit -o nounset -o pipefail
script_dir="$( cd -- "$(dirname "$0")" >/dev/null 2>&1 ; pwd -P )"
docker stop keycloak || echo 'no keycloak container found'
docker rm keycloak 2>/dev/null || echo 'no keycloak container found'
docker run -p 8080:8080 -d --name keycloak -e KEYCLOAK_LOGLEVEL=ALL -e ROOT_LOGLEVEL=ALL -e KEYCLOAK_ADMIN=admin -e KEYCLOAK_ADMIN_PASSWORD=admin quay.io/keycloak/keycloak:18.0.2 start-dev -Dkeycloak.profile.feature.token_exchange=enabled -Dkeycloak.profile.feature.admin_fine_grained_authz=enabled
docker cp "${script_dir}/testing-realm.json" keycloak:/tmp
sleep 10
docker exec keycloak /opt/keycloak/bin/kc.sh import --file /tmp/testing-realm.json || echo ''
docker stop keycloak
docker start keycloak
sleep 10
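# for each user below: log in via the public frontend client, exchange that token
# for one from the confidential backend client, then ask keycloak which resources
# match each uri and whether the user is permitted (uma-ticket), recording the result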
HOSTNAME=localhost:8080
REALM_NAME=testing
USERS=(
ciadmin1
repeat_form_user_1
)
URIS_TO_TEST_AGAINST=(
/blog/post/1
/blog
)
FRONTEND_CLIENT_ID=testing-frontend
BACKEND_CLIENT_ID=testing-backend
BACKEND_CLIENT_SECRET="JXeQExm0JhQPLumgHtIIqf52bDalHz0q" # noqa: S105
BACKEND_BASIC_AUTH=$(echo -n "${BACKEND_CLIENT_ID}:${BACKEND_CLIENT_SECRET}" | base64)
KEYCLOAK_URL=http://$HOSTNAME/realms/$REALM_NAME/protocol/openid-connect/token
result_array=()
for user in "${USERS[@]}" ; do
result=$(curl -s -X POST "$KEYCLOAK_URL" \
-H "Content-Type: application/x-www-form-urlencoded" \
-d "username=$user" \
-d "password=$user" \
-d 'grant_type=password' \
-d "client_id=$FRONTEND_CLIENT_ID" \
)
frontend_token=$(jq -r '.access_token' <<< "$result")
result=$(curl -s -X POST "$KEYCLOAK_URL" \
-H "Content-Type: application/x-www-form-urlencoded" \
--data-urlencode 'grant_type=urn:ietf:params:oauth:grant-type:token-exchange' \
-d "client_id=$BACKEND_CLIENT_ID" \
-d "subject_token=${frontend_token}" \
-H "Authorization: Basic $BACKEND_BASIC_AUTH" \
-d "audience=${BACKEND_CLIENT_ID}" \
)
backend_token=$(jq -r '.access_token' <<< "$result")
if [[ "$backend_token" != 'null' ]]; then
echo "Getting resource set"
for uri in "${URIS_TO_TEST_AGAINST[@]}" ; do
escaped_uri=$(sed 's|/|%2F|g' <<<"$uri")
resource_result=$(curl -s "http://${HOSTNAME}/realms/testing/authz/protection/resource_set?matchingUri=true&deep=true&max=-1&exactName=false&uri=${escaped_uri}" -H "Authorization: Bearer $backend_token")
resource_id_name_pairs=$(jq -r '.[] | "\(._id):\(.name)"' <<<"$resource_result" || echo '')
if [[ -z "$resource_id_name_pairs" || "$resource_id_name_pairs" == "null" ]]; then
>&2 echo "ERROR: Could not find the resource id from the result: ${resource_result}"
exit 1
fi
echo "Getting permissions"
for resource_id_name_pair in $resource_id_name_pairs ; do
resource_id=$(awk -F ':' '{print $1}' <<<"$resource_id_name_pair")
resource_name=$(awk -F ':' '{print $2}' <<<"$resource_id_name_pair")
echo "Checking $resource_name"
auth_result=$(curl -s -X POST "$KEYCLOAK_URL" \
-H "Content-Type: application/x-www-form-urlencoded" \
-H "Authorization: Basic $BACKEND_BASIC_AUTH" \
-d "audience=${BACKEND_CLIENT_ID}" \
--data-urlencode "grant_type=urn:ietf:params:oauth:grant-type:uma-ticket" \
-d "permission=${resource_id}" \
-d "subject_token=${backend_token}" \
)
error_message=$(jq -r '.error' <<<"$auth_result" || echo -n '')
if [[ -n "$error_message" && "$error_message" != "null" ]]; then
result_array+=("${user}, ${uri}, DENY")
fi
access_token=$(jq -r '.access_token' <<<"$auth_result" || echo -n '')
if [[ -n "$access_token"&& "$access_token" != "null" ]]; then
result_array+=("${user}, ${uri}, APPROVED")
fi
done
done
else
echo "Failed auth result: $result"
fi
done
echo -e "\n\nRESULTS:\n"
for final_result in "${result_array[@]}" ; do
echo "$final_result"
done

View File

@ -0,0 +1,29 @@
#!/usr/bin/env bash
function error_handler() {
>&2 echo "Exited with BAD EXIT CODE '${2}' in ${0} script at line: ${1}."
exit "$2"
}
trap 'error_handler ${LINENO} $?' ERR
set -o errtrace -o errexit -o nounset -o pipefail
if [[ -z "${SPIFFWORKFLOW_BACKEND_ENV:-}" ]]; then
export SPIFFWORKFLOW_BACKEND_ENV=development
fi
if [[ -z "${BPMN_SPEC_ABSOLUTE_DIR:-}" ]]; then
script_dir="$( cd -- "$(dirname "$0")" >/dev/null 2>&1 ; pwd -P )"
export BPMN_SPEC_ABSOLUTE_DIR="$script_dir/../../sample-process-models"
fi
export FLASK_SESSION_SECRET_KEY=super_secret_key
export APPLICATION_ROOT="/"
if [[ -n "${SPIFFWORKFLOW_BACKEND_LOAD_FIXTURE_DATA:-}" ]]; then
./bin/boot_server_in_docker
else
if [[ -z "${PROCESS_WAITING_MESSAGES:-}" ]]; then
export PROCESS_WAITING_MESSAGES="true"
fi
FLASK_APP=src/spiffworkflow_backend poetry run flask run -p 7000
fi

View File

@ -0,0 +1,25 @@
#!/usr/bin/env bash
function error_handler() {
>&2 echo "Exited with BAD EXIT CODE '${2}' in ${0} script at line: ${1}."
exit "$2"
}
trap 'error_handler ${LINENO} $?' ERR
set -o errtrace -o errexit -o nounset -o pipefail
tables=(
message_model
message_instance
message_correlation
message_correlation_property
message_correlation_message_instance
)
for i in "${tables[@]}" ;do
echo "$i"
mysql -uroot -e "select * from spiffworkflow_backend_development.${i}"
done
echo "process_instance"
mysql -uroot -e "select id,process_model_identifier,process_group_identifier,status from spiffworkflow_backend_development.process_instance"

View File

@ -0,0 +1,99 @@
"""Grabs tickets from csv and makes process instances."""
import os
from spiffworkflow_backend import create_app
from spiffworkflow_backend.services.process_model_service import ProcessModelService
from spiffworkflow_backend.services.spec_file_service import SpecFileService
# from lxml.etree import Element as EtreeElement
def main():
"""Main."""
os.environ["SPIFFWORKFLOW_BACKEND_ENV"] = "development"
flask_env_key = "FLASK_SESSION_SECRET_KEY"
os.environ[flask_env_key] = "whatevs"
if "BPMN_SPEC_ABSOLUTE_DIR" not in os.environ:
home = os.environ["HOME"]
full_process_model_path = (
f"{home}/projects/github/sartography/sample-process-models"
)
if os.path.isdir(full_process_model_path):
os.environ["BPMN_SPEC_ABSOLUTE_DIR"] = full_process_model_path
else:
raise Exception(f"Could not find {full_process_model_path}")
app = create_app()
with app.app_context():
no_primary = []
failing_process_models = []
process_models = ProcessModelService().get_process_models()
for process_model in process_models:
if process_model.primary_file_name:
bpmn_xml_file_contents = SpecFileService.get_data(
process_model, process_model.primary_file_name
)
bad_files = [
"B.1.0.bpmn",
"C.1.0.bpmn",
"C.2.0.bpmn",
"C.6.0.bpmn",
"TC-5.1.bpmn",
]
if process_model.primary_file_name in bad_files:
continue
print(f"primary_file_name: {process_model.primary_file_name}")
try:
SpecFileService.update_file(
process_model,
process_model.primary_file_name,
bpmn_xml_file_contents,
)
except Exception as ex:
failing_process_models.append(
(process_model.primary_file_name, str(ex))
)
# files = SpecFileService.get_files(
# process_model, extension_filter="bpmn"
# )
# bpmn_etree_element: EtreeElement = (
# SpecFileService.get_etree_element_from_binary_data(
# bpmn_xml_file_contents, process_model.primary_file_name
# )
# )
# if len(files) == 1:
# try:
# new_bpmn_process_identifier = (
# SpecFileService.get_bpmn_process_identifier(
# bpmn_etree_element
# )
# )
# if (
# process_model.primary_process_id
# != new_bpmn_process_identifier
# ):
# print(
# "primary_process_id: ", process_model.primary_process_id
# )
# # attributes_to_update = {
# # "primary_process_id": new_bpmn_process_identifier
# # }
# # ProcessModelService().update_spec(
# # process_model, attributes_to_update
# # )
# # except Exception as exception:
# except Exception:
# print(f"BAD ONE: {process_model.id}")
# # raise exception
else:
no_primary.append(process_model)
# for bpmn in no_primary:
# print(bpmn)
for bpmn_errors in failing_process_models:
print(bpmn_errors)
if len(failing_process_models) > 0:
exit(1)
if __name__ == "__main__":
main()

File diff suppressed because it is too large

View File

@ -0,0 +1,80 @@
#!/usr/bin/env bash
function error_handler() {
>&2 echo "Exited with BAD EXIT CODE '${2}' in ${0} script at line: ${1}."
exit "$2"
}
trap 'error_handler ${LINENO} $?' ERR
set -o errtrace -o errexit -o nounset -o pipefail
if ! docker network inspect spiffworkflow > /dev/null 2>&1; then
docker network create spiffworkflow
fi
docker rm keycloak 2>/dev/null || echo 'no keycloak container found'
docker run \
-p 7002:8080 \
-d \
--network=spiffworkflow \
--name keycloak \
-e KEYCLOAK_LOGLEVEL=ALL \
-e ROOT_LOGLEVEL=ALL \
-e KEYCLOAK_ADMIN=admin \
-e KEYCLOAK_ADMIN_PASSWORD=admin quay.io/keycloak/keycloak:18.0.2 start-dev \
-Dkeycloak.profile.feature.token_exchange=enabled \
-Dkeycloak.profile.feature.admin_fine_grained_authz=enabled
docker cp bin/finance-realm.json keycloak:/tmp
docker cp bin/spiffworkflow-realm.json keycloak:/tmp
docker cp bin/quarkus-realm.json keycloak:/tmp
sleep 10
docker exec keycloak /opt/keycloak/bin/kc.sh import --file /tmp/finance-realm.json || echo ''
docker exec keycloak /opt/keycloak/bin/kc.sh import --file /tmp/spiffworkflow-realm.json || echo ''
docker exec keycloak /opt/keycloak/bin/kc.sh import --file /tmp/quarkus-realm.json || echo ''
echo 'imported realms'
if [ "${TURN_OFF_SSL:-}" == "true" ]; then
docker exec -it keycloak /opt/keycloak/bin/kcadm.sh config credentials --server http://localhost:8080 --realm master --user admin
docker exec -it keycloak /opt/keycloak/bin/kcadm.sh update realms/master -s sslRequired=NONE
docker exec -it keycloak /opt/keycloak/bin/kcadm.sh update realms/spiffworkflow -s sslRequired=NONE
echo 'turned off SSL requirement'
fi
docker stop keycloak
docker start keycloak
# to export:
# /opt/keycloak/bin/kc.sh export --dir /tmp/hey --users realm_file
# change any js policies to role policies - just copy the config of one and change the type to role
# https://github.com/keycloak/keycloak/issues/11664#issuecomment-1111062102
#
# if docker exec commands fail below then attempt to import by adding a new realm in the webui
# NOTE: creds - user1 / password
#### Example resource_set call
# GET /realms/quarkus/authz/protection/resource_set?matchingUri=true&deep=true&max=-1&exactName=false&uri=%2Fapi%2Fusers%2Fme HTTP/1.1..Authorization: Bearer eyJhbGciOiJSUzI1NiIsInR5cCIgOiAiSldUIiwia2lkIiA6ICJjZklBRE5feHhDSm1Wa1d5Ti1QTlhFRXZNVVdzMnI2OEN4dG1oRUROelhVIn0.eyJleHAiOjE2NTcxMzgzNzAsImlhdCI6MTY1NzEzODA3MCwianRpIjoiY2I1OTc0OTAtYzJjMi00YTFkLThkNmQtMzBkOGU5YzE1YTNlIiwiaXNzIjoiaHR0cDovL2xvY2FsaG9zdDo0MzI3OS9yZWFsbXMvcXVhcmt1cyIsImF1ZCI6ImFjY291bnQiLCJzdWIiOiI5NDhjNTllYy00NmVkLTRkOTktYWE0My0wMjkwMDAyOWI5MzAiLCJ0eXAiOiJCZWFyZXIiLCJhenAiOiJiYWNrZW5kLXNlcnZpY2UiLCJyZWFsbV9hY2Nlc3MiOnsicm9sZXMiOlsib2ZmbGluZV9hY2Nlc3MiXX0sInJlc291cmNlX2FjY2VzcyI6eyJiYWNrZW5kLXNlcnZpY2UiOnsicm9sZXMiOlsidW1hX3Byb3RlY3Rpb24iXX0sImFjY291bnQiOnsicm9sZXMiOlsibWFuYWdlLWFjY291bnQiLCJtYW5hZ2UtYWNjb3VudC1saW5rcyIsInZpZXctcHJvZmlsZSJdfX0sInNjb3BlIjoiZW1haWwgcHJvZmlsZSIsImNsaWVudEhvc3QiOiIxNzIuMTcuMC4xIiwiZW1haWxfdmVyaWZpZWQiOmZhbHNlLCJjbGllbnRJZCI6ImJhY2tlbmQtc2VydmljZSIsInByZWZlcnJlZF91c2VybmFtZSI6InNlcnZpY2UtYWNjb3VudC1iYWNrZW5kLXNlcnZpY2UiLCJjbGllbnRBZGRyZXNzIjoiMTcyLjE3LjAuMSIsImVtYWlsIjoic2VydmljZS1hY2NvdW50LWJhY2tlbmQtc2VydmljZUBwbGFjZWhvbGRlci5vcmcifQ.VRcdoJQO5KWeDFprl6g21Gp9lAqLH1GUAegZPslI9lcL7wdEDLauleTs7cr9ODvXpBbbWVZirP445H3bIfEpyZ2UiKeoEYB6WvR2r_hIHCbNGrV9klkCVjQSuCtdB-Zf3OWHXctz_warlNXF4i4VLtkettlxeGRTVpqT-_lO-y2PhHVNe7imEcnceoKWZQe-Z0JBAJ1Gs2_mj_vgL8V2ZKAd7x0uuAcNyqo4Kmvqh75vkhIuGYAbWfY--wdv8cuphNpbKCGoz27n-D_Im8tW00B1_twctwXo8yfZHp46o1yERbTCS1Xu_eBFufKB21au6omxneyKSD47AfHLR_ymvg..Host: localhost:43279..Connection: Keep-Alive....
# #
# T 127.0.0.1:43279 -> 127.0.0.1:39282 [AP] #127
# HTTP/1.1 200 OK..Referrer-Policy: no-referrer..X-Frame-Options: SAMEORIGIN..Strict-Transport-Security: max-age=31536000; includeSubDomains..Cache-Control: no-cache..X-Content-Type-Options: nosniff..X-XSS-Protection: 1; mode=block..Content-Type: application/json..content-length: 236....[{"name":"usersme","owner":{"id":"0ac5df91-e044-4051-bd03-106a3a5fb9cc","name":"backend-service"},"ownerManagedAccess":false,"displayName":"usersme","attributes":{},"_id":"179611c3-be58-4ba2-95b2-4aacda3cc0f1","uris":["/api/users/me"]}]
# #
# T 127.0.0.1:39282 -> 127.0.0.1:43279 [AP] #128
# POST /realms/quarkus/protocol/openid-connect/token HTTP/1.1..Authorization: Basic YmFja2VuZC1zZXJ2aWNlOnNlY3JldA==..Content-Length: 1231..Content-Type: application/x-www-form-urlencoded; charset=UTF-8..Host: localhost:43279..Connection: Keep-Alive....
# #
# T 127.0.0.1:39282 -> 127.0.0.1:43279 [AP] #129
# audience=backend-service&grant_type=urn%3Aietf%3Aparams%3Aoauth%3Agrant-type%3Auma-ticket&permission=179611c3-be58-4ba2-95b2-4aacda3cc0f1&subject_token=eyJhbGciOiJSUzI1NiIsInR5cCIgOiAiSldUIiwia2lkIiA6ICJjZklBRE5feHhDSm1Wa1d5Ti1QTlhFRXZNVVdzMnI2OEN4dG1oRUROelhVIn0.eyJleHAiOjE2NTcxMzgzNzYsImlhdCI6MTY1NzEzODA3NiwiYXV0aF90aW1lIjoxNjU3MTM4MDc2LCJqdGkiOiI0ZjMyYzljNS05NzY3LTQ0YzAtOTBlNi1kZmJhNjFmMmJmNDgiLCJpc3MiOiJodHRwOi8vbG9jYWxob3N0OjQzMjc5L3JlYWxtcy9xdWFya3VzIiwic3ViIjoiZWI0MTIzYTMtYjcyMi00Nzk4LTlhZjUtODk1N2Y4MjM2NTdhIiwidHlwIjoiQmVhcmVyIiwiYXpwIjoiYmFja2VuZC1zZXJ2aWNlIiwibm9uY2UiOiI5SklBc2RhIiwic2Vzc2lvbl9zdGF0ZSI6IjBlZTVkNjRmLWYxM2EtNDg1Yy1hNzBhLTJmMDA0YjQ3MWIwNyIsInJlYWxtX2FjY2VzcyI6eyJyb2xlcyI6WyJ1c2VyIl19LCJzY29wZSI6Im9wZW5pZCBlbWFpbCBwcm9maWxlIiwic2lkIjoiMGVlNWQ2NGYtZjEzYS00ODVjLWE3MGEtMmYwMDRiNDcxYjA3IiwiZW1haWxfdmVyaWZpZWQiOmZhbHNlLCJwcmVmZXJyZWRfdXNlcm5hbWUiOiJhbGljZSJ9.Jjz0GYaApd_b05YOSe_Eq0tsFQk7qd-vGgIyjdeeEJAAA5xhS2f_DMpwiBLAPibk-gFnGf5CuLynA0z5bxE5vlbQHX9-aKxH8AEixDFkUsnfn7PN1NQtiG-Jj5cfuLxOShy2X2EOScZVTdRc9PgO_Xsb7ltDrtwtQ6eKOYVt-mqd7PR3cWJHjTldh4tiibjrKPccyZNBNC3W03pno3WLRVaG09Kotcsj1e5oS0safAcxACa3CSfchnY88E7Qwi1mva2F4X-gUar5-Zn2yT2iu8vqH3BCHzz8frAsYv1dOougRBaMfayLiFgKo7ZjsOI8OfPDSm7PEOMFEgHEHIloiw
# ##
# T 127.0.0.1:43279 -> 127.0.0.1:39282 [AP] #131
# HTTP/1.1 403 Forbidden..Referrer-Policy: no-referrer..X-Frame-Options: SAMEORIGIN..Strict-Transport-Security: max-age=31536000; includeSubDomains..Cache-Control: no-store..X-Content-Type-Options: nosniff..Pragma: no-cache..X-XSS-Protection: 1; mode=block..Content-Type: application/json..content-length: 62....{"error":"access_denied","error_description":"not_authorized"}
########
#### quarkus for example
# https://quarkus.io/guides/security-keycloak-authorization
# from that guide, we ultimately found that we hit GET /resource_set described at:
# https://github.com/keycloak/keycloak-documentation/blob/main/authorization_services/topics/service-protection-resources-api-papi.adoc
# when we get the resource, we just hit the token endpoint and provide the resource and scope, and token will say pass or fail.
# More info:
# * https://stackoverflow.com/a/58861610/6090676
# * https://github.com/keycloak/keycloak/discussions/10044

View File

@ -0,0 +1,10 @@
#!/usr/bin/env bash
function error_handler() {
>&2 echo "Exited with BAD EXIT CODE '${2}' in ${0} script at line: ${1}."
exit "$2"
}
trap 'error_handler ${LINENO} $?' ERR
set -o errtrace -o errexit -o nounset -o pipefail
curl -v -F key1=value1 -F upload=@localfilename URL

View File

@ -0,0 +1,26 @@
#!/usr/bin/env bash
function error_handler() {
>&2 echo "Exited with BAD EXIT CODE '${2}' in ${0} script at line: ${1}."
exit "$2"
}
trap 'error_handler ${LINENO} $?' ERR
set -o errtrace -o errexit -o nounset -o pipefail
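# usage: "c" posts an empty body; a single digit posts that task_identifier;
# with no args, recreate the db and walk through the demo order flow below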
if [[ "${1:-}" == "c" ]]; then
curl --fail localhost:5000/run_process -H "Content-type: application/json" -X POST -d '{}'
elif grep -qE '^[0-9]+$' <<<"${1:-}" ; then
curl --fail localhost:5000/run_process -H "Content-type: application/json" -X POST -d "{ \"task_identifier\": \"${1}\"}"
else
./bin/recreate_db clean
curl --silent --fail localhost:5000/run_process -H "Content-type: application/json" -X POST -d '{ "task_identifier": "1", "answer": {"Product Name": "G", "Quantity": "2"}}' | jq .
curl --silent --fail localhost:5000/run_process -H "Content-type: application/json" -X POST -d '{ "task_identifier": "1", "answer": {"Sleeve Type": "Short"}}' | jq .
curl --silent --fail localhost:5000/run_process -H "Content-type: application/json" -X POST -d '{ "task_identifier": "1", "answer": {"Continue shopping?": "N"}}' | jq .
curl --silent --fail localhost:5000/run_process -H "Content-type: application/json" -X POST -d '{ "task_identifier": "1", "answer": {"Shipping Method": "Overnight"}}' | jq .
curl --silent --fail localhost:5000/run_process -H "Content-type: application/json" -X POST -d '{ "task_identifier": "1", "answer": {"Shipping Address": "Somewhere"}}' | jq .
curl --silent --fail localhost:5000/run_process -H "Content-type: application/json" -X POST -d '{ "task_identifier": "1", "answer": {"Place Order": "Y"}}' | jq .
curl --silent --fail localhost:5000/run_process -H "Content-type: application/json" -X POST -d '{ "task_identifier": "1", "answer": {"Card Number": "MY_CARD"}}' | jq .
curl --silent --fail localhost:5000/run_process -H "Content-type: application/json" -X POST -d '{ "task_identifier": "2", "answer": {"Was the customer charged?": "Y"}}' | jq .
curl --silent --fail localhost:5000/run_process -H "Content-type: application/json" -X POST -d '{ "task_identifier": "1", "answer": {"Was the product available?": "Y"}}' | jq .
curl --silent --fail localhost:5000/run_process -H "Content-type: application/json" -X POST -d '{ "task_identifier": "1", "answer": {"Was the order shipped?": "Y"}}' | jq .
fi

View File

@ -0,0 +1,24 @@
#!/usr/bin/env bash
function error_handler() {
>&2 echo "Exited with BAD EXIT CODE '${2}' in ${0} script at line: ${1}."
exit "$2"
}
trap 'error_handler ${LINENO} $?' ERR
set -o errtrace -o errexit -o nounset -o pipefail
max_attempts="${1:-}"
if [[ -z "$max_attempts" ]]; then
max_attempts=100
fi
echo "waiting for backend to come up..."
attempts=0
while [[ "$(curl -s -o /dev/null -w '%{http_code}' http://localhost:7002/realms/master/.well-known/openid-configuration)" != "200" ]]; do
if [[ "$attempts" -gt "$max_attempts" ]]; then
>&2 echo "ERROR: Server not up after $max_attempts attempts. There is probably a problem"
exit 1
fi
attempts=$(( attempts + 1 ))
sleep 1
done

View File

@ -0,0 +1,24 @@
#!/usr/bin/env bash
function error_handler() {
>&2 echo "Exited with BAD EXIT CODE '${2}' in ${0} script at line: ${1}."
exit "$2"
}
trap 'error_handler ${LINENO} $?' ERR
set -o errtrace -o errexit -o nounset -o pipefail
max_attempts="${1:-}"
if [[ -z "$max_attempts" ]]; then
max_attempts=100
fi
echo "waiting for backend to come up..."
attempts=0
while [[ "$(curl -s -o /dev/null -w '%{http_code}' http://localhost:7000/v1.0/status)" != "200" ]]; do
if [[ "$attempts" -gt "$max_attempts" ]]; then
>&2 echo "ERROR: Server not up after $max_attempts attempts. There is probably a problem"
exit 1
fi
attempts=$(( attempts + 1 ))
sleep 1
done

View File

@ -0,0 +1,9 @@
comment: false
coverage:
status:
project:
default:
target: "100"
patch:
default:
target: "100"

View File

@ -0,0 +1,106 @@
"""Conftest."""
import os
import shutil
import pytest
from flask.app import Flask
from flask_bpmn.models.db import db
from flask_bpmn.models.db import SpiffworkflowBaseDBModel
from tests.spiffworkflow_backend.helpers.base_test import BaseTest
from tests.spiffworkflow_backend.helpers.test_data import load_test_spec
from spiffworkflow_backend.models.process_instance import ProcessInstanceModel
from spiffworkflow_backend.services.process_instance_processor import (
ProcessInstanceProcessor,
)
from spiffworkflow_backend.services.process_instance_service import (
ProcessInstanceService,
)
from spiffworkflow_backend.services.process_model_service import ProcessModelService
# We need to call this before importing spiffworkflow_backend
# otherwise typeguard cannot work, hence the noqa: E402
if os.environ.get("RUN_TYPEGUARD") == "true":
from typeguard.importhook import install_import_hook
install_import_hook(packages="spiffworkflow_backend")
from spiffworkflow_backend import create_app # noqa: E402
@pytest.fixture(scope="session")
def app() -> Flask:
"""App."""
os.environ["SPIFFWORKFLOW_BACKEND_ENV"] = "testing"
# os.environ["FLASK_SESSION_SECRET_KEY"] = "this_is_testing_secret_key"
os.environ["FLASK_SESSION_SECRET_KEY"] = "super_secret_key"
app = create_app()
# NOTE: set this here since nox shoves tests and src code to
# different places and this allows us to know exactly where we are at the start
app.config["BPMN_SPEC_ABSOLUTE_DIR"] = os.path.join(
os.path.dirname(__file__),
"tests",
"spiffworkflow_backend",
"files",
"bpmn_specs",
)
return app
@pytest.fixture()
def with_db_and_bpmn_file_cleanup() -> None:
"""Process_group_resource."""
for model in SpiffworkflowBaseDBModel._all_subclasses():
db.session.query(model).delete()
try:
yield
finally:
process_model_service = ProcessModelService()
if os.path.exists(process_model_service.root_path()):
shutil.rmtree(process_model_service.root_path())
@pytest.fixture()
def setup_process_instances_for_reports() -> list[ProcessInstanceModel]:
"""Setup_process_instances_for_reports."""
user = BaseTest.find_or_create_user()
process_group_id = "runs_without_input"
process_model_id = "sample"
load_test_spec(process_group_id=process_group_id, process_model_id=process_model_id)
process_instances = []
for data in [kay(), ray(), jay()]:
process_instance = ProcessInstanceService.create_process_instance(
process_group_identifier=process_group_id,
process_model_identifier=process_model_id,
user=user,
)
processor = ProcessInstanceProcessor(process_instance)
processor.slam_in_data(data)
process_instance.status = "complete"
db.session.add(process_instance)
db.session.commit()
process_instances.append(process_instance)
return process_instances
def kay() -> dict:
"""Kay."""
return {"name": "kay", "grade_level": 2, "test_score": 10}
def ray() -> dict:
"""Ray."""
return {"name": "ray", "grade_level": 1, "test_score": 9}
def jay() -> dict:
"""Jay."""
return {"name": "jay", "grade_level": 2, "test_score": 8}

View File

@ -0,0 +1,100 @@
# Why we are running with network_mode: host
# Wow this has been awful. We run three things in docker: mysql, keycloak, and the backend server.
# The backend-server needs to talk to the other two.
#
# In order to talk to keycloak, it needs to go through localhost so that it can communicate with
# keycloak using the same url as the frontend so that tokens can be properly validated.
# If the domains are different, keycloak invalidates the token. There may be a way to change
# this but I didn't find it.
#
# In order for the backend server to talk to the mysql server, they need to be on the same network.
# I tried splitting it out where the mysql runs on a custom network and the backend runs on both
# the custom network AND with localhost. Nothing I tried worked and googling didn't help. They
# only ever mentioned one thing or using host.docker.internal which would cause the domains to
# be different.
#
# So instead we are running with both the mysql server and the backend server in host network mode.
# There may be a better way to do this but if it works, then it works.
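# note: with network_mode: host, the "ports" mappings below are ignored by docker;
# the backend reaches mysql at localhost:7003 and is itself reachable at localhost:7000.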
version: "3.8"
services:
db:
container_name: db
image: mysql:8.0.29
platform: linux/amd64
cap_add:
- SYS_NICE
restart: "${SPIFFWORKFLOW_BACKEND_DATABASE_DOCKER_RESTART_POLICY:-no}"
environment:
- MYSQL_DATABASE=${SPIFFWORKFLOW_BACKEND_DATABASE_NAME:-spiffworkflow_backend_development}
- MYSQL_ROOT_PASSWORD=${SPIFFWORKFLOW_BACKEND_MYSQL_ROOT_DATABASE:-my-secret-pw}
- MYSQL_TCP_PORT=7003
network_mode: host
ports:
- "7003"
volumes:
- spiffworkflow_backend:/var/lib/mysql
healthcheck:
test: mysql --user=root --password=${SPIFFWORKFLOW_BACKEND_MYSQL_ROOT_DATABASE:-my-secret-pw} -e 'select 1' ${SPIFFWORKFLOW_BACKEND_DATABASE_NAME:-spiffworkflow_backend_development}
interval: 10s
timeout: 5s
retries: 10
spiffworkflow-backend: &spiffworkflow-backend
container_name: spiffworkflow-backend
profiles:
- run
depends_on:
db:
condition: service_healthy
build:
context: .
environment:
- APPLICATION_ROOT=/
- SPIFFWORKFLOW_BACKEND_ENV=${SPIFFWORKFLOW_BACKEND_ENV:-development}
- FLASK_DEBUG=0
- FLASK_SESSION_SECRET_KEY=${FLASK_SESSION_SECRET_KEY:-super_secret_key}
- OPEN_ID_SERVER_URL=${OPEN_ID_SERVER_URL:-http://localhost:7002}
- SPIFFWORKFLOW_FRONTEND_URL=${SPIFFWORKFLOW_FRONTEND_URL:-http://localhost:7001}
- SPIFFWORKFLOW_BACKEND_URL=${SPIFFWORKFLOW_BACKEND_URL:-http://localhost:7000}
- SPIFFWORKFLOW_BACKEND_PORT=7000
- SPIFFWORKFLOW_BACKEND_UPGRADE_DB=true
- SPIFFWORKFLOW_BACKEND_DATABASE_URI=mysql+mysqlconnector://root:${SPIFFWORKFLOW_BACKEND_MYSQL_ROOT_DATABASE:-my-secret-pw}@localhost:7003/${SPIFFWORKFLOW_BACKEND_DATABASE_NAME:-spiffworkflow_backend_development}
- BPMN_SPEC_ABSOLUTE_DIR=/app/process_models
- SPIFFWORKFLOW_BACKEND_LOAD_FIXTURE_DATA=${SPIFFWORKFLOW_BACKEND_LOAD_FIXTURE_DATA:-false}
- PROCESS_WAITING_MESSAGES=true
ports:
- "7000:7000"
network_mode: host
volumes:
- ${BPMN_SPEC_ABSOLUTE_DIR:-./../sample-process-models}:/app/process_models
- ./log:/app/log
healthcheck:
test: curl localhost:7000/v1.0/status --fail
interval: 10s
timeout: 5s
retries: 20
spiffworkflow-backend-local-debug:
<<: *spiffworkflow-backend
container_name: spiffworkflow-backend-local-debug
profiles:
- debug
volumes:
- ${BPMN_SPEC_ABSOLUTE_DIR:-./../sample-process-models}:/app/process_models
- ./:/app
command: /app/bin/boot_in_docker_debug_mode
# the docs say we can disable healthchecks with disable: true
# but that returns a bad exit code, so we set up one that doesn't matter
# since there is nothing to healthcheck in this case
# https://docs.docker.com/compose/compose-file/compose-file-v3/#healthcheck
healthcheck:
test: cat /etc/hosts
interval: 10s
timeout: 5s
retries: 20
volumes:
spiffworkflow_backend:
driver: local

View File

@ -0,0 +1 @@
.. include:: ../CODE_OF_CONDUCT.rst

View File

@ -0,0 +1,17 @@
"""Sphinx configuration."""
from datetime import datetime
project = "Spiffworkflow Backend"
author = "Sartography"
copyright = f"{datetime.now().year}, {author}"
extensions = [
"sphinx.ext.napoleon",
"autoapi.extension",
"sphinx_click",
]
# https://github.com/readthedocs/sphinx-autoapi
autoapi_type = "python"
autoapi_dirs = ["../src"]
html_theme = "furo"

View File

@ -0,0 +1,4 @@
.. include:: ../CONTRIBUTING.rst
:end-before: github-only
.. _Code of Conduct: codeofconduct.html

View File

@ -0,0 +1,16 @@
.. include:: ../README.rst
:end-before: github-only
.. _Contributor Guide: contributing.html
.. _Usage: usage.html
.. toctree::
:hidden:
:maxdepth: 1
usage
reference
contributing
Code of Conduct <codeofconduct>
License <license>
Changelog <https://github.com/sartography/spiffworkflow-backend/releases>

View File

@ -0,0 +1 @@
.. include:: ../LICENSE.rst

View File

@ -0,0 +1,9 @@
Reference
=========
spiffworkflow_backend
---------------------
.. automodule:: spiffworkflow_backend
:members:

View File

@ -0,0 +1,3 @@
furo==2022.9.29
sphinx==5.2.3
sphinx-click==4.3.0

View File

@ -0,0 +1,6 @@
Usage
=====
.. click:: spiffworkflow_backend.__main__:main
:prog: spiffworkflow-backend
:nested: full

View File

@ -0,0 +1 @@
Single-database configuration for Flask.

View File

@ -0,0 +1,50 @@
# A generic, single database configuration.
[alembic]
# template used to generate migration files
# file_template = %%(rev)s_%%(slug)s
# set to 'true' to run the environment during
# the 'revision' command, regardless of autogenerate
# revision_environment = false
# Logging configuration
[loggers]
keys = root,sqlalchemy,alembic,flask_migrate
[handlers]
keys = console
[formatters]
keys = generic
[logger_root]
level = WARN
handlers = console
qualname =
[logger_sqlalchemy]
level = WARN
handlers =
qualname = sqlalchemy.engine
[logger_alembic]
level = INFO
handlers =
qualname = alembic
[logger_flask_migrate]
level = INFO
handlers =
qualname = flask_migrate
[handler_console]
class = StreamHandler
args = (sys.stderr,)
level = NOTSET
formatter = generic
[formatter_generic]
format = %(levelname)-5.5s [%(name)s] %(message)s
datefmt = %H:%M:%S

View File

@ -0,0 +1,89 @@
import logging
from logging.config import fileConfig
from flask import current_app
from alembic import context
# this is the Alembic Config object, which provides
# access to the values within the .ini file in use.
config = context.config
# Interpret the config file for Python logging.
# This line sets up loggers basically.
fileConfig(config.config_file_name)
logger = logging.getLogger('alembic.env')
# add your model's MetaData object here
# for 'autogenerate' support
# from myapp import mymodel
# target_metadata = mymodel.Base.metadata
config.set_main_option(
'sqlalchemy.url',
str(current_app.extensions['migrate'].db.get_engine().url).replace(
'%', '%%'))
target_metadata = current_app.extensions['migrate'].db.metadata
# other values from the config, defined by the needs of env.py,
# can be acquired:
# my_important_option = config.get_main_option("my_important_option")
# ... etc.
def run_migrations_offline():
"""Run migrations in 'offline' mode.
This configures the context with just a URL
and not an Engine, though an Engine is acceptable
here as well. By skipping the Engine creation
we don't even need a DBAPI to be available.
Calls to context.execute() here emit the given string to the
script output.
"""
url = config.get_main_option("sqlalchemy.url")
context.configure(
url=url, target_metadata=target_metadata, literal_binds=True
)
with context.begin_transaction():
context.run_migrations()
def run_migrations_online():
"""Run migrations in 'online' mode.
In this scenario we need to create an Engine
and associate a connection with the context.
"""
# this callback is used to prevent an auto-migration from being generated
# when there are no changes to the schema
# reference: http://alembic.zzzcomputing.com/en/latest/cookbook.html
def process_revision_directives(context, revision, directives):
if getattr(config.cmd_opts, 'autogenerate', False):
script = directives[0]
if script.upgrade_ops.is_empty():
directives[:] = []
logger.info('No changes in schema detected.')
connectable = current_app.extensions['migrate'].db.get_engine()
with connectable.connect() as connection:
context.configure(
connection=connection,
target_metadata=target_metadata,
process_revision_directives=process_revision_directives,
**current_app.extensions['migrate'].configure_args
)
with context.begin_transaction():
context.run_migrations()
if context.is_offline_mode():
run_migrations_offline()
else:
run_migrations_online()

View File

@ -0,0 +1,24 @@
"""${message}
Revision ID: ${up_revision}
Revises: ${down_revision | comma,n}
Create Date: ${create_date}
"""
from alembic import op
import sqlalchemy as sa
${imports if imports else ""}
# revision identifiers, used by Alembic.
revision = ${repr(up_revision)}
down_revision = ${repr(down_revision)}
branch_labels = ${repr(branch_labels)}
depends_on = ${repr(depends_on)}
def upgrade():
${upgrades if upgrades else "pass"}
def downgrade():
${downgrades if downgrades else "pass"}

View File

@ -0,0 +1,350 @@
"""empty message
Revision ID: 88e30afd19ac
Revises:
Create Date: 2022-10-11 09:39:40.882490
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = '88e30afd19ac'
down_revision = None
branch_labels = None
depends_on = None
def upgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.create_table('admin_session',
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('token', sa.String(length=50), nullable=True),
sa.Column('admin_impersonate_uid', sa.String(length=50), nullable=True),
sa.PrimaryKeyConstraint('id'),
sa.UniqueConstraint('token')
)
op.create_table('bpmn_process_id_lookup',
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('bpmn_process_identifier', sa.String(length=255), nullable=True),
sa.Column('bpmn_file_relative_path', sa.String(length=255), nullable=True),
sa.PrimaryKeyConstraint('id')
)
op.create_index(op.f('ix_bpmn_process_id_lookup_bpmn_process_identifier'), 'bpmn_process_id_lookup', ['bpmn_process_identifier'], unique=True)
op.create_table('group',
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('name', sa.String(length=255), nullable=True),
sa.Column('identifier', sa.String(length=255), nullable=True),
sa.PrimaryKeyConstraint('id')
)
op.create_table('message_model',
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('identifier', sa.String(length=50), nullable=True),
sa.Column('name', sa.String(length=50), nullable=True),
sa.PrimaryKeyConstraint('id')
)
op.create_index(op.f('ix_message_model_identifier'), 'message_model', ['identifier'], unique=True)
op.create_index(op.f('ix_message_model_name'), 'message_model', ['name'], unique=True)
op.create_table('permission_target',
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('uri', sa.String(length=255), nullable=False),
sa.PrimaryKeyConstraint('id'),
sa.UniqueConstraint('uri')
)
op.create_table('user',
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('username', sa.String(length=255), nullable=False),
sa.Column('uid', sa.String(length=50), nullable=True),
sa.Column('service', sa.String(length=50), nullable=False),
sa.Column('service_id', sa.String(length=255), nullable=False),
sa.Column('name', sa.String(length=255), nullable=True),
sa.Column('email', sa.String(length=255), nullable=True),
sa.PrimaryKeyConstraint('id'),
sa.UniqueConstraint('service', 'service_id', name='service_key'),
sa.UniqueConstraint('uid'),
sa.UniqueConstraint('username')
)
op.create_table('message_correlation_property',
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('identifier', sa.String(length=50), nullable=True),
sa.Column('message_model_id', sa.Integer(), nullable=False),
sa.Column('updated_at_in_seconds', sa.Integer(), nullable=True),
sa.Column('created_at_in_seconds', sa.Integer(), nullable=True),
sa.ForeignKeyConstraint(['message_model_id'], ['message_model.id'], ),
sa.PrimaryKeyConstraint('id'),
sa.UniqueConstraint('identifier', 'message_model_id', name='message_correlation_property_unique')
)
op.create_index(op.f('ix_message_correlation_property_identifier'), 'message_correlation_property', ['identifier'], unique=False)
op.create_table('message_triggerable_process_model',
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('message_model_id', sa.Integer(), nullable=False),
sa.Column('process_model_identifier', sa.String(length=50), nullable=False),
sa.Column('process_group_identifier', sa.String(length=50), nullable=False),
sa.Column('updated_at_in_seconds', sa.Integer(), nullable=True),
sa.Column('created_at_in_seconds', sa.Integer(), nullable=True),
sa.ForeignKeyConstraint(['message_model_id'], ['message_model.id'], ),
sa.PrimaryKeyConstraint('id'),
sa.UniqueConstraint('message_model_id')
)
op.create_index(op.f('ix_message_triggerable_process_model_process_group_identifier'), 'message_triggerable_process_model', ['process_group_identifier'], unique=False)
op.create_index(op.f('ix_message_triggerable_process_model_process_model_identifier'), 'message_triggerable_process_model', ['process_model_identifier'], unique=False)
op.create_table('principal',
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('user_id', sa.Integer(), nullable=True),
sa.Column('group_id', sa.Integer(), nullable=True),
sa.CheckConstraint('NOT(user_id IS NULL AND group_id IS NULL)'),
sa.ForeignKeyConstraint(['group_id'], ['group.id'], ),
sa.ForeignKeyConstraint(['user_id'], ['user.id'], ),
sa.PrimaryKeyConstraint('id'),
sa.UniqueConstraint('group_id'),
sa.UniqueConstraint('user_id')
)
op.create_table('process_instance',
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('process_model_identifier', sa.String(length=50), nullable=False),
sa.Column('process_group_identifier', sa.String(length=50), nullable=False),
sa.Column('process_initiator_id', sa.Integer(), nullable=False),
sa.Column('bpmn_json', sa.JSON(), nullable=True),
sa.Column('start_in_seconds', sa.Integer(), nullable=True),
sa.Column('end_in_seconds', sa.Integer(), nullable=True),
sa.Column('updated_at_in_seconds', sa.Integer(), nullable=True),
sa.Column('created_at_in_seconds', sa.Integer(), nullable=True),
sa.Column('status', sa.String(length=50), nullable=True),
sa.Column('bpmn_version_control_type', sa.String(length=50), nullable=True),
sa.Column('bpmn_version_control_identifier', sa.String(length=255), nullable=True),
sa.ForeignKeyConstraint(['process_initiator_id'], ['user.id'], ),
sa.PrimaryKeyConstraint('id')
)
op.create_index(op.f('ix_process_instance_process_group_identifier'), 'process_instance', ['process_group_identifier'], unique=False)
op.create_index(op.f('ix_process_instance_process_model_identifier'), 'process_instance', ['process_model_identifier'], unique=False)
op.create_table('process_instance_report',
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('identifier', sa.String(length=50), nullable=False),
sa.Column('process_model_identifier', sa.String(length=50), nullable=False),
sa.Column('process_group_identifier', sa.String(length=50), nullable=False),
sa.Column('report_metadata', sa.JSON(), nullable=True),
sa.Column('created_by_id', sa.Integer(), nullable=False),
sa.Column('created_at_in_seconds', sa.Integer(), nullable=True),
sa.Column('updated_at_in_seconds', sa.Integer(), nullable=True),
sa.ForeignKeyConstraint(['created_by_id'], ['user.id'], ),
sa.PrimaryKeyConstraint('id'),
sa.UniqueConstraint('process_group_identifier', 'process_model_identifier', 'identifier', name='process_instance_report_unique')
)
op.create_index(op.f('ix_process_instance_report_identifier'), 'process_instance_report', ['identifier'], unique=False)
op.create_index(op.f('ix_process_instance_report_process_group_identifier'), 'process_instance_report', ['process_group_identifier'], unique=False)
op.create_index(op.f('ix_process_instance_report_process_model_identifier'), 'process_instance_report', ['process_model_identifier'], unique=False)
op.create_table('secret',
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('key', sa.String(length=50), nullable=False),
sa.Column('value', sa.String(length=255), nullable=False),
sa.Column('creator_user_id', sa.Integer(), nullable=False),
sa.ForeignKeyConstraint(['creator_user_id'], ['user.id'], ),
sa.PrimaryKeyConstraint('id'),
sa.UniqueConstraint('key')
)
op.create_table('user_group_assignment',
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('user_id', sa.Integer(), nullable=False),
sa.Column('group_id', sa.Integer(), nullable=False),
sa.ForeignKeyConstraint(['group_id'], ['group.id'], ),
sa.ForeignKeyConstraint(['user_id'], ['user.id'], ),
sa.PrimaryKeyConstraint('id'),
sa.UniqueConstraint('user_id', 'group_id', name='user_group_assignment_unique')
)
op.create_table('active_task',
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('process_instance_id', sa.Integer(), nullable=False),
sa.Column('assigned_principal_id', sa.Integer(), nullable=True),
sa.Column('form_file_name', sa.String(length=50), nullable=True),
sa.Column('ui_form_file_name', sa.String(length=50), nullable=True),
sa.Column('updated_at_in_seconds', sa.Integer(), nullable=True),
sa.Column('created_at_in_seconds', sa.Integer(), nullable=True),
sa.Column('task_id', sa.String(length=50), nullable=True),
sa.Column('task_name', sa.String(length=50), nullable=True),
sa.Column('task_title', sa.String(length=50), nullable=True),
sa.Column('task_type', sa.String(length=50), nullable=True),
sa.Column('task_status', sa.String(length=50), nullable=True),
sa.Column('process_model_display_name', sa.String(length=255), nullable=True),
sa.Column('task_data', sa.Text(), nullable=True),
sa.ForeignKeyConstraint(['assigned_principal_id'], ['principal.id'], ),
sa.ForeignKeyConstraint(['process_instance_id'], ['process_instance.id'], ),
sa.PrimaryKeyConstraint('id'),
sa.UniqueConstraint('task_id', 'process_instance_id', name='active_task_unique')
)
op.create_table('file',
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('name', sa.String(length=50), nullable=False),
sa.Column('type', sa.String(length=50), nullable=False),
sa.Column('content_type', sa.String(length=50), nullable=False),
sa.Column('process_instance_id', sa.Integer(), nullable=True),
sa.Column('task_spec', sa.String(length=50), nullable=True),
sa.Column('irb_doc_code', sa.String(length=50), nullable=False),
sa.Column('md5_hash', sa.String(length=50), nullable=False),
sa.Column('data', sa.LargeBinary(), nullable=True),
sa.Column('size', sa.Integer(), nullable=True),
sa.Column('updated_at_in_seconds', sa.Integer(), nullable=True),
sa.Column('created_at_in_seconds', sa.Integer(), nullable=True),
sa.Column('user_uid', sa.String(length=50), nullable=True),
sa.Column('archived', sa.Boolean(), nullable=True),
sa.ForeignKeyConstraint(['process_instance_id'], ['process_instance.id'], ),
sa.ForeignKeyConstraint(['user_uid'], ['user.uid'], ),
sa.PrimaryKeyConstraint('id')
)
op.create_table('message_correlation',
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('process_instance_id', sa.Integer(), nullable=False),
sa.Column('message_correlation_property_id', sa.Integer(), nullable=False),
sa.Column('name', sa.String(length=255), nullable=False),
sa.Column('value', sa.String(length=255), nullable=False),
sa.Column('updated_at_in_seconds', sa.Integer(), nullable=True),
sa.Column('created_at_in_seconds', sa.Integer(), nullable=True),
sa.ForeignKeyConstraint(['message_correlation_property_id'], ['message_correlation_property.id'], ),
sa.ForeignKeyConstraint(['process_instance_id'], ['process_instance.id'], ),
sa.PrimaryKeyConstraint('id'),
sa.UniqueConstraint('process_instance_id', 'message_correlation_property_id', 'name', name='message_instance_id_name_unique')
)
op.create_index(op.f('ix_message_correlation_message_correlation_property_id'), 'message_correlation', ['message_correlation_property_id'], unique=False)
op.create_index(op.f('ix_message_correlation_name'), 'message_correlation', ['name'], unique=False)
op.create_index(op.f('ix_message_correlation_process_instance_id'), 'message_correlation', ['process_instance_id'], unique=False)
op.create_index(op.f('ix_message_correlation_value'), 'message_correlation', ['value'], unique=False)
op.create_table('message_instance',
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('process_instance_id', sa.Integer(), nullable=False),
sa.Column('message_model_id', sa.Integer(), nullable=False),
sa.Column('message_type', sa.String(length=20), nullable=False),
sa.Column('payload', sa.JSON(), nullable=True),
sa.Column('status', sa.String(length=20), nullable=False),
sa.Column('failure_cause', sa.Text(), nullable=True),
sa.Column('updated_at_in_seconds', sa.Integer(), nullable=True),
sa.Column('created_at_in_seconds', sa.Integer(), nullable=True),
sa.ForeignKeyConstraint(['message_model_id'], ['message_model.id'], ),
sa.ForeignKeyConstraint(['process_instance_id'], ['process_instance.id'], ),
sa.PrimaryKeyConstraint('id')
)
op.create_table('permission_assignment',
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('principal_id', sa.Integer(), nullable=False),
sa.Column('permission_target_id', sa.Integer(), nullable=False),
sa.Column('grant_type', sa.Enum('permit', 'deny', name='permitdeny'), nullable=True),
sa.Column('permission', sa.Enum('create', 'read', 'update', 'delete', 'list', 'instantiate', name='permission'), nullable=True),
sa.ForeignKeyConstraint(['permission_target_id'], ['permission_target.id'], ),
sa.ForeignKeyConstraint(['principal_id'], ['principal.id'], ),
sa.PrimaryKeyConstraint('id'),
sa.UniqueConstraint('principal_id', 'permission_target_id', 'permission', name='permission_assignment_uniq')
)
op.create_table('secret_allowed_process',
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('secret_id', sa.Integer(), nullable=False),
sa.Column('allowed_relative_path', sa.String(length=500), nullable=False),
sa.ForeignKeyConstraint(['secret_id'], ['secret.id'], ),
sa.PrimaryKeyConstraint('id'),
sa.UniqueConstraint('secret_id', 'allowed_relative_path', name='unique_secret_path')
)
op.create_table('spiff_logging',
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('process_instance_id', sa.Integer(), nullable=False),
sa.Column('bpmn_process_identifier', sa.String(length=255), nullable=False),
sa.Column('bpmn_task_identifier', sa.String(length=255), nullable=False),
sa.Column('bpmn_task_name', sa.String(length=255), nullable=True),
sa.Column('bpmn_task_type', sa.String(length=255), nullable=True),
sa.Column('spiff_task_guid', sa.String(length=50), nullable=False),
sa.Column('timestamp', sa.DECIMAL(precision=17, scale=6), nullable=False),
sa.Column('message', sa.String(length=255), nullable=True),
sa.Column('current_user_id', sa.Integer(), nullable=True),
sa.ForeignKeyConstraint(['current_user_id'], ['user.id'], ),
sa.ForeignKeyConstraint(['process_instance_id'], ['process_instance.id'], ),
sa.PrimaryKeyConstraint('id')
)
op.create_table('task_event',
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('user_id', sa.Integer(), nullable=False),
sa.Column('process_instance_id', sa.Integer(), nullable=False),
sa.Column('spec_version', sa.String(length=50), nullable=True),
sa.Column('action', sa.String(length=50), nullable=True),
sa.Column('task_id', sa.String(length=50), nullable=True),
sa.Column('task_name', sa.String(length=50), nullable=True),
sa.Column('task_title', sa.String(length=50), nullable=True),
sa.Column('task_type', sa.String(length=50), nullable=True),
sa.Column('task_state', sa.String(length=50), nullable=True),
sa.Column('task_lane', sa.String(length=50), nullable=True),
sa.Column('form_data', sa.JSON(), nullable=True),
sa.Column('mi_type', sa.String(length=50), nullable=True),
sa.Column('mi_count', sa.Integer(), nullable=True),
sa.Column('mi_index', sa.Integer(), nullable=True),
sa.Column('process_name', sa.String(length=50), nullable=True),
sa.Column('date', sa.DateTime(timezone=True), nullable=True),
sa.ForeignKeyConstraint(['process_instance_id'], ['process_instance.id'], ),
sa.ForeignKeyConstraint(['user_id'], ['user.id'], ),
sa.PrimaryKeyConstraint('id')
)
op.create_table('data_store',
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('updated_at_in_seconds', sa.Integer(), nullable=True),
sa.Column('key', sa.String(length=50), nullable=False),
sa.Column('process_instance_id', sa.Integer(), nullable=True),
sa.Column('task_spec', sa.String(length=50), nullable=True),
sa.Column('spec_id', sa.String(length=50), nullable=True),
sa.Column('user_id', sa.String(length=50), nullable=True),
sa.Column('file_id', sa.Integer(), nullable=True),
sa.Column('value', sa.String(length=50), nullable=True),
sa.ForeignKeyConstraint(['file_id'], ['file.id'], ),
sa.PrimaryKeyConstraint('id')
)
op.create_table('message_correlation_message_instance',
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('message_instance_id', sa.Integer(), nullable=False),
sa.Column('message_correlation_id', sa.Integer(), nullable=False),
sa.ForeignKeyConstraint(['message_correlation_id'], ['message_correlation.id'], ),
sa.ForeignKeyConstraint(['message_instance_id'], ['message_instance.id'], ),
sa.PrimaryKeyConstraint('id'),
sa.UniqueConstraint('message_instance_id', 'message_correlation_id', name='message_correlation_message_instance_unique')
)
op.create_index(op.f('ix_message_correlation_message_instance_message_correlation_id'), 'message_correlation_message_instance', ['message_correlation_id'], unique=False)
op.create_index(op.f('ix_message_correlation_message_instance_message_instance_id'), 'message_correlation_message_instance', ['message_instance_id'], unique=False)
# ### end Alembic commands ###
def downgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.drop_index(op.f('ix_message_correlation_message_instance_message_instance_id'), table_name='message_correlation_message_instance')
op.drop_index(op.f('ix_message_correlation_message_instance_message_correlation_id'), table_name='message_correlation_message_instance')
op.drop_table('message_correlation_message_instance')
op.drop_table('data_store')
op.drop_table('task_event')
op.drop_table('spiff_logging')
op.drop_table('secret_allowed_process')
op.drop_table('permission_assignment')
op.drop_table('message_instance')
op.drop_index(op.f('ix_message_correlation_value'), table_name='message_correlation')
op.drop_index(op.f('ix_message_correlation_process_instance_id'), table_name='message_correlation')
op.drop_index(op.f('ix_message_correlation_name'), table_name='message_correlation')
op.drop_index(op.f('ix_message_correlation_message_correlation_property_id'), table_name='message_correlation')
op.drop_table('message_correlation')
op.drop_table('file')
op.drop_table('active_task')
op.drop_table('user_group_assignment')
op.drop_table('secret')
op.drop_index(op.f('ix_process_instance_report_process_model_identifier'), table_name='process_instance_report')
op.drop_index(op.f('ix_process_instance_report_process_group_identifier'), table_name='process_instance_report')
op.drop_index(op.f('ix_process_instance_report_identifier'), table_name='process_instance_report')
op.drop_table('process_instance_report')
op.drop_index(op.f('ix_process_instance_process_model_identifier'), table_name='process_instance')
op.drop_index(op.f('ix_process_instance_process_group_identifier'), table_name='process_instance')
op.drop_table('process_instance')
op.drop_table('principal')
op.drop_index(op.f('ix_message_triggerable_process_model_process_model_identifier'), table_name='message_triggerable_process_model')
op.drop_index(op.f('ix_message_triggerable_process_model_process_group_identifier'), table_name='message_triggerable_process_model')
op.drop_table('message_triggerable_process_model')
op.drop_index(op.f('ix_message_correlation_property_identifier'), table_name='message_correlation_property')
op.drop_table('message_correlation_property')
op.drop_table('user')
op.drop_table('permission_target')
op.drop_index(op.f('ix_message_model_name'), table_name='message_model')
op.drop_index(op.f('ix_message_model_identifier'), table_name='message_model')
op.drop_table('message_model')
op.drop_table('group')
op.drop_index(op.f('ix_bpmn_process_id_lookup_bpmn_process_identifier'), table_name='bpmn_process_id_lookup')
op.drop_table('bpmn_process_id_lookup')
op.drop_table('admin_session')
# ### end Alembic commands ###

View File

@@ -0,0 +1,220 @@
"""Nox sessions."""
import os
import shutil
import sys
from pathlib import Path
from textwrap import dedent
import nox
try:
from nox_poetry import Session
from nox_poetry import session
except ImportError:
message = f"""\
Nox failed to import the 'nox-poetry' package.
Please install it using the following command:
{sys.executable} -m pip install nox-poetry"""
raise SystemExit(dedent(message)) from None
package = "spiffworkflow_backend"
python_versions = ["3.10", "3.9"]
nox.needs_version = ">= 2021.6.6"
nox.options.sessions = (
"pre-commit",
"safety",
"mypy",
"tests",
"typeguard",
"xdoctest",
"docs-build",
)
def setup_database(session: Session) -> None:
"""Run database migrations against the database."""
session.env["FLASK_INSTANCE_PATH"] = os.path.join(
os.getcwd(), "instance", "testing"
)
flask_env_key = "FLASK_SESSION_SECRET_KEY"
session.env[flask_env_key] = "super_secret_key"
session.env["FLASK_APP"] = "src/spiffworkflow_backend"
session.env["SPIFFWORKFLOW_BACKEND_ENV"] = "testing"
session.run("flask", "db", "upgrade")
def activate_virtualenv_in_precommit_hooks(session: Session) -> None:
"""Activate virtualenv in hooks installed by pre-commit.
This function patches git hooks installed by pre-commit to activate the
session's virtual environment. This allows pre-commit to locate hooks in
that environment when invoked from git.
Args:
session: The Session object.
"""
assert session.bin is not None # noqa: S101
virtualenv = session.env.get("VIRTUAL_ENV")
if virtualenv is None:
return
hookdir = Path(".git") / "hooks"
if not hookdir.is_dir():
return
for hook in hookdir.iterdir():
if hook.name.endswith(".sample") or not hook.is_file():
continue
text = hook.read_text()
bindir = repr(session.bin)[1:-1] # strip quotes
if not (
Path("A") == Path("a") and bindir.lower() in text.lower() or bindir in text
):
continue
lines = text.splitlines()
if not (lines[0].startswith("#!") and "python" in lines[0].lower()):
continue
header = dedent(
f"""\
import os
os.environ["VIRTUAL_ENV"] = {virtualenv!r}
os.environ["PATH"] = os.pathsep.join((
{session.bin!r},
os.environ.get("PATH", ""),
))
"""
)
lines.insert(1, header)
hook.write_text("\n".join(lines))
@session(name="pre-commit", python="3.10")
def precommit(session: Session) -> None:
"""Lint using pre-commit."""
args = session.posargs or ["run", "--all-files", "--show-diff-on-failure"]
session.install(
"black",
"darglint",
"flake8",
"flake8-bandit",
"flake8-bugbear",
"flake8-docstrings",
"flake8-rst-docstrings",
"pep8-naming",
"pre-commit",
"pre-commit-hooks",
"pyupgrade",
"reorder-python-imports",
)
session.run("pre-commit", *args)
if args and args[0] == "install":
activate_virtualenv_in_precommit_hooks(session)
@session(python="3.10")
def safety(session: Session) -> None:
"""Scan dependencies for insecure packages."""
requirements = session.poetry.export_requirements()
session.install("safety")
session.run("safety", "check", "--full-report", f"--file={requirements}")
@session(python=python_versions)
def mypy(session: Session) -> None:
"""Type-check using mypy."""
args = session.posargs or ["src", "tests", "docs/conf.py"]
session.install(".")
session.install("mypy", "pytest", "sqlalchemy-stubs")
session.run("mypy", *args)
if not session.posargs:
session.run("mypy", f"--python-executable={sys.executable}", "noxfile.py")
@session(python=python_versions)
def tests(session: Session) -> None:
"""Run the test suite."""
session.install(".")
session.install("coverage[toml]", "pytest", "pygments")
try:
setup_database(session)
session.run("coverage", "run", "--parallel", "-m", "pytest", *session.posargs)
finally:
if session.interactive:
session.notify("coverage", posargs=[])
@session
def coverage(session: Session) -> None:
"""Produce the coverage report."""
args = session.posargs or ["report"]
session.install("coverage[toml]")
if not session.posargs and any(Path().glob(".coverage.*")):
session.run("coverage", "combine")
session.run("coverage", *args)
@session(python=python_versions)
def typeguard(session: Session) -> None:
"""Runtime type checking using Typeguard."""
session.install(".")
session.install("pytest", "typeguard", "pygments")
setup_database(session)
session.env["RUN_TYPEGUARD"] = "true"
session.run("pytest", *session.posargs)
@session(python=python_versions)
def xdoctest(session: Session) -> None:
"""Run examples with xdoctest."""
if session.posargs:
args = [package, *session.posargs]
else:
args = [f"--modname={package}", "--command=all"]
if "FORCE_COLOR" in os.environ:
args.append("--colored=1")
session.install(".")
session.install("xdoctest[colors]")
session.run("python", "-m", "xdoctest", *args)
@session(name="docs-build", python="3.10")
def docs_build(session: Session) -> None:
"""Build the documentation."""
args = session.posargs or ["docs", "docs/_build"]
if not session.posargs and "FORCE_COLOR" in os.environ:
args.insert(0, "--color")
session.install(".")
session.install("sphinx", "sphinx-click", "furo")
build_dir = Path("docs", "_build")
if build_dir.exists():
shutil.rmtree(build_dir)
session.run("sphinx-build", *args)
@session(python="3.10")
def docs(session: Session) -> None:
"""Build and serve the documentation with live reloading on file changes."""
args = session.posargs or ["--open-browser", "docs", "docs/_build"]
session.install(".")
session.install("sphinx", "sphinx-autobuild", "sphinx-click", "furo")
build_dir = Path("docs", "_build")
if build_dir.exists():
shutil.rmtree(build_dir)
session.run("sphinx-autobuild", *args)

View File

@@ -0,0 +1,32 @@
group-admin:
type: Group
users: [jakub, kb, alex, dan, mike, jason]
group-finance:
type: Group
users: [harmeet, sasha]
group-hr:
type: Group
users: [manuchehr]
permission-admin:
type: Permission
groups: [group-admin]
users: []
allowed_permissions: [CREATE, READ, UPDATE, DELETE, LIST, INSTANTIATE]
uri: /*
permission-finance-admin:
type: Permission
groups: [group-finance]
users: []
allowed_permissions: [CREATE, READ, UPDATE, DELETE]
uri: /v1.0/process-groups/finance/*
permission-read-all:
type: Permission
groups: [group-finance, group-hr, group-admin]
users: []
allowed_permissions: [READ]
uri: /*
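For orientation, here is a hedged sketch (not the project's actual loader, which is not included in this commit dump) of how one Permission entry above could map onto the PermissionTargetModel and PermissionAssignmentModel tables defined later in this commit. It assumes PyYAML, an application context, and that the referenced groups already exist with principals.

import yaml

from flask_bpmn.models.db import db
from spiffworkflow_backend.models.group import GroupModel
from spiffworkflow_backend.models.permission_assignment import PermissionAssignmentModel
from spiffworkflow_backend.models.permission_target import PermissionTargetModel
from spiffworkflow_backend.models.principal import PrincipalModel


def grant_permissions_from_yaml(path: str) -> None:
    """Sketch only: map YAML Permission entries onto permission db rows."""
    with open(path) as f:
        entries = yaml.safe_load(f)
    for _name, entry in entries.items():
        if entry.get("type") != "Permission":
            continue
        target = PermissionTargetModel(uri=entry["uri"])
        db.session.add(target)
        db.session.flush()  # populate target.id before building assignments
        for group_name in entry.get("groups", []):
            group = GroupModel.query.filter_by(identifier=group_name).first()
            principal = PrincipalModel.query.filter_by(group_id=group.id).first()
            for permission in entry["allowed_permissions"]:
                db.session.add(
                    PermissionAssignmentModel(
                        principal_id=principal.id,
                        permission_target_id=target.id,
                        grant_type="permit",
                        permission=permission.lower(),  # e.g. READ -> read
                    )
                )
    db.session.commit()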

3524
spiffworkflow-backend/poetry.lock generated Normal file

File diff suppressed because it is too large

View File

@@ -0,0 +1,146 @@
[tool.poetry]
name = "spiffworkflow-backend"
version = "0.0.0"
description = "Spiffworkflow Backend"
authors = ["Jason Lantz <sartography@users.noreply.github.com>"]
license = "MIT"
readme = "README.rst"
homepage = "https://github.com/sartography/spiffworkflow-backend"
repository = "https://github.com/sartography/spiffworkflow-backend"
documentation = "https://spiffworkflow-backend.readthedocs.io"
classifiers = [
"Development Status :: 1 - Planning",
]
[tool.poetry.urls]
Changelog = "https://github.com/sartography/spiffworkflow-backend/releases"
[tool.poetry.dependencies]
python = ">=3.9,<3.11"
click = "^8.0.1"
flask = "2.2.2"
flask-admin = "*"
flask-bcrypt = "*"
flask-cors = "*"
flask-mail = "*"
flask-marshmallow = "*"
flask-migrate = "*"
flask-restful = "*"
werkzeug = "*"
# tracking main (see https://github.com/sartography/SpiffWorkflow/pull/241)
SpiffWorkflow = {git = "https://github.com/sartography/SpiffWorkflow", rev = "main"}
# SpiffWorkflow = {develop = true, path = "/Users/kevin/projects/github/sartography/SpiffWorkflow"}
# SpiffWorkflow = {develop = true, path = "/home/jason/projects/github/sartography/SpiffWorkflow"}
sentry-sdk = "^1.9.10"
sphinx-autoapi = "^1.8.4"
# flask-bpmn = {develop = true, path = "/home/jason/projects/github/sartography/flask-bpmn"}
# flask-bpmn = {develop = true, path = "/Users/kevin/projects/github/sartography/flask-bpmn"}
flask-bpmn = {git = "https://github.com/sartography/flask-bpmn", rev = "main"}
mysql-connector-python = "^8.0.29"
pytest-flask = "^1.2.0"
pytest-flask-sqlalchemy = "^1.1.0"
psycopg2 = "^2.9.3"
typing-extensions = "^4.3.0"
connexion = {extras = ["swagger-ui"], version = "^2"}
lxml = "^4.9.1"
marshmallow-enum = "^1.5.1"
marshmallow-sqlalchemy = "^0.28.0"
PyJWT = "^2.4.0"
gunicorn = "^20.1.0"
types-pytz = "^2022.1.1"
python-keycloak = "^2.5.0"
APScheduler = "^3.9.1"
types-requests = "^2.28.6"
Jinja2 = "^3.1.2"
RestrictedPython = "^5.2"
Flask-SQLAlchemy = "^3"
orjson = "^3.8.0"
[tool.poetry.dev-dependencies]
pytest = "^7.1.2"
coverage = {extras = ["toml"], version = "^6.1"}
safety = "^2.3.1"
mypy = ">=0.961"
typeguard = "^2.13.2"
xdoctest = {extras = ["colors"], version = "^1.0.1"}
sphinx = "^5.0.2"
sphinx-autobuild = ">=2021.3.14"
pre-commit = "^2.20.0"
flake8 = "^4.0.1"
black = ">=21.10b0"
flake8-bandit = "^2.1.2"
# 1.7.3 broke us. https://github.com/PyCQA/bandit/issues/841
bandit = "1.7.2"
flake8-bugbear = "^22.7.1"
flake8-docstrings = "^1.6.0"
flake8-rst-docstrings = "^0.2.7"
# flask-sqlalchemy-stubs = "^0.2"
pep8-naming = "^0.13.2"
darglint = "^1.8.1"
reorder-python-imports = "^3.8.1"
pre-commit-hooks = "^4.0.1"
sphinx-click = "^4.3.0"
Pygments = "^2.10.0"
pyupgrade = "^2.37.1"
furo = ">=2021.11.12"
MonkeyType = "^22.2.0"
sqlalchemy-stubs = "^0.4"
[tool.poetry.scripts]
spiffworkflow-backend = "spiffworkflow_backend.__main__:main"
[tool.pytest.ini_options]
# ignore deprecation warnings from various packages that we don't control
filterwarnings = [
# note the use of single quote below to denote "raw" strings in TOML
# kombu/utils/compat.py:82
'ignore:SelectableGroups dict interface is deprecated. Use select.',
# flask_marshmallow/__init__.py:34
# marshmallow_sqlalchemy/convert.py:17
'ignore:distutils Version classes are deprecated. Use packaging.version instead.',
# connexion/spec.py:50
'ignore:Passing a schema to Validator.iter_errors is deprecated and will be removed in a future release',
# connexion/decorators/validation.py:16
'ignore:Accessing jsonschema.draft4_format_checker is deprecated and will be removed in a future release.',
# connexion/apis/flask_api.py:236
"ignore:'_request_ctx_stack' is deprecated and will be removed in Flask 2.3",
"ignore:Setting 'json_encoder' on the app or a blueprint is deprecated and will be removed in Flask 2.3",
"ignore:'JSONEncoder' is deprecated and will be removed in Flask 2.3",
"ignore:'app.json_encoder' is deprecated and will be removed in Flask 2.3"
]
[tool.coverage.paths]
source = ["src", "*/site-packages"]
tests = ["tests", "*/tests"]
[tool.coverage.run]
branch = true
source = ["spiffworkflow_backend", "tests"]
[tool.coverage.report]
show_missing = true
fail_under = 50
[tool.mypy]
strict = true
disallow_any_generics = false
warn_unreachable = true
pretty = true
show_column_numbers = true
show_error_codes = true
show_error_context = true
plugins = "sqlmypy"
# We get 'error: Module has no attribute "set_context"' for sentry-sdk without this option
implicit_reexport = true
# allow for subdirs to NOT require __init__.py
namespace_packages = true
explicit_package_bases = false
[build-system]
requires = ["poetry-core>=1.0.0"]
build-backend = "poetry.core.masonry.api"

View File

@@ -0,0 +1,13 @@
sonar.organization=sartography
sonar.projectKey=sartography_spiffworkflow-backend
sonar.host.url=https://sonarcloud.io
sonar.python.version=3.9,3.10
sonar.python.coverage.reportPaths=coverage.xml
sonar.test.inclusions=tests
# it's finding "bugs" we don't care about in the deprecated UI
sonar.exclusions=migrations/**,bin/keycloak_test_server.py,src/spiffworkflow_backend/routes/admin_blueprint/templates/*.html
sonar.coverage.exclusions=noxfile.py,conftest.py
# sonar.exclusions=crc/templates/*.html,docs/**,config/**,instance/**,migrations/**,postgres/**,readme_images/**,schema/**,templates/**
# sonar.sources=crc

View File

@@ -0,0 +1,139 @@
"""__init__."""
import os
from typing import Any
import connexion # type: ignore
import flask.app
import flask.json
import sqlalchemy
from apscheduler.schedulers.background import BackgroundScheduler # type: ignore
from flask.json.provider import DefaultJSONProvider
from flask_bpmn.api.api_error import api_error_blueprint
from flask_bpmn.models.db import db
from flask_bpmn.models.db import migrate
from flask_cors import CORS # type: ignore
from flask_mail import Mail # type: ignore
import spiffworkflow_backend.load_database_models # noqa: F401
from spiffworkflow_backend.config import setup_config
from spiffworkflow_backend.routes.admin_blueprint.admin_blueprint import admin_blueprint
from spiffworkflow_backend.routes.process_api_blueprint import process_api_blueprint
from spiffworkflow_backend.routes.user_blueprint import user_blueprint
from spiffworkflow_backend.services.background_processing_service import (
BackgroundProcessingService,
)
class MyJSONEncoder(DefaultJSONProvider):
"""MyJSONEncoder."""
def default(self, obj: Any) -> Any:
"""Default."""
if hasattr(obj, "serialized"):
return obj.serialized
elif isinstance(obj, sqlalchemy.engine.row.Row): # type: ignore
return_dict = {}
for row_key in obj.keys():
row_value = obj[row_key]
if hasattr(row_value, "__dict__"):
return_dict.update(row_value.__dict__)
else:
return_dict.update({row_key: row_value})
return_dict.pop("_sa_instance_state", None)  # absent when the row held only plain columns
return return_dict
return super().default(obj)
def dumps(self, obj: Any, **kwargs: Any) -> Any:
"""Dumps."""
kwargs.setdefault("default", self.default)
return super().dumps(obj, **kwargs)
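A minimal sketch of what the provider above buys us: any object exposing a serialized property is JSON-encoded through it. The Example class is illustrative, not part of this commit.

import flask

demo_app = flask.Flask(__name__)
demo_app.json = MyJSONEncoder(demo_app)


class Example:
    @property
    def serialized(self) -> dict:
        return {"id": 1, "status": "ready"}


print(demo_app.json.dumps(Example()))  # {"id": 1, "status": "ready"}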
def start_scheduler(app: flask.app.Flask) -> None:
"""Start_scheduler."""
scheduler = BackgroundScheduler()
scheduler.add_job(
BackgroundProcessingService(app).process_message_instances_with_app_context,
"interval",
seconds=10,
)
scheduler.add_job(
BackgroundProcessingService(app).run,
"interval",
seconds=30,
)
scheduler.start()
def create_app() -> flask.app.Flask:
"""Create_app."""
# We need to create the sqlite database in a known location.
# If we rely on the app.instance_path without setting an environment
# variable, it will be one thing when we run flask db upgrade in the
# noxfile and another thing when the tests actually run.
# instance_path is described more at https://flask.palletsprojects.com/en/2.1.x/config/
connexion_app = connexion.FlaskApp(
__name__, server_args={"instance_path": os.environ.get("FLASK_INSTANCE_PATH")}
)
app = connexion_app.app
app.config["CONNEXION_APP"] = connexion_app
app.config["SESSION_TYPE"] = "filesystem"
if os.environ.get("FLASK_SESSION_SECRET_KEY") is None:
raise KeyError(
"Cannot find the secret_key from the environment. Please set FLASK_SESSION_SECRET_KEY"
)
app.secret_key = os.environ.get("FLASK_SESSION_SECRET_KEY")
setup_config(app)
db.init_app(app)
migrate.init_app(app, db)
app.register_blueprint(user_blueprint)
app.register_blueprint(process_api_blueprint)
app.register_blueprint(api_error_blueprint)
app.register_blueprint(admin_blueprint, url_prefix="/admin")
origins_re = [
r"^https?:\/\/%s(.*)" % o.replace(".", r"\.")
for o in app.config["CORS_ALLOW_ORIGINS"]
]
CORS(app, origins=origins_re)
connexion_app.add_api("api.yml", base_path="/v1.0")
mail = Mail(app)
app.config["MAIL_APP"] = mail
app.json = MyJSONEncoder(app)
if app.config["PROCESS_WAITING_MESSAGES"]:
start_scheduler(app)
configure_sentry(app)
return app # type: ignore
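A hedged usage sketch (values illustrative; assumes the repo's config files, including config/secrets.py, are in place): the only hard requirement enforced above is FLASK_SESSION_SECRET_KEY, and setting SPIFF_DATABASE_TYPE=sqlite sidesteps the MySQL default in config/__init__.py shown later in this commit.

import os

os.environ.setdefault("FLASK_SESSION_SECRET_KEY", "dev_only_not_a_real_secret")
os.environ.setdefault("SPIFF_DATABASE_TYPE", "sqlite")

app = create_app()
app.run(port=7000)  # SPIFFWORKFLOW_BACKEND_URL defaults to http://localhost:7000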
def configure_sentry(app: flask.app.Flask) -> None:
"""Configure_sentry."""
import sentry_sdk
from sentry_sdk.integrations.flask import FlaskIntegration
sentry_sample_rate = app.config.get("SENTRY_SAMPLE_RATE")
if sentry_sample_rate is None:
return
sentry_sdk.init(
dsn=app.config.get("SENTRY_DSN"),
integrations=[
FlaskIntegration(),
],
# Set traces_sample_rate to 1.0 to capture 100%
# of transactions for performance monitoring.
# We recommend adjusting this value in production.
traces_sample_rate=float(sentry_sample_rate),
)

View File

@@ -0,0 +1,13 @@
"""Command-line interface."""
import click
@click.command()
@click.version_option()
def main() -> None:
"""Spiffworkflow Backend."""
print("This does nothing")
if __name__ == "__main__":
main(prog_name="spiffworkflow-backend") # pragma: no cover

File diff suppressed because it is too large

View File

@@ -0,0 +1,73 @@
"""__init__.py."""
import os
import threading
from flask.app import Flask
from werkzeug.utils import ImportStringError
from spiffworkflow_backend.services.logging_service import setup_logger
def setup_database_uri(app: Flask) -> None:
"""Setup_database_uri."""
if os.environ.get("SPIFFWORKFLOW_BACKEND_DATABASE_URI") is None:
database_name = f"spiffworkflow_backend_{app.config['ENV_IDENTIFIER']}"
if os.environ.get("SPIFF_DATABASE_TYPE") == "sqlite":
app.config[
"SQLALCHEMY_DATABASE_URI"
] = f"sqlite:///{app.instance_path}/db_{app.config['ENV_IDENTIFIER']}.sqlite3"
elif os.environ.get("SPIFF_DATABASE_TYPE") == "postgres":
app.config[
"SQLALCHEMY_DATABASE_URI"
] = f"postgresql://spiffworkflow_backend:spiffworkflow_backend@localhost:5432/{database_name}"
else:
# use "pswd" in the name to avoid flake8-bandit hardcoded-password warnings
db_pswd = os.environ.get("DB_PASSWORD")
if db_pswd is None:
db_pswd = ""
app.config[
"SQLALCHEMY_DATABASE_URI"
] = f"mysql+mysqlconnector://root:{db_pswd}@localhost/{database_name}"
else:
app.config["SQLALCHEMY_DATABASE_URI"] = os.environ.get(
"SPIFFWORKFLOW_BACKEND_DATABASE_URI"
)
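A quick sketch of how the fallbacks above resolve: SPIFFWORKFLOW_BACKEND_DATABASE_URI wins outright, SPIFF_DATABASE_TYPE selects sqlite or postgres, and anything else falls through to MySQL. Values below are illustrative.

import os

from flask import Flask

os.environ.pop("SPIFFWORKFLOW_BACKEND_DATABASE_URI", None)
os.environ["SPIFF_DATABASE_TYPE"] = "sqlite"
demo = Flask(__name__)
demo.config["ENV_IDENTIFIER"] = "development"
setup_database_uri(demo)
print(demo.config["SQLALCHEMY_DATABASE_URI"])
# sqlite:///<instance_path>/db_development.sqlite3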
def setup_config(app: Flask) -> None:
"""Setup_config."""
# ensure the instance folder exists
try:
os.makedirs(app.instance_path)
except OSError:
pass
app.config["ENV_IDENTIFIER"] = os.environ.get(
"SPIFFWORKFLOW_BACKEND_ENV", "development"
)
app.config["SQLALCHEMY_TRACK_MODIFICATIONS"] = False
app.config.from_object("spiffworkflow_backend.config.default")
# This allows config/testing.py or instance/config.py to override the default config
if "ENV_IDENTIFIER" in app.config and app.config["ENV_IDENTIFIER"] == "testing":
app.config.from_pyfile("config/testing.py", silent=True)
else:
app.config.from_pyfile(f"{app.instance_path}/config.py", silent=True)
setup_database_uri(app)
setup_logger(app)
env_config_module = "spiffworkflow_backend.config." + app.config["ENV_IDENTIFIER"]
try:
app.config.from_object(env_config_module)
except ImportStringError as exception:
raise ModuleNotFoundError(
f"Cannot find config module: {env_config_module}"
) from exception
# unversioned (see .gitignore) config that can override everything and include secrets.
# src/spiffworkflow_backend/config/secrets.py
app.config.from_pyfile(os.path.join("config", "secrets.py"))
thread_local_data = threading.local()
app.config["THREAD_LOCAL_DATA"] = thread_local_data

View File

@@ -0,0 +1,47 @@
"""Default."""
import re
from os import environ
# Does the site allow self-registration of users
SELF_REGISTRATION = environ.get("SELF_REGISTRATION", default=False)
DEVELOPMENT = False
BPMN_SPEC_ABSOLUTE_DIR = environ.get("BPMN_SPEC_ABSOLUTE_DIR", default="")
CORS_DEFAULT = "*"
CORS_ALLOW_ORIGINS = re.split(
r",\s*", environ.get("CORS_ALLOW_ORIGINS", default=CORS_DEFAULT)
)
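The comma-split above tolerates optional whitespace after each comma; a tiny illustrative check:

import re

assert re.split(r",\s*", "http://localhost:7001, https://example.com") == [
    "http://localhost:7001",
    "https://example.com",
]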
PROCESS_WAITING_MESSAGES = (
environ.get("PROCESS_WAITING_MESSAGES", default="false") == "true"
)
SPIFFWORKFLOW_FRONTEND_URL = environ.get(
"SPIFFWORKFLOW_FRONTEND_URL", default="http://localhost:7001"
)
SPIFFWORKFLOW_BACKEND_URL = environ.get(
"SPIFFWORKFLOW_BACKEND_URL", default="http://localhost:7000"
)
GIT_COMMIT_ON_SAVE = environ.get("GIT_COMMIT_ON_SAVE", default="false") == "true"
# Open ID server
OPEN_ID_SERVER_URL = environ.get("OPEN_ID_SERVER_URL", default="http://localhost:7002")
OPEN_ID_CLIENT_ID = environ.get("OPEN_ID_CLIENT_ID", default="spiffworkflow-backend")
OPEN_ID_REALM_NAME = environ.get("OPEN_ID_REALM_NAME", default="spiffworkflow")
OPEN_ID_CLIENT_SECRET_KEY = environ.get(
"OPEN_ID_CLIENT_SECRET_KEY", default="JXeQExm0JhQPLumgHtIIqf52bDalHz0q"
) # noqa: S105
SPIFFWORKFLOW_BACKEND_LOG_TO_FILE = (
environ.get("SPIFFWORKFLOW_BACKEND_LOG_TO_FILE", default="false") == "true"
)
# service task connector proxy
CONNECTOR_PROXY_URL = environ.get(
"CONNECTOR_PROXY_URL", default="http://localhost:7004"
)
# Sentry Configuration
SENTRY_DSN = environ.get("SENTRY_DSN", default="")
SENTRY_SAMPLE_RATE = environ.get("SENTRY_SAMPLE_RATE", default="1.0")

View File

@@ -0,0 +1 @@
"""Development."""

View File

@@ -0,0 +1,4 @@
"""Staging."""
GIT_COMMIT_ON_SAVE = True
GIT_COMMIT_USERNAME = "staging"
GIT_COMMIT_EMAIL = "staging@example.com"

View File

@@ -0,0 +1,9 @@
"""Testing.py."""
from os import environ
TESTING = True
SECRET_KEY = "the_secret_key"
SPIFFWORKFLOW_BACKEND_LOG_TO_FILE = (
environ.get("SPIFFWORKFLOW_BACKEND_LOG_TO_FILE", default="true") == "true"
)

View File

@@ -0,0 +1,5 @@
"""Process_entity_not_found."""
class ProcessEntityNotFoundError(Exception):
"""ProcessEntityNotFoundError."""

View File

@@ -0,0 +1 @@
"""Fixture_data."""

View File

@@ -0,0 +1,11 @@
"""Spiff_enum."""
import enum
class SpiffEnum(enum.Enum):
"""SpiffEnum."""
@classmethod
def list(cls) -> list[str]:
"""List."""
return [el.value for el in cls]
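A toy usage sketch (Color is illustrative; the real subclasses, such as FileType and ProcessInstanceStatus, appear later in this commit):

class Color(SpiffEnum):
    red = "red"
    blue = "blue"


print(Color.list())  # ['red', 'blue']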

View File

@@ -0,0 +1,57 @@
"""Loads and sets up all database models for SQLAlchemy.
autoflake8 will remove these lines without the noqa comment.
NOTE: make sure this file is ignored by reorder-python-imports since
some models need to be loaded before others for relationships and to
avoid circular imports.
"""
from flask_bpmn.models.db import add_listeners
# must load this before UserModel and GroupModel for relationships
from spiffworkflow_backend.models.user_group_assignment import (
UserGroupAssignmentModel,
) # noqa: F401
from spiffworkflow_backend.models.active_task import ActiveTaskModel # noqa: F401
from spiffworkflow_backend.models.bpmn_process_id_lookup import (
BpmnProcessIdLookup,
) # noqa: F401
from spiffworkflow_backend.models.data_store import DataStoreModel # noqa: F401
from spiffworkflow_backend.models.file import FileModel # noqa: F401
from spiffworkflow_backend.models.message_correlation_property import (
MessageCorrelationPropertyModel,
) # noqa: F401
from spiffworkflow_backend.models.message_instance import (
MessageInstanceModel,
) # noqa: F401
from spiffworkflow_backend.models.message_model import MessageModel # noqa: F401
from spiffworkflow_backend.models.message_triggerable_process_model import (
MessageTriggerableProcessModel,
) # noqa: F401
from spiffworkflow_backend.models.permission_assignment import (
PermissionAssignmentModel,
) # noqa: F401
from spiffworkflow_backend.models.permission_target import (
PermissionTargetModel,
) # noqa: F401
from spiffworkflow_backend.models.principal import PrincipalModel # noqa: F401
from spiffworkflow_backend.models.process_instance import (
ProcessInstanceModel,
) # noqa: F401
from spiffworkflow_backend.models.process_instance_report import (
ProcessInstanceReportModel,
) # noqa: F401
from spiffworkflow_backend.models.secret_model import (
SecretAllowedProcessPathModel,
) # noqa: F401
from spiffworkflow_backend.models.secret_model import SecretModel # noqa: F401
from spiffworkflow_backend.models.spiff_logging import SpiffLoggingModel # noqa: F401
from spiffworkflow_backend.models.task_event import TaskEventModel # noqa: F401
from spiffworkflow_backend.models.user import UserModel # noqa: F401
from spiffworkflow_backend.models.group import GroupModel # noqa: F401
add_listeners()

View File

@@ -0,0 +1 @@
"""__init__."""

View File

@@ -0,0 +1,72 @@
"""Active_task."""
from __future__ import annotations
import json
from dataclasses import dataclass
from flask_bpmn.models.db import db
from flask_bpmn.models.db import SpiffworkflowBaseDBModel
from sqlalchemy import ForeignKey
from sqlalchemy.orm import relationship
from sqlalchemy.orm import RelationshipProperty
from spiffworkflow_backend.models.principal import PrincipalModel
from spiffworkflow_backend.models.process_instance import ProcessInstanceModel
from spiffworkflow_backend.models.task import Task
@dataclass
class ActiveTaskModel(SpiffworkflowBaseDBModel):
"""ActiveTaskModel."""
__tablename__ = "active_task"
__table_args__ = (
db.UniqueConstraint(
"task_id", "process_instance_id", name="active_task_unique"
),
)
assigned_principal: RelationshipProperty[PrincipalModel] = relationship(
PrincipalModel
)
id: int = db.Column(db.Integer, primary_key=True)
process_instance_id: int = db.Column(
ForeignKey(ProcessInstanceModel.id), nullable=False # type: ignore
)
assigned_principal_id: int = db.Column(ForeignKey(PrincipalModel.id))
form_file_name: str | None = db.Column(db.String(50))
ui_form_file_name: str | None = db.Column(db.String(50))
updated_at_in_seconds: int = db.Column(db.Integer)
created_at_in_seconds: int = db.Column(db.Integer)
task_id = db.Column(db.String(50))
task_name = db.Column(db.String(50))
task_title = db.Column(db.String(50))
task_type = db.Column(db.String(50))
task_status = db.Column(db.String(50))
process_model_display_name = db.Column(db.String(255))
task_data: str = db.Column(db.Text)
@classmethod
def to_task(cls, task: ActiveTaskModel) -> Task:
"""To_task."""
task_data = json.loads(task.task_data)
new_task = Task(
task.task_id,
task.task_name,
task.task_title,
task.task_type,
task.task_status,
data=task_data,
process_instance_id=task.process_instance_id,
)
if hasattr(task, "process_model_display_name"):
new_task.process_model_display_name = task.process_model_display_name
if hasattr(task, "process_group_identifier"):
new_task.process_group_identifier = task.process_group_identifier
if hasattr(task, "process_model_identifier"):
new_task.process_model_identifier = task.process_model_identifier
return new_task
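A hedged sketch of to_task above: it decodes the stored task_data JSON string into the Task API model. Field values are illustrative, and no database session is needed just to convert.

active = ActiveTaskModel(
    task_id="abc123",
    task_name="manual_task_1",
    task_title="Review request",
    task_type="Manual Task",
    task_status="READY",
    process_instance_id=1,
    task_data='{"amount": 500}',
)
task = ActiveTaskModel.to_task(active)
print(task.data)  # {'amount': 500}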

View File

@@ -0,0 +1,13 @@
"""Message_model."""
from flask_bpmn.models.db import db
from flask_bpmn.models.db import SpiffworkflowBaseDBModel
class BpmnProcessIdLookup(SpiffworkflowBaseDBModel):
"""BpmnProcessIdLookup."""
__tablename__ = "bpmn_process_id_lookup"
id = db.Column(db.Integer, primary_key=True)
bpmn_process_identifier = db.Column(db.String(255), unique=True, index=True)
bpmn_file_relative_path = db.Column(db.String(255))

View File

@@ -0,0 +1,31 @@
"""Data_store."""
from flask_bpmn.models.db import db
from flask_bpmn.models.db import SpiffworkflowBaseDBModel
from flask_marshmallow.sqla import SQLAlchemyAutoSchema # type: ignore
class DataStoreModel(SpiffworkflowBaseDBModel):
"""DataStoreModel."""
__tablename__ = "data_store"
id = db.Column(db.Integer, primary_key=True)
updated_at_in_seconds = db.Column(db.Integer)
key = db.Column(db.String(50), nullable=False)
process_instance_id = db.Column(db.Integer)
task_spec = db.Column(db.String(50))
spec_id = db.Column(db.String(50))
user_id = db.Column(db.String(50), nullable=True)
file_id = db.Column(db.Integer, db.ForeignKey("file.id"), nullable=True)
value = db.Column(db.String(50))
class DataStoreSchema(SQLAlchemyAutoSchema): # type: ignore
"""DataStoreSchema."""
class Meta:
"""Meta."""
model = DataStoreModel
load_instance = True
include_fk = True
sqla_session = db.session

View File

@@ -0,0 +1,179 @@
"""File."""
from dataclasses import dataclass
from dataclasses import field
from datetime import datetime
from typing import Optional
from flask_bpmn.models.db import db
from flask_bpmn.models.db import SpiffworkflowBaseDBModel
from marshmallow import INCLUDE
from marshmallow import Schema
from sqlalchemy.orm import deferred
from sqlalchemy.orm import relationship
from spiffworkflow_backend.helpers.spiff_enum import SpiffEnum
from spiffworkflow_backend.models.data_store import DataStoreModel
class FileModel(SpiffworkflowBaseDBModel):
"""FileModel."""
__tablename__ = "file"
id = db.Column(db.Integer, primary_key=True)
name = db.Column(db.String(50), nullable=False)
type = db.Column(db.String(50), nullable=False)
content_type = db.Column(db.String(50), nullable=False)
process_instance_id = db.Column(
db.Integer, db.ForeignKey("process_instance.id"), nullable=True
)
task_spec = db.Column(db.String(50), nullable=True)
irb_doc_code = db.Column(
db.String(50), nullable=False
) # Code reference to the documents.xlsx reference file.
data_stores = relationship(DataStoreModel, cascade="all,delete", backref="file")
md5_hash = db.Column(db.String(50), unique=False, nullable=False)
data = deferred(db.Column(db.LargeBinary)) # type: ignore
size = db.Column(db.Integer, default=0)
updated_at_in_seconds = db.Column(db.Integer)
created_at_in_seconds = db.Column(db.Integer)
user_uid = db.Column(db.String(50), db.ForeignKey("user.uid"), nullable=True)
archived = db.Column(db.Boolean, default=False)
class FileType(SpiffEnum):
"""FileType."""
bpmn = "bpmn"
csv = "csv"
dmn = "dmn"
doc = "doc"
docx = "docx"
gif = "gif"
jpg = "jpg"
json = "json"
md = "md"
pdf = "pdf"
png = "png"
ppt = "ppt"
pptx = "pptx"
rtf = "rtf"
svg = "svg"
svg_xml = "svg+xml"
txt = "txt"
xls = "xls"
xlsx = "xlsx"
xml = "xml"
zip = "zip"
CONTENT_TYPES = {
"bpmn": "text/xml",
"csv": "text/csv",
"dmn": "text/xml",
"doc": "application/msword",
"docx": "application/vnd.openxmlformats-officedocument.wordprocessingml.document",
"gif": "image/gif",
"jpg": "image/jpeg",
"json": "application/json",
"md": "text/plain",
"pdf": "application/pdf",
"png": "image/png",
"ppt": "application/vnd.ms-powerpoint",
"pptx": "application/vnd.openxmlformats-officedocument.presentationml.presentation",
"rtf": "application/rtf",
"svg": "image/svg+xml",
"svg_xml": "image/svg+xml",
"txt": "text/plain",
"xls": "application/vnd.ms-excel",
"xlsx": "application/vnd.openxmlformats-officedocument.spreadsheetml.sheet",
"xml": "application/xml",
"zip": "application/zip",
}
@dataclass(order=True)
class File:
"""File."""
sort_index: str = field(init=False)
content_type: str
name: str
type: str
document: dict
last_modified: datetime
size: int
process_instance_id: Optional[int] = None
irb_doc_code: Optional[str] = None
data_store: Optional[dict] = field(default_factory=dict)
user_uid: Optional[str] = None
file_contents: Optional[bytes] = None
process_model_id: Optional[str] = None
process_group_id: Optional[str] = None
archived: bool = False
def __post_init__(self) -> None:
"""__post_init__."""
self.sort_index = f"{self.type}:{self.name}"
@classmethod
def from_file_system(
cls,
file_name: str,
file_type: FileType,
content_type: str,
last_modified: datetime,
file_size: int,
) -> "File":
"""From_file_system."""
instance = cls(
name=file_name,
content_type=content_type,
type=file_type.value,
document={},
last_modified=last_modified,
size=file_size,
)
return instance
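A small hedged usage sketch for from_file_system above (name and size are illustrative):

from datetime import datetime

f = File.from_file_system(
    file_name="diagram.bpmn",
    file_type=FileType.bpmn,
    content_type=CONTENT_TYPES["bpmn"],  # "text/xml"
    last_modified=datetime.now(),
    file_size=2048,
)
print(f.sort_index)  # bpmn:diagram.bpmn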
class FileSchema(Schema):
"""FileSchema."""
class Meta:
"""Meta."""
model = File
fields = [
"id",
"name",
"content_type",
"process_instance_id",
"irb_doc_code",
"last_modified",
"type",
"archived",
"size",
"data_store",
"document",
"user_uid",
"url",
"file_contents",
"process_model_id",
"process_group_id",
]
unknown = INCLUDE
# url = Method("get_url")
#
# def get_url(self, obj):
# token = 'not_available'
# if hasattr(obj, 'id') and obj.id is not None:
# file_url = url_for("/v1_0.crc_api_file_get_file_data_link", file_id=obj.id, _external=True)
# if hasattr(flask.g, 'user'):
# token = flask.g.user.encode_auth_token()
# url = file_url + '?auth_token=' + urllib.parse.quote_plus(token)
# return url
# else:
# return ""
#

View File

@@ -0,0 +1,32 @@
"""Group."""
from __future__ import annotations
from typing import TYPE_CHECKING
from flask_bpmn.models.db import db
from flask_bpmn.models.group import FlaskBpmnGroupModel
from sqlalchemy.orm import relationship
if TYPE_CHECKING:
from spiffworkflow_backend.models.user_group_assignment import ( # noqa: F401
UserGroupAssignmentModel,
) # noqa: F401
from spiffworkflow_backend.models.user import UserModel # noqa: F401
class GroupModel(FlaskBpmnGroupModel):
"""GroupModel."""
__tablename__ = "group"
__table_args__ = {"extend_existing": True}
identifier = db.Column(db.String(255))
user_group_assignments = relationship("UserGroupAssignmentModel", cascade="delete")
users = relationship( # type: ignore
"UserModel",
viewonly=True,
secondary="user_group_assignment",
overlaps="user_group_assignments,users",
)
principal = relationship("PrincipalModel", uselist=False) # type: ignore

View File

@@ -0,0 +1,49 @@
"""Message_correlation."""
from dataclasses import dataclass
from typing import TYPE_CHECKING
from flask_bpmn.models.db import db
from flask_bpmn.models.db import SpiffworkflowBaseDBModel
from sqlalchemy import ForeignKey
from sqlalchemy.orm import relationship
from spiffworkflow_backend.models.message_correlation_property import (
MessageCorrelationPropertyModel,
)
from spiffworkflow_backend.models.process_instance import ProcessInstanceModel
if TYPE_CHECKING:
from spiffworkflow_backend.models.message_correlation_message_instance import ( # noqa: F401
MessageCorrelationMessageInstanceModel,
)
@dataclass
class MessageCorrelationModel(SpiffworkflowBaseDBModel):
"""Message Correlations to relate queued messages together."""
__tablename__ = "message_correlation"
__table_args__ = (
db.UniqueConstraint(
"process_instance_id",
"message_correlation_property_id",
"name",
name="message_instance_id_name_unique",
),
)
id = db.Column(db.Integer, primary_key=True)
process_instance_id = db.Column(
ForeignKey(ProcessInstanceModel.id), nullable=False, index=True # type: ignore
)
message_correlation_property_id = db.Column(
ForeignKey(MessageCorrelationPropertyModel.id), nullable=False, index=True
)
name = db.Column(db.String(255), nullable=False, index=True)
value = db.Column(db.String(255), nullable=False, index=True)
updated_at_in_seconds: int = db.Column(db.Integer)
created_at_in_seconds: int = db.Column(db.Integer)
message_correlations_message_instances = relationship(
"MessageCorrelationMessageInstanceModel", cascade="delete"
)

View File

@@ -0,0 +1,32 @@
"""Message_correlation_message_instance."""
from dataclasses import dataclass
from flask_bpmn.models.db import db
from flask_bpmn.models.db import SpiffworkflowBaseDBModel
from sqlalchemy import ForeignKey
from spiffworkflow_backend.models.message_correlation import MessageCorrelationModel
from spiffworkflow_backend.models.message_instance import MessageInstanceModel
@dataclass
class MessageCorrelationMessageInstanceModel(SpiffworkflowBaseDBModel):
"""MessageCorrelationMessageInstanceModel."""
__tablename__ = "message_correlation_message_instance"
__table_args__ = (
db.UniqueConstraint(
"message_instance_id",
"message_correlation_id",
name="message_correlation_message_instance_unique",
),
)
id = db.Column(db.Integer, primary_key=True)
message_instance_id = db.Column(
ForeignKey(MessageInstanceModel.id), nullable=False, index=True # type: ignore
)
message_correlation_id = db.Column(
ForeignKey(MessageCorrelationModel.id), nullable=False, index=True
)

View File

@@ -0,0 +1,25 @@
"""Message_correlation_property."""
from flask_bpmn.models.db import db
from flask_bpmn.models.db import SpiffworkflowBaseDBModel
from sqlalchemy import ForeignKey
from spiffworkflow_backend.models.message_model import MessageModel
class MessageCorrelationPropertyModel(SpiffworkflowBaseDBModel):
"""MessageCorrelationPropertyModel."""
__tablename__ = "message_correlation_property"
__table_args__ = (
db.UniqueConstraint(
"identifier",
"message_model_id",
name="message_correlation_property_unique",
),
)
id = db.Column(db.Integer, primary_key=True)
identifier = db.Column(db.String(50), index=True)
message_model_id = db.Column(ForeignKey(MessageModel.id), nullable=False)
updated_at_in_seconds: int = db.Column(db.Integer)
created_at_in_seconds: int = db.Column(db.Integer)

View File

@@ -0,0 +1,88 @@
"""Message_instance."""
import enum
from dataclasses import dataclass
from typing import Any
from typing import Optional
from typing import TYPE_CHECKING
from flask_bpmn.models.db import db
from flask_bpmn.models.db import SpiffworkflowBaseDBModel
from sqlalchemy import ForeignKey
from sqlalchemy.event import listens_for
from sqlalchemy.orm import relationship
from sqlalchemy.orm import Session
from sqlalchemy.orm import validates
from spiffworkflow_backend.models.message_model import MessageModel
from spiffworkflow_backend.models.process_instance import ProcessInstanceModel
if TYPE_CHECKING:
from spiffworkflow_backend.models.message_correlation_message_instance import ( # noqa: F401
MessageCorrelationMessageInstanceModel,
)
class MessageTypes(enum.Enum):
"""MessageTypes."""
send = "send"
receive = "receive"
class MessageStatuses(enum.Enum):
"""MessageStatuses."""
ready = "ready"
running = "running"
completed = "completed"
failed = "failed"
@dataclass
class MessageInstanceModel(SpiffworkflowBaseDBModel):
"""Messages from a process instance that are ready to send to a receiving task."""
__tablename__ = "message_instance"
id: int = db.Column(db.Integer, primary_key=True)
process_instance_id: int = db.Column(ForeignKey(ProcessInstanceModel.id), nullable=False) # type: ignore
message_model_id: int = db.Column(ForeignKey(MessageModel.id), nullable=False)
message_model = relationship("MessageModel")
message_correlations_message_instances = relationship(
"MessageCorrelationMessageInstanceModel", cascade="delete"
)
message_type: str = db.Column(db.String(20), nullable=False)
payload: str = db.Column(db.JSON)
status: str = db.Column(db.String(20), nullable=False, default="ready")
failure_cause: str = db.Column(db.Text())
updated_at_in_seconds: int = db.Column(db.Integer)
created_at_in_seconds: int = db.Column(db.Integer)
@validates("message_type")
def validate_message_type(self, key: str, value: Any) -> Any:
"""Validate_message_type."""
return self.validate_enum_field(key, value, MessageTypes)
@validates("status")
def validate_status(self, key: str, value: Any) -> Any:
"""Validate_status."""
return self.validate_enum_field(key, value, MessageStatuses)
# This runs for ALL db flushes for ANY model, not just this one even if it's in the MessageInstanceModel class
# so this may not be worth it or there may be a better way to do it
#
# https://stackoverflow.com/questions/32555829/flask-validates-decorator-multiple-fields-simultaneously/33025472#33025472
# https://docs.sqlalchemy.org/en/14/orm/session_events.html#before-flush
@listens_for(Session, "before_flush") # type: ignore
def ensure_failure_cause_is_set_if_message_instance_failed(
session: Any, _flush_context: Optional[Any], _instances: Optional[Any]
) -> None:
"""Ensure_failure_cause_is_set_if_message_instance_failed."""
for instance in session.new:
if isinstance(instance, MessageInstanceModel):
if instance.status == "failed" and instance.failure_cause is None:
raise ValueError(
f"{instance.__class__.__name__}: failure_cause must be set if status is failed"
)
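A hedged sketch of the guard above in action (requires an application context and an initialized db; ids are illustrative): flushing a new MessageInstanceModel that reached "failed" without a failure_cause raises.

msg = MessageInstanceModel(
    process_instance_id=1,
    message_model_id=1,
    message_type="send",
    status="failed",
)
db.session.add(msg)
db.session.commit()  # ValueError: MessageInstanceModel: failure_cause must be set if status is failed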

View File

@@ -0,0 +1,13 @@
"""Message_model."""
from flask_bpmn.models.db import db
from flask_bpmn.models.db import SpiffworkflowBaseDBModel
class MessageModel(SpiffworkflowBaseDBModel):
"""MessageModel."""
__tablename__ = "message_model"
id = db.Column(db.Integer, primary_key=True)
identifier = db.Column(db.String(50), unique=True, index=True)
name = db.Column(db.String(50), unique=True, index=True)

View File

@@ -0,0 +1,22 @@
"""Message_correlation_property."""
from flask_bpmn.models.db import db
from flask_bpmn.models.db import SpiffworkflowBaseDBModel
from sqlalchemy import ForeignKey
from spiffworkflow_backend.models.message_model import MessageModel
class MessageTriggerableProcessModel(SpiffworkflowBaseDBModel):
"""MessageTriggerableProcessModel."""
__tablename__ = "message_triggerable_process_model"
id = db.Column(db.Integer, primary_key=True)
message_model_id = db.Column(
ForeignKey(MessageModel.id), nullable=False, unique=True
)
process_model_identifier: str = db.Column(db.String(50), nullable=False, index=True)
process_group_identifier: str = db.Column(db.String(50), nullable=False, index=True)
updated_at_in_seconds: int = db.Column(db.Integer)
created_at_in_seconds: int = db.Column(db.Integer)

View File

@@ -0,0 +1,55 @@
"""PermissionAssignment."""
import enum
from flask_bpmn.models.db import db
from flask_bpmn.models.db import SpiffworkflowBaseDBModel
from sqlalchemy import Enum
from sqlalchemy import ForeignKey
from spiffworkflow_backend.models.permission_target import PermissionTargetModel
from spiffworkflow_backend.models.principal import PrincipalModel
class PermitDeny(enum.Enum):
"""PermitDeny."""
# permit, aka grant
permit = "permit"
deny = "deny"
class Permission(enum.Enum):
"""Permission."""
# from original requirements
# instantiate = 1
# administer = 2
# view_instance = 3
create = 1
read = 2
update = 3
delete = 4
list = 5
instantiate = 6 # this is something you do to a process model
class PermissionAssignmentModel(SpiffworkflowBaseDBModel):
"""PermissionAssignmentModel."""
__tablename__ = "permission_assignment"
__table_args__ = (
db.UniqueConstraint(
"principal_id",
"permission_target_id",
"permission",
name="permission_assignment_uniq",
),
)
id = db.Column(db.Integer, primary_key=True)
principal_id = db.Column(ForeignKey(PrincipalModel.id), nullable=False)
permission_target_id = db.Column(
ForeignKey(PermissionTargetModel.id), nullable=False
)
grant_type = db.Column(Enum(PermitDeny))
permission = db.Column(Enum(Permission))

View File

@@ -0,0 +1,26 @@
"""PermissionTarget."""
from flask_bpmn.models.db import db
from flask_bpmn.models.db import SpiffworkflowBaseDBModel
# process groups and models are not in the db
# from sqlalchemy import ForeignKey # type: ignore
#
# from spiffworkflow_backend.models.process_group import ProcessGroupModel
# from spiffworkflow_backend.models.process_model import ProcessModel
class PermissionTargetModel(SpiffworkflowBaseDBModel):
"""PermissionTargetModel."""
__tablename__ = "permission_target"
# __table_args__ = (
# CheckConstraint(
# "NOT(process_group_id IS NULL AND process_model_identifier IS NULL AND process_instance_id IS NULL)"
# ),
# )
id = db.Column(db.Integer, primary_key=True)
uri = db.Column(db.String(255), unique=True, nullable=False)
# process_group_id = db.Column(ForeignKey(ProcessGroupModel.id), nullable=True) # type: ignore
# process_model_identifier = db.Column(ForeignKey(ProcessModel.id), nullable=True) # type: ignore
# process_instance_id = db.Column(ForeignKey(ProcessInstanceModel.id), nullable=True) # type: ignore

View File

@@ -0,0 +1,30 @@
"""Principal."""
from dataclasses import dataclass
from flask_bpmn.models.db import db
from flask_bpmn.models.db import SpiffworkflowBaseDBModel
from sqlalchemy import ForeignKey
from sqlalchemy.schema import CheckConstraint
from spiffworkflow_backend.models.group import GroupModel
from spiffworkflow_backend.models.user import UserModel
class DataValidityError(Exception):
"""DataValidityError."""
class MissingPrincipalError(DataValidityError):
"""MissingPrincipalError."""
@dataclass
class PrincipalModel(SpiffworkflowBaseDBModel):
"""PrincipalModel."""
__tablename__ = "principal"
__table_args__ = (CheckConstraint("NOT(user_id IS NULL AND group_id IS NULL)"),)
id = db.Column(db.Integer, primary_key=True)
user_id = db.Column(ForeignKey(UserModel.id), nullable=True, unique=True)
group_id = db.Column(ForeignKey(GroupModel.id), nullable=True, unique=True)

View File

@@ -0,0 +1,62 @@
"""Process_group."""
from __future__ import annotations
from dataclasses import dataclass
from dataclasses import field
from typing import Any
import marshmallow
from marshmallow import post_load
from marshmallow import Schema
from spiffworkflow_backend.models.process_model import ProcessModelInfo
@dataclass(order=True)
class ProcessGroup:
"""ProcessGroup."""
sort_index: str = field(init=False)
id: str # A unique string name, lower case, underscores (e.g., 'my_group')
display_name: str
display_order: int | None = 0
admin: bool | None = False
process_models: list[ProcessModelInfo] = field(
default_factory=list[ProcessModelInfo]
)
def __post_init__(self) -> None:
"""__post_init__."""
self.sort_index = self.id
def __eq__(self, other: Any) -> bool:
"""__eq__."""
if not isinstance(other, ProcessGroup):
return False
if other.id == self.id:
return True
return False
class ProcessGroupSchema(Schema):
"""ProcessGroupSchema."""
class Meta:
"""Meta."""
model = ProcessGroup
fields = ["id", "display_name", "display_order", "admin", "process_models"]
process_models = marshmallow.fields.List(
marshmallow.fields.Nested(
"ProcessModelInfoSchema", dump_only=True, required=False
)
)
@post_load
def make_process_group(
self, data: dict[str, str | bool | int], **kwargs: dict
) -> ProcessGroup:
"""Make_process_group."""
return ProcessGroup(**data) # type: ignore
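
A minimal round-trip sketch of the @post_load hook rebuilding the dataclass; sort_index is derived in __post_init__:

data = {"id": "my_group", "display_name": "My Group", "display_order": 0, "admin": False}
group = ProcessGroupSchema().load(data)
assert isinstance(group, ProcessGroup)
assert group.sort_index == "my_group"  # set from id in __post_init__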

View File

@ -0,0 +1,295 @@
"""Process_instance."""
from __future__ import annotations
from dataclasses import dataclass
from typing import Any
from typing import cast
import marshmallow
from flask_bpmn.models.db import db
from flask_bpmn.models.db import SpiffworkflowBaseDBModel
from marshmallow import INCLUDE
from marshmallow import Schema
from marshmallow_enum import EnumField # type: ignore
from SpiffWorkflow.util.deep_merge import DeepMerge # type: ignore
from sqlalchemy import ForeignKey
from sqlalchemy.orm import deferred
from sqlalchemy.orm import relationship
from sqlalchemy.orm import validates
from spiffworkflow_backend.helpers.spiff_enum import SpiffEnum
from spiffworkflow_backend.models.process_model import ProcessModelInfo
from spiffworkflow_backend.models.task import Task
from spiffworkflow_backend.models.task import TaskSchema
from spiffworkflow_backend.models.user import UserModel
class NavigationItemSchema(Schema):
"""NavigationItemSchema."""
class Meta:
"""Meta."""
fields = [
"spec_id",
"name",
"spec_type",
"task_id",
"description",
"backtracks",
"indent",
"lane",
"state",
"children",
]
unknown = INCLUDE
state = marshmallow.fields.String(required=False, allow_none=True)
description = marshmallow.fields.String(required=False, allow_none=True)
backtracks = marshmallow.fields.String(required=False, allow_none=True)
lane = marshmallow.fields.String(required=False, allow_none=True)
task_id = marshmallow.fields.String(required=False, allow_none=True)
children = marshmallow.fields.List(
marshmallow.fields.Nested(lambda: NavigationItemSchema())
)
class ProcessInstanceStatus(SpiffEnum):
"""ProcessInstanceStatus."""
not_started = "not_started"
user_input_required = "user_input_required"
waiting = "waiting"
complete = "complete"
faulted = "faulted"
suspended = "suspended"
terminated = "terminated"
erroring = "erroring"
class ProcessInstanceModel(SpiffworkflowBaseDBModel):
"""ProcessInstanceModel."""
__tablename__ = "process_instance"
id: int = db.Column(db.Integer, primary_key=True)
process_model_identifier: str = db.Column(db.String(50), nullable=False, index=True)
process_group_identifier: str = db.Column(db.String(50), nullable=False, index=True)
process_initiator_id: int = db.Column(ForeignKey(UserModel.id), nullable=False)
process_initiator = relationship("UserModel")
active_tasks = relationship("ActiveTaskModel", cascade="delete") # type: ignore
task_events = relationship("TaskEventModel", cascade="delete") # type: ignore
spiff_logs = relationship("SpiffLoggingModel", cascade="delete") # type: ignore
message_instances = relationship("MessageInstanceModel", cascade="delete") # type: ignore
message_correlations = relationship("MessageCorrelationModel", cascade="delete") # type: ignore
bpmn_json: str | None = deferred(db.Column(db.JSON)) # type: ignore
start_in_seconds: int | None = db.Column(db.Integer)
end_in_seconds: int | None = db.Column(db.Integer)
updated_at_in_seconds: int = db.Column(db.Integer)
created_at_in_seconds: int = db.Column(db.Integer)
status: str = db.Column(db.String(50))
bpmn_xml_file_contents: bytes | None = None
bpmn_version_control_type: str = db.Column(db.String(50))
bpmn_version_control_identifier: str = db.Column(db.String(255))
@property
def serialized(self) -> dict[str, Any]:
"""Return object data in serializeable format."""
local_bpmn_xml_file_contents = ""
if self.bpmn_xml_file_contents:
local_bpmn_xml_file_contents = self.bpmn_xml_file_contents.decode("utf-8")
return {
"id": self.id,
"process_model_identifier": self.process_model_identifier,
"process_group_identifier": self.process_group_identifier,
"status": self.status,
"bpmn_json": self.bpmn_json,
"start_in_seconds": self.start_in_seconds,
"end_in_seconds": self.end_in_seconds,
"process_initiator_id": self.process_initiator_id,
"bpmn_xml_file_contents": local_bpmn_xml_file_contents,
}
@property
def serialized_flat(self) -> dict:
"""Return object in serializeable format with data merged together with top-level attributes.
Top-level attributes like process_model_identifier and status win over data attributes.
"""
serialized_top_level_attributes = self.serialized
serialized_top_level_attributes.pop("data", None)
return cast(dict, DeepMerge.merge(self.data, serialized_top_level_attributes))
@validates("status")
def validate_status(self, key: str, value: Any) -> Any:
"""Validate_status."""
return self.validate_enum_field(key, value, ProcessInstanceStatus)
class ProcessInstanceModelSchema(Schema):
"""ProcessInstanceModelSchema."""
class Meta:
"""Meta."""
model = ProcessInstanceModel
fields = [
"id",
"process_model_identifier",
"process_group_identifier",
"process_initiator_id",
"start_in_seconds",
"end_in_seconds",
"updated_at_in_seconds",
"created_at_in_seconds",
"status",
"bpmn_version_control_identifier",
]
status = marshmallow.fields.Method("get_status", dump_only=True)
def get_status(self, obj: ProcessInstanceModel) -> str:
"""Get_status."""
return obj.status
class ProcessInstanceApi:
"""ProcessInstanceApi."""
def __init__(
self,
id: int,
status: ProcessInstanceStatus,
next_task: Task | None,
process_model_identifier: str,
process_group_identifier: str,
completed_tasks: int,
updated_at_in_seconds: int,
is_review: bool,
title: str,
) -> None:
"""__init__."""
self.id = id
self.status = status
self.next_task = next_task # The next task that requires user input.
# self.navigation = navigation  # FIXME: restoring navigation here would be a nice improvement.
self.process_model_identifier = process_model_identifier
self.process_group_identifier = process_group_identifier
self.completed_tasks = completed_tasks
self.updated_at_in_seconds = updated_at_in_seconds
self.title = title
self.is_review = is_review
class ProcessInstanceApiSchema(Schema):
"""ProcessInstanceApiSchema."""
class Meta:
"""Meta."""
model = ProcessInstanceApi
fields = [
"id",
"status",
"next_task",
"navigation",
"process_model_identifier",
"process_group_identifier",
"completed_tasks",
"updated_at_in_seconds",
"is_review",
"title",
"study_id",
"state",
]
unknown = INCLUDE
status = EnumField(ProcessInstanceStatus)
next_task = marshmallow.fields.Nested(TaskSchema, dump_only=True, required=False)
navigation = marshmallow.fields.List(
marshmallow.fields.Nested(NavigationItemSchema, dump_only=True)
)
state = marshmallow.fields.String(allow_none=True)
@marshmallow.post_load
def make_process_instance(
self, data: dict[str, Any], **kwargs: dict
) -> ProcessInstanceApi:
"""Make_process_instance."""
# keep only the keys accepted by ProcessInstanceApi.__init__
keys = [
"id",
"status",
"next_task",
"process_model_identifier",
"process_group_identifier",
"completed_tasks",
"updated_at_in_seconds",
"is_review",
"title",
]
filtered_fields = {key: data[key] for key in keys}
filtered_fields["next_task"] = TaskSchema().make_task(data["next_task"])
return ProcessInstanceApi(**filtered_fields)
@dataclass
class ProcessInstanceMetadata:
"""ProcessInstanceMetadata."""
id: int
display_name: str | None = None
description: str | None = None
spec_version: str | None = None
state: str | None = None
status: str | None = None
completed_tasks: int | None = None
is_review: bool | None = None
state_message: str | None = None
process_model_identifier: str | None = None
process_group_id: str | None = None
@classmethod
def from_process_instance(
cls, process_instance: ProcessInstanceModel, process_model: ProcessModelInfo
) -> ProcessInstanceMetadata:
"""From_process_instance."""
instance = cls(
id=process_instance.id,
display_name=process_model.display_name,
description=process_model.description,
process_group_id=process_model.process_group_id,
state_message=process_instance.state_message,
status=process_instance.status,
completed_tasks=process_instance.completed_tasks,
is_review=process_model.is_review,
process_model_identifier=process_instance.process_model_identifier,
)
return instance
class ProcessInstanceMetadataSchema(Schema):
"""ProcessInstanceMetadataSchema."""
status = EnumField(ProcessInstanceStatus)
class Meta:
"""Meta."""
model = ProcessInstanceMetadata
additional = [
"id",
"display_name",
"description",
"state",
"completed_tasks",
"process_group_id",
"is_review",
"state_message",
]
unknown = INCLUDE
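
A minimal sketch of the serialized_flat precedence rule described in its docstring, assuming the instance's data attribute has been populated (as report generation does via the processor):

instance = ProcessInstanceModel(
    id=1,
    status="complete",
    process_model_identifier="my-model",
    process_group_identifier="my-group",
    process_initiator_id=1,
)
instance.data = {"status": "from-task-data", "month": "3"}
flat = instance.serialized_flat
assert flat["status"] == "complete"  # top-level attribute wins
assert flat["month"] == "3"          # task data is merged in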

View File

@ -0,0 +1,335 @@
"""Process_instance."""
from __future__ import annotations
from dataclasses import dataclass
from typing import Any
from typing import cast
from typing import Optional
from typing import TypedDict
from flask_bpmn.models.db import db
from flask_bpmn.models.db import SpiffworkflowBaseDBModel
from sqlalchemy import ForeignKey
from sqlalchemy.orm import deferred
from sqlalchemy.orm import relationship
from spiffworkflow_backend.exceptions.process_entity_not_found_error import (
ProcessEntityNotFoundError,
)
from spiffworkflow_backend.models.process_instance import ProcessInstanceModel
from spiffworkflow_backend.models.user import UserModel
from spiffworkflow_backend.services.process_instance_processor import (
ProcessInstanceProcessor,
)
from spiffworkflow_backend.services.process_model_service import ProcessModelService
ReportMetadata = dict[str, Any]
class ProcessInstanceReportResult(TypedDict):
"""ProcessInstanceReportResult."""
report_metadata: ReportMetadata
results: list[dict]
# https://stackoverflow.com/a/56842689/6090676
class Reversor:
"""Reversor."""
def __init__(self, obj: Any):
"""__init__."""
self.obj = obj
def __eq__(self, other: Any) -> Any:
"""__eq__."""
return other.obj == self.obj
def __lt__(self, other: Any) -> Any:
"""__lt__."""
return other.obj < self.obj
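# Sketch: wrapping a sort key in Reversor inverts its ordering, so mixed
# ascending and descending keys can share one sorted() call, e.g.
#   sorted(rows, key=lambda r: (Reversor(r["id"]), r["month"]))
# sorts id descending while month remains ascending.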
@dataclass
class ProcessInstanceReportModel(SpiffworkflowBaseDBModel):
"""ProcessInstanceReportModel."""
__tablename__ = "process_instance_report"
__table_args__ = (
db.UniqueConstraint(
"process_group_identifier",
"process_model_identifier",
"identifier",
name="process_instance_report_unique",
),
)
id = db.Column(db.Integer, primary_key=True)
identifier: str = db.Column(db.String(50), nullable=False, index=True)
process_model_identifier: str = db.Column(db.String(50), nullable=False, index=True)
process_group_identifier = db.Column(db.String(50), nullable=False, index=True)
report_metadata: dict = deferred(db.Column(db.JSON)) # type: ignore
created_by_id = db.Column(ForeignKey(UserModel.id), nullable=False)
created_by = relationship("UserModel")
created_at_in_seconds = db.Column(db.Integer)
updated_at_in_seconds = db.Column(db.Integer)
@classmethod
def add_fixtures(cls) -> None:
"""Add_fixtures."""
try:
process_model = ProcessModelService().get_process_model(
group_id="sartography-admin", process_model_id="ticket"
)
user = UserModel.query.first()
columns = [
{"Header": "id", "accessor": "id"},
{"Header": "month", "accessor": "month"},
{"Header": "milestone", "accessor": "milestone"},
{"Header": "req_id", "accessor": "req_id"},
{"Header": "feature", "accessor": "feature"},
{"Header": "dev_days", "accessor": "dev_days"},
{"Header": "priority", "accessor": "priority"},
]
# named report_metadata to avoid shadowing the stdlib json module name
report_metadata = {"order": "month asc", "columns": columns}
cls.create_report(
identifier="standard",
process_group_identifier=process_model.process_group_id,
process_model_identifier=process_model.id,
user=user,
report_metadata=report_metadata,
)
cls.create_report(
identifier="for-month",
process_group_identifier="sartography-admin",
process_model_identifier="ticket",
user=user,
report_metadata=cls.ticket_for_month_report(),
)
cls.create_report(
identifier="for-month-3",
process_group_identifier="sartography-admin",
process_model_identifier="ticket",
user=user,
report_metadata=cls.ticket_for_month_3_report(),
)
cls.create_report(
identifier="hot-report",
process_group_identifier="category_number_one",
process_model_identifier="process-model-with-form",
user=user,
report_metadata=cls.process_model_with_form_report_fixture(),
)
except ProcessEntityNotFoundError:
print("Did not find process models so not adding report fixtures for them")
@classmethod
def create_report(
cls,
identifier: str,
process_group_identifier: str,
process_model_identifier: str,
user: UserModel,
report_metadata: ReportMetadata,
) -> None:
"""Make_fixture_report."""
process_instance_report = ProcessInstanceReportModel.query.filter_by(
identifier=identifier,
process_group_identifier=process_group_identifier,
process_model_identifier=process_model_identifier,
).first()
if process_instance_report is None:
process_instance_report = cls(
identifier=identifier,
process_group_identifier=process_group_identifier,
process_model_identifier=process_model_identifier,
created_by_id=user.id,
report_metadata=report_metadata,
)
db.session.add(process_instance_report)
db.session.commit()
@classmethod
def ticket_for_month_report(cls) -> dict:
"""Ticket_for_month_report."""
return {
"columns": [
{"Header": "id", "accessor": "id"},
{"Header": "month", "accessor": "month"},
{"Header": "milestone", "accessor": "milestone"},
{"Header": "req_id", "accessor": "req_id"},
{"Header": "feature", "accessor": "feature"},
{"Header": "priority", "accessor": "priority"},
],
"order": "month asc",
"filter_by": [
{
"field_name": "month",
"operator": "equals",
"field_value": "{{month}}",
}
],
}
@classmethod
def ticket_for_month_3_report(cls) -> dict:
"""Ticket_for_month_report."""
return {
"columns": [
{"Header": "id", "accessor": "id"},
{"Header": "month", "accessor": "month"},
{"Header": "milestone", "accessor": "milestone"},
{"Header": "req_id", "accessor": "req_id"},
{"Header": "feature", "accessor": "feature"},
{"Header": "dev_days", "accessor": "dev_days"},
{"Header": "priority", "accessor": "priority"},
],
"order": "month asc",
"filter_by": [
{"field_name": "month", "operator": "equals", "field_value": "3"}
],
}
@classmethod
def process_model_with_form_report_fixture(cls) -> dict:
"""Process_model_with_form_report_fixture."""
return {
"columns": [
{"Header": "id", "accessor": "id"},
{
"Header": "system_generated_number",
"accessor": "system_generated_number",
},
{
"Header": "user_generated_number",
"accessor": "user_generated_number",
},
{"Header": "product", "accessor": "product"},
],
"order": "-id",
}
@classmethod
def create_with_attributes(
cls,
identifier: str,
process_group_identifier: str,
process_model_identifier: str,
report_metadata: dict,
user: UserModel,
) -> ProcessInstanceReportModel:
"""Create_with_attributes."""
process_model = ProcessModelService().get_process_model(
group_id=process_group_identifier, process_model_id=process_model_identifier
)
process_instance_report = cls(
identifier=identifier,
process_group_identifier=process_model.process_group_id,
process_model_identifier=process_model.id,
created_by_id=user.id,
report_metadata=report_metadata,
)
db.session.add(process_instance_report)
db.session.commit()
return process_instance_report
def with_substitutions(self, field_value: Any, substitution_variables: dict) -> Any:
"""With_substitutions."""
if substitution_variables is not None:
for key, value in substitution_variables.items():
if isinstance(value, (str, int)):
field_value = str(field_value).replace(
"{{" + key + "}}", str(value)
)
return field_value
# modeled after https://github.com/suyash248/sqlalchemy-json-querybuilder
# Only the "equals" operator is supported for now.
# If we later filter in the database instead of in memory, we might use that library directly.
def passes_filter(
self, process_instance_dict: dict, substitution_variables: dict
) -> bool:
"""Passes_filter."""
if "filter_by" in self.report_metadata:
for filter_by in self.report_metadata["filter_by"]:
field_name = filter_by["field_name"]
operator = filter_by["operator"]
field_value = self.with_substitutions(
filter_by["field_value"], substitution_variables
)
if operator == "equals":
if str(process_instance_dict.get(field_name)) != str(field_value):
return False
return True
def order_things(self, process_instance_dicts: list) -> list:
"""Order_things."""
order_by = self.report_metadata["order_by"]
def order_by_function_for_lambda(
process_instance_dict: dict,
) -> list[Reversor | str | None]:
"""Order_by_function_for_lambda."""
comparison_values: list[Reversor | str | None] = []
for order_by_item in order_by:
if order_by_item.startswith("-"):
# remove leading - from order_by_item
order_by_item = order_by_item[1:]
sort_value = process_instance_dict.get(order_by_item)
comparison_values.append(Reversor(sort_value))
else:
sort_value = cast(
Optional[str], process_instance_dict.get(order_by_item)
)
comparison_values.append(sort_value)
return comparison_values
return sorted(process_instance_dicts, key=order_by_function_for_lambda)
def generate_report(
self,
process_instances: list[ProcessInstanceModel],
substitution_variables: dict | None,
) -> ProcessInstanceReportResult:
"""Generate_report."""
if substitution_variables is None:
substitution_variables = {}
def to_serialized(process_instance: ProcessInstanceModel) -> dict:
"""To_serialized."""
processor = ProcessInstanceProcessor(process_instance)
process_instance.data = processor.get_current_data()
return process_instance.serialized_flat
process_instance_dicts = map(to_serialized, process_instances)
results = []
for process_instance_dict in process_instance_dicts:
if self.passes_filter(process_instance_dict, substitution_variables):
results.append(process_instance_dict)
if "order_by" in self.report_metadata:
results = self.order_things(results)
if "columns" in self.report_metadata:
column_keys_to_keep = [
c["accessor"] for c in self.report_metadata["columns"]
]
pruned_results = []
for result in results:
# test membership rather than truthiness so falsy values (0, "", False) are kept
pruned_result = {
column_key: result[column_key]
for column_key in column_keys_to_keep
if column_key in result
}
pruned_results.append(pruned_result)
results = pruned_results
return ProcessInstanceReportResult(
report_metadata=self.report_metadata, results=results
)
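
A minimal filtering sketch showing how {{month}} placeholders in a report's filter_by metadata resolve through with_substitutions before comparison:

report = ProcessInstanceReportModel(
    identifier="for-month",
    process_group_identifier="sartography-admin",
    process_model_identifier="ticket",
    report_metadata=ProcessInstanceReportModel.ticket_for_month_report(),
)
row = {"id": 1, "month": "3", "milestone": "beta"}
assert report.passes_filter(row, {"month": "3"})
assert not report.passes_filter(row, {"month": "4"})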

View File

@ -0,0 +1,90 @@
"""Process_model."""
from __future__ import annotations
import enum
from dataclasses import dataclass
from dataclasses import field
from typing import Any
import marshmallow
from marshmallow import Schema
from marshmallow.decorators import post_load
from spiffworkflow_backend.models.file import File
class NotificationType(enum.Enum):
"""NotificationType."""
fault = "fault"
suspend = "suspend"
@dataclass(order=True)
class ProcessModelInfo:
"""ProcessModelInfo."""
sort_index: str = field(init=False)
id: str
display_name: str
description: str
process_group_id: str = ""
process_group: Any | None = None
is_master_spec: bool | None = False
standalone: bool | None = False
library: bool | None = False
primary_file_name: str | None = None
primary_process_id: str | None = None
libraries: list[str] = field(default_factory=list)
display_order: int | None = 0
is_review: bool = False
files: list[File] | None = field(default_factory=list[File])
fault_or_suspend_on_exception: str = NotificationType.fault.value
exception_notification_addresses: list[str] = field(default_factory=list)
def __post_init__(self) -> None:
"""__post_init__."""
self.sort_index = f"{self.process_group_id}:{self.id}"
def __eq__(self, other: Any) -> bool:
"""__eq__."""
if not isinstance(other, ProcessModelInfo):
return False
return other.id == self.id
class ProcessModelInfoSchema(Schema):
"""ProcessModelInfoSchema."""
class Meta:
"""Meta."""
model = ProcessModelInfo
id = marshmallow.fields.String(required=True)
display_name = marshmallow.fields.String(required=True)
description = marshmallow.fields.String()
is_master_spec = marshmallow.fields.Boolean(required=True)
standalone = marshmallow.fields.Boolean(required=True)
library = marshmallow.fields.Boolean(required=True)
display_order = marshmallow.fields.Integer(allow_none=True)
primary_file_name = marshmallow.fields.String(allow_none=True)
primary_process_id = marshmallow.fields.String(allow_none=True)
is_review = marshmallow.fields.Boolean(allow_none=True)
process_group_id = marshmallow.fields.String(allow_none=True)
libraries = marshmallow.fields.List(marshmallow.fields.String(), allow_none=True)
files = marshmallow.fields.List(marshmallow.fields.Nested("FileSchema"))
fault_or_suspend_on_exception = marshmallow.fields.String()
exception_notification_addresses = marshmallow.fields.List(
marshmallow.fields.String
)
@post_load
def make_spec(
self, data: dict[str, str | bool | int | NotificationType], **_: Any
) -> ProcessModelInfo:
"""Make_spec."""
return ProcessModelInfo(**data) # type: ignore
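
A minimal load sketch: make_spec turns validated input back into the dataclass, and __post_init__ derives sort_index (process_group_id defaults to an empty string):

data = {
    "id": "hello_world",
    "display_name": "Hello World",
    "description": "A demo process model",
    "is_master_spec": False,
    "standalone": False,
    "library": False,
}
model = ProcessModelInfoSchema().load(data)
assert model.sort_index == ":hello_world"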

Some files were not shown because too many files have changed in this diff.