commit 07dacdff99
Author: Madhurya Liyanage
Date:   2023-04-06 20:17:13 +05:30
191 changed files with 6771 additions and 9751 deletions

=== File: GitHub Actions workflow (backend CI tests) ===

@@ -16,10 +16,7 @@ jobs:
       fail-fast: false
       matrix:
         include:
-          # FIXME: https://github.com/mysql/mysql-connector-python/pull/86
-          # put back when mysql-connector-python updates protobuf
-          # right now mysql is forcing protobuf to version 3
-          # - { python: "3.11", os: "ubuntu-latest", session: "safety" }
+          - { python: "3.11", os: "ubuntu-latest", session: "safety" }
           - { python: "3.11", os: "ubuntu-latest", session: "mypy" }
           - { python: "3.10", os: "ubuntu-latest", session: "mypy" }
           - { python: "3.9", os: "ubuntu-latest", session: "mypy" }
@@ -156,7 +153,7 @@ jobs:
       - name: Upload coverage data
         # pin to upload coverage from only one matrix entry, otherwise coverage gets confused later
         if: always() && matrix.session == 'tests' && matrix.python == '3.11' && matrix.os == 'ubuntu-latest' && matrix.database == 'mysql'
-        uses: "actions/upload-artifact@v3.0.0"
+        uses: "actions/upload-artifact@v3"
        # this action doesn't seem to respect working-directory so include working-directory value in path
         with:
           name: coverage-data
@@ -164,18 +161,31 @@ jobs:
       - name: Upload documentation
         if: matrix.session == 'docs-build'
-        uses: actions/upload-artifact@v3.0.0
+        uses: actions/upload-artifact@v3
         with:
           name: docs
           path: docs/_build
       - name: Upload logs
         if: failure() && matrix.session == 'tests'
-        uses: "actions/upload-artifact@v3.0.0"
+        uses: "actions/upload-artifact@v3"
         with:
           name: logs-${{matrix.python}}-${{matrix.os}}-${{matrix.database}}
           path: "./log/*.log"
+  # burnettk created an account at https://app.snyk.io/org/kevin-jfx
+  # and added his SNYK_TOKEN secret under the spiff-arena repo.
+  snyk:
+    runs-on: ubuntu-latest
+    steps:
+      - uses: actions/checkout@master
+      - name: Run Snyk to check for vulnerabilities
+        uses: snyk/actions/python@master
+        with:
+          args: spiffworkflow-backend
+        env:
+          SNYK_TOKEN: ${{ secrets.SNYK_TOKEN }}
   run_pre_commit_checks:
     runs-on: ubuntu-latest
     defaults:
@@ -184,9 +194,6 @@ jobs:
     steps:
       - name: Check out the repository
         uses: actions/checkout@v3.3.0
-        with:
-          # Disabling shallow clone is recommended for improving relevancy of reporting in sonarcloud
-          fetch-depth: 0
       - name: Set up Python
         uses: actions/setup-python@v4.2.0
         with:
@@ -205,9 +212,6 @@ jobs:
     steps:
      - name: Check out the repository
        uses: actions/checkout@v3.3.0
-      with:
-        # Disabling shallow clone is recommended for improving relevancy of reporting in sonarcloud
-        fetch-depth: 0
      - name: Checkout Samples
        uses: actions/checkout@v3
        with:
@@ -281,7 +285,7 @@ jobs:
         # so just skip everything but main
         if: github.ref_name == 'main'
         with:
-          projectBaseDir: spiffworkflow-frontend
+          projectBaseDir: spiffworkflow-backend
         env:
           GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
           SONAR_TOKEN: ${{ secrets.SONAR_TOKEN }}
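Editor's note: for debugging the new snyk job locally, a rough CLI equivalent might look like the sketch below. This assumes the snyk CLI is installed and that the action's `args` value is passed through to `snyk test`; neither the command nor the token value is taken verbatim from the workflow.

    # rough local equivalent of the snyk job above (assumed mapping of the
    # action's `args` onto the CLI); SNYK_TOKEN is the same secret the job reads
    export SNYK_TOKEN='placeholder-token-from-app.snyk.io'
    snyk test spiffworkflow-backend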

=== File: GitHub Actions workflow (Docker image builds) ===

@@ -31,7 +31,7 @@ on:
   push:
     branches:
       - main
-      - feature/move_task_data_into_tables
+      - feature/use_tasks_as_logs

 jobs:
   create_frontend_docker_image:
@@ -54,7 +54,7 @@ jobs:
       - name: Get current date
         id: date
-        run: echo "date=$(date -u +'%Y-%m-%d_%H-%M-%S')" >> $GITHUB_OUTPUT
+        run: echo "date=$(date -u +'%Y-%m-%d_%H-%M-%S')" >> "$GITHUB_OUTPUT"
       - name: Extract metadata (tags, labels) for Docker
         id: meta
         uses: docker/metadata-action@v4.3.0
@@ -72,8 +72,8 @@ jobs:
           push: true
           tags: ${{ steps.meta.outputs.tags }}
           labels: ${{ steps.meta.outputs.labels }}
-      - name: Adding markdown
-        run: echo 'TAGS ${{ steps.meta.outputs.tags }}' >> $GITHUB_STEP_SUMMARY
+      - run: echo 'TAGS' >> "$GITHUB_STEP_SUMMARY"
+      - run: echo 'for tag in ${{ steps.meta.outputs.tags }}; do echo "* $tag"; done' >> "$GITHUB_STEP_SUMMARY"

   create_backend_docker_image:
     runs-on: ubuntu-latest
@@ -95,7 +95,7 @@ jobs:
       - name: Get current date
         id: date
-        run: echo "date=$(date -u +'%Y-%m-%d_%H-%M-%S')" >> $GITHUB_OUTPUT
+        run: echo "date=$(date -u +'%Y-%m-%d_%H-%M-%S')" >> "$GITHUB_OUTPUT"
       - name: Extract metadata (tags, labels) for Docker
         id: meta
         uses: docker/metadata-action@v4.3.0
@@ -114,4 +114,4 @@ jobs:
           tags: ${{ steps.meta.outputs.tags }}
           labels: ${{ steps.meta.outputs.labels }}
       - name: Adding markdown
-        run: echo 'TAGS ${{ steps.meta.outputs.tags }}' >> $GITHUB_STEP_SUMMARY
+        run: echo 'TAGS ${{ steps.meta.outputs.tags }}' >> "$GITHUB_STEP_SUMMARY"
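Editor's note: the quoting fixes above treat `$GITHUB_OUTPUT` and `$GITHUB_STEP_SUMMARY` as what they are, file paths supplied by the runner. A minimal sketch of how the step summary gets built (the tag values here are invented for illustration):

    # $GITHUB_STEP_SUMMARY names a file whose markdown contents are rendered
    # on the workflow run page; appending lines builds the summary up
    tags='myorg/frontend:latest myorg/frontend:2023-04-06'  # illustrative values
    {
      echo 'TAGS'
      for tag in $tags; do
        echo "* $tag"
      done
    } >> "$GITHUB_STEP_SUMMARY"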

=== File: GitHub Actions workflow (Cypress end-to-end tests) ===

@@ -108,21 +108,21 @@ jobs:
         run: ./bin/get_logs_from_docker_compose >./log/docker_compose.log
       - name: Upload logs
         if: failure()
-        uses: "actions/upload-artifact@v3.0.0"
+        uses: "actions/upload-artifact@v3"
         with:
           name: spiffworkflow-backend-logs
           path: "./spiffworkflow-backend/log/*.log"
       # https://github.com/cypress-io/github-action#artifacts
       - name: upload_screenshots
-        uses: actions/upload-artifact@v2
+        uses: actions/upload-artifact@v3
         if: failure()
         with:
           name: cypress-screenshots
           path: ./spiffworkflow-frontend/cypress/screenshots
       # Test run video was always captured, so this action uses "always()" condition
       - name: upload_videos
-        uses: actions/upload-artifact@v2
+        uses: actions/upload-artifact@v3
         if: failure()
         with:
           name: cypress-videos

=== File: .gitignore ===

@@ -1,3 +1,6 @@
 pyrightconfig.json
 .idea/
 t
+*~
+.dccache
+*~

=== File: .pre-commit-config.yaml ===

@@ -18,8 +18,7 @@ repos:
         # --line-length because then we can avoid the fancy line wrapping in more instances and jason, kb, and elizabeth
         # kind of prefer long lines rather than cutely-formatted sets of lines.
         # TODO: enable when it's safe to update the files
-        # args: [--preview, --line-length, "110"]
-        args: [--preview]
+        args: [--preview, --line-length, "119"]
       - id: check-added-large-files
         files: ^spiffworkflow-backend/

=== File: Jenkinsfile ===

@@ -32,6 +32,11 @@ pipeline {
         description: 'ID of Jenkins credential for Docker registry.',
         defaultValue: params.DOCKER_CRED_ID ?: 'MISSING'
       )
+      string(
+        name: 'DISCORD_WEBHOOK_CRED',
+        description: 'Name of credential with Discord webhook',
+        defaultValue: params.DISCORD_WEBHOOK_CRED ?: "",
+      )
       booleanParam(
         name: 'PUBLISH',
         description: 'Publish built Docker images.',
@@ -61,6 +66,16 @@ pipeline {
             image.push(env.DOCKER_TAG)
           }
       } }
+      post {
+        success { script {
+          if (params.DISCORD_WEBHOOK_CRED) {
+            discordNotify(
+              header: 'SpiffWorkflow Docker image published!',
+              cred: params.DISCORD_WEBHOOK_CRED,
+            )
+          }
+        } }
+      }
     }
   } // stages
   post {
@@ -68,3 +83,43 @@ pipeline {
   cleanup { cleanWs() }
 } // post
 } // pipeline
+
+def discordNotify(Map args=[:]) {
+  def opts = [
+    header: args.header ?: 'Deployment successful!',
+    title: args.title ?: "${env.JOB_NAME}#${env.BUILD_NUMBER}",
+    cred: args.cred ?: null,
+  ]
+  def repo = [
+    url: GIT_URL.minus('.git'),
+    branch: GIT_BRANCH.minus('origin/'),
+    commit: GIT_COMMIT.take(8),
+    prev: (
+      env.GIT_PREVIOUS_SUCCESSFUL_COMMIT ?: env.GIT_PREVIOUS_COMMIT ?: 'master'
+    ).take(8),
+  ]
+  wrap([$class: 'BuildUser']) {
+    BUILD_USER_ID = env.BUILD_USER_ID
+  }
+  withCredentials([
+    string(
+      credentialsId: opts.cred,
+      variable: 'DISCORD_WEBHOOK',
+    ),
+  ]) {
+    discordSend(
+      link: env.BUILD_URL,
+      result: currentBuild.currentResult,
+      webhookURL: env.DISCORD_WEBHOOK,
+      title: opts.title,
+      description: """
+        ${opts.header}
+        Image: [`${params.DOCKER_NAME}:${params.DOCKER_TAG}`](https://hub.docker.com/r/${params.DOCKER_NAME}/tags?name=${params.DOCKER_TAG})
+        Branch: [`${repo.branch}`](${repo.url}/commits/${repo.branch})
+        Commit: [`${repo.commit}`](${repo.url}/commit/${repo.commit})
+        Diff: [`${repo.prev}...${repo.commit}`](${repo.url}/compare/${repo.prev}...${repo.commit})
+        By: [`${BUILD_USER_ID}`](${repo.url}/commits?author=${BUILD_USER_ID})
+      """,
+    )
+  }
+}

=== File: new bash script (reorder python imports) ===

@@ -0,0 +1,11 @@
+#!/usr/bin/env bash
+
+function error_handler() {
+  >&2 echo "Exited with BAD EXIT CODE '${2}' in ${0} script at line: ${1}."
+  exit "$2"
+}
+trap 'error_handler ${LINENO} $?' ERR
+set -o errtrace -o errexit -o nounset -o pipefail
+
+# this intends to replicate the behavior of the pre-commit hook
+poetry run reorder-python-imports --application-directories=spiffworkflow-backend/src $(find spiffworkflow-backend/src -name '*.py' -type f -not -path '*load_database_models.py' -not -path '*/migrations/*')
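Editor's note: since the script intends to mirror the pre-commit hook, the same reordering can presumably be run through pre-commit itself. The hook id below is assumed from the tool's conventional configuration; it is not shown in this diff.

    # assumed equivalent via the pre-commit framework rather than poetry
    pre-commit run reorder-python-imports --all-files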

=== File: poetry.lock (generated) ===

3247 lines changed; file diff suppressed because it is too large.

=== File: pyproject.toml ===

@@ -13,71 +13,8 @@ classifiers = [

 [tool.poetry.dependencies]
 python = ">=3.11,<3.12"
-click = "^8.0.1"
-flask = "2.2.2"
-flask-admin = "*"
-flask-bcrypt = "*"
-flask-cors = "*"
-flask-mail = "*"
-flask-marshmallow = "*"
-flask-migrate = "*"
-flask-restful = "*"
-werkzeug = "*"
-# go back to main once https://github.com/sartography/SpiffWorkflow/pull/241 is merged
-SpiffWorkflow = {git = "https://github.com/sartography/SpiffWorkflow", rev = "main"}
-# SpiffWorkflow = {develop = true, path = "/Users/kevin/projects/github/sartography/SpiffWorkflow"}
-# SpiffWorkflow = {develop = true, path = "/home/jason/projects/github/sartography/SpiffWorkflow"}
-sentry-sdk = "^1.10"
-sphinx-autoapi = "^2.0"
-# flask-bpmn = {develop = true, path = "/home/jason/projects/github/sartography/flask-bpmn"}
-# flask-bpmn = {develop = true, path = "/Users/kevin/projects/github/sartography/flask-bpmn"}
-flask-bpmn = {git = "https://github.com/sartography/flask-bpmn", rev = "main"}
-mysql-connector-python = "^8.0.29"
-pytest-flask = "^1.2.0"
-pytest-flask-sqlalchemy = "^1.1.0"
-psycopg2 = "^2.9.3"
-typing-extensions = "^4.4.0"
-connexion = {extras = [ "swagger-ui",], version = "^2"}
-lxml = "^4.9.1"
-marshmallow-enum = "^1.5.1"
-marshmallow-sqlalchemy = "^0.28.0"
-PyJWT = "^2.6.0"
-gunicorn = "^20.1.0"
-python-keycloak = "^2.5.0"
-APScheduler = "^3.9.1"
-Jinja2 = "^3.1.2"
-RestrictedPython = "^6.0"
-Flask-SQLAlchemy = "^3"
-# type hinting stuff
-# these need to be in the normal (non dev-dependencies) section
-# because if not then poetry export won't have them and nox -s mypy --pythons 3.10
-# will fail
-types-Werkzeug = "^1.0.9"
-types-PyYAML = "^6.0.12"
-types-Flask = "^1.1.6"
-types-requests = "^2.28.6"
-types-pytz = "^2022.1.1"
-# https://github.com/dropbox/sqlalchemy-stubs/pull/251
-# someday get off github
-# sqlalchemy-stubs = "^0.4"
-# sqlalchemy-stubs = { git = "https://github.com/dropbox/sqlalchemy-stubs.git", rev = "master" }
-# sqlalchemy-stubs = {develop = true, path = "/Users/kevin/projects/github/sqlalchemy-stubs"}
-# for now use my fork
-sqlalchemy-stubs = { git = "https://github.com/burnettk/sqlalchemy-stubs.git", rev = "scoped-session-delete" }
-simplejson = "^3.17.6"

 [tool.poetry.dev-dependencies]
-pytest = "^7.1.2"
-coverage = {extras = ["toml"], version = "^6.1"}
-safety = "^2.3.1"
-mypy = ">=0.961"
-typeguard = "^2.13.2"
-xdoctest = {extras = ["colors"], version = "^1.0.1"}
-sphinx = "^5.0.2"
-sphinx-autobuild = ">=2021.3.14"
 pre-commit = "^2.20.0"
 flake8 = "^4.0.1"
 black = ">=21.10b0"
@ -89,71 +26,9 @@ bandit = "1.7.2"
flake8-bugbear = "^22.10.25" flake8-bugbear = "^22.10.25"
flake8-docstrings = "^1.6.0" flake8-docstrings = "^1.6.0"
flake8-rst-docstrings = "^0.2.7" flake8-rst-docstrings = "^0.2.7"
# flask-sqlalchemy-stubs = "^0.2"
pep8-naming = "^0.13.2"
darglint = "^1.8.1"
reorder-python-imports = "^3.9.0" reorder-python-imports = "^3.9.0"
pre-commit-hooks = "^4.0.1" pre-commit-hooks = "^4.0.1"
sphinx-click = "^4.3.0"
Pygments = "^2.10.0"
pyupgrade = "^3.1.0" pyupgrade = "^3.1.0"
furo = ">=2021.11.12"
[tool.poetry.scripts]
spiffworkflow-backend = "spiffworkflow_backend.__main__:main"
[tool.poetry.group.dev.dependencies] [tool.poetry.group.dev.dependencies]
tomli = "^2.0.1" tomli = "^2.0.1"
[tool.pytest.ini_options]
# ignore deprecation warnings from various packages that we don't control
filterwarnings = [
# note the use of single quote below to denote "raw" strings in TOML
# kombu/utils/compat.py:82
'ignore:SelectableGroups dict interface is deprecated. Use select.',
# flask_marshmallow/__init__.py:34
# marshmallow_sqlalchemy/convert.py:17
'ignore:distutils Version classes are deprecated. Use packaging.version instead.',
# connexion/spec.py:50
'ignore:Passing a schema to Validator.iter_errors is deprecated and will be removed in a future release',
# connexion/decorators/validation.py:16
'ignore:Accessing jsonschema.draft4_format_checker is deprecated and will be removed in a future release.',
# connexion/apis/flask_api.py:236
"ignore:'_request_ctx_stack' is deprecated and will be removed in Flask 2.3",
"ignore:Setting 'json_encoder' on the app or a blueprint is deprecated and will be removed in Flask 2.3",
"ignore:'JSONEncoder' is deprecated and will be removed in Flask 2.3",
"ignore:'app.json_encoder' is deprecated and will be removed in Flask 2.3"
]
[tool.coverage.paths]
source = ["src", "*/site-packages"]
tests = ["tests", "*/tests"]
[tool.coverage.run]
branch = true
source = ["spiffworkflow_backend", "tests"]
[tool.coverage.report]
show_missing = true
fail_under = 50
[tool.mypy]
strict = true
disallow_any_generics = false
warn_unreachable = true
pretty = true
show_column_numbers = true
show_error_codes = true
show_error_context = true
plugins = "sqlmypy"
# We get 'error: Module has no attribute "set_context"' for sentry-sdk without this option
implicit_reexport = true
# allow for subdirs to NOT require __init__.py
namespace_packages = true
explicit_package_bases = false
[build-system]
requires = ["poetry-core>=1.0.0"]
build-backend = "poetry.core.masonry.api"

=== File: Python script (save process instance BPMN JSON to /var/tmp) ===

@ -21,22 +21,14 @@ def main(process_instance_id: str) -> None:
os.environ[flask_env_key] = "whatevs" os.environ[flask_env_key] = "whatevs"
app = create_app() app = create_app()
with app.app_context(): with app.app_context():
process_instance = ProcessInstanceModel.query.filter_by( process_instance = ProcessInstanceModel.query.filter_by(id=process_instance_id).first()
id=process_instance_id
).first()
file_path = f"/var/tmp/{process_instance_id}_bpmn_json.json" file_path = f"/var/tmp/{process_instance_id}_bpmn_json.json"
if not process_instance: if not process_instance:
raise Exception( raise Exception(f"Could not find a process instance with id: {process_instance_id}")
f"Could not find a process instance with id: {process_instance_id}"
)
with open(file_path, "w", encoding="utf-8") as f: with open(file_path, "w", encoding="utf-8") as f:
f.write( f.write(json.dumps(ProcessInstanceProcessor._get_full_bpmn_json(process_instance)))
json.dumps(
ProcessInstanceProcessor._get_full_bpmn_json(process_instance)
)
)
print(f"Saved to {file_path}") print(f"Saved to {file_path}")

=== File: Python script (delete and re-import ticket process instances from CSV) ===

@@ -28,8 +28,7 @@ def main():
     with app.app_context():
         process_model_identifier_ticket = "ticket"
         db.session.query(ProcessInstanceModel).filter(
-            ProcessInstanceModel.process_model_identifier
-            == process_model_identifier_ticket
+            ProcessInstanceModel.process_model_identifier == process_model_identifier_ticket
         ).delete()
         db.session.commit()
@@ -60,9 +59,7 @@ def main():
             header = next(reader)
             for column_name in columns_to_data_key_mappings:
-                columns_to_header_index_mappings[column_name] = header.index(
-                    column_name
-                )
+                columns_to_header_index_mappings[column_name] = header.index(column_name)
             id_index = header.index("ID")
             priority_index = header.index("Priority")
             print(f"header: {header}")
@@ -87,9 +84,7 @@ def main():
                     desired_data_key,
                 ) in columns_to_data_key_mappings.items():
                     appropriate_index = columns_to_header_index_mappings[column_name]
-                    processor.bpmn_process_instance.data[desired_data_key] = row[
-                        appropriate_index
-                    ]
+                    processor.bpmn_process_instance.data[desired_data_key] = row[appropriate_index]
                 print(f"datas: {processor.bpmn_process_instance.data}")
                 if processor.bpmn_process_instance.data["month"] == "":

=== File: Python script (CSV import, variant) ===

@@ -84,9 +84,7 @@ def main():
                 ) in columns_to_data_key_mappings.items():
                     appropriate_index = columns_to_header_index_mappings[column_name]
                     print(f"appropriate_index: {appropriate_index}")
-                    processor.bpmn_process_instance.data[desired_data_key] = row[
-                        appropriate_index
-                    ]
+                    processor.bpmn_process_instance.data[desired_data_key] = row[appropriate_index]

                 # you at least need a month, or else this row in the csv is considered garbage
                 month_value = processor.bpmn_process_instance.data["month"]

=== File: bash script (fetch tokens for Keycloak realm users) ===

@@ -28,7 +28,7 @@ REALM_NAME=${2-spiffworkflow}
 while read -r input_line; do
   if ! grep -qE '(^#|email)' <<<"$input_line" ; then
     username=$(awk -F '@' '{print $1}' <<<"$input_line")
-    access_token=$("${script_dir}/get_token" "$username" "$username" "$REALM_NAME")
+    access_token=$("${script_dir}/get_token" "$username" "$username" "$REALM_NAME" || echo '')
    if [[ -z "$access_token" || "$access_token" == "null" ]]; then
       >&2 echo "ERROR: failed to get access token for '$username'"
     else
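Editor's note: the `|| echo ''` added above is what keeps the loop alive. Under `set -o errexit`, a failing command substitution in a plain assignment aborts the whole script before the explicit `-z` check ever runs. A minimal sketch of the pattern:

    #!/usr/bin/env bash
    set -o errexit
    # without `|| echo ''`, the failing substitution would terminate the script here
    token=$(false || echo '')
    if [[ -z "$token" ]]; then
      >&2 echo "ERROR: failed to get access token"  # reached; the loop can continue
    fi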

=== File: bash script (test setup, postgres) ===

@ -44,6 +44,17 @@ if [[ "${1:-}" == "clean" ]]; then
# TODO: check to see if the db already exists and we can connect to it. also actually clean it up. # TODO: check to see if the db already exists and we can connect to it. also actually clean it up.
# start postgres in background with one db # start postgres in background with one db
if [[ "${SPIFFWORKFLOW_BACKEND_DATABASE_TYPE:-}" == "postgres" ]]; then if [[ "${SPIFFWORKFLOW_BACKEND_DATABASE_TYPE:-}" == "postgres" ]]; then
container_name="postgres-spiff"
container_regex="^postgres-spiff$"
if [[ -n "$(docker ps -qa -f name=$container_regex)" ]]; then
echo ":: Found postgres container - $container_name"
if [[ -n "$(docker ps -q -f name=$container_regex)" ]]; then
echo ":: Stopping running container - $container_name"
docker stop $container_name
fi
echo ":: Removing stopped container - $container_name"
docker rm $container_name
fi
if ! docker exec -it postgres-spiff psql -U spiffworkflow_backend spiffworkflow_backend_unit_testing -c "select 1"; then if ! docker exec -it postgres-spiff psql -U spiffworkflow_backend spiffworkflow_backend_unit_testing -c "select 1"; then
docker run --name postgres-spiff -p 5432:5432 -e POSTGRES_PASSWORD=spiffworkflow_backend -e POSTGRES_USER=spiffworkflow_backend -e POSTGRES_DB=spiffworkflow_backend_unit_testing -d postgres docker run --name postgres-spiff -p 5432:5432 -e POSTGRES_PASSWORD=spiffworkflow_backend -e POSTGRES_USER=spiffworkflow_backend -e POSTGRES_DB=spiffworkflow_backend_unit_testing -d postgres
sleep 4 # classy sleep 4 # classy
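Editor's note: the new cleanup block leans on the difference between `docker ps -q` and `docker ps -qa`; a short sketch of the distinction it relies on:

    # -q prints only container ids; adding -a includes stopped containers
    docker ps -qa -f name='^postgres-spiff$'  # any container with that exact name
    docker ps -q  -f name='^postgres-spiff$'  # only if it is currently running
    # hence the script stops it only if running, then removes it once stopped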

=== File: new bash script (run a process model via the backend API) ===

@@ -0,0 +1,70 @@
+#!/usr/bin/env bash
+
+function error_handler() {
+  >&2 echo "Exited with BAD EXIT CODE '${2}' in ${0} script at line: ${1}."
+  exit "$2"
+}
+trap 'error_handler ${LINENO} $?' ERR
+set -o errtrace -o errexit -o nounset -o pipefail
+
+script_dir="$( cd -- "$(dirname "$0")" >/dev/null 2>&1 ; pwd -P )"
+
+if [[ -z "${KEYCLOAK_BASE_URL:-}" ]]; then
+  export KEYCLOAK_BASE_URL=https://keycloak.dev.spiffworkflow.org
+fi
+if [[ -z "${BACKEND_BASE_URL:-}" ]]; then
+  export BACKEND_BASE_URL=https://api.dev.spiffworkflow.org
+fi
+
+process_model_identifier="${1:-}"
+username="${2:-admin}"
+password="${3:-admin}"
+realm_name="${4:-spiffworkflow}"
+
+if [[ -z "${1:-}" ]]; then
+  >&2 echo "usage: $(basename "$0") [process_model_identifier] [username: OPTIONAL] [password: OPTIONAL] [realm_name: OPTIONAL]"
+  exit 1
+fi
+
+modified_process_model_identifier=$(tr '/' ':' <<<"$process_model_identifier")
+
+function check_result_for_error() {
+  local result="$1"
+  error_code=$(jq '.error_code' <<<"$result")
+  if [[ -n "$error_code" && "$error_code" != "null" ]]; then
+    >&2 echo "ERROR: Failed to run process instance. Received error: $result"
+    exit 1
+  fi
+}
+
+function process_next_task() {
+  local next_task="$1"
+  if [[ -n "$next_task" && "$next_task" != "null" ]]; then
+    task_type=$(jq -r '.type' <<<"$next_task")
+    task_state=$(jq -r '.state' <<<"$next_task")
+    task_guid=$(jq -r '.id' <<<"$next_task")
+    if grep -qE "Manual ?Task" <<<"$task_type" && [[ "${task_state}" == "READY" ]]; then
+      next_task=$(curl --silent -X PUT "${BACKEND_BASE_URL}/v1.0/tasks/${process_instance_id}/${task_guid}" -H "Authorization: Bearer $access_token")
+      check_result_for_error "$next_task"
+      process_next_task "$next_task"
+    elif [[ "$(jq '.ok' <<<"$next_task")" == "null" ]]; then
+      echo -e "\n\nThe next task is not a Manual Task and requires user input. It must be completed manually."
+      echo "$next_task"
+    fi
+  fi
+}
+
+access_token=$("${script_dir}/get_token" "$username" "$password" "$realm_name")
+curl --silent -X POST "${BACKEND_BASE_URL}/v1.0/login_with_access_token?access_token=${access_token}" -H "Authorization: Bearer $access_token" >/dev/null
+
+result=$(curl --silent -X POST "${BACKEND_BASE_URL}/v1.0/process-instances/${modified_process_model_identifier}" -H "Authorization: Bearer $access_token")
+process_instance_id=$(jq -r '.id' <<<"$result")
+if ! grep -qE '^[0-9]+$' <<<"$process_instance_id"; then
+  >&2 echo "ERROR: Did not receive valid process instance id when instantiating process model. result was ${result}"
+  exit 1
+fi
+
+result=$(curl --silent -X POST "${BACKEND_BASE_URL}/v1.0/process-instances/${modified_process_model_identifier}/${process_instance_id}/run" -H "Authorization: Bearer $access_token")
+check_result_for_error "$result"
+next_task=$(jq '.next_task' <<<"$result")
+process_next_task "$next_task"
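Editor's note: a hypothetical invocation of the new script. The script path and model id below are illustrative; only the argument order comes from the usage message above.

    # run a model against a local backend instead of the dev defaults; the
    # script name here is hypothetical (the diff does not show the file path),
    # and slashes in the model id become colons in the API URL (see the tr call)
    BACKEND_BASE_URL=http://localhost:7000 \
    KEYCLOAK_BASE_URL=http://localhost:7002 \
      ./run_process_model misc/example-model admin admin spiffworkflow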

=== File: Python script (validate process models) ===

@ -13,8 +13,7 @@ def main() -> None:
for bpmn_errors in failing_process_models: for bpmn_errors in failing_process_models:
print(bpmn_errors) print(bpmn_errors)
if ( if (
os.environ.get("SPIFFWORKFLOW_BACKEND_FAIL_ON_INVALID_PROCESS_MODELS") os.environ.get("SPIFFWORKFLOW_BACKEND_FAIL_ON_INVALID_PROCESS_MODELS") != "false"
!= "false"
and len(failing_process_models) > 0 and len(failing_process_models) > 0
): ):
exit(1) exit(1)

=== File: backend test fixtures (conftest) ===

@@ -19,8 +19,6 @@ from spiffworkflow_backend.services.process_instance_service import (
 )
 from spiffworkflow_backend.services.process_model_service import ProcessModelService

-# from tests.spiffworkflow_backend.helpers.test_data import load_test_spec
-
 # We need to call this before importing spiffworkflow_backend
 # otherwise typeguard cannot work. hence the noqa: E402
@ -47,7 +45,8 @@ def app() -> Flask:
def with_db_and_bpmn_file_cleanup() -> None: def with_db_and_bpmn_file_cleanup() -> None:
"""Do it cleanly!""" """Do it cleanly!"""
meta = db.metadata meta = db.metadata
db.session.execute(db.update(BpmnProcessModel, values={"parent_process_id": None})) db.session.execute(db.update(BpmnProcessModel).values(top_level_process_id=None))
db.session.execute(db.update(BpmnProcessModel).values(direct_parent_process_id=None))
for table in reversed(meta.sorted_tables): for table in reversed(meta.sorted_tables):
db.session.execute(table.delete()) db.session.execute(table.delete())

=== File: bash script (add test users to Keycloak) ===

@ -54,9 +54,10 @@ backend_token=$(jq -r '.access_token' <<< "$result")
function add_user() { function add_user() {
local user_email=$1 local user_email=$1
local username=$2 local username=$2
local user_attribute_one=$3 local pass=$3
local user_attribute_one=$4
local credentials='{"type":"password","value":"'"${username}"'","temporary":false}' local credentials='{"type":"password","value":"'"${pass}"'","temporary":false}'
local data='{"email":"'"${user_email}"'", "enabled":"true", "username":"'"${username}"'", "credentials":['"${credentials}"']' local data='{"email":"'"${user_email}"'", "enabled":"true", "username":"'"${username}"'", "credentials":['"${credentials}"']'
if [[ -n "$user_attribute_one" ]]; then if [[ -n "$user_attribute_one" ]]; then
@@ -79,18 +80,31 @@ while read -r input_line; do
   if ! grep -qE '^#' <<<"$input_line" ; then
     if [[ "$first_line_processed" == "false" ]]; then
       email_header=$(awk -F ',' '{print $1}' <<<"$input_line")
+      pass_header=$(awk -F ',' '{print $2}' <<<"$input_line")
       if [[ "$email_header" != "email" ]]; then
         >&2 echo "ERROR: the first column in the first row must be email."
         exit 1
       fi
-      custom_attribute_one=$(awk -F ',' '{print $2}' <<<"$input_line")
+      if [[ "$pass_header" != "pass" ]]; then
+        >&2 echo "ERROR: the second column in the first row must be pass."
+        exit 1
+      fi
+      custom_attribute_one=$(awk -F ',' '{print $3}' <<<"$input_line")
       first_line_processed="true"
     elif [[ -n "$input_line" ]]; then
       echo "Importing: $input_line"
       user_email=$(awk -F ',' '{print $1}' <<<"$input_line")
       username=$(awk -F '@' '{print $1}' <<<"$user_email")
-      user_attribute_one=$(awk -F ',' '{print $2}' <<<"$input_line")
-      http_code=$(add_user "$user_email" "$username" "$user_attribute_one")
+
+      if [[ "$username" == "$ADMIN_USERNAME" || "$user_email" == "$ADMIN_USERNAME" ]]; then
+        >&2 echo "ERROR: The user used as the admin user matches a user in the current import list. This should not happen. Comment out that user from the list or use a different admin user: ${ADMIN_USERNAME}"
+        exit 1
+      fi
+
+      password=$(awk -F ',' '{print $2}' <<<"$input_line")
+      echo "Password: $password"
+      user_attribute_one=$(awk -F ',' '{print $3}' <<<"$input_line")
+      http_code=$(add_user "$user_email" "$username" "$password" "$user_attribute_one")
       if [[ "$http_code" == "409" ]]; then
         user_info=$(curl --fail --silent --location --request GET "${KEYCLOAK_BASE_URL}/admin/realms/${keycloak_realm}/users?username=${username}&exact=true" \
@@ -106,7 +120,7 @@ while read -r input_line; do
           -H 'Content-Type: application/json' \
           -H "Authorization: Bearer $backend_token"
-        http_code=$(add_user "$user_email" "$username" "$user_attribute_one")
+        http_code=$(add_user "$user_email" "$username" "$password" "$user_attribute_one")
       fi
       if [[ "$http_code" != "201" ]]; then
         >&2 echo "ERROR: Failed to create user: ${user_email} with http_code: ${http_code}"

=== File: bash script (start Keycloak container) ===

@@ -26,9 +26,10 @@ fi
 # https://stackoverflow.com/a/60579344/6090676
 container_name="keycloak"
-if [[ -n "$(docker ps -qa -f name=$container_name)" ]]; then
+container_regex="^keycloak$"
+if [[ -n "$(docker ps -qa -f name=$container_regex)" ]]; then
   echo ":: Found container - $container_name"
-  if [[ -n "$(docker ps -q -f name=$container_name)" ]]; then
+  if [[ -n "$(docker ps -q -f name=$container_regex)" ]]; then
     echo ":: Stopping running container - $container_name"
     docker stop $container_name
   fi
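Editor's note: the switch from `name=$container_name` to an anchored `name=$container_regex` matters because docker's name filter does substring (regex) matching, per the stackoverflow link in the diff. A quick sketch:

    # a bare name filter would also match e.g. "keycloak-postgres"
    docker ps -qa -f name='keycloak'    # substring match: keycloak, keycloak-db, ...
    docker ps -qa -f name='^keycloak$'  # anchored: only the container named keycloak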

=== File: Keycloak realm export (JSON) ===

@@ -396,7 +396,7 @@
     "otpPolicyLookAheadWindow" : 1,
     "otpPolicyPeriod" : 30,
     "otpPolicyCodeReusable" : false,
-    "otpSupportedApplications" : [ "totpAppGoogleName", "totpAppFreeOTPName" ],
+    "otpSupportedApplications" : [ "totpAppFreeOTPName", "totpAppGoogleName" ],
     "webAuthnPolicyRpEntityName" : "keycloak",
     "webAuthnPolicySignatureAlgorithms" : [ "ES256" ],
    "webAuthnPolicyRpId" : "",
@@ -807,190 +807,6 @@
     "realmRoles" : [ "default-roles-spiffworkflow" ],
     "notBefore" : 0,
     "groups" : [ ]
-  }, {
-    "id" : "3730e6ec-4b0c-4fbe-a34b-2cd43d8c9854",
-    "createdTimestamp" : 1678461819329,
-    "username" : "core10.contributor",
-    "enabled" : true,
-    "totp" : false,
-    "emailVerified" : false,
-    "email" : "core10.contributor@status.im",
-    "attributes" : {
-      "spiffworkflow-employeeid" : [ "225" ]
-    },
-    "credentials" : [ {
-      "id" : "223cbe3b-d432-4707-b826-6220caa14bd7",
-      "type" : "password",
-      "createdDate" : 1678461819366,
-      "secretData" : "{\"value\":\"Mp81SeHhDQa2U/i/S2CfPnKvjwRDJCKZMgCQX3BkZWE/a6791XjXmwB8DE5qS8tiST68BQoQRuc1VCiNKL3zaQ==\",\"salt\":\"Jb0BB2tIQ+HUJQIFr82g9w==\",\"additionalParameters\":{}}",
-      "credentialData" : "{\"hashIterations\":27500,\"algorithm\":\"pbkdf2-sha256\",\"additionalParameters\":{}}"
-    } ],
-    "disableableCredentialTypes" : [ ],
-    "requiredActions" : [ ],
-    "realmRoles" : [ "default-roles-spiffworkflow" ],
-    "notBefore" : 0,
-    "groups" : [ ]
-  }, {
-    "id" : "88e7ca9e-1825-4d4a-9f60-29368023c67b",
-    "createdTimestamp" : 1678461819411,
-    "username" : "core11.contributor",
-    "enabled" : true,
-    "totp" : false,
-    "emailVerified" : false,
-    "email" : "core11.contributor@status.im",
-    "attributes" : {
-      "spiffworkflow-employeeid" : [ "226" ]
-    },
-    "credentials" : [ {
-      "id" : "46dc7656-b70b-4d86-80fc-aa08d807be2b",
-      "type" : "password",
-      "createdDate" : 1678461819447,
-      "secretData" : "{\"value\":\"hgBEI05fhPMVx47O9KmnrTvPomKJXK0IjEHZ30zM3fu6maT2fOHGh4+ti6MVhKqQeXKZR4wtC3i1RoqLNOsjpQ==\",\"salt\":\"BWxZnmTfzggGqzVKkFY+vQ==\",\"additionalParameters\":{}}",
-      "credentialData" : "{\"hashIterations\":27500,\"algorithm\":\"pbkdf2-sha256\",\"additionalParameters\":{}}"
-    } ],
-    "disableableCredentialTypes" : [ ],
-    "requiredActions" : [ ],
-    "realmRoles" : [ "default-roles-spiffworkflow" ],
-    "notBefore" : 0,
-    "groups" : [ ]
-  }, {
-    "id" : "6504eeda-be24-488b-ace4-1d50a7a354bc",
-    "createdTimestamp" : 1678461819494,
-    "username" : "core12.contributor",
-    "enabled" : true,
-    "totp" : false,
-    "emailVerified" : false,
-    "email" : "core12.contributor@status.im",
-    "attributes" : {
-      "spiffworkflow-employeeid" : [ "227" ]
-    },
-    "credentials" : [ {
-      "id" : "bde05120-10b5-4796-b559-9238847d2604",
-      "type" : "password",
-      "createdDate" : 1678461819527,
-      "secretData" : "{\"value\":\"njdHu9w1jeSvaNbdwVf0X+3TZaHmZVwUc+/TOAtv05eNGBIW9Vt1+500AsLReHS8lb/I3fglr5I9ZskYHUc0fA==\",\"salt\":\"lH6xJHf1jQGX1j4bYH6GXA==\",\"additionalParameters\":{}}",
-      "credentialData" : "{\"hashIterations\":27500,\"algorithm\":\"pbkdf2-sha256\",\"additionalParameters\":{}}"
-    } ],
-    "disableableCredentialTypes" : [ ],
-    "requiredActions" : [ ],
-    "realmRoles" : [ "default-roles-spiffworkflow" ],
-    "notBefore" : 0,
-    "groups" : [ ]
-  }, {
-    "id" : "ed249cd3-c66e-46e0-9184-1e6468b57afa",
-    "createdTimestamp" : 1678461819557,
-    "username" : "core13.contributor",
-    "enabled" : true,
-    "totp" : false,
-    "emailVerified" : false,
-    "email" : "core13.contributor@status.im",
-    "attributes" : {
-      "spiffworkflow-employeeid" : [ "228" ]
-    },
-    "credentials" : [ {
-      "id" : "81b65ee8-6fcd-4cd6-8886-aa44feefa55f",
-      "type" : "password",
-      "createdDate" : 1678461819592,
-      "secretData" : "{\"value\":\"ywBsPI0pdoCOjNWinYNZQBBzL3NRp2u2jv3aXBGxneTo9v8XaVweGL52HIyTikdfmX46TEMIH6LQopaYFcwhng==\",\"salt\":\"GTw17rcE4UvB/Dx4UUkAog==\",\"additionalParameters\":{}}",
-      "credentialData" : "{\"hashIterations\":27500,\"algorithm\":\"pbkdf2-sha256\",\"additionalParameters\":{}}"
-    } ],
-    "disableableCredentialTypes" : [ ],
-    "requiredActions" : [ ],
-    "realmRoles" : [ "default-roles-spiffworkflow" ],
-    "notBefore" : 0,
-    "groups" : [ ]
-  }, {
-    "id" : "1b7b3aa4-b0fe-46c7-a9a1-3fb3c99c7576",
-    "createdTimestamp" : 1678461819624,
-    "username" : "core14.contributor",
-    "enabled" : true,
-    "totp" : false,
-    "emailVerified" : false,
-    "email" : "core14.contributor@status.im",
-    "attributes" : {
-      "spiffworkflow-employeeid" : [ "229" ]
-    },
-    "credentials" : [ {
-      "id" : "0c24ffe5-cb97-4b0d-a0d1-920de540742e",
-      "type" : "password",
-      "createdDate" : 1678461819658,
-      "secretData" : "{\"value\":\"3RXjoEUpqxH6RM0sZUf393H9nzyVADId8IWNru9fWgdQg6tHaZezRBZ/lRRERvvdmLiupQ3cMsL/HHvPRQA6tA==\",\"salt\":\"zkaBJY+Dvg5Az74MACBBUg==\",\"additionalParameters\":{}}",
-      "credentialData" : "{\"hashIterations\":27500,\"algorithm\":\"pbkdf2-sha256\",\"additionalParameters\":{}}"
-    } ],
-    "disableableCredentialTypes" : [ ],
-    "requiredActions" : [ ],
-    "realmRoles" : [ "default-roles-spiffworkflow" ],
-    "notBefore" : 0,
-    "groups" : [ ]
-  }, {
-    "id" : "8e2b39a8-a744-4345-928f-da1a36f15f46",
-    "createdTimestamp" : 1678461819686,
-    "username" : "core15.contributor",
-    "enabled" : true,
-    "totp" : false,
-    "emailVerified" : false,
-    "email" : "core15.contributor@status.im",
-    "attributes" : {
-      "spiffworkflow-employeeid" : [ "230" ]
-    },
-    "credentials" : [ {
-      "id" : "14a91e80-cec9-44cf-aa85-28e0043f660d",
-      "type" : "password",
-      "createdDate" : 1678461819720,
-      "secretData" : "{\"value\":\"JnP9MpLDM92LuzJnEVUy0vzm9LoSttezepYu4ANfJlmcS6cUvnnh1yDKm43I2YzM4+mXRdxJyoLZTk/ZpmshSQ==\",\"salt\":\"5CKz6mrqr4IaUeEuu/hR9Q==\",\"additionalParameters\":{}}",
-      "credentialData" : "{\"hashIterations\":27500,\"algorithm\":\"pbkdf2-sha256\",\"additionalParameters\":{}}"
-    } ],
-    "disableableCredentialTypes" : [ ],
-    "requiredActions" : [ ],
-    "realmRoles" : [ "default-roles-spiffworkflow" ],
-    "notBefore" : 0,
-    "groups" : [ ]
-  }, {
-    "id" : "ffe3e131-9479-49d2-8125-83dc86a16478",
-    "createdTimestamp" : 1678461819751,
-    "username" : "core16.contributor",
-    "enabled" : true,
-    "totp" : false,
-    "emailVerified" : false,
-    "email" : "core16.contributor@status.im",
-    "attributes" : {
-      "spiffworkflow-employeeid" : [ "231" ]
-    },
-    "credentials" : [ {
-      "id" : "cf010c6c-035e-4a2f-ab74-5617fd23c808",
-      "type" : "password",
-      "createdDate" : 1678461819786,
-      "secretData" : "{\"value\":\"WeZ+YxLVtjRhlLZnb6j3AfecmQEsvTm3iM8ZqQthgq9c4BuZ23qare3PEVlRCA1+Oj5sAOOS1hs9iab6ia49wQ==\",\"salt\":\"uai22Okju4dg7GfO7p3C1Q==\",\"additionalParameters\":{}}",
-      "credentialData" : "{\"hashIterations\":27500,\"algorithm\":\"pbkdf2-sha256\",\"additionalParameters\":{}}"
-    } ],
-    "disableableCredentialTypes" : [ ],
-    "requiredActions" : [ ],
-    "realmRoles" : [ "default-roles-spiffworkflow" ],
-    "notBefore" : 0,
-    "groups" : [ ]
-  }, {
-    "id" : "94bcef08-2af1-4805-864d-cbabcd851d67",
-    "createdTimestamp" : 1678461819815,
-    "username" : "core17.contributor",
-    "enabled" : true,
-    "totp" : false,
-    "emailVerified" : false,
-    "email" : "core17.contributor@status.im",
-    "attributes" : {
-      "spiffworkflow-employeeid" : [ "232" ]
-    },
-    "credentials" : [ {
-      "id" : "c7a58ff0-7c56-464b-9009-b6e845075087",
-      "type" : "password",
-      "createdDate" : 1678461819850,
-      "secretData" : "{\"value\":\"R53+DKM2eyUXDYJDjW9BtwdY+x0/CUhgUDDYjip7BvGAepzRqPvZVbCLqJjFf6YctO4Va7F65n4evd40GbO7fQ==\",\"salt\":\"U/ia7H+I4yeD3bpP1vnH6Q==\",\"additionalParameters\":{}}",
-      "credentialData" : "{\"hashIterations\":27500,\"algorithm\":\"pbkdf2-sha256\",\"additionalParameters\":{}}"
-    } ],
-    "disableableCredentialTypes" : [ ],
-    "requiredActions" : [ ],
-    "realmRoles" : [ "default-roles-spiffworkflow" ],
-    "notBefore" : 0,
-    "groups" : [ ]
   }, {
     "id" : "3b81b45e-759b-4d7a-aa90-adf7b447208c",
     "createdTimestamp" : 1676302140358,
@@ -1084,8 +900,8 @@
     "notBefore" : 0,
     "groups" : [ ]
   }, {
-    "id" : "5119e7f6-9b0f-4e04-824a-9c5ef87fdb42",
+    "id" : "8c6cf190-66e3-4c8d-aa06-1b9972ecd982",
-    "createdTimestamp" : 1678126023934,
+    "createdTimestamp" : 1680538438437,
     "username" : "core6.contributor",
     "enabled" : true,
     "totp" : false,
@@ -1095,79 +911,10 @@
       "spiffworkflow-employeeid" : [ "199" ]
     },
     "credentials" : [ {
-      "id" : "f219e401-0fdb-4b73-be77-d01bb0caa448",
+      "id" : "1dadc9a8-6f7d-4795-bcc7-2b9d8aacb54a",
       "type" : "password",
-      "createdDate" : 1678126023967,
+      "createdDate" : 1680538438553,
-      "secretData" : "{\"value\":\"zdr8Psnlti56oHo8f/wuuZb5p7ZRpDQKHGFsrkjtl0VaOn2uNOeUmCqXLQ4UGyGssK8Qn8s8R62yrFKUNeeSjA==\",\"salt\":\"9MlVZL9xo3OWvlsvyXt0UQ==\",\"additionalParameters\":{}}",
+      "secretData" : "{\"value\":\"YbDgbKbiIjHB76RAJN7Q1AWYkdNvDMHUC1P3RJ6AV8ASEUr6fJ8U11WroIMmkiWs1TlewJi0mF4rWBsVkLzjlg==\",\"salt\":\"BbrA/rjtvxwrZAsS3BYARA==\",\"additionalParameters\":{}}",
-      "credentialData" : "{\"hashIterations\":27500,\"algorithm\":\"pbkdf2-sha256\",\"additionalParameters\":{}}"
-    } ],
-    "disableableCredentialTypes" : [ ],
-    "requiredActions" : [ ],
-    "realmRoles" : [ "default-roles-spiffworkflow" ],
-    "notBefore" : 0,
-    "groups" : [ ]
-  }, {
-    "id" : "89d57569-1a90-412a-ba01-aa8ff19ed171",
-    "createdTimestamp" : 1678461819085,
-    "username" : "core7.contributor",
-    "enabled" : true,
-    "totp" : false,
-    "emailVerified" : false,
-    "email" : "core7.contributor@status.im",
-    "attributes" : {
-      "spiffworkflow-employeeid" : [ "222" ]
-    },
-    "credentials" : [ {
-      "id" : "cfeb64ec-a38a-4f95-b0cd-28b5501524d8",
-      "type" : "password",
-      "createdDate" : 1678461819121,
-      "secretData" : "{\"value\":\"w4WKqWXTlin6MPQi0mO+Bvktb2zuMdIylqNNxYgBCnd5vwzq2widp7G9f3wz8Iy0wY8K2rqBjdSmmbZ7fJ8//Q==\",\"salt\":\"SRuRkx3572cDGoWhqAQGLQ==\",\"additionalParameters\":{}}",
-      "credentialData" : "{\"hashIterations\":27500,\"algorithm\":\"pbkdf2-sha256\",\"additionalParameters\":{}}"
-    } ],
-    "disableableCredentialTypes" : [ ],
-    "requiredActions" : [ ],
-    "realmRoles" : [ "default-roles-spiffworkflow" ],
-    "notBefore" : 0,
-    "groups" : [ ]
-  }, {
-    "id" : "81efd609-b6ae-42ec-800e-d6fcca2f8282",
-    "createdTimestamp" : 1678461819150,
-    "username" : "core8.contributor",
-    "enabled" : true,
-    "totp" : false,
-    "emailVerified" : false,
-    "email" : "core8.contributor@status.im",
-    "attributes" : {
-      "spiffworkflow-employeeid" : [ "223" ]
-    },
-    "credentials" : [ {
-      "id" : "0b476f6f-7aa4-4f75-bf5c-ac47521f3900",
-      "type" : "password",
-      "createdDate" : 1678461819185,
-      "secretData" : "{\"value\":\"ALWI40OEZUhMJ1CQTV9wSrwQUWfYNiYbN2JTmCUfbLUcUbY+rTrKOfAn9Mc/bCEFJomiTb9u/eqnkKX/lCGgew==\",\"salt\":\"wW2T8PkpCnnPfMNwpPVUVQ==\",\"additionalParameters\":{}}",
-      "credentialData" : "{\"hashIterations\":27500,\"algorithm\":\"pbkdf2-sha256\",\"additionalParameters\":{}}"
-    } ],
-    "disableableCredentialTypes" : [ ],
-    "requiredActions" : [ ],
-    "realmRoles" : [ "default-roles-spiffworkflow" ],
-    "notBefore" : 0,
-    "groups" : [ ]
-  }, {
-    "id" : "a1233c9f-e59a-48dc-aaa7-1513f1aa5654",
-    "createdTimestamp" : 1678461819225,
-    "username" : "core9.contributor",
-    "enabled" : true,
-    "totp" : false,
-    "emailVerified" : false,
-    "email" : "core9.contributor@status.im",
-    "attributes" : {
-      "spiffworkflow-employeeid" : [ "224" ]
-    },
-    "credentials" : [ {
-      "id" : "907b9d46-b8a3-4a14-ab89-b07d2c4d431a",
-      "type" : "password",
-      "createdDate" : 1678461819266,
-      "secretData" : "{\"value\":\"v9aFLHzLyiwWuAxNeVtRjtXzRtug6KU2f19SbS8dBdPC0mlHORoLYXy6VoAMdcTv8bfrW6e9iCgqWnXdXU6yMg==\",\"salt\":\"giVxblJWbFNNPiZZKxWYxg==\",\"additionalParameters\":{}}",
       "credentialData" : "{\"hashIterations\":27500,\"algorithm\":\"pbkdf2-sha256\",\"additionalParameters\":{}}"
     } ],
     "disableableCredentialTypes" : [ ],
@@ -1615,8 +1362,8 @@
     "notBefore" : 0,
     "groups" : [ ]
   }, {
-    "id" : "4f3fadc8-f0a3-45fb-8710-c054385b866b",
+    "id" : "1a8cb2a3-09ec-4f24-9f5e-13bab170c4a9",
-    "createdTimestamp" : 1676302141941,
+    "createdTimestamp" : 1680210955180,
     "username" : "infra.project-lead",
     "enabled" : true,
     "totp" : false,
@@ -1626,10 +1373,10 @@
       "spiffworkflow-employeeid" : [ "130" ]
     },
     "credentials" : [ {
-      "id" : "e422f671-1693-4469-8cdc-0ea7dcb27c66",
+      "id" : "1283acee-35b4-40cd-a1cb-9dd3c41dfd3c",
       "type" : "password",
-      "createdDate" : 1676302141975,
+      "createdDate" : 1680210955239,
-      "secretData" : "{\"value\":\"gWFNRdQhmsN2IMyaZEHgTk8A0mna72VYfeWk7PX31MhBQjQIGsctuEKK3TNxiB046LM8ZiUntA59sTPBgouVeQ==\",\"salt\":\"AtU0bmAz1z4f7wh/Z/ru1Q==\",\"additionalParameters\":{}}",
+      "secretData" : "{\"value\":\"7wW+4snc/57IFEyCApWM7jwxJSLAlndSy/F3rSE0KOv/StS4HOByov02uDuTQ3h4CbW+zVp4+EqPFJiNWgf5WA==\",\"salt\":\"/BYeWVg0iy8Ou/YroWoeSw==\",\"additionalParameters\":{}}",
       "credentialData" : "{\"hashIterations\":27500,\"algorithm\":\"pbkdf2-sha256\",\"additionalParameters\":{}}"
     } ],
     "disableableCredentialTypes" : [ ],
@@ -1776,8 +1523,8 @@
     "notBefore" : 0,
     "groups" : [ ]
   }, {
-    "id" : "9a4d176c-e61e-4392-8c50-a04988606aa6",
+    "id" : "ec8a613d-de94-4696-910d-635ab0d90fc1",
-    "createdTimestamp" : 1678461818383,
+    "createdTimestamp" : 1680538439046,
     "username" : "infra6.sme",
     "enabled" : true,
     "totp" : false,
@@ -1787,10 +1534,10 @@
       "spiffworkflow-employeeid" : [ "212" ]
     },
     "credentials" : [ {
-      "id" : "c381e58c-3e06-4e10-bd23-46f258c1c91f",
+      "id" : "59e02828-28cb-4555-9497-0b9f674ecd43",
       "type" : "password",
-      "createdDate" : 1678461818420,
+      "createdDate" : 1680538439110,
-      "secretData" : "{\"value\":\"m17+awcU3Ezhfi/gBK0xyxvnGKHads95lhn7uxvEXaPCJF0ioN8C27tH1RwU1w9ptdWjWKWAM9dcimIegy7M7g==\",\"salt\":\"0kCljoos7qzCnVdv+3IMjQ==\",\"additionalParameters\":{}}",
+      "secretData" : "{\"value\":\"DFa3Yz3ZRdFGmAFqiq6Sg+s673FFnjVGOzS/e4SnDAdv1JzavYka2QngSHDvZfi5bO7ecDE0+idwJP/vtcMjyQ==\",\"salt\":\"iSHEw6brz62W6RqGULCyug==\",\"additionalParameters\":{}}",
       "credentialData" : "{\"hashIterations\":27500,\"algorithm\":\"pbkdf2-sha256\",\"additionalParameters\":{}}"
     } ],
     "disableableCredentialTypes" : [ ],
@@ -1838,6 +1585,29 @@
     "realmRoles" : [ "default-roles-spiffworkflow" ],
     "notBefore" : 0,
     "groups" : [ ]
+  }, {
+    "id" : "992c7cfb-377f-4d80-b399-edf218ad640e",
+    "createdTimestamp" : 1679595782179,
+    "username" : "jamescheung",
+    "enabled" : true,
+    "totp" : false,
+    "emailVerified" : false,
+    "email" : "jamescheung@status.im",
+    "attributes" : {
+      "spiffworkflow-employeeid" : [ "234" ]
+    },
+    "credentials" : [ {
+      "id" : "3e62811d-d294-4c2b-a681-3a93ea0f8bc2",
+      "type" : "password",
+      "createdDate" : 1679595782238,
+      "secretData" : "{\"value\":\"oFDel18kGBSpCvfrni1SSY2Ti3eJmYxCuwcar5PoBHECXISIbuz0t5i97COiXCI52vxSkorwl3c8r2j+77B2kw==\",\"salt\":\"tVvRYyNH4ktBXNjmfP6JtQ==\",\"additionalParameters\":{}}",
+      "credentialData" : "{\"hashIterations\":27500,\"algorithm\":\"pbkdf2-sha256\",\"additionalParameters\":{}}"
+    } ],
+    "disableableCredentialTypes" : [ ],
+    "requiredActions" : [ ],
+    "realmRoles" : [ "default-roles-spiffworkflow" ],
+    "notBefore" : 0,
+    "groups" : [ ]
   }, {
     "id" : "2df44301-506a-4053-9ece-830d2b3c295b",
     "createdTimestamp" : 1676302142640,
@@ -1985,8 +1755,8 @@
     "notBefore" : 0,
     "groups" : [ ]
   }, {
-    "id" : "6e9129f9-34f8-43bb-953b-de4156d425ba",
+    "id" : "7596232c-47bd-40db-bc0d-fbe984ebb22a",
-    "createdTimestamp" : 1676302142894,
+    "createdTimestamp" : 1680210955394,
     "username" : "legal.project-lead",
     "enabled" : true,
     "totp" : false,
@@ -1996,10 +1766,10 @@
       "spiffworkflow-employeeid" : [ "133" ]
     },
     "credentials" : [ {
-      "id" : "b17d488c-7665-40d4-b758-c392ecc9e793",
+      "id" : "e379cc51-564f-4950-92dd-7fa18cff5d3b",
       "type" : "password",
-      "createdDate" : 1676302142929,
+      "createdDate" : 1680210955428,
-      "secretData" : "{\"value\":\"FiEmNY1c+4xOepA3lzOzzaaNgthk9rMz1xXiV+5F2DUwBtoEqFRrlGTdHVVz5XjrcFhgW15+R3rSEfHsCLJTiA==\",\"salt\":\"xYYuuodywbhxqXcj3XMqKw==\",\"additionalParameters\":{}}",
+      "secretData" : "{\"value\":\"k+No1LvsqQmYTOQzuXN9oeVKne+FTCNAe4lZ4qVZq2M4pSRqKeySJWdtLYjxzHRfLufVpir6gXRCvs7ZiUL9GQ==\",\"salt\":\"XQ469z9b2a8Jw1IeZc9NaQ==\",\"additionalParameters\":{}}",
       "credentialData" : "{\"hashIterations\":27500,\"algorithm\":\"pbkdf2-sha256\",\"additionalParameters\":{}}"
     } ],
     "disableableCredentialTypes" : [ ],
@@ -2146,8 +1916,8 @@
     "notBefore" : 0,
     "groups" : [ ]
   }, {
-    "id" : "a368625b-b905-4e0d-83f6-dfe707b6320a",
+    "id" : "a8f54828-b188-41e6-80a6-920cab95f7db",
-    "createdTimestamp" : 1678461818455,
+    "createdTimestamp" : 1680538439162,
     "username" : "legal6.sme",
     "enabled" : true,
     "totp" : false,
@@ -2157,56 +1927,10 @@
       "spiffworkflow-employeeid" : [ "213" ]
     },
     "credentials" : [ {
-      "id" : "53a21d32-1da5-45f1-a7d9-e45304b213d1",
+      "id" : "8e70e379-7974-40b6-ba31-08a1632a1a08",
       "type" : "password",
-      "createdDate" : 1678461818490,
+      "createdDate" : 1680538439219,
-      "secretData" : "{\"value\":\"9zEoc1uV0QXsMvAS8lA1xdh4bOqcPdSAItg7zBFr5i+In/xOBtpRM0277nMgDNLtar4s+HRhytWgJ7OidVmjsw==\",\"salt\":\"ahEvQYvH0bHbT/uHz1I9QA==\",\"additionalParameters\":{}}",
+      "secretData" : "{\"value\":\"Mwqt3FKuQ1q+OUpb8dIOOGwTKNmVuOCBnnJhSzFHUSa/9nrfWuL2GXCspHwPnMP4fF1eEXAg5B8SBC8cL/paEQ==\",\"salt\":\"o5Sj16r/DznxOzGJi6xJJg==\",\"additionalParameters\":{}}",
-      "credentialData" : "{\"hashIterations\":27500,\"algorithm\":\"pbkdf2-sha256\",\"additionalParameters\":{}}"
-    } ],
-    "disableableCredentialTypes" : [ ],
-    "requiredActions" : [ ],
-    "realmRoles" : [ "default-roles-spiffworkflow" ],
-    "notBefore" : 0,
-    "groups" : [ ]
-  }, {
-    "id" : "e02e085f-eb50-4fe3-844c-24e41479ab47",
-    "createdTimestamp" : 1678461818523,
-    "username" : "legal7.sme",
-    "enabled" : true,
-    "totp" : false,
-    "emailVerified" : false,
-    "email" : "legal7.sme@status.im",
-    "attributes" : {
-      "spiffworkflow-employeeid" : [ "214" ]
-    },
-    "credentials" : [ {
-      "id" : "f5377236-8b0b-4be4-8dab-afb2c4a6470f",
-      "type" : "password",
-      "createdDate" : 1678461818557,
-      "secretData" : "{\"value\":\"dyQhBsrNeYHkbJudEjiay3duLFO9B66l0d+2L26S+/HMGuKfuI4NT+gju1MfQPVJhyC01FH7EmDGGS8I45i2jw==\",\"salt\":\"kU4NM5QOWvGSX+kVyvwSoA==\",\"additionalParameters\":{}}",
-      "credentialData" : "{\"hashIterations\":27500,\"algorithm\":\"pbkdf2-sha256\",\"additionalParameters\":{}}"
-    } ],
-    "disableableCredentialTypes" : [ ],
-    "requiredActions" : [ ],
-    "realmRoles" : [ "default-roles-spiffworkflow" ],
-    "notBefore" : 0,
-    "groups" : [ ]
-  }, {
-    "id" : "4de624bd-485f-49d5-817c-ba66c31be7a9",
-    "createdTimestamp" : 1678461818589,
-    "username" : "legal8.sme",
-    "enabled" : true,
-    "totp" : false,
-    "emailVerified" : false,
-    "email" : "legal8.sme@status.im",
-    "attributes" : {
-      "spiffworkflow-employeeid" : [ "215" ]
-    },
-    "credentials" : [ {
-      "id" : "5d71a02b-2f4b-484d-9125-a4454a17a800",
-      "type" : "password",
-      "createdDate" : 1678461818632,
-      "secretData" : "{\"value\":\"UH+hrjz9F+X0vQlbgzaFiZBA5uol9Lnjs1/5VpBnbWuISF6MAlxj2fmbnZbw4ILVSllaQvVSFaD4YUxbnRhUmw==\",\"salt\":\"MuAF2Rl7IOxOgZ7Xbqs3RQ==\",\"additionalParameters\":{}}",
       "credentialData" : "{\"hashIterations\":27500,\"algorithm\":\"pbkdf2-sha256\",\"additionalParameters\":{}}"
     } ],
     "disableableCredentialTypes" : [ ],
@@ -2283,69 +2007,6 @@
     "realmRoles" : [ "default-roles-spiffworkflow" ],
     "notBefore" : 0,
     "groups" : [ ]
-  }, {
-    "id" : "058b60f8-799e-48b0-a2b7-2e65e7a35724",
-    "createdTimestamp" : 1675718484672,
-    "username" : "mike",
-    "enabled" : true,
-    "totp" : false,
-    "emailVerified" : false,
-    "email" : "mike@sartography.com",
-    "credentials" : [ {
-      "id" : "669f5421-843d-411d-9f24-1be41e545e52",
-      "type" : "password",
-      "createdDate" : 1675718484715,
-      "secretData" : "{\"value\":\"YILRiRdrsy8CA716ZQazpQOf7mpiXGaYnR26ra3pSjmHkZS9tsePTRwU2OIGPwbN1LKJcIzrpfEP7cVW2Lm17w==\",\"salt\":\"7mfD1X7Hns/5pPgHb9uZ1Q==\",\"additionalParameters\":{}}",
-      "credentialData" : "{\"hashIterations\":27500,\"algorithm\":\"pbkdf2-sha256\",\"additionalParameters\":{}}"
-    } ],
-    "disableableCredentialTypes" : [ ],
-    "requiredActions" : [ ],
-    "realmRoles" : [ "default-roles-spiffworkflow" ],
-    "notBefore" : 0,
-    "groups" : [ ]
-  }, {
-    "id" : "97843876-e1b6-469a-bab4-f9bce4aa5936",
-    "createdTimestamp" : 1678461819014,
-    "username" : "mobile.project-lead",
-    "enabled" : true,
-    "totp" : false,
-    "emailVerified" : false,
-    "email" : "mobile.project-lead@status.im",
-    "attributes" : {
-      "spiffworkflow-employeeid" : [ "221" ]
-    },
-    "credentials" : [ {
-      "id" : "96c00769-4348-4ad3-82c5-f34124602c17",
-      "type" : "password",
-      "createdDate" : 1678461819049,
-      "secretData" : "{\"value\":\"E7nVydRqQ+TZs54VmJcT4AjjtT1la7PmQbOnylqTPkkcOdLRmZbNTw/K429lOhqUHX7y1prC3OjGdY1VI8bjsg==\",\"salt\":\"D61yv2zS3Bi8epVKjRpWQw==\",\"additionalParameters\":{}}",
-      "credentialData" : "{\"hashIterations\":27500,\"algorithm\":\"pbkdf2-sha256\",\"additionalParameters\":{}}"
-    } ],
-    "disableableCredentialTypes" : [ ],
-    "requiredActions" : [ ],
-    "realmRoles" : [ "default-roles-spiffworkflow" ],
-    "notBefore" : 0,
-    "groups" : [ ]
-  }, {
-    "id" : "9d23748e-23a7-4c48-956c-64da75871277",
-    "createdTimestamp" : 1675718484779,
-    "username" : "natalia",
-    "enabled" : true,
-    "totp" : false,
-    "emailVerified" : false,
-    "email" : "natalia@sartography.com",
-    "credentials" : [ {
-      "id" : "476024e5-62e4-48b6-afbb-cc2834fae4c7",
-      "type" : "password",
-      "createdDate" : 1675718484823,
-      "secretData" : "{\"value\":\"FfrpgES+XI2w4NRe1aBmolPFcERbEUDXZcFtUWucrbhBspQLYNaN2VLmeDRV0VcT47Bn8dqjU11ct64WDtffWA==\",\"salt\":\"7rZd3fqY54i1eoNyXCcZ1w==\",\"additionalParameters\":{}}",
-      "credentialData" : "{\"hashIterations\":27500,\"algorithm\":\"pbkdf2-sha256\",\"additionalParameters\":{}}"
-    } ],
-    "disableableCredentialTypes" : [ ],
-    "requiredActions" : [ ],
-    "realmRoles" : [ "default-roles-spiffworkflow" ],
-    "notBefore" : 0,
-    "groups" : [ ]
   }, {
     "id" : "7f34beba-e1e1-458a-8d23-eb07d6e3800c",
     "createdTimestamp" : 1678126023154,
@@ -2369,29 +2030,6 @@
     "realmRoles" : [ "default-roles-spiffworkflow" ],
     "notBefore" : 0,
     "groups" : [ ]
-  }, {
-    "id" : "e8e67210-5088-46bc-97db-09dbcaf9de97",
-    "createdTimestamp" : 1678461818939,
-    "username" : "nomos.project-lead",
-    "enabled" : true,
-    "totp" : false,
-    "emailVerified" : false,
-    "email" : "nomos.project-lead@status.im",
-    "attributes" : {
-      "spiffworkflow-employeeid" : [ "220" ]
-    },
-    "credentials" : [ {
-      "id" : "8139f9b8-bad9-41d2-b3c6-589a2c11bf45",
-      "type" : "password",
-      "createdDate" : 1678461818975,
-      "secretData" : "{\"value\":\"6g5XIaFghMzx8CFYO6VJLGpUqBRiAEwFklZSI+uzJ5vrMsDvrcGjDuWtY+lmRO4lKqy30lBvqhMFvPT6pCxF3g==\",\"salt\":\"dT+XvwD+hxUwRAJCZFFYiA==\",\"additionalParameters\":{}}",
-      "credentialData" : "{\"hashIterations\":27500,\"algorithm\":\"pbkdf2-sha256\",\"additionalParameters\":{}}"
-    } ],
-    "disableableCredentialTypes" : [ ],
-    "requiredActions" : [ ],
-    "realmRoles" : [ "default-roles-spiffworkflow" ],
-    "notBefore" : 0,
-    "groups" : [ ]
   }, {
     "id" : "df72b3d2-07fd-4cb0-a447-a1c433db49d5",
     "createdTimestamp" : 1676302143785,
@@ -2577,8 +2215,8 @@
     "notBefore" : 0,
     "groups" : [ ]
   }, {
-    "id" : "07f7a010-7542-4c2f-adf8-04b39433181d",
+    "id" : "b5bd1dc1-308d-4912-b3e4-92bf5fc45ed5",
-    "createdTimestamp" : 1678461818663,
+    "createdTimestamp" : 1680538439258,
     "username" : "peopleops.partner6.sme",
     "enabled" : true,
     "totp" : false,
@@ -2588,10 +2226,10 @@
       "spiffworkflow-employeeid" : [ "216" ]
     },
     "credentials" : [ {
-      "id" : "867e9236-3a15-4198-b085-d36a7fa859e9",
+      "id" : "c719418c-b203-4056-9e19-43c5e87d1d43",
       "type" : "password",
-      "createdDate" : 1678461818713,
+      "createdDate" : 1680538439300,
-      "secretData" : "{\"value\":\"kmQkAD459XkLCGaWWTr1rrwZYQ2gQ4k2xTroJZAyHmWvBBnKg+a74cRaW2Y3dnzcGTlcprtuMvwYVfq7HIOkmg==\",\"salt\":\"uKORqhpJJnceOf/q56BiSA==\",\"additionalParameters\":{}}",
+      "secretData" : "{\"value\":\"pzmtPn2OllnAYKIIS2M38n0UFrtbkX5zN44DpI/PrzmnxRgT2TvlJmjCtxp5HRUi3lngT6Jdr3IvqpO5o93Y5g==\",\"salt\":\"1WKPI8ktFMZoLCAv2ir5+A==\",\"additionalParameters\":{}}",
       "credentialData" : "{\"hashIterations\":27500,\"algorithm\":\"pbkdf2-sha256\",\"additionalParameters\":{}}"
     } ],
     "disableableCredentialTypes" : [ ],
@ -2600,77 +2238,8 @@
"notBefore" : 0, "notBefore" : 0,
"groups" : [ ] "groups" : [ ]
}, { }, {
"id" : "5d41b5b7-bc3c-42fe-b20b-56a7c6cd3801", "id" : "b57086d7-f301-4e11-ab02-60b02c79163a",
"createdTimestamp" : 1678461818743, "createdTimestamp" : 1680210955550,
"username" : "peopleops.partner7.sme",
"enabled" : true,
"totp" : false,
"emailVerified" : false,
"email" : "peopleops.partner7.sme@status.im",
"attributes" : {
"spiffworkflow-employeeid" : [ "217" ]
},
"credentials" : [ {
"id" : "745d419f-c6de-4504-9c8e-c3f7b1ac747e",
"type" : "password",
"createdDate" : 1678461818778,
"secretData" : "{\"value\":\"myjshlqPW/3DpwC5X4vsAaqcsisdKwqr+CQXP18mt3AQMzqipHJaVAEAJzkZS4j42VB/XAvh0olMxb8Vapyw3g==\",\"salt\":\"jNpX6DyT5Tt/5dPXYiQfpQ==\",\"additionalParameters\":{}}",
"credentialData" : "{\"hashIterations\":27500,\"algorithm\":\"pbkdf2-sha256\",\"additionalParameters\":{}}"
} ],
"disableableCredentialTypes" : [ ],
"requiredActions" : [ ],
"realmRoles" : [ "default-roles-spiffworkflow" ],
"notBefore" : 0,
"groups" : [ ]
}, {
"id" : "73523c93-6104-4494-b1c8-2af6087bcdd9",
"createdTimestamp" : 1678461818810,
"username" : "peopleops.partner8.sme",
"enabled" : true,
"totp" : false,
"emailVerified" : false,
"email" : "peopleops.partner8.sme@status.im",
"attributes" : {
"spiffworkflow-employeeid" : [ "218" ]
},
"credentials" : [ {
"id" : "e839763b-aba2-4b4f-b715-b2c061b7430f",
"type" : "password",
"createdDate" : 1678461818843,
"secretData" : "{\"value\":\"M0KfNRU/4qt1WL/cGiSm6sKfN9PTK+6JiV96Y55Zg5CYaXH0ihTyGo62wS4T4YuyMm6/yTKz7+w3gdU4Zg/3Uw==\",\"salt\":\"sd/JEXtWTW4PetXzEBCNQA==\",\"additionalParameters\":{}}",
"credentialData" : "{\"hashIterations\":27500,\"algorithm\":\"pbkdf2-sha256\",\"additionalParameters\":{}}"
} ],
"disableableCredentialTypes" : [ ],
"requiredActions" : [ ],
"realmRoles" : [ "default-roles-spiffworkflow" ],
"notBefore" : 0,
"groups" : [ ]
}, {
"id" : "cdff7ae3-72eb-45b6-9424-6f56df9c3b1c",
"createdTimestamp" : 1678461818873,
"username" : "peopleops.partner9.sme",
"enabled" : true,
"totp" : false,
"emailVerified" : false,
"email" : "peopleops.partner9.sme@status.im",
"attributes" : {
"spiffworkflow-employeeid" : [ "219" ]
},
"credentials" : [ {
"id" : "5ff8e042-a72e-4b46-9efa-e1910cd09d13",
"type" : "password",
"createdDate" : 1678461818908,
"secretData" : "{\"value\":\"q/hdvLKerMbnpe6yjC3VxDqCFi0ne7rD5A1K39EM+XgD6bFI62qKW5JIBB5BaGz/GrWYw7ipwMBaOvLBOubSkg==\",\"salt\":\"vfnCbi47kaYpILxbL0b3Tg==\",\"additionalParameters\":{}}",
"credentialData" : "{\"hashIterations\":27500,\"algorithm\":\"pbkdf2-sha256\",\"additionalParameters\":{}}"
} ],
"disableableCredentialTypes" : [ ],
"requiredActions" : [ ],
"realmRoles" : [ "default-roles-spiffworkflow" ],
"notBefore" : 0,
"groups" : [ ]
}, {
"id" : "dbf941e7-0b45-4bc6-ae9e-d7153d32ce47",
"createdTimestamp" : 1676302143401,
"username" : "peopleops.project-lead", "username" : "peopleops.project-lead",
"enabled" : true, "enabled" : true,
"totp" : false, "totp" : false,
@ -2680,10 +2249,10 @@
"spiffworkflow-employeeid" : [ "147" ] "spiffworkflow-employeeid" : [ "147" ]
}, },
"credentials" : [ { "credentials" : [ {
"id" : "85fa4e0a-2f59-4c51-8e8b-20acb9813ab9", "id" : "e17da85a-70ab-4f7d-8cff-6f4826f35bbc",
"type" : "password", "type" : "password",
"createdDate" : 1676302143434, "createdDate" : 1680210955585,
"secretData" : "{\"value\":\"FBi/INvDb50hA4QNRcSbd5gc10Dspq7QppiCvQ6ualnH/MlTyVq5CL9o1BWya0xxVdG/4jxFkUlgpN1w5liZ1Q==\",\"salt\":\"s2yJeI/k96iSy8zHAdTVSQ==\",\"additionalParameters\":{}}", "secretData" : "{\"value\":\"Llqk65fjzqPK6koWNRBPY6S1/T3GXgc4PHJSw/qlH7qzEQALzkKqMG1/C0s2EkAonj8WpIzZyEZKzRgMGqgh1g==\",\"salt\":\"1PoYqx4FYOST9EUEqbf9mA==\",\"additionalParameters\":{}}",
"credentialData" : "{\"hashIterations\":27500,\"algorithm\":\"pbkdf2-sha256\",\"additionalParameters\":{}}" "credentialData" : "{\"hashIterations\":27500,\"algorithm\":\"pbkdf2-sha256\",\"additionalParameters\":{}}"
} ], } ],
"disableableCredentialTypes" : [ ], "disableableCredentialTypes" : [ ],
@ -2967,6 +2536,29 @@
"realmRoles" : [ "default-roles-spiffworkflow" ], "realmRoles" : [ "default-roles-spiffworkflow" ],
"notBefore" : 0, "notBefore" : 0,
"groups" : [ ] "groups" : [ ]
}, {
"id" : "62862d90-e996-48ac-a8ee-5af43356dca4",
"createdTimestamp" : 1680538439355,
"username" : "ppg.ba6.sme",
"enabled" : true,
"totp" : false,
"emailVerified" : false,
"email" : "ppg.ba6.sme@status.im",
"attributes" : {
"spiffworkflow-employeeid" : [ "236" ]
},
"credentials" : [ {
"id" : "b242e740-4d6f-412a-9719-84da41c8d1ed",
"type" : "password",
"createdDate" : 1680538439405,
"secretData" : "{\"value\":\"oveDoHPfm0m+SkrY3rLyFfIOK1tH+Fc8y5KC+CGMccNIPqLN5p7ytXcMjjcIhRdxAW9CzCGFUKhVnGAXa/PGIQ==\",\"salt\":\"kQZeYzICjjs6DO2hEgEbDw==\",\"additionalParameters\":{}}",
"credentialData" : "{\"hashIterations\":27500,\"algorithm\":\"pbkdf2-sha256\",\"additionalParameters\":{}}"
} ],
"disableableCredentialTypes" : [ ],
"requiredActions" : [ ],
"realmRoles" : [ "default-roles-spiffworkflow" ],
"notBefore" : 0,
"groups" : [ ]
}, {
"id" : "f56fe387-d153-42c2-880a-6726bd624bae",
"createdTimestamp" : 1676302144802,
@ -3175,8 +2767,8 @@
"notBefore" : 0,
"groups" : [ ]
}, {
"id" : "c684e919-6ae0-4031-a160-8e90338567b3",
"id" : "3ac1954a-713a-47c7-bd41-d618063a1053",
"createdTimestamp" : 1678461818310,
"createdTimestamp" : 1680538438655,
"username" : "security6.sme",
"enabled" : true,
"totp" : false,
@ -3186,10 +2778,10 @@
"spiffworkflow-employeeid" : [ "211" ]
},
"credentials" : [ {
"id" : "aff2f083-f6aa-4f93-899f-aaa3119a9739",
"id" : "e3ceb7b3-617d-4e52-980c-e5edd9ba48fb",
"type" : "password",
"createdDate" : 1678461818346,
"createdDate" : 1680538438713,
"secretData" : "{\"value\":\"7XGMuiylxKmwDwJZtiPNLllERwN8KLoILLE/BjjXOkqN3c+C+KYgNxPhrDt8dG9PDYOq/59vh/4E2y82GLaoEw==\",\"salt\":\"ufzmAcoMLoi0jtRHwGDadg==\",\"additionalParameters\":{}}",
"secretData" : "{\"value\":\"iD1TfnQecNf0giE/5Ji0JQL/z91X4QmeqtiJKp/Dsfc55vPVh7llJlVygL7x2Ctcl4/+X10XgtSUkdAvdi3Tvw==\",\"salt\":\"6c0hHyISU/BOwh8vntCIfg==\",\"additionalParameters\":{}}",
"credentialData" : "{\"hashIterations\":27500,\"algorithm\":\"pbkdf2-sha256\",\"additionalParameters\":{}}"
} ],
"disableableCredentialTypes" : [ ],
@ -3324,21 +2916,21 @@
"notBefore" : 0,
"groups" : [ ]
}, {
"id" : "cb99a5c4-2c28-4b19-b8c7-635b757fc817",
"id" : "654d55c5-2380-456f-a99b-936aa8cce4ee",
"createdTimestamp" : 1678461818231,
"createdTimestamp" : 1680538439445,
"username" : "waku.research.project-lead",
"username" : "web.project-lead",
"enabled" : true,
"totp" : false,
"emailVerified" : false,
"email" : "waku.research.project-lead@status.im",
"email" : "web.project-lead@status.im",
"attributes" : {
"spiffworkflow-employeeid" : [ "164" ]
"spiffworkflow-employeeid" : [ "235" ]
},
"credentials" : [ {
"id" : "ed5fc4a1-d574-4940-b5e4-3a1ad9d122ba",
"id" : "c28af9d4-37bb-445a-a8cc-12a87bd8dd2c",
"type" : "password",
"createdDate" : 1678461818268,
"createdDate" : 1680538439501,
"secretData" : "{\"value\":\"K7MRRw2gO4bXHJH8U4cZU2rcVQT/hxw7kMHqN1uDae9FVqFEKh014qiwePOHr5K1xjUw8uU5e/d3HCcwhuRUQw==\",\"salt\":\"R4FdsDK6NvelgQ8gH7Me0g==\",\"additionalParameters\":{}}",
"secretData" : "{\"value\":\"1ug7sJNXy9qUby6hABKyLJ8R0xa1pVldXFltuO6Xtqe7qIt9+eUbhN2o9dZ8vk5/aPIFaaIcQPOFZdaKOE/XWw==\",\"salt\":\"F3utYf4viApmPmC6FSZ0vA==\",\"additionalParameters\":{}}",
"credentialData" : "{\"hashIterations\":27500,\"algorithm\":\"pbkdf2-sha256\",\"additionalParameters\":{}}"
} ],
"disableableCredentialTypes" : [ ],
@ -4578,7 +4170,7 @@
"subType" : "authenticated",
"subComponents" : { },
"config" : {
"allowed-protocol-mapper-types" : [ "oidc-usermodel-property-mapper", "oidc-sha256-pairwise-sub-mapper", "saml-role-list-mapper", "oidc-full-name-mapper", "saml-user-attribute-mapper", "oidc-usermodel-attribute-mapper", "saml-user-property-mapper", "oidc-address-mapper" ]
"allowed-protocol-mapper-types" : [ "oidc-sha256-pairwise-sub-mapper", "oidc-address-mapper", "oidc-full-name-mapper", "saml-user-attribute-mapper", "oidc-usermodel-property-mapper", "saml-role-list-mapper", "saml-user-property-mapper", "oidc-usermodel-attribute-mapper" ]
}
}, {
"id" : "d68e938d-dde6-47d9-bdc8-8e8523eb08cd",
@ -4596,7 +4188,7 @@
"subType" : "anonymous",
"subComponents" : { },
"config" : {
"allowed-protocol-mapper-types" : [ "saml-user-attribute-mapper", "oidc-usermodel-attribute-mapper", "oidc-address-mapper", "oidc-usermodel-property-mapper", "oidc-full-name-mapper", "oidc-sha256-pairwise-sub-mapper", "saml-role-list-mapper", "saml-user-property-mapper" ]
"allowed-protocol-mapper-types" : [ "oidc-full-name-mapper", "saml-user-property-mapper", "oidc-usermodel-property-mapper", "saml-role-list-mapper", "oidc-address-mapper", "saml-user-attribute-mapper", "oidc-sha256-pairwise-sub-mapper", "oidc-usermodel-attribute-mapper" ]
}
}, {
"id" : "3854361d-3fe5-47fb-9417-a99592e3dc5c",
@ -4686,7 +4278,7 @@
"internationalizationEnabled" : false,
"supportedLocales" : [ ],
"authenticationFlows" : [ {
"id" : "04b09640-f53c-4c1b-b2b1-8cac25afc2bb",
"id" : "62d7bb2a-5919-48b2-a9f9-511ecf5474c7",
"alias" : "Account verification options",
"description" : "Method with which to verity the existing account",
"providerId" : "basic-flow",
@ -4708,7 +4300,7 @@
"userSetupAllowed" : false
} ]
}, {
"id" : "e7c246f4-71c3-4a48-9037-72438bdcfcbb",
"id" : "7675760b-666a-4b8c-a9b8-da1e01c207fe",
"alias" : "Authentication Options",
"description" : "Authentication options.",
"providerId" : "basic-flow",
@ -4737,7 +4329,7 @@
"userSetupAllowed" : false
} ]
}, {
"id" : "6e9d415e-98f7-4459-b10b-45b08302c681",
"id" : "34e18ea8-f515-46dc-9dbf-5b79f8154564",
"alias" : "Browser - Conditional OTP",
"description" : "Flow to determine if the OTP is required for the authentication",
"providerId" : "basic-flow",
@ -4759,7 +4351,7 @@
"userSetupAllowed" : false
} ]
}, {
"id" : "c86b0fad-f7dd-4c58-974e-25eb83c1dacf",
"id" : "933e581c-56d8-4614-b2a3-d2db10397ea0",
"alias" : "Direct Grant - Conditional OTP",
"description" : "Flow to determine if the OTP is required for the authentication",
"providerId" : "basic-flow",
@ -4781,7 +4373,7 @@
"userSetupAllowed" : false
} ]
}, {
"id" : "cb7f4c87-a8fa-445a-a8d4-53869cdfed12",
"id" : "0986dc8c-4bcf-477f-8ba2-3cac02ea656f",
"alias" : "First broker login - Conditional OTP",
"description" : "Flow to determine if the OTP is required for the authentication",
"providerId" : "basic-flow",
@ -4803,7 +4395,7 @@
"userSetupAllowed" : false
} ]
}, {
"id" : "8fa87954-bc65-4f1e-bc55-f5bb49f59fbb",
"id" : "534381e4-b0b9-43b2-9ac5-9f1e006b5920",
"alias" : "Handle Existing Account",
"description" : "Handle what to do if there is existing account with same email/username like authenticated identity provider",
"providerId" : "basic-flow",
@ -4825,7 +4417,7 @@
"userSetupAllowed" : false
} ]
}, {
"id" : "e617d826-c654-4c35-96ad-8381bd1e2298",
"id" : "922e84ab-85db-494a-8a8c-84d3b0c675f4",
"alias" : "Reset - Conditional OTP",
"description" : "Flow to determine if the OTP should be reset or not. Set to REQUIRED to force.",
"providerId" : "basic-flow",
@ -4847,7 +4439,7 @@
"userSetupAllowed" : false
} ]
}, {
"id" : "2e4a46ae-2813-4b71-9386-c08b2f063fa6",
"id" : "24b1b409-b6fc-44dc-9a97-93b2f4a78c89",
"alias" : "User creation or linking",
"description" : "Flow for the existing/non-existing user alternatives",
"providerId" : "basic-flow",
@ -4870,7 +4462,7 @@
"userSetupAllowed" : false
} ]
}, {
"id" : "8fa69de0-13cf-4252-899b-c59a30ebd132",
"id" : "c015a916-a45b-4797-a466-2399164da6fe",
"alias" : "Verify Existing Account by Re-authentication",
"description" : "Reauthentication of existing account",
"providerId" : "basic-flow",
@ -4892,7 +4484,7 @@
"userSetupAllowed" : false
} ]
}, {
"id" : "204d20f6-d9a7-49ff-a7a3-45386fb884f4",
"id" : "fc7aec31-855b-4993-b770-57660ff0524f",
"alias" : "browser",
"description" : "browser based authentication",
"providerId" : "basic-flow",
@ -4928,7 +4520,7 @@
"userSetupAllowed" : false
} ]
}, {
"id" : "3c0c2987-65db-4920-ae44-34aba220c3fb",
"id" : "9769d765-42c8-4391-a7ec-aa24f0e84040",
"alias" : "clients",
"description" : "Base authentication for clients",
"providerId" : "client-flow",
@ -4964,7 +4556,7 @@
"userSetupAllowed" : false
} ]
}, {
"id" : "68a92113-be75-4e63-a322-8076d6c67650",
"id" : "49a937cc-9d51-43d0-a379-67aaae38c51a",
"alias" : "direct grant",
"description" : "OpenID Connect Resource Owner Grant",
"providerId" : "basic-flow",
@ -4993,7 +4585,7 @@
"userSetupAllowed" : false
} ]
}, {
"id" : "a630d78f-4fe1-4350-a19d-d091d1af514d",
"id" : "1a766b69-7ead-442a-84a4-083cd84949cd",
"alias" : "docker auth",
"description" : "Used by Docker clients to authenticate against the IDP",
"providerId" : "basic-flow",
@ -5008,7 +4600,7 @@
"userSetupAllowed" : false
} ]
}, {
"id" : "f73b4437-8e82-4788-be69-e437b09b500c",
"id" : "e4ac0543-cfb6-4232-947d-52b8615e0629",
"alias" : "first broker login",
"description" : "Actions taken after first broker login with identity provider account, which is not yet linked to any Keycloak account",
"providerId" : "basic-flow",
@ -5031,7 +4623,7 @@
"userSetupAllowed" : false
} ]
}, {
"id" : "b7c8cc6d-bc1f-446e-b263-72214b2f5c56",
"id" : "86247ee8-b507-406b-9d32-3c68c80084a5",
"alias" : "forms",
"description" : "Username, password, otp and other auth forms.",
"providerId" : "basic-flow",
@ -5053,7 +4645,7 @@
"userSetupAllowed" : false
} ]
}, {
"id" : "a3bdf79f-8c7d-4bff-807d-76fa61093446",
"id" : "70ef5a26-e3bb-4ba7-a05a-d205b0a3836c",
"alias" : "http challenge",
"description" : "An authentication flow based on challenge-response HTTP Authentication Schemes",
"providerId" : "basic-flow",
@ -5075,7 +4667,7 @@
"userSetupAllowed" : false
} ]
}, {
"id" : "ada41b4e-5a12-496d-aa1e-d31cf8c08226",
"id" : "89abf09a-bfb4-4dea-b164-ca7c563b4009",
"alias" : "registration",
"description" : "registration flow",
"providerId" : "basic-flow",
@ -5091,7 +4683,7 @@
"userSetupAllowed" : false
} ]
}, {
"id" : "1c858bcd-2031-4056-bbf0-1fbaecdd7068",
"id" : "52d31bf0-dcb6-4b01-a252-b2ba705df036",
"alias" : "registration form",
"description" : "registration form",
"providerId" : "form-flow",
@ -5127,7 +4719,7 @@
"userSetupAllowed" : false
} ]
}, {
"id" : "ff91e251-d85e-450b-bff7-d45be26777d5",
"id" : "22041b6b-6d9e-43eb-8d2a-94a3052c49aa",
"alias" : "reset credentials",
"description" : "Reset credentials for a user if they forgot their password or something",
"providerId" : "basic-flow",
@ -5163,7 +4755,7 @@
"userSetupAllowed" : false
} ]
}, {
"id" : "7b0680a2-99b9-454c-b145-f286e9d60c58",
"id" : "153aaf25-b6d9-42b4-9740-f63c94c16626",
"alias" : "saml ecp",
"description" : "SAML ECP Profile Authentication Flow",
"providerId" : "basic-flow",
@ -5179,13 +4771,13 @@
} ]
} ],
"authenticatorConfig" : [ {
"id" : "aa1e4f55-3e7f-445a-a432-7a972776d719",
"id" : "e0075b39-a2ad-47de-9ee6-e61073387e71",
"alias" : "create unique user config",
"config" : {
"require.password.update.after.registration" : "false"
}
}, {
"id" : "fd69765e-309b-4c5d-bdd5-51343427cd27",
"id" : "aa24bff3-bd25-4b2a-973f-63fea5c21dd1",
"alias" : "review profile config",
"config" : {
"update.profile.on.first.login" : "missing"

View File

@ -0,0 +1,4 @@
email,spiffworkflow-employeeid
admin@spiffworkflow.org
jason@sartography.com
kevin@sartography.com

View File

@ -1,15 +1,9 @@
email,spiffworkflow-employeeid
admin@spiffworkflow.org
alex@sartography.com,111
dan@sartography.com,115
daniel@sartography.com
elizabeth@sartography.com
j@sartography.com
jason@sartography.com
jon@sartography.com
kb@sartography.com
kevin@sartography.com
madhurya@sartography.com,160
madhurya@ymail.com,161
mike@sartography.com
natalia@sartography.com
email,pass,spiffworkflow-employeeid
alex@sartography.com,,111
dan@sartography.com,,115
daniel@sartography.com,,
elizabeth@sartography.com,,
j@sartography.com,,
jon@sartography.com,,
kb@sartography.com,,
madhurya@sartography.com,,160
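These user lists share one CSV shape: an email whose local part doubles as the username, a pass column that this commit introduces (empty meaning "use the default"), and an optional spiffworkflow-employeeid. A sketch of a loader for that shape; the default_password fallback is an assumption for illustration, not a value defined in these files:

import csv
from dataclasses import dataclass
from typing import Optional

@dataclass
class TestUser:
    username: str
    email: str
    password: str
    employee_id: Optional[str]

def load_test_users(path: str, default_password: str) -> list[TestUser]:
    # Rows whose email starts with '#' are commented out, as in the lists above.
    users: list[TestUser] = []
    with open(path, newline="") as f:
        for row in csv.DictReader(f):
            email = (row.get("email") or "").strip()
            if not email or email.startswith("#"):
                continue
            users.append(
                TestUser(
                    username=email.split("@")[0],
                    email=email,
                    password=(row.get("pass") or "").strip() or default_password,
                    employee_id=(row.get("spiffworkflow-employeeid") or "").strip() or None,
                )
            )
    return users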

View File

@ -1,113 +1,97 @@
email,spiffworkflow-employeeid
# admin@spiffworkflow.org
amir@status.im
app.program-lead@status.im,121
codex-a1.sme@status.im,209
codex.project-lead@status.im,153
codex.sme@status.im,185
codex1.sme@status.im,186
codex2.sme@status.im,187
codex3.sme@status.im,188
codex4.sme@status.im,189
codex5.sme@status.im,190
core-a1.contributor@status.im,202
core-a2.contributor@status.im,203
core1.contributor@status.im,155
core10.contributor@status.im,225
core11.contributor@status.im,226
core12.contributor@status.im,227
core13.contributor@status.im,228
core14.contributor@status.im,229
core15.contributor@status.im,230
core16.contributor@status.im,231
core17.contributor@status.im,232
core2.contributor@status.im,156
core3.contributor@status.im,157
core4.contributor@status.im,158
core5.contributor@status.im,159
core6.contributor@status.im,199
core7.contributor@status.im,222
core8.contributor@status.im,223
core9.contributor@status.im,224
core@status.im,113
dao.project.lead@status.im
desktop-a1.sme@status.im,210
desktop.program.lead@status.im
desktop.project-lead@status.im,192
desktop.project.lead@status.im
desktop.sme@status.im,193
desktop1.sme@status.im,194
desktop2.sme@status.im,195
desktop3.sme@status.im,196
desktop4.sme@status.im,197
desktop5.sme@status.im,198
fin@status.im,118
finance_user1@status.im
fluffy.project-lead@status.im,162
harmeet@status.im,109
infra-a1.sme@status.im,204
infra.project-lead@status.im,130
infra.sme@status.im,119
infra1.sme@status.im,131
infra2.sme@status.im,132
infra3.sme@status.im,167
infra4.sme@status.im,175
infra5.sme@status.im,176
infra6.sme@status.im,212
jakub@status.im
jarrad@status.im
lead@status.im,114
legal-a1.sme@status.im,205
legal.project-lead@status.im,133
legal.sme@status.im,125
legal1.sme@status.im,134
legal2.sme@status.im,165
legal3.sme@status.im,166
legal4.sme@status.im,177
legal5.sme@status.im,178
legal6.sme@status.im,213
legal7.sme@status.im,214
legal8.sme@status.im,215
logos.program-lead@status.im,160
manuchehr@status.im,110
mobile.project-lead@status.im,221
nimbus.program-lead@status.im,161
nomos.project-lead@status.im,220
peopleops.partner-a1.sme@status.im,208
peopleops.partner.sme@status.im,148
peopleops.partner1.sme@status.im,149
peopleops.partner2.sme@status.im,173
peopleops.partner3.sme@status.im,174
peopleops.partner4.sme@status.im,181
peopleops.partner5.sme@status.im,182
peopleops.partner6.sme@status.im,216
peopleops.partner7.sme@status.im,217
peopleops.partner8.sme@status.im,218
peopleops.partner9.sme@status.im,219
peopleops.partner@status.im,150
peopleops.project-lead@status.im,147
peopleops.talent.sme@status.im,143
peopleops.talent1.sme@status.im,142
peopleops.talent@status.im,141
ppg.ba-a1.sme@status.im,207
ppg.ba.project-lead@status.im,137
ppg.ba.sme@status.im,138
ppg.ba1.sme@status.im,170
ppg.ba2.sme@status.im,171
ppg.ba3.sme@status.im,172
ppg.ba4.sme@status.im,200
ppg.ba5.sme@status.im,201
ppg.ba@status.im,127
sasha@status.im,112
security-a1.sme@status.im,206
security.project-lead@status.im,151
security.sme@status.im,123
security1.sme@status.im,135
security2.sme@status.im,168
security3.sme@status.im,169
security4.sme@status.im,179
security5.sme@status.im,180
security6.sme@status.im,211
services.lead@status.im,122
vac.program-lead@status.im,163
waku.research.project-lead@status.im,164
email,pass,spiffworkflow-employeeid
# admin@spiffworkflow.org
amir@status.im
app.program-lead@status.im,,121
codex-a1.sme@status.im,,209
codex.project-lead@status.im,,153
codex.sme@status.im,,185
codex1.sme@status.im,,186
codex2.sme@status.im,,187
codex3.sme@status.im,,188
codex4.sme@status.im,,189
codex5.sme@status.im,,190
core-a1.contributor@status.im,,202
core-a2.contributor@status.im,,203
core1.contributor@status.im,,155
core2.contributor@status.im,,156
core3.contributor@status.im,,157
core4.contributor@status.im,,158
core5.contributor@status.im,,159
core6.contributor@status.im,core6.contributorx,199
core@status.im,,113
dao.project.lead@status.im
desktop-a1.sme@status.im,,210
desktop.program.lead@status.im
desktop.project-lead@status.im,,192
desktop.project.lead@status.im
desktop.sme@status.im,,193
desktop1.sme@status.im,,194
desktop2.sme@status.im,,195
desktop3.sme@status.im,,196
desktop4.sme@status.im,,197
desktop5.sme@status.im,,198
fin@status.im,,118
finance_user1@status.im
fluffy.project-lead@status.im,,162
harmeet@status.im,,109
infra-a1.sme@status.im,,204
infra.project-lead@status.im,infra.project-leadx,130
infra.sme@status.im,,119
infra1.sme@status.im,,131
infra2.sme@status.im,,132
infra3.sme@status.im,,167
infra4.sme@status.im,,175
infra5.sme@status.im,,176
infra6.sme@status.im,infra6.smex,212
jakub@status.im
jamescheung@status.im,,234
jarrad@status.im
lead@status.im,,114
legal-a1.sme@status.im,,205
legal.project-lead@status.im,legal.project-leadx,133
legal.sme@status.im,,125
legal1.sme@status.im,,134
legal2.sme@status.im,,165
legal3.sme@status.im,,166
legal4.sme@status.im,,177
legal5.sme@status.im,,178
legal6.sme@status.im,legal6.smex,213
logos.program-lead@status.im,,160
manuchehr@status.im,,110
nimbus.program-lead@status.im,,161
peopleops.partner-a1.sme@status.im,,208
peopleops.partner.sme@status.im,,148
peopleops.partner1.sme@status.im,,149
peopleops.partner2.sme@status.im,,173
peopleops.partner3.sme@status.im,,174
peopleops.partner4.sme@status.im,,181
peopleops.partner5.sme@status.im,,182
peopleops.partner6.sme@status.im,peopleops.partner6.smex,216
peopleops.partner@status.im,,150
peopleops.project-lead@status.im,peopleops.project-leadx,147
peopleops.talent.sme@status.im,,143
peopleops.talent1.sme@status.im,,142
peopleops.talent@status.im,,141
ppg.ba-a1.sme@status.im,,207
ppg.ba.project-lead@status.im,,137
ppg.ba.sme@status.im,,138
ppg.ba1.sme@status.im,,170
ppg.ba2.sme@status.im,,171
ppg.ba3.sme@status.im,,172
ppg.ba4.sme@status.im,,200
ppg.ba5.sme@status.im,,201
ppg.ba6.sme@status.im,ppg.ba6.smex,236
ppg.ba@status.im,,127
sasha@status.im,,112
security-a1.sme@status.im,,206
security.project-lead@status.im,,151
security.sme@status.im,,123
security1.sme@status.im,,135
security2.sme@status.im,,168
security3.sme@status.im,,169
security4.sme@status.im,,179
security5.sme@status.im,,180
security6.sme@status.im,security6.smex,211
services.lead@status.im,,122
vac.program-lead@status.im,,163
web.project-lead@status.im,web.project-leadx,235
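Rows like these end up as users in the spiffworkflow realm exported earlier, with the employee id stored under the spiffworkflow-employeeid attribute. A hedged sketch of creating one such user through Keycloak's Admin REST API; the endpoint paths are standard Keycloak (newer distributions drop the legacy /auth prefix), while the base URL, realm name, and admin credentials are placeholders:

import requests

BASE_URL = "http://localhost:8080"  # placeholder; point at the actual Keycloak instance
REALM = "spiffworkflow"

def get_admin_token(admin_user: str, admin_password: str) -> str:
    # Password grant against the master realm's built-in admin-cli client.
    resp = requests.post(
        f"{BASE_URL}/realms/master/protocol/openid-connect/token",
        data={
            "grant_type": "password",
            "client_id": "admin-cli",
            "username": admin_user,
            "password": admin_password,
        },
    )
    resp.raise_for_status()
    return resp.json()["access_token"]

def create_user(token: str, email: str, password: str, employee_id: str | None) -> None:
    # Mirrors the fields visible in the realm export: username, email, attribute, password credential.
    representation = {
        "username": email.split("@")[0],
        "email": email,
        "enabled": True,
        "credentials": [{"type": "password", "value": password, "temporary": False}],
    }
    if employee_id:
        representation["attributes"] = {"spiffworkflow-employeeid": [employee_id]}
    resp = requests.post(
        f"{BASE_URL}/admin/realms/{REALM}/users",
        json=representation,
        headers={"Authorization": f"Bearer {token}"},
    )
    resp.raise_for_status()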

View File

@ -1,8 +1,8 @@
"""empty message """empty message
Revision ID: 389800c352ee Revision ID: 0b5dd14bfbac
Revises: Revises:
Create Date: 2023-03-07 10:40:43.709777 Create Date: 2023-03-23 16:25:33.288500
""" """
from alembic import op from alembic import op
@ -10,7 +10,7 @@ import sqlalchemy as sa
from sqlalchemy.dialects import mysql from sqlalchemy.dialects import mysql
# revision identifiers, used by Alembic. # revision identifiers, used by Alembic.
revision = '389800c352ee' revision = '0b5dd14bfbac'
down_revision = None down_revision = None
branch_labels = None branch_labels = None
depends_on = None depends_on = None
@ -18,33 +18,21 @@ depends_on = None
def upgrade(): def upgrade():
# ### commands auto generated by Alembic - please adjust! ### # ### commands auto generated by Alembic - please adjust! ###
op.create_table('bpmn_process',
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('guid', sa.String(length=36), nullable=True),
sa.Column('parent_process_id', sa.Integer(), nullable=True),
sa.Column('properties_json', sa.JSON(), nullable=False),
sa.Column('json_data_hash', sa.String(length=255), nullable=False),
sa.Column('start_in_seconds', sa.DECIMAL(precision=17, scale=6), nullable=True),
sa.Column('end_in_seconds', sa.DECIMAL(precision=17, scale=6), nullable=True),
sa.ForeignKeyConstraint(['parent_process_id'], ['bpmn_process.id'], ),
sa.PrimaryKeyConstraint('id')
)
op.create_index(op.f('ix_bpmn_process_guid'), 'bpmn_process', ['guid'], unique=True)
op.create_index(op.f('ix_bpmn_process_json_data_hash'), 'bpmn_process', ['json_data_hash'], unique=False)
op.create_table('bpmn_process_definition',
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('hash', sa.String(length=255), nullable=False),
sa.Column('bpmn_identifier', sa.String(length=255), nullable=False),
sa.Column('bpmn_name', sa.String(length=255), nullable=True),
sa.Column('properties_json', sa.JSON(), nullable=False),
sa.Column('type', sa.String(length=32), nullable=True),
sa.Column('bpmn_version_control_type', sa.String(length=50), nullable=True),
sa.Column('bpmn_version_control_identifier', sa.String(length=255), nullable=True),
sa.Column('updated_at_in_seconds', sa.Integer(), nullable=True),
sa.Column('created_at_in_seconds', sa.Integer(), nullable=True),
sa.PrimaryKeyConstraint('id')
sa.PrimaryKeyConstraint('id'),
sa.UniqueConstraint('hash')
)
op.create_index(op.f('ix_bpmn_process_definition_bpmn_identifier'), 'bpmn_process_definition', ['bpmn_identifier'], unique=False)
op.create_index(op.f('ix_bpmn_process_definition_hash'), 'bpmn_process_definition', ['hash'], unique=True)
op.create_index(op.f('ix_bpmn_process_definition_bpmn_name'), 'bpmn_process_definition', ['bpmn_name'], unique=False)
op.create_table('correlation_property_cache',
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('name', sa.String(length=50), nullable=False),
@ -53,19 +41,23 @@ def upgrade():
sa.Column('retrieval_expression', sa.String(length=255), nullable=True),
sa.PrimaryKeyConstraint('id')
)
op.create_index(op.f('ix_correlation_property_cache_message_name'), 'correlation_property_cache', ['message_name'], unique=False)
op.create_index(op.f('ix_correlation_property_cache_name'), 'correlation_property_cache', ['name'], unique=False)
op.create_table('group',
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('name', sa.String(length=255), nullable=True),
sa.Column('identifier', sa.String(length=255), nullable=True),
sa.PrimaryKeyConstraint('id')
)
op.create_index(op.f('ix_group_identifier'), 'group', ['identifier'], unique=False)
op.create_index(op.f('ix_group_name'), 'group', ['name'], unique=False)
op.create_table('json_data',
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('hash', sa.String(length=255), nullable=False),
sa.Column('data', sa.JSON(), nullable=False),
sa.PrimaryKeyConstraint('id')
sa.PrimaryKeyConstraint('id'),
sa.UniqueConstraint('hash')
)
op.create_index(op.f('ix_json_data_hash'), 'json_data', ['hash'], unique=True)
op.create_table('message_triggerable_process_model',
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('message_name', sa.String(length=255), nullable=True),
@ -74,6 +66,7 @@ def upgrade():
sa.Column('created_at_in_seconds', sa.Integer(), nullable=True),
sa.PrimaryKeyConstraint('id')
)
op.create_index(op.f('ix_message_triggerable_process_model_message_name'), 'message_triggerable_process_model', ['message_name'], unique=False)
op.create_index(op.f('ix_message_triggerable_process_model_process_model_identifier'), 'message_triggerable_process_model', ['process_model_identifier'], unique=False)
op.create_table('permission_target',
sa.Column('id', sa.Integer(), nullable=False),
@ -97,29 +90,15 @@ def upgrade():
)
op.create_index(op.f('ix_spec_reference_cache_display_name'), 'spec_reference_cache', ['display_name'], unique=False)
op.create_index(op.f('ix_spec_reference_cache_identifier'), 'spec_reference_cache', ['identifier'], unique=False)
op.create_index(op.f('ix_spec_reference_cache_process_model_id'), 'spec_reference_cache', ['process_model_id'], unique=False)
op.create_index(op.f('ix_spec_reference_cache_type'), 'spec_reference_cache', ['type'], unique=False)
op.create_table('spiff_logging',
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('process_instance_id', sa.Integer(), nullable=False),
sa.Column('bpmn_process_identifier', sa.String(length=255), nullable=False),
sa.Column('bpmn_process_name', sa.String(length=255), nullable=True),
sa.Column('bpmn_task_identifier', sa.String(length=255), nullable=False),
sa.Column('bpmn_task_name', sa.String(length=255), nullable=True),
sa.Column('bpmn_task_type', sa.String(length=255), nullable=True),
sa.Column('spiff_task_guid', sa.String(length=50), nullable=False),
sa.Column('timestamp', sa.DECIMAL(precision=17, scale=6), nullable=False),
sa.Column('message', sa.String(length=255), nullable=True),
sa.Column('current_user_id', sa.Integer(), nullable=True),
sa.Column('spiff_step', sa.Integer(), nullable=False),
sa.PrimaryKeyConstraint('id')
)
op.create_table('user',
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('username', sa.String(length=255), nullable=False),
sa.Column('email', sa.String(length=255), nullable=True),
sa.Column('service', sa.String(length=255), nullable=False),
sa.Column('service_id', sa.String(length=255), nullable=False),
sa.Column('display_name', sa.String(length=255), nullable=True),
sa.Column('email', sa.String(length=255), nullable=True),
sa.Column('tenant_specific_field_1', sa.String(length=255), nullable=True),
sa.Column('tenant_specific_field_2', sa.String(length=255), nullable=True),
sa.Column('tenant_specific_field_3', sa.String(length=255), nullable=True),
@ -129,6 +108,29 @@ def upgrade():
sa.UniqueConstraint('service', 'service_id', name='service_key'),
sa.UniqueConstraint('username')
)
op.create_index(op.f('ix_user_email'), 'user', ['email'], unique=False)
op.create_index(op.f('ix_user_service'), 'user', ['service'], unique=False)
op.create_index(op.f('ix_user_service_id'), 'user', ['service_id'], unique=False)
op.create_table('bpmn_process',
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('guid', sa.String(length=36), nullable=True),
sa.Column('bpmn_process_definition_id', sa.Integer(), nullable=False),
sa.Column('top_level_process_id', sa.Integer(), nullable=True),
sa.Column('direct_parent_process_id', sa.Integer(), nullable=True),
sa.Column('properties_json', sa.JSON(), nullable=False),
sa.Column('json_data_hash', sa.String(length=255), nullable=False),
sa.Column('start_in_seconds', sa.DECIMAL(precision=17, scale=6), nullable=True),
sa.Column('end_in_seconds', sa.DECIMAL(precision=17, scale=6), nullable=True),
sa.ForeignKeyConstraint(['bpmn_process_definition_id'], ['bpmn_process_definition.id'], ),
sa.ForeignKeyConstraint(['direct_parent_process_id'], ['bpmn_process.id'], ),
sa.ForeignKeyConstraint(['top_level_process_id'], ['bpmn_process.id'], ),
sa.PrimaryKeyConstraint('id'),
sa.UniqueConstraint('guid')
)
op.create_index(op.f('ix_bpmn_process_bpmn_process_definition_id'), 'bpmn_process', ['bpmn_process_definition_id'], unique=False)
op.create_index(op.f('ix_bpmn_process_direct_parent_process_id'), 'bpmn_process', ['direct_parent_process_id'], unique=False)
op.create_index(op.f('ix_bpmn_process_json_data_hash'), 'bpmn_process', ['json_data_hash'], unique=False)
op.create_index(op.f('ix_bpmn_process_top_level_process_id'), 'bpmn_process', ['top_level_process_id'], unique=False)
op.create_table('bpmn_process_definition_relationship',
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('bpmn_process_definition_parent_id', sa.Integer(), nullable=False),
@ -138,6 +140,8 @@ def upgrade():
sa.PrimaryKeyConstraint('id'),
sa.UniqueConstraint('bpmn_process_definition_parent_id', 'bpmn_process_definition_child_id', name='bpmn_process_definition_relationship_unique')
)
op.create_index(op.f('ix_bpmn_process_definition_relationship_bpmn_process_definition_parent_id'), 'bpmn_process_definition_relationship', ['bpmn_process_definition_parent_id'], unique=False)
op.create_index(op.f('ix_bpmn_process_definition_relationship_bpmn_process_definition_child_id'), 'bpmn_process_definition_relationship', ['bpmn_process_definition_child_id'], unique=False)
op.create_table('principal',
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('user_id', sa.Integer(), nullable=True),
@ -149,32 +153,6 @@ def upgrade():
sa.UniqueConstraint('group_id'),
sa.UniqueConstraint('user_id')
)
op.create_table('process_instance',
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('process_model_identifier', sa.String(length=255), nullable=False),
sa.Column('process_model_display_name', sa.String(length=255), nullable=False),
sa.Column('process_initiator_id', sa.Integer(), nullable=False),
sa.Column('bpmn_process_definition_id', sa.Integer(), nullable=True),
sa.Column('bpmn_process_id', sa.Integer(), nullable=True),
sa.Column('spiff_serializer_version', sa.String(length=50), nullable=True),
sa.Column('bpmn_json', sa.JSON(), nullable=True),
sa.Column('start_in_seconds', sa.Integer(), nullable=True),
sa.Column('end_in_seconds', sa.Integer(), nullable=True),
sa.Column('updated_at_in_seconds', sa.Integer(), nullable=True),
sa.Column('created_at_in_seconds', sa.Integer(), nullable=True),
sa.Column('status', sa.String(length=50), nullable=True),
sa.Column('bpmn_version_control_type', sa.String(length=50), nullable=True),
sa.Column('bpmn_version_control_identifier', sa.String(length=255), nullable=True),
sa.Column('spiff_step', sa.Integer(), nullable=True),
sa.Column('locked_by', sa.String(length=80), nullable=True),
sa.Column('locked_at_in_seconds', sa.Integer(), nullable=True),
sa.ForeignKeyConstraint(['bpmn_process_definition_id'], ['bpmn_process_definition.id'], ),
sa.ForeignKeyConstraint(['bpmn_process_id'], ['bpmn_process.id'], ),
sa.ForeignKeyConstraint(['process_initiator_id'], ['user.id'], ),
sa.PrimaryKeyConstraint('id')
)
op.create_index(op.f('ix_process_instance_process_model_display_name'), 'process_instance', ['process_model_display_name'], unique=False)
op.create_index(op.f('ix_process_instance_process_model_identifier'), 'process_instance', ['process_model_identifier'], unique=False)
op.create_table('process_instance_report',
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('identifier', sa.String(length=50), nullable=False),
@ -207,26 +185,14 @@ def upgrade():
sa.PrimaryKeyConstraint('id'),
sa.UniqueConstraint('key')
)
op.create_table('task',
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('guid', sa.String(length=36), nullable=False),
sa.Column('bpmn_process_id', sa.Integer(), nullable=False),
sa.Column('state', sa.String(length=10), nullable=False),
sa.Column('properties_json', sa.JSON(), nullable=False),
sa.Column('json_data_hash', sa.String(length=255), nullable=False),
sa.Column('start_in_seconds', sa.DECIMAL(precision=17, scale=6), nullable=True),
sa.Column('end_in_seconds', sa.DECIMAL(precision=17, scale=6), nullable=True),
sa.ForeignKeyConstraint(['bpmn_process_id'], ['bpmn_process.id'], ),
sa.PrimaryKeyConstraint('id')
)
op.create_index(op.f('ix_task_guid'), 'task', ['guid'], unique=True)
op.create_index(op.f('ix_task_json_data_hash'), 'task', ['json_data_hash'], unique=False)
op.create_index(op.f('ix_secret_user_id'), 'secret', ['user_id'], unique=False)
op.create_table('task_definition',
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('bpmn_process_definition_id', sa.Integer(), nullable=False),
sa.Column('bpmn_identifier', sa.String(length=255), nullable=False),
sa.Column('properties_json', sa.JSON(), nullable=False),
sa.Column('bpmn_name', sa.String(length=255), nullable=True),
sa.Column('typename', sa.String(length=255), nullable=False),
sa.Column('properties_json', sa.JSON(), nullable=False),
sa.Column('updated_at_in_seconds', sa.Integer(), nullable=True),
sa.Column('created_at_in_seconds', sa.Integer(), nullable=True),
sa.ForeignKeyConstraint(['bpmn_process_definition_id'], ['bpmn_process_definition.id'], ),
@ -234,6 +200,9 @@ def upgrade():
sa.UniqueConstraint('bpmn_process_definition_id', 'bpmn_identifier', name='task_definition_unique')
)
op.create_index(op.f('ix_task_definition_bpmn_identifier'), 'task_definition', ['bpmn_identifier'], unique=False)
op.create_index(op.f('ix_task_definition_bpmn_name'), 'task_definition', ['bpmn_name'], unique=False)
op.create_index(op.f('ix_task_definition_bpmn_process_definition_id'), 'task_definition', ['bpmn_process_definition_id'], unique=False)
op.create_index(op.f('ix_task_definition_typename'), 'task_definition', ['typename'], unique=False)
op.create_table('user_group_assignment',
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('user_id', sa.Integer(), nullable=False),
@ -243,6 +212,8 @@ def upgrade():
sa.PrimaryKeyConstraint('id'),
sa.UniqueConstraint('user_id', 'group_id', name='user_group_assignment_unique')
)
op.create_index(op.f('ix_user_group_assignment_group_id'), 'user_group_assignment', ['group_id'], unique=False)
op.create_index(op.f('ix_user_group_assignment_user_id'), 'user_group_assignment', ['user_id'], unique=False)
op.create_table('user_group_assignment_waiting',
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('username', sa.String(length=255), nullable=False),
@ -251,48 +222,7 @@ def upgrade():
sa.PrimaryKeyConstraint('id'),
sa.UniqueConstraint('username', 'group_id', name='user_group_assignment_staged_unique')
)
op.create_index(op.f('ix_user_group_assignment_waiting_group_id'), 'user_group_assignment_waiting', ['group_id'], unique=False)
op.create_table('human_task',
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('process_instance_id', sa.Integer(), nullable=False),
sa.Column('lane_assignment_id', sa.Integer(), nullable=True),
sa.Column('completed_by_user_id', sa.Integer(), nullable=True),
sa.Column('actual_owner_id', sa.Integer(), nullable=True),
sa.Column('form_file_name', sa.String(length=50), nullable=True),
sa.Column('ui_form_file_name', sa.String(length=50), nullable=True),
sa.Column('updated_at_in_seconds', sa.Integer(), nullable=True),
sa.Column('created_at_in_seconds', sa.Integer(), nullable=True),
sa.Column('task_id', sa.String(length=50), nullable=True),
sa.Column('task_name', sa.String(length=255), nullable=True),
sa.Column('task_title', sa.String(length=50), nullable=True),
sa.Column('task_type', sa.String(length=50), nullable=True),
sa.Column('task_status', sa.String(length=50), nullable=True),
sa.Column('process_model_display_name', sa.String(length=255), nullable=True),
sa.Column('bpmn_process_identifier', sa.String(length=255), nullable=True),
sa.Column('completed', sa.Boolean(), nullable=False),
sa.ForeignKeyConstraint(['actual_owner_id'], ['user.id'], ),
sa.ForeignKeyConstraint(['completed_by_user_id'], ['user.id'], ),
sa.ForeignKeyConstraint(['lane_assignment_id'], ['group.id'], ),
sa.ForeignKeyConstraint(['process_instance_id'], ['process_instance.id'], ),
sa.PrimaryKeyConstraint('id')
)
op.create_index(op.f('ix_human_task_completed'), 'human_task', ['completed'], unique=False)
op.create_table('message_instance',
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('process_instance_id', sa.Integer(), nullable=True),
sa.Column('name', sa.String(length=255), nullable=True),
sa.Column('message_type', sa.String(length=20), nullable=False),
sa.Column('payload', sa.JSON(), nullable=True),
sa.Column('correlation_keys', sa.JSON(), nullable=True),
sa.Column('status', sa.String(length=20), nullable=False),
sa.Column('user_id', sa.Integer(), nullable=False),
sa.Column('counterpart_id', sa.Integer(), nullable=True),
sa.Column('failure_cause', sa.Text(), nullable=True),
sa.Column('updated_at_in_seconds', sa.Integer(), nullable=True),
sa.Column('created_at_in_seconds', sa.Integer(), nullable=True),
sa.ForeignKeyConstraint(['process_instance_id'], ['process_instance.id'], ),
sa.ForeignKeyConstraint(['user_id'], ['user.id'], ),
sa.PrimaryKeyConstraint('id')
)
op.create_table('permission_assignment',
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('principal_id', sa.Integer(), nullable=False),
@ -304,6 +234,72 @@ def upgrade():
sa.PrimaryKeyConstraint('id'),
sa.UniqueConstraint('principal_id', 'permission_target_id', 'permission', name='permission_assignment_uniq')
)
op.create_index(op.f('ix_permission_assignment_permission_target_id'), 'permission_assignment', ['permission_target_id'], unique=False)
op.create_index(op.f('ix_permission_assignment_principal_id'), 'permission_assignment', ['principal_id'], unique=False)
op.create_table('process_instance',
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('process_model_identifier', sa.String(length=255), nullable=False),
sa.Column('process_model_display_name', sa.String(length=255), nullable=False),
sa.Column('process_initiator_id', sa.Integer(), nullable=False),
sa.Column('bpmn_process_definition_id', sa.Integer(), nullable=True),
sa.Column('bpmn_process_id', sa.Integer(), nullable=True),
sa.Column('spiff_serializer_version', sa.String(length=50), nullable=True),
sa.Column('start_in_seconds', sa.Integer(), nullable=True),
sa.Column('end_in_seconds', sa.Integer(), nullable=True),
sa.Column('updated_at_in_seconds', sa.Integer(), nullable=True),
sa.Column('created_at_in_seconds', sa.Integer(), nullable=True),
sa.Column('status', sa.String(length=50), nullable=True),
sa.Column('bpmn_version_control_type', sa.String(length=50), nullable=True),
sa.Column('bpmn_version_control_identifier', sa.String(length=255), nullable=True),
sa.ForeignKeyConstraint(['bpmn_process_definition_id'], ['bpmn_process_definition.id'], ),
sa.ForeignKeyConstraint(['bpmn_process_id'], ['bpmn_process.id'], ),
sa.ForeignKeyConstraint(['process_initiator_id'], ['user.id'], ),
sa.PrimaryKeyConstraint('id')
)
op.create_index(op.f('ix_process_instance_bpmn_process_definition_id'), 'process_instance', ['bpmn_process_definition_id'], unique=False)
op.create_index(op.f('ix_process_instance_bpmn_process_id'), 'process_instance', ['bpmn_process_id'], unique=False)
op.create_index(op.f('ix_process_instance_end_in_seconds'), 'process_instance', ['end_in_seconds'], unique=False)
op.create_index(op.f('ix_process_instance_process_initiator_id'), 'process_instance', ['process_initiator_id'], unique=False)
op.create_index(op.f('ix_process_instance_process_model_display_name'), 'process_instance', ['process_model_display_name'], unique=False)
op.create_index(op.f('ix_process_instance_process_model_identifier'), 'process_instance', ['process_model_identifier'], unique=False)
op.create_index(op.f('ix_process_instance_start_in_seconds'), 'process_instance', ['start_in_seconds'], unique=False)
op.create_index(op.f('ix_process_instance_status'), 'process_instance', ['status'], unique=False)
op.create_table('message_instance',
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('process_instance_id', sa.Integer(), nullable=True),
sa.Column('name', sa.String(length=255), nullable=True),
sa.Column('message_type', sa.String(length=20), nullable=False),
sa.Column('payload', sa.JSON(), nullable=True),
sa.Column('correlation_keys', sa.JSON(), nullable=True),
sa.Column('status', sa.String(length=20), nullable=False),
sa.Column('user_id', sa.Integer(), nullable=True),
sa.Column('counterpart_id', sa.Integer(), nullable=True),
sa.Column('failure_cause', sa.Text(), nullable=True),
sa.Column('updated_at_in_seconds', sa.Integer(), nullable=True),
sa.Column('created_at_in_seconds', sa.Integer(), nullable=True),
sa.ForeignKeyConstraint(['process_instance_id'], ['process_instance.id'], ),
sa.ForeignKeyConstraint(['user_id'], ['user.id'], ),
sa.PrimaryKeyConstraint('id')
)
op.create_index(op.f('ix_message_instance_process_instance_id'), 'message_instance', ['process_instance_id'], unique=False)
op.create_index(op.f('ix_message_instance_status'), 'message_instance', ['status'], unique=False)
op.create_index(op.f('ix_message_instance_user_id'), 'message_instance', ['user_id'], unique=False)
op.create_table('process_instance_event',
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('task_guid', sa.String(length=36), nullable=True),
sa.Column('process_instance_id', sa.Integer(), nullable=False),
sa.Column('event_type', sa.String(length=50), nullable=False),
sa.Column('timestamp', sa.DECIMAL(precision=17, scale=6), nullable=False),
sa.Column('user_id', sa.Integer(), nullable=True),
sa.ForeignKeyConstraint(['process_instance_id'], ['process_instance.id'], ),
sa.ForeignKeyConstraint(['user_id'], ['user.id'], ),
sa.PrimaryKeyConstraint('id')
)
op.create_index(op.f('ix_process_instance_event_event_type'), 'process_instance_event', ['event_type'], unique=False)
op.create_index(op.f('ix_process_instance_event_process_instance_id'), 'process_instance_event', ['process_instance_id'], unique=False)
op.create_index(op.f('ix_process_instance_event_task_guid'), 'process_instance_event', ['task_guid'], unique=False)
op.create_index(op.f('ix_process_instance_event_timestamp'), 'process_instance_event', ['timestamp'], unique=False)
op.create_index(op.f('ix_process_instance_event_user_id'), 'process_instance_event', ['user_id'], unique=False)
op.create_table('process_instance_file_data',
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('process_instance_id', sa.Integer(), nullable=False),
@ -319,6 +315,7 @@ def upgrade():
sa.PrimaryKeyConstraint('id')
)
op.create_index(op.f('ix_process_instance_file_data_digest'), 'process_instance_file_data', ['digest'], unique=False)
op.create_index(op.f('ix_process_instance_file_data_process_instance_id'), 'process_instance_file_data', ['process_instance_id'], unique=False)
op.create_table('process_instance_metadata',
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('process_instance_id', sa.Integer(), nullable=False),
@ -331,32 +328,80 @@ def upgrade():
sa.UniqueConstraint('process_instance_id', 'key', name='process_instance_metadata_unique')
)
op.create_index(op.f('ix_process_instance_metadata_key'), 'process_instance_metadata', ['key'], unique=False)
op.create_table('spiff_step_details',
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('process_instance_id', sa.Integer(), nullable=False),
sa.Column('spiff_step', sa.Integer(), nullable=False),
sa.Column('task_json', sa.JSON(), nullable=False),
sa.Column('task_id', sa.String(length=50), nullable=False),
sa.Column('task_state', sa.String(length=50), nullable=False),
sa.Column('bpmn_task_identifier', sa.String(length=255), nullable=False),
sa.Column('delta_json', sa.JSON(), nullable=True),
sa.Column('start_in_seconds', sa.DECIMAL(precision=17, scale=6), nullable=False),
sa.Column('end_in_seconds', sa.DECIMAL(precision=17, scale=6), nullable=True),
sa.ForeignKeyConstraint(['process_instance_id'], ['process_instance.id'], ),
sa.PrimaryKeyConstraint('id'),
sa.UniqueConstraint('process_instance_id', 'spiff_step', name='process_instance_id_spiff_step')
)
op.create_table('human_task_user',
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('human_task_id', sa.Integer(), nullable=False),
sa.Column('user_id', sa.Integer(), nullable=False),
sa.ForeignKeyConstraint(['human_task_id'], ['human_task.id'], ),
sa.ForeignKeyConstraint(['user_id'], ['user.id'], ),
sa.PrimaryKeyConstraint('id'),
sa.UniqueConstraint('human_task_id', 'user_id', name='human_task_user_unique')
)
op.create_index(op.f('ix_human_task_user_human_task_id'), 'human_task_user', ['human_task_id'], unique=False)
op.create_index(op.f('ix_human_task_user_user_id'), 'human_task_user', ['user_id'], unique=False)
op.create_index(op.f('ix_process_instance_metadata_process_instance_id'), 'process_instance_metadata', ['process_instance_id'], unique=False)
op.create_table('process_instance_queue',
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('process_instance_id', sa.Integer(), nullable=False),
sa.Column('run_at_in_seconds', sa.Integer(), nullable=True),
sa.Column('priority', sa.Integer(), nullable=True),
sa.Column('locked_by', sa.String(length=80), nullable=True),
sa.Column('locked_at_in_seconds', sa.Integer(), nullable=True),
sa.Column('status', sa.String(length=50), nullable=True),
sa.Column('updated_at_in_seconds', sa.Integer(), nullable=True),
sa.Column('created_at_in_seconds', sa.Integer(), nullable=True),
sa.ForeignKeyConstraint(['process_instance_id'], ['process_instance.id'], ),
sa.PrimaryKeyConstraint('id'),
sa.UniqueConstraint('process_instance_id')
)
op.create_index(op.f('ix_process_instance_queue_locked_at_in_seconds'), 'process_instance_queue', ['locked_at_in_seconds'], unique=False)
op.create_index(op.f('ix_process_instance_queue_locked_by'), 'process_instance_queue', ['locked_by'], unique=False)
op.create_index(op.f('ix_process_instance_queue_status'), 'process_instance_queue', ['status'], unique=False)
op.create_table('task',
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('guid', sa.String(length=36), nullable=False),
sa.Column('bpmn_process_id', sa.Integer(), nullable=False),
sa.Column('process_instance_id', sa.Integer(), nullable=False),
sa.Column('task_definition_id', sa.Integer(), nullable=False),
sa.Column('state', sa.String(length=10), nullable=False),
sa.Column('properties_json', sa.JSON(), nullable=False),
sa.Column('json_data_hash', sa.String(length=255), nullable=False),
sa.Column('python_env_data_hash', sa.String(length=255), nullable=False),
sa.Column('start_in_seconds', sa.DECIMAL(precision=17, scale=6), nullable=True),
sa.Column('end_in_seconds', sa.DECIMAL(precision=17, scale=6), nullable=True),
sa.ForeignKeyConstraint(['bpmn_process_id'], ['bpmn_process.id'], ),
sa.ForeignKeyConstraint(['process_instance_id'], ['process_instance.id'], ),
sa.ForeignKeyConstraint(['task_definition_id'], ['task_definition.id'], ),
sa.PrimaryKeyConstraint('id'),
sa.UniqueConstraint('guid')
)
op.create_index(op.f('ix_task_bpmn_process_id'), 'task', ['bpmn_process_id'], unique=False)
op.create_index(op.f('ix_task_json_data_hash'), 'task', ['json_data_hash'], unique=False)
op.create_index(op.f('ix_task_process_instance_id'), 'task', ['process_instance_id'], unique=False)
op.create_index(op.f('ix_task_python_env_data_hash'), 'task', ['python_env_data_hash'], unique=False)
op.create_index(op.f('ix_task_state'), 'task', ['state'], unique=False)
op.create_index(op.f('ix_task_task_definition_id'), 'task', ['task_definition_id'], unique=False)
op.create_table('human_task',
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('process_instance_id', sa.Integer(), nullable=False),
sa.Column('lane_assignment_id', sa.Integer(), nullable=True),
sa.Column('completed_by_user_id', sa.Integer(), nullable=True),
sa.Column('actual_owner_id', sa.Integer(), nullable=True),
sa.Column('form_file_name', sa.String(length=50), nullable=True),
sa.Column('ui_form_file_name', sa.String(length=50), nullable=True),
sa.Column('updated_at_in_seconds', sa.Integer(), nullable=True),
sa.Column('created_at_in_seconds', sa.Integer(), nullable=True),
sa.Column('task_model_id', sa.Integer(), nullable=True),
sa.Column('task_id', sa.String(length=50), nullable=True),
sa.Column('task_name', sa.String(length=255), nullable=True),
sa.Column('task_title', sa.String(length=50), nullable=True),
sa.Column('task_type', sa.String(length=50), nullable=True),
sa.Column('task_status', sa.String(length=50), nullable=True),
sa.Column('process_model_display_name', sa.String(length=255), nullable=True),
sa.Column('bpmn_process_identifier', sa.String(length=255), nullable=True),
sa.Column('completed', sa.Boolean(), nullable=False),
sa.ForeignKeyConstraint(['actual_owner_id'], ['user.id'], ),
sa.ForeignKeyConstraint(['completed_by_user_id'], ['user.id'], ),
sa.ForeignKeyConstraint(['lane_assignment_id'], ['group.id'], ),
sa.ForeignKeyConstraint(['process_instance_id'], ['process_instance.id'], ),
sa.ForeignKeyConstraint(['task_model_id'], ['task.id'], ),
sa.PrimaryKeyConstraint('id')
)
op.create_index(op.f('ix_human_task_actual_owner_id'), 'human_task', ['actual_owner_id'], unique=False)
op.create_index(op.f('ix_human_task_completed'), 'human_task', ['completed'], unique=False)
op.create_index(op.f('ix_human_task_completed_by_user_id'), 'human_task', ['completed_by_user_id'], unique=False)
op.create_index(op.f('ix_human_task_lane_assignment_id'), 'human_task', ['lane_assignment_id'], unique=False)
op.create_index(op.f('ix_human_task_process_instance_id'), 'human_task', ['process_instance_id'], unique=False)
op.create_index(op.f('ix_human_task_task_model_id'), 'human_task', ['task_model_id'], unique=False)
op.create_table('message_instance_correlation_rule', op.create_table('message_instance_correlation_rule',
sa.Column('id', sa.Integer(), nullable=False), sa.Column('id', sa.Integer(), nullable=False),
sa.Column('message_instance_id', sa.Integer(), nullable=False), sa.Column('message_instance_id', sa.Integer(), nullable=False),
@@ -369,59 +414,121 @@ def upgrade():
sa.UniqueConstraint('message_instance_id', 'name', name='message_instance_id_name_unique') sa.UniqueConstraint('message_instance_id', 'name', name='message_instance_id_name_unique')
) )
op.create_index(op.f('ix_message_instance_correlation_rule_message_instance_id'), 'message_instance_correlation_rule', ['message_instance_id'], unique=False) op.create_index(op.f('ix_message_instance_correlation_rule_message_instance_id'), 'message_instance_correlation_rule', ['message_instance_id'], unique=False)
op.create_index(op.f('ix_message_instance_correlation_rule_name'), 'message_instance_correlation_rule', ['name'], unique=False)
op.create_table('human_task_user',
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('human_task_id', sa.Integer(), nullable=False),
sa.Column('user_id', sa.Integer(), nullable=False),
sa.ForeignKeyConstraint(['human_task_id'], ['human_task.id'], ),
sa.ForeignKeyConstraint(['user_id'], ['user.id'], ),
sa.PrimaryKeyConstraint('id'),
sa.UniqueConstraint('human_task_id', 'user_id', name='human_task_user_unique')
)
op.create_index(op.f('ix_human_task_user_human_task_id'), 'human_task_user', ['human_task_id'], unique=False)
op.create_index(op.f('ix_human_task_user_user_id'), 'human_task_user', ['user_id'], unique=False)
# ### end Alembic commands ### # ### end Alembic commands ###
def downgrade(): def downgrade():
# ### commands auto generated by Alembic - please adjust! ### # ### commands auto generated by Alembic - please adjust! ###
op.drop_index(op.f('ix_message_instance_correlation_rule_message_instance_id'), table_name='message_instance_correlation_rule')
op.drop_table('message_instance_correlation_rule')
op.drop_index(op.f('ix_human_task_user_user_id'), table_name='human_task_user') op.drop_index(op.f('ix_human_task_user_user_id'), table_name='human_task_user')
op.drop_index(op.f('ix_human_task_user_human_task_id'), table_name='human_task_user') op.drop_index(op.f('ix_human_task_user_human_task_id'), table_name='human_task_user')
op.drop_table('human_task_user') op.drop_table('human_task_user')
op.drop_table('spiff_step_details') op.drop_index(op.f('ix_message_instance_correlation_rule_name'), table_name='message_instance_correlation_rule')
op.drop_index(op.f('ix_message_instance_correlation_rule_message_instance_id'), table_name='message_instance_correlation_rule')
op.drop_table('message_instance_correlation_rule')
op.drop_index(op.f('ix_human_task_task_model_id'), table_name='human_task')
op.drop_index(op.f('ix_human_task_process_instance_id'), table_name='human_task')
op.drop_index(op.f('ix_human_task_lane_assignment_id'), table_name='human_task')
op.drop_index(op.f('ix_human_task_completed_by_user_id'), table_name='human_task')
op.drop_index(op.f('ix_human_task_completed'), table_name='human_task')
op.drop_index(op.f('ix_human_task_actual_owner_id'), table_name='human_task')
op.drop_table('human_task')
op.drop_index(op.f('ix_task_task_definition_id'), table_name='task')
op.drop_index(op.f('ix_task_state'), table_name='task')
op.drop_index(op.f('ix_task_python_env_data_hash'), table_name='task')
op.drop_index(op.f('ix_task_process_instance_id'), table_name='task')
op.drop_index(op.f('ix_task_json_data_hash'), table_name='task')
op.drop_index(op.f('ix_task_bpmn_process_id'), table_name='task')
op.drop_table('task')
op.drop_index(op.f('ix_process_instance_queue_status'), table_name='process_instance_queue')
op.drop_index(op.f('ix_process_instance_queue_locked_by'), table_name='process_instance_queue')
op.drop_index(op.f('ix_process_instance_queue_locked_at_in_seconds'), table_name='process_instance_queue')
op.drop_table('process_instance_queue')
op.drop_index(op.f('ix_process_instance_metadata_process_instance_id'), table_name='process_instance_metadata')
op.drop_index(op.f('ix_process_instance_metadata_key'), table_name='process_instance_metadata') op.drop_index(op.f('ix_process_instance_metadata_key'), table_name='process_instance_metadata')
op.drop_table('process_instance_metadata') op.drop_table('process_instance_metadata')
op.drop_index(op.f('ix_process_instance_file_data_process_instance_id'), table_name='process_instance_file_data')
op.drop_index(op.f('ix_process_instance_file_data_digest'), table_name='process_instance_file_data') op.drop_index(op.f('ix_process_instance_file_data_digest'), table_name='process_instance_file_data')
op.drop_table('process_instance_file_data') op.drop_table('process_instance_file_data')
op.drop_table('permission_assignment') op.drop_index(op.f('ix_process_instance_event_user_id'), table_name='process_instance_event')
op.drop_index(op.f('ix_process_instance_event_timestamp'), table_name='process_instance_event')
op.drop_index(op.f('ix_process_instance_event_task_guid'), table_name='process_instance_event')
op.drop_index(op.f('ix_process_instance_event_process_instance_id'), table_name='process_instance_event')
op.drop_index(op.f('ix_process_instance_event_event_type'), table_name='process_instance_event')
op.drop_table('process_instance_event')
op.drop_index(op.f('ix_message_instance_user_id'), table_name='message_instance')
op.drop_index(op.f('ix_message_instance_status'), table_name='message_instance')
op.drop_index(op.f('ix_message_instance_process_instance_id'), table_name='message_instance')
op.drop_table('message_instance') op.drop_table('message_instance')
op.drop_index(op.f('ix_human_task_completed'), table_name='human_task') op.drop_index(op.f('ix_process_instance_status'), table_name='process_instance')
op.drop_table('human_task') op.drop_index(op.f('ix_process_instance_start_in_seconds'), table_name='process_instance')
op.drop_index(op.f('ix_process_instance_process_model_identifier'), table_name='process_instance')
op.drop_index(op.f('ix_process_instance_process_model_display_name'), table_name='process_instance')
op.drop_index(op.f('ix_process_instance_process_initiator_id'), table_name='process_instance')
op.drop_index(op.f('ix_process_instance_end_in_seconds'), table_name='process_instance')
op.drop_index(op.f('ix_process_instance_bpmn_process_id'), table_name='process_instance')
op.drop_index(op.f('ix_process_instance_bpmn_process_definition_id'), table_name='process_instance')
op.drop_table('process_instance')
op.drop_index(op.f('ix_permission_assignment_principal_id'), table_name='permission_assignment')
op.drop_index(op.f('ix_permission_assignment_permission_target_id'), table_name='permission_assignment')
op.drop_table('permission_assignment')
op.drop_index(op.f('ix_user_group_assignment_waiting_group_id'), table_name='user_group_assignment_waiting')
op.drop_table('user_group_assignment_waiting') op.drop_table('user_group_assignment_waiting')
op.drop_index(op.f('ix_user_group_assignment_user_id'), table_name='user_group_assignment')
op.drop_index(op.f('ix_user_group_assignment_group_id'), table_name='user_group_assignment')
op.drop_table('user_group_assignment') op.drop_table('user_group_assignment')
op.drop_index(op.f('ix_task_definition_typename'), table_name='task_definition')
op.drop_index(op.f('ix_task_definition_bpmn_process_definition_id'), table_name='task_definition')
op.drop_index(op.f('ix_task_definition_bpmn_name'), table_name='task_definition')
op.drop_index(op.f('ix_task_definition_bpmn_identifier'), table_name='task_definition') op.drop_index(op.f('ix_task_definition_bpmn_identifier'), table_name='task_definition')
op.drop_table('task_definition') op.drop_table('task_definition')
op.drop_index(op.f('ix_task_json_data_hash'), table_name='task') op.drop_index(op.f('ix_secret_user_id'), table_name='secret')
op.drop_index(op.f('ix_task_guid'), table_name='task')
op.drop_table('task')
op.drop_table('secret') op.drop_table('secret')
op.drop_table('refresh_token') op.drop_table('refresh_token')
op.drop_index(op.f('ix_process_instance_report_identifier'), table_name='process_instance_report') op.drop_index(op.f('ix_process_instance_report_identifier'), table_name='process_instance_report')
op.drop_index(op.f('ix_process_instance_report_created_by_id'), table_name='process_instance_report') op.drop_index(op.f('ix_process_instance_report_created_by_id'), table_name='process_instance_report')
op.drop_table('process_instance_report') op.drop_table('process_instance_report')
op.drop_index(op.f('ix_process_instance_process_model_identifier'), table_name='process_instance')
op.drop_index(op.f('ix_process_instance_process_model_display_name'), table_name='process_instance')
op.drop_table('process_instance')
op.drop_table('principal') op.drop_table('principal')
op.drop_index(op.f('ix_bpmn_process_definition_relationship_bpmn_process_definition_child_id'), table_name='bpmn_process_definition_relationship')
op.drop_index(op.f('ix_bpmn_process_definition_relationship_bpmn_process_definition_parent_id'), table_name='bpmn_process_definition_relationship')
op.drop_table('bpmn_process_definition_relationship') op.drop_table('bpmn_process_definition_relationship')
op.drop_index(op.f('ix_bpmn_process_top_level_process_id'), table_name='bpmn_process')
op.drop_index(op.f('ix_bpmn_process_json_data_hash'), table_name='bpmn_process')
op.drop_index(op.f('ix_bpmn_process_direct_parent_process_id'), table_name='bpmn_process')
op.drop_index(op.f('ix_bpmn_process_bpmn_process_definition_id'), table_name='bpmn_process')
op.drop_table('bpmn_process')
op.drop_index(op.f('ix_user_service_id'), table_name='user')
op.drop_index(op.f('ix_user_service'), table_name='user')
op.drop_index(op.f('ix_user_email'), table_name='user')
op.drop_table('user') op.drop_table('user')
op.drop_table('spiff_logging')
op.drop_index(op.f('ix_spec_reference_cache_type'), table_name='spec_reference_cache') op.drop_index(op.f('ix_spec_reference_cache_type'), table_name='spec_reference_cache')
op.drop_index(op.f('ix_spec_reference_cache_process_model_id'), table_name='spec_reference_cache')
op.drop_index(op.f('ix_spec_reference_cache_identifier'), table_name='spec_reference_cache') op.drop_index(op.f('ix_spec_reference_cache_identifier'), table_name='spec_reference_cache')
op.drop_index(op.f('ix_spec_reference_cache_display_name'), table_name='spec_reference_cache') op.drop_index(op.f('ix_spec_reference_cache_display_name'), table_name='spec_reference_cache')
op.drop_table('spec_reference_cache') op.drop_table('spec_reference_cache')
op.drop_table('permission_target') op.drop_table('permission_target')
op.drop_index(op.f('ix_message_triggerable_process_model_process_model_identifier'), table_name='message_triggerable_process_model') op.drop_index(op.f('ix_message_triggerable_process_model_process_model_identifier'), table_name='message_triggerable_process_model')
op.drop_index(op.f('ix_message_triggerable_process_model_message_name'), table_name='message_triggerable_process_model')
op.drop_table('message_triggerable_process_model') op.drop_table('message_triggerable_process_model')
op.drop_index(op.f('ix_json_data_hash'), table_name='json_data')
op.drop_table('json_data') op.drop_table('json_data')
op.drop_index(op.f('ix_group_name'), table_name='group')
op.drop_index(op.f('ix_group_identifier'), table_name='group')
op.drop_table('group') op.drop_table('group')
op.drop_index(op.f('ix_correlation_property_cache_name'), table_name='correlation_property_cache')
op.drop_index(op.f('ix_correlation_property_cache_message_name'), table_name='correlation_property_cache')
op.drop_table('correlation_property_cache') op.drop_table('correlation_property_cache')
op.drop_index(op.f('ix_bpmn_process_definition_hash'), table_name='bpmn_process_definition') op.drop_index(op.f('ix_bpmn_process_definition_bpmn_name'), table_name='bpmn_process_definition')
op.drop_index(op.f('ix_bpmn_process_definition_bpmn_identifier'), table_name='bpmn_process_definition') op.drop_index(op.f('ix_bpmn_process_definition_bpmn_identifier'), table_name='bpmn_process_definition')
op.drop_table('bpmn_process_definition') op.drop_table('bpmn_process_definition')
op.drop_index(op.f('ix_bpmn_process_json_data_hash'), table_name='bpmn_process')
op.drop_index(op.f('ix_bpmn_process_guid'), table_name='bpmn_process')
op.drop_table('bpmn_process')
# ### end Alembic commands ### # ### end Alembic commands ###
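The upgrade and downgrade bodies above are Alembic autogenerated code: downgrade mirrors upgrade in reverse, dropping each table's indexes before the table itself. A minimal hand-written sketch of the same pattern, using an illustrative table name rather than one from this migration:

import sqlalchemy as sa
from alembic import op

def upgrade() -> None:
    op.create_table(
        "example",
        sa.Column("id", sa.Integer(), nullable=False),
        sa.Column("guid", sa.String(length=36), nullable=False),
        sa.PrimaryKeyConstraint("id"),
        sa.UniqueConstraint("guid"),
    )
    # op.f() applies the configured naming convention to the index name
    op.create_index(op.f("ix_example_guid"), "example", ["guid"], unique=False)

def downgrade() -> None:
    # reverse order: drop indexes first, then the table
    op.drop_index(op.f("ix_example_guid"), table_name="example")
    op.drop_table("example")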
View File
@@ -36,9 +36,7 @@ nox.options.sessions = (
def setup_database(session: Session) -> None: def setup_database(session: Session) -> None:
"""Run database migrations against the database.""" """Run database migrations against the database."""
session.env["FLASK_INSTANCE_PATH"] = os.path.join( session.env["FLASK_INSTANCE_PATH"] = os.path.join(os.getcwd(), "instance", "testing")
os.getcwd(), "instance", "testing"
)
flask_env_key = "FLASK_SESSION_SECRET_KEY" flask_env_key = "FLASK_SESSION_SECRET_KEY"
session.env[flask_env_key] = "e7711a3ba96c46c68e084a86952de16f" session.env[flask_env_key] = "e7711a3ba96c46c68e084a86952de16f"
session.env["FLASK_APP"] = "src/spiffworkflow_backend" session.env["FLASK_APP"] = "src/spiffworkflow_backend"
@@ -72,9 +70,7 @@ def activate_virtualenv_in_precommit_hooks(session: Session) -> None:
text = hook.read_text() text = hook.read_text()
bindir = repr(session.bin)[1:-1] # strip quotes bindir = repr(session.bin)[1:-1] # strip quotes
if not ( if not (Path("A") == Path("a") and bindir.lower() in text.lower() or bindir in text):
Path("A") == Path("a") and bindir.lower() in text.lower() or bindir in text
):
continue continue
lines = text.splitlines() lines = text.splitlines()
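setup_database above prepares the Flask instance path and secret key through environment variables before migrations run. A minimal sketch of the same idea as a standalone nox session; the session name and install list are illustrative, not from this noxfile:

import os

import nox
from nox.sessions import Session

@nox.session(python="3.11")
def migrate(session: Session) -> None:
    """Apply database migrations the way setup_database does."""
    session.install("flask", "flask-migrate")
    session.env["FLASK_INSTANCE_PATH"] = os.path.join(os.getcwd(), "instance", "testing")
    session.env["FLASK_APP"] = "src/spiffworkflow_backend"
    session.run("flask", "db", "upgrade")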
File diff suppressed because it is too large

View File
@@ -27,7 +27,8 @@ flask-marshmallow = "*"
flask-migrate = "*" flask-migrate = "*"
flask-restful = "*" flask-restful = "*"
werkzeug = "*" werkzeug = "*"
SpiffWorkflow = {git = "https://github.com/sartography/SpiffWorkflow", rev = "feature/remove-loop-reset"} SpiffWorkflow = {git = "https://github.com/sartography/SpiffWorkflow", rev = "main"}
# SpiffWorkflow = {git = "https://github.com/sartography/SpiffWorkflow", rev = "6cad2981712bb61eca23af1adfafce02d3277cb9"}
# SpiffWorkflow = {develop = true, path = "../SpiffWorkflow" } # SpiffWorkflow = {develop = true, path = "../SpiffWorkflow" }
sentry-sdk = "^1.10" sentry-sdk = "^1.10"
sphinx-autoapi = "^2.0" sphinx-autoapi = "^2.0"
@@ -38,10 +39,16 @@ pytest-flask = "^1.2.0"
pytest-flask-sqlalchemy = "^1.1.0" pytest-flask-sqlalchemy = "^1.1.0"
psycopg2 = "^2.9.3" psycopg2 = "^2.9.3"
typing-extensions = "^4.4.0" typing-extensions = "^4.4.0"
# pinned to higher than 65.5.0 because of a vulnerability
# and to lower than 67 because i didn't feel like addressing
# new deprecation warnings. we don't need this library explicitly,
# but at one time it was pulled in by various libs we depend on.
setuptools = "^65.5.1"
connexion = {extras = [ "swagger-ui",], version = "^2"} connexion = {extras = [ "swagger-ui",], version = "^2"}
lxml = "^4.9.1" lxml = "^4.9.1"
marshmallow-enum = "^1.5.1" marshmallow-enum = "^1.5.1"
marshmallow-sqlalchemy = "^0.28.0"
PyJWT = "^2.6.0" PyJWT = "^2.6.0"
gunicorn = "^20.1.0" gunicorn = "^20.1.0"
APScheduler = "*" APScheduler = "*"
@@ -74,7 +81,9 @@ flask-jwt-extended = "^4.4.4"
pylint = "^2.15.10" pylint = "^2.15.10"
flask-simple-crypt = "^0.3.3" flask-simple-crypt = "^0.3.3"
cryptography = "^39.0.2" cryptography = "^39.0.2"
safety = "^2.3.5"
sqlalchemy = "^2.0.7"
marshmallow-sqlalchemy = "^0.29.0"
[tool.poetry.dev-dependencies] [tool.poetry.dev-dependencies]
pytest = "^7.1.2" pytest = "^7.1.2"
@@ -126,7 +135,9 @@ filterwarnings = [
"ignore:'_request_ctx_stack' is deprecated and will be removed in Flask 2.3", "ignore:'_request_ctx_stack' is deprecated and will be removed in Flask 2.3",
"ignore:Setting 'json_encoder' on the app or a blueprint is deprecated and will be removed in Flask 2.3", "ignore:Setting 'json_encoder' on the app or a blueprint is deprecated and will be removed in Flask 2.3",
"ignore:'JSONEncoder' is deprecated and will be removed in Flask 2.3", "ignore:'JSONEncoder' is deprecated and will be removed in Flask 2.3",
"ignore:'app.json_encoder' is deprecated and will be removed in Flask 2.3" "ignore:'app.json_encoder' is deprecated and will be removed in Flask 2.3",
# SpiffWorkflow/bpmn/PythonScriptEngineEnvironment.py
'ignore:The usage of Box has been deprecated'
] ]
[tool.coverage.paths] [tool.coverage.paths]
View File
@@ -44,8 +44,9 @@ class MyJSONEncoder(DefaultJSONProvider):
return obj.serialized return obj.serialized
elif isinstance(obj, sqlalchemy.engine.row.Row): # type: ignore elif isinstance(obj, sqlalchemy.engine.row.Row): # type: ignore
return_dict = {} return_dict = {}
for row_key in obj.keys(): row_mapping = obj._mapping
row_value = obj[row_key] for row_key in row_mapping.keys():
row_value = row_mapping[row_key]
if hasattr(row_value, "serialized"): if hasattr(row_value, "serialized"):
return_dict.update(row_value.serialized) return_dict.update(row_value.serialized)
elif hasattr(row_value, "__dict__"): elif hasattr(row_value, "__dict__"):
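The change above replaces direct key access on a SQLAlchemy Row, which SQLAlchemy 2.0 removed, with the Row._mapping accessor. A self-contained sketch of that accessor, assuming SQLAlchemy 1.4 or newer:

import sqlalchemy as sa

engine = sa.create_engine("sqlite:///:memory:")
with engine.connect() as conn:
    row = conn.execute(sa.text("SELECT 1 AS id, 'demo' AS name")).first()
    # Rows behave like named tuples; dict-style access goes through ._mapping
    as_dict = {key: row._mapping[key] for key in row._mapping.keys()}
    print(as_dict)  # {'id': 1, 'name': 'demo'}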
@@ -63,11 +64,19 @@ class MyJSONEncoder(DefaultJSONProvider):
return super().dumps(obj, **kwargs) return super().dumps(obj, **kwargs)
def start_scheduler( def start_scheduler(app: flask.app.Flask, scheduler_class: BaseScheduler = BackgroundScheduler) -> None:
app: flask.app.Flask, scheduler_class: BaseScheduler = BackgroundScheduler
) -> None:
"""Start_scheduler.""" """Start_scheduler."""
scheduler = scheduler_class() scheduler = scheduler_class()
# TODO: polling intervals for different jobs
polling_interval_in_seconds = app.config["SPIFFWORKFLOW_BACKEND_BACKGROUND_SCHEDULER_POLLING_INTERVAL_IN_SECONDS"]
user_input_required_polling_interval_in_seconds = app.config[
"SPIFFWORKFLOW_BACKEND_BACKGROUND_SCHEDULER_USER_INPUT_REQUIRED_POLLING_INTERVAL_IN_SECONDS"
]
# TODO: add job to release locks to simplify other queries
# TODO: add job to delete completed entries
# TODO: add job to run old/low priority instances so they do not get drowned out
scheduler.add_job( scheduler.add_job(
BackgroundProcessingService(app).process_message_instances_with_app_context, BackgroundProcessingService(app).process_message_instances_with_app_context,
"interval", "interval",
@@ -76,16 +85,27 @@
scheduler.add_job( scheduler.add_job(
BackgroundProcessingService(app).process_waiting_process_instances, BackgroundProcessingService(app).process_waiting_process_instances,
"interval", "interval",
seconds=10, seconds=polling_interval_in_seconds,
) )
scheduler.add_job( scheduler.add_job(
BackgroundProcessingService(app).process_user_input_required_process_instances, BackgroundProcessingService(app).process_user_input_required_process_instances,
"interval", "interval",
seconds=120, seconds=user_input_required_polling_interval_in_seconds,
) )
scheduler.start() scheduler.start()
def should_start_scheduler(app: flask.app.Flask) -> bool:
if not app.config["SPIFFWORKFLOW_BACKEND_RUN_BACKGROUND_SCHEDULER"]:
return False
# do not start the scheduler twice in flask debug mode but support code reloading
if app.config["ENV_IDENTIFIER"] != "local_development" or os.environ.get("WERKZEUG_RUN_MAIN") != "true":
return False
return True
class NoOpCipher: class NoOpCipher:
def encrypt(self, value: str) -> bytes: def encrypt(self, value: str) -> bytes:
return str.encode(value) return str.encode(value)
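The scheduler hunk above swaps the hard-coded 10- and 120-second intervals for config-driven values. A standalone sketch of the underlying APScheduler pattern; the job function and interval literal are illustrative:

from apscheduler.schedulers.background import BackgroundScheduler

def poll_waiting_process_instances() -> None:
    print("checking for waiting process instances")

scheduler = BackgroundScheduler()
# in the real code this value comes from app.config, not a literal
polling_interval_in_seconds = 10
scheduler.add_job(poll_waiting_process_instances, "interval", seconds=polling_interval_in_seconds)
scheduler.start()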
@@ -103,9 +123,7 @@ def create_app() -> flask.app.Flask:
# variable, it will be one thing when we run flask db upgrade in the # variable, it will be one thing when we run flask db upgrade in the
# noxfile and another thing when the tests actually run. # noxfile and another thing when the tests actually run.
# instance_path is described more at https://flask.palletsprojects.com/en/2.1.x/config/ # instance_path is described more at https://flask.palletsprojects.com/en/2.1.x/config/
connexion_app = connexion.FlaskApp( connexion_app = connexion.FlaskApp(__name__, server_args={"instance_path": os.environ.get("FLASK_INSTANCE_PATH")})
__name__, server_args={"instance_path": os.environ.get("FLASK_INSTANCE_PATH")}
)
app = connexion_app.app app = connexion_app.app
app.config["CONNEXION_APP"] = connexion_app app.config["CONNEXION_APP"] = connexion_app
app.config["SESSION_TYPE"] = "filesystem" app.config["SESSION_TYPE"] = "filesystem"
@@ -122,8 +140,7 @@ def create_app() -> flask.app.Flask:
# we will add an Access-Control-Max-Age header to the response to tell the browser it doesn't # we will add an Access-Control-Max-Age header to the response to tell the browser it doesn't
# need to continually keep asking for the same path. # need to continually keep asking for the same path.
origins_re = [ origins_re = [
r"^https?:\/\/%s(.*)" % o.replace(".", r"\.") r"^https?:\/\/%s(.*)" % o.replace(".", r"\.") for o in app.config["SPIFFWORKFLOW_BACKEND_CORS_ALLOW_ORIGINS"]
for o in app.config["SPIFFWORKFLOW_BACKEND_CORS_ALLOW_ORIGINS"]
] ]
CORS(app, origins=origins_re, max_age=3600, supports_credentials=True) CORS(app, origins=origins_re, max_age=3600, supports_credentials=True)
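The collapsed comprehension above escapes the dots in each configured origin and anchors the scheme. A quick illustration of the regex it produces for a single origin (the origin value here is an example, not a config value):

import re

origin = "spiffworkflow.org"
pattern = r"^https?:\/\/%s(.*)" % origin.replace(".", r"\.")
print(pattern)                                               # ^https?:\/\/spiffworkflow\.org(.*)
print(bool(re.match(pattern, "https://spiffworkflow.org")))  # True
print(bool(re.match(pattern, "https://spiffworkflowXorg")))  # False: the dot is escaped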
@@ -134,11 +151,7 @@ def create_app() -> flask.app.Flask:
app.json = MyJSONEncoder(app) app.json = MyJSONEncoder(app)
# do not start the scheduler twice in flask debug mode if should_start_scheduler(app):
if (
app.config["SPIFFWORKFLOW_BACKEND_RUN_BACKGROUND_SCHEDULER"]
and os.environ.get("WERKZEUG_RUN_MAIN") != "true"
):
start_scheduler(app) start_scheduler(app)
configure_sentry(app) configure_sentry(app)
@@ -176,13 +189,9 @@ def get_hacked_up_app_for_script() -> flask.app.Flask:
os.environ[flask_env_key] = "whatevs" os.environ[flask_env_key] = "whatevs"
if "SPIFFWORKFLOW_BACKEND_BPMN_SPEC_ABSOLUTE_DIR" not in os.environ: if "SPIFFWORKFLOW_BACKEND_BPMN_SPEC_ABSOLUTE_DIR" not in os.environ:
home = os.environ["HOME"] home = os.environ["HOME"]
full_process_model_path = ( full_process_model_path = f"{home}/projects/github/sartography/sample-process-models"
f"{home}/projects/github/sartography/sample-process-models"
)
if os.path.isdir(full_process_model_path): if os.path.isdir(full_process_model_path):
os.environ["SPIFFWORKFLOW_BACKEND_BPMN_SPEC_ABSOLUTE_DIR"] = ( os.environ["SPIFFWORKFLOW_BACKEND_BPMN_SPEC_ABSOLUTE_DIR"] = full_process_model_path
full_process_model_path
)
else: else:
raise Exception(f"Could not find {full_process_model_path}") raise Exception(f"Could not find {full_process_model_path}")
app = create_app() app = create_app()
@@ -226,28 +235,24 @@ def configure_sentry(app: flask.app.Flask) -> None:
return None return None
return event return event
sentry_errors_sample_rate = app.config.get( sentry_errors_sample_rate = app.config.get("SPIFFWORKFLOW_BACKEND_SENTRY_ERRORS_SAMPLE_RATE")
"SPIFFWORKFLOW_BACKEND_SENTRY_ERRORS_SAMPLE_RATE"
)
if sentry_errors_sample_rate is None: if sentry_errors_sample_rate is None:
raise Exception( raise Exception("SPIFFWORKFLOW_BACKEND_SENTRY_ERRORS_SAMPLE_RATE is not set somehow")
"SPIFFWORKFLOW_BACKEND_SENTRY_ERRORS_SAMPLE_RATE is not set somehow"
)
sentry_traces_sample_rate = app.config.get( sentry_traces_sample_rate = app.config.get("SPIFFWORKFLOW_BACKEND_SENTRY_TRACES_SAMPLE_RATE")
"SPIFFWORKFLOW_BACKEND_SENTRY_TRACES_SAMPLE_RATE"
)
if sentry_traces_sample_rate is None: if sentry_traces_sample_rate is None:
raise Exception( raise Exception("SPIFFWORKFLOW_BACKEND_SENTRY_TRACES_SAMPLE_RATE is not set somehow")
"SPIFFWORKFLOW_BACKEND_SENTRY_TRACES_SAMPLE_RATE is not set somehow"
) sentry_env_identifier = app.config["ENV_IDENTIFIER"]
if app.config.get("SPIFFWORKFLOW_BACKEND_SENTRY_ENV_IDENTIFIER"):
sentry_env_identifier = app.config.get("SPIFFWORKFLOW_BACKEND_SENTRY_ENV_IDENTIFIER")
sentry_configs = { sentry_configs = {
"dsn": app.config.get("SPIFFWORKFLOW_BACKEND_SENTRY_DSN"), "dsn": app.config.get("SPIFFWORKFLOW_BACKEND_SENTRY_DSN"),
"integrations": [ "integrations": [
FlaskIntegration(), FlaskIntegration(),
], ],
"environment": app.config["ENV_IDENTIFIER"], "environment": sentry_env_identifier,
# sample_rate is the errors sample rate. we usually set it to 1 (100%) # sample_rate is the errors sample rate. we usually set it to 1 (100%)
# so we get all errors in sentry. # so we get all errors in sentry.
"sample_rate": float(sentry_errors_sample_rate), "sample_rate": float(sentry_errors_sample_rate),
@@ -265,8 +270,6 @@ def configure_sentry(app: flask.app.Flask) -> None:
# but also we commented out profiling because it was causing segfaults (i guess it is marked experimental) # but also we commented out profiling because it was causing segfaults (i guess it is marked experimental)
profiles_sample_rate = 0 if sys.platform.startswith("win") else 1 profiles_sample_rate = 0 if sys.platform.startswith("win") else 1
if profiles_sample_rate > 0: if profiles_sample_rate > 0:
sentry_configs["_experiments"] = { sentry_configs["_experiments"] = {"profiles_sample_rate": profiles_sample_rate}
"profiles_sample_rate": profiles_sample_rate
}
sentry_sdk.init(**sentry_configs) sentry_sdk.init(**sentry_configs)
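configure_sentry above assembles keyword arguments for a single sentry_sdk.init call. A minimal sketch of that call, assuming sentry-sdk with the Flask integration; the DSN, environment, and rates are placeholders:

import sentry_sdk
from sentry_sdk.integrations.flask import FlaskIntegration

sentry_sdk.init(
    dsn="",  # an empty DSN disables sending; the real value comes from app config
    integrations=[FlaskIntegration()],
    environment="local_development",
    sample_rate=1.0,         # errors sample rate
    traces_sample_rate=0.5,  # performance traces sample rate
)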
View File
@@ -901,18 +901,24 @@ paths:
description: The identifier of the process to use for the diagram. Useful for displaying the diagram for a call activity. description: The identifier of the process to use for the diagram. Useful for displaying the diagram for a call activity.
schema: schema:
type: string type: string
- name: all_tasks - name: most_recent_tasks_only
in: query in: query
required: false required: false
description: If true, this will return all tasks associated with the process instance and not just user tasks. description: If true, this will return only the most recent tasks.
schema: schema:
type: boolean type: boolean
- name: spiff_step - name: bpmn_process_guid
in: query in: query
required: false required: false
description: If set will return the tasks as they were during a specific step of execution. description: The guid of the bpmn process to get the tasks for.
schema: schema:
type: integer type: string
- name: to_task_guid
in: query
required: false
description: Get the tasks only up to the given guid.
schema:
type: string
get: get:
tags: tags:
- Process Instances - Process Instances
@@ -948,18 +954,24 @@ paths:
description: The identifier of the process to use for the diagram. Useful for displaying the diagram for a call activity. description: The identifier of the process to use for the diagram. Useful for displaying the diagram for a call activity.
schema: schema:
type: string type: string
- name: all_tasks - name: most_recent_tasks_only
in: query in: query
required: false required: false
description: If true, this will return all tasks associated with the process instance and not just user tasks. description: If true, this will return only the most recent tasks.
schema: schema:
type: boolean type: boolean
- name: spiff_step - name: bpmn_process_guid
in: query in: query
required: false required: false
description: If set will return the tasks as they were during a specific step of execution. description: The guid of the bpmn process to get the tasks for.
schema: schema:
type: integer type: string
- name: to_task_guid
in: query
required: false
description: Get the tasks only up to the given guid.
schema:
type: string
get: get:
tags: tags:
- Process Instances - Process Instances
@@ -1164,7 +1176,7 @@ paths:
schema: schema:
$ref: "#/components/schemas/OkTrue" $ref: "#/components/schemas/OkTrue"
/process-instance-reset/{modified_process_model_identifier}/{process_instance_id}/{spiff_step}: /process-instance-reset/{modified_process_model_identifier}/{process_instance_id}/{to_task_guid}:
parameters: parameters:
- name: modified_process_model_identifier - name: modified_process_model_identifier
in: path in: path
@@ -1178,12 +1190,12 @@ paths:
description: The unique id of an existing process instance. description: The unique id of an existing process instance.
schema: schema:
type: integer type: integer
- name: spiff_step - name: to_task_guid
in: query in: path
required: false required: true
description: Reset the process to this state description: Get the tasks only up to the given guid.
schema: schema:
type: integer type: string
post: post:
operationId: spiffworkflow_backend.routes.process_instances_controller.process_instance_reset operationId: spiffworkflow_backend.routes.process_instances_controller.process_instance_reset
summary: Reset a process instance to an earlier step summary: Reset a process instance to an earlier step
@@ -1239,9 +1251,16 @@ paths:
$ref: "#/components/schemas/OkTrue" $ref: "#/components/schemas/OkTrue"
/process-instances/reports/columns: /process-instances/reports/columns:
parameters:
- name: process_model_identifier
in: query
required: false
description: The process model identifier to filter by
schema:
type: string
get: get:
operationId: spiffworkflow_backend.routes.process_instances_controller.process_instance_report_column_list operationId: spiffworkflow_backend.routes.process_instances_controller.process_instance_report_column_list
summary: Returns all available columns for a process instance report. summary: Returns all available columns for a process instance report, including custom metadata
tags: tags:
- Process Instances - Process Instances
responses: responses:
@@ -1526,6 +1545,25 @@ paths:
items: items:
$ref: "#/components/schemas/User" $ref: "#/components/schemas/User"
/users/exists/by-username:
post:
tags:
- Users
operationId: spiffworkflow_backend.routes.users_controller.user_exists_by_username
summary: Returns true if a user exists with the given username.
requestBody:
content:
application/json:
schema:
$ref: "#/components/schemas/User"
responses:
"200":
description: true if user exists
content:
application/json:
schema:
$ref: "#/components/schemas/OkTrue"
/user-groups/for-current-user: /user-groups/for-current-user:
get: get:
tags: tags:
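A hedged example of calling the /users/exists/by-username endpoint added in the hunk above; the base URL, API prefix, and auth header are assumptions rather than values from this spec:

import requests

BASE = "http://localhost:7000/v1.0"  # assumed backend URL and API prefix

response = requests.post(
    f"{BASE}/users/exists/by-username",
    json={"username": "example-user"},
    headers={"Authorization": "Bearer <token>"},  # auth scheme assumed
    timeout=10,
)
print(response.json())  # per the OkTrue schema, something like {"ok": true}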
@@ -1542,7 +1580,7 @@ paths:
items: items:
$ref: "#/components/schemas/Task" $ref: "#/components/schemas/Task"
/task-data/{modified_process_model_identifier}/{process_instance_id}/{spiff_step}: /task-data/{modified_process_model_identifier}/{process_instance_id}/{task_guid}:
parameters: parameters:
- name: modified_process_model_identifier - name: modified_process_model_identifier
in: path in: path
@@ -1556,15 +1594,15 @@ paths:
description: The unique id of an existing process instance. description: The unique id of an existing process instance.
schema: schema:
type: integer type: integer
- name: spiff_step - name: task_guid
in: path in: path
required: true required: true
description: If set will return the tasks as they were during a specific step of execution. description: The unique id of the task.
schema: schema:
type: integer type: string
get: get:
operationId: spiffworkflow_backend.routes.tasks_controller.task_data_show operationId: spiffworkflow_backend.routes.tasks_controller.task_data_show
summary: Get task data for a single task in a spiff step. summary: Get task data for a single task.
tags: tags:
- Process Instances - Process Instances
responses: responses:
@@ -1574,35 +1612,8 @@ paths:
application/json: application/json:
schema: schema:
$ref: "#/components/schemas/Task" $ref: "#/components/schemas/Task"
/task-data/{modified_process_model_identifier}/{process_instance_id}/{task_id}:
parameters:
- name: modified_process_model_identifier
in: path
required: true
description: The modified id of an existing process model
schema:
type: string
- name: process_instance_id
in: path
required: true
description: The unique id of an existing process instance.
schema:
type: integer
- name: task_id
in: path
required: true
description: The unique id of the task.
schema:
type: string
- name: spiff_step
in: query
required: false
description: If set will return the tasks as they were during a specific step of execution.
schema:
type: integer
put: put:
operationId: spiffworkflow_backend.routes.process_api_blueprint.task_data_update operationId: spiffworkflow_backend.routes.tasks_controller.task_data_update
summary: Update the task data for requested instance and task summary: Update the task data for requested instance and task
tags: tags:
- Process Instances - Process Instances
@@ -1707,7 +1718,7 @@ paths:
schema: schema:
$ref: "#/components/schemas/Workflow" $ref: "#/components/schemas/Workflow"
/task-complete/{modified_process_model_identifier}/{process_instance_id}/{task_id}: /task-complete/{modified_process_model_identifier}/{process_instance_id}/{task_guid}:
parameters: parameters:
- name: modified_process_model_identifier - name: modified_process_model_identifier
in: path in: path
@@ -1721,14 +1732,14 @@ paths:
description: The unique id of the process instance description: The unique id of the process instance
schema: schema:
type: string type: string
- name: task_id - name: task_guid
in: path in: path
required: true required: true
description: The unique id of the task. description: The unique id of the task.
schema: schema:
type: string type: string
post: post:
operationId: spiffworkflow_backend.routes.process_api_blueprint.manual_complete_task operationId: spiffworkflow_backend.routes.tasks_controller.manual_complete_task
summary: Mark a task complete without executing it summary: Mark a task complete without executing it
tags: tags:
- Process Instances - Process Instances
@@ -1807,9 +1818,9 @@ paths:
schema: schema:
$ref: "#/components/schemas/ServiceTask" $ref: "#/components/schemas/ServiceTask"
/tasks/{process_instance_id}/{task_id}: /tasks/{process_instance_id}/{task_guid}:
parameters: parameters:
- name: task_id - name: task_guid
in: path in: path
required: true required: true
description: The unique id of an existing task.
@@ -1821,10 +1832,10 @@ paths:
description: The unique id of an existing process instance. description: The unique id of an existing process instance.
schema: schema:
type: integer type: integer
- name: terminate_loop - name: save_as_draft
in: query in: query
required: false required: false
description: Terminate the loop on a looping task description: Save the data to task but do not complete it.
schema: schema:
type: boolean type: boolean
get: get:
@@ -2078,6 +2089,37 @@ paths:
schema: schema:
$ref: "#/components/schemas/Secret" $ref: "#/components/schemas/Secret"
/connector-proxy/type-ahead/{category}:
parameters:
- name: category
in: path
required: true
description: The category for the type-ahead search
schema:
type: string
- name: prefix
in: query
required: true
description: The prefix to search for
schema:
type: string
- name: limit
in: query
required: true
description: The maximum number of search results
schema:
type: integer
get:
operationId: spiffworkflow_backend.routes.connector_proxy_controller.type_ahead
summary: Return type ahead search results
tags:
- Type Ahead
responses:
"200":
description: We return type ahead search results
#content:
# - application/json
components: components:
securitySchemes: securitySchemes:
jwt: jwt:
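Likewise, a hedged sketch of the new connector-proxy type-ahead route added above; the category value, base URL, and prefix are illustrative:

import requests

BASE = "http://localhost:7000/v1.0"  # assumed backend URL and API prefix

response = requests.get(
    f"{BASE}/connector-proxy/type-ahead/cities",  # "cities" is an example category
    params={"prefix": "atla", "limit": 10},
    timeout=10,
)
print(response.status_code, response.text)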
View File
@@ -30,13 +30,9 @@ def setup_database_uri(app: Flask) -> None:
db_pswd = app.config.get("SPIFFWORKFLOW_BACKEND_DATABASE_PASSWORD") db_pswd = app.config.get("SPIFFWORKFLOW_BACKEND_DATABASE_PASSWORD")
if db_pswd is None: if db_pswd is None:
db_pswd = "" db_pswd = ""
app.config["SQLALCHEMY_DATABASE_URI"] = ( app.config["SQLALCHEMY_DATABASE_URI"] = f"mysql+mysqlconnector://root:{db_pswd}@localhost/{database_name}"
f"mysql+mysqlconnector://root:{db_pswd}@localhost/{database_name}"
)
else: else:
app.config["SQLALCHEMY_DATABASE_URI"] = app.config.get( app.config["SQLALCHEMY_DATABASE_URI"] = app.config.get("SPIFFWORKFLOW_BACKEND_DATABASE_URI")
"SPIFFWORKFLOW_BACKEND_DATABASE_URI"
)
def load_config_file(app: Flask, env_config_module: str) -> None: def load_config_file(app: Flask, env_config_module: str) -> None:
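setup_database_uri above prefers an explicit SPIFFWORKFLOW_BACKEND_DATABASE_URI and otherwise builds a MySQL URI from the configured password. A condensed sketch of that precedence; the database name is illustrative:

import os

database_name = "spiffworkflow_backend_local_development"  # illustrative
db_pswd = os.environ.get("SPIFFWORKFLOW_BACKEND_DATABASE_PASSWORD") or ""
uri = os.environ.get(
    "SPIFFWORKFLOW_BACKEND_DATABASE_URI",
    f"mysql+mysqlconnector://root:{db_pswd}@localhost/{database_name}",
)
print(uri)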
@@ -45,30 +41,20 @@ def load_config_file(app: Flask, env_config_module: str) -> None:
app.config.from_object(env_config_module) app.config.from_object(env_config_module)
print(f"loaded config: {env_config_module}") print(f"loaded config: {env_config_module}")
except ImportStringError as exception: except ImportStringError as exception:
if ( if os.environ.get("SPIFFWORKFLOW_BACKEND_TERRAFORM_DEPLOYED_ENVIRONMENT") != "true":
os.environ.get("SPIFFWORKFLOW_BACKEND_TERRAFORM_DEPLOYED_ENVIRONMENT") raise ModuleNotFoundError(f"Cannot find config module: {env_config_module}") from exception
!= "true"
):
raise ModuleNotFoundError(
f"Cannot find config module: {env_config_module}"
) from exception
def _set_up_tenant_specific_fields_as_list_of_strings(app: Flask) -> None: def _set_up_tenant_specific_fields_as_list_of_strings(app: Flask) -> None:
tenant_specific_fields = app.config.get( tenant_specific_fields = app.config.get("SPIFFWORKFLOW_BACKEND_OPEN_ID_TENANT_SPECIFIC_FIELDS")
"SPIFFWORKFLOW_BACKEND_OPEN_ID_TENANT_SPECIFIC_FIELDS"
)
if tenant_specific_fields is None or tenant_specific_fields == "": if tenant_specific_fields is None or tenant_specific_fields == "":
app.config["SPIFFWORKFLOW_BACKEND_OPEN_ID_TENANT_SPECIFIC_FIELDS"] = [] app.config["SPIFFWORKFLOW_BACKEND_OPEN_ID_TENANT_SPECIFIC_FIELDS"] = []
else: else:
app.config["SPIFFWORKFLOW_BACKEND_OPEN_ID_TENANT_SPECIFIC_FIELDS"] = ( app.config["SPIFFWORKFLOW_BACKEND_OPEN_ID_TENANT_SPECIFIC_FIELDS"] = tenant_specific_fields.split(",")
tenant_specific_fields.split(",")
)
if len(app.config["SPIFFWORKFLOW_BACKEND_OPEN_ID_TENANT_SPECIFIC_FIELDS"]) > 3: if len(app.config["SPIFFWORKFLOW_BACKEND_OPEN_ID_TENANT_SPECIFIC_FIELDS"]) > 3:
raise ConfigurationError( raise ConfigurationError(
"SPIFFWORKFLOW_BACKEND_OPEN_ID_TENANT_SPECIFIC_FIELDS can have a" "SPIFFWORKFLOW_BACKEND_OPEN_ID_TENANT_SPECIFIC_FIELDS can have a maximum of 3 fields"
" maximum of 3 fields"
) )
@@ -80,9 +66,7 @@ def setup_config(app: Flask) -> None:
except OSError: except OSError:
pass pass
app.config["ENV_IDENTIFIER"] = os.environ.get( app.config["ENV_IDENTIFIER"] = os.environ.get("SPIFFWORKFLOW_BACKEND_ENV", "local_development")
"SPIFFWORKFLOW_BACKEND_ENV", "local_development"
)
app.config["SQLALCHEMY_TRACK_MODIFICATIONS"] = False app.config["SQLALCHEMY_TRACK_MODIFICATIONS"] = False
load_config_file(app, "spiffworkflow_backend.config.default") load_config_file(app, "spiffworkflow_backend.config.default")
@@ -99,10 +83,7 @@ def setup_config(app: Flask) -> None:
# This allows config/testing.py or instance/config.py to override the default config # This allows config/testing.py or instance/config.py to override the default config
if "ENV_IDENTIFIER" in app.config and app.config["ENV_IDENTIFIER"] == "testing": if "ENV_IDENTIFIER" in app.config and app.config["ENV_IDENTIFIER"] == "testing":
app.config.from_pyfile("config/testing.py", silent=True) app.config.from_pyfile("config/testing.py", silent=True)
elif ( elif "ENV_IDENTIFIER" in app.config and app.config["ENV_IDENTIFIER"] == "unit_testing":
"ENV_IDENTIFIER" in app.config
and app.config["ENV_IDENTIFIER"] == "unit_testing"
):
app.config.from_pyfile("config/unit_testing.py", silent=True) app.config.from_pyfile("config/unit_testing.py", silent=True)
else: else:
app.config.from_pyfile(f"{app.instance_path}/config.py", silent=True) app.config.from_pyfile(f"{app.instance_path}/config.py", silent=True)
@@ -125,15 +106,10 @@ def setup_config(app: Flask) -> None:
app.config.from_pyfile(os.path.join("config", "secrets.py"), silent=True) app.config.from_pyfile(os.path.join("config", "secrets.py"), silent=True)
if app.config["SPIFFWORKFLOW_BACKEND_BPMN_SPEC_ABSOLUTE_DIR"] is None: if app.config["SPIFFWORKFLOW_BACKEND_BPMN_SPEC_ABSOLUTE_DIR"] is None:
raise ConfigurationError( raise ConfigurationError("SPIFFWORKFLOW_BACKEND_BPMN_SPEC_ABSOLUTE_DIR config must be set")
"SPIFFWORKFLOW_BACKEND_BPMN_SPEC_ABSOLUTE_DIR config must be set"
)
if app.config["FLASK_SESSION_SECRET_KEY"] is None: if app.config["FLASK_SESSION_SECRET_KEY"] is None:
raise KeyError( raise KeyError("Cannot find the secret_key from the environment. Please set FLASK_SESSION_SECRET_KEY")
"Cannot find the secret_key from the environment. Please set"
" FLASK_SESSION_SECRET_KEY"
)
app.secret_key = os.environ.get("FLASK_SESSION_SECRET_KEY") app.secret_key = os.environ.get("FLASK_SESSION_SECRET_KEY")
View File
@@ -8,9 +8,7 @@ from os import environ
FLASK_SESSION_SECRET_KEY = environ.get("FLASK_SESSION_SECRET_KEY") FLASK_SESSION_SECRET_KEY = environ.get("FLASK_SESSION_SECRET_KEY")
SPIFFWORKFLOW_BACKEND_BPMN_SPEC_ABSOLUTE_DIR = environ.get( SPIFFWORKFLOW_BACKEND_BPMN_SPEC_ABSOLUTE_DIR = environ.get("SPIFFWORKFLOW_BACKEND_BPMN_SPEC_ABSOLUTE_DIR")
"SPIFFWORKFLOW_BACKEND_BPMN_SPEC_ABSOLUTE_DIR"
)
cors_allow_all = "*" cors_allow_all = "*"
SPIFFWORKFLOW_BACKEND_CORS_ALLOW_ORIGINS = re.split( SPIFFWORKFLOW_BACKEND_CORS_ALLOW_ORIGINS = re.split(
r",\s*", r",\s*",
@@ -18,19 +16,35 @@ SPIFFWORKFLOW_BACKEND_CORS_ALLOW_ORIGINS = re.split(
) )
SPIFFWORKFLOW_BACKEND_RUN_BACKGROUND_SCHEDULER = ( SPIFFWORKFLOW_BACKEND_RUN_BACKGROUND_SCHEDULER = (
environ.get("SPIFFWORKFLOW_BACKEND_RUN_BACKGROUND_SCHEDULER", default="false") environ.get("SPIFFWORKFLOW_BACKEND_RUN_BACKGROUND_SCHEDULER", default="false") == "true"
== "true" )
SPIFFWORKFLOW_BACKEND_BACKGROUND_SCHEDULER_ALLOW_OPTIMISTIC_CHECKS = (
environ.get("SPIFFWORKFLOW_BACKEND_BACKGROUND_SCHEDULER_ALLOW_OPTIMISTIC_CHECKS", default="true") == "true"
)
SPIFFWORKFLOW_BACKEND_BACKGROUND_SCHEDULER_POLLING_INTERVAL_IN_SECONDS = int(
environ.get(
"SPIFFWORKFLOW_BACKEND_BACKGROUND_SCHEDULER_POLLING_INTERVAL_IN_SECONDS",
default="10",
)
)
SPIFFWORKFLOW_BACKEND_BACKGROUND_SCHEDULER_USER_INPUT_REQUIRED_POLLING_INTERVAL_IN_SECONDS = int(
environ.get(
"SPIFFWORKFLOW_BACKEND_BACKGROUND_SCHEDULER_USER_INPUT_REQUIRED_POLLING_INTERVAL_IN_SECONDS",
default="120",
)
) )
SPIFFWORKFLOW_BACKEND_URL_FOR_FRONTEND = environ.get( SPIFFWORKFLOW_BACKEND_URL_FOR_FRONTEND = environ.get(
"SPIFFWORKFLOW_BACKEND_URL_FOR_FRONTEND", default="http://localhost:7001" "SPIFFWORKFLOW_BACKEND_URL_FOR_FRONTEND", default="http://localhost:7001"
) )
SPIFFWORKFLOW_BACKEND_URL = environ.get( SPIFFWORKFLOW_BACKEND_URL = environ.get("SPIFFWORKFLOW_BACKEND_URL", default="http://localhost:7000")
"SPIFFWORKFLOW_BACKEND_URL", default="http://localhost:7000"
)
# service task connector proxy # service task connector proxy
SPIFFWORKFLOW_BACKEND_CONNECTOR_PROXY_URL = environ.get( SPIFFWORKFLOW_BACKEND_CONNECTOR_PROXY_URL = environ.get(
"SPIFFWORKFLOW_BACKEND_CONNECTOR_PROXY_URL", default="http://localhost:7004" "SPIFFWORKFLOW_BACKEND_CONNECTOR_PROXY_URL", default="http://localhost:7004"
) )
SPIFFWORKFLOW_BACKEND_CONNECTOR_PROXY_TYPE_AHEAD_URL = environ.get(
"SPIFFWORKFLOW_BACKEND_CONNECTOR_PROXY_TYPE_AHEAD_URL",
default="https://emehvlxpwodjawtgi7ctkbvpse0vmaow.lambda-url.us-east-1.on.aws",
)
# Open ID server # Open ID server
# use "http://localhost:7000/openid" for running with simple openid # use "http://localhost:7000/openid" for running with simple openid
@@ -62,18 +76,12 @@ SPIFFWORKFLOW_BACKEND_ENCRYPTION_LIB = environ.get(
default="no_op_cipher", default="no_op_cipher",
) )
SPIFFWORKFLOW_BACKEND_LOG_TO_FILE = ( SPIFFWORKFLOW_BACKEND_LOG_TO_FILE = environ.get("SPIFFWORKFLOW_BACKEND_LOG_TO_FILE", default="false") == "true"
environ.get("SPIFFWORKFLOW_BACKEND_LOG_TO_FILE", default="false") == "true"
)
SPIFFWORKFLOW_BACKEND_PERMISSIONS_FILE_NAME = environ.get( SPIFFWORKFLOW_BACKEND_PERMISSIONS_FILE_NAME = environ.get("SPIFFWORKFLOW_BACKEND_PERMISSIONS_FILE_NAME")
"SPIFFWORKFLOW_BACKEND_PERMISSIONS_FILE_NAME"
)
# Sentry Configuration # Sentry Configuration
SPIFFWORKFLOW_BACKEND_SENTRY_DSN = environ.get( SPIFFWORKFLOW_BACKEND_SENTRY_DSN = environ.get("SPIFFWORKFLOW_BACKEND_SENTRY_DSN", default="")
"SPIFFWORKFLOW_BACKEND_SENTRY_DSN", default=""
)
SPIFFWORKFLOW_BACKEND_SENTRY_ERRORS_SAMPLE_RATE = environ.get( SPIFFWORKFLOW_BACKEND_SENTRY_ERRORS_SAMPLE_RATE = environ.get(
"SPIFFWORKFLOW_BACKEND_SENTRY_ERRORS_SAMPLE_RATE", default="1" "SPIFFWORKFLOW_BACKEND_SENTRY_ERRORS_SAMPLE_RATE", default="1"
) # send all errors ) # send all errors
@@ -83,43 +91,29 @@ SPIFFWORKFLOW_BACKEND_SENTRY_TRACES_SAMPLE_RATE = environ.get(
SPIFFWORKFLOW_BACKEND_SENTRY_ORGANIZATION_SLUG = environ.get( SPIFFWORKFLOW_BACKEND_SENTRY_ORGANIZATION_SLUG = environ.get(
"SPIFFWORKFLOW_BACKEND_SENTRY_ORGANIZATION_SLUG", default=None "SPIFFWORKFLOW_BACKEND_SENTRY_ORGANIZATION_SLUG", default=None
) )
SPIFFWORKFLOW_BACKEND_SENTRY_PROJECT_SLUG = environ.get( SPIFFWORKFLOW_BACKEND_SENTRY_PROJECT_SLUG = environ.get("SPIFFWORKFLOW_BACKEND_SENTRY_PROJECT_SLUG", default=None)
"SPIFFWORKFLOW_BACKEND_SENTRY_PROJECT_SLUG", default=None SPIFFWORKFLOW_BACKEND_SENTRY_ENV_IDENTIFIER = environ.get("SPIFFWORKFLOW_BACKEND_SENTRY_ENV_IDENTIFIER", default=None)
)
SPIFFWORKFLOW_BACKEND_SENTRY_PROFILING_ENABLED = ( SPIFFWORKFLOW_BACKEND_SENTRY_PROFILING_ENABLED = (
environ.get("SPIFFWORKFLOW_BACKEND_SENTRY_PROFILING_ENABLED", default="false") environ.get("SPIFFWORKFLOW_BACKEND_SENTRY_PROFILING_ENABLED", default="false") == "true"
== "true"
) )
SPIFFWORKFLOW_BACKEND_LOG_LEVEL = environ.get( SPIFFWORKFLOW_BACKEND_LOG_LEVEL = environ.get("SPIFFWORKFLOW_BACKEND_LOG_LEVEL", default="info")
"SPIFFWORKFLOW_BACKEND_LOG_LEVEL", default="info"
)
# When a user clicks on the `Publish` button, this is the default branch this server merges into. # When a user clicks on the `Publish` button, this is the default branch this server merges into.
# I.e., dev server could have `staging` here. Staging server might have `production` here. # I.e., dev server could have `staging` here. Staging server might have `production` here.
SPIFFWORKFLOW_BACKEND_GIT_PUBLISH_TARGET_BRANCH = environ.get( SPIFFWORKFLOW_BACKEND_GIT_PUBLISH_TARGET_BRANCH = environ.get("SPIFFWORKFLOW_BACKEND_GIT_PUBLISH_TARGET_BRANCH")
"SPIFFWORKFLOW_BACKEND_GIT_PUBLISH_TARGET_BRANCH"
)
# This is the branch that the app automatically commits to every time the user clicks the save button # This is the branch that the app automatically commits to every time the user clicks the save button
# or otherwise changes a process model. # or otherwise changes a process model.
# If publishing is enabled, the contents of this "staging area" / "scratch pad" / WIP spot will be used # If publishing is enabled, the contents of this "staging area" / "scratch pad" / WIP spot will be used
# as the relevant contents for process model that the user wants to publish. # as the relevant contents for process model that the user wants to publish.
SPIFFWORKFLOW_BACKEND_GIT_SOURCE_BRANCH = environ.get( SPIFFWORKFLOW_BACKEND_GIT_SOURCE_BRANCH = environ.get("SPIFFWORKFLOW_BACKEND_GIT_SOURCE_BRANCH")
"SPIFFWORKFLOW_BACKEND_GIT_SOURCE_BRANCH" SPIFFWORKFLOW_BACKEND_GIT_PUBLISH_CLONE_URL = environ.get("SPIFFWORKFLOW_BACKEND_GIT_PUBLISH_CLONE_URL")
)
SPIFFWORKFLOW_BACKEND_GIT_PUBLISH_CLONE_URL = environ.get(
"SPIFFWORKFLOW_BACKEND_GIT_PUBLISH_CLONE_URL"
)
SPIFFWORKFLOW_BACKEND_GIT_COMMIT_ON_SAVE = ( SPIFFWORKFLOW_BACKEND_GIT_COMMIT_ON_SAVE = (
environ.get("SPIFFWORKFLOW_BACKEND_GIT_COMMIT_ON_SAVE", default="false") == "true" environ.get("SPIFFWORKFLOW_BACKEND_GIT_COMMIT_ON_SAVE", default="false") == "true"
) )
SPIFFWORKFLOW_BACKEND_GIT_USERNAME = environ.get("SPIFFWORKFLOW_BACKEND_GIT_USERNAME") SPIFFWORKFLOW_BACKEND_GIT_USERNAME = environ.get("SPIFFWORKFLOW_BACKEND_GIT_USERNAME")
SPIFFWORKFLOW_BACKEND_GIT_USER_EMAIL = environ.get( SPIFFWORKFLOW_BACKEND_GIT_USER_EMAIL = environ.get("SPIFFWORKFLOW_BACKEND_GIT_USER_EMAIL")
"SPIFFWORKFLOW_BACKEND_GIT_USER_EMAIL" SPIFFWORKFLOW_BACKEND_GITHUB_WEBHOOK_SECRET = environ.get("SPIFFWORKFLOW_BACKEND_GITHUB_WEBHOOK_SECRET", default=None)
)
SPIFFWORKFLOW_BACKEND_GITHUB_WEBHOOK_SECRET = environ.get(
"SPIFFWORKFLOW_BACKEND_GITHUB_WEBHOOK_SECRET", default=None
)
SPIFFWORKFLOW_BACKEND_GIT_SSH_PRIVATE_KEY_PATH = environ.get( SPIFFWORKFLOW_BACKEND_GIT_SSH_PRIVATE_KEY_PATH = environ.get(
"SPIFFWORKFLOW_BACKEND_GIT_SSH_PRIVATE_KEY_PATH", default=None "SPIFFWORKFLOW_BACKEND_GIT_SSH_PRIVATE_KEY_PATH", default=None
) )
@@ -129,25 +123,25 @@ SPIFFWORKFLOW_BACKEND_DATABASE_TYPE = environ.get(
"SPIFFWORKFLOW_BACKEND_DATABASE_TYPE", default="mysql" "SPIFFWORKFLOW_BACKEND_DATABASE_TYPE", default="mysql"
) # can also be sqlite, postgres ) # can also be sqlite, postgres
# Override above with specific sqlalchemy connection string. # Override above with specific sqlalchemy connection string.
SPIFFWORKFLOW_BACKEND_DATABASE_URI = environ.get( SPIFFWORKFLOW_BACKEND_DATABASE_URI = environ.get("SPIFFWORKFLOW_BACKEND_DATABASE_URI", default=None)
"SPIFFWORKFLOW_BACKEND_DATABASE_URI", default=None
)
SPIFFWORKFLOW_BACKEND_SYSTEM_NOTIFICATION_PROCESS_MODEL_MESSAGE_ID = environ.get( SPIFFWORKFLOW_BACKEND_SYSTEM_NOTIFICATION_PROCESS_MODEL_MESSAGE_ID = environ.get(
"SPIFFWORKFLOW_BACKEND_SYSTEM_NOTIFICATION_PROCESS_MODEL_MESSAGE_ID", "SPIFFWORKFLOW_BACKEND_SYSTEM_NOTIFICATION_PROCESS_MODEL_MESSAGE_ID",
default="Message_SystemMessageNotification", default="Message_SystemMessageNotification",
) )
SPIFFWORKFLOW_BACKEND_ALLOW_CONFISCATING_LOCK_AFTER_SECONDS = int( SPIFFWORKFLOW_BACKEND_ALLOW_CONFISCATING_LOCK_AFTER_SECONDS = int(
environ.get( environ.get("SPIFFWORKFLOW_BACKEND_ALLOW_CONFISCATING_LOCK_AFTER_SECONDS", default="600")
"SPIFFWORKFLOW_BACKEND_ALLOW_CONFISCATING_LOCK_AFTER_SECONDS", default="600"
)
) )
SPIFFWORKFLOW_BACKEND_DEFAULT_USER_GROUP = environ.get( SPIFFWORKFLOW_BACKEND_DEFAULT_USER_GROUP = environ.get("SPIFFWORKFLOW_BACKEND_DEFAULT_USER_GROUP", default="everybody")
"SPIFFWORKFLOW_BACKEND_DEFAULT_USER_GROUP", default="everybody"
SPIFFWORKFLOW_BACKEND_ENGINE_STEP_DEFAULT_STRATEGY_BACKGROUND = environ.get(
"SPIFFWORKFLOW_BACKEND_ENGINE_STEP_DEFAULT_STRATEGY_BACKGROUND", default="greedy"
)
SPIFFWORKFLOW_BACKEND_ENGINE_STEP_DEFAULT_STRATEGY_WEB = environ.get(
"SPIFFWORKFLOW_BACKEND_ENGINE_STEP_DEFAULT_STRATEGY_WEB", default="greedy"
) )
# this is only used in CI. use SPIFFWORKFLOW_BACKEND_DATABASE_URI instead for real configuration # this is only used in CI. use SPIFFWORKFLOW_BACKEND_DATABASE_URI instead for real configuration
SPIFFWORKFLOW_BACKEND_DATABASE_PASSWORD = environ.get( SPIFFWORKFLOW_BACKEND_DATABASE_PASSWORD = environ.get("SPIFFWORKFLOW_BACKEND_DATABASE_PASSWORD", default=None)
"SPIFFWORKFLOW_BACKEND_DATABASE_PASSWORD", default=None
)
View File
@@ -10,6 +10,5 @@ SPIFFWORKFLOW_BACKEND_PERMISSIONS_FILE_NAME = environ.get(
) )
SPIFFWORKFLOW_BACKEND_RUN_BACKGROUND_SCHEDULER = ( SPIFFWORKFLOW_BACKEND_RUN_BACKGROUND_SCHEDULER = (
environ.get("SPIFFWORKFLOW_BACKEND_RUN_BACKGROUND_SCHEDULER", default="false") environ.get("SPIFFWORKFLOW_BACKEND_RUN_BACKGROUND_SCHEDULER", default="false") == "true"
== "true"
) )
View File
@@ -5,19 +5,14 @@ SPIFFWORKFLOW_BACKEND_PERMISSIONS_FILE_NAME = environ.get(
"SPIFFWORKFLOW_BACKEND_PERMISSIONS_FILE_NAME", default="local_development.yml" "SPIFFWORKFLOW_BACKEND_PERMISSIONS_FILE_NAME", default="local_development.yml"
) )
SPIFFWORKFLOW_BACKEND_LOG_LEVEL = environ.get( SPIFFWORKFLOW_BACKEND_LOG_LEVEL = environ.get("SPIFFWORKFLOW_BACKEND_LOG_LEVEL", default="debug")
"SPIFFWORKFLOW_BACKEND_LOG_LEVEL", default="debug"
)
SPIFFWORKFLOW_BACKEND_RUN_BACKGROUND_SCHEDULER = ( SPIFFWORKFLOW_BACKEND_RUN_BACKGROUND_SCHEDULER = (
environ.get("SPIFFWORKFLOW_BACKEND_RUN_BACKGROUND_SCHEDULER", default="false") environ.get("SPIFFWORKFLOW_BACKEND_RUN_BACKGROUND_SCHEDULER", default="false") == "true"
== "true"
) )
SPIFFWORKFLOW_BACKEND_GIT_PUBLISH_CLONE_URL = environ.get( SPIFFWORKFLOW_BACKEND_GIT_PUBLISH_CLONE_URL = environ.get(
"SPIFFWORKFLOW_BACKEND_GIT_PUBLISH_CLONE_URL", "SPIFFWORKFLOW_BACKEND_GIT_PUBLISH_CLONE_URL",
default="https://github.com/sartography/sample-process-models.git", default="https://github.com/sartography/sample-process-models.git",
) )
SPIFFWORKFLOW_BACKEND_GIT_USERNAME = "sartography-automated-committer" SPIFFWORKFLOW_BACKEND_GIT_USERNAME = "sartography-automated-committer"
SPIFFWORKFLOW_BACKEND_GIT_USER_EMAIL = ( SPIFFWORKFLOW_BACKEND_GIT_USER_EMAIL = f"{SPIFFWORKFLOW_BACKEND_GIT_USERNAME}@users.noreply.github.com"
f"{SPIFFWORKFLOW_BACKEND_GIT_USERNAME}@users.noreply.github.com"
)
View File
@@ -5,10 +5,6 @@ SPIFFWORKFLOW_BACKEND_PERMISSIONS_FILE_NAME = environ.get(
"SPIFFWORKFLOW_BACKEND_PERMISSIONS_FILE_NAME", default="qa1.yml" "SPIFFWORKFLOW_BACKEND_PERMISSIONS_FILE_NAME", default="qa1.yml"
) )
SPIFFWORKFLOW_BACKEND_URL_FOR_FRONTEND = "https://qa2.spiffworkflow.org" SPIFFWORKFLOW_BACKEND_URL_FOR_FRONTEND = "https://qa2.spiffworkflow.org"
SPIFFWORKFLOW_BACKEND_OPEN_ID_SERVER_URL = ( SPIFFWORKFLOW_BACKEND_OPEN_ID_SERVER_URL = "https://qa2.spiffworkflow.org/keycloak/realms/spiffworkflow"
"https://qa2.spiffworkflow.org/keycloak/realms/spiffworkflow"
)
SPIFFWORKFLOW_BACKEND_URL = "https://qa2.spiffworkflow.org/api" SPIFFWORKFLOW_BACKEND_URL = "https://qa2.spiffworkflow.org/api"
SPIFFWORKFLOW_BACKEND_CONNECTOR_PROXY_URL = ( SPIFFWORKFLOW_BACKEND_CONNECTOR_PROXY_URL = "https://qa2.spiffworkflow.org/connector-proxy"
"https://qa2.spiffworkflow.org/connector-proxy"
)

View File

@@ -3,12 +3,9 @@ from os import environ
 environment_identifier_for_this_config_file_only = environ["SPIFFWORKFLOW_BACKEND_ENV"]
 SPIFFWORKFLOW_BACKEND_OPEN_ID_SERVER_URL = (
-    f"https://keycloak.{environment_identifier_for_this_config_file_only}"
-    ".spiffworkflow.org/realms/sartography"
+    f"https://keycloak.{environment_identifier_for_this_config_file_only}.spiffworkflow.org/realms/sartography"
 )
-SPIFFWORKFLOW_BACKEND_GIT_SOURCE_BRANCH = environ.get(
-    "SPIFFWORKFLOW_BACKEND_GIT_SOURCE_BRANCH", default="main"
-)
+SPIFFWORKFLOW_BACKEND_GIT_SOURCE_BRANCH = environ.get("SPIFFWORKFLOW_BACKEND_GIT_SOURCE_BRANCH", default="main")
 SPIFFWORKFLOW_BACKEND_GIT_PUBLISH_CLONE_URL = environ.get(
     "SPIFFWORKFLOW_BACKEND_GIT_PUBLISH_CLONE_URL",
     default="https://github.com/sartography/sartography-process-models.git",

View File

@@ -1,9 +1,7 @@
 """Staging."""
 from os import environ

-SPIFFWORKFLOW_BACKEND_GIT_SOURCE_BRANCH = environ.get(
-    "SPIFFWORKFLOW_BACKEND_GIT_SOURCE_BRANCH", default="staging"
-)
+SPIFFWORKFLOW_BACKEND_GIT_SOURCE_BRANCH = environ.get("SPIFFWORKFLOW_BACKEND_GIT_SOURCE_BRANCH", default="staging")
 SPIFFWORKFLOW_BACKEND_GIT_PUBLISH_TARGET_BRANCH = environ.get(
     "SPIFFWORKFLOW_BACKEND_GIT_PUBLISH_TARGET_BRANCH", default="main"
 )

View File

@@ -6,36 +6,29 @@ environment_identifier_for_this_config_file_only = environ["SPIFFWORKFLOW_BACKEND_ENV"]
 SPIFFWORKFLOW_BACKEND_GIT_COMMIT_ON_SAVE = True
 SPIFFWORKFLOW_BACKEND_GIT_USERNAME = "sartography-automated-committer"
-SPIFFWORKFLOW_BACKEND_GIT_USER_EMAIL = (
-    f"{SPIFFWORKFLOW_BACKEND_GIT_USERNAME}@users.noreply.github.com"
-)
+SPIFFWORKFLOW_BACKEND_GIT_USER_EMAIL = f"{SPIFFWORKFLOW_BACKEND_GIT_USERNAME}@users.noreply.github.com"
 SPIFFWORKFLOW_BACKEND_PERMISSIONS_FILE_NAME = environ.get(
     "SPIFFWORKFLOW_BACKEND_PERMISSIONS_FILE_NAME",
     default="terraform_deployed_environment.yml",
 )
 SPIFFWORKFLOW_BACKEND_RUN_BACKGROUND_SCHEDULER = (
-    environ.get("SPIFFWORKFLOW_BACKEND_RUN_BACKGROUND_SCHEDULER", default="false")
-    == "true"
+    environ.get("SPIFFWORKFLOW_BACKEND_RUN_BACKGROUND_SCHEDULER", default="false") == "true"
 )
 SPIFFWORKFLOW_BACKEND_OPEN_ID_SERVER_URL = environ.get(
     "SPIFFWORKFLOW_BACKEND_OPEN_ID_SERVER_URL",
     default=(
-        f"https://keycloak.{environment_identifier_for_this_config_file_only}"
-        ".spiffworkflow.org/realms/spiffworkflow"
+        f"https://keycloak.{environment_identifier_for_this_config_file_only}.spiffworkflow.org/realms/spiffworkflow"
     ),
 )
 SPIFFWORKFLOW_BACKEND_URL_FOR_FRONTEND = (
     f"https://{environment_identifier_for_this_config_file_only}.spiffworkflow.org"
 )
-SPIFFWORKFLOW_BACKEND_URL = (
-    f"https://api.{environment_identifier_for_this_config_file_only}.spiffworkflow.org"
-)
+SPIFFWORKFLOW_BACKEND_URL = f"https://api.{environment_identifier_for_this_config_file_only}.spiffworkflow.org"
 SPIFFWORKFLOW_BACKEND_CONNECTOR_PROXY_URL = (
-    f"https://connector-proxy.{environment_identifier_for_this_config_file_only}"
-    ".spiffworkflow.org"
+    f"https://connector-proxy.{environment_identifier_for_this_config_file_only}.spiffworkflow.org"
 )
 SPIFFWORKFLOW_BACKEND_GIT_PUBLISH_CLONE_URL = environ.get(
     "SPIFFWORKFLOW_BACKEND_GIT_PUBLISH_CLONE_URL",

View File

@@ -4,17 +4,13 @@ from os import environ

 TESTING = True
 SECRET_KEY = "the_secret_key"
-SPIFFWORKFLOW_BACKEND_LOG_TO_FILE = (
-    environ.get("SPIFFWORKFLOW_BACKEND_LOG_TO_FILE", default="true") == "true"
-)
+SPIFFWORKFLOW_BACKEND_LOG_TO_FILE = environ.get("SPIFFWORKFLOW_BACKEND_LOG_TO_FILE", default="true") == "true"
 SPIFFWORKFLOW_BACKEND_PERMISSIONS_FILE_NAME = environ.get(
     "SPIFFWORKFLOW_BACKEND_PERMISSIONS_FILE_NAME", default="unit_testing.yml"
 )
-SPIFFWORKFLOW_BACKEND_LOG_LEVEL = environ.get(
-    "SPIFFWORKFLOW_BACKEND_LOG_LEVEL", default="debug"
-)
+SPIFFWORKFLOW_BACKEND_LOG_LEVEL = environ.get("SPIFFWORKFLOW_BACKEND_LOG_LEVEL", default="debug")
 SPIFFWORKFLOW_BACKEND_GIT_COMMIT_ON_SAVE = False

 # NOTE: set this here since nox shoves tests and src code to

View File

@@ -202,20 +202,13 @@ def handle_exception(exception: Exception) -> flask.wrappers.Response:
     if isinstance(exception, ApiError):
         current_app.logger.info(
-            f"Sending ApiError exception to sentry: {exception} with error code"
-            f" {exception.error_code}"
+            f"Sending ApiError exception to sentry: {exception} with error code {exception.error_code}"
         )

-    organization_slug = current_app.config.get(
-        "SPIFFWORKFLOW_BACKEND_SENTRY_ORGANIZATION_SLUG"
-    )
-    project_slug = current_app.config.get(
-        "SPIFFWORKFLOW_BACKEND_SENTRY_PROJECT_SLUG"
-    )
+    organization_slug = current_app.config.get("SPIFFWORKFLOW_BACKEND_SENTRY_ORGANIZATION_SLUG")
+    project_slug = current_app.config.get("SPIFFWORKFLOW_BACKEND_SENTRY_PROJECT_SLUG")
     if organization_slug and project_slug:
-        sentry_link = (
-            f"https://sentry.io/{organization_slug}/{project_slug}/events/{id}"
-        )
+        sentry_link = f"https://sentry.io/{organization_slug}/{project_slug}/events/{id}"

     # !!!NOTE!!!: do this after sentry stuff since calling logger.exception
     # seems to break the sentry sdk context where we no longer get back

View File

@@ -2,6 +2,7 @@
 import time

 import sqlalchemy
+from sqlalchemy.sql import text

 from spiffworkflow_backend.models.db import db
@@ -9,7 +10,7 @@ from spiffworkflow_backend.models.db import db
 def try_to_connect(start_time: float) -> None:
     """Try to connect."""
     try:
-        db.first_or_404("select 1")  # type: ignore
+        db.first_or_404(text("select 1"))  # type: ignore
     except sqlalchemy.exc.DatabaseError as exception:
         if time.time() - start_time > 15:
             raise exception
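
A sketch of how this helper might be invoked at startup (the import path is an assumption for illustration); try_to_connect retries internally and re-raises the DatabaseError once the 15-second window is exhausted:

import time

from spiffworkflow_backend.helpers.db_helper import try_to_connect  # assumed module path

# block until the database accepts connections, or fail after ~15 seconds
try_to_connect(time.time())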

View File

@@ -41,10 +41,6 @@ from spiffworkflow_backend.models.process_instance_report import (
 )  # noqa: F401
 from spiffworkflow_backend.models.refresh_token import RefreshTokenModel  # noqa: F401
 from spiffworkflow_backend.models.secret_model import SecretModel  # noqa: F401
-from spiffworkflow_backend.models.spiff_logging import SpiffLoggingModel  # noqa: F401
-from spiffworkflow_backend.models.spiff_step_details import (
-    SpiffStepDetailsModel,
-)  # noqa: F401
 from spiffworkflow_backend.models.user import UserModel  # noqa: F401
 from spiffworkflow_backend.models.group import GroupModel  # noqa: F401
 from spiffworkflow_backend.models.process_instance_metadata import (
@@ -66,5 +62,8 @@ from spiffworkflow_backend.models.json_data import JsonDataModel  # noqa: F401
 from spiffworkflow_backend.models.bpmn_process_definition_relationship import (
     BpmnProcessDefinitionRelationshipModel,
 )  # noqa: F401
+from spiffworkflow_backend.models.process_instance_queue import (
+    ProcessInstanceQueueModel,
+)  # noqa: F401

 add_listeners()
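
The # noqa: F401 imports above look unused, but importing each module is what registers its model with SQLAlchemy's declarative metadata; a table whose module is never imported here would be invisible to table creation and migrations. A hedged sketch of the effect (helper name invented):

from spiffworkflow_backend.models.db import db


def create_all_tables() -> None:  # invented helper, illustration only
    # inside an application context, create_all sees every model imported
    # above, including the newly added ProcessInstanceQueueModel
    db.create_all()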

View File

@@ -1,31 +1,43 @@
 from __future__ import annotations

+from dataclasses import dataclass
+
 from sqlalchemy import ForeignKey
+from sqlalchemy.orm import relationship
+
+from spiffworkflow_backend.models.bpmn_process_definition import BpmnProcessDefinitionModel

 from spiffworkflow_backend.models.db import db
 from spiffworkflow_backend.models.db import SpiffworkflowBaseDBModel


+class BpmnProcessNotFoundError(Exception):
+    pass
+
+
 # properties_json attributes:
 #   "last_task",  # guid generated by spiff
 #   "root",  # guid generated by spiff
 #   "success",  # boolean
 #   "bpmn_messages",  # if top-level process
 #   "correlations",  # if top-level process
+@dataclass
 class BpmnProcessModel(SpiffworkflowBaseDBModel):
     __tablename__ = "bpmn_process"
     id: int = db.Column(db.Integer, primary_key=True)
-    guid: str | None = db.Column(db.String(36), nullable=True, unique=True, index=True)
+    guid: str | None = db.Column(db.String(36), nullable=True, unique=True)

-    parent_process_id: int | None = db.Column(
-        ForeignKey("bpmn_process.id"), nullable=True
+    bpmn_process_definition_id: int = db.Column(
+        ForeignKey(BpmnProcessDefinitionModel.id), nullable=False, index=True  # type: ignore
     )
+    bpmn_process_definition = relationship(BpmnProcessDefinitionModel)
+    top_level_process_id: int | None = db.Column(ForeignKey("bpmn_process.id"), nullable=True, index=True)
+    direct_parent_process_id: int | None = db.Column(ForeignKey("bpmn_process.id"), nullable=True, index=True)

     properties_json: dict = db.Column(db.JSON, nullable=False)
     json_data_hash: str = db.Column(db.String(255), nullable=False, index=True)

-    # subprocess or top_level_process
-    # process_type: str = db.Column(db.String(30), nullable=False)
+    tasks = relationship("TaskModel", back_populates="bpmn_process", cascade="delete")  # type: ignore

     # FIXME: find out how to set this but it'd be cool
     start_in_seconds: float = db.Column(db.DECIMAL(17, 6))
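
The two parent columns added above serve different lookups: direct_parent_process_id points one level up, while top_level_process_id jumps straight to the root so queries need not walk the chain. A toy illustration with invented values:

from spiffworkflow_backend.models.bpmn_process import BpmnProcessModel

top = BpmnProcessModel(properties_json={}, json_data_hash="aaa")
sub = BpmnProcessModel(guid="sub-guid", properties_json={}, json_data_hash="bbb")
nested = BpmnProcessModel(guid="nested-guid", properties_json={}, json_data_hash="ccc")
# once flushed (ids hypothetical): nested.direct_parent_process_id would point
# at sub, but both sub and nested share top_level_process_id == top.id, so all
# descendants of one top-level process are a single indexed filter:
# BpmnProcessModel.query.filter_by(top_level_process_id=top.id)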

View File

@@ -1,5 +1,7 @@
 from __future__ import annotations

+from dataclasses import dataclass
+
 from spiffworkflow_backend.models.db import db
 from spiffworkflow_backend.models.db import SpiffworkflowBaseDBModel
@@ -10,6 +12,7 @@ from spiffworkflow_backend.models.db import SpiffworkflowBaseDBModel
 #
 # each subprocess will have its own row in this table.
 # there is a join table to link them together: bpmn_process_definition_relationship
+@dataclass
 class BpmnProcessDefinitionModel(SpiffworkflowBaseDBModel):
     __tablename__ = "bpmn_process_definition"
     id: int = db.Column(db.Integer, primary_key=True)
@@ -18,17 +21,13 @@ class BpmnProcessDefinitionModel(SpiffworkflowBaseDBModel):
     # note that a call activity is its own row in this table, with its own hash,
     # and therefore it only gets stored once per version, and can be reused
     # by multiple calling processes.
-    hash: str = db.Column(db.String(255), nullable=False, index=True, unique=True)
+    hash: str = db.Column(db.String(255), nullable=False, unique=True)
     bpmn_identifier: str = db.Column(db.String(255), nullable=False, index=True)
+    bpmn_name: str = db.Column(db.String(255), nullable=True, index=True)

     properties_json: dict = db.Column(db.JSON, nullable=False)

-    # process or subprocess
-    # FIXME: will probably ignore for now since we do not strictly need it
-    # make this nullable false and index it once we actually start using it
-    type: str = db.Column(db.String(32), nullable=True)
-
     # TODO: remove these from process_instance
     bpmn_version_control_type: str = db.Column(db.String(50))
     bpmn_version_control_identifier: str = db.Column(db.String(255))

View File

@@ -1,5 +1,7 @@
 from __future__ import annotations

+from dataclasses import dataclass
+
 from sqlalchemy import ForeignKey
 from sqlalchemy import UniqueConstraint
@@ -10,6 +12,7 @@ from spiffworkflow_backend.models.db import db
 from spiffworkflow_backend.models.db import SpiffworkflowBaseDBModel


+@dataclass
 class BpmnProcessDefinitionRelationshipModel(SpiffworkflowBaseDBModel):
     __tablename__ = "bpmn_process_definition_relationship"
     __table_args__ = (
@@ -22,8 +25,8 @@ class BpmnProcessDefinitionRelationshipModel(SpiffworkflowBaseDBModel):
     id: int = db.Column(db.Integer, primary_key=True)
     bpmn_process_definition_parent_id: int = db.Column(
-        ForeignKey(BpmnProcessDefinitionModel.id), nullable=False  # type: ignore
+        ForeignKey(BpmnProcessDefinitionModel.id), nullable=False, index=True  # type: ignore
     )
     bpmn_process_definition_child_id: int = db.Column(
-        ForeignKey(BpmnProcessDefinitionModel.id), nullable=False  # type: ignore
+        ForeignKey(BpmnProcessDefinitionModel.id), nullable=False, index=True  # type: ignore
     )

View File

@@ -18,7 +18,7 @@ class CorrelationPropertyCache(SpiffworkflowBaseDBModel):
     __tablename__ = "correlation_property_cache"
     id = db.Column(db.Integer, primary_key=True)
-    name: str = db.Column(db.String(50), nullable=False)
-    message_name: str = db.Column(db.String(50), nullable=False)
+    name: str = db.Column(db.String(50), nullable=False, index=True)
+    message_name: str = db.Column(db.String(50), nullable=False, index=True)
     process_model_id: str = db.Column(db.String(255), nullable=False)
     retrieval_expression: str = db.Column(db.String(255))

View File

@@ -39,16 +39,12 @@ class SpiffworkflowBaseDBModel(db.Model):  # type: ignore
             children.append(subclass)
         return result

-    def validate_enum_field(
-        self, key: str, value: Any, enum_variable: enum.EnumMeta
-    ) -> Any:
+    def validate_enum_field(self, key: str, value: Any, enum_variable: enum.EnumMeta) -> Any:
         """Validate_enum_field."""
         try:
             m_type = getattr(enum_variable, value, None)
         except Exception as e:
-            raise ValueError(
-                f"{self.__class__.__name__}: invalid {key}: {value}"
-            ) from e
+            raise ValueError(f"{self.__class__.__name__}: invalid {key}: {value}") from e

         if m_type is None:
             raise ValueError(f"{self.__class__.__name__}: invalid {key}: {value}")
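
The getattr check above reduces to: the incoming string must name a member of the given enum. A runnable sketch with an invented enum (the real callers wire this up through SQLAlchemy's @validates, as ProcessInstanceEventModel does later in this commit):

import enum


class ExampleStatus(enum.Enum):  # invented for illustration
    ready = "ready"
    failed = "failed"


# the core check: a valid value names an enum member, an invalid one does not
assert getattr(ExampleStatus, "ready", None) is not None
assert getattr(ExampleStatus, "bogus", None) is None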

View File

@@ -126,6 +126,4 @@ class FileSchema(Schema):
             "process_model_id",
         ]
         unknown = INCLUDE
-    references = marshmallow.fields.List(
-        marshmallow.fields.Nested("SpecReferenceSchema")
-    )
+    references = marshmallow.fields.List(marshmallow.fields.Nested("SpecReferenceSchema"))

View File

@@ -26,13 +26,11 @@ class GroupModel(SpiffworkflowBaseDBModel):
     __table_args__ = {"extend_existing": True}

     id = db.Column(db.Integer, primary_key=True)
-    name = db.Column(db.String(255))
-    identifier = db.Column(db.String(255))
+    name = db.Column(db.String(255), index=True)
+    identifier = db.Column(db.String(255), index=True)

     user_group_assignments = relationship("UserGroupAssignmentModel", cascade="delete")
-    user_group_assignments_waiting = relationship(  # type: ignore
-        "UserGroupAssignmentWaitingModel", cascade="delete"
-    )
+    user_group_assignments_waiting = relationship("UserGroupAssignmentWaitingModel", cascade="delete")  # type: ignore
     users = relationship(  # type: ignore
         "UserModel",
         viewonly=True,

View File

@@ -12,6 +12,7 @@ from spiffworkflow_backend.models.db import SpiffworkflowBaseDBModel
 from spiffworkflow_backend.models.group import GroupModel
 from spiffworkflow_backend.models.process_instance import ProcessInstanceModel
 from spiffworkflow_backend.models.task import Task
+from spiffworkflow_backend.models.task import TaskModel
 from spiffworkflow_backend.models.user import UserModel
@@ -29,16 +30,14 @@ class HumanTaskModel(SpiffworkflowBaseDBModel):
     id: int = db.Column(db.Integer, primary_key=True)
     process_instance_id: int = db.Column(
-        ForeignKey(ProcessInstanceModel.id), nullable=False  # type: ignore
+        ForeignKey(ProcessInstanceModel.id), nullable=False, index=True  # type: ignore
     )
-    lane_assignment_id: int | None = db.Column(ForeignKey(GroupModel.id))
-    completed_by_user_id: int = db.Column(ForeignKey(UserModel.id), nullable=True)  # type: ignore
+    lane_assignment_id: int | None = db.Column(ForeignKey(GroupModel.id), index=True)
+    completed_by_user_id: int = db.Column(ForeignKey(UserModel.id), nullable=True, index=True)  # type: ignore

-    completed_by_user = relationship(
-        "UserModel", foreign_keys=[completed_by_user_id], viewonly=True
-    )
+    completed_by_user = relationship("UserModel", foreign_keys=[completed_by_user_id], viewonly=True)

-    actual_owner_id: int = db.Column(ForeignKey(UserModel.id))  # type: ignore
+    actual_owner_id: int = db.Column(ForeignKey(UserModel.id), index=True)  # type: ignore
     # actual_owner: RelationshipProperty[UserModel] = relationship(UserModel)

     form_file_name: str | None = db.Column(db.String(50))
@@ -47,6 +46,8 @@ class HumanTaskModel(SpiffworkflowBaseDBModel):
     updated_at_in_seconds: int = db.Column(db.Integer)
     created_at_in_seconds: int = db.Column(db.Integer)

+    # task_id came first which is why it's a string and task_model_id is the int and foreignkey
+    task_model_id: int = db.Column(ForeignKey(TaskModel.id), nullable=True, index=True)  # type: ignore
     task_id: str = db.Column(db.String(50))
     task_name: str = db.Column(db.String(255))
     task_title: str = db.Column(db.String(50))

View File

@@ -27,9 +27,7 @@ class HumanTaskUserModel(SpiffworkflowBaseDBModel):
     )

     id = db.Column(db.Integer, primary_key=True)
-    human_task_id = db.Column(
-        ForeignKey(HumanTaskModel.id), nullable=False, index=True  # type: ignore
-    )
+    human_task_id = db.Column(ForeignKey(HumanTaskModel.id), nullable=False, index=True)  # type: ignore
     user_id = db.Column(ForeignKey(UserModel.id), nullable=False, index=True)  # type: ignore
-    human_task = relationship(HumanTaskModel)
+    human_task = relationship(HumanTaskModel, back_populates="human_task_users")

View File

@@ -4,6 +4,10 @@ from spiffworkflow_backend.models.db import db
 from spiffworkflow_backend.models.db import SpiffworkflowBaseDBModel


+class JsonDataModelNotFoundError(Exception):
+    pass
+
+
 # delta algorithm <- just to save it for when we want to try to implement it:
 #   a = {"hey": { "hey2": 2, "hey3": 3, "hey6": 7 }, "hey30": 3, "hey40": 4}
 #   b = {"hey": { "hey2": 4, "hey5": 3 }, "hey20": 2, "hey30": 3}
@@ -25,5 +29,16 @@ class JsonDataModel(SpiffworkflowBaseDBModel):
     id: int = db.Column(db.Integer, primary_key=True)

     # this is a sha256 hash of spec and serializer_version
-    hash: str = db.Column(db.String(255), nullable=False, index=True, unique=True)
+    hash: str = db.Column(db.String(255), nullable=False, unique=True)
     data: dict = db.Column(db.JSON, nullable=False)
+
+    @classmethod
+    def find_object_by_hash(cls, hash: str) -> JsonDataModel:
+        json_data_model: JsonDataModel | None = JsonDataModel.query.filter_by(hash=hash).first()
+        if json_data_model is None:
+            raise JsonDataModelNotFoundError(f"Could not find a json data model entry with hash: {hash}")
+        return json_data_model
+
+    @classmethod
+    def find_data_dict_by_hash(cls, hash: str) -> dict:
+        return cls.find_object_by_hash(hash).data
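
The unique hash column makes this table content-addressed: identical JSON payloads collapse to one row, and other tables point at it via columns like json_data_hash. A sketch of the idea, assuming the digest is a sha256 over a canonical serialization (the exact serialization used by the backend lives elsewhere and may differ):

import hashlib
import json

data = {"approved": True, "amount": 3}
digest = hashlib.sha256(json.dumps(data, sort_keys=True).encode("utf8")).hexdigest()
# insert JsonDataModel(hash=digest, data=data) only if no row with this hash
# exists; JsonDataModel.find_data_dict_by_hash(digest) recovers the dict later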

View File

@@ -47,15 +47,15 @@ class MessageInstanceModel(SpiffworkflowBaseDBModel):
     __tablename__ = "message_instance"

     id: int = db.Column(db.Integer, primary_key=True)
-    process_instance_id: int = db.Column(ForeignKey(ProcessInstanceModel.id), nullable=True)  # type: ignore
+    process_instance_id: int = db.Column(ForeignKey(ProcessInstanceModel.id), nullable=True, index=True)  # type: ignore
     name: str = db.Column(db.String(255))
     message_type: str = db.Column(db.String(20), nullable=False)
     # Only Send Messages have a payload
     payload: dict = db.Column(db.JSON)
     # The correlation keys of the process at the time the message was created.
     correlation_keys: dict = db.Column(db.JSON)
-    status: str = db.Column(db.String(20), nullable=False, default="ready")
-    user_id: int = db.Column(ForeignKey(UserModel.id), nullable=True)  # type: ignore
+    status: str = db.Column(db.String(20), nullable=False, default="ready", index=True)
+    user_id: int = db.Column(ForeignKey(UserModel.id), nullable=True, index=True)  # type: ignore
     user = relationship("UserModel")
     counterpart_id: int = db.Column(
         db.Integer
@@ -63,9 +63,7 @@ class MessageInstanceModel(SpiffworkflowBaseDBModel):
     failure_cause: str = db.Column(db.Text())
     updated_at_in_seconds: int = db.Column(db.Integer)
     created_at_in_seconds: int = db.Column(db.Integer)
-    correlation_rules = relationship(
-        "MessageInstanceCorrelationRuleModel", back_populates="message_instance"
-    )
+    correlation_rules = relationship("MessageInstanceCorrelationRuleModel", back_populates="message_instance")

     @validates("message_type")
     def validate_message_type(self, key: str, value: Any) -> Any:
@@ -94,10 +92,7 @@ class MessageInstanceModel(SpiffworkflowBaseDBModel):
             return False
         if not self.is_receive():
             return False
-        if (
-            isinstance(self.correlation_keys, dict)
-            and self.correlation_keys == other.correlation_keys
-        ):
+        if isinstance(self.correlation_keys, dict) and self.correlation_keys == other.correlation_keys:
             # We know we have a match, and we can just return if we don't have to figure out the key
             return True
@@ -107,9 +102,7 @@ class MessageInstanceModel(SpiffworkflowBaseDBModel):
         # Loop over the receives' correlation keys - if any of the keys fully match, then we match.
         for expected_values in self.correlation_keys.values():
-            if self.payload_matches_expected_values(
-                other.payload, expected_values, expression_engine
-            ):
+            if self.payload_matches_expected_values(other.payload, expected_values, expression_engine):
                 return True
         return False
@@ -128,23 +121,17 @@ class MessageInstanceModel(SpiffworkflowBaseDBModel):
         """Compares the payload of a 'send' message against a single correlation key's expected values."""
         for correlation_key in self.correlation_rules:
             expected_value = expected_values.get(correlation_key.name, None)
-            if (
-                expected_value is None
-            ):  # This key is not required for this instance to match.
+            if expected_value is None:  # This key is not required for this instance to match.
                 continue
             try:
-                result = expression_engine._evaluate(
-                    correlation_key.retrieval_expression, payload
-                )
+                result = expression_engine._evaluate(correlation_key.retrieval_expression, payload)
             except Exception as e:
                 # the failure of a payload evaluation may not mean that matches for these
                 # message instances can't happen with other messages. So don't error up.
                 # fixme: Perhaps log some sort of error.
                 current_app.logger.warning(
-                    "Error evaluating correlation key when comparing send and receive"
-                    " messages."
-                    + f"Expression {correlation_key.retrieval_expression} failed with"
-                    " the error "
+                    "Error evaluating correlation key when comparing send and receive messages."
+                    + f"Expression {correlation_key.retrieval_expression} failed with the error "
                    + str(e)
                 )
                 return False
@@ -168,7 +155,4 @@ def ensure_failure_cause_is_set_if_message_instance_failed(
     for instance in session.new:
         if isinstance(instance, MessageInstanceModel):
             if instance.status == "failed" and instance.failure_cause is None:
-                raise ValueError(
-                    f"{instance.__class__.__name__}: failure_cause must be set if"
-                    " status is failed"
-                )
+                raise ValueError(f"{instance.__class__.__name__}: failure_cause must be set if status is failed")
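
To make the matching rules above concrete, here is a toy walk-through with invented names, using Python's builtin eval as a stand-in for expression_engine._evaluate:

# a receive instance expects a specific invoice number for its correlation key
expected_values = {"invoice_number": 1001}

# the send message's payload, and the rule's retrieval_expression
payload = {"invoice": {"number": 1001, "total": 25.0}}
retrieval_expression = "invoice['number']"

# evaluate the expression against the payload; the key matches only if the
# retrieved value equals the expected one
result = eval(retrieval_expression, {}, payload)  # stand-in for expression_engine._evaluate
assert result == expected_values["invoice_number"]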

View File

@@ -29,13 +29,9 @@ class MessageInstanceCorrelationRuleModel(SpiffworkflowBaseDBModel):
     )

     id = db.Column(db.Integer, primary_key=True)
-    message_instance_id = db.Column(
-        ForeignKey(MessageInstanceModel.id), nullable=False, index=True  # type: ignore
-    )
-    name: str = db.Column(db.String(50), nullable=False)
+    message_instance_id = db.Column(ForeignKey(MessageInstanceModel.id), nullable=False, index=True)  # type: ignore
+    name: str = db.Column(db.String(50), nullable=False, index=True)
     retrieval_expression: str = db.Column(db.String(255))
     updated_at_in_seconds: int = db.Column(db.Integer)
     created_at_in_seconds: int = db.Column(db.Integer)
-    message_instance = relationship(
-        "MessageInstanceModel", back_populates="correlation_rules"
-    )
+    message_instance = relationship("MessageInstanceModel", back_populates="correlation_rules")

View File

@@ -9,7 +9,7 @@ class MessageTriggerableProcessModel(SpiffworkflowBaseDBModel):
     __tablename__ = "message_triggerable_process_model"

     id = db.Column(db.Integer, primary_key=True)
-    message_name: str = db.Column(db.String(255))
+    message_name: str = db.Column(db.String(255), index=True)
     process_model_identifier: str = db.Column(db.String(50), nullable=False, index=True)
     updated_at_in_seconds: int = db.Column(db.Integer)
     created_at_in_seconds: int = db.Column(db.Integer)

View File

@@ -46,10 +46,8 @@ class PermissionAssignmentModel(SpiffworkflowBaseDBModel):
         ),
     )
     id = db.Column(db.Integer, primary_key=True)
-    principal_id = db.Column(ForeignKey(PrincipalModel.id), nullable=False)
-    permission_target_id = db.Column(
-        ForeignKey(PermissionTargetModel.id), nullable=False  # type: ignore
-    )
+    principal_id = db.Column(ForeignKey(PrincipalModel.id), nullable=False, index=True)
+    permission_target_id = db.Column(ForeignKey(PermissionTargetModel.id), nullable=False, index=True)  # type: ignore
     grant_type = db.Column(db.String(50), nullable=False)
     permission = db.Column(db.String(50), nullable=False)

View File

@@ -35,7 +35,5 @@ class PermissionTargetModel(SpiffworkflowBaseDBModel):
     def validate_uri(self, key: str, value: str) -> str:
         """Validate_uri."""
         if re.search(r"%.", value):
-            raise InvalidPermissionTargetUriError(
-                f"Wildcard must appear at end: {value}"
-            )
+            raise InvalidPermissionTargetUriError(f"Wildcard must appear at end: {value}")
         return value

View File

@@ -26,9 +26,7 @@ class ProcessGroup:
     description: str | None = None
     display_order: int | None = 0
     admin: bool | None = False
-    process_models: list[ProcessModelInfo] = field(
-        default_factory=list[ProcessModelInfo]
-    )
+    process_models: list[ProcessModelInfo] = field(default_factory=list[ProcessModelInfo])
     process_groups: list[ProcessGroup] = field(default_factory=list["ProcessGroup"])
     parent_groups: list[ProcessGroupLite] | None = None
@@ -74,17 +72,13 @@ class ProcessGroupSchema(Schema):
         ]

     process_models = marshmallow.fields.List(
-        marshmallow.fields.Nested(
-            "ProcessModelInfoSchema", dump_only=True, required=False
-        )
+        marshmallow.fields.Nested("ProcessModelInfoSchema", dump_only=True, required=False)
     )
     process_groups = marshmallow.fields.List(
         marshmallow.fields.Nested("ProcessGroupSchema", dump_only=True, required=False)
     )

     @post_load
-    def make_process_group(
-        self, data: dict[str, str | bool | int], **kwargs: dict
-    ) -> ProcessGroup:
+    def make_process_group(self, data: dict[str, str | bool | int], **kwargs: dict) -> ProcessGroup:
         """Make_process_group."""
         return ProcessGroup(**data)  # type: ignore

View File

@@ -10,7 +10,6 @@ from marshmallow import Schema
 from marshmallow_enum import EnumField  # type: ignore
 from SpiffWorkflow.util.deep_merge import DeepMerge  # type: ignore
 from sqlalchemy import ForeignKey
-from sqlalchemy.orm import deferred
 from sqlalchemy.orm import relationship
 from sqlalchemy.orm import validates
@@ -54,32 +53,28 @@ class ProcessInstanceModel(SpiffworkflowBaseDBModel):
     """ProcessInstanceModel."""

     __tablename__ = "process_instance"
+    __allow_unmapped__ = True
     id: int = db.Column(db.Integer, primary_key=True)
-    process_model_identifier: str = db.Column(
-        db.String(255), nullable=False, index=True
-    )
-    process_model_display_name: str = db.Column(
-        db.String(255), nullable=False, index=True
-    )
-    process_initiator_id: int = db.Column(ForeignKey(UserModel.id), nullable=False)  # type: ignore
+    process_model_identifier: str = db.Column(db.String(255), nullable=False, index=True)
+    process_model_display_name: str = db.Column(db.String(255), nullable=False, index=True)
+    process_initiator_id: int = db.Column(ForeignKey(UserModel.id), nullable=False, index=True)  # type: ignore
     process_initiator = relationship("UserModel")

     bpmn_process_definition_id: int | None = db.Column(
-        ForeignKey(BpmnProcessDefinitionModel.id), nullable=True  # type: ignore
+        ForeignKey(BpmnProcessDefinitionModel.id), nullable=True, index=True  # type: ignore
     )
     bpmn_process_definition = relationship(BpmnProcessDefinitionModel)
-    bpmn_process_id: int | None = db.Column(
-        ForeignKey(BpmnProcessModel.id), nullable=True  # type: ignore
-    )
-    bpmn_process = relationship(BpmnProcessModel)
+    bpmn_process_id: int | None = db.Column(ForeignKey(BpmnProcessModel.id), nullable=True, index=True)  # type: ignore
+    bpmn_process = relationship(BpmnProcessModel, cascade="delete")
+    tasks = relationship("TaskModel", cascade="delete")  # type: ignore
+    process_instance_events = relationship("ProcessInstanceEventModel", cascade="delete")  # type: ignore

     spiff_serializer_version = db.Column(db.String(50), nullable=True)

     active_human_tasks = relationship(
         "HumanTaskModel",
         primaryjoin=(
-            "and_(HumanTaskModel.process_instance_id==ProcessInstanceModel.id,"
-            " HumanTaskModel.completed == False)"
+            "and_(HumanTaskModel.process_instance_id==ProcessInstanceModel.id, HumanTaskModel.completed == False)"
         ),
     )  # type: ignore
@@ -93,20 +88,19 @@ class ProcessInstanceModel(SpiffworkflowBaseDBModel):
         "ProcessInstanceMetadataModel",
         cascade="delete",
     )  # type: ignore
+    process_instance_queue = relationship(
+        "ProcessInstanceQueueModel",
+        cascade="delete",
+    )  # type: ignore

-    bpmn_json: str | None = deferred(db.Column(db.JSON))  # type: ignore
-    start_in_seconds: int | None = db.Column(db.Integer)
-    end_in_seconds: int | None = db.Column(db.Integer)
+    start_in_seconds: int | None = db.Column(db.Integer, index=True)
+    end_in_seconds: int | None = db.Column(db.Integer, index=True)
     updated_at_in_seconds: int = db.Column(db.Integer)
     created_at_in_seconds: int = db.Column(db.Integer)
-    status: str = db.Column(db.String(50))
+    status: str = db.Column(db.String(50), index=True)
     bpmn_version_control_type: str = db.Column(db.String(50))
     bpmn_version_control_identifier: str = db.Column(db.String(255))
-    spiff_step: int = db.Column(db.Integer)
-    locked_by: str | None = db.Column(db.String(80))
-    locked_at_in_seconds: int | None = db.Column(db.Integer)

     bpmn_xml_file_contents: str | None = None
     process_model_with_diagram_identifier: str | None = None
@@ -127,7 +121,6 @@ class ProcessInstanceModel(SpiffworkflowBaseDBModel):
             "bpmn_xml_file_contents": self.bpmn_xml_file_contents,
             "bpmn_version_control_identifier": self.bpmn_version_control_identifier,
             "bpmn_version_control_type": self.bpmn_version_control_type,
-            "spiff_step": self.spiff_step,
             "process_initiator_username": self.process_initiator.username,
         }
@@ -244,9 +237,7 @@ class ProcessInstanceApiSchema(Schema):
     next_task = marshmallow.fields.Nested(TaskSchema, dump_only=True, required=False)

     @marshmallow.post_load
-    def make_process_instance(
-        self, data: dict[str, Any], **kwargs: dict
-    ) -> ProcessInstanceApi:
+    def make_process_instance(self, data: dict[str, Any], **kwargs: dict) -> ProcessInstanceApi:
         """Make_process_instance."""
         keys = [
             "id",

View File

@@ -0,0 +1,42 @@
+from __future__ import annotations
+
+from typing import Any
+
+from sqlalchemy import ForeignKey
+from sqlalchemy.orm import validates
+
+from spiffworkflow_backend.helpers.spiff_enum import SpiffEnum
+from spiffworkflow_backend.models.db import db
+from spiffworkflow_backend.models.db import SpiffworkflowBaseDBModel
+from spiffworkflow_backend.models.user import UserModel
+
+
+# event types take the form [SUBJECT]_[PAST_TENSE_VERB] since subject is not always the same.
+class ProcessInstanceEventType(SpiffEnum):
+    process_instance_resumed = "process_instance_resumed"
+    process_instance_rewound_to_task = "process_instance_rewound_to_task"
+    process_instance_suspended = "process_instance_suspended"
+    process_instance_terminated = "process_instance_terminated"
+    task_completed = "task_completed"
+    task_data_edited = "task_data_edited"
+    task_executed_manually = "task_executed_manually"
+    task_failed = "task_failed"
+    task_skipped = "task_skipped"
+
+
+class ProcessInstanceEventModel(SpiffworkflowBaseDBModel):
+    __tablename__ = "process_instance_event"
+    id: int = db.Column(db.Integer, primary_key=True)
+
+    # use task guid so we can bulk insert without worrying about whether or not the task has an id yet
+    task_guid: str | None = db.Column(db.String(36), nullable=True, index=True)
+    process_instance_id: int = db.Column(ForeignKey("process_instance.id"), nullable=False, index=True)
+
+    event_type: str = db.Column(db.String(50), nullable=False, index=True)
+    timestamp: float = db.Column(db.DECIMAL(17, 6), nullable=False, index=True)
+    user_id = db.Column(ForeignKey(UserModel.id), nullable=True, index=True)  # type: ignore
+
+    @validates("event_type")
+    def validate_event_type(self, key: str, value: Any) -> Any:
+        return self.validate_enum_field(key, value, ProcessInstanceEventType)
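
Because events reference tasks by guid rather than by database id, they can be written with a single bulk insert before the task rows themselves have been flushed. A sketch with invented values (the module path is assumed):

import time

from spiffworkflow_backend.models.db import db
from spiffworkflow_backend.models.process_instance_event import (  # assumed path
    ProcessInstanceEventModel,
    ProcessInstanceEventType,
)

events = [
    {
        "task_guid": "1a2b3c4d-0000-0000-0000-000000000001",  # invented guid
        "process_instance_id": 42,  # invented id
        "event_type": ProcessInstanceEventType.task_completed.value,
        "timestamp": time.time(),
    }
]
# one INSERT for the whole batch; no task id lookups required
db.session.bulk_insert_mappings(ProcessInstanceEventModel, events)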

View File

@@ -18,16 +18,14 @@ class ProcessInstanceFileDataModel(SpiffworkflowBaseDBModel):
     id: int = db.Column(db.Integer, primary_key=True)
     process_instance_id: int = db.Column(
-        ForeignKey(ProcessInstanceModel.id), nullable=False  # type: ignore
+        ForeignKey(ProcessInstanceModel.id), nullable=False, index=True  # type: ignore
     )
     identifier: str = db.Column(db.String(255), nullable=False)
     list_index: Optional[int] = db.Column(db.Integer, nullable=True)
     mimetype: str = db.Column(db.String(255), nullable=False)
     filename: str = db.Column(db.String(255), nullable=False)
     # this is not deferred because there is no reason to query this model if you do not want the contents
-    contents: str = db.Column(
-        db.LargeBinary().with_variant(LONGBLOB, "mysql"), nullable=False
-    )
+    contents: str = db.Column(db.LargeBinary().with_variant(LONGBLOB, "mysql"), nullable=False)
     digest: str = db.Column(db.String(64), nullable=False, index=True)

     updated_at_in_seconds: int = db.Column(db.Integer, nullable=False)

View File

@@ -13,15 +13,11 @@ class ProcessInstanceMetadataModel(SpiffworkflowBaseDBModel):
     """ProcessInstanceMetadataModel."""

     __tablename__ = "process_instance_metadata"
-    __table_args__ = (
-        db.UniqueConstraint(
-            "process_instance_id", "key", name="process_instance_metadata_unique"
-        ),
-    )
+    __table_args__ = (db.UniqueConstraint("process_instance_id", "key", name="process_instance_metadata_unique"),)

     id: int = db.Column(db.Integer, primary_key=True)
     process_instance_id: int = db.Column(
-        ForeignKey(ProcessInstanceModel.id), nullable=False  # type: ignore
+        ForeignKey(ProcessInstanceModel.id), nullable=False, index=True  # type: ignore
     )
     key: str = db.Column(db.String(255), nullable=False, index=True)
     value: str = db.Column(db.String(255), nullable=False)

View File

@@ -0,0 +1,28 @@
+"""Process_instance_queue."""
+from dataclasses import dataclass
+from typing import Union
+
+from sqlalchemy import ForeignKey
+
+from spiffworkflow_backend.models.db import db
+from spiffworkflow_backend.models.db import SpiffworkflowBaseDBModel
+from spiffworkflow_backend.models.process_instance import ProcessInstanceModel
+
+
+@dataclass
+class ProcessInstanceQueueModel(SpiffworkflowBaseDBModel):
+    """ProcessInstanceQueueModel."""
+
+    __tablename__ = "process_instance_queue"
+
+    id: int = db.Column(db.Integer, primary_key=True)
+    process_instance_id: int = db.Column(
+        ForeignKey(ProcessInstanceModel.id), unique=True, nullable=False  # type: ignore
+    )
+    run_at_in_seconds: int = db.Column(db.Integer)
+    priority: int = db.Column(db.Integer)
+    locked_by: Union[str, None] = db.Column(db.String(80), index=True, nullable=True)
+    locked_at_in_seconds: Union[int, None] = db.Column(db.Integer, index=True, nullable=True)
+    status: str = db.Column(db.String(50), index=True)
+    updated_at_in_seconds: int = db.Column(db.Integer)
+    created_at_in_seconds: int = db.Column(db.Integer)
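
The locked_by and locked_at_in_seconds columns support a claim-and-confiscate protocol, tying back to SPIFFWORKFLOW_BACKEND_ALLOW_CONFISCATING_LOCK_AFTER_SECONDS from the config above. A sketch of the idea (helper and worker names invented; a real implementation would claim atomically in SQL rather than read-modify-write):

import time


def try_to_claim(queue_entry, worker_id: str, confiscate_after: int = 600) -> bool:
    # queue_entry is a ProcessInstanceQueueModel row
    now = int(time.time())
    held = queue_entry.locked_by is not None
    stale = held and queue_entry.locked_at_in_seconds is not None and now - queue_entry.locked_at_in_seconds > confiscate_after
    if not held or stale:
        # claim (or confiscate) the lock by stamping our identity and time
        queue_entry.locked_by = worker_id
        queue_entry.locked_at_in_seconds = now
        return True
    return False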

View File

@@ -8,7 +8,6 @@ from typing import Optional
 from typing import TypedDict

 from sqlalchemy import ForeignKey
-from sqlalchemy.orm import deferred
 from sqlalchemy.orm import relationship

 from spiffworkflow_backend.exceptions.process_entity_not_found_error import (
@@ -69,7 +68,7 @@ class ProcessInstanceReportModel(SpiffworkflowBaseDBModel):
     id: int = db.Column(db.Integer, primary_key=True)
     identifier: str = db.Column(db.String(50), nullable=False, index=True)
-    report_metadata: dict = deferred(db.Column(db.JSON))  # type: ignore
+    report_metadata: dict = db.Column(db.JSON)
     created_by_id = db.Column(ForeignKey(UserModel.id), nullable=False, index=True)  # type: ignore
     created_by = relationship("UserModel")
     created_at_in_seconds = db.Column(db.Integer)
@@ -187,9 +186,7 @@ class ProcessInstanceReportModel(SpiffworkflowBaseDBModel):
                 {"Header": "priority", "accessor": "priority"},
             ],
             "order": "month asc",
-            "filter_by": [
-                {"field_name": "month", "operator": "equals", "field_value": "3"}
-            ],
+            "filter_by": [{"field_name": "month", "operator": "equals", "field_value": "3"}],
         }

     @classmethod
@@ -233,25 +230,19 @@ class ProcessInstanceReportModel(SpiffworkflowBaseDBModel):
         if substitution_variables is not None:
             for key, value in substitution_variables.items():
                 if isinstance(value, str) or isinstance(value, int):
-                    field_value = str(field_value).replace(
-                        "{{" + key + "}}", str(value)
-                    )
+                    field_value = str(field_value).replace("{{" + key + "}}", str(value))
         return field_value

     # modeled after https://github.com/suyash248/sqlalchemy-json-querybuilder
     # just supports "equals" operator for now.
     # perhaps we will use the database instead of filtering in memory in the future and then we might use this lib directly.
-    def passes_filter(
-        self, process_instance_dict: dict, substitution_variables: dict
-    ) -> bool:
+    def passes_filter(self, process_instance_dict: dict, substitution_variables: dict) -> bool:
         """Passes_filter."""
         if "filter_by" in self.report_metadata:
             for filter_by in self.report_metadata["filter_by"]:
                 field_name = filter_by["field_name"]
                 operator = filter_by["operator"]
-                field_value = self.with_substitutions(
-                    filter_by["field_value"], substitution_variables
-                )
+                field_value = self.with_substitutions(filter_by["field_value"], substitution_variables)
                 if operator == "equals":
                     if str(process_instance_dict.get(field_name)) != str(field_value):
                         return False
@@ -274,9 +265,7 @@ class ProcessInstanceReportModel(SpiffworkflowBaseDBModel):
                 sort_value = process_instance_dict.get(order_by_item)
                 comparison_values.append(Reversor(sort_value))
             else:
-                sort_value = cast(
-                    Optional[str], process_instance_dict.get(order_by_item)
-                )
+                sort_value = cast(Optional[str], process_instance_dict.get(order_by_item))
                 comparison_values.append(sort_value)
         return comparison_values
@@ -307,20 +296,14 @@ class ProcessInstanceReportModel(SpiffworkflowBaseDBModel):
             results = self.order_things(results)

         if "columns" in self.report_metadata:
-            column_keys_to_keep = [
-                c["accessor"] for c in self.report_metadata["columns"]
-            ]
+            column_keys_to_keep = [c["accessor"] for c in self.report_metadata["columns"]]

             pruned_results = []
             for result in results:
                 dict_you_want = {
-                    your_key: result[your_key]
-                    for your_key in column_keys_to_keep
-                    if result.get(your_key)
+                    your_key: result[your_key] for your_key in column_keys_to_keep if result.get(your_key)
                 }
                 pruned_results.append(dict_you_want)
             results = pruned_results

-        return ProcessInstanceReportResult(
-            report_metadata=self.report_metadata, results=results
-        )
+        return ProcessInstanceReportResult(report_metadata=self.report_metadata, results=results)
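
A worked example of the substitution step above with invented data: a {{month}} placeholder in a saved filter gets replaced before the "equals" comparison runs.

substitution_variables = {"month": 3}
field_value = "{{month}}"

for key, value in substitution_variables.items():
    if isinstance(value, str) or isinstance(value, int):
        field_value = str(field_value).replace("{{" + key + "}}", str(value))

assert field_value == "3"  # each instance dict's "month" is then compared to this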

View File

@@ -89,9 +89,7 @@ class ProcessModelInfoSchema(Schema):
     primary_process_id = marshmallow.fields.String(allow_none=True)
     files = marshmallow.fields.List(marshmallow.fields.Nested("FileSchema"))
     fault_or_suspend_on_exception = marshmallow.fields.String()
-    exception_notification_addresses = marshmallow.fields.List(
-        marshmallow.fields.String
-    )
+    exception_notification_addresses = marshmallow.fields.List(marshmallow.fields.String)
     metadata_extraction_paths = marshmallow.fields.List(
         marshmallow.fields.Dict(
             keys=marshmallow.fields.Str(required=False),
@@ -101,8 +99,6 @@ class ProcessModelInfoSchema(Schema):
     )

     @post_load
-    def make_spec(
-        self, data: dict[str, str | bool | int | NotificationType], **_: Any
-    ) -> ProcessModelInfo:
+    def make_spec(self, data: dict[str, str | bool | int | NotificationType], **_: Any) -> ProcessModelInfo:
         """Make_spec."""
         return ProcessModelInfo(**data)  # type: ignore

View File

@@ -17,7 +17,7 @@ class SecretModel(SpiffworkflowBaseDBModel):
     id: int = db.Column(db.Integer, primary_key=True)
     key: str = db.Column(db.String(50), unique=True, nullable=False)
     value: str = db.Column(db.Text(), nullable=False)
-    user_id: int = db.Column(ForeignKey(UserModel.id), nullable=False)  # type: ignore
+    user_id: int = db.Column(ForeignKey(UserModel.id), nullable=False, index=True)  # type: ignore
     updated_at_in_seconds: int = db.Column(db.Integer)
     created_at_in_seconds: int = db.Column(db.Integer)

View File

@@ -41,13 +41,11 @@ class SpecReferenceCache(SpiffworkflowBaseDBModel):
     """A cache of information about all the Processes and Decisions defined in all files."""

     __tablename__ = "spec_reference_cache"
-    __table_args__ = (
-        UniqueConstraint("identifier", "type", name="_identifier_type_unique"),
-    )
+    __table_args__ = (UniqueConstraint("identifier", "type", name="_identifier_type_unique"),)
     id = db.Column(db.Integer, primary_key=True)
     identifier = db.Column(db.String(255), index=True)
     display_name = db.Column(db.String(255), index=True)
-    process_model_id = db.Column(db.String(255))
+    process_model_id = db.Column(db.String(255), index=True)
     type = db.Column(db.String(255), index=True)  # either 'process' or 'decision'
     file_name = db.Column(db.String(255))
     relative_path = db.Column(db.String(255))

View File

@@ -1,25 +0,0 @@
-"""Spiff_logging."""
-from dataclasses import dataclass
-from typing import Optional
-
-from spiffworkflow_backend.models.db import db
-from spiffworkflow_backend.models.db import SpiffworkflowBaseDBModel
-
-
-@dataclass
-class SpiffLoggingModel(SpiffworkflowBaseDBModel):
-    """SpiffLoggingModel."""
-
-    __tablename__ = "spiff_logging"
-    id: int = db.Column(db.Integer, primary_key=True)
-    process_instance_id: int = db.Column(db.Integer, nullable=False)
-    bpmn_process_identifier: str = db.Column(db.String(255), nullable=False)
-    bpmn_process_name: Optional[str] = db.Column(db.String(255), nullable=True)
-    bpmn_task_identifier: str = db.Column(db.String(255), nullable=False)
-    bpmn_task_name: str = db.Column(db.String(255), nullable=True)
-    bpmn_task_type: str = db.Column(db.String(255), nullable=True)
-    spiff_task_guid: str = db.Column(db.String(50), nullable=False)
-    timestamp: float = db.Column(db.DECIMAL(17, 6), nullable=False)
-    message: Optional[str] = db.Column(db.String(255), nullable=True)
-    current_user_id: int = db.Column(db.Integer, nullable=True)
-    spiff_step: int = db.Column(db.Integer, nullable=False)

View File

@@ -1,41 +0,0 @@
-"""Spiff_step_details."""
-from dataclasses import dataclass
-from typing import Union
-
-from sqlalchemy import ForeignKey
-from sqlalchemy import UniqueConstraint
-from sqlalchemy.orm import deferred
-
-from spiffworkflow_backend.models.db import db
-from spiffworkflow_backend.models.db import SpiffworkflowBaseDBModel
-from spiffworkflow_backend.models.process_instance import ProcessInstanceModel
-
-
-@dataclass
-class SpiffStepDetailsModel(SpiffworkflowBaseDBModel):
-    """SpiffStepDetailsModel."""
-
-    __tablename__ = "spiff_step_details"
-    __table_args__ = (
-        UniqueConstraint(
-            "process_instance_id", "spiff_step", name="process_instance_id_spiff_step"
-        ),
-    )
-
-    id: int = db.Column(db.Integer, primary_key=True)
-    process_instance_id: int = db.Column(
-        ForeignKey(ProcessInstanceModel.id), nullable=False  # type: ignore
-    )
-    spiff_step: int = db.Column(db.Integer, nullable=False)
-    task_json: dict = deferred(db.Column(db.JSON, nullable=False))  # type: ignore
-    task_id: str = db.Column(db.String(50), nullable=False)
-    task_state: str = db.Column(db.String(50), nullable=False)
-    bpmn_task_identifier: str = db.Column(db.String(255), nullable=False)
-    delta_json: list = deferred(db.Column(db.JSON))  # type: ignore
-    start_in_seconds: float = db.Column(db.DECIMAL(17, 6), nullable=False)
-
-    # to fix mypy in 3.9 - not sure why syntax like:
-    #   float | None
-    # works in other dataclass db models
-    end_in_seconds: Union[float, None] = db.Column(db.DECIMAL(17, 6))

View File

@ -10,10 +10,17 @@ from marshmallow import Schema
from marshmallow_enum import EnumField # type: ignore from marshmallow_enum import EnumField # type: ignore
from SpiffWorkflow.task import TaskStateNames # type: ignore from SpiffWorkflow.task import TaskStateNames # type: ignore
from sqlalchemy import ForeignKey from sqlalchemy import ForeignKey
from sqlalchemy.orm import relationship
from spiffworkflow_backend.models.bpmn_process import BpmnProcessModel from spiffworkflow_backend.models.bpmn_process import BpmnProcessModel
from spiffworkflow_backend.models.db import db from spiffworkflow_backend.models.db import db
from spiffworkflow_backend.models.db import SpiffworkflowBaseDBModel from spiffworkflow_backend.models.db import SpiffworkflowBaseDBModel
from spiffworkflow_backend.models.json_data import JsonDataModel
from spiffworkflow_backend.models.task_definition import TaskDefinitionModel
class TaskNotFoundError(Exception):
pass
class MultiInstanceType(enum.Enum): class MultiInstanceType(enum.Enum):
@ -40,23 +47,34 @@ class MultiInstanceType(enum.Enum):
@dataclass @dataclass
class TaskModel(SpiffworkflowBaseDBModel): class TaskModel(SpiffworkflowBaseDBModel):
__tablename__ = "task" __tablename__ = "task"
__allow_unmapped__ = True
id: int = db.Column(db.Integer, primary_key=True) id: int = db.Column(db.Integer, primary_key=True)
guid: str = db.Column(db.String(36), nullable=False, unique=True, index=True) guid: str = db.Column(db.String(36), nullable=False, unique=True)
bpmn_process_id: int = db.Column( bpmn_process_id: int = db.Column(ForeignKey(BpmnProcessModel.id), nullable=False, index=True) # type: ignore
ForeignKey(BpmnProcessModel.id), nullable=False # type: ignore bpmn_process = relationship(BpmnProcessModel, back_populates="tasks")
) process_instance_id: int = db.Column(ForeignKey("process_instance.id"), nullable=False, index=True)
# find this by looking up the "workflow_name" and "task_spec" from the properties_json # find this by looking up the "workflow_name" and "task_spec" from the properties_json
# task_definition_id: int = db.Column( task_definition_id: int = db.Column(ForeignKey(TaskDefinitionModel.id), nullable=False, index=True) # type: ignore
# ForeignKey(TaskDefinitionModel.id), nullable=False # type: ignore task_definition = relationship("TaskDefinitionModel")
# )
state: str = db.Column(db.String(10), nullable=False)
properties_json: dict = db.Column(db.JSON, nullable=False)
json_data_hash: str = db.Column(db.String(255), nullable=False, index=True)
start_in_seconds: float = db.Column(db.DECIMAL(17, 6)) state: str = db.Column(db.String(10), nullable=False, index=True)
properties_json: dict = db.Column(db.JSON, nullable=False)
json_data_hash: str = db.Column(db.String(255), nullable=False, index=True)
python_env_data_hash: str = db.Column(db.String(255), nullable=False, index=True)
start_in_seconds: Union[float, None] = db.Column(db.DECIMAL(17, 6))
end_in_seconds: Union[float, None] = db.Column(db.DECIMAL(17, 6)) end_in_seconds: Union[float, None] = db.Column(db.DECIMAL(17, 6))
data: Optional[dict] = None
def python_env_data(self) -> dict:
return JsonDataModel.find_data_dict_by_hash(self.python_env_data_hash)
def json_data(self) -> dict:
return JsonDataModel.find_data_dict_by_hash(self.json_data_hash)
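
The two accessors above resolve content-addressed JSON by hash. A minimal sketch of the lookup they rely on, assuming JsonDataModel keeps one row per content hash with a hash column and a data JSON column (both column names are assumptions):

def find_data_dict_by_hash(hash_: str) -> dict:
    # fetch the stored JSON blob by its content hash; empty dict when absent
    json_data = JsonDataModel.query.filter_by(hash=hash_).first()  # "hash" column assumed
    return {} if json_data is None else json_data.data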
class Task: class Task:
"""Task.""" """Task."""
@ -91,7 +109,6 @@ class Task:
event_definition: Union[dict[str, Any], None] = None, event_definition: Union[dict[str, Any], None] = None,
call_activity_process_identifier: Optional[str] = None, call_activity_process_identifier: Optional[str] = None,
calling_subprocess_task_id: Optional[str] = None, calling_subprocess_task_id: Optional[str] = None,
task_spiff_step: Optional[int] = None,
): ):
"""__init__.""" """__init__."""
self.id = id self.id = id
@ -106,7 +123,6 @@ class Task:
self.event_definition = event_definition self.event_definition = event_definition
self.call_activity_process_identifier = call_activity_process_identifier self.call_activity_process_identifier = call_activity_process_identifier
self.calling_subprocess_task_id = calling_subprocess_task_id self.calling_subprocess_task_id = calling_subprocess_task_id
self.task_spiff_step = task_spiff_step
self.data = data self.data = data
if self.data is None: if self.data is None:
@ -121,15 +137,9 @@ class Task:
self.form_schema = form_schema self.form_schema = form_schema
self.form_ui_schema = form_ui_schema self.form_ui_schema = form_ui_schema
self.multi_instance_type = ( self.multi_instance_type = multi_instance_type # Some tasks have a repeat behavior.
multi_instance_type # Some tasks have a repeat behavior. self.multi_instance_count = multi_instance_count # This is the number of times the task could repeat.
) self.multi_instance_index = multi_instance_index # And the index of the currently repeating task.
self.multi_instance_count = (
multi_instance_count # This is the number of times the task could repeat.
)
self.multi_instance_index = (
multi_instance_index # And the index of the currently repeating task.
)
self.process_identifier = process_identifier self.process_identifier = process_identifier
self.properties = properties # Arbitrary extension properties from BPMN editor. self.properties = properties # Arbitrary extension properties from BPMN editor.
@ -170,7 +180,6 @@ class Task:
"event_definition": self.event_definition, "event_definition": self.event_definition,
"call_activity_process_identifier": self.call_activity_process_identifier, "call_activity_process_identifier": self.call_activity_process_identifier,
"calling_subprocess_task_id": self.calling_subprocess_task_id, "calling_subprocess_task_id": self.calling_subprocess_task_id,
"task_spiff_step": self.task_spiff_step,
} }
@classmethod @classmethod
@ -227,9 +236,7 @@ class FormFieldSchema(Schema):
default_value = marshmallow.fields.String(required=False, allow_none=True) default_value = marshmallow.fields.String(required=False, allow_none=True)
options = marshmallow.fields.List(marshmallow.fields.Nested(OptionSchema)) options = marshmallow.fields.List(marshmallow.fields.Nested(OptionSchema))
validation = marshmallow.fields.List(marshmallow.fields.Nested(ValidationSchema)) validation = marshmallow.fields.List(marshmallow.fields.Nested(ValidationSchema))
properties = marshmallow.fields.List( properties = marshmallow.fields.List(marshmallow.fields.Nested(FormFieldPropertySchema))
marshmallow.fields.Nested(FormFieldPropertySchema)
)
# class FormSchema(Schema): # class FormSchema(Schema):

View File

@ -1,5 +1,7 @@
from __future__ import annotations from __future__ import annotations
from dataclasses import dataclass
from sqlalchemy import ForeignKey from sqlalchemy import ForeignKey
from sqlalchemy import UniqueConstraint from sqlalchemy import UniqueConstraint
from sqlalchemy.orm import relationship from sqlalchemy.orm import relationship
@ -11,6 +13,7 @@ from spiffworkflow_backend.models.db import db
from spiffworkflow_backend.models.db import SpiffworkflowBaseDBModel from spiffworkflow_backend.models.db import SpiffworkflowBaseDBModel
@dataclass
class TaskDefinitionModel(SpiffworkflowBaseDBModel): class TaskDefinitionModel(SpiffworkflowBaseDBModel):
__tablename__ = "task_definition" __tablename__ = "task_definition"
__table_args__ = ( __table_args__ = (
@ -23,13 +26,15 @@ class TaskDefinitionModel(SpiffworkflowBaseDBModel):
id: int = db.Column(db.Integer, primary_key=True) id: int = db.Column(db.Integer, primary_key=True)
bpmn_process_definition_id: int = db.Column( bpmn_process_definition_id: int = db.Column(
ForeignKey(BpmnProcessDefinitionModel.id), nullable=False # type: ignore ForeignKey(BpmnProcessDefinitionModel.id), nullable=False, index=True # type: ignore
) )
bpmn_process_definition = relationship(BpmnProcessDefinitionModel) bpmn_process_definition = relationship(BpmnProcessDefinitionModel)
bpmn_identifier: str = db.Column(db.String(255), nullable=False, index=True) bpmn_identifier: str = db.Column(db.String(255), nullable=False, index=True)
bpmn_name: str = db.Column(db.String(255), nullable=True, index=True)
typename: str = db.Column(db.String(255), nullable=False, index=True)
properties_json: dict = db.Column(db.JSON, nullable=False) properties_json: dict = db.Column(db.JSON, nullable=False)
typename: str = db.Column(db.String(255), nullable=False)
updated_at_in_seconds: int = db.Column(db.Integer) updated_at_in_seconds: int = db.Column(db.Integer)
created_at_in_seconds: int = db.Column(db.Integer) created_at_in_seconds: int = db.Column(db.Integer)

View File

@ -28,13 +28,12 @@ class UserModel(SpiffworkflowBaseDBModel):
id: int = db.Column(db.Integer, primary_key=True) id: int = db.Column(db.Integer, primary_key=True)
username: str = db.Column(db.String(255), nullable=False, unique=True) username: str = db.Column(db.String(255), nullable=False, unique=True)
email = db.Column(db.String(255), index=True)
service = db.Column(db.String(255), nullable=False, unique=False, index=True) # not 'openid' -- google, aws
service_id = db.Column(db.String(255), nullable=False, unique=False, index=True)
service = db.Column(
db.String(255), nullable=False, unique=False
) # not 'openid' -- google, aws
service_id = db.Column(db.String(255), nullable=False, unique=False)
display_name = db.Column(db.String(255)) display_name = db.Column(db.String(255))
email = db.Column(db.String(255))
tenant_specific_field_1: str | None = db.Column(db.String(255)) tenant_specific_field_1: str | None = db.Column(db.String(255))
tenant_specific_field_2: str | None = db.Column(db.String(255)) tenant_specific_field_2: str | None = db.Column(db.String(255))
tenant_specific_field_3: str | None = db.Column(db.String(255)) tenant_specific_field_3: str | None = db.Column(db.String(255))

View File

@ -12,13 +12,11 @@ class UserGroupAssignmentModel(SpiffworkflowBaseDBModel):
"""UserGroupAssignmentModel.""" """UserGroupAssignmentModel."""
__tablename__ = "user_group_assignment" __tablename__ = "user_group_assignment"
__table_args__ = ( __table_args__ = (db.UniqueConstraint("user_id", "group_id", name="user_group_assignment_unique"),)
db.UniqueConstraint("user_id", "group_id", name="user_group_assignment_unique"),
)
id = db.Column(db.Integer, primary_key=True) id = db.Column(db.Integer, primary_key=True)
user_id = db.Column(ForeignKey(UserModel.id), nullable=False) # type: ignore user_id = db.Column(ForeignKey(UserModel.id), nullable=False, index=True) # type: ignore
group_id = db.Column(ForeignKey(GroupModel.id), nullable=False) group_id = db.Column(ForeignKey(GroupModel.id), nullable=False, index=True)
group = relationship("GroupModel", overlaps="groups,user_group_assignments,users") # type: ignore group = relationship("GroupModel", overlaps="groups,user_group_assignments,users") # type: ignore
user = relationship("UserModel", overlaps="groups,user_group_assignments,users") # type: ignore user = relationship("UserModel", overlaps="groups,user_group_assignments,users") # type: ignore

View File

@ -15,15 +15,11 @@ class UserGroupAssignmentWaitingModel(SpiffworkflowBaseDBModel):
MATCH_ALL_USERS = "*" MATCH_ALL_USERS = "*"
__tablename__ = "user_group_assignment_waiting" __tablename__ = "user_group_assignment_waiting"
__table_args__ = ( __table_args__ = (db.UniqueConstraint("username", "group_id", name="user_group_assignment_staged_unique"),)
db.UniqueConstraint(
"username", "group_id", name="user_group_assignment_staged_unique"
),
)
id = db.Column(db.Integer, primary_key=True) id = db.Column(db.Integer, primary_key=True)
username = db.Column(db.String(255), nullable=False) username = db.Column(db.String(255), nullable=False)
group_id = db.Column(ForeignKey(GroupModel.id), nullable=False) group_id = db.Column(ForeignKey(GroupModel.id), nullable=False, index=True)
group = relationship("GroupModel", overlaps="groups,user_group_assignments_waiting,users") # type: ignore group = relationship("GroupModel", overlaps="groups,user_group_assignments_waiting,users") # type: ignore

View File

@ -0,0 +1,25 @@
from typing import Any
import flask.wrappers
import requests
from flask import current_app
from flask.wrappers import Response
def connector_proxy_type_ahead_url() -> Any:
"""Returns the connector proxy type ahead url."""
return current_app.config["SPIFFWORKFLOW_BACKEND_CONNECTOR_PROXY_TYPE_AHEAD_URL"]
def type_ahead(category: str, prefix: str, limit: int) -> flask.wrappers.Response:
url = f"{connector_proxy_type_ahead_url()}/v1/type-ahead/{category}?prefix={prefix}&limit={limit}"
proxy_response = requests.get(url)
status = proxy_response.status_code
if status // 100 == 2:
response = proxy_response.text
else:
# suppress pop-up errors on the client
status = 200
response = "[]"
return Response(response, status=status, mimetype="application/json")
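
A quick usage sketch for the endpoint above, with hypothetical category and values (run inside a Flask app context so the config lookup works):

# builds GET {proxy}/v1/type-ahead/country?prefix=Uni&limit=10 under the hood
response = type_ahead(category="country", prefix="Uni", limit=10)
print(response.status_code, response.get_data(as_text=True))  # "[]" when the proxy errors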

View File

@ -28,9 +28,7 @@ def message_instance_list(
message_instances_query = MessageInstanceModel.query message_instances_query = MessageInstanceModel.query
if process_instance_id: if process_instance_id:
message_instances_query = message_instances_query.filter_by( message_instances_query = message_instances_query.filter_by(process_instance_id=process_instance_id)
process_instance_id=process_instance_id
)
message_instances = ( message_instances = (
message_instances_query.order_by( message_instances_query.order_by(
@ -61,6 +59,12 @@ def message_instance_list(
# payload: dict, # payload: dict,
# process_instance_id: Optional[int], # process_instance_id: Optional[int],
# } # }
#
# For example:
# curl 'http://localhost:7000/v1.0/messages/gogo' \
# -H 'authorization: Bearer [FIXME]' \
# -H 'content-type: application/json' \
# --data-raw '{"payload":{"sure": "yes", "food": "spicy"}}'
def message_send( def message_send(
message_name: str, message_name: str,
body: Dict[str, Any], body: Dict[str, Any],
@ -70,10 +74,7 @@ def message_send(
raise ( raise (
ApiError( ApiError(
error_code="missing_payload", error_code="missing_payload",
message=( message="Please include a 'payload' in the JSON body that contains the message contents.",
"Please include a 'payload' in the JSON body that contains the"
" message contents."
),
status_code=400, status_code=400,
) )
) )
@ -111,9 +112,7 @@ def message_send(
) )
) )
process_instance = ProcessInstanceModel.query.filter_by( process_instance = ProcessInstanceModel.query.filter_by(id=receiver_message.process_instance_id).first()
id=receiver_message.process_instance_id
).first()
return Response( return Response(
json.dumps(ProcessInstanceModelSchema().dump(process_instance)), json.dumps(ProcessInstanceModelSchema().dump(process_instance)),
status=200, status=200,

View File

@ -20,9 +20,7 @@ from flask import request
from flask import url_for from flask import url_for
from werkzeug.wrappers import Response from werkzeug.wrappers import Response
openid_blueprint = Blueprint( openid_blueprint = Blueprint("openid", __name__, template_folder="templates", static_folder="static")
"openid", __name__, template_folder="templates", static_folder="static"
)
OPEN_ID_CODE = ":this_is_not_secure_do_not_use_in_production" OPEN_ID_CODE = ":this_is_not_secure_do_not_use_in_production"
@ -60,10 +58,7 @@ def auth() -> str:
def form_submit() -> Any: def form_submit() -> Any:
"""Handles the login form submission.""" """Handles the login form submission."""
users = get_users() users = get_users()
if ( if request.values["Uname"] in users and request.values["Pass"] == users[request.values["Uname"]]["password"]:
request.values["Uname"] in users
and request.values["Pass"] == users[request.values["Uname"]]["password"]
):
# Redirect back to the end user with some detailed information # Redirect back to the end user with some detailed information
state = request.values.get("state") state = request.values.get("state")
data = { data = {

View File

@ -2,7 +2,7 @@
margin: 0; margin: 0;
padding: 0; padding: 0;
background-color:white; background-color:white;
font-family: 'Arial'; font-family: Arial, sans-serif;
} }
header { header {
width: 100%; width: 100%;

View File

@ -1,12 +1,12 @@
<!DOCTYPE html> <!DOCTYPE html>
<html> <html lang="en">
<head> <head>
<title>Login Form</title> <title>Login Form</title>
<link rel="stylesheet" type="text/css" href="{{ url_for('openid.static', filename='login.css') }}"> <link rel="stylesheet" type="text/css" href="{{ url_for('openid.static', filename='login.css') }}">
</head> </head>
<body> <body>
<header> <header>
<img class="logo_small" src="{{ url_for('openid.static', filename='logo_small.png') }}"/> <img class="logo_small" src="{{ url_for('openid.static', filename='logo_small.png') }}" alt="Small SpiffWorkflow logo" />
</header> </header>
<h2>Login</h2> <h2>Login</h2>

View File

@ -16,19 +16,16 @@ from spiffworkflow_backend.exceptions.api_error import ApiError
from spiffworkflow_backend.exceptions.process_entity_not_found_error import ( from spiffworkflow_backend.exceptions.process_entity_not_found_error import (
ProcessEntityNotFoundError, ProcessEntityNotFoundError,
) )
from spiffworkflow_backend.models.db import db
from spiffworkflow_backend.models.principal import PrincipalModel from spiffworkflow_backend.models.principal import PrincipalModel
from spiffworkflow_backend.models.process_instance import ProcessInstanceModel from spiffworkflow_backend.models.process_instance import ProcessInstanceModel
from spiffworkflow_backend.models.process_instance import ProcessInstanceModelSchema from spiffworkflow_backend.models.process_instance import ProcessInstanceModelSchema
from spiffworkflow_backend.models.process_instance import (
ProcessInstanceTaskDataCannotBeUpdatedError,
)
from spiffworkflow_backend.models.process_instance_file_data import ( from spiffworkflow_backend.models.process_instance_file_data import (
ProcessInstanceFileDataModel, ProcessInstanceFileDataModel,
) )
from spiffworkflow_backend.models.process_model import ProcessModelInfo from spiffworkflow_backend.models.process_model import ProcessModelInfo
from spiffworkflow_backend.models.spec_reference import SpecReferenceCache from spiffworkflow_backend.models.spec_reference import SpecReferenceCache
from spiffworkflow_backend.models.spec_reference import SpecReferenceSchema from spiffworkflow_backend.models.spec_reference import SpecReferenceSchema
from spiffworkflow_backend.models.task import TaskModel # noqa: F401
from spiffworkflow_backend.services.authorization_service import AuthorizationService from spiffworkflow_backend.services.authorization_service import AuthorizationService
from spiffworkflow_backend.services.git_service import GitService from spiffworkflow_backend.services.git_service import GitService
from spiffworkflow_backend.services.process_instance_processor import ( from spiffworkflow_backend.services.process_instance_processor import (
@ -46,9 +43,7 @@ def permissions_check(body: Dict[str, Dict[str, list[str]]]) -> flask.wrappers.R
raise ( raise (
ApiError( ApiError(
error_code="could_not_requests_to_check", error_code="could_not_requests_to_check",
message=( message="The key 'requests_to_check' not found at root of request body.",
"The key 'requests_to_check' not found at root of request body."
),
status_code=400, status_code=400,
) )
) )
@ -60,9 +55,7 @@ def permissions_check(body: Dict[str, Dict[str, list[str]]]) -> flask.wrappers.R
response_dict[target_uri] = {} response_dict[target_uri] = {}
for http_method in http_methods: for http_method in http_methods:
permission_string = AuthorizationService.get_permission_from_http_method( permission_string = AuthorizationService.get_permission_from_http_method(http_method)
http_method
)
if permission_string: if permission_string:
has_permission = AuthorizationService.user_has_permission( has_permission = AuthorizationService.user_has_permission(
user=g.user, user=g.user,
@ -98,10 +91,7 @@ def _process_data_fetcher(
if file_data is None: if file_data is None:
raise ApiError( raise ApiError(
error_code="process_instance_file_data_not_found", error_code="process_instance_file_data_not_found",
message=( message=f"Could not find file data related to the digest: {process_data_identifier}",
"Could not find file data related to the digest:"
f" {process_data_identifier}"
),
) )
mimetype = file_data.mimetype mimetype = file_data.mimetype
filename = file_data.filename filename = file_data.filename
@ -169,79 +159,7 @@ def github_webhook_receive(body: Dict) -> Response:
auth_header = request.headers.get("X-Hub-Signature-256") auth_header = request.headers.get("X-Hub-Signature-256")
AuthorizationService.verify_sha256_token(auth_header) AuthorizationService.verify_sha256_token(auth_header)
result = GitService.handle_web_hook(body) result = GitService.handle_web_hook(body)
return Response( return Response(json.dumps({"git_pull": result}), status=200, mimetype="application/json")
json.dumps({"git_pull": result}), status=200, mimetype="application/json"
)
def task_data_update(
process_instance_id: str,
modified_process_model_identifier: str,
task_id: str,
body: Dict,
) -> Response:
"""Update task data."""
process_instance = ProcessInstanceModel.query.filter(
ProcessInstanceModel.id == int(process_instance_id)
).first()
if process_instance:
if process_instance.status != "suspended":
raise ProcessInstanceTaskDataCannotBeUpdatedError(
"The process instance needs to be suspended to update the task-data."
f" It is currently: {process_instance.status}"
)
process_instance_data = process_instance.process_instance_data
if process_instance_data is None:
raise ApiError(
error_code="process_instance_data_not_found",
message=(
"Could not find task data related to process instance:"
f" {process_instance.id}"
),
)
process_instance_data_dict = json.loads(process_instance_data.runtime_json)
if "new_task_data" in body:
new_task_data_str: str = body["new_task_data"]
new_task_data_dict = json.loads(new_task_data_str)
if task_id in process_instance_data_dict["tasks"]:
process_instance_data_dict["tasks"][task_id][
"data"
] = new_task_data_dict
process_instance_data.runtime_json = json.dumps(
process_instance_data_dict
)
db.session.add(process_instance_data)
try:
db.session.commit()
except Exception as e:
db.session.rollback()
raise ApiError(
error_code="update_task_data_error",
message=f"Could not update the Instance. Original error is {e}",
) from e
else:
raise ApiError(
error_code="update_task_data_error",
message=(
f"Could not find Task: {task_id} in Instance:"
f" {process_instance_id}."
),
)
else:
raise ApiError(
error_code="update_task_data_error",
message=(
f"Could not update task data for Instance: {process_instance_id}, and"
f" Task: {task_id}."
),
)
return Response(
json.dumps(ProcessInstanceModelSchema().dump(process_instance)),
status=200,
mimetype="application/json",
)
def _get_required_parameter_or_raise(parameter: str, post_body: dict[str, Any]) -> Any: def _get_required_parameter_or_raise(parameter: str, post_body: dict[str, Any]) -> Any:
@ -268,9 +186,7 @@ def send_bpmn_event(
body: Dict, body: Dict,
) -> Response: ) -> Response:
"""Send a bpmn event to a workflow.""" """Send a bpmn event to a workflow."""
process_instance = ProcessInstanceModel.query.filter( process_instance = ProcessInstanceModel.query.filter(ProcessInstanceModel.id == int(process_instance_id)).first()
ProcessInstanceModel.id == int(process_instance_id)
).first()
if process_instance: if process_instance:
processor = ProcessInstanceProcessor(process_instance) processor = ProcessInstanceProcessor(process_instance)
processor.send_bpmn_event(body) processor.send_bpmn_event(body)
@ -286,34 +202,6 @@ def send_bpmn_event(
) )
def manual_complete_task(
modified_process_model_identifier: str,
process_instance_id: str,
task_id: str,
body: Dict,
) -> Response:
"""Mark a task complete without executing it."""
execute = body.get("execute", True)
process_instance = ProcessInstanceModel.query.filter(
ProcessInstanceModel.id == int(process_instance_id)
).first()
if process_instance:
processor = ProcessInstanceProcessor(process_instance)
processor.manual_complete_task(task_id, execute)
else:
raise ApiError(
error_code="complete_task",
message=(
f"Could not complete Task {task_id} in Instance {process_instance_id}"
),
)
return Response(
json.dumps(ProcessInstanceModelSchema().dump(process_instance)),
status=200,
mimetype="application/json",
)
def _commit_and_push_to_git(message: str) -> None: def _commit_and_push_to_git(message: str) -> None:
"""Commit_and_push_to_git.""" """Commit_and_push_to_git."""
if current_app.config["SPIFFWORKFLOW_BACKEND_GIT_COMMIT_ON_SAVE"]: if current_app.config["SPIFFWORKFLOW_BACKEND_GIT_COMMIT_ON_SAVE"]:
@ -332,9 +220,7 @@ def _find_process_instance_by_id_or_raise(
process_instance_id: int, process_instance_id: int,
) -> ProcessInstanceModel: ) -> ProcessInstanceModel:
"""Find_process_instance_by_id_or_raise.""" """Find_process_instance_by_id_or_raise."""
process_instance_query = ProcessInstanceModel.query.filter_by( process_instance_query = ProcessInstanceModel.query.filter_by(id=process_instance_id)
id=process_instance_id
)
# we had a frustrating session trying to do joins and access columns from two tables. here's some notes for our future selves: # we had a frustrating session trying to do joins and access columns from two tables. here's some notes for our future selves:
# this returns an object that allows you to do: process_instance.UserModel.username # this returns an object that allows you to do: process_instance.UserModel.username
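
A sketch of the join pattern those notes describe; the initiator foreign-key name is an assumption:

rows = (
    db.session.query(ProcessInstanceModel, UserModel)
    .join(UserModel, UserModel.id == ProcessInstanceModel.process_initiator_id)  # FK name assumed
    .all()
)
for row in rows:
    # each row exposes both entities by class name, as the note says
    print(row.ProcessInstanceModel.id, row.UserModel.username)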

View File

@ -44,9 +44,7 @@ def process_group_create(body: dict) -> flask.wrappers.Response:
) )
ProcessModelService.add_process_group(process_group) ProcessModelService.add_process_group(process_group)
_commit_and_push_to_git( _commit_and_push_to_git(f"User: {g.user.username} added process group {process_group.id}")
f"User: {g.user.username} added process group {process_group.id}"
)
return make_response(jsonify(process_group), 201) return make_response(jsonify(process_group), 201)
@ -63,22 +61,14 @@ def process_group_delete(modified_process_group_id: str) -> flask.wrappers.Respo
status_code=400, status_code=400,
) from exception ) from exception
_commit_and_push_to_git( _commit_and_push_to_git(f"User: {g.user.username} deleted process group {process_group_id}")
f"User: {g.user.username} deleted process group {process_group_id}"
)
return Response(json.dumps({"ok": True}), status=200, mimetype="application/json") return Response(json.dumps({"ok": True}), status=200, mimetype="application/json")
def process_group_update( def process_group_update(modified_process_group_id: str, body: dict) -> flask.wrappers.Response:
modified_process_group_id: str, body: dict
) -> flask.wrappers.Response:
"""Process Group Update.""" """Process Group Update."""
body_include_list = ["display_name", "description"] body_include_list = ["display_name", "description"]
body_filtered = { body_filtered = {include_item: body[include_item] for include_item in body_include_list if include_item in body}
include_item: body[include_item]
for include_item in body_include_list
if include_item in body
}
process_group_id = _un_modify_modified_process_model_id(modified_process_group_id) process_group_id = _un_modify_modified_process_model_id(modified_process_group_id)
if not ProcessModelService.is_process_group_identifier(process_group_id): if not ProcessModelService.is_process_group_identifier(process_group_id):
@ -90,9 +80,7 @@ def process_group_update(
process_group = ProcessGroup(id=process_group_id, **body_filtered) process_group = ProcessGroup(id=process_group_id, **body_filtered)
ProcessModelService.update_process_group(process_group) ProcessModelService.update_process_group(process_group)
_commit_and_push_to_git( _commit_and_push_to_git(f"User: {g.user.username} updated process group {process_group_id}")
f"User: {g.user.username} updated process group {process_group_id}"
)
return make_response(jsonify(process_group), 200) return make_response(jsonify(process_group), 200)
@ -101,14 +89,10 @@ def process_group_list(
) -> flask.wrappers.Response: ) -> flask.wrappers.Response:
"""Process_group_list.""" """Process_group_list."""
if process_group_identifier is not None: if process_group_identifier is not None:
process_groups = ProcessModelService.get_process_groups( process_groups = ProcessModelService.get_process_groups(process_group_identifier)
process_group_identifier
)
else: else:
process_groups = ProcessModelService.get_process_groups() process_groups = ProcessModelService.get_process_groups()
batch = ProcessModelService().get_batch( batch = ProcessModelService().get_batch(items=process_groups, page=page, per_page=per_page)
items=process_groups, page=page, per_page=per_page
)
pages = len(process_groups) // per_page pages = len(process_groups) // per_page
remainder = len(process_groups) % per_page remainder = len(process_groups) % per_page
if remainder > 0: if remainder > 0:
@ -141,24 +125,15 @@ def process_group_show(
) )
) from exception ) from exception
process_group.parent_groups = ProcessModelService.get_parent_group_array( process_group.parent_groups = ProcessModelService.get_parent_group_array(process_group.id)
process_group.id
)
return make_response(jsonify(process_group), 200) return make_response(jsonify(process_group), 200)
def process_group_move( def process_group_move(modified_process_group_identifier: str, new_location: str) -> flask.wrappers.Response:
modified_process_group_identifier: str, new_location: str
) -> flask.wrappers.Response:
"""Process_group_move.""" """Process_group_move."""
original_process_group_id = _un_modify_modified_process_model_id( original_process_group_id = _un_modify_modified_process_model_id(modified_process_group_identifier)
modified_process_group_identifier new_process_group = ProcessModelService().process_group_move(original_process_group_id, new_location)
)
new_process_group = ProcessModelService().process_group_move(
original_process_group_id, new_location
)
_commit_and_push_to_git( _commit_and_push_to_git(
f"User: {g.user.username} moved process group {original_process_group_id} to" f"User: {g.user.username} moved process group {original_process_group_id} to {new_process_group.id}"
f" {new_process_group.id}"
) )
return make_response(jsonify(new_process_group), 200) return make_response(jsonify(new_process_group), 200)

View File

@ -12,12 +12,13 @@ from flask import jsonify
from flask import make_response from flask import make_response
from flask import request from flask import request
from flask.wrappers import Response from flask.wrappers import Response
from SpiffWorkflow.task import Task as SpiffTask # type: ignore
from SpiffWorkflow.task import TaskState
from sqlalchemy import and_ from sqlalchemy import and_
from sqlalchemy import or_ from sqlalchemy import or_
from sqlalchemy.orm import aliased
from spiffworkflow_backend.exceptions.api_error import ApiError from spiffworkflow_backend.exceptions.api_error import ApiError
from spiffworkflow_backend.models.bpmn_process import BpmnProcessModel
from spiffworkflow_backend.models.bpmn_process_definition import BpmnProcessDefinitionModel
from spiffworkflow_backend.models.db import db from spiffworkflow_backend.models.db import db
from spiffworkflow_backend.models.human_task import HumanTaskModel from spiffworkflow_backend.models.human_task import HumanTaskModel
from spiffworkflow_backend.models.human_task_user import HumanTaskUserModel from spiffworkflow_backend.models.human_task_user import HumanTaskUserModel
@ -27,18 +28,21 @@ from spiffworkflow_backend.models.process_instance import (
) )
from spiffworkflow_backend.models.process_instance import ProcessInstanceModel from spiffworkflow_backend.models.process_instance import ProcessInstanceModel
from spiffworkflow_backend.models.process_instance import ProcessInstanceModelSchema from spiffworkflow_backend.models.process_instance import ProcessInstanceModelSchema
from spiffworkflow_backend.models.process_instance_event import ProcessInstanceEventModel
from spiffworkflow_backend.models.process_instance_metadata import ( from spiffworkflow_backend.models.process_instance_metadata import (
ProcessInstanceMetadataModel, ProcessInstanceMetadataModel,
) )
from spiffworkflow_backend.models.process_instance_queue import (
ProcessInstanceQueueModel,
)
from spiffworkflow_backend.models.process_instance_report import ( from spiffworkflow_backend.models.process_instance_report import (
ProcessInstanceReportModel, ProcessInstanceReportModel,
) )
from spiffworkflow_backend.models.process_model import ProcessModelInfo from spiffworkflow_backend.models.process_model import ProcessModelInfo
from spiffworkflow_backend.models.spec_reference import SpecReferenceCache from spiffworkflow_backend.models.spec_reference import SpecReferenceCache
from spiffworkflow_backend.models.spec_reference import SpecReferenceNotFoundError from spiffworkflow_backend.models.spec_reference import SpecReferenceNotFoundError
from spiffworkflow_backend.models.spiff_logging import SpiffLoggingModel from spiffworkflow_backend.models.task import TaskModel
from spiffworkflow_backend.models.spiff_step_details import SpiffStepDetailsModel from spiffworkflow_backend.models.task_definition import TaskDefinitionModel
from spiffworkflow_backend.models.task import Task
from spiffworkflow_backend.models.user import UserModel from spiffworkflow_backend.models.user import UserModel
from spiffworkflow_backend.routes.process_api_blueprint import ( from spiffworkflow_backend.routes.process_api_blueprint import (
_find_process_instance_by_id_or_raise, _find_process_instance_by_id_or_raise,
@ -55,6 +59,15 @@ from spiffworkflow_backend.services.message_service import MessageService
from spiffworkflow_backend.services.process_instance_processor import ( from spiffworkflow_backend.services.process_instance_processor import (
ProcessInstanceProcessor, ProcessInstanceProcessor,
) )
from spiffworkflow_backend.services.process_instance_queue_service import (
ProcessInstanceIsAlreadyLockedError,
)
from spiffworkflow_backend.services.process_instance_queue_service import (
ProcessInstanceIsNotEnqueuedError,
)
from spiffworkflow_backend.services.process_instance_queue_service import (
ProcessInstanceQueueService,
)
from spiffworkflow_backend.services.process_instance_report_service import ( from spiffworkflow_backend.services.process_instance_report_service import (
ProcessInstanceReportFilter, ProcessInstanceReportFilter,
) )
@ -66,15 +79,14 @@ from spiffworkflow_backend.services.process_instance_service import (
) )
from spiffworkflow_backend.services.process_model_service import ProcessModelService from spiffworkflow_backend.services.process_model_service import ProcessModelService
from spiffworkflow_backend.services.spec_file_service import SpecFileService from spiffworkflow_backend.services.spec_file_service import SpecFileService
from spiffworkflow_backend.services.task_service import TaskService
def process_instance_create( def process_instance_create(
modified_process_model_identifier: str, modified_process_model_identifier: str,
) -> flask.wrappers.Response: ) -> flask.wrappers.Response:
"""Create_process_instance.""" """Create_process_instance."""
process_model_identifier = _un_modify_modified_process_model_id( process_model_identifier = _un_modify_modified_process_model_id(modified_process_model_identifier)
modified_process_model_identifier
)
process_model = _get_process_model(process_model_identifier) process_model = _get_process_model(process_model_identifier)
if process_model.primary_file_name is None: if process_model.primary_file_name is None:
@ -87,10 +99,8 @@ def process_instance_create(
status_code=400, status_code=400,
) )
process_instance = ( process_instance = ProcessInstanceService.create_process_instance_from_process_model_identifier(
ProcessInstanceService.create_process_instance_from_process_model_identifier( process_model_identifier, g.user
process_model_identifier, g.user
)
) )
return Response( return Response(
json.dumps(ProcessInstanceModelSchema().dump(process_instance)), json.dumps(ProcessInstanceModelSchema().dump(process_instance)),
@ -109,10 +119,7 @@ def process_instance_run(
if process_instance.status != "not_started": if process_instance.status != "not_started":
raise ApiError( raise ApiError(
error_code="process_instance_not_runnable", error_code="process_instance_not_runnable",
message=( message=f"Process Instance ({process_instance.id}) is currently running or has already run.",
f"Process Instance ({process_instance.id}) is currently running or has"
" already run."
),
status_code=400, status_code=400,
) )
@ -120,9 +127,12 @@ def process_instance_run(
if do_engine_steps: if do_engine_steps:
try: try:
processor.lock_process_instance("Web")
processor.do_engine_steps(save=True) processor.do_engine_steps(save=True)
except ApiError as e: except (
ApiError,
ProcessInstanceIsNotEnqueuedError,
ProcessInstanceIsAlreadyLockedError,
) as e:
ErrorHandlingService().handle_error(processor, e) ErrorHandlingService().handle_error(processor, e)
raise e raise e
except Exception as e: except Exception as e:
@ -135,21 +145,15 @@ def process_instance_run(
status_code=400, status_code=400,
task=task, task=task,
) from e ) from e
finally:
processor.unlock_process_instance("Web")
if not current_app.config["SPIFFWORKFLOW_BACKEND_RUN_BACKGROUND_SCHEDULER"]: if not current_app.config["SPIFFWORKFLOW_BACKEND_RUN_BACKGROUND_SCHEDULER"]:
MessageService.correlate_all_message_instances() MessageService.correlate_all_message_instances()
process_instance_api = ProcessInstanceService.processor_to_process_instance_api( process_instance_api = ProcessInstanceService.processor_to_process_instance_api(processor)
processor
)
process_instance_data = processor.get_data() process_instance_data = processor.get_data()
process_instance_metadata = ProcessInstanceApiSchema().dump(process_instance_api) process_instance_metadata = ProcessInstanceApiSchema().dump(process_instance_api)
process_instance_metadata["data"] = process_instance_data process_instance_metadata["data"] = process_instance_data
return Response( return Response(json.dumps(process_instance_metadata), status=200, mimetype="application/json")
json.dumps(process_instance_metadata), status=200, mimetype="application/json"
)
def process_instance_terminate( def process_instance_terminate(
@ -159,7 +163,14 @@ def process_instance_terminate(
"""Process_instance_run.""" """Process_instance_run."""
process_instance = _find_process_instance_by_id_or_raise(process_instance_id) process_instance = _find_process_instance_by_id_or_raise(process_instance_id)
processor = ProcessInstanceProcessor(process_instance) processor = ProcessInstanceProcessor(process_instance)
processor.terminate()
try:
with ProcessInstanceQueueService.dequeued(process_instance):
processor.terminate()
except (ProcessInstanceIsNotEnqueuedError, ProcessInstanceIsAlreadyLockedError) as e:
ErrorHandlingService().handle_error(processor, e)
raise e
return Response(json.dumps({"ok": True}), status=200, mimetype="application/json") return Response(json.dumps({"ok": True}), status=200, mimetype="application/json")
@ -169,7 +180,15 @@ def process_instance_suspend(
) -> flask.wrappers.Response: ) -> flask.wrappers.Response:
"""Process_instance_suspend.""" """Process_instance_suspend."""
process_instance = _find_process_instance_by_id_or_raise(process_instance_id) process_instance = _find_process_instance_by_id_or_raise(process_instance_id)
ProcessInstanceProcessor.suspend(process_instance) processor = ProcessInstanceProcessor(process_instance)
try:
with ProcessInstanceQueueService.dequeued(process_instance):
processor.suspend()
except (ProcessInstanceIsNotEnqueuedError, ProcessInstanceIsAlreadyLockedError) as e:
ErrorHandlingService().handle_error(processor, e)
raise e
return Response(json.dumps({"ok": True}), status=200, mimetype="application/json") return Response(json.dumps({"ok": True}), status=200, mimetype="application/json")
@ -179,7 +198,15 @@ def process_instance_resume(
) -> flask.wrappers.Response: ) -> flask.wrappers.Response:
"""Process_instance_resume.""" """Process_instance_resume."""
process_instance = _find_process_instance_by_id_or_raise(process_instance_id) process_instance = _find_process_instance_by_id_or_raise(process_instance_id)
ProcessInstanceProcessor.resume(process_instance) processor = ProcessInstanceProcessor(process_instance)
try:
with ProcessInstanceQueueService.dequeued(process_instance):
processor.resume()
except (ProcessInstanceIsNotEnqueuedError, ProcessInstanceIsAlreadyLockedError) as e:
ErrorHandlingService().handle_error(processor, e)
raise e
return Response(json.dumps({"ok": True}), status=200, mimetype="application/json") return Response(json.dumps({"ok": True}), status=200, mimetype="application/json")
@ -194,34 +221,35 @@ def process_instance_log_list(
# to make sure the process instance exists # to make sure the process instance exists
process_instance = _find_process_instance_by_id_or_raise(process_instance_id) process_instance = _find_process_instance_by_id_or_raise(process_instance_id)
log_query = SpiffLoggingModel.query.filter( log_query = (
SpiffLoggingModel.process_instance_id == process_instance.id ProcessInstanceEventModel.query.filter_by(process_instance_id=process_instance.id)
.outerjoin(TaskModel, TaskModel.guid == ProcessInstanceEventModel.task_guid)
.outerjoin(TaskDefinitionModel, TaskDefinitionModel.id == TaskModel.task_definition_id)
.outerjoin(
BpmnProcessDefinitionModel, BpmnProcessDefinitionModel.id == TaskDefinitionModel.bpmn_process_definition_id
)
) )
if not detailed: if not detailed:
log_query = log_query.filter( log_query = log_query.filter(
# 1. this was the previous implementation, where we only show completed tasks and skipped tasks.
# maybe we want to iterate on this in the future (in a third tab under process instance logs?)
# or_(
# SpiffLoggingModel.message.in_(["State change to COMPLETED"]), # type: ignore
# SpiffLoggingModel.message.like("Skipped task %"), # type: ignore
# )
# 2. We included ["End Event", "Default Start Event"] along with Default Throwing Event, but feb 2023
# we decided to remove them, since they get really chatty when there are lots of subprocesses and call activities.
and_( and_(
SpiffLoggingModel.message.in_(["State change to COMPLETED"]), # type: ignore TaskModel.state.in_(["COMPLETED"]), # type: ignore
SpiffLoggingModel.bpmn_task_type.in_( # type: ignore TaskDefinitionModel.typename.in_(["IntermediateThrowEvent"]), # type: ignore
["Default Throwing Event"]
),
) )
) )
logs = ( logs = (
log_query.order_by(SpiffLoggingModel.timestamp.desc()) # type: ignore log_query.order_by(
.join( ProcessInstanceEventModel.timestamp.desc(), ProcessInstanceEventModel.id.desc() # type: ignore
UserModel, UserModel.id == SpiffLoggingModel.current_user_id, isouter=True )
) # isouter since if we don't have a user, we still want the log .outerjoin(UserModel, UserModel.id == ProcessInstanceEventModel.user_id)
.add_columns( .add_columns(
TaskModel.guid.label("spiff_task_guid"), # type: ignore
UserModel.username, UserModel.username,
BpmnProcessDefinitionModel.bpmn_identifier.label("bpmn_process_definition_identifier"), # type: ignore
BpmnProcessDefinitionModel.bpmn_name.label("bpmn_process_definition_name"), # type: ignore
TaskDefinitionModel.bpmn_identifier.label("task_definition_identifier"), # type: ignore
TaskDefinitionModel.bpmn_name.label("task_definition_name"), # type: ignore
TaskDefinitionModel.typename.label("bpmn_task_type"), # type: ignore
) )
.paginate(page=page, per_page=per_page, error_out=False) .paginate(page=page, per_page=per_page, error_out=False)
) )
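
paginate() returns a Flask-SQLAlchemy Pagination object; a sketch of assembling a response body from it, assuming each item is a plain SQLAlchemy Row (the serialization details are a guess):

response_json = {
    "results": [row._asdict() for row in logs.items],  # Row -> dict
    "pagination": {"count": len(logs.items), "pages": logs.pages, "total": logs.total},
}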
@ -295,9 +323,7 @@ def process_instance_list(
report_filter_by: Optional[str] = None, report_filter_by: Optional[str] = None,
) -> flask.wrappers.Response: ) -> flask.wrappers.Response:
"""Process_instance_list.""" """Process_instance_list."""
process_instance_report = ProcessInstanceReportService.report_with_identifier( process_instance_report = ProcessInstanceReportService.report_with_identifier(g.user, report_id, report_identifier)
g.user, report_id, report_identifier
)
report_column_list = None report_column_list = None
if report_columns: if report_columns:
@ -321,21 +347,19 @@ def process_instance_list(
report_filter_by_list=report_filter_by_list, report_filter_by_list=report_filter_by_list,
) )
else: else:
report_filter = ( report_filter = ProcessInstanceReportService.filter_from_metadata_with_overrides(
ProcessInstanceReportService.filter_from_metadata_with_overrides( process_instance_report=process_instance_report,
process_instance_report=process_instance_report, process_model_identifier=process_model_identifier,
process_model_identifier=process_model_identifier, user_group_identifier=user_group_identifier,
user_group_identifier=user_group_identifier, start_from=start_from,
start_from=start_from, start_to=start_to,
start_to=start_to, end_from=end_from,
end_from=end_from, end_to=end_to,
end_to=end_to, process_status=process_status,
process_status=process_status, with_relation_to_me=with_relation_to_me,
with_relation_to_me=with_relation_to_me, process_initiator_username=process_initiator_username,
process_initiator_username=process_initiator_username, report_column_list=report_column_list,
report_column_list=report_column_list, report_filter_by_list=report_filter_by_list,
report_filter_by_list=report_filter_by_list,
)
) )
response_json = ProcessInstanceReportService.run_process_instance_report( response_json = ProcessInstanceReportService.run_process_instance_report(
@ -349,18 +373,23 @@ def process_instance_list(
return make_response(jsonify(response_json), 200) return make_response(jsonify(response_json), 200)
def process_instance_report_column_list() -> flask.wrappers.Response: def process_instance_report_column_list(process_model_identifier: Optional[str] = None) -> flask.wrappers.Response:
"""Process_instance_report_column_list.""" """Process_instance_report_column_list."""
table_columns = ProcessInstanceReportService.builtin_column_options() table_columns = ProcessInstanceReportService.builtin_column_options()
columns_for_metadata = ( columns_for_metadata_query = (
db.session.query(ProcessInstanceMetadataModel.key) db.session.query(ProcessInstanceMetadataModel.key)
.order_by(ProcessInstanceMetadataModel.key) .order_by(ProcessInstanceMetadataModel.key)
.distinct() # type: ignore .distinct() # type: ignore
.all()
) )
if process_model_identifier:
columns_for_metadata_query = columns_for_metadata_query.join(ProcessInstanceModel)
columns_for_metadata_query = columns_for_metadata_query.filter(
ProcessInstanceModel.process_model_identifier == process_model_identifier
)
columns_for_metadata = columns_for_metadata_query.all()
columns_for_metadata_strings = [ columns_for_metadata_strings = [
{"Header": i[0], "accessor": i[0], "filterable": True} {"Header": i[0], "accessor": i[0], "filterable": True} for i in columns_for_metadata
for i in columns_for_metadata
] ]
return make_response(jsonify(table_columns + columns_for_metadata_strings), 200) return make_response(jsonify(table_columns + columns_for_metadata_strings), 200)
@ -407,20 +436,13 @@ def process_instance_delete(
# (Pdb) db.session.delete # (Pdb) db.session.delete
# <bound method delete of <sqlalchemy.orm.scoping.scoped_session object at 0x103eaab30>> # <bound method delete of <sqlalchemy.orm.scoping.scoped_session object at 0x103eaab30>>
db.session.query(SpiffLoggingModel).filter_by( db.session.query(ProcessInstanceQueueModel).filter_by(process_instance_id=process_instance.id).delete()
process_instance_id=process_instance.id
).delete()
db.session.query(SpiffStepDetailsModel).filter_by(
process_instance_id=process_instance.id
).delete()
db.session.delete(process_instance) db.session.delete(process_instance)
db.session.commit() db.session.commit()
return Response(json.dumps({"ok": True}), status=200, mimetype="application/json") return Response(json.dumps({"ok": True}), status=200, mimetype="application/json")
def process_instance_report_list( def process_instance_report_list(page: int = 1, per_page: int = 100) -> flask.wrappers.Response:
page: int = 1, per_page: int = 100
) -> flask.wrappers.Response:
"""Process_instance_report_list.""" """Process_instance_report_list."""
process_instance_reports = ProcessInstanceReportModel.query.filter_by( process_instance_reports = ProcessInstanceReportModel.query.filter_by(
created_by_id=g.user.id, created_by_id=g.user.id,
@ -505,9 +527,7 @@ def process_instance_report_show(
) )
substitution_variables = request.args.to_dict() substitution_variables = request.args.to_dict()
result_dict = process_instance_report.generate_report( result_dict = process_instance_report.generate_report(process_instances.items, substitution_variables)
process_instances.items, substitution_variables
)
# update this if we go back to a database query instead of filtering in memory # update this if we go back to a database query instead of filtering in memory
result_dict["pagination"] = { result_dict["pagination"] = {
@ -522,157 +542,169 @@ def process_instance_report_show(
def process_instance_task_list_without_task_data_for_me( def process_instance_task_list_without_task_data_for_me(
modified_process_model_identifier: str, modified_process_model_identifier: str,
process_instance_id: int, process_instance_id: int,
all_tasks: bool = False, most_recent_tasks_only: bool = False,
spiff_step: int = 0, bpmn_process_guid: Optional[str] = None,
to_task_guid: Optional[str] = None,
) -> flask.wrappers.Response: ) -> flask.wrappers.Response:
"""Process_instance_task_list_without_task_data_for_me.""" """Process_instance_task_list_without_task_data_for_me."""
process_instance = _find_process_instance_for_me_or_raise(process_instance_id) process_instance = _find_process_instance_for_me_or_raise(process_instance_id)
return process_instance_task_list( return process_instance_task_list(
modified_process_model_identifier, _modified_process_model_identifier=modified_process_model_identifier,
process_instance, process_instance=process_instance,
all_tasks, most_recent_tasks_only=most_recent_tasks_only,
spiff_step, bpmn_process_guid=bpmn_process_guid,
to_task_guid=to_task_guid,
) )
def process_instance_task_list_without_task_data( def process_instance_task_list_without_task_data(
modified_process_model_identifier: str, modified_process_model_identifier: str,
process_instance_id: int, process_instance_id: int,
all_tasks: bool = False, most_recent_tasks_only: bool = False,
spiff_step: int = 0, bpmn_process_guid: Optional[str] = None,
to_task_guid: Optional[str] = None,
) -> flask.wrappers.Response: ) -> flask.wrappers.Response:
"""Process_instance_task_list_without_task_data.""" """Process_instance_task_list_without_task_data."""
process_instance = _find_process_instance_by_id_or_raise(process_instance_id) process_instance = _find_process_instance_by_id_or_raise(process_instance_id)
return process_instance_task_list( return process_instance_task_list(
modified_process_model_identifier, _modified_process_model_identifier=modified_process_model_identifier,
process_instance, process_instance=process_instance,
all_tasks, most_recent_tasks_only=most_recent_tasks_only,
spiff_step, bpmn_process_guid=bpmn_process_guid,
to_task_guid=to_task_guid,
) )
def process_instance_task_list( def process_instance_task_list(
_modified_process_model_identifier: str, _modified_process_model_identifier: str,
process_instance: ProcessInstanceModel, process_instance: ProcessInstanceModel,
all_tasks: bool = False, bpmn_process_guid: Optional[str] = None,
spiff_step: int = 0, to_task_guid: Optional[str] = None,
most_recent_tasks_only: bool = False, most_recent_tasks_only: bool = False,
) -> flask.wrappers.Response: ) -> flask.wrappers.Response:
"""Process_instance_task_list.""" """Process_instance_task_list."""
step_detail_query = db.session.query(SpiffStepDetailsModel).filter( bpmn_process_ids = []
SpiffStepDetailsModel.process_instance_id == process_instance.id, if bpmn_process_guid:
bpmn_process = BpmnProcessModel.query.filter_by(guid=bpmn_process_guid).first()
bpmn_processes = TaskService.bpmn_process_and_descendants([bpmn_process])
bpmn_process_ids = [p.id for p in bpmn_processes]
task_model_query = db.session.query(TaskModel).filter(
TaskModel.process_instance_id == process_instance.id,
) )
if spiff_step > 0: to_task_model: Optional[TaskModel] = None
step_detail_query = step_detail_query.filter( task_models_of_parent_bpmn_processes_guids: list[str] = []
SpiffStepDetailsModel.spiff_step <= spiff_step if to_task_guid is not None:
to_task_model = TaskModel.query.filter_by(guid=to_task_guid, process_instance_id=process_instance.id).first()
if to_task_model is None:
raise ApiError(
error_code="task_not_found",
message=f"Cannot find a task with guid '{to_task_guid}' for process instance '{process_instance.id}'",
status_code=400,
)
if to_task_model.state != "COMPLETED":
# TODO: find a better term for viewing at task state
raise ApiError(
error_code="task_cannot_be_viewed_at",
message=(
f"Desired task with guid '{to_task_guid}' for process instance '{process_instance.id}' was never"
" completed and therefore cannot be viewed at."
),
status_code=400,
)
(
_parent_bpmn_processes,
task_models_of_parent_bpmn_processes,
) = TaskService.task_models_of_parent_bpmn_processes(to_task_model)
task_models_of_parent_bpmn_processes_guids = [p.guid for p in task_models_of_parent_bpmn_processes if p.guid]
task_model_query = task_model_query.filter(
or_(
TaskModel.end_in_seconds <= to_task_model.end_in_seconds, # type: ignore
TaskModel.guid.in_(task_models_of_parent_bpmn_processes_guids), # type: ignore
)
) )
step_details = step_detail_query.all() bpmn_process_alias = aliased(BpmnProcessModel)
direct_parent_bpmn_process_alias = aliased(BpmnProcessModel)
direct_parent_bpmn_process_definition_alias = aliased(BpmnProcessDefinitionModel)
processor = ProcessInstanceProcessor(process_instance) task_model_query = (
full_bpmn_process_dict = processor.full_bpmn_process_dict task_model_query.order_by(TaskModel.id.desc()) # type: ignore
-        tasks = full_bpmn_process_dict["tasks"]
-        subprocesses = full_bpmn_process_dict["subprocesses"]
-
-        steps_by_id = {step_detail.task_id: step_detail for step_detail in step_details}
-
-        subprocess_state_overrides = {}
-        for step_detail in step_details:
-            if step_detail.task_id in tasks:
-                tasks[step_detail.task_id]["state"] = Task.task_state_name_to_int(
-                    step_detail.task_state
-                )
-            else:
-                for subprocess_id, subprocess_info in subprocesses.items():
-                    if step_detail.task_id in subprocess_info["tasks"]:
-                        subprocess_info["tasks"][step_detail.task_id]["state"] = (
-                            Task.task_state_name_to_int(step_detail.task_state)
-                        )
-                        subprocess_state_overrides[subprocess_id] = TaskState.WAITING
-
-        for subprocess_info in subprocesses.values():
-            for spiff_task_id in subprocess_info["tasks"]:
-                if spiff_task_id not in steps_by_id:
-                    subprocess_info["tasks"][spiff_task_id]["data"] = {}
-                    subprocess_info["tasks"][spiff_task_id]["state"] = (
-                        subprocess_state_overrides.get(spiff_task_id, TaskState.FUTURE)
-                    )
-        for spiff_task_id in tasks:
-            if spiff_task_id not in steps_by_id:
-                tasks[spiff_task_id]["data"] = {}
-                tasks[spiff_task_id]["state"] = subprocess_state_overrides.get(
-                    spiff_task_id, TaskState.FUTURE
-                )
-
-        bpmn_process_instance = ProcessInstanceProcessor._serializer.workflow_from_dict(
-            full_bpmn_process_dict
-        )
-        spiff_task = processor.__class__.get_task_by_bpmn_identifier(
-            step_details[-1].bpmn_task_identifier, bpmn_process_instance
-        )
-        if spiff_task is not None and spiff_task.state != TaskState.READY:
-            spiff_task.complete()
-
-    spiff_tasks = None
-    if all_tasks:
-        spiff_tasks = bpmn_process_instance.get_tasks(TaskState.ANY_MASK)
-    else:
-        spiff_tasks = processor.get_all_user_tasks()
-
-    (
-        subprocesses_by_child_task_ids,
-        task_typename_by_task_id,
-    ) = processor.get_subprocesses_by_child_task_ids()
-    processor.get_highest_level_calling_subprocesses_by_child_task_ids(
-        subprocesses_by_child_task_ids, task_typename_by_task_id
-    )
-
-    tasks = []
-    spiff_tasks_to_process = spiff_tasks
-    if most_recent_tasks_only:
-        spiff_tasks_by_process_id_and_task_name: dict[str, SpiffTask] = {}
-        for spiff_task in spiff_tasks:
-            row_id = f"{spiff_task.task_spec._wf_spec.name}:{spiff_task.task_spec.name}"
-            if (
-                row_id not in spiff_tasks_by_process_id_and_task_name
-                or spiff_task.last_state_change
-                > spiff_tasks_by_process_id_and_task_name[row_id].last_state_change
-            ):
-                spiff_tasks_by_process_id_and_task_name[row_id] = spiff_task
-        spiff_tasks_to_process = spiff_tasks_by_process_id_and_task_name.values()
-
-    for spiff_task in spiff_tasks_to_process:
-        task_spiff_step: Optional[int] = None
-        if str(spiff_task.id) in steps_by_id:
-            task_spiff_step = steps_by_id[str(spiff_task.id)].spiff_step
-        calling_subprocess_task_id = subprocesses_by_child_task_ids.get(
-            str(spiff_task.id), None
-        )
-        task = ProcessInstanceService.spiff_task_to_api_task(
-            processor,
-            spiff_task,
-            calling_subprocess_task_id=calling_subprocess_task_id,
-            task_spiff_step=task_spiff_step,
-        )
-        tasks.append(task)
-
-    return make_response(jsonify(tasks), 200)
+        .join(TaskDefinitionModel, TaskDefinitionModel.id == TaskModel.task_definition_id)
+        .join(bpmn_process_alias, bpmn_process_alias.id == TaskModel.bpmn_process_id)
+        .outerjoin(
+            direct_parent_bpmn_process_alias,
+            direct_parent_bpmn_process_alias.id == bpmn_process_alias.direct_parent_process_id,
+        )
+        .outerjoin(
+            direct_parent_bpmn_process_definition_alias,
+            direct_parent_bpmn_process_definition_alias.id
+            == direct_parent_bpmn_process_alias.bpmn_process_definition_id,
+        )
+        .join(
+            BpmnProcessDefinitionModel, BpmnProcessDefinitionModel.id == TaskDefinitionModel.bpmn_process_definition_id
+        )
+        .add_columns(
+            BpmnProcessDefinitionModel.bpmn_identifier.label("bpmn_process_definition_identifier"),  # type: ignore
+            BpmnProcessDefinitionModel.bpmn_name.label("bpmn_process_definition_name"),  # type: ignore
+            bpmn_process_alias.guid.label("bpmn_process_guid"),
+            # not sure why we needed these
+            # direct_parent_bpmn_process_alias.guid.label("bpmn_process_direct_parent_guid"),
+            # direct_parent_bpmn_process_definition_alias.bpmn_identifier.label(
+            #     "bpmn_process_direct_parent_bpmn_identifier"
+            # ),
+            TaskDefinitionModel.bpmn_identifier,
+            TaskDefinitionModel.bpmn_name,
+            TaskDefinitionModel.typename,
+            TaskDefinitionModel.properties_json.label("task_definition_properties_json"),  # type: ignore
+            TaskModel.guid,
+            TaskModel.state,
+            TaskModel.end_in_seconds,
+            TaskModel.start_in_seconds,
+        )
+    )
+
+    if len(bpmn_process_ids) > 0:
+        task_model_query = task_model_query.filter(bpmn_process_alias.id.in_(bpmn_process_ids))
+
+    task_models = task_model_query.all()
+    task_model_list = {}
+    if most_recent_tasks_only:
+        for task_model in task_models:
+            bpmn_process_guid = task_model.bpmn_process_guid or "TOP"
+            row_key = f"{bpmn_process_guid}:::{task_model.bpmn_identifier}"
+            if row_key not in task_model_list:
+                task_model_list[row_key] = task_model
+        task_models = list(task_model_list.values())
+
+    if to_task_model is not None:
+        task_models_dict = json.loads(current_app.json.dumps(task_models))
+        for task_model in task_models_dict:
+            end_in_seconds = float(task_model["end_in_seconds"]) if task_model["end_in_seconds"] is not None else None
+            if to_task_model.guid == task_model["guid"] and task_model["state"] == "COMPLETED":
+                TaskService.reset_task_model_dict(task_model, state="READY")
+            elif (
+                end_in_seconds is None
+                or to_task_model.end_in_seconds is None
+                or to_task_model.end_in_seconds < end_in_seconds
+            ) and task_model["guid"] in task_models_of_parent_bpmn_processes_guids:
+                TaskService.reset_task_model_dict(task_model, state="WAITING")
+        return make_response(jsonify(task_models_dict), 200)
+
+    return make_response(jsonify(task_models), 200)
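
An aside on the most_recent_tasks_only branch added above: the de-duplication keeps the first row seen for each (bpmn process guid, task identifier) pair, which only yields the most recent task if the query returns rows newest-first. A minimal standalone sketch of that pattern; the Row class and sample data are illustrative, not the app's real models:

    from dataclasses import dataclass
    from typing import Optional

    @dataclass
    class Row:  # stand-in for one task_model_query result row
        bpmn_process_guid: Optional[str]
        bpmn_identifier: str
        state: str

    def most_recent_only(rows: list[Row]) -> list[Row]:
        seen: dict[str, Row] = {}
        for row in rows:
            # tasks of the top-level process carry no bpmn_process_guid
            process_guid = row.bpmn_process_guid or "TOP"
            row_key = f"{process_guid}:::{row.bpmn_identifier}"
            if row_key not in seen:  # first (newest) row for this key wins
                seen[row_key] = row
        return list(seen.values())

    rows = [Row(None, "manual_task_one", "COMPLETED"), Row(None, "manual_task_one", "MAYBE")]
    assert [r.state for r in most_recent_only(rows)] == ["COMPLETED"]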
 def process_instance_reset(
     process_instance_id: int,
     modified_process_model_identifier: str,
-    spiff_step: int = 0,
+    to_task_guid: str,
 ) -> flask.wrappers.Response:
     """Reset a process instance to a particular step."""
     process_instance = _find_process_instance_by_id_or_raise(process_instance_id)
-    processor = ProcessInstanceProcessor(process_instance)
-    processor.reset_process(spiff_step)
+    ProcessInstanceProcessor.reset_process(process_instance, to_task_guid)
     return Response(json.dumps({"ok": True}), status=200, mimetype="application/json")
@@ -681,14 +713,10 @@ def process_instance_find_by_id(
 ) -> flask.wrappers.Response:
     """Process_instance_find_by_id."""
     process_instance = _find_process_instance_by_id_or_raise(process_instance_id)
-    modified_process_model_identifier = (
-        ProcessModelInfo.modify_process_identifier_for_path_param(
-            process_instance.process_model_identifier
-        )
-    )
-    process_instance_uri = (
-        f"/process-instances/{modified_process_model_identifier}/{process_instance.id}"
-    )
+    modified_process_model_identifier = ProcessModelInfo.modify_process_identifier_for_path_param(
+        process_instance.process_model_identifier
+    )
+    process_instance_uri = f"/process-instances/{modified_process_model_identifier}/{process_instance.id}"
     has_permission = AuthorizationService.user_has_permission(
         user=g.user,
         permission="read",
@@ -722,32 +750,22 @@ def _get_process_instance(
     process_model_with_diagram = None
     name_of_file_with_diagram = None
     if process_identifier:
-        spec_reference = SpecReferenceCache.query.filter_by(
-            identifier=process_identifier, type="process"
-        ).first()
+        spec_reference = SpecReferenceCache.query.filter_by(identifier=process_identifier, type="process").first()
         if spec_reference is None:
             raise SpecReferenceNotFoundError(
-                "Could not find given process identifier in the cache:"
-                f" {process_identifier}"
+                f"Could not find given process identifier in the cache: {process_identifier}"
             )
-        process_model_with_diagram = ProcessModelService.get_process_model(
-            spec_reference.process_model_id
-        )
+        process_model_with_diagram = ProcessModelService.get_process_model(spec_reference.process_model_id)
         name_of_file_with_diagram = spec_reference.file_name
-        process_instance.process_model_with_diagram_identifier = (
-            process_model_with_diagram.id
-        )
+        process_instance.process_model_with_diagram_identifier = process_model_with_diagram.id
     else:
         process_model_with_diagram = _get_process_model(process_model_identifier)
         if process_model_with_diagram.primary_file_name:
             name_of_file_with_diagram = process_model_with_diagram.primary_file_name

     if process_model_with_diagram and name_of_file_with_diagram:
-        if (
-            process_instance.bpmn_version_control_identifier
-            == current_version_control_revision
-        ):
+        if process_instance.bpmn_version_control_identifier == current_version_control_revision:
             bpmn_xml_file_contents = SpecFileService.get_data(
                 process_model_with_diagram, name_of_file_with_diagram
             ).decode("utf-8")
@@ -790,10 +808,7 @@ def _find_process_instance_for_me_or_raise(
     raise (
         ApiError(
             error_code="process_instance_cannot_be_found",
-            message=(
-                f"Process instance with id {process_instance_id} cannot be found"
-                " that is associated with you."
-            ),
+            message=f"Process instance with id {process_instance_id} cannot be found that is associated with you.",
            status_code=400,
         )
     )

View File

@@ -63,11 +63,7 @@ def process_model_create(
         "fault_or_suspend_on_exception",
         "exception_notification_addresses",
     ]
-    body_filtered = {
-        include_item: body[include_item]
-        for include_item in body_include_list
-        if include_item in body
-    }
+    body_filtered = {include_item: body[include_item] for include_item in body_include_list if include_item in body}

     _get_process_group_from_modified_identifier(modified_process_group_id)

@@ -82,25 +78,19 @@ def process_model_create(
     if ProcessModelService.is_process_model_identifier(process_model_info.id):
         raise ApiError(
             error_code="process_model_with_id_already_exists",
-            message=(
-                f"Process Model with given id already exists: {process_model_info.id}"
-            ),
+            message=f"Process Model with given id already exists: {process_model_info.id}",
             status_code=400,
         )

     if ProcessModelService.is_process_group_identifier(process_model_info.id):
         raise ApiError(
             error_code="process_group_with_id_already_exists",
-            message=(
-                f"Process Group with given id already exists: {process_model_info.id}"
-            ),
+            message=f"Process Group with given id already exists: {process_model_info.id}",
             status_code=400,
         )

     ProcessModelService.add_process_model(process_model_info)
-    _commit_and_push_to_git(
-        f"User: {g.user.username} created process model {process_model_info.id}"
-    )
+    _commit_and_push_to_git(f"User: {g.user.username} created process model {process_model_info.id}")
     return Response(
         json.dumps(ProcessModelInfoSchema().dump(process_model_info)),
         status=201,

@@ -122,9 +112,7 @@ def process_model_delete(
         status_code=400,
     ) from exception

-    _commit_and_push_to_git(
-        f"User: {g.user.username} deleted process model {process_model_identifier}"
-    )
+    _commit_and_push_to_git(f"User: {g.user.username} deleted process model {process_model_identifier}")
     return Response(json.dumps({"ok": True}), status=200, mimetype="application/json")

@@ -143,11 +131,7 @@ def process_model_update(
         "fault_or_suspend_on_exception",
         "exception_notification_addresses",
     ]
-    body_filtered = {
-        include_item: body[include_item]
-        for include_item in body_include_list
-        if include_item in body
-    }
+    body_filtered = {include_item: body[include_item] for include_item in body_include_list if include_item in body}

     process_model = _get_process_model(process_model_identifier)
@@ -156,10 +140,7 @@ def process_model_update(
     # All we really need this for is to get the process id from a bpmn file so maybe that could
     # all be moved to FileSystemService.
     update_primary_bpmn_file = False
-    if (
-        "primary_file_name" in body_filtered
-        and "primary_process_id" not in body_filtered
-    ):
+    if "primary_file_name" in body_filtered and "primary_process_id" not in body_filtered:
         if process_model.primary_file_name != body_filtered["primary_file_name"]:
             update_primary_bpmn_file = True

@@ -167,22 +148,14 @@ def process_model_update(
     # update the file to ensure we get the correct process id if the primary file changed.
     if update_primary_bpmn_file and process_model.primary_file_name:
-        primary_file_contents = SpecFileService.get_data(
-            process_model, process_model.primary_file_name
-        )
-        SpecFileService.update_file(
-            process_model, process_model.primary_file_name, primary_file_contents
-        )
+        primary_file_contents = SpecFileService.get_data(process_model, process_model.primary_file_name)
+        SpecFileService.update_file(process_model, process_model.primary_file_name, primary_file_contents)

-    _commit_and_push_to_git(
-        f"User: {g.user.username} updated process model {process_model_identifier}"
-    )
+    _commit_and_push_to_git(f"User: {g.user.username} updated process model {process_model_identifier}")
     return ProcessModelInfoSchema().dump(process_model)


-def process_model_show(
-    modified_process_model_identifier: str, include_file_references: bool = False
-) -> Any:
+def process_model_show(modified_process_model_identifier: str, include_file_references: bool = False) -> Any:
     """Process_model_show."""
     process_model_identifier = modified_process_model_identifier.replace(":", "/")
     process_model = _get_process_model(process_model_identifier)

@@ -194,13 +167,9 @@ def process_model_show(
     if include_file_references:
         for file in process_model.files:
-            file.references = SpecFileService.get_references_for_file(
-                file, process_model
-            )
+            file.references = SpecFileService.get_references_for_file(file, process_model)

-    process_model.parent_groups = ProcessModelService.get_parent_group_array(
-        process_model.id
-    )
+    process_model.parent_groups = ProcessModelService.get_parent_group_array(process_model.id)
     try:
         current_git_revision = GitService.get_current_revision()
     except GitCommandError:

@@ -210,19 +179,12 @@ def process_model_show(
     return make_response(jsonify(process_model), 200)


-def process_model_move(
-    modified_process_model_identifier: str, new_location: str
-) -> flask.wrappers.Response:
+def process_model_move(modified_process_model_identifier: str, new_location: str) -> flask.wrappers.Response:
     """Process_model_move."""
-    original_process_model_id = _un_modify_modified_process_model_id(
-        modified_process_model_identifier
-    )
-    new_process_model = ProcessModelService().process_model_move(
-        original_process_model_id, new_location
-    )
+    original_process_model_id = _un_modify_modified_process_model_id(modified_process_model_identifier)
+    new_process_model = ProcessModelService().process_model_move(original_process_model_id, new_location)
     _commit_and_push_to_git(
-        f"User: {g.user.username} moved process model {original_process_model_id} to"
-        f" {new_process_model.id}"
+        f"User: {g.user.username} moved process model {original_process_model_id} to {new_process_model.id}"
     )
     return make_response(jsonify(new_process_model), 200)
@@ -232,17 +194,13 @@ def process_model_publish(
 ) -> flask.wrappers.Response:
     """Process_model_publish."""
     if branch_to_update is None:
-        branch_to_update = current_app.config[
-            "SPIFFWORKFLOW_BACKEND_GIT_PUBLISH_TARGET_BRANCH"
-        ]
+        branch_to_update = current_app.config["SPIFFWORKFLOW_BACKEND_GIT_PUBLISH_TARGET_BRANCH"]
         if branch_to_update is None:
             raise MissingGitConfigsError(
                 "Missing config for SPIFFWORKFLOW_BACKEND_GIT_PUBLISH_TARGET_BRANCH. "
                 "This is required for publishing process models"
             )
-    process_model_identifier = _un_modify_modified_process_model_id(
-        modified_process_model_identifier
-    )
+    process_model_identifier = _un_modify_modified_process_model_id(modified_process_model_identifier)
     pr_url = GitService().publish(process_model_identifier, branch_to_update)
     data = {"ok": True, "pr_url": pr_url}
     return Response(json.dumps(data), status=200, mimetype="application/json")

@@ -262,21 +220,15 @@ def process_model_list(
         recursive=recursive,
         filter_runnable_by_user=filter_runnable_by_user,
     )
-    process_models_to_return = ProcessModelService().get_batch(
-        process_models, page=page, per_page=per_page
-    )
+    process_models_to_return = ProcessModelService().get_batch(process_models, page=page, per_page=per_page)

     if include_parent_groups:
         process_group_cache = IdToProcessGroupMapping({})
         for process_model in process_models_to_return:
-            parent_group_lites_with_cache = (
-                ProcessModelService.get_parent_group_array_and_cache_it(
-                    process_model.id, process_group_cache
-                )
-            )
-            process_model.parent_groups = parent_group_lites_with_cache[
-                "process_groups"
-            ]
+            parent_group_lites_with_cache = ProcessModelService.get_parent_group_array_and_cache_it(
+                process_model.id, process_group_cache
+            )
+            process_model.parent_groups = parent_group_lites_with_cache["process_groups"]

     pages = len(process_models) // per_page
     remainder = len(process_models) % per_page
def process_model_file_update( def process_model_file_update(modified_process_model_identifier: str, file_name: str) -> flask.wrappers.Response:
modified_process_model_identifier: str, file_name: str
) -> flask.wrappers.Response:
"""Process_model_file_update.""" """Process_model_file_update."""
message = f"User: {g.user.username} clicked save for" message = f"User: {g.user.username} clicked save for"
return _create_or_update_process_model_file( return _create_or_update_process_model_file(modified_process_model_identifier, message, 200)
modified_process_model_identifier, message, 200
)
def process_model_file_delete( def process_model_file_delete(modified_process_model_identifier: str, file_name: str) -> flask.wrappers.Response:
modified_process_model_identifier: str, file_name: str
) -> flask.wrappers.Response:
"""Process_model_file_delete.""" """Process_model_file_delete."""
process_model_identifier = modified_process_model_identifier.replace(":", "/") process_model_identifier = modified_process_model_identifier.replace(":", "/")
process_model = _get_process_model(process_model_identifier) process_model = _get_process_model(process_model_identifier)
@ -333,8 +279,7 @@ def process_model_file_delete(
) from exception ) from exception
_commit_and_push_to_git( _commit_and_push_to_git(
f"User: {g.user.username} deleted process model file" f"User: {g.user.username} deleted process model file {process_model_identifier}/{file_name}"
f" {process_model_identifier}/{file_name}"
) )
return Response(json.dumps({"ok": True}), status=200, mimetype="application/json") return Response(json.dumps({"ok": True}), status=200, mimetype="application/json")
@ -344,14 +289,10 @@ def process_model_file_create(
) -> flask.wrappers.Response: ) -> flask.wrappers.Response:
"""Process_model_file_create.""" """Process_model_file_create."""
message = f"User: {g.user.username} added process model file" message = f"User: {g.user.username} added process model file"
return _create_or_update_process_model_file( return _create_or_update_process_model_file(modified_process_model_identifier, message, 201)
modified_process_model_identifier, message, 201
)
def process_model_file_show( def process_model_file_show(modified_process_model_identifier: str, file_name: str) -> Any:
modified_process_model_identifier: str, file_name: str
) -> Any:
"""Process_model_file_show.""" """Process_model_file_show."""
process_model_identifier = modified_process_model_identifier.replace(":", "/") process_model_identifier = modified_process_model_identifier.replace(":", "/")
process_model = _get_process_model(process_model_identifier) process_model = _get_process_model(process_model_identifier)
@ -360,8 +301,7 @@ def process_model_file_show(
raise ApiError( raise ApiError(
error_code="unknown file", error_code="unknown file",
message=( message=(
f"No information exists for file {file_name}" f"No information exists for file {file_name} it does not exist in workflow {process_model_identifier}."
f" it does not exist in workflow {process_model_identifier}."
), ),
status_code=404, status_code=404,
) )
@@ -382,17 +322,13 @@ def process_model_create_with_natural_language(
 ) -> flask.wrappers.Response:
     """Process_model_create_with_natural_language."""
     pattern = re.compile(
-        r"Create a (?P<pm_name>.*?) process model with a (?P<form_name>.*?) form that"
-        r" collects (?P<columns>.*)"
+        r"Create a (?P<pm_name>.*?) process model with a (?P<form_name>.*?) form that" r" collects (?P<columns>.*)"
     )
     match = pattern.match(body["natural_language_text"])
     if match is None:
         raise ApiError(
             error_code="natural_language_text_not_yet_supported",
-            message=(
-                "Natural language text is not yet supported. Please use the form:"
-                f" {pattern.pattern}"
-            ),
+            message=f"Natural language text is not yet supported. Please use the form: {pattern.pattern}",
             status_code=400,
         )
     process_model_display_name = match.group("pm_name")
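
A quick, runnable check of what the pattern above accepts and captures; the pattern is copied from the hunk, the sample sentence is illustrative:

    import re

    pattern = re.compile(
        r"Create a (?P<pm_name>.*?) process model with a (?P<form_name>.*?) form that" r" collects (?P<columns>.*)"
    )
    match = pattern.match("Create a invoice process model with a payment form that collects name, amount, and date")
    assert match is not None
    assert match.group("pm_name") == "invoice"
    assert match.group("form_name") == "payment"
    assert match.group("columns") == "name, amount, and date"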
@@ -406,12 +342,8 @@ def process_model_create_with_natural_language(
     column_names = match.group("columns")
     columns = re.sub(r"(, (and )?)", ",", column_names).split(",")

-    process_group = _get_process_group_from_modified_identifier(
-        modified_process_group_id
-    )
-    qualified_process_model_identifier = (
-        f"{process_group.id}/{process_model_identifier}"
-    )
+    process_group = _get_process_group_from_modified_identifier(modified_process_group_id)
+    qualified_process_model_identifier = f"{process_group.id}/{process_model_identifier}"

     metadata_extraction_paths = []
     for column in columns:

@@ -432,9 +364,7 @@ def process_model_create_with_natural_language(
             status_code=400,
         )

-    bpmn_template_file = os.path.join(
-        current_app.root_path, "templates", "basic_with_user_task_template.bpmn"
-    )
+    bpmn_template_file = os.path.join(current_app.root_path, "templates", "basic_with_user_task_template.bpmn")
     if not os.path.exists(bpmn_template_file):
         raise ApiError(
             error_code="bpmn_template_file_does_not_exist",

@@ -451,9 +381,7 @@ def process_model_create_with_natural_language(
     bpmn_template_contents = bpmn_template_contents.replace(
         "natural_language_process_id_template", bpmn_process_identifier
     )
-    bpmn_template_contents = bpmn_template_contents.replace(
-        "form-identifier-id-template", form_identifier
-    )
+    bpmn_template_contents = bpmn_template_contents.replace("form-identifier-id-template", form_identifier)

     form_uischema_json: dict = {"ui:order": columns}
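
Following the column handling above through a concrete, illustrative input: the re.sub strips the list punctuation before splitting, and the resulting column names feed the ui:order of the generated form:

    import re

    column_names = "name, amount, and date"
    columns = re.sub(r"(, (and )?)", ",", column_names).split(",")
    assert columns == ["name", "amount", "date"]
    form_uischema_json = {"ui:order": columns}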
@@ -487,21 +415,14 @@ def process_model_create_with_natural_language(
     )
     _commit_and_push_to_git(
-        f"User: {g.user.username} created process model via natural language:"
-        f" {process_model_info.id}"
+        f"User: {g.user.username} created process model via natural language: {process_model_info.id}"
     )

-    default_report_metadata = ProcessInstanceReportService.system_metadata_map(
-        "default"
-    )
+    default_report_metadata = ProcessInstanceReportService.system_metadata_map("default")
     if default_report_metadata is None:
-        raise ProcessInstanceReportNotFoundError(
-            "Could not find a report with identifier 'default'"
-        )
+        raise ProcessInstanceReportNotFoundError("Could not find a report with identifier 'default'")
     for column in columns:
-        default_report_metadata["columns"].append(
-            {"Header": column, "accessor": column, "filterable": True}
-        )
+        default_report_metadata["columns"].append({"Header": column, "accessor": column, "filterable": True})
     ProcessInstanceReportModel.create_report(
         identifier=process_model_identifier,
         user=g.user,
@@ -534,16 +455,11 @@ def _get_process_group_from_modified_identifier(
     if modified_process_group_id is None:
         raise ApiError(
             error_code="process_group_id_not_specified",
-            message=(
-                "Process Model could not be created when process_group_id path param is"
-                " unspecified"
-            ),
+            message="Process Model could not be created when process_group_id path param is unspecified",
             status_code=400,
         )
-    unmodified_process_group_id = _un_modify_modified_process_model_id(
-        modified_process_group_id
-    )
+    unmodified_process_group_id = _un_modify_modified_process_model_id(modified_process_group_id)
     process_group = ProcessModelService.get_process_group(unmodified_process_group_id)
     if process_group is None:
         raise ApiError(
@@ -584,26 +500,19 @@ def _create_or_update_process_model_file(
     file = None
     try:
-        file = SpecFileService.update_file(
-            process_model, request_file.filename, request_file_contents
-        )
+        file = SpecFileService.update_file(process_model, request_file.filename, request_file_contents)
     except ProcessModelFileInvalidError as exception:
         raise (
             ApiError(
                 error_code="process_model_file_invalid",
-                message=(
-                    f"Invalid Process model file: {request_file.filename}."
-                    f" Received error: {str(exception)}"
-                ),
+                message=f"Invalid Process model file: {request_file.filename}. Received error: {str(exception)}",
                 status_code=400,
             )
         ) from exception
     file_contents = SpecFileService.get_data(process_model, file.name)
     file.file_contents = file_contents
     file.process_model_id = process_model.id
-    _commit_and_push_to_git(
-        f"{message_for_git_commit} {process_model_identifier}/{file.name}"
-    )
+    _commit_and_push_to_git(f"{message_for_git_commit} {process_model_identifier}/{file.name}")
     return Response(
         json.dumps(FileSchema().dump(file)),

View File

@@ -26,13 +26,9 @@ def script_unit_test_create(
     modified_process_model_identifier: str, body: Dict[str, Union[str, bool, int]]
 ) -> flask.wrappers.Response:
     """Script_unit_test_create."""
-    bpmn_task_identifier = _get_required_parameter_or_raise(
-        "bpmn_task_identifier", body
-    )
+    bpmn_task_identifier = _get_required_parameter_or_raise("bpmn_task_identifier", body)
     input_json = _get_required_parameter_or_raise("input_json", body)
-    expected_output_json = _get_required_parameter_or_raise(
-        "expected_output_json", body
-    )
+    expected_output_json = _get_required_parameter_or_raise("expected_output_json", body)

     process_model_identifier = modified_process_model_identifier.replace(":", "/")
     process_model = _get_process_model(process_model_identifier)

@@ -40,10 +36,7 @@ def script_unit_test_create(
     if file is None:
         raise ApiError(
             error_code="cannot_find_file",
-            message=(
-                "Could not find the primary bpmn file for process_model:"
-                f" {process_model.id}"
-            ),
+            message=f"Could not find the primary bpmn file for process_model: {process_model.id}",
             status_code=404,
         )

@@ -52,9 +45,7 @@ def script_unit_test_create(
     bpmn_etree_element = SpecFileService.get_etree_from_xml_bytes(file_contents)
     nsmap = bpmn_etree_element.nsmap
-    spiff_element_maker = ElementMaker(
-        namespace="http://spiffworkflow.org/bpmn/schema/1.0/core", nsmap=nsmap
-    )
+    spiff_element_maker = ElementMaker(namespace="http://spiffworkflow.org/bpmn/schema/1.0/core", nsmap=nsmap)

     script_task_elements = bpmn_etree_element.xpath(
         f"//bpmn:scriptTask[@id='{bpmn_task_identifier}']",

@@ -74,9 +65,7 @@ def script_unit_test_create(
         namespaces={"bpmn": "http://www.omg.org/spec/BPMN/20100524/MODEL"},
     )
     if len(extension_elements_array) == 0:
-        bpmn_element_maker = ElementMaker(
-            namespace="http://www.omg.org/spec/BPMN/20100524/MODEL", nsmap=nsmap
-        )
+        bpmn_element_maker = ElementMaker(namespace="http://www.omg.org/spec/BPMN/20100524/MODEL", nsmap=nsmap)
         extension_elements = bpmn_element_maker("extensionElements")
         script_task_element.append(extension_elements)
     else:

@@ -93,23 +82,16 @@ def script_unit_test_create(
     else:
         unit_test_elements = unit_test_elements_array[0]

-    fuzz = "".join(
-        random.choice(string.ascii_uppercase + string.digits)  # noqa: S311
-        for _ in range(7)
-    )
+    fuzz = "".join(random.choice(string.ascii_uppercase + string.digits) for _ in range(7))  # noqa: S311
     unit_test_id = f"unit_test_{fuzz}"

     input_json_element = spiff_element_maker("inputJson", json.dumps(input_json))
-    expected_output_json_element = spiff_element_maker(
-        "expectedOutputJson", json.dumps(expected_output_json)
-    )
+    expected_output_json_element = spiff_element_maker("expectedOutputJson", json.dumps(expected_output_json))
     unit_test_element = spiff_element_maker("unitTest", id=unit_test_id)
     unit_test_element.append(input_json_element)
     unit_test_element.append(expected_output_json_element)
     unit_test_elements.append(unit_test_element)
-    SpecFileService.update_file(
-        process_model, file.name, etree.tostring(bpmn_etree_element)
-    )
+    SpecFileService.update_file(process_model, file.name, etree.tostring(bpmn_etree_element))

     return Response(json.dumps({"ok": True}), status=202, mimetype="application/json")
@@ -120,13 +102,10 @@ def script_unit_test_run(
     """Script_unit_test_run."""
     # FIXME: We should probably clear this somewhere else but this works
     current_app.config["THREAD_LOCAL_DATA"].process_instance_id = None
-    current_app.config["THREAD_LOCAL_DATA"].spiff_step = None

     python_script = _get_required_parameter_or_raise("python_script", body)
     input_json = _get_required_parameter_or_raise("input_json", body)
-    expected_output_json = _get_required_parameter_or_raise(
-        "expected_output_json", body
-    )
+    expected_output_json = _get_required_parameter_or_raise("expected_output_json", body)

     result = ScriptUnitTestRunner.run_with_script_and_pre_post_contexts(
         python_script, input_json, expected_output_json

View File

@@ -17,9 +17,7 @@ from spiffworkflow_backend.services.service_task_service import ServiceTaskService
 def service_task_list() -> flask.wrappers.Response:
     """Service_task_list."""
     available_connectors = ServiceTaskService.available_connectors()
-    return Response(
-        json.dumps(available_connectors), status=200, mimetype="application/json"
-    )
+    return Response(json.dumps(available_connectors), status=200, mimetype="application/json")


 def authentication_list() -> flask.wrappers.Response:

@@ -27,9 +25,7 @@ def authentication_list() -> flask.wrappers.Response:
     available_authentications = ServiceTaskService.authentication_list()
     response_json = {
         "results": available_authentications,
-        "connector_proxy_base_url": current_app.config[
-            "SPIFFWORKFLOW_BACKEND_CONNECTOR_PROXY_URL"
-        ],
+        "connector_proxy_base_url": current_app.config["SPIFFWORKFLOW_BACKEND_CONNECTOR_PROXY_URL"],
         "redirect_url": f"{current_app.config['SPIFFWORKFLOW_BACKEND_URL']}/v1.0/authentication_callback",
     }

@@ -43,9 +39,5 @@ def authentication_callback(
     """Authentication_callback."""
     verify_token(request.args.get("token"), force_run=True)
     response = request.args["response"]
-    SecretService.update_secret(
-        f"{service}/{auth_method}", response, g.user.id, create_if_not_exists=True
-    )
-    return redirect(
-        f"{current_app.config['SPIFFWORKFLOW_BACKEND_URL_FOR_FRONTEND']}/admin/configuration"
-    )
+    SecretService.update_secret(f"{service}/{auth_method}", response, g.user.id, create_if_not_exists=True)
+    return redirect(f"{current_app.config['SPIFFWORKFLOW_BACKEND_URL_FOR_FRONTEND']}/admin/configuration")

View File

@@ -34,10 +34,15 @@ from spiffworkflow_backend.models.group import GroupModel
 from spiffworkflow_backend.models.human_task import HumanTaskModel
 from spiffworkflow_backend.models.human_task_user import HumanTaskUserModel
 from spiffworkflow_backend.models.process_instance import ProcessInstanceModel
+from spiffworkflow_backend.models.process_instance import ProcessInstanceModelSchema
 from spiffworkflow_backend.models.process_instance import ProcessInstanceStatus
+from spiffworkflow_backend.models.process_instance import (
+    ProcessInstanceTaskDataCannotBeUpdatedError,
+)
+from spiffworkflow_backend.models.process_instance_event import ProcessInstanceEventType
 from spiffworkflow_backend.models.process_model import ProcessModelInfo
-from spiffworkflow_backend.models.spiff_step_details import SpiffStepDetailsModel
 from spiffworkflow_backend.models.task import Task
+from spiffworkflow_backend.models.task import TaskModel  # noqa: F401
 from spiffworkflow_backend.models.user import UserModel
 from spiffworkflow_backend.routes.process_api_blueprint import (
     _find_principal_or_raise,

@@ -51,11 +56,15 @@ from spiffworkflow_backend.services.file_system_service import FileSystemService
 from spiffworkflow_backend.services.process_instance_processor import (
     ProcessInstanceProcessor,
 )
+from spiffworkflow_backend.services.process_instance_queue_service import (
+    ProcessInstanceQueueService,
+)
 from spiffworkflow_backend.services.process_instance_service import (
     ProcessInstanceService,
 )
 from spiffworkflow_backend.services.process_model_service import ProcessModelService
 from spiffworkflow_backend.services.spec_file_service import SpecFileService
+from spiffworkflow_backend.services.task_service import TaskService


 class TaskDataSelectOption(TypedDict):
@@ -104,11 +113,10 @@ def task_list_my_tasks(
         ProcessInstanceModel.status != ProcessInstanceStatus.error.value,
     )

-    potential_owner_usernames_from_group_concat_or_similar = (
-        _get_potential_owner_usernames(assigned_user)
-    )
+    potential_owner_usernames_from_group_concat_or_similar = _get_potential_owner_usernames(assigned_user)

     # FIXME: this breaks postgres. Look at commit c147cdb47b1481f094b8c3d82dc502fe961f4977 for
+    # UPDATE: maybe fixed in postgres and mysql. remove comment if so.
     # the postgres fix but it breaks the method for mysql.
     # error in postgres:
     # psycopg2.errors.GroupingError) column "process_instance.process_model_identifier" must

@@ -119,19 +127,12 @@ def task_list_my_tasks(
         HumanTaskModel.task_title,
         HumanTaskModel.process_model_display_name,
         HumanTaskModel.process_instance_id,
-        ProcessInstanceModel.process_model_identifier,
-        ProcessInstanceModel.status.label("process_instance_status"),  # type: ignore
-        ProcessInstanceModel.updated_at_in_seconds,
-        ProcessInstanceModel.created_at_in_seconds,
-        process_initiator_user.username.label("process_initiator_username"),
-        GroupModel.identifier.label("assigned_user_group_identifier"),
-        # func.max does not seem to return columns so we need to call both
-        func.max(ProcessInstanceModel.process_model_identifier),
-        func.max(ProcessInstanceModel.status.label("process_instance_status")),  # type: ignore
-        func.max(ProcessInstanceModel.updated_at_in_seconds),
-        func.max(ProcessInstanceModel.created_at_in_seconds),
-        func.max(process_initiator_user.username.label("process_initiator_username")),
-        func.max(GroupModel.identifier.label("assigned_user_group_identifier")),
+        func.max(ProcessInstanceModel.process_model_identifier).label("process_model_identifier"),
+        func.max(ProcessInstanceModel.status).label("process_instance_status"),
+        func.max(ProcessInstanceModel.updated_at_in_seconds).label("updated_at_in_seconds"),
+        func.max(ProcessInstanceModel.created_at_in_seconds).label("created_at_in_seconds"),
+        func.max(process_initiator_user.username).label("process_initiator_username"),
+        func.max(GroupModel.identifier).label("assigned_user_group_identifier"),
         potential_owner_usernames_from_group_concat_or_similar,
     ).paginate(page=page, per_page=per_page, error_out=False)
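
Context for the func.max wrapping above: postgres rejects a selected column that is neither in the GROUP BY clause nor inside an aggregate. Wrapping each ungrouped column in max() satisfies that rule, and since each group holds a single process instance the aggregate just passes the value through. A self-contained sketch with stand-in models, not the app's real ones:

    from sqlalchemy import Column, ForeignKey, Integer, String, func, select
    from sqlalchemy.orm import declarative_base

    Base = declarative_base()

    class Instance(Base):  # stand-in for ProcessInstanceModel
        __tablename__ = "instance"
        id = Column(Integer, primary_key=True)
        status = Column(String)

    class HumanTask(Base):  # stand-in for HumanTaskModel
        __tablename__ = "human_task"
        id = Column(Integer, primary_key=True)
        instance_id = Column(Integer, ForeignKey("instance.id"))

    # Selecting Instance.status bare here would raise a GroupingError on
    # postgres; aggregating it keeps the query legal on both databases.
    query = (
        select(HumanTask.id, func.max(Instance.status).label("process_instance_status"))
        .join(Instance, Instance.id == HumanTask.instance_id)
        .group_by(HumanTask.id)
    )
    print(query)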
@@ -147,9 +148,7 @@ def task_list_my_tasks(
     return make_response(jsonify(response_json), 200)


-def task_list_for_my_open_processes(
-    page: int = 1, per_page: int = 100
-) -> flask.wrappers.Response:
+def task_list_for_my_open_processes(page: int = 1, per_page: int = 100) -> flask.wrappers.Response:
     """Task_list_for_my_open_processes."""
     return _get_tasks(page=page, per_page=per_page)
@@ -179,63 +178,91 @@ def task_list_for_my_groups(
 def task_data_show(
     modified_process_model_identifier: str,
     process_instance_id: int,
-    spiff_step: int = 0,
+    task_guid: str,
 ) -> flask.wrappers.Response:
-    process_instance = _find_process_instance_by_id_or_raise(process_instance_id)
-    step_detail = (
-        db.session.query(SpiffStepDetailsModel)
-        .filter(
-            SpiffStepDetailsModel.process_instance_id == process_instance.id,
-            SpiffStepDetailsModel.spiff_step == spiff_step,
-        )
-        .first()
-    )
-    if step_detail is None:
-        raise ApiError(
-            error_code="spiff_step_for_proces_instance_not_found",
-            message=(
-                "The given spiff step for the given process instance could not be"
-                " found."
-            ),
-            status_code=400,
-        )
-
-    processor = ProcessInstanceProcessor(process_instance)
-    spiff_task = processor.__class__.get_task_by_bpmn_identifier(
-        step_detail.bpmn_task_identifier, processor.bpmn_process_instance
-    )
-    task_data = step_detail.task_json["task_data"] | step_detail.task_json["python_env"]
-    task = ProcessInstanceService.spiff_task_to_api_task(
-        processor,
-        spiff_task,
-        task_spiff_step=spiff_step,
-    )
-    task.data = task_data
-    return make_response(jsonify(task), 200)
+    task_model = _get_task_model_from_guid_or_raise(task_guid, process_instance_id)
+    task_model.data = task_model.json_data()
+    return make_response(jsonify(task_model), 200)
+
+
+def task_data_update(
+    process_instance_id: str,
+    modified_process_model_identifier: str,
+    task_guid: str,
+    body: Dict,
+) -> Response:
+    """Update task data."""
+    process_instance = ProcessInstanceModel.query.filter(ProcessInstanceModel.id == int(process_instance_id)).first()
+    if process_instance:
+        if process_instance.status != "suspended":
+            raise ProcessInstanceTaskDataCannotBeUpdatedError(
+                "The process instance needs to be suspended to update the task-data."
+                f" It is currently: {process_instance.status}"
+            )
+
+        task_model = TaskModel.query.filter_by(guid=task_guid).first()
+        if task_model is None:
+            raise ApiError(
+                error_code="update_task_data_error",
+                message=f"Could not find Task: {task_guid} in Instance: {process_instance_id}.",
+            )
+
+        if "new_task_data" in body:
+            new_task_data_str: str = body["new_task_data"]
+            new_task_data_dict = json.loads(new_task_data_str)
+            json_data_dict = TaskService.update_task_data_on_task_model_and_return_dict_if_updated(
+                task_model, new_task_data_dict, "json_data_hash"
+            )
+            if json_data_dict is not None:
+                TaskService.insert_or_update_json_data_records({json_data_dict["hash"]: json_data_dict})
+                ProcessInstanceProcessor.add_event_to_process_instance(
+                    process_instance, ProcessInstanceEventType.task_data_edited.value, task_guid=task_guid
+                )
+            try:
+                db.session.commit()
+            except Exception as e:
+                db.session.rollback()
+                raise ApiError(
+                    error_code="update_task_data_error",
+                    message=f"Could not update the Instance. Original error is {e}",
+                ) from e
+    else:
+        raise ApiError(
+            error_code="update_task_data_error",
+            message=f"Could not update task data for Instance: {process_instance_id}, and Task: {task_guid}.",
+        )
+    return Response(
+        json.dumps(ProcessInstanceModelSchema().dump(process_instance)),
+        status=200,
+        mimetype="application/json",
+    )
+
+
+def manual_complete_task(
+    modified_process_model_identifier: str,
+    process_instance_id: str,
+    task_guid: str,
+    body: Dict,
+) -> Response:
+    """Mark a task complete without executing it."""
+    execute = body.get("execute", True)
+    process_instance = ProcessInstanceModel.query.filter(ProcessInstanceModel.id == int(process_instance_id)).first()
+    if process_instance:
+        processor = ProcessInstanceProcessor(process_instance)
+        processor.manual_complete_task(task_guid, execute)
+    else:
+        raise ApiError(
+            error_code="complete_task",
+            message=f"Could not complete Task {task_guid} in Instance {process_instance_id}",
+        )
+    return Response(
+        json.dumps(ProcessInstanceModelSchema().dump(process_instance)),
+        status=200,
+        mimetype="application/json",
+    )
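
A hedged sketch of the json_data bookkeeping that task_data_update leans on: task data appears to be stored content-addressed, so an edit means hashing the new data dict and re-pointing the task's json_data_hash at the upserted record. The hashing scheme below is an assumption for illustration, not necessarily the one TaskService uses:

    import hashlib
    import json

    def json_data_record(data: dict) -> dict:
        # assumed scheme: hash of the canonical JSON serialization
        as_json = json.dumps(data, sort_keys=True)
        return {"hash": hashlib.sha256(as_json.encode()).hexdigest(), "data": as_json}

    record = json_data_record({"approved": True})
    # task_model.json_data_hash = record["hash"]; the record itself is
    # upserted into the json_data table keyed by that hash.
    print(record["hash"])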
-def _munge_form_ui_schema_based_on_hidden_fields_in_task_data(task: Task) -> None:
-    if task.form_ui_schema is None:
-        task.form_ui_schema = {}
-
-    if task.data and "form_ui_hidden_fields" in task.data:
-        hidden_fields = task.data["form_ui_hidden_fields"]
-        for hidden_field in hidden_fields:
-            hidden_field_parts = hidden_field.split(".")
-            relevant_depth_of_ui_schema = task.form_ui_schema
-            for ii, hidden_field_part in enumerate(hidden_field_parts):
-                if hidden_field_part not in relevant_depth_of_ui_schema:
-                    relevant_depth_of_ui_schema[hidden_field_part] = {}
-                relevant_depth_of_ui_schema = relevant_depth_of_ui_schema[
-                    hidden_field_part
-                ]
-                if len(hidden_field_parts) == ii + 1:
-                    relevant_depth_of_ui_schema["ui:widget"] = "hidden"
-
-
-def task_show(process_instance_id: int, task_id: str) -> flask.wrappers.Response:
+def task_show(process_instance_id: int, task_guid: str) -> flask.wrappers.Response:
     """Task_show."""
     process_instance = _find_process_instance_by_id_or_raise(process_instance_id)
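
The helper removed above walked dotted field paths from task data and marked each target as hidden in the form's ui schema. For reference, the same traversal as a standalone, runnable function (names shortened for illustration):

    def munge_hidden_fields(form_ui_schema: dict, hidden_fields: list[str]) -> None:
        for hidden_field in hidden_fields:
            level = form_ui_schema
            hidden_field_parts = hidden_field.split(".")
            for ii, hidden_field_part in enumerate(hidden_field_parts):
                # descend, creating intermediate levels as needed
                level = level.setdefault(hidden_field_part, {})
                if len(hidden_field_parts) == ii + 1:
                    level["ui:widget"] = "hidden"

    schema: dict = {}
    munge_hidden_fields(schema, ["contact.email"])
    assert schema == {"contact": {"email": {"ui:widget": "hidden"}}}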
@@ -250,14 +277,12 @@ def task_show(process_instance_id: int, task_id: str) -> flask.wrappers.Response
         process_instance.process_model_identifier,
     )

-    _find_human_task_or_raise(process_instance_id, task_id)
+    _find_human_task_or_raise(process_instance_id, task_guid)

     form_schema_file_name = ""
     form_ui_schema_file_name = ""
     processor = ProcessInstanceProcessor(process_instance)
-    spiff_task = _get_spiff_task_from_process_instance(
-        task_id, process_instance, processor=processor
-    )
+    spiff_task = _get_spiff_task_from_process_instance(task_guid, process_instance, processor=processor)
     extensions = spiff_task.task_spec.extensions

     if "properties" in extensions:

@@ -276,23 +301,13 @@ def task_show(process_instance_id: int, task_id: str) -> flask.wrappers.Response
     refs = SpecFileService.get_references_for_process(process_model_with_form)
     all_processes = [i.identifier for i in refs]
     if task.process_identifier not in all_processes:
-        top_process_name = processor.find_process_model_process_name_by_task_name(
-            task.process_identifier
-        )
-        bpmn_file_full_path = (
-            ProcessInstanceProcessor.bpmn_file_full_path_from_bpmn_process_identifier(
-                top_process_name
-            )
-        )
-        relative_path = os.path.relpath(
-            bpmn_file_full_path, start=FileSystemService.root_path()
-        )
+        top_process_name = processor.find_process_model_process_name_by_task_name(task.process_identifier)
+        bpmn_file_full_path = ProcessInstanceProcessor.bpmn_file_full_path_from_bpmn_process_identifier(
+            top_process_name
+        )
+        relative_path = os.path.relpath(bpmn_file_full_path, start=FileSystemService.root_path())
         process_model_relative_path = os.path.dirname(relative_path)
-        process_model_with_form = (
-            ProcessModelService.get_process_model_from_relative_path(
-                process_model_relative_path
-            )
-        )
+        process_model_with_form = ProcessModelService.get_process_model_from_relative_path(process_model_relative_path)

     if task.type == "User Task":
         if not form_schema_file_name:

@@ -300,8 +315,8 @@ def task_show(process_instance_id: int, task_id: str) -> flask.wrappers.Response
                 ApiError(
                     error_code="missing_form_file",
                     message=(
-                        "Cannot find a form file for process_instance_id:"
-                        f" {process_instance_id}, task_id: {task_id}"
+                        f"Cannot find a form file for process_instance_id: {process_instance_id}, task_guid:"
+                        f" {task_guid}"
                     ),
                     status_code=400,
                 )

@@ -338,9 +353,7 @@ def task_show(process_instance_id: int, task_id: str) -> flask.wrappers.Response
             )
         except WorkflowTaskException as wfe:
             wfe.add_note("Failed to render instructions for end user.")
-            raise ApiError.from_workflow_exception(
-                "instructions_error", str(wfe), exp=wfe
-            ) from wfe
+            raise ApiError.from_workflow_exception("instructions_error", str(wfe), exp=wfe) from wfe

     return make_response(jsonify(task), 200)
@@ -368,11 +381,11 @@ def process_data_show(
     )


-def task_submit_shared(
+def _task_submit_shared(
     process_instance_id: int,
-    task_id: str,
+    task_guid: str,
     body: Dict[str, Any],
-    terminate_loop: bool = False,
+    save_as_draft: bool = False,
 ) -> flask.wrappers.Response:
     principal = _find_principal_or_raise()
     process_instance = _find_process_instance_by_id_or_raise(process_instance_id)

@@ -387,12 +400,8 @@ def task_submit_shared(
     )

     processor = ProcessInstanceProcessor(process_instance)
-    spiff_task = _get_spiff_task_from_process_instance(
-        task_id, process_instance, processor=processor
-    )
-    AuthorizationService.assert_user_can_complete_spiff_task(
-        process_instance.id, spiff_task, principal.user
-    )
+    spiff_task = _get_spiff_task_from_process_instance(task_guid, process_instance, processor=processor)
+    AuthorizationService.assert_user_can_complete_spiff_task(process_instance.id, spiff_task, principal.user)

     if spiff_task.state != TaskState.READY:
         raise (
) )
) )
if terminate_loop and spiff_task.is_looping(): # multi-instance code from crconnect - we may need it or may not
spiff_task.terminate_loop() # if terminate_loop and spiff_task.is_looping():
# spiff_task.terminate_loop()
human_task = _find_human_task_or_raise( #
process_instance_id=process_instance_id,
task_id=task_id,
only_tasks_that_can_be_completed=True,
)
with sentry_sdk.start_span(op="task", description="complete_form_task"):
processor.lock_process_instance("Web")
ProcessInstanceService.complete_form_task(
processor=processor,
spiff_task=spiff_task,
data=body,
user=g.user,
human_task=human_task,
)
processor.unlock_process_instance("Web")
# If we need to update all tasks, then get the next ready task and if it a multi-instance with the same # If we need to update all tasks, then get the next ready task and if it a multi-instance with the same
# task spec, complete that form as well. # task spec, complete that form as well.
# if update_all: # if update_all:
@ -433,34 +426,55 @@ def task_submit_shared(
# last_index = next_task.task_info()["mi_index"] # last_index = next_task.task_info()["mi_index"]
# next_task = processor.next_task() # next_task = processor.next_task()
next_human_task_assigned_to_me = ( if save_as_draft:
HumanTaskModel.query.filter_by( task_model = _get_task_model_from_guid_or_raise(task_guid, process_instance_id)
process_instance_id=process_instance_id, completed=False ProcessInstanceService.update_form_task_data(processor, spiff_task, body, g.user)
json_data_dict = TaskService.update_task_data_on_task_model_and_return_dict_if_updated(
task_model, spiff_task.data, "json_data_hash"
) )
.order_by(asc(HumanTaskModel.id)) # type: ignore if json_data_dict is not None:
.join(HumanTaskUserModel) TaskService.insert_or_update_json_data_dict(json_data_dict)
.filter_by(user_id=principal.user_id) db.session.add(task_model)
.first() db.session.commit()
) else:
if next_human_task_assigned_to_me: human_task = _find_human_task_or_raise(
return make_response( process_instance_id=process_instance_id,
jsonify(HumanTaskModel.to_task(next_human_task_assigned_to_me)), 200 task_guid=task_guid,
only_tasks_that_can_be_completed=True,
) )
with sentry_sdk.start_span(op="task", description="complete_form_task"):
with ProcessInstanceQueueService.dequeued(process_instance):
ProcessInstanceService.complete_form_task(
processor=processor,
spiff_task=spiff_task,
data=body,
user=g.user,
human_task=human_task,
)
next_human_task_assigned_to_me = (
HumanTaskModel.query.filter_by(process_instance_id=process_instance_id, completed=False)
.order_by(asc(HumanTaskModel.id)) # type: ignore
.join(HumanTaskUserModel)
.filter_by(user_id=principal.user_id)
.first()
)
if next_human_task_assigned_to_me:
return make_response(jsonify(HumanTaskModel.to_task(next_human_task_assigned_to_me)), 200)
return Response(json.dumps({"ok": True}), status=202, mimetype="application/json") return Response(json.dumps({"ok": True}), status=202, mimetype="application/json")
 def task_submit(
     process_instance_id: int,
-    task_id: str,
+    task_guid: str,
     body: Dict[str, Any],
-    terminate_loop: bool = False,
+    save_as_draft: bool = False,
 ) -> flask.wrappers.Response:
     """Task_submit_user_data."""
-    with sentry_sdk.start_span(
-        op="controller_action", description="tasks_controller.task_submit"
-    ):
-        return task_submit_shared(process_instance_id, task_id, body, terminate_loop)
+    with sentry_sdk.start_span(op="controller_action", description="tasks_controller.task_submit"):
+        return _task_submit_shared(process_instance_id, task_guid, body, save_as_draft)
 def _get_tasks(

@@ -492,9 +506,7 @@ def _get_tasks(
     assigned_user = aliased(UserModel)
     if processes_started_by_user:
         human_tasks_query = (
-            human_tasks_query.filter(
-                ProcessInstanceModel.process_initiator_id == user_id
-            )
+            human_tasks_query.filter(ProcessInstanceModel.process_initiator_id == user_id)
             .outerjoin(
                 HumanTaskUserModel,
                 HumanTaskModel.id == HumanTaskUserModel.human_task_id,

@@ -502,9 +514,7 @@ def _get_tasks(
             .outerjoin(assigned_user, assigned_user.id == HumanTaskUserModel.user_id)
         )
     else:
-        human_tasks_query = human_tasks_query.filter(
-            ProcessInstanceModel.process_initiator_id != user_id
-        ).join(
+        human_tasks_query = human_tasks_query.filter(ProcessInstanceModel.process_initiator_id != user_id).join(
             HumanTaskUserModel,
             and_(
                 HumanTaskUserModel.user_id == user_id,

@@ -514,9 +524,7 @@ def _get_tasks(
     if has_lane_assignment_id:
         if user_group_identifier:
-            human_tasks_query = human_tasks_query.filter(
-                GroupModel.identifier == user_group_identifier
-            )
+            human_tasks_query = human_tasks_query.filter(GroupModel.identifier == user_group_identifier)
         else:
             human_tasks_query = human_tasks_query.filter(
                 HumanTaskModel.lane_assignment_id.is_not(None)  # type: ignore
@@ -524,16 +532,26 @@ def _get_tasks(
     else:
         human_tasks_query = human_tasks_query.filter(HumanTaskModel.lane_assignment_id.is_(None))  # type: ignore

-    potential_owner_usernames_from_group_concat_or_similar = (
-        _get_potential_owner_usernames(assigned_user)
-    )
+    potential_owner_usernames_from_group_concat_or_similar = _get_potential_owner_usernames(assigned_user)
+
+    process_model_identifier_column = ProcessInstanceModel.process_model_identifier
+    process_instance_status_column = ProcessInstanceModel.status.label("process_instance_status")  # type: ignore
+    user_username_column = UserModel.username.label("process_initiator_username")  # type: ignore
+    group_identifier_column = GroupModel.identifier.label("assigned_user_group_identifier")
+    if current_app.config["SPIFFWORKFLOW_BACKEND_DATABASE_TYPE"] == "postgres":
+        process_model_identifier_column = func.max(ProcessInstanceModel.process_model_identifier).label(
+            "process_model_identifier"
+        )
+        process_instance_status_column = func.max(ProcessInstanceModel.status).label("process_instance_status")
+        user_username_column = func.max(UserModel.username).label("process_initiator_username")
+        group_identifier_column = func.max(GroupModel.identifier).label("assigned_user_group_identifier")

     human_tasks = (
         human_tasks_query.add_columns(
-            ProcessInstanceModel.process_model_identifier,
-            ProcessInstanceModel.status.label("process_instance_status"),  # type: ignore
-            UserModel.username.label("process_initiator_username"),  # type: ignore
-            GroupModel.identifier.label("assigned_user_group_identifier"),
+            process_model_identifier_column,
+            process_instance_status_column,
+            user_username_column,
+            group_identifier_column,
             HumanTaskModel.task_name,
             HumanTaskModel.task_title,
             HumanTaskModel.process_model_display_name,
@@ -558,9 +576,7 @@ def _get_tasks(
     return make_response(jsonify(response_json), 200)


-def _prepare_form_data(
-    form_file: str, spiff_task: SpiffTask, process_model: ProcessModelInfo
-) -> dict:
+def _prepare_form_data(form_file: str, spiff_task: SpiffTask, process_model: ProcessModelInfo) -> dict:
     """Prepare_form_data."""
     if spiff_task.data is None:
         return {}

@@ -576,42 +592,29 @@ def _prepare_form_data(
         raise (
             ApiError(
                 error_code="error_loading_form",
-                message=(
-                    f"Could not load form schema from: {form_file}."
-                    f" Error was: {str(exception)}"
-                ),
+                message=f"Could not load form schema from: {form_file}. Error was: {str(exception)}",
                 status_code=400,
             )
         ) from exception
     except WorkflowTaskException as wfe:
         wfe.add_note(f"Error in Json Form File '{form_file}'")
-        api_error = ApiError.from_workflow_exception(
-            "instructions_error", str(wfe), exp=wfe
-        )
+        api_error = ApiError.from_workflow_exception("instructions_error", str(wfe), exp=wfe)
         api_error.file_name = form_file
         raise api_error


 def _render_jinja_template(unprocessed_template: str, spiff_task: SpiffTask) -> str:
     """Render_jinja_template."""
-    jinja_environment = jinja2.Environment(
-        autoescape=True, lstrip_blocks=True, trim_blocks=True
-    )
+    jinja_environment = jinja2.Environment(autoescape=True, lstrip_blocks=True, trim_blocks=True)
     try:
         template = jinja_environment.from_string(unprocessed_template)
         return template.render(**spiff_task.data)
     except jinja2.exceptions.TemplateError as template_error:
-        wfe = WorkflowTaskException(
-            str(template_error), task=spiff_task, exception=template_error
-        )
+        wfe = WorkflowTaskException(str(template_error), task=spiff_task, exception=template_error)
         if isinstance(template_error, TemplateSyntaxError):
             wfe.line_number = template_error.lineno
-            wfe.error_line = template_error.source.split("\n")[
-                template_error.lineno - 1
-            ]
-        wfe.add_note(
-            "Jinja2 template errors can happen when trying to display task data"
-        )
+            wfe.error_line = template_error.source.split("\n")[template_error.lineno - 1]
+        wfe.add_note("Jinja2 template errors can happen when trying to display task data")
         raise wfe from template_error
     except Exception as error:
         _type, _value, tb = exc_info()
@ -621,22 +624,20 @@ def _render_jinja_template(unprocessed_template: str, spiff_task: SpiffTask) ->
wfe.line_number = tb.tb_lineno wfe.line_number = tb.tb_lineno
wfe.error_line = unprocessed_template.split("\n")[tb.tb_lineno - 1] wfe.error_line = unprocessed_template.split("\n")[tb.tb_lineno - 1]
tb = tb.tb_next tb = tb.tb_next
wfe.add_note( wfe.add_note("Jinja2 template errors can happen when trying to display task data")
"Jinja2 template errors can happen when trying to display task data"
)
raise wfe from error raise wfe from error
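For reference, the collapsed jinja2 setup above behaves the same as before: autoescape guards task data that contains markup, and TemplateSyntaxError exposes a 1-based lineno that the surrounding code copies onto the WorkflowTaskException. A self-contained sketch of both behaviors:

# Sketch only: mirrors the Environment flags and error handling used above.
import jinja2
from jinja2 import TemplateSyntaxError

environment = jinja2.Environment(autoescape=True, lstrip_blocks=True, trim_blocks=True)
print(environment.from_string("Hello {{ name }}").render(name="<b>World</b>"))
# Hello &lt;b&gt;World&lt;/b&gt;

broken_template = "Hello {{ name "  # unclosed expression
try:
    environment.from_string(broken_template).render(name="World")
except TemplateSyntaxError as error:
    # lineno is 1-based, matching the wfe.line_number bookkeeping above
    print(error.lineno, broken_template.split("\n")[error.lineno - 1])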
 def _get_spiff_task_from_process_instance(
-    task_id: str,
+    task_guid: str,
     process_instance: ProcessInstanceModel,
     processor: Union[ProcessInstanceProcessor, None] = None,
 ) -> SpiffTask:
     """Get_spiff_task_from_process_instance."""
     if processor is None:
         processor = ProcessInstanceProcessor(process_instance)
-    task_uuid = uuid.UUID(task_id)
-    spiff_task = processor.bpmn_process_instance.get_task(task_uuid)
+    task_uuid = uuid.UUID(task_guid)
+    spiff_task = processor.bpmn_process_instance.get_task_from_id(task_uuid)
     if spiff_task is None:
         raise (
@@ -650,9 +651,7 @@ def _get_spiff_task_from_process_instance(
 # originally from: https://bitcoden.com/answers/python-nested-dictionary-update-value-where-any-nested-key-matches
-def _update_form_schema_with_task_data_as_needed(
-    in_dict: dict, task: Task, spiff_task: SpiffTask
-) -> None:
+def _update_form_schema_with_task_data_as_needed(in_dict: dict, task: Task, spiff_task: SpiffTask) -> None:
     """Update_nested."""
     if task.data is None:
         return None
@@ -664,12 +663,8 @@ def _update_form_schema_with_task_data_as_needed(
             if len(value) == 1:
                 first_element_in_value_list = value[0]
                 if isinstance(first_element_in_value_list, str):
-                    if first_element_in_value_list.startswith(
-                        "options_from_task_data_var:"
-                    ):
-                        task_data_var = first_element_in_value_list.replace(
-                            "options_from_task_data_var:", ""
-                        )
+                    if first_element_in_value_list.startswith("options_from_task_data_var:"):
+                        task_data_var = first_element_in_value_list.replace("options_from_task_data_var:", "")
                         if task_data_var not in task.data:
                             wte = WorkflowTaskException(
@@ -691,10 +686,7 @@ def _update_form_schema_with_task_data_as_needed(
                         select_options_from_task_data = task.data.get(task_data_var)
                         if isinstance(select_options_from_task_data, list):
-                            if all(
-                                "value" in d and "label" in d
-                                for d in select_options_from_task_data
-                            ):
+                            if all("value" in d and "label" in d for d in select_options_from_task_data):

                                 def map_function(
                                     task_data_select_option: TaskDataSelectOption,
@@ -736,17 +728,15 @@ def _get_potential_owner_usernames(assigned_user: AliasedClass) -> Any:
 def _find_human_task_or_raise(
     process_instance_id: int,
-    task_id: str,
+    task_guid: str,
     only_tasks_that_can_be_completed: bool = False,
 ) -> HumanTaskModel:
     if only_tasks_that_can_be_completed:
         human_task_query = HumanTaskModel.query.filter_by(
-            process_instance_id=process_instance_id, task_id=task_id, completed=False
+            process_instance_id=process_instance_id, task_id=task_guid, completed=False
         )
     else:
-        human_task_query = HumanTaskModel.query.filter_by(
-            process_instance_id=process_instance_id, task_id=task_id
-        )
+        human_task_query = HumanTaskModel.query.filter_by(process_instance_id=process_instance_id, task_id=task_guid)

     human_task: HumanTaskModel = human_task_query.first()
     if human_task is None:
@@ -754,10 +744,40 @@ def _find_human_task_or_raise(
             ApiError(
                 error_code="no_human_task",
                 message=(
-                    f"Cannot find a task to complete for task id '{task_id}' and"
+                    f"Cannot find a task to complete for task id '{task_guid}' and"
                     f" process instance {process_instance_id}."
                 ),
                 status_code=500,
             )
         )
     return human_task
+def _munge_form_ui_schema_based_on_hidden_fields_in_task_data(task: Task) -> None:
+    if task.form_ui_schema is None:
+        task.form_ui_schema = {}
+
+    if task.data and "form_ui_hidden_fields" in task.data:
+        hidden_fields = task.data["form_ui_hidden_fields"]
+        for hidden_field in hidden_fields:
+            hidden_field_parts = hidden_field.split(".")
+            relevant_depth_of_ui_schema = task.form_ui_schema
+            for ii, hidden_field_part in enumerate(hidden_field_parts):
+                if hidden_field_part not in relevant_depth_of_ui_schema:
+                    relevant_depth_of_ui_schema[hidden_field_part] = {}
+                relevant_depth_of_ui_schema = relevant_depth_of_ui_schema[hidden_field_part]
+                if len(hidden_field_parts) == ii + 1:
+                    relevant_depth_of_ui_schema["ui:widget"] = "hidden"
+def _get_task_model_from_guid_or_raise(task_guid: str, process_instance_id: int) -> TaskModel:
+    task_model: Optional[TaskModel] = TaskModel.query.filter_by(
+        guid=task_guid, process_instance_id=process_instance_id
+    ).first()
+    if task_model is None:
+        raise ApiError(
+            error_code="task_not_found",
+            message=f"Cannot find a task with guid '{task_guid}' for process instance '{process_instance_id}'",
+            status_code=400,
+        )
+    return task_model

View File

@@ -80,8 +80,7 @@ def verify_token(
             user_model = get_user_from_decoded_internal_token(decoded_token)
         except Exception as e:
             current_app.logger.error(
-                "Exception in verify_token getting user from decoded"
-                f" internal token. {e}"
+                f"Exception in verify_token getting user from decoded internal token. {e}"
             )
     elif "iss" in decoded_token.keys():
         user_info = None
@@ -90,22 +89,12 @@ def verify_token(
                 user_info = decoded_token
             except TokenExpiredError as token_expired_error:
                 # Try to refresh the token
-                user = UserService.get_user_by_service_and_service_id(
-                    decoded_token["iss"], decoded_token["sub"]
-                )
+                user = UserService.get_user_by_service_and_service_id(decoded_token["iss"], decoded_token["sub"])
                 if user:
                     refresh_token = AuthenticationService.get_refresh_token(user.id)
                     if refresh_token:
-                        auth_token: dict = (
-                            AuthenticationService.get_auth_token_from_refresh_token(
-                                refresh_token
-                            )
-                        )
-                        if (
-                            auth_token
-                            and "error" not in auth_token
-                            and "id_token" in auth_token
-                        ):
+                        auth_token: dict = AuthenticationService.get_auth_token_from_refresh_token(refresh_token)
+                        if auth_token and "error" not in auth_token and "id_token" in auth_token:
                             tld = current_app.config["THREAD_LOCAL_DATA"]
                             tld.new_access_token = auth_token["id_token"]
                             tld.new_id_token = auth_token["id_token"]
@@ -130,9 +119,7 @@ def verify_token(
                     status_code=401,
                 ) from e
             if (
-                user_info is not None
-                and "error" not in user_info
-                and "iss" in user_info
+                user_info is not None and "error" not in user_info and "iss" in user_info
             ):  # not sure what to test yet
                 user_model = (
                     UserModel.query.filter(UserModel.service == user_info["iss"])
@@ -154,9 +141,7 @@ def verify_token(
             )
         else:
-            current_app.logger.debug(
-                "token_type not in decode_token in verify_token"
-            )
+            current_app.logger.debug("token_type not in decode_token in verify_token")
             raise ApiError(
                 error_code="invalid_token",
                 message="Invalid token. Please log in.",
@@ -175,9 +160,7 @@ def verify_token(
     else:
         raise ApiError(error_code="no_user_id", message="Cannot get a user id")

-    raise ApiError(
-        error_code="invalid_token", message="Cannot validate token.", status_code=401
-    )
+    raise ApiError(error_code="invalid_token", message="Cannot validate token.", status_code=401)
 def set_new_access_token_in_cookie(
@@ -193,30 +176,20 @@ def set_new_access_token_in_cookie(
         "",
         current_app.config["SPIFFWORKFLOW_BACKEND_URL_FOR_FRONTEND"],
     )
-    if domain_for_frontend_cookie and domain_for_frontend_cookie.startswith(
-        "localhost"
-    ):
+    if domain_for_frontend_cookie and domain_for_frontend_cookie.startswith("localhost"):
         domain_for_frontend_cookie = None

     # fixme - we should not be passing the access token back to the client
     if hasattr(tld, "new_access_token") and tld.new_access_token:
-        response.set_cookie(
-            "access_token", tld.new_access_token, domain=domain_for_frontend_cookie
-        )
+        response.set_cookie("access_token", tld.new_access_token, domain=domain_for_frontend_cookie)

     # id_token is required for logging out since this gets passed back to the openid server
     if hasattr(tld, "new_id_token") and tld.new_id_token:
-        response.set_cookie(
-            "id_token", tld.new_id_token, domain=domain_for_frontend_cookie
-        )
+        response.set_cookie("id_token", tld.new_id_token, domain=domain_for_frontend_cookie)

     if hasattr(tld, "user_has_logged_out") and tld.user_has_logged_out:
-        response.set_cookie(
-            "id_token", "", max_age=0, domain=domain_for_frontend_cookie
-        )
-        response.set_cookie(
-            "access_token", "", max_age=0, domain=domain_for_frontend_cookie
-        )
+        response.set_cookie("id_token", "", max_age=0, domain=domain_for_frontend_cookie)
+        response.set_cookie("access_token", "", max_age=0, domain=domain_for_frontend_cookie)

     _clear_auth_tokens_from_thread_local_data()
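The logout branch above clears cookies by overwriting them with empty values and max_age=0; browsers treat Max-Age=0 as an instruction to delete the cookie, and domain=None scopes it to the serving host (the localhost case). A minimal Flask sketch of that behavior with an illustrative route:

# Sketch only: route name and app are illustrative, not part of the backend.
from flask import Flask, make_response

app = Flask(__name__)

@app.route("/logout-demo")
def logout_demo():
    domain_for_frontend_cookie = None  # as when the frontend URL starts with "localhost"
    response = make_response("logged out")
    response.set_cookie("id_token", "", max_age=0, domain=domain_for_frontend_cookie)
    response.set_cookie("access_token", "", max_age=0, domain=domain_for_frontend_cookie)
    return response

with app.test_client() as client:
    # Each Set-Cookie header carries Max-Age=0, telling the browser to drop it
    print(client.get("/logout-demo").headers.getlist("Set-Cookie"))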
@@ -236,9 +209,7 @@ def encode_auth_token(sub: str, token_type: Optional[str] = None) -> str:
         secret_key = current_app.config.get("SECRET_KEY")
     else:
         current_app.logger.error("Missing SECRET_KEY in encode_auth_token")
-        raise ApiError(
-            error_code="encode_error", message="Missing SECRET_KEY in encode_auth_token"
-        )
+        raise ApiError(error_code="encode_error", message="Missing SECRET_KEY in encode_auth_token")
     return jwt.encode(
         payload,
         str(secret_key),
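The jwt.encode call here signs the payload with the Flask SECRET_KEY. A minimal PyJWT sketch of the round trip, with an illustrative payload, secret, and algorithm (the real values come from app config); the decode mirrors get_decoded_token further down, which skips signature verification:

# Sketch only: payload shape, secret, and algorithm are made up for illustration.
import jwt

secret_key = "example-secret"  # the app reads current_app.config["SECRET_KEY"]
payload = {"sub": "service:internal::service_id:some_user", "token_type": "internal"}
token = jwt.encode(payload, str(secret_key), algorithm="HS256")

decoded = jwt.decode(token, options={"verify_signature": False})
print(decoded["token_type"])  # internal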
@@ -249,9 +220,7 @@ def encode_auth_token(sub: str, token_type: Optional[str] = None) -> str:
 def login(redirect_url: str = "/") -> Response:
     """Login."""
     state = AuthenticationService.generate_state(redirect_url)
-    login_redirect_url = AuthenticationService().get_login_redirect_url(
-        state.decode("UTF-8")
-    )
+    login_redirect_url = AuthenticationService().get_login_redirect_url(state.decode("UTF-8"))
     return redirect(login_redirect_url)
@@ -281,9 +250,7 @@ def login_return(code: str, state: str, session_state: str = "") -> Optional[Res
         g.user = user_model.id
         g.token = auth_token_object["id_token"]
         if "refresh_token" in auth_token_object:
-            AuthenticationService.store_refresh_token(
-                user_model.id, auth_token_object["refresh_token"]
-            )
+            AuthenticationService.store_refresh_token(user_model.id, auth_token_object["refresh_token"])
         redirect_url = state_redirect_url
         tld = current_app.config["THREAD_LOCAL_DATA"]
         tld.new_access_token = auth_token_object["id_token"]
@@ -325,9 +292,7 @@ def login_api() -> Response:
     """Login_api."""
     redirect_url = "/v1.0/login_api_return"
     state = AuthenticationService.generate_state(redirect_url)
-    login_redirect_url = AuthenticationService().get_login_redirect_url(
-        state.decode("UTF-8"), redirect_url
-    )
+    login_redirect_url = AuthenticationService().get_login_redirect_url(state.decode("UTF-8"), redirect_url)
     return redirect(login_redirect_url)
@@ -335,9 +300,7 @@ def login_api_return(code: str, state: str, session_state: str) -> str:
     state_dict = ast.literal_eval(base64.b64decode(state).decode("utf-8"))
     state_dict["redirect_url"]

-    auth_token_object = AuthenticationService().get_auth_token_object(
-        code, "/v1.0/login_api_return"
-    )
+    auth_token_object = AuthenticationService().get_auth_token_object(code, "/v1.0/login_api_return")
     access_token: str = auth_token_object["access_token"]
     if access_token is None:
         raise MissingAccessTokenError("Cannot find the access token for the request")
@@ -365,16 +328,12 @@ def get_decoded_token(token: str) -> Optional[Dict]:
     try:
         decoded_token = jwt.decode(token, options={"verify_signature": False})
     except Exception as e:
-        raise ApiError(
-            error_code="invalid_token", message="Cannot decode token."
-        ) from e
+        raise ApiError(error_code="invalid_token", message="Cannot decode token.") from e
     else:
         if "token_type" in decoded_token or "iss" in decoded_token:
             return decoded_token
         else:
-            current_app.logger.error(
-                f"Unknown token type in get_decoded_token: token: {token}"
-            )
+            current_app.logger.error(f"Unknown token type in get_decoded_token: token: {token}")
             raise ApiError(
                 error_code="unknown_token",
                 message="Unknown token type in get_decoded_token",
@@ -397,9 +356,7 @@ def get_user_from_decoded_internal_token(decoded_token: dict) -> Optional[UserMo
     service = parts[0].split(":")[1]
     service_id = parts[1].split(":")[1]
     user: UserModel = (
-        UserModel.query.filter(UserModel.service == service)
-        .filter(UserModel.service_id == service_id)
-        .first()
+        UserModel.query.filter(UserModel.service == service).filter(UserModel.service_id == service_id).first()
     )
     if user:
         return user

View File

@@ -98,11 +98,7 @@ def create_group(group_name: str) -> flask.wrappers.Response:
     try:
         db.session.add(group)
     except IntegrityError as exception:
-        raise (
-            ApiError(
-                error_code="integrity_error", message=repr(exception), status_code=500
-            )
-        ) from exception
+        raise (ApiError(error_code="integrity_error", message=repr(exception), status_code=500)) from exception
     db.session.commit()
     return Response(json.dumps({"id": group.id}), status=201, mimetype=APPLICATION_JSON)
@@ -133,9 +129,7 @@ def assign_user_to_group() -> flask.wrappers.Response:
     user = get_user_from_request()
     group = get_group_from_request()

-    user_group_assignment = UserGroupAssignmentModel.query.filter_by(
-        user_id=user.id, group_id=group.id
-    ).first()
+    user_group_assignment = UserGroupAssignmentModel.query.filter_by(user_id=user.id, group_id=group.id).first()
     if user_group_assignment is not None:
         raise (
             ApiError(
@@ -162,9 +156,7 @@ def remove_user_from_group() -> flask.wrappers.Response:
     user = get_user_from_request()
     group = get_group_from_request()

-    user_group_assignment = UserGroupAssignmentModel.query.filter_by(
-        user_id=user.id, group_id=group.id
-    ).first()
+    user_group_assignment = UserGroupAssignmentModel.query.filter_by(user_id=user.id, group_id=group.id).first()
     if user_group_assignment is None:
         raise (
             ApiError(

View File

@@ -1,13 +1,29 @@
 """Users_controller."""
+from typing import Any
+from typing import Dict
+
 import flask
 from flask import current_app
 from flask import g
 from flask import jsonify
 from flask import make_response

+from spiffworkflow_backend.exceptions.api_error import ApiError
 from spiffworkflow_backend.models.user import UserModel


+def user_exists_by_username(body: Dict[str, Any]) -> flask.wrappers.Response:
+    if "username" not in body:
+        raise ApiError(
+            error_code="username_not_given",
+            message="Username could not be found in post body.",
+            status_code=400,
+        )
+    username = body["username"]
+    found_user = UserModel.query.filter_by(username=username).first()
+    return make_response(jsonify({"user_found": found_user is not None}), 200)
 def user_search(username_prefix: str) -> flask.wrappers.Response:
     """User_search."""
     found_users = UserModel.query.filter(UserModel.username.like(f"{username_prefix}%")).all()  # type: ignore
@@ -24,9 +40,6 @@ def user_group_list_for_current_user() -> flask.wrappers.Response:
     groups = g.user.groups
     # TODO: filter out the default group and have a way to know what is the default group
     group_identifiers = [
-        i.identifier
-        for i in groups
-        if i.identifier
-        != current_app.config["SPIFFWORKFLOW_BACKEND_DEFAULT_USER_GROUP"]
+        i.identifier for i in groups if i.identifier != current_app.config["SPIFFWORKFLOW_BACKEND_DEFAULT_USER_GROUP"]
     ]
     return make_response(jsonify(sorted(group_identifiers)), 200)

View File

@@ -9,7 +9,6 @@ from spiffworkflow_backend.models.process_instance import ProcessInstanceModel
 from spiffworkflow_backend.models.script_attributes_context import (
     ScriptAttributesContext,
 )
-from spiffworkflow_backend.models.spiff_step_details import SpiffStepDetailsModel
 from spiffworkflow_backend.scripts.script import Script
@@ -36,26 +35,13 @@ class DeleteProcessInstancesWithCriteria(Script):
             delete_criteria.append(
                 (ProcessInstanceModel.process_model_identifier == criteria["name"])
                 & ProcessInstanceModel.status.in_(criteria["status"])  # type: ignore
-                & (
-                    ProcessInstanceModel.updated_at_in_seconds
-                    < (delete_time - criteria["last_updated_delta"])
-                )
+                & (ProcessInstanceModel.updated_at_in_seconds < (delete_time - criteria["last_updated_delta"]))
             )

-        results = (
-            ProcessInstanceModel.query.filter(or_(*delete_criteria)).limit(100).all()
-        )
+        results = ProcessInstanceModel.query.filter(or_(*delete_criteria)).limit(100).all()
         rows_affected = len(results)

         if rows_affected > 0:
-            ids_to_delete = list(map(lambda r: r.id, results))  # type: ignore
-
-            step_details = SpiffStepDetailsModel.query.filter(
-                SpiffStepDetailsModel.process_instance_id.in_(ids_to_delete)  # type: ignore
-            ).all()
-
-            for deletion in step_details:
-                db.session.delete(deletion)
             for deletion in results:
                 db.session.delete(deletion)
             db.session.commit()

View File

@@ -20,12 +20,7 @@ class FactService(Script):
         return """Just your basic class that can pull in data from a few api endpoints and
         do a basic task."""

-    def run(
-        self,
-        script_attributes_context: ScriptAttributesContext,
-        *args: Any,
-        **kwargs: Any
-    ) -> Any:
+    def run(self, script_attributes_context: ScriptAttributesContext, *args: Any, **kwargs: Any) -> Any:
         """Run."""
         if "type" not in kwargs:
             raise Exception("Please specify a 'type' of fact as a keyword argument.")
@@ -35,10 +30,7 @@ class FactService(Script):
         if fact == "cat":
             details = "The cat in the hat"  # self.get_cat()
         elif fact == "norris":
-            details = (
-                "Chuck Norris doesnt read books. He stares them down until he gets the"
-                " information he wants."
-            )
+            details = "Chuck Norris doesnt read books. He stares them down until he gets the information he wants."
         elif fact == "buzzword":
             details = "Move the Needle."  # self.get_buzzword()
         else:

View File

@@ -34,8 +34,7 @@ class GetAllPermissions(Script):
             .join(GroupModel, GroupModel.id == PrincipalModel.group_id)
             .join(
                 PermissionTargetModel,
-                PermissionTargetModel.id
-                == PermissionAssignmentModel.permission_target_id,
+                PermissionTargetModel.id == PermissionAssignmentModel.permission_target_id,
             )
             .add_columns(
                 PermissionAssignmentModel.permission,
@@ -46,9 +45,7 @@ class GetAllPermissions(Script):
         permissions: OrderedDict[tuple[str, str], list[str]] = OrderedDict()
         for pa in permission_assignments:
-            permissions.setdefault((pa.group_identifier, pa.uri), []).append(
-                pa.permission
-            )
+            permissions.setdefault((pa.group_identifier, pa.uri), []).append(pa.permission)

         def replace_suffix(string: str, old: str, new: str) -> str:
             """Replace_suffix."""

View File

@@ -20,12 +20,7 @@ class GetCurrentUser(Script):
         """Get_description."""
         return """Return the current user."""

-    def run(
-        self,
-        script_attributes_context: ScriptAttributesContext,
-        *_args: Any,
-        **kwargs: Any
-    ) -> Any:
+    def run(self, script_attributes_context: ScriptAttributesContext, *_args: Any, **kwargs: Any) -> Any:
         """Run."""
         # dump the user using our json encoder and then load it back up as a dict
         # to remove unwanted field types

View File

@@ -27,12 +27,7 @@ class GetDataSizes(Script):
         return """Returns a dictionary of information about the size of task data and
         the python environment for the currently running process."""

-    def run(
-        self,
-        script_attributes_context: ScriptAttributesContext,
-        *_args: Any,
-        **kwargs: Any
-    ) -> Any:
+    def run(self, script_attributes_context: ScriptAttributesContext, *_args: Any, **kwargs: Any) -> Any:
         """Run."""
         if script_attributes_context.task is None:
             raise TaskNotGivenToScriptError(
@@ -42,8 +37,7 @@ class GetDataSizes(Script):
         workflow = script_attributes_context.task.workflow
         task_data_size = ProcessInstanceProcessor.get_task_data_size(workflow)
         task_data_keys_by_task = {
-            t.task_spec.name: sorted(t.data.keys())
-            for t in ProcessInstanceProcessor.get_tasks_with_data(workflow)
+            t.task_spec.name: sorted(t.data.keys()) for t in ProcessInstanceProcessor.get_tasks_with_data(workflow)
         }
         python_env_size = ProcessInstanceProcessor.get_python_env_size(workflow)
         python_env_keys = workflow.script_engine.environment.user_defined_state().keys()

View File

@@ -42,8 +42,6 @@ class GetEncodedFileData(Script):
         ).first()

         base64_value = base64.b64encode(file_data.contents).decode("ascii")
-        encoded_file_data = (
-            f"data:{file_data.mimetype};name={file_data.filename};base64,{base64_value}"
-        )
+        encoded_file_data = f"data:{file_data.mimetype};name={file_data.filename};base64,{base64_value}"
         return encoded_file_data
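The collapsed f-string above assembles a data URL embedding the stored file bytes as base64. A standalone sketch with made-up file contents:

# Sketch only: mimetype, filename, and contents are illustrative.
import base64

mimetype, filename, contents = "text/plain", "hello.txt", b"hello world"
base64_value = base64.b64encode(contents).decode("ascii")
encoded_file_data = f"data:{mimetype};name={filename};base64,{base64_value}"
print(encoded_file_data)  # data:text/plain;name=hello.txt;base64,aGVsbG8gd29ybGQ=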

View File

@@ -19,11 +19,6 @@ class GetEnv(Script):
         """Get_description."""
         return """Returns the current environment - ie testing, staging, production."""

-    def run(
-        self,
-        script_attributes_context: ScriptAttributesContext,
-        *_args: Any,
-        **kwargs: Any
-    ) -> Any:
+    def run(self, script_attributes_context: ScriptAttributesContext, *_args: Any, **kwargs: Any) -> Any:
         """Run."""
         return script_attributes_context.environment_identifier

View File

@@ -21,11 +21,6 @@ class GetFrontendUrl(Script):
         """Get_description."""
         return """Return the url to the frontend."""

-    def run(
-        self,
-        script_attributes_context: ScriptAttributesContext,
-        *args: Any,
-        **kwargs: Any
-    ) -> Any:
+    def run(self, script_attributes_context: ScriptAttributesContext, *args: Any, **kwargs: Any) -> Any:
         """Run."""
         return current_app.config["SPIFFWORKFLOW_BACKEND_URL_FOR_FRONTEND"]

View File

@@ -32,8 +32,7 @@ class GetGroupMembers(Script):
         group = GroupModel.query.filter_by(identifier=group_identifier).first()
         if group is None:
             raise GroupNotFoundError(
-                "Script 'get_group_members' could not find group with identifier"
-                f" '{group_identifier}'."
+                f"Script 'get_group_members' could not find group with identifier '{group_identifier}'."
             )

         usernames = [u.username for u in group.users]

View File

@@ -24,12 +24,7 @@ class GetLocaltime(Script):
         return """Converts a Datetime object into a Datetime object for a specific timezone.
         Defaults to US/Eastern."""

-    def run(
-        self,
-        script_attributes_context: ScriptAttributesContext,
-        *args: Any,
-        **kwargs: Any
-    ) -> datetime:
+    def run(self, script_attributes_context: ScriptAttributesContext, *args: Any, **kwargs: Any) -> datetime:
         """Run."""
         if len(args) > 0 or "datetime" in kwargs:
             if "datetime" in kwargs:

View File

@@ -19,16 +19,9 @@ class GetProcessInfo(Script):
         """Get_description."""
         return """Returns a dictionary of information about the currently running process."""

-    def run(
-        self,
-        script_attributes_context: ScriptAttributesContext,
-        *_args: Any,
-        **kwargs: Any
-    ) -> Any:
+    def run(self, script_attributes_context: ScriptAttributesContext, *_args: Any, **kwargs: Any) -> Any:
         """Run."""
         return {
             "process_instance_id": script_attributes_context.process_instance_id,
-            "process_model_identifier": (
-                script_attributes_context.process_model_identifier
-            ),
+            "process_model_identifier": script_attributes_context.process_model_identifier,
         }

View File

@@ -26,9 +26,7 @@ class GetProcessInitiatorUser(Script):
     ) -> Any:
         """Run."""
         process_instance = (
-            ProcessInstanceModel.query.filter_by(
-                id=script_attributes_context.process_instance_id
-            )
+            ProcessInstanceModel.query.filter_by(id=script_attributes_context.process_instance_id)
             .join(UserModel, UserModel.id == ProcessInstanceModel.process_initiator_id)
             .first()
         )

Some files were not shown because too many files have changed in this diff.