Merge branch 'main' into cullerton

# Conflicts:
#	src/spiffworkflow_backend/routes/process_api_blueprint.py
This commit is contained in:
mike cullerton 2022-06-24 11:42:29 -04:00
commit 9b08c79ad0
20 changed files with 264 additions and 178 deletions

View File

@ -15,12 +15,12 @@ if [ "${DOWNGRADE_DB:-}" = "true" ]; then
poetry run flask db downgrade poetry run flask db downgrade
fi fi
if [ "${UPGRADE_DB:-}" = "true" ]; then if [[ "${SPIFFWORKFLOW_BACKEND_UPGRADE_DB:-}" == "true" ]]; then
echo 'Upgrading database...' echo 'Upgrading database...'
poetry run flask db upgrade poetry run flask db upgrade
fi fi
port="${PORT0:-}" port="${SPIFFWORKFLOW_BACKEND_PORT:-}"
if [[ -z "$port" ]]; then if [[ -z "$port" ]]; then
port=7000 port=7000
fi fi
@ -32,4 +32,4 @@ if [[ "${APPLICATION_ROOT:-}" != "/" ]]; then
fi fi
# THIS MUST BE THE LAST COMMAND! # THIS MUST BE THE LAST COMMAND!
exec poetry run gunicorn ${additional_args} --bind "0.0.0.0:$PORT0" --workers=3 --timeout 90 --log-level debug wsgi:app exec poetry run gunicorn ${additional_args} --bind "0.0.0.0:$SPIFFWORKFLOW_BACKEND_PORT" --workers=3 --timeout 90 --log-level debug wsgi:app

View File

@ -17,4 +17,4 @@ if [[ "${RUN_WITH_DAEMON:-}" != "false" ]]; then
additional_args="${additional_args} -d" additional_args="${additional_args} -d"
fi fi
docker compose up --build $additional_args docker compose up --wait --build $additional_args

View File

@ -7,6 +7,26 @@ function error_handler() {
trap 'error_handler ${LINENO} $?' ERR trap 'error_handler ${LINENO} $?' ERR
set -o errtrace -o errexit -o nounset -o pipefail set -o errtrace -o errexit -o nounset -o pipefail
if [[ -z "${FLASK_ENV:-}" ]]; then
export FLASK_ENV=staging
fi
if [[ -z "${FLASK_SESSION_SECRET_KEY:-}" ]]; then
export FLASK_SESSION_SECRET_KEY=staging_super_secret_key_dont_tell_anyone
fi
if [[ -z "${SPIFFWORKFLOW_BACKEND_MYSQL_ROOT_PASSWORD:-}" ]]; then
export SPIFFWORKFLOW_BACKEND_MYSQL_ROOT_PASSWORD=St4g3Th1515
fi
if [[ -z "${SPIFFWORKFLOW_BACKEND_DATABASE_NAME:-}" ]]; then
export SPIFFWORKFLOW_BACKEND_DATABASE_NAME=spiffworkflow_backend_staging
fi
if [[ -z "${SPIFFWORKFLOW_BACKEND_DATABASE_DOCKER_RESTART_POLICY:-}" ]]; then
export SPIFFWORKFLOW_BACKEND_DATABASE_DOCKER_RESTART_POLICY=always
fi
git pull git pull
./bin/docker_restart ./bin/docker_restart
./bin/wait_for_server_to_be_up ./bin/wait_for_server_to_be_up

View File

@ -5,16 +5,16 @@ services:
image: mysql:8.0.29 image: mysql:8.0.29
cap_add: cap_add:
- SYS_NICE - SYS_NICE
restart: always restart: "${SPIFFWORKFLOW_BACKEND_DATABASE_DOCKER_RESTART_POLICY:-no}"
environment: environment:
- MYSQL_DATABASE=spiffworkflow_backend_staging - MYSQL_DATABASE=${SPIFFWORKFLOW_BACKEND_DATABASE_NAME:-spiffworkflow_backend_development}
- MYSQL_ROOT_PASSWORD=St4g3Th1515 - MYSQL_ROOT_PASSWORD=${SPIFFWORKFLOW_BACKEND_MYSQL_ROOT_DATABASE:-my-secret-pw}
ports: ports:
- "3306" - "3306"
volumes: volumes:
- spiffworkflow_backend:/var/lib/mysql - spiffworkflow_backend:/var/lib/mysql
healthcheck: healthcheck:
test: mysql --user=root --password=St4g3Th1515 -e 'select 1' spiffworkflow_backend_staging test: mysql --user=root --password=${SPIFFWORKFLOW_BACKEND_MYSQL_ROOT_DATABASE:-my-secret-pw} -e 'select 1' ${SPIFFWORKFLOW_BACKEND_DATABASE_NAME:-spiffworkflow_backend_development}
spiffworkflow-backend: spiffworkflow-backend:
container_name: spiffworkflow-backend container_name: spiffworkflow-backend
@ -27,19 +27,19 @@ services:
context: . context: .
environment: environment:
- APPLICATION_ROOT=/ - APPLICATION_ROOT=/
- FLASK_ENV=staging - FLASK_ENV=${FLASK_ENV:-development}
- FLASK_SESSION_SECRET_KEY=super_secret_key - FLASK_SESSION_SECRET_KEY=${FLASK_SESSION_SECRET_KEY:-super_secret_key}
- DEVELOPMENT=true - SPIFFWORKFLOW_BACKEND_PORT=7000
- LDAP_URL=mock - SPIFFWORKFLOW_BACKEND_UPGRADE_DB=true
- PORT0=7000 - SPIFFWORKFLOW_BACKEND_DATABASE_URI=mysql+mysqlconnector://root:${SPIFFWORKFLOW_BACKEND_MYSQL_ROOT_DATABASE:-my-secret-pw}@db/${SPIFFWORKFLOW_BACKEND_DATABASE_NAME:-spiffworkflow_backend_development}
- PRODUCTION=false
- UPGRADE_DB=true
- DATABASE_URI=mysql+mysqlconnector://root:St4g3Th1515@db/spiffworkflow_backend_staging
- BPMN_SPEC_ABSOLUTE_DIR=/app/process_models - BPMN_SPEC_ABSOLUTE_DIR=/app/process_models
- SPIFFWORKFLOW_BACKEND_LOAD_FIXTURE_DATA=${SPIFFWORKFLOW_BACKEND_LOAD_FIXTURE_DATA:-false}
ports: ports:
- "7000:7000" - "7000:7000"
volumes: volumes:
- ${BPMN_SPEC_ABSOLUTE_DIR}:/app/process_models - ${BPMN_SPEC_ABSOLUTE_DIR:-./../sample-process-models}:/app/process_models
healthcheck:
test: curl localhost:7000/v1.0/status --fail
volumes: volumes:
spiffworkflow_backend: spiffworkflow_backend:

61
poetry.lock generated
View File

@ -1047,20 +1047,21 @@ mypy-extensions = "*"
[[package]] [[package]]
name = "mypy" name = "mypy"
version = "0.910" version = "0.961"
description = "Optional static typing for Python" description = "Optional static typing for Python"
category = "dev" category = "dev"
optional = false optional = false
python-versions = ">=3.5" python-versions = ">=3.6"
[package.dependencies] [package.dependencies]
mypy-extensions = ">=0.4.3,<0.5.0" mypy-extensions = ">=0.4.3"
toml = "*" tomli = {version = ">=1.1.0", markers = "python_version < \"3.11\""}
typing-extensions = ">=3.7.4" typing-extensions = ">=3.10"
[package.extras] [package.extras]
dmypy = ["psutil (>=4.0)"] dmypy = ["psutil (>=4.0)"]
python2 = ["typed-ast (>=1.4.0,<1.5.0)"] python2 = ["typed-ast (>=1.4.0,<2)"]
reports = ["lxml"]
[[package]] [[package]]
name = "mypy-extensions" name = "mypy-extensions"
@ -2033,7 +2034,7 @@ testing = ["pytest (>=6)", "pytest-checkdocs (>=2.4)", "pytest-flake8", "pytest-
[metadata] [metadata]
lock-version = "1.1" lock-version = "1.1"
python-versions = "^3.9" python-versions = "^3.9"
content-hash = "b4302cbeba2b6c5a3777a4b3eb4549fbc6e910ab832b2bf0d573110564d67690" content-hash = "5aae09a360d44ea3e42a0152ba755e3cb7d464a8fbf8d2fb8bdaf6ece66cbb57"
[metadata.files] [metadata.files]
alabaster = [ alabaster = [
@ -2667,29 +2668,29 @@ monkeytype = [
{file = "MonkeyType-22.2.0.tar.gz", hash = "sha256:6b0c00b49dcc5095a2c08d28246cf005e05673fc51f64d203f9a6bca2036dfab"}, {file = "MonkeyType-22.2.0.tar.gz", hash = "sha256:6b0c00b49dcc5095a2c08d28246cf005e05673fc51f64d203f9a6bca2036dfab"},
] ]
mypy = [ mypy = [
{file = "mypy-0.910-cp35-cp35m-macosx_10_9_x86_64.whl", hash = "sha256:a155d80ea6cee511a3694b108c4494a39f42de11ee4e61e72bc424c490e46457"}, {file = "mypy-0.961-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:697540876638ce349b01b6786bc6094ccdaba88af446a9abb967293ce6eaa2b0"},
{file = "mypy-0.910-cp35-cp35m-manylinux1_x86_64.whl", hash = "sha256:b94e4b785e304a04ea0828759172a15add27088520dc7e49ceade7834275bedb"}, {file = "mypy-0.961-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:b117650592e1782819829605a193360a08aa99f1fc23d1d71e1a75a142dc7e15"},
{file = "mypy-0.910-cp35-cp35m-manylinux2010_x86_64.whl", hash = "sha256:088cd9c7904b4ad80bec811053272986611b84221835e079be5bcad029e79dd9"}, {file = "mypy-0.961-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:bdd5ca340beffb8c44cb9dc26697628d1b88c6bddf5c2f6eb308c46f269bb6f3"},
{file = "mypy-0.910-cp35-cp35m-win_amd64.whl", hash = "sha256:adaeee09bfde366d2c13fe6093a7df5df83c9a2ba98638c7d76b010694db760e"}, {file = "mypy-0.961-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:3e09f1f983a71d0672bbc97ae33ee3709d10c779beb613febc36805a6e28bb4e"},
{file = "mypy-0.910-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:ecd2c3fe726758037234c93df7e98deb257fd15c24c9180dacf1ef829da5f921"}, {file = "mypy-0.961-cp310-cp310-win_amd64.whl", hash = "sha256:e999229b9f3198c0c880d5e269f9f8129c8862451ce53a011326cad38b9ccd24"},
{file = "mypy-0.910-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:d9dd839eb0dc1bbe866a288ba3c1afc33a202015d2ad83b31e875b5905a079b6"}, {file = "mypy-0.961-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:b24be97351084b11582fef18d79004b3e4db572219deee0212078f7cf6352723"},
{file = "mypy-0.910-cp36-cp36m-manylinux2010_x86_64.whl", hash = "sha256:3e382b29f8e0ccf19a2df2b29a167591245df90c0b5a2542249873b5c1d78212"}, {file = "mypy-0.961-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:f4a21d01fc0ba4e31d82f0fff195682e29f9401a8bdb7173891070eb260aeb3b"},
{file = "mypy-0.910-cp36-cp36m-win_amd64.whl", hash = "sha256:53fd2eb27a8ee2892614370896956af2ff61254c275aaee4c230ae771cadd885"}, {file = "mypy-0.961-cp36-cp36m-win_amd64.whl", hash = "sha256:439c726a3b3da7ca84a0199a8ab444cd8896d95012c4a6c4a0d808e3147abf5d"},
{file = "mypy-0.910-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:b6fb13123aeef4a3abbcfd7e71773ff3ff1526a7d3dc538f3929a49b42be03f0"}, {file = "mypy-0.961-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:5a0b53747f713f490affdceef835d8f0cb7285187a6a44c33821b6d1f46ed813"},
{file = "mypy-0.910-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:e4dab234478e3bd3ce83bac4193b2ecd9cf94e720ddd95ce69840273bf44f6de"}, {file = "mypy-0.961-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:0e9f70df36405c25cc530a86eeda1e0867863d9471fe76d1273c783df3d35c2e"},
{file = "mypy-0.910-cp37-cp37m-manylinux2010_x86_64.whl", hash = "sha256:7df1ead20c81371ccd6091fa3e2878559b5c4d4caadaf1a484cf88d93ca06703"}, {file = "mypy-0.961-cp37-cp37m-win_amd64.whl", hash = "sha256:b88f784e9e35dcaa075519096dc947a388319cb86811b6af621e3523980f1c8a"},
{file = "mypy-0.910-cp37-cp37m-win_amd64.whl", hash = "sha256:0aadfb2d3935988ec3815952e44058a3100499f5be5b28c34ac9d79f002a4a9a"}, {file = "mypy-0.961-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:d5aaf1edaa7692490f72bdb9fbd941fbf2e201713523bdb3f4038be0af8846c6"},
{file = "mypy-0.910-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:ec4e0cd079db280b6bdabdc807047ff3e199f334050db5cbb91ba3e959a67504"}, {file = "mypy-0.961-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:9f5f5a74085d9a81a1f9c78081d60a0040c3efb3f28e5c9912b900adf59a16e6"},
{file = "mypy-0.910-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:119bed3832d961f3a880787bf621634ba042cb8dc850a7429f643508eeac97b9"}, {file = "mypy-0.961-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:f4b794db44168a4fc886e3450201365c9526a522c46ba089b55e1f11c163750d"},
{file = "mypy-0.910-cp38-cp38-manylinux2010_x86_64.whl", hash = "sha256:866c41f28cee548475f146aa4d39a51cf3b6a84246969f3759cb3e9c742fc072"}, {file = "mypy-0.961-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:64759a273d590040a592e0f4186539858c948302c653c2eac840c7a3cd29e51b"},
{file = "mypy-0.910-cp38-cp38-win_amd64.whl", hash = "sha256:ceb6e0a6e27fb364fb3853389607cf7eb3a126ad335790fa1e14ed02fba50811"}, {file = "mypy-0.961-cp38-cp38-win_amd64.whl", hash = "sha256:63e85a03770ebf403291ec50097954cc5caf2a9205c888ce3a61bd3f82e17569"},
{file = "mypy-0.910-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:1a85e280d4d217150ce8cb1a6dddffd14e753a4e0c3cf90baabb32cefa41b59e"}, {file = "mypy-0.961-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:5f1332964963d4832a94bebc10f13d3279be3ce8f6c64da563d6ee6e2eeda932"},
{file = "mypy-0.910-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:42c266ced41b65ed40a282c575705325fa7991af370036d3f134518336636f5b"}, {file = "mypy-0.961-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:006be38474216b833eca29ff6b73e143386f352e10e9c2fbe76aa8549e5554f5"},
{file = "mypy-0.910-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:3c4b8ca36877fc75339253721f69603a9c7fdb5d4d5a95a1a1b899d8b86a4de2"}, {file = "mypy-0.961-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:9940e6916ed9371809b35b2154baf1f684acba935cd09928952310fbddaba648"},
{file = "mypy-0.910-cp39-cp39-manylinux2010_x86_64.whl", hash = "sha256:c0df2d30ed496a08de5daed2a9ea807d07c21ae0ab23acf541ab88c24b26ab97"}, {file = "mypy-0.961-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:a5ea0875a049de1b63b972456542f04643daf320d27dc592d7c3d9cd5d9bf950"},
{file = "mypy-0.910-cp39-cp39-win_amd64.whl", hash = "sha256:c6c2602dffb74867498f86e6129fd52a2770c48b7cd3ece77ada4fa38f94eba8"}, {file = "mypy-0.961-cp39-cp39-win_amd64.whl", hash = "sha256:1ece702f29270ec6af25db8cf6185c04c02311c6bb21a69f423d40e527b75c56"},
{file = "mypy-0.910-py3-none-any.whl", hash = "sha256:ef565033fa5a958e62796867b1df10c40263ea9ded87164d67572834e57a174d"}, {file = "mypy-0.961-py3-none-any.whl", hash = "sha256:03c6cc893e7563e7b2949b969e63f02c000b32502a1b4d1314cabe391aa87d66"},
{file = "mypy-0.910.tar.gz", hash = "sha256:704098302473cb31a218f1775a873b376b30b4c18229421e9e9dc8916fd16150"}, {file = "mypy-0.961.tar.gz", hash = "sha256:f730d56cb924d371c26b8eaddeea3cc07d78ff51c521c6d04899ac6904b75492"},
] ]
mypy-extensions = [ mypy-extensions = [
{file = "mypy_extensions-0.4.3-py2.py3-none-any.whl", hash = "sha256:090fedd75945a69ae91ce1303b5824f428daf5a028d2f6ab8a299250a846f15d"}, {file = "mypy_extensions-0.4.3-py2.py3-none-any.whl", hash = "sha256:090fedd75945a69ae91ce1303b5824f428daf5a028d2f6ab8a299250a846f15d"},

View File

@ -51,7 +51,7 @@ types-pytz = "^2022.1.0"
pytest = "^6.2.5" pytest = "^6.2.5"
coverage = {extras = ["toml"], version = "^6.1"} coverage = {extras = ["toml"], version = "^6.1"}
safety = "^1.10.3" safety = "^1.10.3"
mypy = "^0.910" mypy = ">=0.961"
typeguard = "^2.13.2" typeguard = "^2.13.2"
xdoctest = {extras = ["colors"], version = "^1.0.0"} xdoctest = {extras = ["colors"], version = "^1.0.0"}
sphinx = "^4.3.0" sphinx = "^4.3.0"

View File

@ -10,6 +10,21 @@ security:
- jwt: ["secret"] - jwt: ["secret"]
paths: paths:
/status:
get:
security: []
operationId: spiffworkflow_backend.routes.process_api_blueprint.status
summary: Returns 200 if the server is Responding
tags:
- Liveness
- Status
responses:
"200":
description: The server is running.
content:
application/json:
schema:
$ref: "#components/schemas/OkTrue"
/process-groups: /process-groups:
parameters: parameters:
- name: page - name: page

View File

@ -21,7 +21,7 @@ def setup_logger_for_sql_statements(app: Flask) -> None:
def setup_database_uri(app: Flask) -> None: def setup_database_uri(app: Flask) -> None:
"""Setup_database_uri.""" """Setup_database_uri."""
if os.environ.get("DATABASE_URI") is None: if os.environ.get("SPIFFWORKFLOW_BACKEND_DATABASE_URI") is None:
if os.environ.get("SPIFF_DATABASE_TYPE") == "sqlite": if os.environ.get("SPIFF_DATABASE_TYPE") == "sqlite":
app.config[ app.config[
"SQLALCHEMY_DATABASE_URI" "SQLALCHEMY_DATABASE_URI"
@ -39,7 +39,9 @@ def setup_database_uri(app: Flask) -> None:
"SQLALCHEMY_DATABASE_URI" "SQLALCHEMY_DATABASE_URI"
] = f"mysql+mysqlconnector://root:{db_pswd}@localhost/spiffworkflow_backend_{app.env}" ] = f"mysql+mysqlconnector://root:{db_pswd}@localhost/spiffworkflow_backend_{app.env}"
else: else:
app.config["SQLALCHEMY_DATABASE_URI"] = os.environ.get("DATABASE_URI") app.config["SQLALCHEMY_DATABASE_URI"] = os.environ.get(
"SPIFFWORKFLOW_BACKEND_DATABASE_URI"
)
def setup_config(app: Flask) -> None: def setup_config(app: Flask) -> None:

View File

@ -0,0 +1,11 @@
"""Spiff_enum."""
import enum
class SpiffEnum(enum.Enum):
"""SpiffEnum."""
@classmethod
def list(cls) -> list[str]:
"""List."""
return [el.value for el in cls]

View File

@ -1,5 +1,4 @@
"""File.""" """File."""
import enum
from dataclasses import dataclass from dataclasses import dataclass
from dataclasses import field from dataclasses import field
from datetime import datetime from datetime import datetime
@ -12,6 +11,7 @@ from marshmallow import Schema
from sqlalchemy.orm import deferred from sqlalchemy.orm import deferred
from sqlalchemy.orm import relationship from sqlalchemy.orm import relationship
from spiffworkflow_backend.helpers.spiff_enum import SpiffEnum
from spiffworkflow_backend.models.data_store import DataStoreModel from spiffworkflow_backend.models.data_store import DataStoreModel
@ -40,7 +40,7 @@ class FileModel(SpiffworkflowBaseDBModel):
archived = db.Column(db.Boolean, default=False) # type: ignore archived = db.Column(db.Boolean, default=False) # type: ignore
class FileType(enum.Enum): class FileType(SpiffEnum):
"""FileType.""" """FileType."""
bpmn = "bpmn" bpmn = "bpmn"
@ -64,11 +64,6 @@ class FileType(enum.Enum):
xml = "xml" xml = "xml"
zip = "zip" zip = "zip"
@classmethod
def list(cls) -> list[str]:
"""List."""
return [el.value for el in cls]
CONTENT_TYPES = { CONTENT_TYPES = {
"bpmn": "text/xml", "bpmn": "text/xml",

View File

@ -1,7 +1,6 @@
"""Process_instance.""" """Process_instance."""
from __future__ import annotations from __future__ import annotations
import enum
from dataclasses import dataclass from dataclasses import dataclass
from typing import Any from typing import Any
@ -16,6 +15,7 @@ from sqlalchemy import ForeignKey
from sqlalchemy.orm import deferred from sqlalchemy.orm import deferred
from sqlalchemy.orm import relationship from sqlalchemy.orm import relationship
from spiffworkflow_backend.helpers.spiff_enum import SpiffEnum
from spiffworkflow_backend.models.process_model import ProcessModelInfo from spiffworkflow_backend.models.process_model import ProcessModelInfo
from spiffworkflow_backend.models.task import TaskSchema from spiffworkflow_backend.models.task import TaskSchema
from spiffworkflow_backend.models.user import UserModel from spiffworkflow_backend.models.user import UserModel
@ -65,7 +65,7 @@ class NavigationItemSchema(Schema):
return item return item
class ProcessInstanceStatus(enum.Enum): class ProcessInstanceStatus(SpiffEnum):
"""ProcessInstanceStatus.""" """ProcessInstanceStatus."""
not_started = "not_started" not_started = "not_started"

View File

@ -1,5 +1,6 @@
"""APIs for dealing with process groups, process models, and process instances.""" """APIs for dealing with process groups, process models, and process instances."""
from typing import Any from typing import Any
from typing import Union
from flask import Blueprint from flask import Blueprint
from flask import current_app from flask import current_app
@ -9,6 +10,7 @@ from flask import render_template
from flask import request from flask import request
from flask import url_for from flask import url_for
from flask_bpmn.models.db import db from flask_bpmn.models.db import db
from werkzeug.wrappers.response import Response
from spiffworkflow_backend.models.user import UserModel from spiffworkflow_backend.models.user import UserModel
from spiffworkflow_backend.services.process_instance_processor import ( from spiffworkflow_backend.services.process_instance_processor import (
@ -28,7 +30,7 @@ ALLOWED_BPMN_EXTENSIONS = {"bpmn", "dmn"}
@admin_blueprint.route("/token", methods=["GET"]) @admin_blueprint.route("/token", methods=["GET"])
def token(): def token() -> str:
"""Token.""" """Token."""
if current_app.env == "production": if current_app.env == "production":
return "Not authorized" return "Not authorized"
@ -44,25 +46,28 @@ def token():
@admin_blueprint.route("/process-groups", methods=["GET"]) @admin_blueprint.route("/process-groups", methods=["GET"])
def process_groups_list(): def process_groups_list() -> str:
"""Process_groups_list.""" """Process_groups_list."""
process_groups = ProcessModelService().get_process_groups() process_groups = ProcessModelService().get_process_groups()
return render_template("process_groups_list.html", process_groups=process_groups) return render_template("process_groups_list.html", process_groups=process_groups)
@admin_blueprint.route("/process-groups/<process_group_id>", methods=["GET"]) @admin_blueprint.route("/process-groups/<process_group_id>", methods=["GET"])
def process_group_show(process_group_id): def process_group_show(process_group_id: str) -> str:
"""Show_process_group.""" """Show_process_group."""
process_group = ProcessModelService().get_process_group(process_group_id) process_group = ProcessModelService().get_process_group(process_group_id)
return render_template("process_group_show.html", process_group=process_group) return render_template("process_group_show.html", process_group=process_group)
@admin_blueprint.route("/process-models/<process_model_id>", methods=["GET"]) @admin_blueprint.route("/process-models/<process_model_id>", methods=["GET"])
def process_model_show(process_model_id): def process_model_show(process_model_id: str) -> Union[str, Response]:
"""Show_process_model.""" """Show_process_model."""
process_model = ProcessModelService().get_process_model(process_model_id) process_model = ProcessModelService().get_process_model(process_model_id)
files = SpecFileService.get_files(process_model, extension_filter="bpmn") files = SpecFileService.get_files(process_model, extension_filter="bpmn")
current_file_name = process_model.primary_file_name current_file_name = process_model.primary_file_name
if current_file_name is None:
flash("No primary_file_name", "error")
return redirect(url_for("admin.process_groups_list"))
bpmn_xml = SpecFileService.get_data(process_model, current_file_name) bpmn_xml = SpecFileService.get_data(process_model, current_file_name)
return render_template( return render_template(
"process_model_show.html", "process_model_show.html",
@ -76,7 +81,7 @@ def process_model_show(process_model_id):
@admin_blueprint.route( @admin_blueprint.route(
"/process-models/<process_model_id>/<file_name>", methods=["GET"] "/process-models/<process_model_id>/<file_name>", methods=["GET"]
) )
def process_model_show_file(process_model_id, file_name): def process_model_show_file(process_model_id: str, file_name: str) -> str:
"""Process_model_show_file.""" """Process_model_show_file."""
process_model = ProcessModelService().get_process_model(process_model_id) process_model = ProcessModelService().get_process_model(process_model_id)
bpmn_xml = SpecFileService.get_data(process_model, file_name) bpmn_xml = SpecFileService.get_data(process_model, file_name)
@ -93,7 +98,7 @@ def process_model_show_file(process_model_id, file_name):
@admin_blueprint.route( @admin_blueprint.route(
"/process-models/<process_model_id>/upload-file", methods=["POST"] "/process-models/<process_model_id>/upload-file", methods=["POST"]
) )
def process_model_upload_file(process_model_id): def process_model_upload_file(process_model_id: str) -> Response:
"""Process_model_upload_file.""" """Process_model_upload_file."""
process_model_service = ProcessModelService() process_model_service = ProcessModelService()
process_model = process_model_service.get_process_model(process_model_id) process_model = process_model_service.get_process_model(process_model_id)
@ -103,13 +108,15 @@ def process_model_upload_file(process_model_id):
request_file = request.files["file"] request_file = request.files["file"]
# If the user does not select a file, the browser submits an # If the user does not select a file, the browser submits an
# empty file without a filename. # empty file without a filename.
if request_file.filename == "": if request_file.filename == "" or request_file.filename is None:
flash("No selected file", "error") flash("No selected file", "error")
if request_file and _allowed_file(request_file.filename): else:
SpecFileService.add_file( if request_file and _allowed_file(request_file.filename):
process_model, request_file.filename, request_file.stream.read() if request_file.filename is not None:
) SpecFileService.add_file(
process_model_service.update_spec(process_model) process_model, request_file.filename, request_file.stream.read()
)
process_model_service.update_spec(process_model)
return redirect( return redirect(
url_for("admin.process_model_show", process_model_id=process_model.id) url_for("admin.process_model_show", process_model_id=process_model.id)
@ -119,7 +126,7 @@ def process_model_upload_file(process_model_id):
@admin_blueprint.route( @admin_blueprint.route(
"/process_models/<process_model_id>/edit/<file_name>", methods=["GET"] "/process_models/<process_model_id>/edit/<file_name>", methods=["GET"]
) )
def process_model_edit(process_model_id, file_name): def process_model_edit(process_model_id: str, file_name: str) -> str:
"""Edit_bpmn.""" """Edit_bpmn."""
process_model = ProcessModelService().get_process_model(process_model_id) process_model = ProcessModelService().get_process_model(process_model_id)
bpmn_xml = SpecFileService.get_data(process_model, file_name) bpmn_xml = SpecFileService.get_data(process_model, file_name)
@ -135,10 +142,13 @@ def process_model_edit(process_model_id, file_name):
@admin_blueprint.route( @admin_blueprint.route(
"/process-models/<process_model_id>/save/<file_name>", methods=["POST"] "/process-models/<process_model_id>/save/<file_name>", methods=["POST"]
) )
def process_model_save(process_model_id, file_name): def process_model_save(process_model_id: str, file_name: str) -> Union[str, Response]:
"""Process_model_save.""" """Process_model_save."""
process_model = ProcessModelService().get_process_model(process_model_id) process_model = ProcessModelService().get_process_model(process_model_id)
SpecFileService.update_file(process_model, file_name, request.get_data()) SpecFileService.update_file(process_model, file_name, request.get_data())
if process_model.primary_file_name is None:
flash("No primary_file_name", "error")
return redirect(url_for("admin.process_groups_list"))
bpmn_xml = SpecFileService.get_data(process_model, process_model.primary_file_name) bpmn_xml = SpecFileService.get_data(process_model, process_model.primary_file_name)
return render_template( return render_template(
"process_model_edit.html", "process_model_edit.html",
@ -149,7 +159,7 @@ def process_model_save(process_model_id, file_name):
@admin_blueprint.route("/process-models/<process_model_id>/run", methods=["GET"]) @admin_blueprint.route("/process-models/<process_model_id>/run", methods=["GET"])
def process_model_run(process_model_id): def process_model_run(process_model_id: str) -> Union[str, Response]:
"""Process_model_run.""" """Process_model_run."""
user = _find_or_create_user("Mr. Test") # Fixme - sheesh! user = _find_or_create_user("Mr. Test") # Fixme - sheesh!
process_instance = ProcessInstanceService.create_process_instance( process_instance = ProcessInstanceService.create_process_instance(
@ -162,7 +172,10 @@ def process_model_run(process_model_id):
process_model = ProcessModelService().get_process_model(process_model_id) process_model = ProcessModelService().get_process_model(process_model_id)
files = SpecFileService.get_files(process_model, extension_filter="bpmn") files = SpecFileService.get_files(process_model, extension_filter="bpmn")
current_file_name = process_model.primary_file_name current_file_name = process_model.primary_file_name
bpmn_xml = SpecFileService.get_data(process_model, process_model.primary_file_name) if current_file_name is None:
flash("No primary_file_name", "error")
return redirect(url_for("admin.process_groups_list"))
bpmn_xml = SpecFileService.get_data(process_model, current_file_name)
return render_template( return render_template(
"process_model_show.html", "process_model_show.html",
@ -184,7 +197,7 @@ def _find_or_create_user(username: str = "test_user1") -> Any:
return user return user
def _allowed_file(filename): def _allowed_file(filename: str) -> bool:
"""_allowed_file.""" """_allowed_file."""
return ( return (
"." in filename "." in filename

View File

@ -25,6 +25,7 @@ from spiffworkflow_backend.models.process_group import ProcessGroupSchema
from spiffworkflow_backend.models.process_instance import ProcessInstanceApiSchema from spiffworkflow_backend.models.process_instance import ProcessInstanceApiSchema
from spiffworkflow_backend.models.process_instance import ProcessInstanceModel from spiffworkflow_backend.models.process_instance import ProcessInstanceModel
from spiffworkflow_backend.models.process_instance import ProcessInstanceModelSchema from spiffworkflow_backend.models.process_instance import ProcessInstanceModelSchema
from spiffworkflow_backend.models.process_model import ProcessModelInfo
from spiffworkflow_backend.models.process_model import ProcessModelInfoSchema from spiffworkflow_backend.models.process_model import ProcessModelInfoSchema
from spiffworkflow_backend.services.error_handling_service import ErrorHandlingService from spiffworkflow_backend.services.error_handling_service import ErrorHandlingService
from spiffworkflow_backend.services.process_instance_processor import ( from spiffworkflow_backend.services.process_instance_processor import (
@ -43,6 +44,12 @@ from spiffworkflow_backend.services.spec_file_service import SpecFileService
process_api_blueprint = Blueprint("process_api", __name__) process_api_blueprint = Blueprint("process_api", __name__)
def status() -> flask.wrappers.Response:
"""Status."""
ProcessInstanceModel.query.filter().first()
return Response(json.dumps({"ok": True}), status=200, mimetype="application/json")
def process_group_add( def process_group_add(
body: Dict[str, Union[str, bool, int]] body: Dict[str, Union[str, bool, int]]
) -> flask.wrappers.Response: ) -> flask.wrappers.Response:
@ -147,22 +154,11 @@ def process_model_show(
process_group_id: str, process_model_id: str process_group_id: str, process_model_id: str
) -> Dict[str, Union[str, List[Dict[str, Optional[Union[str, int, bool]]]], bool, int]]: ) -> Dict[str, Union[str, List[Dict[str, Optional[Union[str, int, bool]]]], bool, int]]:
"""Process_model_show.""" """Process_model_show."""
try: process_model = get_process_model(process_model_id, process_group_id)
process_model = ProcessModelService().get_process_model( files = sorted(SpecFileService.get_files(process_model))
process_model_id, group_id=process_group_id process_model.files = files
) process_model_json = ProcessModelInfoSchema().dump(process_model)
files = sorted(SpecFileService.get_files(process_model)) return process_model_json
process_model.files = files
process_model_json = ProcessModelInfoSchema().dump(process_model)
return process_model_json
except ProcessEntityNotFoundError as exception:
raise (
ApiError(
code="process_mode_cannot_be_found",
message=f"Process model cannot be found: {process_model_id}",
status_code=400,
)
) from exception
def process_model_list( def process_model_list(
@ -193,9 +189,7 @@ def get_file(
process_group_id: str, process_model_id: str, file_name: str process_group_id: str, process_model_id: str, file_name: str
) -> Dict[str, Optional[Union[str, int, bool]]]: ) -> Dict[str, Optional[Union[str, int, bool]]]:
"""Get_file.""" """Get_file."""
process_model = ProcessModelService().get_process_model( process_model = get_process_model(process_model_id, process_group_id)
process_model_id, group_id=process_group_id
)
files = SpecFileService.get_files(process_model, file_name) files = SpecFileService.get_files(process_model, file_name)
if len(files) == 0: if len(files) == 0:
raise ApiError( raise ApiError(
@ -217,9 +211,7 @@ def process_model_file_update(
process_group_id: str, process_model_id: str, file_name: str process_group_id: str, process_model_id: str, file_name: str
) -> flask.wrappers.Response: ) -> flask.wrappers.Response:
"""Process_model_file_save.""" """Process_model_file_save."""
process_model = ProcessModelService().get_process_model( process_model = get_process_model(process_model_id, process_group_id)
process_model_id, group_id=process_group_id
)
request_file = get_file_from_request() request_file = get_file_from_request()
request_file_contents = request_file.stream.read() request_file_contents = request_file.stream.read()
@ -237,9 +229,7 @@ def process_model_file_update(
def add_file(process_group_id: str, process_model_id: str) -> flask.wrappers.Response: def add_file(process_group_id: str, process_model_id: str) -> flask.wrappers.Response:
"""Add_file.""" """Add_file."""
process_model_service = ProcessModelService() process_model_service = ProcessModelService()
process_model = process_model_service.get_process_model( process_model = get_process_model(process_model_id, process_group_id)
process_model_id, group_id=process_group_id
)
request_file = get_file_from_request() request_file = get_file_from_request()
file = SpecFileService.add_file( file = SpecFileService.add_file(
process_model, request_file.filename, request_file.stream.read() process_model, request_file.filename, request_file.stream.read()
@ -319,17 +309,7 @@ def process_instance_list(
process_status: Optional[str] = None, process_status: Optional[str] = None,
) -> flask.wrappers.Response: ) -> flask.wrappers.Response:
"""Process_instance_list.""" """Process_instance_list."""
process_model = ProcessModelService().get_process_model( process_model = get_process_model(process_model_id, process_group_id)
process_model_id, group_id=process_group_id
)
if process_model is None:
raise (
ApiError(
code="process_model_cannot_be_found",
message=f"Process model cannot be found: {process_model_id}",
status_code=400,
)
)
results = ProcessInstanceModel.query.filter_by( results = ProcessInstanceModel.query.filter_by(
process_model_identifier=process_model.id process_model_identifier=process_model.id
@ -389,17 +369,7 @@ def process_instance_report(
process_group_id: str, process_model_id: str, page: int = 1, per_page: int = 100 process_group_id: str, process_model_id: str, page: int = 1, per_page: int = 100
) -> flask.wrappers.Response: ) -> flask.wrappers.Response:
"""Process_instance_list.""" """Process_instance_list."""
process_model = ProcessModelService().get_process_model( process_model = get_process_model(process_model_id, process_group_id)
process_model_id, group_id=process_group_id
)
if process_model is None:
raise (
ApiError(
code="process_mode_cannot_be_found",
message=f"Process model cannot be found: {process_model_id}",
status_code=400,
)
)
process_instances = ( process_instances = (
ProcessInstanceModel.query.filter_by(process_model_identifier=process_model.id) ProcessInstanceModel.query.filter_by(process_model_identifier=process_model.id)
@ -439,3 +409,22 @@ def get_file_from_request() -> FileStorage:
status_code=400, status_code=400,
) )
return request_file return request_file
def get_process_model(process_model_id: str, process_group_id: str) -> ProcessModelInfo:
    """Look up a process model by id within a group, translating a miss into an ApiError.

    Args:
        process_model_id: Identifier of the process model to fetch.
        process_group_id: Identifier of the group the model belongs to.

    Returns:
        The ProcessModelInfo for the requested model.

    Raises:
        ApiError: with code ``process_model_cannot_be_found`` (HTTP 400) when the
            underlying service raises ProcessEntityNotFoundError.
    """
    try:
        # Service raises ProcessEntityNotFoundError rather than returning None,
        # so no pre-initialization of the result is needed.
        return ProcessModelService().get_process_model(
            process_model_id, group_id=process_group_id
        )
    except ProcessEntityNotFoundError as exception:
        raise ApiError(
            code="process_model_cannot_be_found",
            message=f"Process model cannot be found: {process_model_id}",
            status_code=400,
        ) from exception

View File

@ -2,7 +2,6 @@
from typing import Dict from typing import Dict
from typing import Optional from typing import Optional
from flask import current_app
from flask import g from flask import g
from flask_bpmn.api.api_error import ApiError from flask_bpmn.api.api_error import ApiError
@ -50,28 +49,6 @@ def verify_token(token: Optional[str] = None) -> Dict[str, Optional[str]]:
else: else:
raise failure_error raise failure_error
# If there's no token and we're in production, get the user from the SSO headers and return their token
elif _is_production():
uid = "TEST_UID"
if uid is not None:
db_user = UserModel.query.filter_by(uid=uid).first()
# If the user is valid, store the user and token for this session
if db_user is not None:
g.user = db_user
token_from_user = g.user.encode_auth_token()
g.token = token_from_user
token_info = UserModel.decode_auth_token(token_from_user)
return token_info
else:
raise ApiError(
"no_user",
"User not found. Please login via the frontend app before accessing this feature.",
status_code=403,
)
else: else:
# Fall back to a default user if this is not production. # Fall back to a default user if this is not production.
g.user = UserModel.query.first() g.user = UserModel.query.first()
@ -83,8 +60,3 @@ def verify_token(token: Optional[str] = None) -> Dict[str, Optional[str]]:
token_from_user = g.user.encode_auth_token() token_from_user = g.user.encode_auth_token()
token_info = UserModel.decode_auth_token(token_from_user) token_info = UserModel.decode_auth_token(token_from_user)
return token_info return token_info
def _is_production() -> bool:
"""_is_production."""
return "PRODUCTION" in current_app.config and current_app.config["PRODUCTION"]

View File

@ -0,0 +1,36 @@
"""Acceptance_test_fixtures."""
import json
import time
from flask_bpmn.models.db import db
from tests.spiffworkflow_backend.helpers.test_data import find_or_create_user
from spiffworkflow_backend.models.process_instance import ProcessInstanceModel
from spiffworkflow_backend.models.process_instance import ProcessInstanceStatus
def load_fixtures() -> list[ProcessInstanceModel]:
    """Create and commit five acceptance-test process instances, one per status.

    Instances share a fixed group/model identifier; their start times are spaced
    one hour apart and each runs for 20 seconds. Returns the committed instances.
    """
    group_identifier = "acceptance-tests-group-one"
    model_identifier = "acceptance-tests-model-1"
    initiator = find_or_create_user()
    status_names = ProcessInstanceStatus.list()
    base_time = round(time.time())

    instances: list[ProcessInstanceModel] = []
    for offset in range(5):
        started_at = base_time + (3600 * offset)
        instance = ProcessInstanceModel(
            status=ProcessInstanceStatus[status_names[offset]],
            process_initiator=initiator,
            process_model_identifier=model_identifier,
            process_group_identifier=group_identifier,
            updated_at_in_seconds=round(time.time()),
            start_in_seconds=started_at,
            end_in_seconds=started_at + 20,
            bpmn_json=json.dumps({"i": offset}),
        )
        db.session.add(instance)
        instances.append(instance)
    db.session.commit()

    return instances

View File

@ -1,4 +1,6 @@
"""Error_handling_service.""" """Error_handling_service."""
from typing import Union
from flask_bpmn.api.api_error import ApiError from flask_bpmn.api.api_error import ApiError
from flask_bpmn.models.db import db from flask_bpmn.models.db import db
@ -27,7 +29,7 @@ class ErrorHandlingService:
return instance return instance
def handle_error( def handle_error(
self, _processor: ProcessInstanceProcessor, _error: ApiError self, _processor: ProcessInstanceProcessor, _error: Union[ApiError, Exception]
) -> None: ) -> None:
"""On unhandled exceptions, set instance.status based on model.fault_or_suspend_on_exception.""" """On unhandled exceptions, set instance.status based on model.fault_or_suspend_on_exception."""
process_model = ProcessModelService().get_process_model( process_model = ProcessModelService().get_process_model(

View File

@ -73,8 +73,6 @@ class ProcessModelService(FileSystemService):
message=f"We cannot delete the model `{process_model_id}`, there are existing instances that depend on it.", message=f"We cannot delete the model `{process_model_id}`, there are existing instances that depend on it.",
) )
process_model = self.get_process_model(process_model_id) process_model = self.get_process_model(process_model_id)
if not process_model:
return
if process_model.library: if process_model.library:
self.__remove_library_references(process_model.id) self.__remove_library_references(process_model.id)
path = self.workflow_path(process_model) path = self.workflow_path(process_model)

View File

@ -70,6 +70,7 @@ def test_process_model_delete(
headers=logged_in_headers(user), headers=logged_in_headers(user),
) )
assert response.status_code == 200 assert response.status_code == 200
assert response.json is not None
assert response.json["ok"] is True assert response.json["ok"] is True
# assert we no longer have a model # assert we no longer have a model
@ -132,6 +133,7 @@ def test_process_model_update(
data=json.dumps(ProcessModelInfoSchema().dump(process_model)), data=json.dumps(ProcessModelInfoSchema().dump(process_model)),
) )
assert response.status_code == 200 assert response.status_code == 200
assert response.json is not None
assert response.json["display_name"] == "Updated Display Name" assert response.json["display_name"] == "Updated Display Name"
@ -158,6 +160,7 @@ def test_process_model_list(
f"/v1.0/process-groups/{group_id}/process-models", f"/v1.0/process-groups/{group_id}/process-models",
headers=logged_in_headers(user), headers=logged_in_headers(user),
) )
assert response.json is not None
assert len(response.json["results"]) == 5 assert len(response.json["results"]) == 5
assert response.json["pagination"]["count"] == 5 assert response.json["pagination"]["count"] == 5
assert response.json["pagination"]["total"] == 5 assert response.json["pagination"]["total"] == 5
@ -168,6 +171,7 @@ def test_process_model_list(
f"/v1.0/process-groups/{group_id}/process-models?page=1&per_page=1", f"/v1.0/process-groups/{group_id}/process-models?page=1&per_page=1",
headers=logged_in_headers(user), headers=logged_in_headers(user),
) )
assert response.json is not None
assert len(response.json["results"]) == 1 assert len(response.json["results"]) == 1
assert response.json["results"][0]["id"] == "test_model_0" assert response.json["results"][0]["id"] == "test_model_0"
assert response.json["pagination"]["count"] == 1 assert response.json["pagination"]["count"] == 1
@ -179,6 +183,7 @@ def test_process_model_list(
f"/v1.0/process-groups/{group_id}/process-models?page=2&per_page=1", f"/v1.0/process-groups/{group_id}/process-models?page=2&per_page=1",
headers=logged_in_headers(user), headers=logged_in_headers(user),
) )
assert response.json is not None
assert len(response.json["results"]) == 1 assert len(response.json["results"]) == 1
assert response.json["results"][0]["id"] == "test_model_1" assert response.json["results"][0]["id"] == "test_model_1"
assert response.json["pagination"]["count"] == 1 assert response.json["pagination"]["count"] == 1
@ -190,6 +195,7 @@ def test_process_model_list(
f"/v1.0/process-groups/{group_id}/process-models?page=1&per_page=3", f"/v1.0/process-groups/{group_id}/process-models?page=1&per_page=3",
headers=logged_in_headers(user), headers=logged_in_headers(user),
) )
assert response.json is not None
assert len(response.json["results"]) == 3 assert len(response.json["results"]) == 3
assert response.json["results"][0]["id"] == "test_model_0" assert response.json["results"][0]["id"] == "test_model_0"
assert response.json["pagination"]["count"] == 3 assert response.json["pagination"]["count"] == 3
@ -202,6 +208,7 @@ def test_process_model_list(
headers=logged_in_headers(user), headers=logged_in_headers(user),
) )
# there should only be 2 left # there should only be 2 left
assert response.json is not None
assert len(response.json["results"]) == 2 assert len(response.json["results"]) == 2
assert response.json["results"][0]["id"] == "test_model_3" assert response.json["results"][0]["id"] == "test_model_3"
assert response.json["pagination"]["count"] == 2 assert response.json["pagination"]["count"] == 2
@ -302,6 +309,7 @@ def test_process_group_list(
"/v1.0/process-groups", "/v1.0/process-groups",
headers=logged_in_headers(user), headers=logged_in_headers(user),
) )
assert response.json is not None
assert len(response.json["results"]) == 5 assert len(response.json["results"]) == 5
assert response.json["pagination"]["count"] == 5 assert response.json["pagination"]["count"] == 5
assert response.json["pagination"]["total"] == 5 assert response.json["pagination"]["total"] == 5
@ -312,6 +320,7 @@ def test_process_group_list(
"/v1.0/process-groups?page=1&per_page=1", "/v1.0/process-groups?page=1&per_page=1",
headers=logged_in_headers(user), headers=logged_in_headers(user),
) )
assert response.json is not None
assert len(response.json["results"]) == 1 assert len(response.json["results"]) == 1
assert response.json["results"][0]["id"] == "test_process_group_0" assert response.json["results"][0]["id"] == "test_process_group_0"
assert response.json["pagination"]["count"] == 1 assert response.json["pagination"]["count"] == 1
@ -323,6 +332,7 @@ def test_process_group_list(
"/v1.0/process-groups?page=2&per_page=1", "/v1.0/process-groups?page=2&per_page=1",
headers=logged_in_headers(user), headers=logged_in_headers(user),
) )
assert response.json is not None
assert len(response.json["results"]) == 1 assert len(response.json["results"]) == 1
assert response.json["results"][0]["id"] == "test_process_group_1" assert response.json["results"][0]["id"] == "test_process_group_1"
assert response.json["pagination"]["count"] == 1 assert response.json["pagination"]["count"] == 1
@ -334,6 +344,7 @@ def test_process_group_list(
"/v1.0/process-groups?page=1&per_page=3", "/v1.0/process-groups?page=1&per_page=3",
headers=logged_in_headers(user), headers=logged_in_headers(user),
) )
assert response.json is not None
assert len(response.json["results"]) == 3 assert len(response.json["results"]) == 3
assert response.json["results"][0]["id"] == "test_process_group_0" assert response.json["results"][0]["id"] == "test_process_group_0"
assert response.json["results"][1]["id"] == "test_process_group_1" assert response.json["results"][1]["id"] == "test_process_group_1"
@ -348,6 +359,7 @@ def test_process_group_list(
headers=logged_in_headers(user), headers=logged_in_headers(user),
) )
# there should only be 2 left # there should only be 2 left
assert response.json is not None
assert len(response.json["results"]) == 2 assert len(response.json["results"]) == 2
assert response.json["results"][0]["id"] == "test_process_group_3" assert response.json["results"][0]["id"] == "test_process_group_3"
assert response.json["results"][1]["id"] == "test_process_group_4" assert response.json["results"][1]["id"] == "test_process_group_4"
@ -374,6 +386,7 @@ def test_process_model_file_update_fails_if_no_file_given(
) )
assert response.status_code == 400 assert response.status_code == 400
assert response.json is not None
assert response.json["code"] == "no_file_given" assert response.json["code"] == "no_file_given"
@ -395,6 +408,7 @@ def test_process_model_file_update_fails_if_contents_is_empty(
) )
assert response.status_code == 400 assert response.status_code == 400
assert response.json is not None
assert response.json["code"] == "file_contents_empty" assert response.json["code"] == "file_contents_empty"
@ -417,6 +431,7 @@ def test_process_model_file_update(
) )
assert response.status_code == 200 assert response.status_code == 200
assert response.json is not None
assert response.json["ok"] assert response.json["ok"]
response = client.get( response = client.get(
@ -442,12 +457,13 @@ def test_get_file(
headers=logged_in_headers(user), headers=logged_in_headers(user),
) )
assert response.status_code == 200 assert response.status_code == 200
assert response.json is not None
assert response.json["name"] == "hello_world.bpmn" assert response.json["name"] == "hello_world.bpmn"
assert response.json["process_group_id"] == "group_id1" assert response.json["process_group_id"] == "group_id1"
assert response.json["process_model_id"] == "hello_world" assert response.json["process_model_id"] == "hello_world"
def test_get_workflow_from_workflow_spec( def dest_get_workflow_from_workflow_spec(
app: Flask, client: FlaskClient, with_bpmn_file_cleanup: None app: Flask, client: FlaskClient, with_bpmn_file_cleanup: None
) -> None: ) -> None:
"""Test_get_workflow_from_workflow_spec.""" """Test_get_workflow_from_workflow_spec."""
@ -458,6 +474,7 @@ def test_get_workflow_from_workflow_spec(
headers=logged_in_headers(user), headers=logged_in_headers(user),
) )
assert response.status_code == 201 assert response.status_code == 201
assert response.json is not None
assert "hello_world" == response.json["process_model_identifier"] assert "hello_world" == response.json["process_model_identifier"]
# assert('Task_GetName' == response.json['next_task']['name']) # assert('Task_GetName' == response.json['next_task']['name'])
@ -469,6 +486,7 @@ def test_get_process_groups_when_none(
user = find_or_create_user() user = find_or_create_user()
response = client.get("/v1.0/process-groups", headers=logged_in_headers(user)) response = client.get("/v1.0/process-groups", headers=logged_in_headers(user))
assert response.status_code == 200 assert response.status_code == 200
assert response.json is not None
assert response.json["results"] == [] assert response.json["results"] == []
@ -480,6 +498,7 @@ def test_get_process_groups_when_there_are_some(
load_test_spec("hello_world") load_test_spec("hello_world")
response = client.get("/v1.0/process-groups", headers=logged_in_headers(user)) response = client.get("/v1.0/process-groups", headers=logged_in_headers(user))
assert response.status_code == 200 assert response.status_code == 200
assert response.json is not None
assert len(response.json["results"]) == 1 assert len(response.json["results"]) == 1
assert response.json["pagination"]["count"] == 1 assert response.json["pagination"]["count"] == 1
assert response.json["pagination"]["total"] == 1 assert response.json["pagination"]["total"] == 1
@ -498,6 +517,7 @@ def test_get_process_group_when_found(
f"/v1.0/process-groups/{test_process_group_id}", headers=logged_in_headers(user) f"/v1.0/process-groups/{test_process_group_id}", headers=logged_in_headers(user)
) )
assert response.status_code == 200 assert response.status_code == 200
assert response.json is not None
assert response.json["id"] == test_process_group_id assert response.json["id"] == test_process_group_id
assert response.json["process_models"][0]["id"] == process_model_dir_name assert response.json["process_models"][0]["id"] == process_model_dir_name
@ -515,6 +535,7 @@ def test_get_process_model_when_found(
headers=logged_in_headers(user), headers=logged_in_headers(user),
) )
assert response.status_code == 200 assert response.status_code == 200
assert response.json is not None
assert response.json["id"] == process_model_dir_name assert response.json["id"] == process_model_dir_name
assert len(response.json["files"]) == 1 assert len(response.json["files"]) == 1
assert response.json["files"][0]["name"] == "hello_world.bpmn" assert response.json["files"][0]["name"] == "hello_world.bpmn"
@ -532,7 +553,8 @@ def test_get_process_model_when_not_found(
headers=logged_in_headers(user), headers=logged_in_headers(user),
) )
assert response.status_code == 400 assert response.status_code == 400
assert response.json["code"] == "process_mode_cannot_be_found" assert response.json is not None
assert response.json["code"] == "process_model_cannot_be_found"
def test_process_instance_create( def test_process_instance_create(
@ -546,6 +568,7 @@ def test_process_instance_create(
response = create_process_instance( response = create_process_instance(
client, test_process_group_id, test_process_model_id, headers client, test_process_group_id, test_process_model_id, headers
) )
assert response.json is not None
assert response.json["updated_at_in_seconds"] is not None assert response.json["updated_at_in_seconds"] is not None
assert response.json["status"] == "not_started" assert response.json["status"] == "not_started"
assert response.json["process_model_identifier"] == test_process_model_id assert response.json["process_model_identifier"] == test_process_model_id
@ -597,6 +620,7 @@ def test_process_instance_list_with_default_list(
headers=logged_in_headers(user), headers=logged_in_headers(user),
) )
assert response.status_code == 200 assert response.status_code == 200
assert response.json is not None
assert len(response.json["results"]) == 1 assert len(response.json["results"]) == 1
assert response.json["pagination"]["count"] == 1 assert response.json["pagination"]["count"] == 1
assert response.json["pagination"]["pages"] == 1 assert response.json["pagination"]["pages"] == 1
@ -644,6 +668,7 @@ def test_process_instance_list_with_paginated_items(
headers=logged_in_headers(user), headers=logged_in_headers(user),
) )
assert response.status_code == 200 assert response.status_code == 200
assert response.json is not None
assert len(response.json["results"]) == 1 assert len(response.json["results"]) == 1
assert response.json["pagination"]["count"] == 1 assert response.json["pagination"]["count"] == 1
assert response.json["pagination"]["pages"] == 3 assert response.json["pagination"]["pages"] == 3
@ -654,6 +679,7 @@ def test_process_instance_list_with_paginated_items(
headers=logged_in_headers(user), headers=logged_in_headers(user),
) )
assert response.status_code == 200 assert response.status_code == 200
assert response.json is not None
assert len(response.json["results"]) == 2 assert len(response.json["results"]) == 2
assert response.json["pagination"]["count"] == 2 assert response.json["pagination"]["count"] == 2
assert response.json["pagination"]["pages"] == 3 assert response.json["pagination"]["pages"] == 3
@ -693,6 +719,7 @@ def test_process_instance_list_filter(
f"/v1.0/process-models/{test_process_group_id}/{test_process_model_id}/process-instances", f"/v1.0/process-models/{test_process_group_id}/{test_process_model_id}/process-instances",
headers=logged_in_headers(user), headers=logged_in_headers(user),
) )
assert response.json is not None
results = response.json["results"] results = response.json["results"]
assert len(results) == 5 assert len(results) == 5
@ -703,6 +730,7 @@ def test_process_instance_list_filter(
f"/v1.0/process-models/{test_process_group_id}/{test_process_model_id}/process-instances?process_status={ProcessInstanceStatus[statuses[i]].value}", f"/v1.0/process-models/{test_process_group_id}/{test_process_model_id}/process-instances?process_status={ProcessInstanceStatus[statuses[i]].value}",
headers=logged_in_headers(user), headers=logged_in_headers(user),
) )
assert response.json is not None
results = response.json["results"] results = response.json["results"]
assert len(results) == 1 assert len(results) == 1
assert results[0]["status"] == ProcessInstanceStatus[statuses[i]].value assert results[0]["status"] == ProcessInstanceStatus[statuses[i]].value
@ -713,6 +741,7 @@ def test_process_instance_list_filter(
f"/v1.0/process-models/{test_process_group_id}/{test_process_model_id}/process-instances?start_from=1001", f"/v1.0/process-models/{test_process_group_id}/{test_process_model_id}/process-instances?start_from=1001",
headers=logged_in_headers(user), headers=logged_in_headers(user),
) )
assert response.json is not None
results = response.json["results"] results = response.json["results"]
assert len(results) == 4 assert len(results) == 4
for i in range(4): for i in range(4):
@ -723,6 +752,7 @@ def test_process_instance_list_filter(
f"/v1.0/process-models/{test_process_group_id}/{test_process_model_id}/process-instances?start_from=2001&end_till=5999", f"/v1.0/process-models/{test_process_group_id}/{test_process_model_id}/process-instances?start_from=2001&end_till=5999",
headers=logged_in_headers(user), headers=logged_in_headers(user),
) )
assert response.json is not None
results = response.json["results"] results = response.json["results"]
assert len(results) == 2 assert len(results) == 2
assert json.loads(results[0]["bpmn_json"])["i"] in (2, 3) assert json.loads(results[0]["bpmn_json"])["i"] in (2, 3)
@ -733,6 +763,7 @@ def test_process_instance_list_filter(
f"/v1.0/process-models/{test_process_group_id}/{test_process_model_id}/process-instances?start_from=1001&start_till=3999", f"/v1.0/process-models/{test_process_group_id}/{test_process_model_id}/process-instances?start_from=1001&start_till=3999",
headers=logged_in_headers(user), headers=logged_in_headers(user),
) )
assert response.json is not None
results = response.json["results"] results = response.json["results"]
assert len(results) == 2 assert len(results) == 2
assert json.loads(results[0]["bpmn_json"])["i"] in (1, 2) assert json.loads(results[0]["bpmn_json"])["i"] in (1, 2)
@ -743,6 +774,7 @@ def test_process_instance_list_filter(
f"/v1.0/process-models/{test_process_group_id}/{test_process_model_id}/process-instances?end_from=2001&end_till=5999", f"/v1.0/process-models/{test_process_group_id}/{test_process_model_id}/process-instances?end_from=2001&end_till=5999",
headers=logged_in_headers(user), headers=logged_in_headers(user),
) )
assert response.json is not None
results = response.json["results"] results = response.json["results"]
assert len(results) == 3 assert len(results) == 3
for i in range(3): for i in range(3):
@ -769,6 +801,7 @@ def test_process_instance_report_with_default_list(
headers=logged_in_headers(user), headers=logged_in_headers(user),
) )
assert response.status_code == 200 assert response.status_code == 200
assert response.json is not None
assert len(response.json["results"]) == 1 assert len(response.json["results"]) == 1
assert response.json["pagination"]["count"] == 1 assert response.json["pagination"]["count"] == 1
assert response.json["pagination"]["pages"] == 1 assert response.json["pagination"]["pages"] == 1

View File

@ -0,0 +1,16 @@
"""Test_acceptance_test_fixtures."""
from flask.app import Flask
from spiffworkflow_backend.services.acceptance_test_fixtures import load_fixtures
def test_start_dates_are_one_hour_apart(app: Flask) -> None:
    """Verify fixture instances are created with start times spaced one hour apart."""
    instances = load_fixtures()
    assert len(instances) > 2
    first, second = instances[0], instances[1]
    assert first.start_in_seconds is not None
    assert second.start_in_seconds is not None
    # One-hour spacing between consecutive fixture instances.
    assert second.start_in_seconds - first.start_in_seconds == 3600

27
wsgi.py
View File

@ -1,28 +1,11 @@
"""This is my docstring.""" """This is my docstring."""
from werkzeug.exceptions import NotFound import os
from werkzeug.middleware.dispatcher import DispatcherMiddleware
from werkzeug.middleware.proxy_fix import ProxyFix
from spiffworkflow_backend import create_app from spiffworkflow_backend import create_app
from spiffworkflow_backend.services.acceptance_test_fixtures import load_fixtures
app = create_app() app = create_app()
if __name__ == "__main__": if os.environ.get("SPIFFWORKFLOW_BACKEND_LOAD_FIXTURE_DATA") == "true":
with app.app_context():
def no_app(environ, start_response): load_fixtures()
"""This is."""
return NotFound()(environ, start_response)
# Remove trailing slash, but add leading slash
base_url = "/" + app.config["APPLICATION_ROOT"].strip("/")
routes = {"/": app.wsgi_app}
if base_url != "/":
routes[base_url] = app.wsgi_app
app.wsgi_app = DispatcherMiddleware(no_app, routes)
app.wsgi_app = ProxyFix(app.wsgi_app)
flask_port = app.config["FLASK_PORT"]
app.run(host="0.0.0.0", port=flask_port)