Squashed 'spiffworkflow-backend/' changes from b8924100d..64ac70428

64ac70428 Move to Python 3.11 (#27)
167851df1 remove leading slash from model identifiers
3503fe400 one instance test left for cypress w/ burnettk
7e09363c4 merged in main and resolved conflicts w/ burnettk cullerton
afbc3157d fixed some acceptance tests w/ burnettk cullerton
96e7b63c6 made a process model form w/ burnettk
c19aac25f Somethings up w/authentication. Maybe the stored token is bad
d17007eaa Merge branch 'main' into feature/nested-groups
729b13881 script to modify all model json files to use full path for model id. Also removes process_group_id
7856b8caa return next task when running an instance w/ burnettk
d183b961c First pass at custom report/perspective for Process Instance List (#23)
f303c0f77 remove process_group_identifier from process-instances endpoint
207de9ab8 Merge branch 'main' into feature/nested-groups
fec8d06b8 started test for test_script_unit_test_run
ea31c7a12 fixed process_model_identifier in script_unit_test_create
5d058cbea Fix url in test
f8afd7bdd Remove the `_2` methods
7c13ec802 Fix urls for uspend and resume endpoints
19158d7b0 Get testing bpmn files out of the config directory
5f1ee7f16 fixed failing test
de3b4b81d Merge branch 'main' into feature/nested-groups
440871d57 First stab at nested folders. Added temp endpoints Changes to tests and test helpers
1bd6a199f Don't need parent. Just use the whole path for the group id
a2ab420b2 Committing so Jon can view code

git-subtree-dir: spiffworkflow-backend
git-subtree-split: 64ac7042887af80869963bc103c01f56404727f2
jasquat 2022-11-09 15:02:19 -05:00
parent 39f9dcba4d
commit 23f0b12e9a
48 changed files with 1941 additions and 1001 deletions

View File

@ -1 +1 @@
python 3.10.4 python 3.11.0

View File

@ -1,4 +1,4 @@
FROM ghcr.io/sartography/python:3.10 FROM ghcr.io/sartography/python:3.11
RUN pip install poetry RUN pip install poetry
RUN useradd _gunicorn --no-create-home --user-group RUN useradd _gunicorn --no-create-home --user-group

bin/update_all_json.py Normal file
View File

@ -0,0 +1,22 @@
"""Updates all JSON files, based on the current state of BPMN_SPEC_ABSOLUTE_DIR."""
from spiffworkflow_backend import get_hacked_up_app_for_script
from spiffworkflow_backend.services.process_model_service import ProcessModelService
def main() -> None:
"""Main."""
app = get_hacked_up_app_for_script()
with app.app_context():
groups = ProcessModelService().get_process_groups()
for group in groups:
for process_model in group.process_models:
update_items = {
"process_group_id": "",
"id": f"{group.id}/{process_model.id}",
}
ProcessModelService().update_spec(process_model, update_items)
if __name__ == "__main__":
main()

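A minimal sketch of the rewrite this script applies to each model's JSON, with an illustrative group id of "finance" and model id of "model_with_lanes": the process_group_id field is cleared and the model id becomes the full group/model path.

# Illustrative only: the per-model update built by update_all_json.py.
group_id = "finance"
process_model_id = "model_with_lanes"
update_items = {
    "process_group_id": "",  # no longer stored separately
    "id": f"{group_id}/{process_model_id}",  # full path, e.g. "finance/model_with_lanes"
}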
View File

@ -4,10 +4,10 @@ import shutil
import pytest import pytest
from flask.app import Flask from flask.app import Flask
from flask.testing import FlaskClient
from flask_bpmn.models.db import db from flask_bpmn.models.db import db
from flask_bpmn.models.db import SpiffworkflowBaseDBModel from flask_bpmn.models.db import SpiffworkflowBaseDBModel
from tests.spiffworkflow_backend.helpers.base_test import BaseTest from tests.spiffworkflow_backend.helpers.base_test import BaseTest
from tests.spiffworkflow_backend.helpers.test_data import load_test_spec
from spiffworkflow_backend.models.active_task_user import ActiveTaskUserModel from spiffworkflow_backend.models.active_task_user import ActiveTaskUserModel
from spiffworkflow_backend.models.process_instance import ProcessInstanceModel from spiffworkflow_backend.models.process_instance import ProcessInstanceModel
@ -20,6 +20,8 @@ from spiffworkflow_backend.services.process_instance_service import (
) )
from spiffworkflow_backend.services.process_model_service import ProcessModelService from spiffworkflow_backend.services.process_model_service import ProcessModelService
# from tests.spiffworkflow_backend.helpers.test_data import load_test_spec
# We need to call this before importing spiffworkflow_backend # We need to call this before importing spiffworkflow_backend
# otherwise typeguard cannot work. hence the noqa: E402 # otherwise typeguard cannot work. hence the noqa: E402
@ -66,17 +68,37 @@ def with_super_admin_user() -> UserModel:
@pytest.fixture() @pytest.fixture()
def setup_process_instances_for_reports() -> list[ProcessInstanceModel]: def setup_process_instances_for_reports(
client: FlaskClient, with_super_admin_user: UserModel
) -> list[ProcessInstanceModel]:
"""Setup_process_instances_for_reports.""" """Setup_process_instances_for_reports."""
user = BaseTest.find_or_create_user() user = with_super_admin_user
process_group_id = "runs_without_input" process_group_id = "runs_without_input"
process_model_id = "sample" process_model_id = "sample"
load_test_spec(process_group_id=process_group_id, process_model_id=process_model_id) # bpmn_file_name = "sample.bpmn"
bpmn_file_location = "sample"
process_model_identifier = BaseTest().basic_test_setup(
client,
with_super_admin_user,
process_group_id=process_group_id,
process_model_id=process_model_id,
# bpmn_file_name=bpmn_file_name,
bpmn_file_location=bpmn_file_location,
)
# BaseTest().create_process_group(
# client=client, user=user, process_group_id=process_group_id, display_name=process_group_id
# )
# process_model_id = "runs_without_input/sample"
# load_test_spec(
# process_model_id=f"{process_group_id}/{process_model_id}",
# process_model_source_directory="sample"
# )
process_instances = [] process_instances = []
for data in [kay(), ray(), jay()]: for data in [kay(), ray(), jay()]:
process_instance = ProcessInstanceService.create_process_instance( process_instance = ProcessInstanceService.create_process_instance(
process_group_identifier=process_group_id, # process_group_identifier=process_group_id,
process_model_identifier=process_model_id, process_model_identifier=process_model_identifier,
user=user, user=user,
) )
processor = ProcessInstanceProcessor(process_instance) processor = ProcessInstanceProcessor(process_instance)

View File

@ -1,8 +1,8 @@
"""empty message """empty message
Revision ID: b1647eff45c9 Revision ID: 50dd2e016d94
Revises: Revises:
Create Date: 2022-11-02 14:25:09.992800 Create Date: 2022-11-08 16:28:18.991635
""" """
from alembic import op from alembic import op
@ -10,7 +10,7 @@ import sqlalchemy as sa
# revision identifiers, used by Alembic. # revision identifiers, used by Alembic.
revision = 'b1647eff45c9' revision = '50dd2e016d94'
down_revision = None down_revision = None
branch_labels = None branch_labels = None
depends_on = None depends_on = None
@ -95,7 +95,7 @@ def upgrade():
) )
op.create_table('process_instance', op.create_table('process_instance',
sa.Column('id', sa.Integer(), nullable=False), sa.Column('id', sa.Integer(), nullable=False),
sa.Column('process_model_identifier', sa.String(length=50), nullable=False), sa.Column('process_model_identifier', sa.String(length=255), nullable=False),
sa.Column('process_group_identifier', sa.String(length=50), nullable=False), sa.Column('process_group_identifier', sa.String(length=50), nullable=False),
sa.Column('process_initiator_id', sa.Integer(), nullable=False), sa.Column('process_initiator_id', sa.Integer(), nullable=False),
sa.Column('bpmn_json', sa.JSON(), nullable=True), sa.Column('bpmn_json', sa.JSON(), nullable=True),
@ -115,19 +115,16 @@ def upgrade():
op.create_table('process_instance_report', op.create_table('process_instance_report',
sa.Column('id', sa.Integer(), nullable=False), sa.Column('id', sa.Integer(), nullable=False),
sa.Column('identifier', sa.String(length=50), nullable=False), sa.Column('identifier', sa.String(length=50), nullable=False),
sa.Column('process_model_identifier', sa.String(length=50), nullable=False),
sa.Column('process_group_identifier', sa.String(length=50), nullable=False),
sa.Column('report_metadata', sa.JSON(), nullable=True), sa.Column('report_metadata', sa.JSON(), nullable=True),
sa.Column('created_by_id', sa.Integer(), nullable=False), sa.Column('created_by_id', sa.Integer(), nullable=False),
sa.Column('created_at_in_seconds', sa.Integer(), nullable=True), sa.Column('created_at_in_seconds', sa.Integer(), nullable=True),
sa.Column('updated_at_in_seconds', sa.Integer(), nullable=True), sa.Column('updated_at_in_seconds', sa.Integer(), nullable=True),
sa.ForeignKeyConstraint(['created_by_id'], ['user.id'], ), sa.ForeignKeyConstraint(['created_by_id'], ['user.id'], ),
sa.PrimaryKeyConstraint('id'), sa.PrimaryKeyConstraint('id'),
sa.UniqueConstraint('process_group_identifier', 'process_model_identifier', 'identifier', name='process_instance_report_unique') sa.UniqueConstraint('created_by_id', 'identifier', name='process_instance_report_unique')
) )
op.create_index(op.f('ix_process_instance_report_created_by_id'), 'process_instance_report', ['created_by_id'], unique=False)
op.create_index(op.f('ix_process_instance_report_identifier'), 'process_instance_report', ['identifier'], unique=False) op.create_index(op.f('ix_process_instance_report_identifier'), 'process_instance_report', ['identifier'], unique=False)
op.create_index(op.f('ix_process_instance_report_process_group_identifier'), 'process_instance_report', ['process_group_identifier'], unique=False)
op.create_index(op.f('ix_process_instance_report_process_model_identifier'), 'process_instance_report', ['process_model_identifier'], unique=False)
op.create_table('refresh_token', op.create_table('refresh_token',
sa.Column('id', sa.Integer(), nullable=False), sa.Column('id', sa.Integer(), nullable=False),
sa.Column('user_id', sa.Integer(), nullable=False), sa.Column('user_id', sa.Integer(), nullable=False),
@ -292,9 +289,8 @@ def downgrade():
op.drop_table('user_group_assignment') op.drop_table('user_group_assignment')
op.drop_table('secret') op.drop_table('secret')
op.drop_table('refresh_token') op.drop_table('refresh_token')
op.drop_index(op.f('ix_process_instance_report_process_model_identifier'), table_name='process_instance_report')
op.drop_index(op.f('ix_process_instance_report_process_group_identifier'), table_name='process_instance_report')
op.drop_index(op.f('ix_process_instance_report_identifier'), table_name='process_instance_report') op.drop_index(op.f('ix_process_instance_report_identifier'), table_name='process_instance_report')
op.drop_index(op.f('ix_process_instance_report_created_by_id'), table_name='process_instance_report')
op.drop_table('process_instance_report') op.drop_table('process_instance_report')
op.drop_index(op.f('ix_process_instance_process_model_identifier'), table_name='process_instance') op.drop_index(op.f('ix_process_instance_process_model_identifier'), table_name='process_instance')
op.drop_index(op.f('ix_process_instance_process_group_identifier'), table_name='process_instance') op.drop_index(op.f('ix_process_instance_process_group_identifier'), table_name='process_instance')

poetry.lock generated
View File

@ -80,8 +80,7 @@ python-versions = ">=3.7.2"
[package.dependencies] [package.dependencies]
lazy-object-proxy = ">=1.4.0" lazy-object-proxy = ">=1.4.0"
typing-extensions = {version = ">=3.10", markers = "python_version < \"3.10\""} wrapt = {version = ">=1.14,<2", markers = "python_version >= \"3.11\""}
wrapt = {version = ">=1.11,<2", markers = "python_version < \"3.11\""}
[[package]] [[package]]
name = "attrs" name = "attrs"
@ -95,7 +94,7 @@ python-versions = ">=3.5"
dev = ["cloudpickle", "coverage[toml] (>=5.0.2)", "furo", "hypothesis", "mypy (>=0.900,!=0.940)", "pre-commit", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins", "sphinx", "sphinx-notfound-page", "zope.interface"] dev = ["cloudpickle", "coverage[toml] (>=5.0.2)", "furo", "hypothesis", "mypy (>=0.900,!=0.940)", "pre-commit", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins", "sphinx", "sphinx-notfound-page", "zope.interface"]
docs = ["furo", "sphinx", "sphinx-notfound-page", "zope.interface"] docs = ["furo", "sphinx", "sphinx-notfound-page", "zope.interface"]
tests = ["cloudpickle", "coverage[toml] (>=5.0.2)", "hypothesis", "mypy (>=0.900,!=0.940)", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins", "zope.interface"] tests = ["cloudpickle", "coverage[toml] (>=5.0.2)", "hypothesis", "mypy (>=0.900,!=0.940)", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins", "zope.interface"]
tests_no_zope = ["cloudpickle", "coverage[toml] (>=5.0.2)", "hypothesis", "mypy (>=0.900,!=0.940)", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins"] tests-no-zope = ["cloudpickle", "coverage[toml] (>=5.0.2)", "hypothesis", "mypy (>=0.900,!=0.940)", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins"]
[[package]] [[package]]
name = "Babel" name = "Babel"
@ -175,8 +174,6 @@ click = ">=8.0.0"
mypy-extensions = ">=0.4.3" mypy-extensions = ">=0.4.3"
pathspec = ">=0.9.0" pathspec = ">=0.9.0"
platformdirs = ">=2" platformdirs = ">=2"
tomli = {version = ">=1.1.0", markers = "python_full_version < \"3.11.0a7\""}
typing-extensions = {version = ">=3.10.0.0", markers = "python_version < \"3.10\""}
[package.extras] [package.extras]
colorama = ["colorama (>=0.4.3)"] colorama = ["colorama (>=0.4.3)"]
@ -268,7 +265,7 @@ optional = false
python-versions = ">=3.6.0" python-versions = ">=3.6.0"
[package.extras] [package.extras]
unicode_backport = ["unicodedata2"] unicode-backport = ["unicodedata2"]
[[package]] [[package]]
name = "classify-imports" name = "classify-imports"
@ -394,9 +391,6 @@ category = "dev"
optional = false optional = false
python-versions = ">=3.7" python-versions = ">=3.7"
[package.dependencies]
tomli = {version = "*", optional = true, markers = "python_full_version <= \"3.11.0a6\" and extra == \"toml\""}
[package.extras] [package.extras]
toml = ["tomli"] toml = ["tomli"]
@ -574,7 +568,6 @@ python-versions = ">=3.7"
[package.dependencies] [package.dependencies]
click = ">=8.0" click = ">=8.0"
importlib-metadata = {version = ">=3.6.0", markers = "python_version < \"3.10\""}
itsdangerous = ">=2.0" itsdangerous = ">=2.0"
Jinja2 = ">=3.0" Jinja2 = ">=3.0"
Werkzeug = ">=2.2.2" Werkzeug = ">=2.2.2"
@ -639,7 +632,7 @@ werkzeug = "*"
type = "git" type = "git"
url = "https://github.com/sartography/flask-bpmn" url = "https://github.com/sartography/flask-bpmn"
reference = "main" reference = "main"
resolved_reference = "191f0f32798720c9ce1e5307732c90ac26433298" resolved_reference = "17434e0907cc35914d013614bb79288eed1bd437"
[[package]] [[package]]
name = "Flask-Cors" name = "Flask-Cors"
@ -818,22 +811,6 @@ category = "main"
optional = false optional = false
python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*"
[[package]]
name = "importlib-metadata"
version = "4.13.0"
description = "Read metadata from Python packages"
category = "main"
optional = false
python-versions = ">=3.7"
[package.dependencies]
zipp = ">=0.5"
[package.extras]
docs = ["furo", "jaraco.packaging (>=9)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)"]
perf = ["ipython"]
testing = ["flake8 (<5)", "flufl.flake8", "importlib-resources (>=1.3)", "packaging", "pyfakefs", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=1.3)", "pytest-flake8", "pytest-mypy (>=0.9.1)", "pytest-perf (>=0.9.2)"]
[[package]] [[package]]
name = "inflection" name = "inflection"
version = "0.5.1" version = "0.5.1"
@ -1067,7 +1044,6 @@ python-versions = ">=3.7"
[package.dependencies] [package.dependencies]
mypy-extensions = ">=0.4.3" mypy-extensions = ">=0.4.3"
tomli = {version = ">=1.1.0", markers = "python_version < \"3.11\""}
typing-extensions = ">=3.10" typing-extensions = ">=3.10"
[package.extras] [package.extras]
@ -1206,7 +1182,6 @@ python-versions = ">=3.7"
[package.dependencies] [package.dependencies]
"ruamel.yaml" = ">=0.15" "ruamel.yaml" = ">=0.15"
tomli = {version = ">=1.1.0", markers = "python_version < \"3.11\""}
[[package]] [[package]]
name = "prompt-toolkit" name = "prompt-toolkit"
@ -1512,7 +1487,7 @@ urllib3 = ">=1.21.1,<1.27"
[package.extras] [package.extras]
socks = ["PySocks (>=1.5.6,!=1.5.7)"] socks = ["PySocks (>=1.5.6,!=1.5.7)"]
use_chardet_on_py3 = ["chardet (>=3.0.2,<6)"] use-chardet-on-py3 = ["chardet (>=3.0.2,<6)"]
[[package]] [[package]]
name = "requests-toolbelt" name = "requests-toolbelt"
@ -1526,12 +1501,12 @@ python-versions = "*"
requests = ">=2.0.1,<3.0.0" requests = ">=2.0.1,<3.0.0"
[[package]] [[package]]
name = "RestrictedPython" name = "restrictedpython"
version = "5.2" version = "6.0"
description = "RestrictedPython is a defined subset of the Python language which allows to provide a program input into a trusted environment." description = "RestrictedPython is a defined subset of the Python language which allows to provide a program input into a trusted environment."
category = "main" category = "main"
optional = false optional = false
python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, <3.11" python-versions = ">=3.6, <3.12"
[package.extras] [package.extras]
docs = ["Sphinx", "sphinx-rtd-theme"] docs = ["Sphinx", "sphinx-rtd-theme"]
@ -1567,21 +1542,10 @@ category = "dev"
optional = false optional = false
python-versions = ">=3" python-versions = ">=3"
[package.dependencies]
"ruamel.yaml.clib" = {version = ">=0.2.6", markers = "platform_python_implementation == \"CPython\" and python_version < \"3.11\""}
[package.extras] [package.extras]
docs = ["ryd"] docs = ["ryd"]
jinja2 = ["ruamel.yaml.jinja2 (>=0.2)"] jinja2 = ["ruamel.yaml.jinja2 (>=0.2)"]
[[package]]
name = "ruamel.yaml.clib"
version = "0.2.6"
description = "C version of reader, parser and emitter for ruamel.yaml derived from libyaml"
category = "dev"
optional = false
python-versions = ">=3.5"
[[package]] [[package]]
name = "safety" name = "safety"
version = "2.3.1" version = "2.3.1"
@ -1625,7 +1589,7 @@ falcon = ["falcon (>=1.4)"]
fastapi = ["fastapi (>=0.79.0)"] fastapi = ["fastapi (>=0.79.0)"]
flask = ["blinker (>=1.1)", "flask (>=0.11)"] flask = ["blinker (>=1.1)", "flask (>=0.11)"]
httpx = ["httpx (>=0.16.0)"] httpx = ["httpx (>=0.16.0)"]
pure_eval = ["asttokens", "executing", "pure-eval"] pure-eval = ["asttokens", "executing", "pure-eval"]
pyspark = ["pyspark (>=2.4.4)"] pyspark = ["pyspark (>=2.4.4)"]
quart = ["blinker (>=1.1)", "quart (>=0.16.1)"] quart = ["blinker (>=1.1)", "quart (>=0.16.1)"]
rq = ["rq (>=0.6)"] rq = ["rq (>=0.6)"]
@ -1701,7 +1665,6 @@ babel = ">=2.9"
colorama = {version = ">=0.4.5", markers = "sys_platform == \"win32\""} colorama = {version = ">=0.4.5", markers = "sys_platform == \"win32\""}
docutils = ">=0.14,<0.20" docutils = ">=0.14,<0.20"
imagesize = ">=1.3" imagesize = ">=1.3"
importlib-metadata = {version = ">=4.8", markers = "python_version < \"3.10\""}
Jinja2 = ">=3.0" Jinja2 = ">=3.0"
packaging = ">=21.0" packaging = ">=21.0"
Pygments = ">=2.12" Pygments = ">=2.12"
@ -1889,19 +1852,19 @@ aiomysql = ["aiomysql", "greenlet (!=0.4.17)"]
aiosqlite = ["aiosqlite", "greenlet (!=0.4.17)", "typing_extensions (!=3.10.0.1)"] aiosqlite = ["aiosqlite", "greenlet (!=0.4.17)", "typing_extensions (!=3.10.0.1)"]
asyncio = ["greenlet (!=0.4.17)"] asyncio = ["greenlet (!=0.4.17)"]
asyncmy = ["asyncmy (>=0.2.3,!=0.2.4)", "greenlet (!=0.4.17)"] asyncmy = ["asyncmy (>=0.2.3,!=0.2.4)", "greenlet (!=0.4.17)"]
mariadb_connector = ["mariadb (>=1.0.1,!=1.1.2)"] mariadb-connector = ["mariadb (>=1.0.1,!=1.1.2)"]
mssql = ["pyodbc"] mssql = ["pyodbc"]
mssql_pymssql = ["pymssql"] mssql-pymssql = ["pymssql"]
mssql_pyodbc = ["pyodbc"] mssql-pyodbc = ["pyodbc"]
mypy = ["mypy (>=0.910)", "sqlalchemy2-stubs"] mypy = ["mypy (>=0.910)", "sqlalchemy2-stubs"]
mysql = ["mysqlclient (>=1.4.0)", "mysqlclient (>=1.4.0,<2)"] mysql = ["mysqlclient (>=1.4.0)", "mysqlclient (>=1.4.0,<2)"]
mysql_connector = ["mysql-connector-python"] mysql-connector = ["mysql-connector-python"]
oracle = ["cx_oracle (>=7)", "cx_oracle (>=7,<8)"] oracle = ["cx_oracle (>=7)", "cx_oracle (>=7,<8)"]
postgresql = ["psycopg2 (>=2.7)"] postgresql = ["psycopg2 (>=2.7)"]
postgresql_asyncpg = ["asyncpg", "greenlet (!=0.4.17)"] postgresql-asyncpg = ["asyncpg", "greenlet (!=0.4.17)"]
postgresql_pg8000 = ["pg8000 (>=1.16.6,!=1.29.0)"] postgresql-pg8000 = ["pg8000 (>=1.16.6,!=1.29.0)"]
postgresql_psycopg2binary = ["psycopg2-binary"] postgresql-psycopg2binary = ["psycopg2-binary"]
postgresql_psycopg2cffi = ["psycopg2cffi"] postgresql-psycopg2cffi = ["psycopg2cffi"]
pymysql = ["pymysql", "pymysql (<1)"] pymysql = ["pymysql", "pymysql (<1)"]
sqlcipher = ["sqlcipher3_binary"] sqlcipher = ["sqlcipher3_binary"]
@ -2239,22 +2202,10 @@ runtime-strict = ["six (==1.11.0)"]
tests = ["cmake", "codecov", "ninja", "pybind11", "pytest", "pytest", "pytest", "pytest", "pytest", "pytest", "pytest-cov", "pytest-cov", "pytest-cov", "pytest-cov", "scikit-build", "typing"] tests = ["cmake", "codecov", "ninja", "pybind11", "pytest", "pytest", "pytest", "pytest", "pytest", "pytest", "pytest-cov", "pytest-cov", "pytest-cov", "pytest-cov", "scikit-build", "typing"]
tests-strict = ["cmake (==3.21.2)", "codecov (==2.0.15)", "ninja (==1.10.2)", "pybind11 (==2.7.1)", "pytest (==4.6.0)", "pytest (==4.6.0)", "pytest (==4.6.0)", "pytest (==4.6.0)", "pytest (==4.6.0)", "pytest (==6.2.5)", "pytest-cov (==2.8.1)", "pytest-cov (==2.8.1)", "pytest-cov (==2.9.0)", "pytest-cov (==3.0.0)", "scikit-build (==0.11.1)", "typing (==3.7.4)"] tests-strict = ["cmake (==3.21.2)", "codecov (==2.0.15)", "ninja (==1.10.2)", "pybind11 (==2.7.1)", "pytest (==4.6.0)", "pytest (==4.6.0)", "pytest (==4.6.0)", "pytest (==4.6.0)", "pytest (==4.6.0)", "pytest (==6.2.5)", "pytest-cov (==2.8.1)", "pytest-cov (==2.8.1)", "pytest-cov (==2.9.0)", "pytest-cov (==3.0.0)", "scikit-build (==0.11.1)", "typing (==3.7.4)"]
[[package]]
name = "zipp"
version = "3.9.0"
description = "Backport of pathlib-compatible object wrapper for zip files"
category = "main"
optional = false
python-versions = ">=3.7"
[package.extras]
docs = ["furo", "jaraco.packaging (>=9)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)"]
testing = ["flake8 (<5)", "func-timeout", "jaraco.functools", "jaraco.itertools", "more-itertools", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=1.3)", "pytest-flake8", "pytest-mypy (>=0.9.1)"]
[metadata] [metadata]
lock-version = "1.1" lock-version = "1.1"
python-versions = ">=3.9,<3.11" python-versions = ">=3.11,<3.12"
content-hash = "995be3a9a60b515b281f017ff32ff27a52ca178b1980611b348dccac6afb6b89" content-hash = "1ba9277969015f0ef348dccb79e9977e20665720958f7ba22360398fba9da092"
[metadata.files] [metadata.files]
alabaster = [ alabaster = [
@ -2636,10 +2587,6 @@ imagesize = [
{file = "imagesize-1.4.1-py2.py3-none-any.whl", hash = "sha256:0d8d18d08f840c19d0ee7ca1fd82490fdc3729b7ac93f49870406ddde8ef8d8b"}, {file = "imagesize-1.4.1-py2.py3-none-any.whl", hash = "sha256:0d8d18d08f840c19d0ee7ca1fd82490fdc3729b7ac93f49870406ddde8ef8d8b"},
{file = "imagesize-1.4.1.tar.gz", hash = "sha256:69150444affb9cb0d5cc5a92b3676f0b2fb7cd9ae39e947a5e11a36b4497cd4a"}, {file = "imagesize-1.4.1.tar.gz", hash = "sha256:69150444affb9cb0d5cc5a92b3676f0b2fb7cd9ae39e947a5e11a36b4497cd4a"},
] ]
importlib-metadata = [
{file = "importlib_metadata-4.13.0-py3-none-any.whl", hash = "sha256:8a8a81bcf996e74fee46f0d16bd3eaa382a7eb20fd82445c3ad11f4090334116"},
{file = "importlib_metadata-4.13.0.tar.gz", hash = "sha256:dd0173e8f150d6815e098fd354f6414b0f079af4644ddfe90c71e2fc6174346d"},
]
inflection = [ inflection = [
{file = "inflection-0.5.1-py2.py3-none-any.whl", hash = "sha256:f38b2b640938a4f35ade69ac3d053042959b62a0f1076a5bbaa1b9526605a8a2"}, {file = "inflection-0.5.1-py2.py3-none-any.whl", hash = "sha256:f38b2b640938a4f35ade69ac3d053042959b62a0f1076a5bbaa1b9526605a8a2"},
{file = "inflection-0.5.1.tar.gz", hash = "sha256:1a29730d366e996aaacffb2f1f1cb9593dc38e2ddd30c91250c6dde09ea9b417"}, {file = "inflection-0.5.1.tar.gz", hash = "sha256:1a29730d366e996aaacffb2f1f1cb9593dc38e2ddd30c91250c6dde09ea9b417"},
@ -3287,9 +3234,9 @@ requests-toolbelt = [
{file = "requests-toolbelt-0.9.1.tar.gz", hash = "sha256:968089d4584ad4ad7c171454f0a5c6dac23971e9472521ea3b6d49d610aa6fc0"}, {file = "requests-toolbelt-0.9.1.tar.gz", hash = "sha256:968089d4584ad4ad7c171454f0a5c6dac23971e9472521ea3b6d49d610aa6fc0"},
{file = "requests_toolbelt-0.9.1-py2.py3-none-any.whl", hash = "sha256:380606e1d10dc85c3bd47bf5a6095f815ec007be7a8b69c878507068df059e6f"}, {file = "requests_toolbelt-0.9.1-py2.py3-none-any.whl", hash = "sha256:380606e1d10dc85c3bd47bf5a6095f815ec007be7a8b69c878507068df059e6f"},
] ]
RestrictedPython = [ restrictedpython = [
{file = "RestrictedPython-5.2-py2.py3-none-any.whl", hash = "sha256:fdf8621034c5dcb990a2a198f232f66b2d48866dd16d848e00ac7d187ae452ba"}, {file = "RestrictedPython-6.0-py3-none-any.whl", hash = "sha256:3479303f7bff48a7dedad76f96e7704993c5e86c5adbd67f607295d5352f0fb8"},
{file = "RestrictedPython-5.2.tar.gz", hash = "sha256:634da1f6c5c122a262f433b083ee3d17a9a039f8f1b3778597efb47461cd361b"}, {file = "RestrictedPython-6.0.tar.gz", hash = "sha256:405cf0bd9eec2f19b1326b5f48228efe56d6590b4e91826b8cc3b2cd400a96ad"},
] ]
restructuredtext-lint = [ restructuredtext-lint = [
{file = "restructuredtext_lint-1.4.0.tar.gz", hash = "sha256:1b235c0c922341ab6c530390892eb9e92f90b9b75046063e047cacfb0f050c45"}, {file = "restructuredtext_lint-1.4.0.tar.gz", hash = "sha256:1b235c0c922341ab6c530390892eb9e92f90b9b75046063e047cacfb0f050c45"},
@ -3302,38 +3249,6 @@ rsa = [
{file = "ruamel.yaml-0.17.21-py3-none-any.whl", hash = "sha256:742b35d3d665023981bd6d16b3d24248ce5df75fdb4e2924e93a05c1f8b61ca7"}, {file = "ruamel.yaml-0.17.21-py3-none-any.whl", hash = "sha256:742b35d3d665023981bd6d16b3d24248ce5df75fdb4e2924e93a05c1f8b61ca7"},
{file = "ruamel.yaml-0.17.21.tar.gz", hash = "sha256:8b7ce697a2f212752a35c1ac414471dc16c424c9573be4926b56ff3f5d23b7af"}, {file = "ruamel.yaml-0.17.21.tar.gz", hash = "sha256:8b7ce697a2f212752a35c1ac414471dc16c424c9573be4926b56ff3f5d23b7af"},
] ]
"ruamel.yaml.clib" = [
{file = "ruamel.yaml.clib-0.2.6-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:6e7be2c5bcb297f5b82fee9c665eb2eb7001d1050deaba8471842979293a80b0"},
{file = "ruamel.yaml.clib-0.2.6-cp310-cp310-manylinux2014_aarch64.whl", hash = "sha256:066f886bc90cc2ce44df8b5f7acfc6a7e2b2e672713f027136464492b0c34d7c"},
{file = "ruamel.yaml.clib-0.2.6-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:221eca6f35076c6ae472a531afa1c223b9c29377e62936f61bc8e6e8bdc5f9e7"},
{file = "ruamel.yaml.clib-0.2.6-cp310-cp310-win32.whl", hash = "sha256:1070ba9dd7f9370d0513d649420c3b362ac2d687fe78c6e888f5b12bf8bc7bee"},
{file = "ruamel.yaml.clib-0.2.6-cp310-cp310-win_amd64.whl", hash = "sha256:77df077d32921ad46f34816a9a16e6356d8100374579bc35e15bab5d4e9377de"},
{file = "ruamel.yaml.clib-0.2.6-cp35-cp35m-macosx_10_6_intel.whl", hash = "sha256:cfdb9389d888c5b74af297e51ce357b800dd844898af9d4a547ffc143fa56751"},
{file = "ruamel.yaml.clib-0.2.6-cp35-cp35m-manylinux1_x86_64.whl", hash = "sha256:7b2927e92feb51d830f531de4ccb11b320255ee95e791022555971c466af4527"},
{file = "ruamel.yaml.clib-0.2.6-cp35-cp35m-win32.whl", hash = "sha256:ada3f400d9923a190ea8b59c8f60680c4ef8a4b0dfae134d2f2ff68429adfab5"},
{file = "ruamel.yaml.clib-0.2.6-cp35-cp35m-win_amd64.whl", hash = "sha256:de9c6b8a1ba52919ae919f3ae96abb72b994dd0350226e28f3686cb4f142165c"},
{file = "ruamel.yaml.clib-0.2.6-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:d67f273097c368265a7b81e152e07fb90ed395df6e552b9fa858c6d2c9f42502"},
{file = "ruamel.yaml.clib-0.2.6-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:72a2b8b2ff0a627496aad76f37a652bcef400fd861721744201ef1b45199ab78"},
{file = "ruamel.yaml.clib-0.2.6-cp36-cp36m-manylinux2014_aarch64.whl", hash = "sha256:d3c620a54748a3d4cf0bcfe623e388407c8e85a4b06b8188e126302bcab93ea8"},
{file = "ruamel.yaml.clib-0.2.6-cp36-cp36m-win32.whl", hash = "sha256:9efef4aab5353387b07f6b22ace0867032b900d8e91674b5d8ea9150db5cae94"},
{file = "ruamel.yaml.clib-0.2.6-cp36-cp36m-win_amd64.whl", hash = "sha256:846fc8336443106fe23f9b6d6b8c14a53d38cef9a375149d61f99d78782ea468"},
{file = "ruamel.yaml.clib-0.2.6-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:0847201b767447fc33b9c235780d3aa90357d20dd6108b92be544427bea197dd"},
{file = "ruamel.yaml.clib-0.2.6-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:78988ed190206672da0f5d50c61afef8f67daa718d614377dcd5e3ed85ab4a99"},
{file = "ruamel.yaml.clib-0.2.6-cp37-cp37m-manylinux2014_aarch64.whl", hash = "sha256:210c8fcfeff90514b7133010bf14e3bad652c8efde6b20e00c43854bf94fa5a6"},
{file = "ruamel.yaml.clib-0.2.6-cp37-cp37m-win32.whl", hash = "sha256:a49e0161897901d1ac9c4a79984b8410f450565bbad64dbfcbf76152743a0cdb"},
{file = "ruamel.yaml.clib-0.2.6-cp37-cp37m-win_amd64.whl", hash = "sha256:bf75d28fa071645c529b5474a550a44686821decebdd00e21127ef1fd566eabe"},
{file = "ruamel.yaml.clib-0.2.6-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:a32f8d81ea0c6173ab1b3da956869114cae53ba1e9f72374032e33ba3118c233"},
{file = "ruamel.yaml.clib-0.2.6-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:7f7ecb53ae6848f959db6ae93bdff1740e651809780822270eab111500842a84"},
{file = "ruamel.yaml.clib-0.2.6-cp38-cp38-manylinux2014_aarch64.whl", hash = "sha256:61bc5e5ca632d95925907c569daa559ea194a4d16084ba86084be98ab1cec1c6"},
{file = "ruamel.yaml.clib-0.2.6-cp38-cp38-win32.whl", hash = "sha256:89221ec6d6026f8ae859c09b9718799fea22c0e8da8b766b0b2c9a9ba2db326b"},
{file = "ruamel.yaml.clib-0.2.6-cp38-cp38-win_amd64.whl", hash = "sha256:31ea73e564a7b5fbbe8188ab8b334393e06d997914a4e184975348f204790277"},
{file = "ruamel.yaml.clib-0.2.6-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:dc6a613d6c74eef5a14a214d433d06291526145431c3b964f5e16529b1842bed"},
{file = "ruamel.yaml.clib-0.2.6-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:1866cf2c284a03b9524a5cc00daca56d80057c5ce3cdc86a52020f4c720856f0"},
{file = "ruamel.yaml.clib-0.2.6-cp39-cp39-manylinux2014_aarch64.whl", hash = "sha256:1b4139a6ffbca8ef60fdaf9b33dec05143ba746a6f0ae0f9d11d38239211d335"},
{file = "ruamel.yaml.clib-0.2.6-cp39-cp39-win32.whl", hash = "sha256:3fb9575a5acd13031c57a62cc7823e5d2ff8bc3835ba4d94b921b4e6ee664104"},
{file = "ruamel.yaml.clib-0.2.6-cp39-cp39-win_amd64.whl", hash = "sha256:825d5fccef6da42f3c8eccd4281af399f21c02b32d98e113dbc631ea6a6ecbc7"},
{file = "ruamel.yaml.clib-0.2.6.tar.gz", hash = "sha256:4ff604ce439abb20794f05613c374759ce10e3595d1867764dd1ae675b85acbd"},
]
safety = [ safety = [
{file = "safety-2.3.1-py3-none-any.whl", hash = "sha256:8f098d12b607db2756886280e85c28ece8db1bba4f45fc5f981f4663217bd619"}, {file = "safety-2.3.1-py3-none-any.whl", hash = "sha256:8f098d12b607db2756886280e85c28ece8db1bba4f45fc5f981f4663217bd619"},
{file = "safety-2.3.1.tar.gz", hash = "sha256:6e6fcb7d4e8321098cf289f59b65051cafd3467f089c6e57c9f894ae32c23b71"}, {file = "safety-2.3.1.tar.gz", hash = "sha256:6e6fcb7d4e8321098cf289f59b65051cafd3467f089c6e57c9f894ae32c23b71"},
@ -3705,7 +3620,3 @@ xdoctest = [
{file = "xdoctest-1.1.0-py3-none-any.whl", hash = "sha256:da330c4dacee51f3c785820bc743188fb6f7c64c5fa1c54bff8836b3cf23d69b"}, {file = "xdoctest-1.1.0-py3-none-any.whl", hash = "sha256:da330c4dacee51f3c785820bc743188fb6f7c64c5fa1c54bff8836b3cf23d69b"},
{file = "xdoctest-1.1.0.tar.gz", hash = "sha256:0fd4fad7932f0a2f082dfdfb857dd6ca41603757586c39b1e5b4d333fc389f8a"}, {file = "xdoctest-1.1.0.tar.gz", hash = "sha256:0fd4fad7932f0a2f082dfdfb857dd6ca41603757586c39b1e5b4d333fc389f8a"},
] ]
zipp = [
{file = "zipp-3.9.0-py3-none-any.whl", hash = "sha256:972cfa31bc2fedd3fa838a51e9bc7e64b7fb725a8c00e7431554311f180e9980"},
{file = "zipp-3.9.0.tar.gz", hash = "sha256:3a7af91c3db40ec72dd9d154ae18e008c69efe8ca88dde4f9a731bb82fe2f9eb"},
]

View File

@ -16,7 +16,7 @@ classifiers = [
Changelog = "https://github.com/sartography/spiffworkflow-backend/releases" Changelog = "https://github.com/sartography/spiffworkflow-backend/releases"
[tool.poetry.dependencies] [tool.poetry.dependencies]
python = ">=3.9,<3.11" python = ">=3.11,<3.12"
click = "^8.0.1" click = "^8.0.1"
flask = "2.2.2" flask = "2.2.2"
flask-admin = "*" flask-admin = "*"
@ -47,7 +47,7 @@ gunicorn = "^20.1.0"
python-keycloak = "^2.5.0" python-keycloak = "^2.5.0"
APScheduler = "^3.9.1" APScheduler = "^3.9.1"
Jinja2 = "^3.1.2" Jinja2 = "^3.1.2"
RestrictedPython = "^5.2" RestrictedPython = "^6.0"
Flask-SQLAlchemy = "^3" Flask-SQLAlchemy = "^3"
orjson = "^3.8.0" orjson = "^3.8.0"

View File

@ -286,21 +286,14 @@ paths:
schema: schema:
$ref: "#/components/schemas/ProcessModel" $ref: "#/components/schemas/ProcessModel"
/process-models/{process_group_id}/{process_model_id}/files: /process-models/{modified_process_model_id}/files:
parameters: parameters:
- name: process_group_id - name: modified_process_model_id
in: path in: path
required: true required: true
description: The group containing the models we want to return description: The process_model_id, modified to replace slashes (/)
schema: schema:
type: string type: string
- name: process_model_id
in: path
required: true
description: The unique id of an existing process model to validate.
schema:
type: string
# add_file
post: post:
operationId: spiffworkflow_backend.routes.process_api_blueprint.add_file operationId: spiffworkflow_backend.routes.process_api_blueprint.add_file
summary: Add a new workflow spec file summary: Add a new workflow spec file
@ -322,36 +315,15 @@ paths:
application/json: application/json:
schema: schema:
$ref: "#/components/schemas/File" $ref: "#/components/schemas/File"
# get:
# operationId: spiffworkflow_backend.api.process_api_blueprint.get_files
# summary: Provide a list of workflow spec files for the given workflow_spec_id. IMPORTANT, only includes metadata, not the file content.
# tags:
# - Process Model Files
# responses:
# '200':
# description: An array of file descriptions (not the file content)
# content:
# application/json:
# schema:
# type: array
# items:
# $ref: "#/components/schemas/File"
/process-models/{process_group_id}/{process_model_id}: /process-models/{modified_process_model_identifier}:
parameters: parameters:
- name: process_group_id - name: modified_process_model_identifier
in: path in: path
required: true required: true
description: The unique id of an existing process group description: the modified process model id
schema: schema:
type: string type: string
- name: process_model_id
in: path
required: true
description: The unique id of an existing process model.
schema:
type: string
# process_model_show
get: get:
operationId: spiffworkflow_backend.routes.process_api_blueprint.process_model_show operationId: spiffworkflow_backend.routes.process_api_blueprint.process_model_show
summary: Returns a single process model summary: Returns a single process model
@ -364,22 +336,9 @@ paths:
application/json: application/json:
schema: schema:
$ref: "#/components/schemas/ProcessModel" $ref: "#/components/schemas/ProcessModel"
# process_model_delete
delete:
operationId: spiffworkflow_backend.routes.process_api_blueprint.process_model_delete
summary: Removes an existing process model
tags:
- Process Models
responses:
"200":
description: The process model has been removed.
content:
application/json:
schema:
$ref: "#/components/schemas/OkTrue"
put: put:
operationId: spiffworkflow_backend.routes.process_api_blueprint.process_model_update operationId: spiffworkflow_backend.routes.process_api_blueprint.process_model_update
summary: Modifies an existing process mosel with the given parameters. summary: Modifies an existing process model with the given parameters.
tags: tags:
- Process Models - Process Models
requestBody: requestBody:
@ -394,15 +353,21 @@ paths:
application/json: application/json:
schema: schema:
$ref: "#/components/schemas/ProcessModel" $ref: "#/components/schemas/ProcessModel"
delete:
operationId: spiffworkflow_backend.routes.process_api_blueprint.process_model_delete
summary: Removes an existing process model
tags:
- Process Models
responses:
"200":
description: The process model has been removed.
content:
application/json:
schema:
$ref: "#/components/schemas/OkTrue"
/process-instances: /process-instances:
parameters: parameters:
- name: process_group_identifier
in: query
required: false
description: The unique id of an existing process group
schema:
type: string
- name: process_model_identifier - name: process_model_identifier
in: query in: query
required: false required: false
@ -548,15 +513,9 @@ paths:
schema: schema:
$ref: "#/components/schemas/Workflow" $ref: "#/components/schemas/Workflow"
/process-models/{process_group_id}/{process_model_id}/process-instances: /process-models/{modified_process_model_id}/process-instances:
parameters: parameters:
- name: process_group_id - name: modified_process_model_id
in: path
required: true
description: The unique id of an existing process group
schema:
type: string
- name: process_model_id
in: path in: path
required: true required: true
description: The unique id of an existing process model. description: The unique id of an existing process model.
@ -576,18 +535,33 @@ paths:
schema: schema:
$ref: "#/components/schemas/Workflow" $ref: "#/components/schemas/Workflow"
/process-models/{process_group_id}/{process_model_id}/process-instances/{process_instance_id}: /process-instances/{process_instance_id}:
parameters: parameters:
- name: process_group_id - name: process_instance_id
in: path in: path
required: true required: true
description: The unique id of an existing process group description: The unique id of an existing process instance.
schema: schema:
type: string type: integer
- name: process_model_id delete:
operationId: spiffworkflow_backend.routes.process_api_blueprint.process_instance_delete
summary: Deletes a single process instance
tags:
- Process Instances
responses:
"200":
description: The process instance was deleted.
content:
application/json:
schema:
$ref: "#/components/schemas/OkTrue"
/process-models/{modified_process_model_identifier}/process-instances/{process_instance_id}:
parameters:
- name: modified_process_model_identifier
in: path in: path
required: true required: true
description: The unique id of an existing process model. description: The unique id of an existing process model
schema: schema:
type: string type: string
- name: process_instance_id - name: process_instance_id
@ -608,34 +582,9 @@ paths:
application/json: application/json:
schema: schema:
$ref: "#/components/schemas/Workflow" $ref: "#/components/schemas/Workflow"
# process_instance_delete
delete:
operationId: spiffworkflow_backend.routes.process_api_blueprint.process_instance_delete
summary: Deletes a single process instance
tags:
- Process Instances
responses:
"200":
description: The process instance was deleted.
content:
application/json:
schema:
$ref: "#/components/schemas/OkTrue"
/process-models/{process_group_id}/{process_model_id}/process-instances/{process_instance_id}/run: /process-instances/{process_instance_id}/run:
parameters: parameters:
- name: process_group_id
in: path
required: true
description: The unique id of an existing process group
schema:
type: string
- name: process_model_id
in: path
required: true
description: The unique id of an existing process model.
schema:
type: string
- name: process_instance_id - name: process_instance_id
in: path in: path
required: true required: true
@ -662,20 +611,8 @@ paths:
schema: schema:
$ref: "#/components/schemas/Workflow" $ref: "#/components/schemas/Workflow"
/process-models/{process_group_id}/{process_model_id}/process-instances/{process_instance_id}/terminate: /process-instances/{process_instance_id}/terminate:
parameters: parameters:
- name: process_group_id
in: path
required: true
description: The unique id of an existing process group
schema:
type: string
- name: process_model_id
in: path
required: true
description: The unique id of an existing process model.
schema:
type: string
- name: process_instance_id - name: process_instance_id
in: path in: path
required: true required: true
@ -695,20 +632,8 @@ paths:
schema: schema:
$ref: "#/components/schemas/OkTrue" $ref: "#/components/schemas/OkTrue"
/process-models/{process_group_id}/{process_model_id}/process-instances/{process_instance_id}/suspend: /process-instances/{process_instance_id}/suspend:
parameters: parameters:
- name: process_group_id
in: path
required: true
description: The unique id of an existing process group
schema:
type: string
- name: process_model_id
in: path
required: true
description: The unique id of an existing process model.
schema:
type: string
- name: process_instance_id - name: process_instance_id
in: path in: path
required: true required: true
@ -728,20 +653,8 @@ paths:
schema: schema:
$ref: "#/components/schemas/OkTrue" $ref: "#/components/schemas/OkTrue"
/process-models/{process_group_id}/{process_model_id}/process-instances/{process_instance_id}/resume: /process-instances/{process_instance_id}/resume:
parameters: parameters:
- name: process_group_id
in: path
required: true
description: The unique id of an existing process group
schema:
type: string
- name: process_model_id
in: path
required: true
description: The unique id of an existing process model.
schema:
type: string
- name: process_instance_id - name: process_instance_id
in: path in: path
required: true required: true
@ -761,6 +674,35 @@ paths:
schema: schema:
$ref: "#/components/schemas/OkTrue" $ref: "#/components/schemas/OkTrue"
/process-instances/reports:
parameters:
- name: page
in: query
required: false
description: The page number to return. Defaults to page 1.
schema:
type: integer
- name: per_page
in: query
required: false
description: The page number to return. Defaults to page 1.
schema:
type: integer
get:
operationId: spiffworkflow_backend.routes.process_api_blueprint.process_instance_report_list
summary: Returns all process instance reports for process model
tags:
- Process Instances
responses:
"200":
description: Workflow.
content:
application/json:
schema:
type: array
items:
$ref: "#/components/schemas/Workflow"
/process-models/{process_group_id}/{process_model_id}/process-instances/reports: /process-models/{process_group_id}/{process_model_id}/process-instances/reports:
parameters: parameters:
- name: process_group_id - name: process_group_id
@ -787,20 +729,6 @@ paths:
description: The page number to return. Defaults to page 1. description: The page number to return. Defaults to page 1.
schema: schema:
type: integer type: integer
get:
operationId: spiffworkflow_backend.routes.process_api_blueprint.process_instance_report_list
summary: Returns all process instance reports for process model
tags:
- Process Instances
responses:
"200":
description: Workflow.
content:
application/json:
schema:
type: array
items:
$ref: "#/components/schemas/Workflow"
post: post:
operationId: spiffworkflow_backend.routes.process_api_blueprint.process_instance_report_create operationId: spiffworkflow_backend.routes.process_api_blueprint.process_instance_report_create
summary: Returns all process instance reports for process model summary: Returns all process instance reports for process model
@ -814,6 +742,41 @@ paths:
schema: schema:
$ref: "#/components/schemas/OkTrue" $ref: "#/components/schemas/OkTrue"
/process-instances/reports/{report_identifier}:
parameters:
- name: report_identifier
in: path
required: true
description: The unique id of an existing report
schema:
type: string
- name: page
in: query
required: false
description: The page number to return. Defaults to page 1.
schema:
type: integer
- name: per_page
in: query
required: false
description: The page number to return. Defaults to page 1.
schema:
type: integer
get:
operationId: spiffworkflow_backend.routes.process_api_blueprint.process_instance_report_show
summary: Returns a report of process instances for a given process model
tags:
- Process Instances
responses:
"200":
description: Workflow.
content:
application/json:
schema:
type: array
items:
$ref: "#/components/schemas/Workflow"
/process-models/{process_group_id}/{process_model_id}/process-instances/reports/{report_identifier}: /process-models/{process_group_id}/{process_model_id}/process-instances/reports/{report_identifier}:
parameters: parameters:
- name: process_group_id - name: process_group_id
@ -846,20 +809,6 @@ paths:
description: The page number to return. Defaults to page 1. description: The page number to return. Defaults to page 1.
schema: schema:
type: integer type: integer
get:
operationId: spiffworkflow_backend.routes.process_api_blueprint.process_instance_report_show
summary: Returns a report of process instances for a given process model
tags:
- Process Instances
responses:
"200":
description: Workflow.
content:
application/json:
schema:
type: array
items:
$ref: "#/components/schemas/Workflow"
put: put:
operationId: spiffworkflow_backend.routes.process_api_blueprint.process_instance_report_update operationId: spiffworkflow_backend.routes.process_api_blueprint.process_instance_report_update
summary: Updates a process instance report summary: Updates a process instance report
@ -885,18 +834,12 @@ paths:
schema: schema:
$ref: "#/components/schemas/OkTrue" $ref: "#/components/schemas/OkTrue"
/process-models/{process_group_id}/{process_model_id}/files/{file_name}: /process-models/{modified_process_model_id}/files/{file_name}:
parameters: parameters:
- name: process_group_id - name: modified_process_model_id
in: path in: path
required: true required: true
description: The unique id of an existing process group description: The modified process model id
schema:
type: string
- name: process_model_id
in: path
required: true
description: The unique id of an existing process model to validate.
schema: schema:
type: string type: string
- name: file_name - name: file_name
@ -905,7 +848,6 @@ paths:
description: The id of the spec file description: The id of the spec file
schema: schema:
type: string type: string
# get_file
get: get:
operationId: spiffworkflow_backend.routes.process_api_blueprint.get_file operationId: spiffworkflow_backend.routes.process_api_blueprint.get_file
summary: Returns metadata about the file summary: Returns metadata about the file
@ -1203,20 +1145,8 @@ paths:
schema: schema:
$ref: "#/components/schemas/Workflow" $ref: "#/components/schemas/Workflow"
/process-models/{process_group_id}/{process_model_id}/process-instances/{process_instance_id}/logs: /process-instances/{process_instance_id}/logs:
parameters: parameters:
- name: process_group_id
in: path
required: true
description: The unique id of an existing process group
schema:
type: string
- name: process_model_id
in: path
required: true
description: The unique id of an existing process model.
schema:
type: string
- name: process_instance_id - name: process_instance_id
in: path in: path
required: true required: true

View File

@ -41,3 +41,15 @@ permissions:
users: [testuser4] users: [testuser4]
allowed_permissions: [create, read, update, delete] allowed_permissions: [create, read, update, delete]
uri: /v1.0/process-models/finance/* uri: /v1.0/process-models/finance/*
finance-admin-model-lanes:
groups: ["Finance Team"]
users: [testuser4]
allowed_permissions: [create, read, update, delete]
uri: /v1.0/process-models/finance:model_with_lanes/*
finance-admin-instance-run:
groups: ["Finance Team"]
users: [testuser4]
allowed_permissions: [create, read, update, delete]
uri: /v1.0/process-instances/*

View File

@ -20,6 +20,9 @@ SPIFFWORKFLOW_BACKEND_LOG_LEVEL = environ.get(
# different places and this allows us to know exactly where we are at the start # different places and this allows us to know exactly where we are at the start
BPMN_SPEC_ABSOLUTE_DIR = os.path.join( BPMN_SPEC_ABSOLUTE_DIR = os.path.join(
os.path.dirname(__file__), os.path.dirname(__file__),
"..",
"..",
"..",
"tests", "tests",
"spiffworkflow_backend", "spiffworkflow_backend",
"files", "files",

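A hedged sketch of how the added ".." segments resolve, assuming (illustratively) that this config module lives at src/spiffworkflow_backend/config inside the project checkout; the real location depends on the repository layout.

import os

# Illustrative paths only: three ".." hops climb from the config package to the
# project root, where the tests/ tree with the BPMN fixture files lives.
config_dir = "/app/src/spiffworkflow_backend/config"
bpmn_spec_absolute_dir = os.path.normpath(
    os.path.join(config_dir, "..", "..", "..", "tests", "spiffworkflow_backend", "files")
)
# -> "/app/tests/spiffworkflow_backend/files"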
View File

@ -72,7 +72,9 @@ class ProcessInstanceModel(SpiffworkflowBaseDBModel):
__tablename__ = "process_instance" __tablename__ = "process_instance"
id: int = db.Column(db.Integer, primary_key=True) id: int = db.Column(db.Integer, primary_key=True)
process_model_identifier: str = db.Column(db.String(50), nullable=False, index=True) process_model_identifier: str = db.Column(
db.String(255), nullable=False, index=True
)
process_group_identifier: str = db.Column(db.String(50), nullable=False, index=True) process_group_identifier: str = db.Column(db.String(50), nullable=False, index=True)
process_initiator_id: int = db.Column(ForeignKey(UserModel.id), nullable=False) process_initiator_id: int = db.Column(ForeignKey(UserModel.id), nullable=False)
process_initiator = relationship("UserModel") process_initiator = relationship("UserModel")
@ -265,7 +267,7 @@ class ProcessInstanceMetadata:
id=process_instance.id, id=process_instance.id,
display_name=process_model.display_name, display_name=process_model.display_name,
description=process_model.description, description=process_model.description,
process_group_id=process_model.process_group_id, process_group_id=process_model.process_group,
state_message=process_instance.state_message, state_message=process_instance.state_message,
status=process_instance.status, status=process_instance.status,
completed_tasks=process_instance.completed_tasks, completed_tasks=process_instance.completed_tasks,

View File

@ -21,7 +21,6 @@ from spiffworkflow_backend.models.user import UserModel
from spiffworkflow_backend.services.process_instance_processor import ( from spiffworkflow_backend.services.process_instance_processor import (
ProcessInstanceProcessor, ProcessInstanceProcessor,
) )
from spiffworkflow_backend.services.process_model_service import ProcessModelService
ReportMetadata = dict[str, Any] ReportMetadata = dict[str, Any]
@ -58,8 +57,7 @@ class ProcessInstanceReportModel(SpiffworkflowBaseDBModel):
__tablename__ = "process_instance_report" __tablename__ = "process_instance_report"
__table_args__ = ( __table_args__ = (
db.UniqueConstraint( db.UniqueConstraint(
"process_group_identifier", "created_by_id",
"process_model_identifier",
"identifier", "identifier",
name="process_instance_report_unique", name="process_instance_report_unique",
), ),
@ -67,21 +65,53 @@ class ProcessInstanceReportModel(SpiffworkflowBaseDBModel):
id = db.Column(db.Integer, primary_key=True) id = db.Column(db.Integer, primary_key=True)
identifier: str = db.Column(db.String(50), nullable=False, index=True) identifier: str = db.Column(db.String(50), nullable=False, index=True)
process_model_identifier: str = db.Column(db.String(50), nullable=False, index=True)
process_group_identifier = db.Column(db.String(50), nullable=False, index=True)
report_metadata: dict = deferred(db.Column(db.JSON)) # type: ignore report_metadata: dict = deferred(db.Column(db.JSON)) # type: ignore
created_by_id = db.Column(ForeignKey(UserModel.id), nullable=False) created_by_id = db.Column(ForeignKey(UserModel.id), nullable=False, index=True)
created_by = relationship("UserModel") created_by = relationship("UserModel")
created_at_in_seconds = db.Column(db.Integer) created_at_in_seconds = db.Column(db.Integer)
updated_at_in_seconds = db.Column(db.Integer) updated_at_in_seconds = db.Column(db.Integer)
@classmethod
def default_report(cls, user: UserModel) -> ProcessInstanceReportModel:
"""Default_report."""
identifier = "default"
process_instance_report = ProcessInstanceReportModel.query.filter_by(
identifier=identifier, created_by_id=user.id
).first()
if process_instance_report is None:
report_metadata = {
"columns": [
{"Header": "id", "accessor": "id"},
{
"Header": "process_group_identifier",
"accessor": "process_group_identifier",
},
{
"Header": "process_model_identifier",
"accessor": "process_model_identifier",
},
{"Header": "start_in_seconds", "accessor": "start_in_seconds"},
{"Header": "end_in_seconds", "accessor": "end_in_seconds"},
{"Header": "status", "accessor": "status"},
],
}
process_instance_report = cls(
identifier=identifier,
created_by_id=user.id,
report_metadata=report_metadata,
)
return process_instance_report # type: ignore
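# A hedged usage sketch (names assumed from the signature above, not part of the
# diff): fetch or lazily build the caller's default report, then persist it only
# if it was newly constructed.
#
#     report = ProcessInstanceReportModel.default_report(user)
#     if report.id is None:
#         db.session.add(report)
#         db.session.commit()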
@classmethod @classmethod
def add_fixtures(cls) -> None: def add_fixtures(cls) -> None:
"""Add_fixtures.""" """Add_fixtures."""
try: try:
process_model = ProcessModelService().get_process_model( # process_model = ProcessModelService().get_process_model(
group_id="sartography-admin", process_model_id="ticket" # process_model_id="sartography-admin/ticket"
) # )
user = UserModel.query.first() user = UserModel.query.first()
columns = [ columns = [
{"Header": "id", "accessor": "id"}, {"Header": "id", "accessor": "id"},
@ -96,29 +126,21 @@ class ProcessInstanceReportModel(SpiffworkflowBaseDBModel):
cls.create_report( cls.create_report(
identifier="standard", identifier="standard",
process_group_identifier=process_model.process_group_id,
process_model_identifier=process_model.id,
user=user, user=user,
report_metadata=json, report_metadata=json,
) )
cls.create_report( cls.create_report(
identifier="for-month", identifier="for-month",
process_group_identifier="sartography-admin",
process_model_identifier="ticket",
user=user, user=user,
report_metadata=cls.ticket_for_month_report(), report_metadata=cls.ticket_for_month_report(),
) )
cls.create_report( cls.create_report(
identifier="for-month-3", identifier="for-month-3",
process_group_identifier="sartography-admin",
process_model_identifier="ticket",
user=user, user=user,
report_metadata=cls.ticket_for_month_3_report(), report_metadata=cls.ticket_for_month_3_report(),
) )
cls.create_report( cls.create_report(
identifier="hot-report", identifier="hot-report",
process_group_identifier="category_number_one",
process_model_identifier="process-model-with-form",
user=user, user=user,
report_metadata=cls.process_model_with_form_report_fixture(), report_metadata=cls.process_model_with_form_report_fixture(),
) )
@ -130,23 +152,18 @@ class ProcessInstanceReportModel(SpiffworkflowBaseDBModel):
def create_report( def create_report(
cls, cls,
identifier: str, identifier: str,
process_group_identifier: str,
process_model_identifier: str,
user: UserModel, user: UserModel,
report_metadata: ReportMetadata, report_metadata: ReportMetadata,
) -> None: ) -> None:
"""Make_fixture_report.""" """Make_fixture_report."""
process_instance_report = ProcessInstanceReportModel.query.filter_by( process_instance_report = ProcessInstanceReportModel.query.filter_by(
identifier=identifier, identifier=identifier,
process_group_identifier=process_group_identifier, created_by_id=user.id,
process_model_identifier=process_model_identifier,
).first() ).first()
if process_instance_report is None: if process_instance_report is None:
process_instance_report = cls( process_instance_report = cls(
identifier=identifier, identifier=identifier,
process_group_identifier=process_group_identifier,
process_model_identifier=process_model_identifier,
created_by_id=user.id, created_by_id=user.id,
report_metadata=report_metadata, report_metadata=report_metadata,
) )
@ -217,19 +234,22 @@ class ProcessInstanceReportModel(SpiffworkflowBaseDBModel):
def create_with_attributes( def create_with_attributes(
cls, cls,
identifier: str, identifier: str,
process_group_identifier: str,
process_model_identifier: str,
report_metadata: dict, report_metadata: dict,
user: UserModel, user: UserModel,
) -> ProcessInstanceReportModel: ) -> ProcessInstanceReportModel:
"""Create_with_attributes.""" """Create_with_attributes."""
process_model = ProcessModelService().get_process_model( # <<<<<<< HEAD
group_id=process_group_identifier, process_model_id=process_model_identifier # process_model = ProcessModelService().get_process_model(
) # process_model_id=f"{process_model_identifier}"
# )
# process_instance_report = cls(
# identifier=identifier,
# process_group_identifier="process_model.process_group_id",
# process_model_identifier=process_model.id,
# =======
process_instance_report = cls( process_instance_report = cls(
identifier=identifier, identifier=identifier,
process_group_identifier=process_model.process_group_id, # >>>>>>> main
process_model_identifier=process_model.id,
created_by_id=user.id, created_by_id=user.id,
report_metadata=report_metadata, report_metadata=report_metadata,
) )

View File

@ -29,7 +29,7 @@ class ProcessModelInfo:
id: str id: str
display_name: str display_name: str
description: str description: str
process_group_id: str = "" process_group: Any | None = None
primary_file_name: str | None = None primary_file_name: str | None = None
primary_process_id: str | None = None primary_process_id: str | None = None
display_order: int | None = 0 display_order: int | None = 0
@ -40,7 +40,7 @@ class ProcessModelInfo:
def __post_init__(self) -> None: def __post_init__(self) -> None:
"""__post_init__.""" """__post_init__."""
self.sort_index = f"{self.process_group_id}:{self.id}" self.sort_index = self.id
def __eq__(self, other: Any) -> bool: def __eq__(self, other: Any) -> bool:
"""__eq__.""" """__eq__."""
@ -66,7 +66,6 @@ class ProcessModelInfoSchema(Schema):
primary_file_name = marshmallow.fields.String(allow_none=True) primary_file_name = marshmallow.fields.String(allow_none=True)
primary_process_id = marshmallow.fields.String(allow_none=True) primary_process_id = marshmallow.fields.String(allow_none=True)
is_review = marshmallow.fields.Boolean(allow_none=True) is_review = marshmallow.fields.Boolean(allow_none=True)
process_group_id = marshmallow.fields.String(allow_none=True)
files = marshmallow.fields.List(marshmallow.fields.Nested("FileSchema")) files = marshmallow.fields.List(marshmallow.fields.Nested("FileSchema"))
fault_or_suspend_on_exception = marshmallow.fields.String() fault_or_suspend_on_exception = marshmallow.fields.String()
exception_notification_addresses = marshmallow.fields.List( exception_notification_addresses = marshmallow.fields.List(
View File
@ -137,6 +137,16 @@ def permissions_check(body: Dict[str, Dict[str, list[str]]]) -> flask.wrappers.R
return make_response(jsonify({"results": response_dict}), 200) return make_response(jsonify({"results": response_dict}), 200)
def modify_process_model_id(process_model_id: str) -> str:
"""Modify_process_model_id."""
return process_model_id.replace("/", ":")
def un_modify_modified_process_model_id(modified_process_model_id: str) -> str:
"""Un_modify_modified_process_model_id."""
return modified_process_model_id.replace(":", "/")
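The two helpers above pin down how nested model identifiers travel through URLs: the full path (groups plus model) is collapsed into one path segment by swapping "/" for ":", and expanded again on the way back in. A minimal round-trip sketch, reusing the acceptance-test identifier that appears later in this diff:

def modify_process_model_id(process_model_id: str) -> str:
    # "/" cannot sit inside a single URL path segment, so encode it as ":"
    return process_model_id.replace("/", ":")


def un_modify_modified_process_model_id(modified_process_model_id: str) -> str:
    # reverse the substitution to recover the on-disk identifier
    return modified_process_model_id.replace(":", "/")


model_id = "acceptance-tests-group-one/acceptance-tests-model-1"
modified = modify_process_model_id(model_id)
assert modified == "acceptance-tests-group-one:acceptance-tests-model-1"
assert un_modify_modified_process_model_id(modified) == model_id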
def process_group_add(body: dict) -> flask.wrappers.Response: def process_group_add(body: dict) -> flask.wrappers.Response:
"""Add_process_group.""" """Add_process_group."""
process_model_service = ProcessModelService() process_model_service = ProcessModelService()
@ -216,10 +226,9 @@ def process_model_add(
status_code=400, status_code=400,
) )
process_group_id, _ = os.path.split(process_model_info.id)
process_model_service = ProcessModelService() process_model_service = ProcessModelService()
process_group = process_model_service.get_process_group( process_group = process_model_service.get_process_group(process_group_id)
process_model_info.process_group_id
)
if process_group is None: if process_group is None:
raise ApiError( raise ApiError(
error_code="process_model_could_not_be_created", error_code="process_model_could_not_be_created",
@ -236,32 +245,45 @@ def process_model_add(
def process_model_delete( def process_model_delete(
process_group_id: str, process_model_id: str modified_process_model_identifier: str,
) -> flask.wrappers.Response: ) -> flask.wrappers.Response:
"""Process_model_delete.""" """Process_model_delete."""
ProcessModelService().process_model_delete(process_model_id) process_model_identifier = modified_process_model_identifier.replace(":", "/")
# process_model_identifier = f"{process_group_id}/{process_model_id}"
ProcessModelService().process_model_delete(process_model_identifier)
return Response(json.dumps({"ok": True}), status=200, mimetype="application/json") return Response(json.dumps({"ok": True}), status=200, mimetype="application/json")
def process_model_update( def process_model_update(
process_group_id: str, process_model_id: str, body: Dict[str, Union[str, bool, int]] modified_process_model_identifier: str, body: Dict[str, Union[str, bool, int]]
) -> Any: ) -> Any:
"""Process_model_update.""" """Process_model_update."""
body_include_list = ["display_name", "primary_file_name", "primary_process_id"] process_model_identifier = modified_process_model_identifier.replace(":", "/")
body_include_list = [
"display_name",
"primary_file_name",
"primary_process_id",
"description",
]
body_filtered = { body_filtered = {
include_item: body[include_item] include_item: body[include_item]
for include_item in body_include_list for include_item in body_include_list
if include_item in body if include_item in body
} }
process_model = get_process_model(process_model_id, process_group_id) # process_model_identifier = f"{process_group_id}/{process_model_id}"
process_model = get_process_model(process_model_identifier)
ProcessModelService().update_spec(process_model, body_filtered) ProcessModelService().update_spec(process_model, body_filtered)
return ProcessModelInfoSchema().dump(process_model) return ProcessModelInfoSchema().dump(process_model)
def process_model_show(process_group_id: str, process_model_id: str) -> Any: def process_model_show(modified_process_model_identifier: str) -> Any:
"""Process_model_show.""" """Process_model_show."""
process_model = get_process_model(process_model_id, process_group_id) process_model_identifier = modified_process_model_identifier.replace(":", "/")
# process_model_identifier = f"{process_group_id}/{process_model_id}"
process_model = get_process_model(process_model_identifier)
# TODO: Temporary. Should not need the next line once models have correct ids
# process_model.id = process_model_identifier
files = sorted(SpecFileService.get_files(process_model)) files = sorted(SpecFileService.get_files(process_model))
process_model.files = files process_model.files = files
for file in process_model.files: for file in process_model.files:
@ -298,15 +320,16 @@ def process_model_list(
return Response(json.dumps(response_json), status=200, mimetype="application/json") return Response(json.dumps(response_json), status=200, mimetype="application/json")
def get_file(process_group_id: str, process_model_id: str, file_name: str) -> Any: def get_file(modified_process_model_id: str, file_name: str) -> Any:
"""Get_file.""" """Get_file."""
process_model = get_process_model(process_model_id, process_group_id) process_model_identifier = modified_process_model_id.replace(":", "/")
process_model = get_process_model(process_model_identifier)
files = SpecFileService.get_files(process_model, file_name) files = SpecFileService.get_files(process_model, file_name)
if len(files) == 0: if len(files) == 0:
raise ApiError( raise ApiError(
error_code="unknown file", error_code="unknown file",
message=f"No information exists for file {file_name}" message=f"No information exists for file {file_name}"
f" it does not exist in workflow {process_model_id}.", f" it does not exist in workflow {process_model_identifier}.",
status_code=404, status_code=404,
) )
@ -314,15 +337,17 @@ def get_file(process_group_id: str, process_model_id: str, file_name: str) -> An
file_contents = SpecFileService.get_data(process_model, file.name) file_contents = SpecFileService.get_data(process_model, file.name)
file.file_contents = file_contents file.file_contents = file_contents
file.process_model_id = process_model.id file.process_model_id = process_model.id
file.process_group_id = process_model.process_group_id # file.process_group_id = process_model.process_group_id
return FileSchema().dump(file) return FileSchema().dump(file)
def process_model_file_update( def process_model_file_update(
process_group_id: str, process_model_id: str, file_name: str modified_process_model_id: str, file_name: str
) -> flask.wrappers.Response: ) -> flask.wrappers.Response:
"""Process_model_file_update.""" """Process_model_file_update."""
process_model = get_process_model(process_model_id, process_group_id) process_model_identifier = modified_process_model_id.replace(":", "/")
# process_model_identifier = f"{process_group_id}/{process_model_id}"
process_model = get_process_model(process_model_identifier)
request_file = get_file_from_request() request_file = get_file_from_request()
request_file_contents = request_file.stream.read() request_file_contents = request_file.stream.read()
@ -337,7 +362,7 @@ def process_model_file_update(
if current_app.config["GIT_COMMIT_ON_SAVE"]: if current_app.config["GIT_COMMIT_ON_SAVE"]:
git_output = GitService.commit( git_output = GitService.commit(
message=f"User: {g.user.username} clicked save for {process_group_id}/{process_model_id}/{file_name}" message=f"User: {g.user.username} clicked save for {process_model_identifier}/{file_name}"
) )
current_app.logger.info(f"git output: {git_output}") current_app.logger.info(f"git output: {git_output}")
else: else:
@ -347,10 +372,11 @@ def process_model_file_update(
def process_model_file_delete( def process_model_file_delete(
process_group_id: str, process_model_id: str, file_name: str modified_process_model_id: str, file_name: str
) -> flask.wrappers.Response: ) -> flask.wrappers.Response:
"""Process_model_file_delete.""" """Process_model_file_delete."""
process_model = get_process_model(process_model_id, process_group_id) process_model_identifier = modified_process_model_id.replace(":", "/")
process_model = get_process_model(process_model_identifier)
try: try:
SpecFileService.delete_file(process_model, file_name) SpecFileService.delete_file(process_model, file_name)
except FileNotFoundError as exception: except FileNotFoundError as exception:
@ -365,9 +391,10 @@ def process_model_file_delete(
return Response(json.dumps({"ok": True}), status=200, mimetype="application/json") return Response(json.dumps({"ok": True}), status=200, mimetype="application/json")
def add_file(process_group_id: str, process_model_id: str) -> flask.wrappers.Response: def add_file(modified_process_model_id: str) -> flask.wrappers.Response:
"""Add_file.""" """Add_file."""
process_model = get_process_model(process_model_id, process_group_id) process_model_identifier = modified_process_model_id.replace(":", "/")
process_model = get_process_model(process_model_identifier)
request_file = get_file_from_request() request_file = get_file_from_request()
if not request_file.filename: if not request_file.filename:
raise ApiError( raise ApiError(
@ -382,18 +409,18 @@ def add_file(process_group_id: str, process_model_id: str) -> flask.wrappers.Res
file_contents = SpecFileService.get_data(process_model, file.name) file_contents = SpecFileService.get_data(process_model, file.name)
file.file_contents = file_contents file.file_contents = file_contents
file.process_model_id = process_model.id file.process_model_id = process_model.id
file.process_group_id = process_model.process_group_id
return Response( return Response(
json.dumps(FileSchema().dump(file)), status=201, mimetype="application/json" json.dumps(FileSchema().dump(file)), status=201, mimetype="application/json"
) )
def process_instance_create( def process_instance_create(modified_process_model_id: str) -> flask.wrappers.Response:
process_group_id: str, process_model_id: str
) -> flask.wrappers.Response:
"""Create_process_instance.""" """Create_process_instance."""
process_model_identifier = un_modify_modified_process_model_id(
modified_process_model_id
)
process_instance = ProcessInstanceService.create_process_instance( process_instance = ProcessInstanceService.create_process_instance(
process_model_id, g.user, process_group_identifier=process_group_id process_model_identifier, g.user
) )
return Response( return Response(
json.dumps(ProcessInstanceModelSchema().dump(process_instance)), json.dumps(ProcessInstanceModelSchema().dump(process_instance)),
@ -403,8 +430,6 @@ def process_instance_create(
def process_instance_run( def process_instance_run(
process_group_id: str,
process_model_id: str,
process_instance_id: int, process_instance_id: int,
do_engine_steps: bool = True, do_engine_steps: bool = True,
) -> flask.wrappers.Response: ) -> flask.wrappers.Response:
@ -446,10 +471,7 @@ def process_instance_run(
def process_instance_terminate( def process_instance_terminate(
process_group_id: str,
process_model_id: str,
process_instance_id: int, process_instance_id: int,
do_engine_steps: bool = True,
) -> flask.wrappers.Response: ) -> flask.wrappers.Response:
"""Process_instance_run.""" """Process_instance_run."""
process_instance = ProcessInstanceService().get_process_instance( process_instance = ProcessInstanceService().get_process_instance(
@ -461,8 +483,6 @@ def process_instance_terminate(
def process_instance_suspend( def process_instance_suspend(
process_group_id: str,
process_model_id: str,
process_instance_id: int, process_instance_id: int,
) -> flask.wrappers.Response: ) -> flask.wrappers.Response:
"""Process_instance_suspend.""" """Process_instance_suspend."""
@ -475,8 +495,6 @@ def process_instance_suspend(
def process_instance_resume( def process_instance_resume(
process_group_id: str,
process_model_id: str,
process_instance_id: int, process_instance_id: int,
) -> flask.wrappers.Response: ) -> flask.wrappers.Response:
"""Process_instance_resume.""" """Process_instance_resume."""
@ -489,8 +507,6 @@ def process_instance_resume(
def process_instance_log_list( def process_instance_log_list(
process_group_id: str,
process_model_id: str,
process_instance_id: int, process_instance_id: int,
page: int = 1, page: int = 1,
per_page: int = 100, per_page: int = 100,
@ -651,7 +667,6 @@ def message_start(
def process_instance_list( def process_instance_list(
process_group_identifier: Optional[str] = None,
process_model_identifier: Optional[str] = None, process_model_identifier: Optional[str] = None,
page: int = 1, page: int = 1,
per_page: int = 100, per_page: int = 100,
@ -662,10 +677,11 @@ def process_instance_list(
process_status: Optional[str] = None, process_status: Optional[str] = None,
) -> flask.wrappers.Response: ) -> flask.wrappers.Response:
"""Process_instance_list.""" """Process_instance_list."""
# process_model_identifier = un_modify_modified_process_model_id(modified_process_model_identifier)
process_instance_query = ProcessInstanceModel.query process_instance_query = ProcessInstanceModel.query
if process_model_identifier is not None and process_group_identifier is not None: if process_model_identifier is not None:
process_model = get_process_model( process_model = get_process_model(
process_model_identifier, process_group_identifier f"{process_model_identifier}",
) )
process_instance_query = process_instance_query.filter_by( process_instance_query = process_instance_query.filter_by(
@ -711,10 +727,29 @@ def process_instance_list(
ProcessInstanceModel.start_in_seconds.desc(), ProcessInstanceModel.id.desc() # type: ignore ProcessInstanceModel.start_in_seconds.desc(), ProcessInstanceModel.id.desc() # type: ignore
).paginate(page=page, per_page=per_page, error_out=False) ).paginate(page=page, per_page=per_page, error_out=False)
process_instance_report = ProcessInstanceReportModel.default_report(g.user)
# TODO need to look into this more - how the filter here interacts with the
# one defined in the report.
# TODO need to look into test failures when the results from result_dict is
# used instead of the process instances
# substitution_variables = request.args.to_dict()
# result_dict = process_instance_report.generate_report(
# process_instances.items, substitution_variables
# )
# results = result_dict["results"]
# report_metadata = result_dict["report_metadata"]
results = process_instances.items
report_metadata = process_instance_report.report_metadata
response_json = { response_json = {
"results": process_instances.items, "report_metadata": report_metadata,
"results": results,
"pagination": { "pagination": {
"count": len(process_instances.items), "count": len(results),
"total": process_instances.total, "total": process_instances.total,
"pages": process_instances.pages, "pages": process_instances.pages,
}, },
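For reference, the paginated payload assembled here now returns the default report's metadata alongside the raw instance rows. A rough sketch of the resulting JSON shape; the values and the metadata body are placeholders, not taken from this diff:

# Illustrative only: top-level keys mirror response_json above, values are made up,
# and report_metadata holds whatever the user's default report stores.
example_response = {
    "report_metadata": {"columns": []},
    "results": [{"id": 1, "status": "complete"}],
    "pagination": {"count": 1, "total": 1, "pages": 1},
}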
@ -724,12 +759,13 @@ def process_instance_list(
def process_instance_show( def process_instance_show(
process_group_id: str, process_model_id: str, process_instance_id: int modified_process_model_identifier: str, process_instance_id: int
) -> flask.wrappers.Response: ) -> flask.wrappers.Response:
"""Create_process_instance.""" """Create_process_instance."""
process_model_identifier = modified_process_model_identifier.replace(":", "/")
process_instance = find_process_instance_by_id_or_raise(process_instance_id) process_instance = find_process_instance_by_id_or_raise(process_instance_id)
current_version_control_revision = GitService.get_current_revision() current_version_control_revision = GitService.get_current_revision()
process_model = get_process_model(process_model_id, process_group_id) process_model = get_process_model(process_model_identifier)
if process_model.primary_file_name: if process_model.primary_file_name:
if ( if (
@ -748,9 +784,7 @@ def process_instance_show(
return make_response(jsonify(process_instance), 200) return make_response(jsonify(process_instance), 200)
def process_instance_delete( def process_instance_delete(process_instance_id: int) -> flask.wrappers.Response:
process_group_id: str, process_model_id: str, process_instance_id: int
) -> flask.wrappers.Response:
"""Create_process_instance.""" """Create_process_instance."""
process_instance = find_process_instance_by_id_or_raise(process_instance_id) process_instance = find_process_instance_by_id_or_raise(process_instance_id)
@ -762,27 +796,20 @@ def process_instance_delete(
def process_instance_report_list( def process_instance_report_list(
process_group_id: str, process_model_id: str, page: int = 1, per_page: int = 100 page: int = 1, per_page: int = 100
) -> flask.wrappers.Response: ) -> flask.wrappers.Response:
"""Process_instance_report_list.""" """Process_instance_report_list."""
process_model = get_process_model(process_model_id, process_group_id)
process_instance_reports = ProcessInstanceReportModel.query.filter_by( process_instance_reports = ProcessInstanceReportModel.query.filter_by(
process_group_identifier=process_group_id, created_by_id=g.user.id,
process_model_identifier=process_model.id,
).all() ).all()
return make_response(jsonify(process_instance_reports), 200) return make_response(jsonify(process_instance_reports), 200)
def process_instance_report_create( def process_instance_report_create(body: Dict[str, Any]) -> flask.wrappers.Response:
process_group_id: str, process_model_id: str, body: Dict[str, Any]
) -> flask.wrappers.Response:
"""Process_instance_report_create.""" """Process_instance_report_create."""
ProcessInstanceReportModel.create_report( ProcessInstanceReportModel.create_report(
identifier=body["identifier"], identifier=body["identifier"],
process_group_identifier=process_group_id,
process_model_identifier=process_model_id,
user=g.user, user=g.user,
report_metadata=body["report_metadata"], report_metadata=body["report_metadata"],
) )
@ -791,16 +818,13 @@ def process_instance_report_create(
def process_instance_report_update( def process_instance_report_update(
process_group_id: str,
process_model_id: str,
report_identifier: str, report_identifier: str,
body: Dict[str, Any], body: Dict[str, Any],
) -> flask.wrappers.Response: ) -> flask.wrappers.Response:
"""Process_instance_report_create.""" """Process_instance_report_create."""
process_instance_report = ProcessInstanceReportModel.query.filter_by( process_instance_report = ProcessInstanceReportModel.query.filter_by(
identifier=report_identifier, identifier=report_identifier,
process_group_identifier=process_group_id, created_by_id=g.user.id,
process_model_identifier=process_model_id,
).first() ).first()
if process_instance_report is None: if process_instance_report is None:
raise ApiError( raise ApiError(
@ -816,15 +840,12 @@ def process_instance_report_update(
def process_instance_report_delete( def process_instance_report_delete(
process_group_id: str,
process_model_id: str,
report_identifier: str, report_identifier: str,
) -> flask.wrappers.Response: ) -> flask.wrappers.Response:
"""Process_instance_report_create.""" """Process_instance_report_create."""
process_instance_report = ProcessInstanceReportModel.query.filter_by( process_instance_report = ProcessInstanceReportModel.query.filter_by(
identifier=report_identifier, identifier=report_identifier,
process_group_identifier=process_group_id, created_by_id=g.user.id,
process_model_identifier=process_model_id,
).first() ).first()
if process_instance_report is None: if process_instance_report is None:
raise ApiError( raise ApiError(
@ -877,25 +898,20 @@ def authentication_callback(
def process_instance_report_show( def process_instance_report_show(
process_group_id: str,
process_model_id: str,
report_identifier: str, report_identifier: str,
page: int = 1, page: int = 1,
per_page: int = 100, per_page: int = 100,
) -> flask.wrappers.Response: ) -> flask.wrappers.Response:
"""Process_instance_list.""" """Process_instance_list."""
process_model = get_process_model(process_model_id, process_group_id) process_instances = ProcessInstanceModel.query.order_by( # .filter_by(process_model_identifier=process_model.id)
ProcessInstanceModel.start_in_seconds.desc(), ProcessInstanceModel.id.desc() # type: ignore
process_instances = ( ).paginate(
ProcessInstanceModel.query.filter_by(process_model_identifier=process_model.id) page=page, per_page=per_page, error_out=False
.order_by(
ProcessInstanceModel.start_in_seconds.desc(), ProcessInstanceModel.id.desc() # type: ignore
)
.paginate(page=page, per_page=per_page, error_out=False)
) )
process_instance_report = ProcessInstanceReportModel.query.filter_by( process_instance_report = ProcessInstanceReportModel.query.filter_by(
identifier=report_identifier identifier=report_identifier,
created_by_id=g.user.id,
).first() ).first()
if process_instance_report is None: if process_instance_report is None:
raise ApiError( raise ApiError(
@ -1007,7 +1023,6 @@ def task_show(process_instance_id: int, task_id: str) -> flask.wrappers.Response
process_model = get_process_model( process_model = get_process_model(
process_instance.process_model_identifier, process_instance.process_model_identifier,
process_instance.process_group_identifier,
) )
form_schema_file_name = "" form_schema_file_name = ""
@ -1159,7 +1174,7 @@ def task_submit(
def script_unit_test_create( def script_unit_test_create(
process_group_id: str, process_model_id: str, body: Dict[str, Union[str, bool, int]] process_group_id: str, process_model_id: str, body: Dict[str, Union[str, bool, int]]
) -> flask.wrappers.Response: ) -> flask.wrappers.Response:
"""Script_unit_test_run.""" """Script_unit_test_create."""
bpmn_task_identifier = _get_required_parameter_or_raise( bpmn_task_identifier = _get_required_parameter_or_raise(
"bpmn_task_identifier", body "bpmn_task_identifier", body
) )
@ -1168,7 +1183,8 @@ def script_unit_test_create(
"expected_output_json", body "expected_output_json", body
) )
process_model = get_process_model(process_model_id, process_group_id) process_model_identifier = f"{process_group_id}/{process_model_id}"
process_model = get_process_model(process_model_identifier)
file = SpecFileService.get_files(process_model, process_model.primary_file_name)[0] file = SpecFileService.get_files(process_model, process_model.primary_file_name)[0]
if file is None: if file is None:
raise ApiError( raise ApiError(
@ -1278,13 +1294,11 @@ def get_file_from_request() -> Any:
return request_file return request_file
def get_process_model(process_model_id: str, process_group_id: str) -> ProcessModelInfo: def get_process_model(process_model_id: str) -> ProcessModelInfo:
"""Get_process_model.""" """Get_process_model."""
process_model = None process_model = None
try: try:
process_model = ProcessModelService().get_process_model( process_model = ProcessModelService().get_process_model(process_model_id)
process_model_id, group_id=process_group_id
)
except ProcessEntityNotFoundError as exception: except ProcessEntityNotFoundError as exception:
raise ( raise (
ApiError( ApiError(
View File
@ -46,6 +46,7 @@ def verify_token(
ApiError: If not on production and token is not valid, returns an 'invalid_token' 403 error. ApiError: If not on production and token is not valid, returns an 'invalid_token' 403 error.
If on production and user is not authenticated, returns a 'no_user' 403 error. If on production and user is not authenticated, returns a 'no_user' 403 error.
""" """
user_info = None
if not force_run and AuthorizationService.should_disable_auth_for_request(): if not force_run and AuthorizationService.should_disable_auth_for_request():
return None return None
@ -104,6 +105,7 @@ def verify_token(
raise ApiError( raise ApiError(
error_code="fail_get_user_info", error_code="fail_get_user_info",
message="Cannot get user info from token", message="Cannot get user info from token",
status_code=401,
) from e ) from e
if ( if (
View File
@ -13,8 +13,8 @@ from spiffworkflow_backend.models.process_instance import ProcessInstanceStatus
def load_acceptance_test_fixtures() -> list[ProcessInstanceModel]: def load_acceptance_test_fixtures() -> list[ProcessInstanceModel]:
"""Load_fixtures.""" """Load_fixtures."""
current_app.logger.debug("load_acceptance_test_fixtures() start") current_app.logger.debug("load_acceptance_test_fixtures() start")
test_process_group_id = "acceptance-tests-group-one" test_process_group_id = ""
test_process_model_id = "acceptance-tests-model-1" test_process_model_id = "acceptance-tests-group-one/acceptance-tests-model-1"
user = BaseTest.find_or_create_user() user = BaseTest.find_or_create_user()
statuses = ProcessInstanceStatus.list() statuses = ProcessInstanceStatus.list()
current_time = round(time.time()) current_time = round(time.time())
View File
@ -48,7 +48,7 @@ class DataSetupService:
except Exception as ex: except Exception as ex:
failing_process_models.append( failing_process_models.append(
( (
f"{process_model.process_group_id}/{process_model.id}/{process_model_file.name}", f"{process_model.process_group}/{process_model.id}/{process_model_file.name}",
str(ex), str(ex),
) )
) )
@ -87,7 +87,7 @@ class DataSetupService:
else: else:
failing_process_models.append( failing_process_models.append(
( (
f"{process_model.process_group_id}/{process_model.id}", f"{process_model.process_group}/{process_model.id}",
"primary_file_name not set", "primary_file_name not set",
) )
) )
View File
@ -35,7 +35,7 @@ class ErrorHandlingService:
) -> None: ) -> None:
"""On unhandled exceptions, set instance.status based on model.fault_or_suspend_on_exception.""" """On unhandled exceptions, set instance.status based on model.fault_or_suspend_on_exception."""
process_model = ProcessModelService().get_process_model( process_model = ProcessModelService().get_process_model(
_processor.process_model_identifier, _processor.process_group_identifier _processor.process_model_identifier
) )
if process_model.fault_or_suspend_on_exception == "suspend": if process_model.fault_or_suspend_on_exception == "suspend":
self.set_instance_status( self.set_instance_status(
View File
@ -54,18 +54,22 @@ class FileSystemService:
@staticmethod @staticmethod
def process_group_path_for_spec(spec: ProcessModelInfo) -> str: def process_group_path_for_spec(spec: ProcessModelInfo) -> str:
"""Category_path_for_spec.""" """Category_path_for_spec."""
return FileSystemService.process_group_path(spec.process_group_id) process_group_id, _ = os.path.split(spec.id)
return FileSystemService.process_group_path(process_group_id)
@staticmethod @staticmethod
def workflow_path(spec: ProcessModelInfo) -> str: def workflow_path(spec: ProcessModelInfo) -> str:
"""Workflow_path.""" """Workflow_path."""
process_group_path = FileSystemService.process_group_path_for_spec(spec) process_model_path = os.path.join(FileSystemService.root_path(), spec.id)
return os.path.join(process_group_path, spec.id) # process_group_path = FileSystemService.process_group_path_for_spec(spec)
return process_model_path
@staticmethod @staticmethod
def full_path_to_process_model_file(spec: ProcessModelInfo, file_name: str) -> str: def full_path_to_process_model_file(spec: ProcessModelInfo) -> str:
"""Full_path_to_process_model_file.""" """Full_path_to_process_model_file."""
return os.path.join(FileSystemService.workflow_path(spec), file_name) return os.path.join(
FileSystemService.workflow_path(spec), spec.primary_file_name # type: ignore
)
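With process_group_id gone from the model, every filesystem path is now derived from the model's full identifier. A small standalone sketch of that derivation, using a made-up root directory and nested identifier:

import os

root_path = "/tmp/bpmn-specs"                     # stand-in for FileSystemService.root_path()
spec_id = "group-a/group-b/my-model"              # hypothetical nested model identifier
primary_file_name = "my-model.bpmn"               # hypothetical primary file

process_group_id, _ = os.path.split(spec_id)      # "group-a/group-b"
workflow_path = os.path.join(root_path, spec_id)  # "/tmp/bpmn-specs/group-a/group-b/my-model"
primary_file_path = os.path.join(workflow_path, primary_file_name)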
def next_display_order(self, spec: ProcessModelInfo) -> int: def next_display_order(self, spec: ProcessModelInfo) -> int:
"""Next_display_order.""" """Next_display_order."""
View File
@ -120,7 +120,6 @@ class MessageService:
process_instance_receive = ProcessInstanceService.create_process_instance( process_instance_receive = ProcessInstanceService.create_process_instance(
message_triggerable_process_model.process_model_identifier, message_triggerable_process_model.process_model_identifier,
user, user,
process_group_identifier=message_triggerable_process_model.process_group_identifier,
) )
processor_receive = ProcessInstanceProcessor(process_instance_receive) processor_receive = ProcessInstanceProcessor(process_instance_receive)
processor_receive.do_engine_steps(save=False) processor_receive.do_engine_steps(save=False)
View File
@ -293,8 +293,9 @@ class ProcessInstanceProcessor:
tld.spiff_step = process_instance_model.spiff_step tld.spiff_step = process_instance_model.spiff_step
# we want this to be the fully qualified path to the process model including all group subcomponents # we want this to be the fully qualified path to the process model including all group subcomponents
current_app.config["THREAD_LOCAL_DATA"].process_model_identifier = ( current_app.config[
f"{process_instance_model.process_group_identifier}/" "THREAD_LOCAL_DATA"
].process_model_identifier = (
f"{process_instance_model.process_model_identifier}" f"{process_instance_model.process_model_identifier}"
) )
@ -307,8 +308,7 @@ class ProcessInstanceProcessor:
bpmn_process_spec, bpmn_process_spec,
subprocesses, subprocesses,
) = ProcessInstanceProcessor.get_process_model_and_subprocesses( ) = ProcessInstanceProcessor.get_process_model_and_subprocesses(
process_instance_model.process_model_identifier, process_instance_model.process_model_identifier
process_instance_model.process_group_identifier,
) )
else: else:
bpmn_json_length = len(process_instance_model.bpmn_json.encode("utf-8")) bpmn_json_length = len(process_instance_model.bpmn_json.encode("utf-8"))
@ -359,7 +359,7 @@ class ProcessInstanceProcessor:
check_sub_specs(test_spec, 5) check_sub_specs(test_spec, 5)
self.process_model_identifier = process_instance_model.process_model_identifier self.process_model_identifier = process_instance_model.process_model_identifier
self.process_group_identifier = process_instance_model.process_group_identifier # self.process_group_identifier = process_instance_model.process_group_identifier
try: try:
self.bpmn_process_instance = self.__get_bpmn_process_instance( self.bpmn_process_instance = self.__get_bpmn_process_instance(
@ -394,17 +394,17 @@ class ProcessInstanceProcessor:
@classmethod @classmethod
def get_process_model_and_subprocesses( def get_process_model_and_subprocesses(
cls, process_model_identifier: str, process_group_identifier: str cls, process_model_identifier: str
) -> Tuple[BpmnProcessSpec, IdToBpmnProcessSpecMapping]: ) -> Tuple[BpmnProcessSpec, IdToBpmnProcessSpecMapping]:
"""Get_process_model_and_subprocesses.""" """Get_process_model_and_subprocesses."""
process_model_info = ProcessModelService().get_process_model( process_model_info = ProcessModelService().get_process_model(
process_model_identifier, process_group_identifier process_model_identifier
) )
if process_model_info is None: if process_model_info is None:
raise ( raise (
ApiError( ApiError(
"process_model_not_found", "process_model_not_found",
f"The given process model was not found: {process_group_identifier}/{process_model_identifier}.", f"The given process model was not found: {process_model_identifier}.",
) )
) )
spec_files = SpecFileService.get_files(process_model_info) spec_files = SpecFileService.get_files(process_model_info)
@ -412,12 +412,11 @@ class ProcessInstanceProcessor:
@classmethod @classmethod
def get_bpmn_process_instance_from_process_model( def get_bpmn_process_instance_from_process_model(
cls, process_model_identifier: str, process_group_identifier: str cls, process_model_identifier: str
) -> BpmnWorkflow: ) -> BpmnWorkflow:
"""Get_all_bpmn_process_identifiers_for_process_model.""" """Get_all_bpmn_process_identifiers_for_process_model."""
(bpmn_process_spec, subprocesses) = cls.get_process_model_and_subprocesses( (bpmn_process_spec, subprocesses) = cls.get_process_model_and_subprocesses(
process_model_identifier, process_model_identifier,
process_group_identifier,
) )
return cls.get_bpmn_process_instance_from_workflow_spec( return cls.get_bpmn_process_instance_from_workflow_spec(
bpmn_process_spec, subprocesses bpmn_process_spec, subprocesses
@ -698,7 +697,7 @@ class ProcessInstanceProcessor:
etree_element, etree_element,
) )
return FileSystemService.full_path_to_process_model_file( return FileSystemService.full_path_to_process_model_file(
process_model, process_model.primary_file_name process_model
) )
return None return None
View File
@ -2,7 +2,6 @@
import time import time
from typing import Any from typing import Any
from typing import List from typing import List
from typing import Optional
from flask import current_app from flask import current_app
from flask_bpmn.api.api_error import ApiError from flask_bpmn.api.api_error import ApiError
@ -32,7 +31,6 @@ class ProcessInstanceService:
def create_process_instance( def create_process_instance(
process_model_identifier: str, process_model_identifier: str,
user: UserModel, user: UserModel,
process_group_identifier: Optional[str] = None,
) -> ProcessInstanceModel: ) -> ProcessInstanceModel:
"""Get_process_instance_from_spec.""" """Get_process_instance_from_spec."""
current_git_revision = GitService.get_current_revision() current_git_revision = GitService.get_current_revision()
@ -40,7 +38,7 @@ class ProcessInstanceService:
status=ProcessInstanceStatus.not_started.value, status=ProcessInstanceStatus.not_started.value,
process_initiator=user, process_initiator=user,
process_model_identifier=process_model_identifier, process_model_identifier=process_model_identifier,
process_group_identifier=process_group_identifier, process_group_identifier="",
start_in_seconds=round(time.time()), start_in_seconds=round(time.time()),
bpmn_version_control_type="git", bpmn_version_control_type="git",
bpmn_version_control_identifier=current_git_revision, bpmn_version_control_identifier=current_git_revision,
@ -97,7 +95,7 @@ class ProcessInstanceService:
next_task=None, next_task=None,
# navigation=navigation, # navigation=navigation,
process_model_identifier=processor.process_model_identifier, process_model_identifier=processor.process_model_identifier,
process_group_identifier=processor.process_group_identifier, process_group_identifier="",
# total_tasks=len(navigation), # total_tasks=len(navigation),
completed_tasks=processor.process_instance_model.completed_tasks, completed_tasks=processor.process_instance_model.completed_tasks,
updated_at_in_seconds=processor.process_instance_model.updated_at_in_seconds, updated_at_in_seconds=processor.process_instance_model.updated_at_in_seconds,
@ -105,6 +103,20 @@ class ProcessInstanceService:
title=title_value, title=title_value,
) )
next_task_trying_again = next_task
if (
not next_task
): # The Next Task can be requested to be a certain task, useful for parallel tasks.
# This may or may not work, sometimes there is no next task to complete.
next_task_trying_again = processor.next_task()
if next_task_trying_again is not None:
process_instance_api.next_task = (
ProcessInstanceService.spiff_task_to_api_task(
next_task_trying_again, add_docs_and_forms=True
)
)
return process_instance_api return process_instance_api
def get_process_instance(self, process_instance_id: int) -> Any: def get_process_instance(self, process_instance_id: int) -> Any:
View File
@ -34,6 +34,20 @@ class ProcessModelService(FileSystemService):
GROUP_SCHEMA = ProcessGroupSchema() GROUP_SCHEMA = ProcessGroupSchema()
WF_SCHEMA = ProcessModelInfoSchema() WF_SCHEMA = ProcessModelInfoSchema()
def is_group(self, path: str) -> bool:
"""Is_group."""
group_json_path = os.path.join(path, self.CAT_JSON_FILE)
if os.path.exists(group_json_path):
return True
return False
def is_model(self, path: str) -> bool:
"""Is_model."""
model_json_path = os.path.join(path, self.WF_JSON_FILE)
if os.path.exists(model_json_path):
return True
return False
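These two checks are what let a single directory tree hold arbitrarily nested groups and models: a directory counts as a group or a model depending on which JSON marker file it contains. A standalone sketch of the same idea; the constant names come from the service, but the concrete file names used here are assumptions:

import os

CAT_JSON_FILE = "process_group.json"   # assumed group marker file name
WF_JSON_FILE = "process_model.json"    # assumed model marker file name


def is_group(path: str) -> bool:
    return os.path.exists(os.path.join(path, CAT_JSON_FILE))


def is_model(path: str) -> bool:
    return os.path.exists(os.path.join(path, WF_JSON_FILE))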
@staticmethod @staticmethod
def get_batch( def get_batch(
items: list[T], items: list[T],
@ -62,7 +76,7 @@ class ProcessModelService(FileSystemService):
def save_process_model(self, process_model: ProcessModelInfo) -> None: def save_process_model(self, process_model: ProcessModelInfo) -> None:
"""Save_process_model.""" """Save_process_model."""
spec_path = self.workflow_path(process_model) spec_path = os.path.join(FileSystemService.root_path(), process_model.id)
os.makedirs(spec_path, exist_ok=True) os.makedirs(spec_path, exist_ok=True)
json_path = os.path.join(spec_path, self.WF_JSON_FILE) json_path = os.path.join(spec_path, self.WF_JSON_FILE)
with open(json_path, "w") as wf_json: with open(json_path, "w") as wf_json:
@ -80,8 +94,9 @@ class ProcessModelService(FileSystemService):
error_code="existing_instances", error_code="existing_instances",
message=f"We cannot delete the model `{process_model_id}`, there are existing instances that depend on it.", message=f"We cannot delete the model `{process_model_id}`, there are existing instances that depend on it.",
) )
process_model = self.get_process_model(process_model_id) self.get_process_model(process_model_id)
path = self.workflow_path(process_model) # path = self.workflow_path(process_model)
path = f"{FileSystemService.root_path()}/{process_model_id}"
shutil.rmtree(path) shutil.rmtree(path)
@classmethod @classmethod
@ -89,36 +104,43 @@ class ProcessModelService(FileSystemService):
cls, relative_path: str cls, relative_path: str
) -> ProcessModelInfo: ) -> ProcessModelInfo:
"""Get_process_model_from_relative_path.""" """Get_process_model_from_relative_path."""
process_group_identifier = os.path.dirname(relative_path) process_group_identifier, _ = os.path.split(relative_path)
process_group = cls().get_process_group(process_group_identifier) process_group = cls().get_process_group(process_group_identifier)
path = os.path.join(FileSystemService.root_path(), relative_path) path = os.path.join(FileSystemService.root_path(), relative_path)
return cls().__scan_spec(path, process_group=process_group) return cls().__scan_spec(path, process_group=process_group)
def get_process_model( def get_process_model(self, process_model_id: str) -> ProcessModelInfo:
self, process_model_id: str, group_id: Optional[str] = None """Get a process model from a model and group id.
) -> ProcessModelInfo:
"""Get a process model from a model and group id."""
if not os.path.exists(FileSystemService.root_path()):
raise ProcessEntityNotFoundError("process_model_not_found")
if group_id is not None: process_model_id is the full path to the model--including groups.
process_group = self.get_process_group(group_id) """
if process_group is not None: if not os.path.exists(FileSystemService.root_path()):
for process_model in process_group.process_models: raise ProcessEntityNotFoundError("process_model_root_not_found")
if process_model_id == process_model.id:
return process_model model_path = os.path.join(FileSystemService.root_path(), process_model_id)
with os.scandir(FileSystemService.root_path()) as process_group_dirs: if self.is_model(model_path):
for item in process_group_dirs: process_model = self.get_process_model_from_relative_path(process_model_id)
process_group_dir = item return process_model
if item.is_dir():
with os.scandir(item.path) as spec_dirs: # group_path, model_id = os.path.split(process_model_id)
for sd in spec_dirs: # if group_path is not None:
if sd.name == process_model_id: # process_group = self.get_process_group(group_path)
# Now we have the process_group direcotry, and spec directory # if process_group is not None:
process_group = self.__scan_process_group( # for process_model in process_group.process_models:
process_group_dir # if process_model_id == process_model.id:
) # return process_model
return self.__scan_spec(sd.path, sd.name, process_group) # with os.scandir(FileSystemService.root_path()) as process_group_dirs:
# for item in process_group_dirs:
# process_group_dir = item
# if item.is_dir():
# with os.scandir(item.path) as spec_dirs:
# for sd in spec_dirs:
# if sd.name == process_model_id:
# # Now we have the process_group directory, and spec directory
# process_group = self.__scan_process_group(
# process_group_dir
# )
# return self.__scan_spec(sd.path, sd.name, process_group)
raise ProcessEntityNotFoundError("process_model_not_found") raise ProcessEntityNotFoundError("process_model_not_found")
def get_process_models( def get_process_models(
@ -148,10 +170,24 @@ class ProcessModelService(FileSystemService):
def get_process_group(self, process_group_id: str) -> ProcessGroup: def get_process_group(self, process_group_id: str) -> ProcessGroup:
"""Look for a given process_group, and return it.""" """Look for a given process_group, and return it."""
if os.path.exists(FileSystemService.root_path()): if os.path.exists(FileSystemService.root_path()):
with os.scandir(FileSystemService.root_path()) as directory_items: process_group_path = os.path.join(
for item in directory_items: FileSystemService.root_path(), process_group_id
if item.is_dir() and item.name == process_group_id: )
return self.__scan_process_group(item) if self.is_group(process_group_path):
return self.__scan_process_group(process_group_path)
# nested_groups = []
# process_group_dir = os.scandir(process_group_path)
# for item in process_group_dir:
# if self.is_group(item.path):
# nested_group = self.get_process_group(os.path.join(process_group_path, item.path))
# nested_groups.append(nested_group)
# elif self.is_model(item.path):
# print("get_process_group: ")
# return self.__scan_process_group(process_group_path)
# with os.scandir(FileSystemService.root_path()) as directory_items:
# for item in directory_items:
# if item.is_dir() and item.name == process_group_id:
# return self.__scan_process_group(item)
raise ProcessEntityNotFoundError( raise ProcessEntityNotFoundError(
"process_group_not_found", f"Process Group Id: {process_group_id}" "process_group_not_found", f"Process Group Id: {process_group_id}"
@ -202,13 +238,15 @@ class ProcessModelService(FileSystemService):
with os.scandir(FileSystemService.root_path()) as directory_items: with os.scandir(FileSystemService.root_path()) as directory_items:
process_groups = [] process_groups = []
for item in directory_items: for item in directory_items:
if item.is_dir() and not item.name[0] == ".": # if item.is_dir() and not item.name[0] == ".":
process_groups.append(self.__scan_process_group(item)) if item.is_dir() and self.is_group(item): # type: ignore
scanned_process_group = self.__scan_process_group(item.path)
process_groups.append(scanned_process_group)
return process_groups return process_groups
def __scan_process_group(self, dir_item: os.DirEntry) -> ProcessGroup: def __scan_process_group(self, dir_path: str) -> ProcessGroup:
"""Reads the process_group.json file, and any workflow directories.""" """Reads the process_group.json file, and any nested directories."""
cat_path = os.path.join(dir_item.path, self.CAT_JSON_FILE) cat_path = os.path.join(dir_path, self.CAT_JSON_FILE)
if os.path.exists(cat_path): if os.path.exists(cat_path):
with open(cat_path) as cat_json: with open(cat_path) as cat_json:
data = json.load(cat_json) data = json.load(cat_json)
@ -216,26 +254,34 @@ class ProcessModelService(FileSystemService):
if process_group is None: if process_group is None:
raise ApiError( raise ApiError(
error_code="process_group_could_not_be_loaded_from_disk", error_code="process_group_could_not_be_loaded_from_disk",
message=f"We could not load the process_group from disk from: {dir_item}", message=f"We could not load the process_group from disk from: {dir_path}",
) )
else: else:
process_group_id = dir_path.replace(FileSystemService.root_path(), "")
process_group = ProcessGroup( process_group = ProcessGroup(
id=dir_item.name, id=process_group_id,
display_name=dir_item.name, display_name=process_group_id,
display_order=10000, display_order=10000,
admin=False, admin=False,
) )
with open(cat_path, "w") as wf_json: with open(cat_path, "w") as wf_json:
json.dump(self.GROUP_SCHEMA.dump(process_group), wf_json, indent=4) json.dump(self.GROUP_SCHEMA.dump(process_group), wf_json, indent=4)
with os.scandir(dir_item.path) as workflow_dirs: with os.scandir(dir_path) as nested_items:
process_group.process_models = [] process_group.process_models = []
for item in workflow_dirs: for nested_item in nested_items:
if item.is_dir(): if nested_item.is_dir():
process_group.process_models.append( # TODO: check whether this is a group or model
self.__scan_spec( if self.is_group(nested_item.path):
item.path, item.name, process_group=process_group # This is a nested group
...
elif self.is_model(nested_item.path):
process_group.process_models.append(
self.__scan_spec(
nested_item.path,
nested_item.name,
process_group=process_group,
)
) )
)
process_group.process_models.sort() process_group.process_models.sort()
return process_group return process_group
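The nested-group branch above is left as a stub (a bare ... with a TODO comment). Purely as an illustration of how that branch could recurse, and not as what this commit ships, a directory walk keyed off group/model marker files might look like this; the marker file names are assumptions:

import os

CAT_JSON_FILE = "process_group.json"   # assumed group marker file name
WF_JSON_FILE = "process_model.json"    # assumed model marker file name


def scan_group(dir_path: str) -> dict:
    # Collect the models directly inside this group and recurse into nested groups.
    group = {"id": dir_path, "process_models": [], "process_groups": []}
    with os.scandir(dir_path) as entries:
        for entry in entries:
            if not entry.is_dir():
                continue
            if os.path.exists(os.path.join(entry.path, CAT_JSON_FILE)):
                group["process_groups"].append(scan_group(entry.path))
            elif os.path.exists(os.path.join(entry.path, WF_JSON_FILE)):
                # the real service would call __scan_spec here
                group["process_models"].append(entry.path)
    return group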
@ -251,6 +297,8 @@ class ProcessModelService(FileSystemService):
if os.path.exists(spec_path): if os.path.exists(spec_path):
with open(spec_path) as wf_json: with open(spec_path) as wf_json:
data = json.load(wf_json) data = json.load(wf_json)
if "process_group_id" in data:
data.pop("process_group_id")
spec = ProcessModelInfo(**data) spec = ProcessModelInfo(**data)
if spec is None: if spec is None:
raise ApiError( raise ApiError(
@ -274,5 +322,5 @@ class ProcessModelService(FileSystemService):
with open(spec_path, "w") as wf_json: with open(spec_path, "w") as wf_json:
json.dump(self.WF_SCHEMA.dump(spec), wf_json, indent=4) json.dump(self.WF_SCHEMA.dump(spec), wf_json, indent=4)
if process_group: if process_group:
spec.process_group_id = process_group.id spec.process_group = process_group.id
return spec return spec
View File
@ -48,7 +48,8 @@ class SpecFileService(FileSystemService):
extension_filter: str = "", extension_filter: str = "",
) -> List[File]: ) -> List[File]:
"""Return all files associated with a workflow specification.""" """Return all files associated with a workflow specification."""
path = SpecFileService.workflow_path(process_model_info) # path = SpecFileService.workflow_path(process_model_info)
path = os.path.join(FileSystemService.root_path(), process_model_info.id)
files = SpecFileService._get_files(path, file_name) files = SpecFileService._get_files(path, file_name)
if extension_filter != "": if extension_filter != "":
files = list( files = list(
@ -105,7 +106,10 @@ class SpecFileService(FileSystemService):
) -> File: ) -> File:
"""Update_file.""" """Update_file."""
SpecFileService.assert_valid_file_name(file_name) SpecFileService.assert_valid_file_name(file_name)
file_path = SpecFileService.file_path(process_model_info, file_name) # file_path = SpecFileService.file_path(process_model_info, file_name)
file_path = os.path.join(
FileSystemService.root_path(), process_model_info.id, file_name
)
SpecFileService.write_file_data_to_system(file_path, binary_data) SpecFileService.write_file_data_to_system(file_path, binary_data)
file = SpecFileService.to_file_object(file_name, file_path) file = SpecFileService.to_file_object(file_name, file_path)
@ -129,7 +133,10 @@ class SpecFileService(FileSystemService):
@staticmethod @staticmethod
def get_data(process_model_info: ProcessModelInfo, file_name: str) -> bytes: def get_data(process_model_info: ProcessModelInfo, file_name: str) -> bytes:
"""Get_data.""" """Get_data."""
file_path = SpecFileService.file_path(process_model_info, file_name) # file_path = SpecFileService.file_path(process_model_info, file_name)
file_path = os.path.join(
FileSystemService.root_path(), process_model_info.id, file_name
)
if not os.path.exists(file_path): if not os.path.exists(file_path):
raise ProcessModelFileNotFoundError( raise ProcessModelFileNotFoundError(
f"No file found with name {file_name} in {process_model_info.display_name}" f"No file found with name {file_name} in {process_model_info.display_name}"
@ -163,7 +170,8 @@ class SpecFileService(FileSystemService):
# for lf in lookup_files: # for lf in lookup_files:
# session.query(LookupDataModel).filter_by(lookup_file_model_id=lf.id).delete() # session.query(LookupDataModel).filter_by(lookup_file_model_id=lf.id).delete()
# session.query(LookupFileModel).filter_by(id=lf.id).delete() # session.query(LookupFileModel).filter_by(id=lf.id).delete()
file_path = SpecFileService.file_path(spec, file_name) # file_path = SpecFileService.file_path(spec, file_name)
file_path = os.path.join(FileSystemService.root_path(), spec.id, file_name)
os.remove(file_path) os.remove(file_path)
@staticmethod @staticmethod
@ -367,9 +375,8 @@ class SpecFileService(FileSystemService):
process_model_info: ProcessModelInfo, bpmn_file_name: str, et_root: _Element process_model_info: ProcessModelInfo, bpmn_file_name: str, et_root: _Element
) -> None: ) -> None:
"""Store_bpmn_process_identifiers.""" """Store_bpmn_process_identifiers."""
relative_process_model_path = SpecFileService.process_model_relative_path( relative_process_model_path = process_model_info.id
process_model_info
)
relative_bpmn_file_path = os.path.join( relative_bpmn_file_path = os.path.join(
relative_process_model_path, bpmn_file_name relative_process_model_path, bpmn_file_name
) )
@ -462,10 +469,12 @@ class SpecFileService(FileSystemService):
) )
if message_triggerable_process_model is None: if message_triggerable_process_model is None:
message_triggerable_process_model = MessageTriggerableProcessModel( message_triggerable_process_model = (
message_model_id=message_model.id, MessageTriggerableProcessModel(
process_model_identifier=process_model_info.id, message_model_id=message_model.id,
process_group_identifier=process_model_info.process_group_id, process_model_identifier=process_model_info.id,
process_group_identifier="process_group_identifier",
)
) )
db.session.add(message_triggerable_process_model) db.session.add(message_triggerable_process_model)
db.session.commit() db.session.commit()
@ -473,12 +482,11 @@ class SpecFileService(FileSystemService):
if ( if (
message_triggerable_process_model.process_model_identifier message_triggerable_process_model.process_model_identifier
!= process_model_info.id != process_model_info.id
or message_triggerable_process_model.process_group_identifier # or message_triggerable_process_model.process_group_identifier
!= process_model_info.process_group_id # != process_model_info.process_group_id
): ):
raise ValidationException( raise ValidationException(
"Message model is already used to start process model" f"Message model is already used to start process model {process_model_info.id}"
f"'{process_model_info.process_group_id}/{process_model_info.id}'"
) )
for child in et_root: for child in et_root:
View File
@ -0,0 +1,41 @@
<?xml version="1.0" encoding="UTF-8"?>
<bpmn:definitions xmlns:bpmn="http://www.omg.org/spec/BPMN/20100524/MODEL" xmlns:bpmndi="http://www.omg.org/spec/BPMN/20100524/DI" xmlns:dc="http://www.omg.org/spec/DD/20100524/DC" xmlns:spiffworkflow="http://spiffworkflow.org/bpmn/schema/1.0/core" xmlns:di="http://www.omg.org/spec/DD/20100524/DI" id="Definitions_96f6665" targetNamespace="http://bpmn.io/schema/bpmn" exporter="Camunda Modeler" exporterVersion="3.0.0-dev">
<bpmn:process id="Proccess_ManualTask" name="Manual Task" isExecutable="true">
<bpmn:startEvent id="StartEvent_1">
<bpmn:outgoing>Flow_1xlck7g</bpmn:outgoing>
</bpmn:startEvent>
<bpmn:sequenceFlow id="Flow_1xlck7g" sourceRef="StartEvent_1" targetRef="Activity_Hello" />
<bpmn:endEvent id="Event_0ia26nb">
<bpmn:incoming>Flow_0nnh2x9</bpmn:incoming>
</bpmn:endEvent>
<bpmn:sequenceFlow id="Flow_0nnh2x9" sourceRef="Activity_Hello" targetRef="Event_0ia26nb" />
<bpmn:manualTask id="Activity_Hello" name="Hello">
<bpmn:extensionElements>
<spiffworkflow:instructionsForEndUser>## Hello</spiffworkflow:instructionsForEndUser>
</bpmn:extensionElements>
<bpmn:incoming>Flow_1xlck7g</bpmn:incoming>
<bpmn:outgoing>Flow_0nnh2x9</bpmn:outgoing>
</bpmn:manualTask>
</bpmn:process>
<bpmndi:BPMNDiagram id="BPMNDiagram_1">
<bpmndi:BPMNPlane id="BPMNPlane_1" bpmnElement="Proccess_ManualTask">
<bpmndi:BPMNShape id="_BPMNShape_StartEvent_2" bpmnElement="StartEvent_1">
<dc:Bounds x="179" y="159" width="36" height="36" />
</bpmndi:BPMNShape>
<bpmndi:BPMNShape id="Event_0ia26nb_di" bpmnElement="Event_0ia26nb">
<dc:Bounds x="432" y="159" width="36" height="36" />
</bpmndi:BPMNShape>
<bpmndi:BPMNShape id="Activity_1rcj16n_di" bpmnElement="Activity_Hello">
<dc:Bounds x="270" y="137" width="100" height="80" />
</bpmndi:BPMNShape>
<bpmndi:BPMNEdge id="Flow_1xlck7g_di" bpmnElement="Flow_1xlck7g">
<di:waypoint x="215" y="177" />
<di:waypoint x="270" y="177" />
</bpmndi:BPMNEdge>
<bpmndi:BPMNEdge id="Flow_0nnh2x9_di" bpmnElement="Flow_0nnh2x9">
<di:waypoint x="370" y="177" />
<di:waypoint x="432" y="177" />
</bpmndi:BPMNEdge>
</bpmndi:BPMNPlane>
</bpmndi:BPMNDiagram>
</bpmn:definitions>
View File
@ -1,67 +1,87 @@
<?xml version="1.0" encoding="UTF-8"?> <?xml version="1.0" encoding="UTF-8"?>
<bpmn:definitions xmlns:bpmn="http://www.omg.org/spec/BPMN/20100524/MODEL" xmlns:bpmndi="http://www.omg.org/spec/BPMN/20100524/DI" xmlns:dc="http://www.omg.org/spec/DD/20100524/DC" xmlns:di="http://www.omg.org/spec/DD/20100524/DI" id="Definitions_1kbzkan" targetNamespace="http://bpmn.io/schema/bpmn" exporter="Camunda Modeler" exporterVersion="5.0.0"> <bpmn:definitions xmlns:bpmn="http://www.omg.org/spec/BPMN/20100524/MODEL" xmlns:bpmndi="http://www.omg.org/spec/BPMN/20100524/DI" xmlns:dc="http://www.omg.org/spec/DD/20100524/DC" xmlns:spiffworkflow="http://spiffworkflow.org/bpmn/schema/1.0/core" xmlns:di="http://www.omg.org/spec/DD/20100524/DI" id="Definitions_96f6665" targetNamespace="http://bpmn.io/schema/bpmn" exporter="Camunda Modeler" exporterVersion="3.0.0-dev">
<bpmn:process id="Process_SimpleScript" name="Simple Script" isExecutable="true"> <bpmn:process id="Proccess_SimpleScript" name="Simple Script" isExecutable="true">
<bpmn:startEvent id="StartEvent_1"> <bpmn:startEvent id="StartEvent_1">
<bpmn:outgoing>Flow_1k9q28c</bpmn:outgoing> <bpmn:outgoing>Flow_0r3ua0i</bpmn:outgoing>
</bpmn:startEvent> </bpmn:startEvent>
<bpmn:sequenceFlow id="Flow_1k9q28c" sourceRef="StartEvent_1" targetRef="Activity_RunScript" /> <bpmn:sequenceFlow id="Flow_0r3ua0i" sourceRef="StartEvent_1" targetRef="Activity_SetInitialData" />
<bpmn:sequenceFlow id="Flow_1fviiob" sourceRef="Activity_RunScript" targetRef="Activity_DisplayData" /> <bpmn:scriptTask id="Activity_SetInitialData" name="Set Initial Data">
<bpmn:endEvent id="Event_1fep863"> <bpmn:incoming>Flow_0r3ua0i</bpmn:incoming>
<bpmn:incoming>Flow_10610n2</bpmn:incoming> <bpmn:outgoing>Flow_19g4f88</bpmn:outgoing>
</bpmn:endEvent>
<bpmn:sequenceFlow id="Flow_10610n2" sourceRef="Activity_DisplayData" targetRef="Event_1fep863" />
<bpmn:scriptTask id="Activity_RunScript" name="Run Script">
<bpmn:incoming>Flow_1k9q28c</bpmn:incoming>
<bpmn:outgoing>Flow_1fviiob</bpmn:outgoing>
<bpmn:script>a = 1 <bpmn:script>a = 1
b = 2 b = 2</bpmn:script>
c = a + b
norris=fact_service(type='norris')</bpmn:script>
</bpmn:scriptTask> </bpmn:scriptTask>
<bpmn:sequenceFlow id="Flow_19g4f88" sourceRef="Activity_SetInitialData" targetRef="Activity_CalculateNewData" />
<bpmn:scriptTask id="Activity_CalculateNewData" name="Calculate New Data">
<bpmn:extensionElements>
<spiffworkflow:unitTests>
<spiffworkflow:unitTest id="ScriptUnitTest_SimpleScript">
<spiffworkflow:inputJson>{'a': 1, 'b': 2}</spiffworkflow:inputJson>
<spiffworkflow:expectedOutputJson>{'a': 1, 'b': 2, 'c': 3}</spiffworkflow:expectedOutputJson>
</spiffworkflow:unitTest>
</spiffworkflow:unitTests>
</bpmn:extensionElements>
<bpmn:incoming>Flow_19g4f88</bpmn:incoming>
<bpmn:outgoing>Flow_152cqfw</bpmn:outgoing>
<bpmn:script>c = a + b</bpmn:script>
</bpmn:scriptTask>
<bpmn:sequenceFlow id="Flow_152cqfw" sourceRef="Activity_CalculateNewData" targetRef="Activity_DisplayData" />
<bpmn:manualTask id="Activity_DisplayData" name="Display Data"> <bpmn:manualTask id="Activity_DisplayData" name="Display Data">
<bpmn:documentation>## Display Data <bpmn:extensionElements>
<spiffworkflow:instructionsForEndUser>## Data
### A
### a
{{ a }} {{ a }}
### B
### b
{{ b }} {{ b }}
### C
### c {{ c }}</spiffworkflow:instructionsForEndUser>
{{ c }}</bpmn:documentation> </bpmn:extensionElements>
<bpmn:incoming>Flow_1fviiob</bpmn:incoming> <bpmn:incoming>Flow_152cqfw</bpmn:incoming>
<bpmn:outgoing>Flow_10610n2</bpmn:outgoing> <bpmn:outgoing>Flow_1vqk60p</bpmn:outgoing>
</bpmn:manualTask> </bpmn:manualTask>
<bpmn:endEvent id="Event_19fiqu4">
<bpmn:incoming>Flow_1vqk60p</bpmn:incoming>
</bpmn:endEvent>
<bpmn:sequenceFlow id="Flow_1vqk60p" sourceRef="Activity_DisplayData" targetRef="Event_19fiqu4" />
</bpmn:process> </bpmn:process>
<bpmndi:BPMNDiagram id="BPMNDiagram_1"> <bpmndi:BPMNDiagram id="BPMNDiagram_1">
<bpmndi:BPMNPlane id="BPMNPlane_1" bpmnElement="Process_SimpleScript"> <bpmndi:BPMNPlane id="BPMNPlane_1" bpmnElement="Proccess_SimpleScript">
<bpmndi:BPMNEdge id="Flow_10610n2_di" bpmnElement="Flow_10610n2">
<di:waypoint x="530" y="117" />
<di:waypoint x="592" y="117" />
</bpmndi:BPMNEdge>
<bpmndi:BPMNEdge id="Flow_1fviiob_di" bpmnElement="Flow_1fviiob">
<di:waypoint x="370" y="117" />
<di:waypoint x="430" y="117" />
</bpmndi:BPMNEdge>
<bpmndi:BPMNEdge id="Flow_1k9q28c_di" bpmnElement="Flow_1k9q28c">
<di:waypoint x="215" y="117" />
<di:waypoint x="270" y="117" />
</bpmndi:BPMNEdge>
<bpmndi:BPMNShape id="_BPMNShape_StartEvent_2" bpmnElement="StartEvent_1"> <bpmndi:BPMNShape id="_BPMNShape_StartEvent_2" bpmnElement="StartEvent_1">
<dc:Bounds x="179" y="99" width="36" height="36" /> <dc:Bounds x="179" y="159" width="36" height="36" />
</bpmndi:BPMNShape> </bpmndi:BPMNShape>
<bpmndi:BPMNShape id="Event_1fep863_di" bpmnElement="Event_1fep863"> <bpmndi:BPMNShape id="Activity_0l45w13_di" bpmnElement="Activity_SetInitialData">
<dc:Bounds x="592" y="99" width="36" height="36" /> <dc:Bounds x="270" y="137" width="100" height="80" />
<bpmndi:BPMNLabel />
</bpmndi:BPMNShape> </bpmndi:BPMNShape>
<bpmndi:BPMNShape id="Activity_12kpu89_di" bpmnElement="Activity_RunScript"> <bpmndi:BPMNShape id="Activity_00n1s76_di" bpmnElement="Activity_CalculateNewData">
<dc:Bounds x="270" y="77" width="100" height="80" /> <dc:Bounds x="430" y="137" width="100" height="80" />
<bpmndi:BPMNLabel />
</bpmndi:BPMNShape> </bpmndi:BPMNShape>
<bpmndi:BPMNShape id="Activity_17mrit2_di" bpmnElement="Activity_DisplayData"> <bpmndi:BPMNShape id="Activity_1nhghi0_di" bpmnElement="Activity_DisplayData">
<dc:Bounds x="430" y="77" width="100" height="80" /> <dc:Bounds x="590" y="137" width="100" height="80" />
</bpmndi:BPMNShape> </bpmndi:BPMNShape>
<bpmndi:BPMNShape id="Event_19fiqu4_di" bpmnElement="Event_19fiqu4">
<dc:Bounds x="752" y="159" width="36" height="36" />
</bpmndi:BPMNShape>
<bpmndi:BPMNEdge id="Flow_0r3ua0i_di" bpmnElement="Flow_0r3ua0i">
<di:waypoint x="215" y="177" />
<di:waypoint x="270" y="177" />
</bpmndi:BPMNEdge>
<bpmndi:BPMNEdge id="Flow_19g4f88_di" bpmnElement="Flow_19g4f88">
<di:waypoint x="370" y="177" />
<di:waypoint x="430" y="177" />
</bpmndi:BPMNEdge>
<bpmndi:BPMNEdge id="Flow_152cqfw_di" bpmnElement="Flow_152cqfw">
<di:waypoint x="530" y="177" />
<di:waypoint x="590" y="177" />
</bpmndi:BPMNEdge>
<bpmndi:BPMNEdge id="Flow_1vqk60p_di" bpmnElement="Flow_1vqk60p">
<di:waypoint x="690" y="177" />
<di:waypoint x="752" y="177" />
</bpmndi:BPMNEdge>
</bpmndi:BPMNPlane> </bpmndi:BPMNPlane>
</bpmndi:BPMNDiagram> </bpmndi:BPMNDiagram>
</bpmn:definitions> </bpmn:definitions>
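The ScriptUnitTest_SimpleScript extension above pairs the "Calculate New Data" script with an input context and an expected output context. As a rough sketch of what that unit test encodes, run as plain Python outside the workflow engine (the harness below is illustrative only, not part of this diff):

input_json = {"a": 1, "b": 2}
expected_output_json = {"a": 1, "b": 2, "c": 3}

# Execute the task's script against a copy of the input context; the script
# reads a and b and adds c, so the result should equal the expected output.
context = dict(input_json)
exec("c = a + b", {}, context)
assert context == expected_output_json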

View File

@ -25,6 +25,7 @@ from spiffworkflow_backend.models.process_model import ProcessModelInfo
from spiffworkflow_backend.models.process_model import ProcessModelInfoSchema from spiffworkflow_backend.models.process_model import ProcessModelInfoSchema
from spiffworkflow_backend.models.user import UserModel from spiffworkflow_backend.models.user import UserModel
from spiffworkflow_backend.services.authorization_service import AuthorizationService from spiffworkflow_backend.services.authorization_service import AuthorizationService
from spiffworkflow_backend.services.file_system_service import FileSystemService
from spiffworkflow_backend.services.process_model_service import ProcessModelService from spiffworkflow_backend.services.process_model_service import ProcessModelService
from spiffworkflow_backend.services.user_service import UserService from spiffworkflow_backend.services.user_service import UserService
@ -34,6 +35,46 @@ from spiffworkflow_backend.services.user_service import UserService
class BaseTest: class BaseTest:
"""BaseTest.""" """BaseTest."""
def basic_test_setup(
self,
client: FlaskClient,
user: UserModel,
process_group_id: Optional[str] = "test_group",
process_model_id: Optional[str] = "random_fact",
bpmn_file_name: Optional[str] = None,
bpmn_file_location: Optional[str] = None,
) -> str:
"""Creates a process group.
Creates a process model.
Adds a bpmn file to the model.
"""
process_group_display_name = process_group_id or ""
process_group_description = process_group_id or ""
process_model_identifier = f"{process_group_id}/{process_model_id}"
if bpmn_file_location is None:
bpmn_file_location = process_model_id
self.create_process_group(
client, user, process_group_description, process_group_display_name
)
self.create_process_model_with_api(
client,
process_model_id=process_model_identifier,
process_model_display_name=process_group_display_name,
process_model_description=process_group_description,
user=user,
)
load_test_spec(
process_model_id=process_model_identifier,
bpmn_file_name=bpmn_file_name,
process_model_source_directory=bpmn_file_location,
)
return process_model_identifier
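The basic_test_setup helper above creates the group, creates the model over the API, loads the BPMN file, and returns the full model identifier. A hedged sketch of a typical call from a test in this diff (argument values are examples only; client and with_super_admin_user are the fixtures used throughout):

process_model_identifier = self.basic_test_setup(
    client,
    with_super_admin_user,
    process_group_id="test_group",
    process_model_id="simple_script",
    bpmn_file_name="simple_script.bpmn",
    bpmn_file_location="simple_script",
)
assert process_model_identifier == "test_group/simple_script"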
@staticmethod @staticmethod
def find_or_create_user(username: str = "test_user_1") -> UserModel: def find_or_create_user(username: str = "test_user_1") -> UserModel:
"""Find_or_create_user.""" """Find_or_create_user."""
@ -67,17 +108,19 @@ class BaseTest:
open_id_client_secret_key, open_id_client_secret_key,
) )
@staticmethod
def create_process_instance( def create_process_instance(
self,
client: FlaskClient, client: FlaskClient,
test_process_group_id: str,
test_process_model_id: str, test_process_model_id: str,
headers: Dict[str, str], headers: Dict[str, str],
) -> TestResponse: ) -> TestResponse:
"""Create_process_instance.""" """Create_process_instance.
load_test_spec(test_process_model_id, process_group_id=test_process_group_id)
There must be an existing process model to instantiate.
"""
modified_process_model_id = test_process_model_id.replace("/", ":")
response = client.post( response = client.post(
f"/v1.0/process-models/{test_process_group_id}/{test_process_model_id}/process-instances", f"/v1.0/process-models/{modified_process_model_id}/process-instances",
headers=headers, headers=headers,
) )
assert response.status_code == 201 assert response.status_code == 201
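Because model identifiers are now full paths, create_process_instance swaps slashes for colons before building the URL. A small sketch of that convention (the identifier below is illustrative):

test_process_model_id = "test_group/simple_script"
modified_process_model_id = test_process_model_id.replace("/", ":")
# -> "test_group:simple_script", used in URLs such as
# /v1.0/process-models/test_group:simple_script/process-instances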
@ -86,8 +129,7 @@ class BaseTest:
def create_process_model_with_api( def create_process_model_with_api(
self, self,
client: FlaskClient, client: FlaskClient,
process_group_id: Optional[str] = None, process_model_id: Optional[str] = None,
process_model_id: str = "make_cookies",
process_model_display_name: str = "Cooooookies", process_model_display_name: str = "Cooooookies",
process_model_description: str = "Om nom nom delicious cookies", process_model_description: str = "Om nom nom delicious cookies",
fault_or_suspend_on_exception: str = NotificationType.suspend.value, fault_or_suspend_on_exception: str = NotificationType.suspend.value,
@ -97,65 +139,77 @@ class BaseTest:
user: Optional[UserModel] = None, user: Optional[UserModel] = None,
) -> TestResponse: ) -> TestResponse:
"""Create_process_model.""" """Create_process_model."""
process_model_service = ProcessModelService() if process_model_id is not None:
# make sure we have a group # make sure we have a group
if process_group_id is None: process_group_id, _ = os.path.split(process_model_id)
process_group_tmp = ProcessGroup( process_group_path = f"{FileSystemService.root_path()}/{process_group_id}"
id="test_cat", if ProcessModelService().is_group(process_group_path):
display_name="Test Category",
display_order=0, if exception_notification_addresses is None:
admin=False, exception_notification_addresses = []
)
process_group = process_model_service.add_process_group(process_group_tmp) model = ProcessModelInfo(
id=process_model_id,
display_name=process_model_display_name,
description=process_model_description,
is_review=False,
primary_process_id=primary_process_id,
primary_file_name=primary_file_name,
fault_or_suspend_on_exception=fault_or_suspend_on_exception,
exception_notification_addresses=exception_notification_addresses,
)
if user is None:
user = self.find_or_create_user()
response = client.post(
"/v1.0/process-models",
content_type="application/json",
data=json.dumps(ProcessModelInfoSchema().dump(model)),
headers=self.logged_in_headers(user),
)
assert response.status_code == 201
return response
else:
raise Exception("You must create the group first")
else: else:
process_group = ProcessModelService().get_process_group(process_group_id) raise Exception(
"You must include the process_model_id, which must be a path to the model"
if exception_notification_addresses is None: )
exception_notification_addresses = []
model = ProcessModelInfo(
id=process_model_id,
display_name=process_model_display_name,
description=process_model_description,
process_group_id=process_group.id,
is_review=False,
primary_process_id=primary_process_id,
primary_file_name=primary_file_name,
fault_or_suspend_on_exception=fault_or_suspend_on_exception,
exception_notification_addresses=exception_notification_addresses,
)
if user is None:
user = self.find_or_create_user()
response = client.post(
"/v1.0/process-models",
content_type="application/json",
data=json.dumps(ProcessModelInfoSchema().dump(model)),
headers=self.logged_in_headers(user),
)
assert response.status_code == 201
return response
def create_spec_file( def create_spec_file(
self, self,
client: FlaskClient, client: FlaskClient,
process_group_id: str = "random_fact", process_model_id: str,
process_model_id: str = "random_fact", process_model_location: Optional[str] = None,
process_model: Optional[ProcessModelInfo] = None, process_model: Optional[ProcessModelInfo] = None,
file_name: str = "random_fact.svg", file_name: str = "random_fact.svg",
file_data: bytes = b"abcdef", file_data: bytes = b"abcdef",
user: Optional[UserModel] = None, user: Optional[UserModel] = None,
) -> Any: ) -> Any:
"""Test_create_spec_file.""" """Test_create_spec_file.
Adds a bpmn file to the model.
process_model_id is the destination path
process_model_location is the source path
because of permissions, user might be required now..., not sure yet.
"""
if process_model_location is None:
process_model_location = file_name.split(".")[0]
if process_model is None: if process_model is None:
process_model = load_test_spec( process_model = load_test_spec(
process_model_id, process_group_id=process_group_id process_model_id=process_model_id,
bpmn_file_name=file_name,
process_model_source_directory=process_model_location,
) )
data = {"file": (io.BytesIO(file_data), file_name)} data = {"file": (io.BytesIO(file_data), file_name)}
if user is None: if user is None:
user = self.find_or_create_user() user = self.find_or_create_user()
modified_process_model_id = process_model.id.replace("/", ":")
response = client.post( response = client.post(
f"/v1.0/process-models/{process_model.process_group_id}/{process_model.id}/files", f"/v1.0/process-models/{modified_process_model_id}/files",
data=data, data=data,
follow_redirects=True, follow_redirects=True,
content_type="multipart/form-data", content_type="multipart/form-data",
@ -168,7 +222,7 @@ class BaseTest:
# assert "image/svg+xml" == file["content_type"] # assert "image/svg+xml" == file["content_type"]
response = client.get( response = client.get(
f"/v1.0/process-models/{process_model.process_group_id}/{process_model.id}/files/{file_name}", f"/v1.0/process-models/{modified_process_model_id}/files/{file_name}",
headers=self.logged_in_headers(user), headers=self.logged_in_headers(user),
) )
assert response.status_code == 200 assert response.status_code == 200
@ -221,7 +275,7 @@ class BaseTest:
status=status, status=status,
process_initiator=user, process_initiator=user,
process_model_identifier=process_model.id, process_model_identifier=process_model.id,
process_group_identifier=process_model.process_group_id, process_group_identifier="",
updated_at_in_seconds=round(time.time()), updated_at_in_seconds=round(time.time()),
start_in_seconds=current_time - (3600 * 1), start_in_seconds=current_time - (3600 * 1),
end_in_seconds=current_time - (3600 * 1 - 20), end_in_seconds=current_time - (3600 * 1 - 20),

View File

@ -13,27 +13,30 @@ from spiffworkflow_backend.services.spec_file_service import SpecFileService
class ExampleDataLoader: class ExampleDataLoader:
"""ExampleDataLoader.""" """ExampleDataLoader."""
@staticmethod
def create_spec( def create_spec(
self,
process_model_id: str, process_model_id: str,
display_name: str = "", display_name: str = "",
description: str = "", description: str = "",
process_group_id: str = "",
display_order: int = 0, display_order: int = 0,
from_tests: bool = False, # from_tests: bool = False,
bpmn_file_name: Optional[str] = None, bpmn_file_name: Optional[str] = None,
process_model_source_directory: Optional[str] = None, process_model_source_directory: Optional[str] = None,
) -> ProcessModelInfo: ) -> ProcessModelInfo:
"""Assumes that a directory exists in static/bpmn with the same name as the given process_model_id. """Assumes that process_model_source_directory exists in static/bpmn and contains bpmn_file_name.
further assumes that the [process_model_id].bpmn is the primary file for the process model. further assumes that bpmn_file_name is the primary file for the process model.
returns an array of data models to be added to the database.
if bpmn_file_name is None we load all files in process_model_source_directory,
otherwise, we only load bpmn_file_name
""" """
if process_model_source_directory is None:
raise Exception("You must include `process_model_source_directory`.")
spec = ProcessModelInfo( spec = ProcessModelInfo(
id=process_model_id, id=process_model_id,
display_name=display_name, display_name=display_name,
description=description, description=description,
process_group_id=process_group_id,
display_order=display_order, display_order=display_order,
is_review=False, is_review=False,
) )
@ -55,25 +58,16 @@ class ExampleDataLoader:
if bpmn_file_name: if bpmn_file_name:
file_name_matcher = bpmn_file_name_with_extension file_name_matcher = bpmn_file_name_with_extension
file_glob = "" # file_glob = ""
if from_tests: file_glob = os.path.join(
file_glob = os.path.join( current_app.root_path,
current_app.instance_path, "..",
"..", "..",
"..", "tests",
"tests", "data",
"data", process_model_source_directory_to_use,
process_model_source_directory_to_use, file_name_matcher,
file_name_matcher, )
)
else:
file_glob = os.path.join(
current_app.root_path,
"static",
"bpmn",
process_model_source_directory_to_use,
file_name_matcher,
)
files = glob.glob(file_glob) files = glob.glob(file_glob)
for file_path in files: for file_path in files:
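With the from_tests flag removed, test BPMN files are always globbed out of tests/data. A hedged sketch of how file_glob resolves; the root path below is an assumed value purely for illustration:

import glob
import os

root_path = "/app/src/spiffworkflow_backend"  # hypothetical current_app.root_path
file_name_matcher = "hello_world.bpmn"  # per the docstring, a wildcard is used when bpmn_file_name is None
file_glob = os.path.join(
    root_path, "..", "..", "tests", "data", "hello_world", file_name_matcher
)
print(os.path.normpath(file_glob))  # /app/tests/data/hello_world/hello_world.bpmn
files = glob.glob(file_glob)  # matches only if that file actually exists on disk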

View File

@ -37,40 +37,17 @@ def assure_process_group_exists(process_group_id: Optional[str] = None) -> Proce
def load_test_spec( def load_test_spec(
process_model_id: str, process_model_id: str,
process_group_id: Optional[str] = None,
bpmn_file_name: Optional[str] = None, bpmn_file_name: Optional[str] = None,
process_model_source_directory: Optional[str] = None, process_model_source_directory: Optional[str] = None,
) -> ProcessModelInfo: ) -> ProcessModelInfo:
"""Loads a process model into the bpmn dir based on a directory in tests/data.""" """Loads a bpmn file into the process model dir based on a directory in tests/data."""
process_group = None if process_model_source_directory is None:
process_model_service = ProcessModelService() raise Exception("You must include a `process_model_source_directory`.")
if process_group_id is None:
process_group_id = "test_process_group_id"
process_group = assure_process_group_exists(process_group_id)
process_group_id = process_group.id
try: spec = ExampleDataLoader.create_spec(
return process_model_service.get_process_model( process_model_id=process_model_id,
process_model_id, group_id=process_group_id display_name=process_model_id,
) bpmn_file_name=bpmn_file_name,
except ProcessEntityNotFoundError: process_model_source_directory=process_model_source_directory,
spec = ExampleDataLoader().create_spec( )
process_model_id=process_model_id, return spec
from_tests=True,
display_name=process_model_id,
process_group_id=process_group_id,
bpmn_file_name=bpmn_file_name,
process_model_source_directory=process_model_source_directory,
)
return spec
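With process_group_id gone, load_test_spec takes a full-path model id plus the source directory under tests/data. A sketch of a typical call, mirroring the usages later in this diff:

process_model = load_test_spec(
    process_model_id="test_group/hello_world",
    bpmn_file_name="hello_world.bpmn",
    process_model_source_directory="hello_world",
)
assert process_model.id == "test_group/hello_world"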
# def user_info_to_query_string(user_info, redirect_url):
# query_string_list = []
# items = user_info.items()
# for key, value in items:
# query_string_list.append('%s=%s' % (key, urllib.parse.quote(value)))
#
# query_string_list.append('redirect_url=%s' % redirect_url)
#
# return '?%s' % '&'.join(query_string_list)

View File

@ -19,20 +19,45 @@ class TestLoggingService(BaseTest):
"""Test_process_instance_run.""" """Test_process_instance_run."""
process_group_id = "test_logging_spiff_logger" process_group_id = "test_logging_spiff_logger"
process_model_id = "simple_script" process_model_id = "simple_script"
self.create_process_group(
client=client, user=with_super_admin_user, process_group_id=process_group_id
)
process_model_identifier = f"{process_group_id}/{process_model_id}"
# create the model
self.create_process_model_with_api(
client=client,
process_model_id=process_model_identifier,
process_model_display_name="Simple Script",
process_model_description="Simple Script",
user=with_super_admin_user,
)
bpmn_file_name = "simple_script.bpmn"
bpmn_file_data_bytes = self.get_test_data_file_contents(
bpmn_file_name, "simple_script"
)
# add bpmn to the model
self.create_spec_file(
client=client,
process_model_id=process_model_identifier,
file_name=bpmn_file_name,
file_data=bpmn_file_data_bytes,
user=with_super_admin_user,
)
headers = self.logged_in_headers(with_super_admin_user) headers = self.logged_in_headers(with_super_admin_user)
response = self.create_process_instance( response = self.create_process_instance(
client, process_group_id, process_model_id, headers client, process_model_identifier, headers
) )
assert response.json is not None assert response.json is not None
process_instance_id = response.json["id"] process_instance_id = response.json["id"]
response = client.post( response = client.post(
f"/v1.0/process-models/{process_group_id}/{process_model_id}/process-instances/{process_instance_id}/run", f"/v1.0/process-instances/{process_instance_id}/run",
headers=headers, headers=headers,
) )
assert response.status_code == 200 assert response.status_code == 200
log_response = client.get( log_response = client.get(
f"/v1.0/process-models/{process_group_id}/{process_model_id}/process-instances/{process_instance_id}/logs", f"/v1.0/process-instances/{process_instance_id}/logs",
headers=headers, headers=headers,
) )
assert log_response.status_code == 200 assert log_response.status_code == 200

View File

@ -0,0 +1,174 @@
"""Test_nested_groups."""
import json
from flask.app import Flask
from flask.testing import FlaskClient
from tests.spiffworkflow_backend.helpers.base_test import BaseTest
from spiffworkflow_backend.models.process_group import ProcessGroup
from spiffworkflow_backend.models.process_group import ProcessGroupSchema
from spiffworkflow_backend.models.process_model import ProcessModelInfo
from spiffworkflow_backend.models.process_model import ProcessModelInfoSchema
from spiffworkflow_backend.models.user import UserModel
class TestNestedGroups(BaseTest):
"""TestNestedGroups."""
def test_nested_groups(
self,
app: Flask,
client: FlaskClient,
with_db_and_bpmn_file_cleanup: None,
) -> None:
"""Test_nested_groups."""
# /process-groups/{process_group_path}/show
target_uri = "/v1.0/process-groups/group_a,group_b"
user = self.find_or_create_user()
self.add_permissions_to_user(
user, target_uri=target_uri, permission_names=["read"]
)
response = client.get( # noqa: F841
target_uri, headers=self.logged_in_headers(user)
)
print("test_nested_groups")
def test_add_nested_group(
self,
app: Flask,
client: FlaskClient,
with_db_and_bpmn_file_cleanup: None,
with_super_admin_user: UserModel,
) -> None:
"""Test_add_nested_group."""
# user = self.find_or_create_user()
# self.add_permissions_to_user(
# user, target_uri=target_uri, permission_names=["read", "create"]
# )
process_group_a = ProcessGroup(
id="group_a",
display_name="Group A",
display_order=0,
admin=False,
)
response_a = client.post( # noqa: F841
"/v1.0/process-groups",
headers=self.logged_in_headers(with_super_admin_user),
content_type="application/json",
data=json.dumps(ProcessGroupSchema().dump(process_group_a)),
)
process_group_b = ProcessGroup(
id="group_a/group_b",
display_name="Group B",
display_order=0,
admin=False,
)
response_b = client.post( # noqa: F841
"/v1.0/process-groups",
headers=self.logged_in_headers(with_super_admin_user),
content_type="application/json",
data=json.dumps(ProcessGroupSchema().dump(process_group_b)),
)
process_group_c = ProcessGroup(
id="group_a/group_b/group_c",
display_name="Group C",
display_order=0,
admin=False,
)
response_c = client.post( # noqa: F841
"/v1.0/process-groups",
headers=self.logged_in_headers(with_super_admin_user),
content_type="application/json",
data=json.dumps(ProcessGroupSchema().dump(process_group_c)),
)
print("test_add_nested_group")
def test_process_model_add(
self,
app: Flask,
client: FlaskClient,
with_db_and_bpmn_file_cleanup: None,
with_super_admin_user: UserModel,
) -> None:
"""Test_process_model_add."""
process_group_a = ProcessGroup(
id="group_a",
display_name="Group A",
display_order=0,
admin=False,
)
response_a = client.post( # noqa: F841
"/v1.0/process-groups",
headers=self.logged_in_headers(with_super_admin_user),
content_type="application/json",
data=json.dumps(ProcessGroupSchema().dump(process_group_a)),
)
process_group_b = ProcessGroup(
id="group_a/group_b",
display_name="Group B",
display_order=0,
admin=False,
)
response_b = client.post( # noqa: F841
"/v1.0/process-groups",
headers=self.logged_in_headers(with_super_admin_user),
content_type="application/json",
data=json.dumps(ProcessGroupSchema().dump(process_group_b)),
)
process_model = ProcessModelInfo(
id="process_model",
display_name="Process Model",
description="Process Model",
primary_file_name="primary_file.bpmn",
primary_process_id="primary_process_id",
display_order=0,
)
model_response = client.post( # noqa: F841
"v1.0/process-models",
headers=self.logged_in_headers(with_super_admin_user),
content_type="application/json",
data=json.dumps(ProcessModelInfoSchema().dump(process_model)),
)
print("test_process_model_add")
def test_process_group_show(
self,
app: Flask,
client: FlaskClient,
with_db_and_bpmn_file_cleanup: None,
with_super_admin_user: UserModel,
) -> None:
"""Test_process_group_show."""
# target_uri = "/process-groups/{process_group_id}"
# user = self.find_or_create_user("testadmin1")
# self.add_permissions_to_user(
# user, target_uri="v1.0/process-groups", permission_names=["read", "create"]
# )
# self.add_permissions_to_user(
# user, target_uri="/process-groups/{process_group_id}", permission_names=["read", "create"]
# )
process_group_a = ProcessGroup(
id="group_a",
display_name="Group A",
display_order=0,
admin=False,
)
response_create_a = client.post( # noqa: F841
"/v1.0/process-groups",
headers=self.logged_in_headers(with_super_admin_user),
content_type="application/json",
data=json.dumps(ProcessGroupSchema().dump(process_group_a)),
)
target_uri = "/v1.0/process-groups/group_a"
user = self.find_or_create_user()
self.add_permissions_to_user(
user, target_uri=target_uri, permission_names=["read"]
)
response = client.get( # noqa: F841
target_uri, headers=self.logged_in_headers(user)
)
print("test_process_group_show: ")

File diff suppressed because it is too large

View File

@ -42,16 +42,18 @@ class SecretServiceTestHelpers(BaseTest):
self.test_process_group_id, self.test_process_group_id,
display_name=self.test_process_group_display_name, display_name=self.test_process_group_display_name,
) )
process_model_identifier = (
f"{self.test_process_group_id}/{self.test_process_model_id}"
)
self.create_process_model_with_api( self.create_process_model_with_api(
client, client,
process_group_id=self.test_process_group_id, process_model_id=process_model_identifier,
process_model_id=self.test_process_model_id,
process_model_display_name=self.test_process_model_display_name, process_model_display_name=self.test_process_model_display_name,
process_model_description=self.test_process_model_description, process_model_description=self.test_process_model_description,
user=user, user=user,
) )
process_model_info = ProcessModelService().get_process_model( process_model_info = ProcessModelService().get_process_model(
self.test_process_model_id, self.test_process_group_id process_model_identifier
) )
return process_model_info return process_model_info

View File

@ -1,10 +1,12 @@
"""Test_get_localtime.""" """Test_get_localtime."""
from flask.app import Flask from flask.app import Flask
from flask.testing import FlaskClient
from flask_bpmn.models.db import db from flask_bpmn.models.db import db
from tests.spiffworkflow_backend.helpers.base_test import BaseTest from tests.spiffworkflow_backend.helpers.base_test import BaseTest
from tests.spiffworkflow_backend.helpers.test_data import load_test_spec from tests.spiffworkflow_backend.helpers.test_data import load_test_spec
from spiffworkflow_backend.models.group import GroupModel from spiffworkflow_backend.models.group import GroupModel
from spiffworkflow_backend.models.user import UserModel
from spiffworkflow_backend.services.process_instance_processor import ( from spiffworkflow_backend.services.process_instance_processor import (
ProcessInstanceProcessor, ProcessInstanceProcessor,
) )
@ -17,7 +19,9 @@ class TestGetGroupMembers(BaseTest):
def test_can_get_members_of_a_group( def test_can_get_members_of_a_group(
self, self,
app: Flask, app: Flask,
client: FlaskClient,
with_db_and_bpmn_file_cleanup: None, with_db_and_bpmn_file_cleanup: None,
with_super_admin_user: UserModel,
) -> None: ) -> None:
"""Test_can_get_members_of_a_group.""" """Test_can_get_members_of_a_group."""
initiator_user = self.find_or_create_user("initiator_user") initiator_user = self.find_or_create_user("initiator_user")
@ -34,9 +38,13 @@ class TestGetGroupMembers(BaseTest):
UserService.add_user_to_group(testuser2, group_a) UserService.add_user_to_group(testuser2, group_a)
UserService.add_user_to_group(testuser3, group_b) UserService.add_user_to_group(testuser3, group_b)
self.create_process_group(
client, with_super_admin_user, "test_group", "test_group"
)
process_model = load_test_spec( process_model = load_test_spec(
process_model_id="get_group_members", process_model_id="test_group/get_group_members",
bpmn_file_name="get_group_members.bpmn", bpmn_file_name="get_group_members.bpmn",
process_model_source_directory="get_group_members",
) )
process_instance = self.create_process_instance_from_process_model( process_instance = self.create_process_instance_from_process_model(
process_model=process_model, user=initiator_user process_model=process_model, user=initiator_user

View File

@ -49,8 +49,18 @@ class TestGetLocaltime(BaseTest):
) -> None: ) -> None:
"""Test_process_instance_run.""" """Test_process_instance_run."""
initiator_user = self.find_or_create_user("initiator_user") initiator_user = self.find_or_create_user("initiator_user")
self.add_permissions_to_user(
initiator_user,
target_uri="/v1.0/process-groups",
permission_names=["read", "create"],
)
self.create_process_group(
client=client, user=initiator_user, process_group_id="test_group"
)
process_model = load_test_spec( process_model = load_test_spec(
process_model_id="get_localtime", bpmn_file_name="get_localtime.bpmn" process_model_id="test_group/get_localtime",
bpmn_file_name="get_localtime.bpmn",
process_model_source_directory="get_localtime",
) )
process_instance = self.create_process_instance_from_process_model( process_instance = self.create_process_instance_from_process_model(
process_model=process_model, user=initiator_user process_model=process_model, user=initiator_user

View File

@ -1,9 +1,10 @@
"""Test_message_service.""" """Test_message_service."""
import pytest import pytest
from flask import Flask from flask import Flask
from flask.testing import FlaskClient
from tests.spiffworkflow_backend.helpers.base_test import BaseTest from tests.spiffworkflow_backend.helpers.base_test import BaseTest
from tests.spiffworkflow_backend.helpers.test_data import load_test_spec
from spiffworkflow_backend.models.user import UserModel
from spiffworkflow_backend.models.user import UserNotFoundError from spiffworkflow_backend.models.user import UserNotFoundError
from spiffworkflow_backend.services.authorization_service import AuthorizationService from spiffworkflow_backend.services.authorization_service import AuthorizationService
from spiffworkflow_backend.services.process_instance_processor import ( from spiffworkflow_backend.services.process_instance_processor import (
@ -12,6 +13,7 @@ from spiffworkflow_backend.services.process_instance_processor import (
from spiffworkflow_backend.services.process_instance_service import ( from spiffworkflow_backend.services.process_instance_service import (
ProcessInstanceService, ProcessInstanceService,
) )
from spiffworkflow_backend.services.process_model_service import ProcessModelService
class TestAuthorizationService(BaseTest): class TestAuthorizationService(BaseTest):
@ -89,7 +91,11 @@ class TestAuthorizationService(BaseTest):
) )
def test_user_can_be_added_to_active_task_on_first_login( def test_user_can_be_added_to_active_task_on_first_login(
self, app: Flask, with_db_and_bpmn_file_cleanup: None self,
app: Flask,
client: FlaskClient,
with_db_and_bpmn_file_cleanup: None,
with_super_admin_user: UserModel,
) -> None: ) -> None:
"""Test_user_can_be_added_to_active_task_on_first_login.""" """Test_user_can_be_added_to_active_task_on_first_login."""
initiator_user = self.find_or_create_user("initiator_user") initiator_user = self.find_or_create_user("initiator_user")
@ -98,8 +104,17 @@ class TestAuthorizationService(BaseTest):
self.find_or_create_user("testuser1") self.find_or_create_user("testuser1")
AuthorizationService.import_permissions_from_yaml_file() AuthorizationService.import_permissions_from_yaml_file()
process_model = load_test_spec( process_model_identifier = self.basic_test_setup(
process_model_id="model_with_lanes", bpmn_file_name="lanes.bpmn" client=client,
user=with_super_admin_user,
process_group_id="test_group",
process_model_id="model_with_lanes",
bpmn_file_name="lanes.bpmn",
bpmn_file_location="model_with_lanes",
)
process_model = ProcessModelService().get_process_model(
process_model_id=process_model_identifier
) )
process_instance = self.create_process_instance_from_process_model( process_instance = self.create_process_instance_from_process_model(
process_model=process_model, user=initiator_user process_model=process_model, user=initiator_user

View File

@ -1,8 +1,9 @@
"""Test_various_bpmn_constructs.""" """Test_various_bpmn_constructs."""
from flask.app import Flask from flask.app import Flask
from flask.testing import FlaskClient
from tests.spiffworkflow_backend.helpers.base_test import BaseTest from tests.spiffworkflow_backend.helpers.base_test import BaseTest
from tests.spiffworkflow_backend.helpers.test_data import load_test_spec
from spiffworkflow_backend.models.user import UserModel
from spiffworkflow_backend.services.process_instance_processor import ( from spiffworkflow_backend.services.process_instance_processor import (
ProcessInstanceProcessor, ProcessInstanceProcessor,
) )
@ -15,21 +16,36 @@ class TestDotNotation(BaseTest):
"""TestVariousBpmnConstructs.""" """TestVariousBpmnConstructs."""
def test_dot_notation( def test_dot_notation(
self, app: Flask, with_db_and_bpmn_file_cleanup: None self,
app: Flask,
client: FlaskClient,
with_db_and_bpmn_file_cleanup: None,
with_super_admin_user: UserModel,
) -> None: ) -> None:
"""Test_form_data_conversion_to_dot_dict.""" """Test_form_data_conversion_to_dot_dict."""
process_model = load_test_spec( process_group_id = "dot_notation_group"
"test_dot_notation", process_model_id = "test_dot_notation"
bpmn_file_name="diagram.bpmn", bpmn_file_name = "diagram.bpmn"
process_model_source_directory="dot_notation", bpmn_file_location = "dot_notation"
process_model_identifier = self.basic_test_setup(
client,
with_super_admin_user,
process_group_id=process_group_id,
process_model_id=process_model_id,
bpmn_file_name=bpmn_file_name,
bpmn_file_location=bpmn_file_location,
) )
current_user = self.find_or_create_user()
process_instance = self.create_process_instance_from_process_model( headers = self.logged_in_headers(with_super_admin_user)
process_model response = self.create_process_instance(
client, process_model_identifier, headers
) )
process_instance_id = response.json["id"]
process_instance = ProcessInstanceService().get_process_instance(
process_instance_id
)
processor = ProcessInstanceProcessor(process_instance) processor = ProcessInstanceProcessor(process_instance)
processor.do_engine_steps(save=True) processor.do_engine_steps(save=True)
user_task = processor.get_ready_user_tasks()[0] user_task = processor.get_ready_user_tasks()[0]
@ -41,7 +57,7 @@ class TestDotNotation(BaseTest):
"invoice.dueDate": "09/30/2022", "invoice.dueDate": "09/30/2022",
} }
ProcessInstanceService.complete_form_task( ProcessInstanceService.complete_form_task(
processor, user_task, form_data, current_user processor, user_task, form_data, with_super_admin_user
) )
expected = { expected = {

View File

@ -1,24 +1,52 @@
"""Test_message_instance.""" """Test_message_instance."""
import pytest import pytest
from flask import Flask from flask import Flask
from flask.testing import FlaskClient
from flask_bpmn.models.db import db from flask_bpmn.models.db import db
from tests.spiffworkflow_backend.helpers.base_test import BaseTest from tests.spiffworkflow_backend.helpers.base_test import BaseTest
from tests.spiffworkflow_backend.helpers.test_data import load_test_spec
from spiffworkflow_backend.models.message_instance import MessageInstanceModel from spiffworkflow_backend.models.message_instance import MessageInstanceModel
from spiffworkflow_backend.models.message_model import MessageModel from spiffworkflow_backend.models.message_model import MessageModel
from spiffworkflow_backend.models.user import UserModel
from spiffworkflow_backend.services.process_model_service import ProcessModelService
class TestMessageInstance(BaseTest): class TestMessageInstance(BaseTest):
"""TestMessageInstance.""" """TestMessageInstance."""
def setup_message_tests(self, client: FlaskClient, user: UserModel) -> str:
"""Setup_message_tests."""
process_group_id = "test_group"
process_model_id = "hello_world"
bpmn_file_name = "hello_world.bpmn"
bpmn_file_location = "hello_world"
process_model_identifier = self.basic_test_setup(
client,
user,
process_group_id=process_group_id,
process_model_id=process_model_id,
bpmn_file_name=bpmn_file_name,
bpmn_file_location=bpmn_file_location,
)
return process_model_identifier
def test_can_create_message_instance( def test_can_create_message_instance(
self, app: Flask, with_db_and_bpmn_file_cleanup: None self,
app: Flask,
client: FlaskClient,
with_db_and_bpmn_file_cleanup: None,
with_super_admin_user: UserModel,
) -> None: ) -> None:
"""Test_can_create_message_instance.""" """Test_can_create_message_instance."""
message_model_identifier = "message_model_one" message_model_identifier = "message_model_one"
message_model = self.create_message_model(message_model_identifier) message_model = self.create_message_model(message_model_identifier)
process_model = load_test_spec("hello_world") process_model_identifier = self.setup_message_tests(
client, with_super_admin_user
)
process_model = ProcessModelService().get_process_model(
process_model_id=process_model_identifier
)
process_instance = self.create_process_instance_from_process_model( process_instance = self.create_process_instance_from_process_model(
process_model, "waiting" process_model, "waiting"
) )
@ -40,12 +68,22 @@ class TestMessageInstance(BaseTest):
assert queued_message_from_query is not None assert queued_message_from_query is not None
def test_cannot_set_invalid_status( def test_cannot_set_invalid_status(
self, app: Flask, with_db_and_bpmn_file_cleanup: None self,
app: Flask,
client: FlaskClient,
with_db_and_bpmn_file_cleanup: None,
with_super_admin_user: UserModel,
) -> None: ) -> None:
"""Test_cannot_set_invalid_status.""" """Test_cannot_set_invalid_status."""
message_model_identifier = "message_model_one" message_model_identifier = "message_model_one"
message_model = self.create_message_model(message_model_identifier) message_model = self.create_message_model(message_model_identifier)
process_model = load_test_spec("hello_world") process_model_identifier = self.setup_message_tests(
client, with_super_admin_user
)
process_model = ProcessModelService().get_process_model(
process_model_id=process_model_identifier
)
process_instance = self.create_process_instance_from_process_model( process_instance = self.create_process_instance_from_process_model(
process_model, "waiting" process_model, "waiting"
) )
@ -76,12 +114,22 @@ class TestMessageInstance(BaseTest):
) )
def test_cannot_set_invalid_message_type( def test_cannot_set_invalid_message_type(
self, app: Flask, with_db_and_bpmn_file_cleanup: None self,
app: Flask,
client: FlaskClient,
with_db_and_bpmn_file_cleanup: None,
with_super_admin_user: UserModel,
) -> None: ) -> None:
"""Test_cannot_set_invalid_message_type.""" """Test_cannot_set_invalid_message_type."""
message_model_identifier = "message_model_one" message_model_identifier = "message_model_one"
message_model = self.create_message_model(message_model_identifier) message_model = self.create_message_model(message_model_identifier)
process_model = load_test_spec("hello_world") process_model_identifier = self.setup_message_tests(
client, with_super_admin_user
)
process_model = ProcessModelService().get_process_model(
process_model_id=process_model_identifier
)
process_instance = self.create_process_instance_from_process_model( process_instance = self.create_process_instance_from_process_model(
process_model, "waiting" process_model, "waiting"
) )
@ -113,12 +161,22 @@ class TestMessageInstance(BaseTest):
) )
def test_force_failure_cause_if_status_is_failure( def test_force_failure_cause_if_status_is_failure(
self, app: Flask, with_db_and_bpmn_file_cleanup: None self,
app: Flask,
client: FlaskClient,
with_db_and_bpmn_file_cleanup: None,
with_super_admin_user: UserModel,
) -> None: ) -> None:
"""Test_force_failure_cause_if_status_is_failure.""" """Test_force_failure_cause_if_status_is_failure."""
message_model_identifier = "message_model_one" message_model_identifier = "message_model_one"
message_model = self.create_message_model(message_model_identifier) message_model = self.create_message_model(message_model_identifier)
process_model = load_test_spec("hello_world") process_model_identifier = self.setup_message_tests(
client, with_super_admin_user
)
process_model = ProcessModelService().get_process_model(
process_model_id=process_model_identifier
)
process_instance = self.create_process_instance_from_process_model( process_instance = self.create_process_instance_from_process_model(
process_model, "waiting" process_model, "waiting"
) )
@ -154,7 +212,8 @@ class TestMessageInstance(BaseTest):
assert queued_message.id is not None assert queued_message.id is not None
assert queued_message.failure_cause == "THIS TEST FAILURE" assert queued_message.failure_cause == "THIS TEST FAILURE"
def create_message_model(self, message_model_identifier: str) -> MessageModel: @staticmethod
def create_message_model(message_model_identifier: str) -> MessageModel:
"""Create_message_model.""" """Create_message_model."""
message_model = MessageModel(identifier=message_model_identifier) message_model = MessageModel(identifier=message_model_identifier)
db.session.add(message_model) db.session.add(message_model)

View File

@ -1,5 +1,6 @@
"""Test_message_service.""" """Test_message_service."""
from flask import Flask from flask import Flask
from flask.testing import FlaskClient
from tests.spiffworkflow_backend.helpers.base_test import BaseTest from tests.spiffworkflow_backend.helpers.base_test import BaseTest
from tests.spiffworkflow_backend.helpers.test_data import load_test_spec from tests.spiffworkflow_backend.helpers.test_data import load_test_spec
@ -9,6 +10,7 @@ from spiffworkflow_backend.models.message_correlation_message_instance import (
) )
from spiffworkflow_backend.models.message_instance import MessageInstanceModel from spiffworkflow_backend.models.message_instance import MessageInstanceModel
from spiffworkflow_backend.models.process_instance import ProcessInstanceModel from spiffworkflow_backend.models.process_instance import ProcessInstanceModel
from spiffworkflow_backend.models.user import UserModel
from spiffworkflow_backend.services.message_service import MessageService from spiffworkflow_backend.services.message_service import MessageService
from spiffworkflow_backend.services.process_instance_processor import ( from spiffworkflow_backend.services.process_instance_processor import (
ProcessInstanceProcessor, ProcessInstanceProcessor,
@ -22,25 +24,32 @@ class TestMessageService(BaseTest):
"""TestMessageService.""" """TestMessageService."""
def test_can_send_message_to_waiting_message( def test_can_send_message_to_waiting_message(
self, app: Flask, with_db_and_bpmn_file_cleanup: None self,
app: Flask,
client: FlaskClient,
with_db_and_bpmn_file_cleanup: None,
with_super_admin_user: UserModel,
) -> None: ) -> None:
"""Test_can_send_message_to_waiting_message.""" """Test_can_send_message_to_waiting_message."""
process_model_sender = load_test_spec( process_group_id = "test_group"
"message_sender", self.create_process_group(
process_model_source_directory="message_send_one_conversation", client, with_super_admin_user, process_group_id, process_group_id
bpmn_file_name="message_sender",
) )
load_test_spec( load_test_spec(
"message_receiver", "test_group/message_receiver",
process_model_source_directory="message_send_one_conversation", process_model_source_directory="message_send_one_conversation",
bpmn_file_name="message_receiver", bpmn_file_name="message_receiver.bpmn",
)
process_model_sender = load_test_spec(
"test_group/message_sender",
process_model_source_directory="message_send_one_conversation",
bpmn_file_name="message_sender.bpmn",
) )
user = self.find_or_create_user()
process_instance_sender = ProcessInstanceService.create_process_instance( process_instance_sender = ProcessInstanceService.create_process_instance(
process_model_sender.id, process_model_sender.id,
user, with_super_admin_user,
process_group_identifier=process_model_sender.process_group_id,
) )
processor_sender = ProcessInstanceProcessor(process_instance_sender) processor_sender = ProcessInstanceProcessor(process_instance_sender)
@ -115,21 +124,30 @@ class TestMessageService(BaseTest):
assert process_instance.status == "complete" assert process_instance.status == "complete"
def test_can_send_message_to_multiple_process_models( def test_can_send_message_to_multiple_process_models(
self, app: Flask, with_db_and_bpmn_file_cleanup: None self,
app: Flask,
client: FlaskClient,
with_db_and_bpmn_file_cleanup: None,
with_super_admin_user: UserModel,
) -> None: ) -> None:
"""Test_can_send_message_to_multiple_process_models.""" """Test_can_send_message_to_multiple_process_models."""
process_group_id = "test_group"
self.create_process_group(
client, with_super_admin_user, process_group_id, process_group_id
)
process_model_sender = load_test_spec( process_model_sender = load_test_spec(
"message_sender", "test_group/message_sender",
process_model_source_directory="message_send_two_conversations", process_model_source_directory="message_send_two_conversations",
bpmn_file_name="message_sender", bpmn_file_name="message_sender",
) )
load_test_spec( load_test_spec(
"message_receiver_one", "test_group/message_receiver_one",
process_model_source_directory="message_send_two_conversations", process_model_source_directory="message_send_two_conversations",
bpmn_file_name="message_receiver_one", bpmn_file_name="message_receiver_one",
) )
load_test_spec( load_test_spec(
"message_receiver_two", "test_group/message_receiver_two",
process_model_source_directory="message_send_two_conversations", process_model_source_directory="message_send_two_conversations",
bpmn_file_name="message_receiver_two", bpmn_file_name="message_receiver_two",
) )
@ -139,7 +157,7 @@ class TestMessageService(BaseTest):
process_instance_sender = ProcessInstanceService.create_process_instance( process_instance_sender = ProcessInstanceService.create_process_instance(
process_model_sender.id, process_model_sender.id,
user, user,
process_group_identifier=process_model_sender.process_group_id, # process_group_identifier=process_model_sender.process_group_id,
) )
processor_sender = ProcessInstanceProcessor(process_instance_sender) processor_sender = ProcessInstanceProcessor(process_instance_sender)
@ -189,24 +207,24 @@ class TestMessageService(BaseTest):
assert len(process_instance_result) == 3 assert len(process_instance_result) == 3
process_instance_receiver_one = ProcessInstanceModel.query.filter_by( process_instance_receiver_one = ProcessInstanceModel.query.filter_by(
process_model_identifier="message_receiver_one" process_model_identifier="test_group/message_receiver_one"
).first() ).first()
assert process_instance_receiver_one is not None assert process_instance_receiver_one is not None
process_instance_receiver_two = ProcessInstanceModel.query.filter_by( process_instance_receiver_two = ProcessInstanceModel.query.filter_by(
process_model_identifier="message_receiver_two" process_model_identifier="test_group/message_receiver_two"
).first() ).first()
assert process_instance_receiver_two is not None assert process_instance_receiver_two is not None
# just make sure it's a different process instance # just make sure it's a different process instance
assert ( assert (
process_instance_receiver_one.process_model_identifier process_instance_receiver_one.process_model_identifier
== "message_receiver_one" == "test_group/message_receiver_one"
) )
assert process_instance_receiver_one.id != process_instance_sender.id assert process_instance_receiver_one.id != process_instance_sender.id
assert process_instance_receiver_one.status == "complete" assert process_instance_receiver_one.status == "complete"
assert ( assert (
process_instance_receiver_two.process_model_identifier process_instance_receiver_two.process_model_identifier
== "message_receiver_two" == "test_group/message_receiver_two"
) )
assert process_instance_receiver_two.id != process_instance_sender.id assert process_instance_receiver_two.id != process_instance_sender.id
assert process_instance_receiver_two.status == "complete" assert process_instance_receiver_two.status == "complete"

View File

@ -1,5 +1,6 @@
"""Test Permissions.""" """Test Permissions."""
from flask.app import Flask from flask.app import Flask
from flask.testing import FlaskClient
from flask_bpmn.models.db import db from flask_bpmn.models.db import db
from tests.spiffworkflow_backend.helpers.base_test import BaseTest from tests.spiffworkflow_backend.helpers.base_test import BaseTest
from tests.spiffworkflow_backend.helpers.test_data import load_test_spec from tests.spiffworkflow_backend.helpers.test_data import load_test_spec
@ -8,6 +9,7 @@ from spiffworkflow_backend.models.group import GroupModel
from spiffworkflow_backend.models.permission_assignment import PermissionAssignmentModel from spiffworkflow_backend.models.permission_assignment import PermissionAssignmentModel
from spiffworkflow_backend.models.permission_target import PermissionTargetModel from spiffworkflow_backend.models.permission_target import PermissionTargetModel
from spiffworkflow_backend.models.principal import PrincipalModel from spiffworkflow_backend.models.principal import PrincipalModel
from spiffworkflow_backend.models.user import UserModel
from spiffworkflow_backend.services.user_service import UserService from spiffworkflow_backend.services.user_service import UserService
@ -22,13 +24,21 @@ class TestPermissions(BaseTest):
"""TestPermissions.""" """TestPermissions."""
def test_user_can_be_given_permission_to_administer_process_group( def test_user_can_be_given_permission_to_administer_process_group(
self, app: Flask, with_db_and_bpmn_file_cleanup: None self,
app: Flask,
client: FlaskClient,
with_db_and_bpmn_file_cleanup: None,
with_super_admin_user: UserModel,
) -> None: ) -> None:
"""Test_user_can_be_given_permission_to_administer_process_group.""" """Test_user_can_be_given_permission_to_administer_process_group."""
process_group_id = "group-a" process_group_id = "group-a"
self.create_process_group(
client, with_super_admin_user, process_group_id, process_group_id
)
load_test_spec( load_test_spec(
"timers_intermediate_catch_event", "group-a/timers_intermediate_catch_event",
process_group_id=process_group_id, bpmn_file_name="timers_intermediate_catch_event.bpmn",
process_model_source_directory="timers_intermediate_catch_event",
) )
dan = self.find_or_create_user() dan = self.find_or_create_user()
principal = dan.principal principal = dan.principal
@ -55,8 +65,9 @@ class TestPermissions(BaseTest):
process_group_b_id = process_group_ids[1] process_group_b_id = process_group_ids[1]
for process_group_id in process_group_ids: for process_group_id in process_group_ids:
load_test_spec( load_test_spec(
"timers_intermediate_catch_event", f"{process_group_id}/timers_intermediate_catch_event",
process_group_id=process_group_id, bpmn_file_name="timers_intermediate_catch_event",
process_model_source_directory="timers_intermediate_catch_event",
) )
group_a_admin = self.find_or_create_user() group_a_admin = self.find_or_create_user()
@ -86,11 +97,11 @@ class TestPermissions(BaseTest):
"""Test_user_can_be_granted_access_through_a_group.""" """Test_user_can_be_granted_access_through_a_group."""
process_group_ids = ["group-a", "group-b"] process_group_ids = ["group-a", "group-b"]
process_group_a_id = process_group_ids[0] process_group_a_id = process_group_ids[0]
process_group_ids[1]
for process_group_id in process_group_ids: for process_group_id in process_group_ids:
load_test_spec( load_test_spec(
"timers_intermediate_catch_event", f"{process_group_id}/timers_intermediate_catch_event",
process_group_id=process_group_id, bpmn_file_name="timers_intermediate_catch_event.bpmn",
process_model_source_directory="timers_intermediate_catch_event",
) )
user = self.find_or_create_user() user = self.find_or_create_user()
group = GroupModel(identifier="groupA") group = GroupModel(identifier="groupA")
@ -127,8 +138,9 @@ class TestPermissions(BaseTest):
process_group_b_id = process_group_ids[1] process_group_b_id = process_group_ids[1]
for process_group_id in process_group_ids: for process_group_id in process_group_ids:
load_test_spec( load_test_spec(
"timers_intermediate_catch_event", f"{process_group_id}/timers_intermediate_catch_event",
process_group_id=process_group_id, bpmn_file_name="timers_intermediate_catch_event.bpmn",
process_model_source_directory="timers_intermediate_catch_event",
) )
group_a_admin = self.find_or_create_user() group_a_admin = self.find_or_create_user()

View File

@ -2,11 +2,13 @@
import pytest import pytest
from flask import g from flask import g
from flask.app import Flask from flask.app import Flask
from flask.testing import FlaskClient
from tests.spiffworkflow_backend.helpers.base_test import BaseTest from tests.spiffworkflow_backend.helpers.base_test import BaseTest
from tests.spiffworkflow_backend.helpers.test_data import load_test_spec from tests.spiffworkflow_backend.helpers.test_data import load_test_spec
from spiffworkflow_backend.models.group import GroupModel from spiffworkflow_backend.models.group import GroupModel
from spiffworkflow_backend.models.process_instance import ProcessInstanceStatus from spiffworkflow_backend.models.process_instance import ProcessInstanceStatus
from spiffworkflow_backend.models.user import UserModel
from spiffworkflow_backend.services.authorization_service import AuthorizationService from spiffworkflow_backend.services.authorization_service import AuthorizationService
from spiffworkflow_backend.services.authorization_service import ( from spiffworkflow_backend.services.authorization_service import (
UserDoesNotHaveAccessToTaskError, UserDoesNotHaveAccessToTaskError,
@ -50,9 +52,14 @@ class TestProcessInstanceProcessor(BaseTest):
def test_sets_permission_correctly_on_active_task( def test_sets_permission_correctly_on_active_task(
self, self,
app: Flask, app: Flask,
client: FlaskClient,
with_db_and_bpmn_file_cleanup: None, with_db_and_bpmn_file_cleanup: None,
with_super_admin_user: UserModel,
) -> None: ) -> None:
"""Test_sets_permission_correctly_on_active_task.""" """Test_sets_permission_correctly_on_active_task."""
self.create_process_group(
client, with_super_admin_user, "test_group", "test_group"
)
initiator_user = self.find_or_create_user("initiator_user") initiator_user = self.find_or_create_user("initiator_user")
finance_user = self.find_or_create_user("testuser2") finance_user = self.find_or_create_user("testuser2")
assert initiator_user.principal is not None assert initiator_user.principal is not None
@ -63,7 +70,9 @@ class TestProcessInstanceProcessor(BaseTest):
assert finance_group is not None assert finance_group is not None
process_model = load_test_spec( process_model = load_test_spec(
process_model_id="model_with_lanes", bpmn_file_name="lanes.bpmn" process_model_id="test_group/model_with_lanes",
bpmn_file_name="lanes.bpmn",
process_model_source_directory="model_with_lanes",
) )
process_instance = self.create_process_instance_from_process_model( process_instance = self.create_process_instance_from_process_model(
process_model=process_model, user=initiator_user process_model=process_model, user=initiator_user
@ -123,9 +132,14 @@ class TestProcessInstanceProcessor(BaseTest):
def test_sets_permission_correctly_on_active_task_when_using_dict( def test_sets_permission_correctly_on_active_task_when_using_dict(
self, self,
app: Flask, app: Flask,
client: FlaskClient,
with_db_and_bpmn_file_cleanup: None, with_db_and_bpmn_file_cleanup: None,
with_super_admin_user: UserModel,
) -> None: ) -> None:
"""Test_sets_permission_correctly_on_active_task_when_using_dict.""" """Test_sets_permission_correctly_on_active_task_when_using_dict."""
self.create_process_group(
client, with_super_admin_user, "test_group", "test_group"
)
initiator_user = self.find_or_create_user("initiator_user") initiator_user = self.find_or_create_user("initiator_user")
finance_user_three = self.find_or_create_user("testuser3") finance_user_three = self.find_or_create_user("testuser3")
finance_user_four = self.find_or_create_user("testuser4") finance_user_four = self.find_or_create_user("testuser4")
@ -138,8 +152,9 @@ class TestProcessInstanceProcessor(BaseTest):
assert finance_group is not None assert finance_group is not None
process_model = load_test_spec( process_model = load_test_spec(
process_model_id="model_with_lanes", process_model_id="test_group/model_with_lanes",
bpmn_file_name="lanes_with_owner_dict.bpmn", bpmn_file_name="lanes_with_owner_dict.bpmn",
process_model_source_directory="model_with_lanes",
) )
process_instance = self.create_process_instance_from_process_model( process_instance = self.create_process_instance_from_process_model(
process_model=process_model, user=initiator_user process_model=process_model, user=initiator_user

View File

@ -128,8 +128,6 @@ def do_report_with_metadata_and_instances(
"""Do_report_with_metadata_and_instances.""" """Do_report_with_metadata_and_instances."""
process_instance_report = ProcessInstanceReportModel.create_with_attributes( process_instance_report = ProcessInstanceReportModel.create_with_attributes(
identifier="sure", identifier="sure",
process_group_identifier=process_instances[0].process_group_identifier,
process_model_identifier=process_instances[0].process_model_identifier,
report_metadata=report_metadata, report_metadata=report_metadata,
user=BaseTest.find_or_create_user(), user=BaseTest.find_or_create_user(),
) )

View File

@@ -1,11 +1,13 @@
 """Process Model."""
 from flask.app import Flask
+from flask.testing import FlaskClient
 from flask_bpmn.models.db import db
 from tests.spiffworkflow_backend.helpers.base_test import BaseTest
 from tests.spiffworkflow_backend.helpers.test_data import load_test_spec

 from spiffworkflow_backend.models.bpmn_process_id_lookup import BpmnProcessIdLookup
 from spiffworkflow_backend.models.process_model import ProcessModelInfo
+from spiffworkflow_backend.models.user import UserModel
 from spiffworkflow_backend.services.process_instance_processor import (
     ProcessInstanceProcessor,
 )
@@ -22,11 +24,19 @@ class TestProcessModel(BaseTest):
         assert process_model_one.files == []

     def test_can_run_process_model_with_call_activities_when_in_same_process_model_directory(
-        self, app: Flask, with_db_and_bpmn_file_cleanup: None
+        self,
+        app: Flask,
+        client: FlaskClient,
+        with_db_and_bpmn_file_cleanup: None,
+        with_super_admin_user: UserModel,
     ) -> None:
         """Test_can_run_process_model_with_call_activities."""
+        self.create_process_group(
+            client, with_super_admin_user, "test_group", "test_group"
+        )
         process_model = load_test_spec(
-            "call_activity_test",
+            "test_group/call_activity_test",
+            # bpmn_file_name="call_activity_test.bpmn",
             process_model_source_directory="call_activity_same_directory",
         )

@@ -38,11 +48,18 @@ class TestProcessModel(BaseTest):
         assert process_instance.status == "complete"

     def test_can_run_process_model_with_call_activities_when_not_in_same_directory(
-        self, app: Flask, with_db_and_bpmn_file_cleanup: None
+        self,
+        app: Flask,
+        client: FlaskClient,
+        with_db_and_bpmn_file_cleanup: None,
+        with_super_admin_user: UserModel,
     ) -> None:
         """Test_can_run_process_model_with_call_activities."""
+        self.create_process_group(
+            client, with_super_admin_user, "test_group", "test_group"
+        )
         process_model = load_test_spec(
-            "call_activity_nested",
+            "test_group/call_activity_nested",
             process_model_source_directory="call_activity_nested",
             bpmn_file_name="call_activity_nested",
         )
@@ -54,7 +71,7 @@ class TestProcessModel(BaseTest):
         ]
         for bpmn_file_name in bpmn_file_names:
             load_test_spec(
-                bpmn_file_name,
+                f"test_group/{bpmn_file_name}",
                 process_model_source_directory="call_activity_nested",
                 bpmn_file_name=bpmn_file_name,
             )
@@ -66,11 +83,18 @@ class TestProcessModel(BaseTest):
         assert process_instance.status == "complete"

     def test_can_run_process_model_with_call_activities_when_process_identifier_is_not_in_database(
-        self, app: Flask, with_db_and_bpmn_file_cleanup: None
+        self,
+        app: Flask,
+        client: FlaskClient,
+        with_db_and_bpmn_file_cleanup: None,
+        with_super_admin_user: UserModel,
     ) -> None:
         """Test_can_run_process_model_with_call_activities."""
+        self.create_process_group(
+            client, with_super_admin_user, "test_group", "test_group"
+        )
         process_model = load_test_spec(
-            "call_activity_nested",
+            "test_group/call_activity_nested",
             process_model_source_directory="call_activity_nested",
             bpmn_file_name="call_activity_nested",
         )
@@ -82,7 +106,7 @@ class TestProcessModel(BaseTest):
         ]
         for bpmn_file_name in bpmn_file_names:
             load_test_spec(
-                bpmn_file_name,
+                f"test_group/{bpmn_file_name}",
                 process_model_source_directory="call_activity_nested",
                 bpmn_file_name=bpmn_file_name,
             )
@@ -93,6 +117,7 @@ class TestProcessModel(BaseTest):
         # delete all of the id lookup items to force to processor to find the correct
         # process model when running the process
         db.session.query(BpmnProcessIdLookup).delete()
+        db.session.commit()
         processor = ProcessInstanceProcessor(process_instance)
         processor.do_engine_steps(save=True)
         assert process_instance.status == "complete"
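Pieced together from the hunks above, a sketch of how one of these call-activity tests reads end to end after the change. It assumes the fixtures and imports shown in this file's diff; the list of supporting BPMN file names and the user passed when creating the instance are not visible in the excerpt, so they are left as labeled placeholders here.

    self.create_process_group(
        client, with_super_admin_user, "test_group", "test_group"
    )
    process_model = load_test_spec(
        "test_group/call_activity_nested",
        process_model_source_directory="call_activity_nested",
        bpmn_file_name="call_activity_nested",
    )
    bpmn_file_names: list[str] = []  # supporting file names elided in the diff
    for bpmn_file_name in bpmn_file_names:
        load_test_spec(
            f"test_group/{bpmn_file_name}",
            process_model_source_directory="call_activity_nested",
            bpmn_file_name=bpmn_file_name,
        )
    process_instance = self.create_process_instance_from_process_model(
        process_model=process_model, user=with_super_admin_user  # user is illustrative
    )
    # deleting the cached process ids and committing forces the processor to
    # re-resolve the called process from the spec files on the next run
    db.session.query(BpmnProcessIdLookup).delete()
    db.session.commit()
    processor = ProcessInstanceProcessor(process_instance)
    processor.do_engine_steps(save=True)
    assert process_instance.status == "complete"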
View File
@@ -1,8 +1,10 @@
 """Test_process_model_service."""
 from flask import Flask
+from flask.testing import FlaskClient
 from tests.spiffworkflow_backend.helpers.base_test import BaseTest
 from tests.spiffworkflow_backend.helpers.test_data import load_test_spec

+from spiffworkflow_backend.models.user import UserModel
 from spiffworkflow_backend.services.process_model_service import ProcessModelService

@@ -10,11 +12,22 @@ class TestProcessModelService(BaseTest):
     """TestProcessModelService."""

     def test_can_update_specified_attributes(
-        self, app: Flask, with_db_and_bpmn_file_cleanup: None
+        self,
+        app: Flask,
+        client: FlaskClient,
+        with_db_and_bpmn_file_cleanup: None,
+        with_super_admin_user: UserModel,
     ) -> None:
         """Test_can_update_specified_attributes."""
-        process_model = load_test_spec("hello_world")
-        assert process_model.display_name == "hello_world"
+        self.create_process_group(
+            client, with_super_admin_user, "test_group", "test_group"
+        )
+        process_model = load_test_spec(
+            "test_group/hello_world",
+            bpmn_file_name="hello_world.bpmn",
+            process_model_source_directory="hello_world",
+        )
+        assert process_model.display_name == "test_group/hello_world"

         primary_process_id = process_model.primary_process_id
         assert primary_process_id == "Process_HelloWorld"
View File
@@ -1,10 +1,12 @@
 """Test_various_bpmn_constructs."""
 import pytest
 from flask.app import Flask
+from flask.testing import FlaskClient
 from flask_bpmn.api.api_error import ApiError
 from tests.spiffworkflow_backend.helpers.base_test import BaseTest
 from tests.spiffworkflow_backend.helpers.test_data import load_test_spec

+from spiffworkflow_backend.models.user import UserModel
 from spiffworkflow_backend.services.process_instance_processor import (
     ProcessInstanceProcessor,
 )
@@ -14,11 +16,18 @@ class TestOpenFile(BaseTest):
     """TestVariousBpmnConstructs."""

     def test_dot_notation(
-        self, app: Flask, with_db_and_bpmn_file_cleanup: None
+        self,
+        app: Flask,
+        client: FlaskClient,
+        with_db_and_bpmn_file_cleanup: None,
+        with_super_admin_user: UserModel,
     ) -> None:
         """Test_form_data_conversion_to_dot_dict."""
+        self.create_process_group(
+            client, with_super_admin_user, "test_group", "test_group"
+        )
         process_model = load_test_spec(
-            "dangerous",
+            "test_group/dangerous",
             bpmn_file_name="read_etc_passwd.bpmn",
             process_model_source_directory="dangerous-scripts",
         )
@@ -38,11 +47,18 @@ class TestImportModule(BaseTest):
     """TestVariousBpmnConstructs."""

     def test_dot_notation(
-        self, app: Flask, with_db_and_bpmn_file_cleanup: None
+        self,
+        app: Flask,
+        client: FlaskClient,
+        with_db_and_bpmn_file_cleanup: None,
+        with_super_admin_user: UserModel,
     ) -> None:
         """Test_form_data_conversion_to_dot_dict."""
+        self.create_process_group(
+            client, with_super_admin_user, "test_group", "test_group"
+        )
         process_model = load_test_spec(
-            "dangerous",
+            "test_group/dangerous",
             bpmn_file_name="read_env.bpmn",
             process_model_source_directory="dangerous-scripts",
         )
View File
@@ -1,8 +1,10 @@
 """Test Permissions."""
 from flask.app import Flask
+from flask.testing import FlaskClient
 from tests.spiffworkflow_backend.helpers.base_test import BaseTest
 from tests.spiffworkflow_backend.helpers.test_data import load_test_spec

+from spiffworkflow_backend.models.user import UserModel
 from spiffworkflow_backend.services.process_instance_processor import (
     ProcessInstanceProcessor,
 )
@@ -16,21 +18,31 @@ class TestScriptUnitTestRunner(BaseTest):
     def test_takes_data_and_returns_expected_result(
         self,
         app: Flask,
+        client: FlaskClient,
         with_db_and_bpmn_file_cleanup: None,
+        with_super_admin_user: UserModel,
     ) -> None:
         """Test_takes_data_and_returns_expected_result."""
         app.config["THREAD_LOCAL_DATA"].process_instance_id = None
         process_group_id = "test_logging_spiff_logger"
+        self.create_process_group(
+            client, with_super_admin_user, process_group_id, process_group_id
+        )
         process_model_id = "simple_script"
-        load_test_spec(process_model_id, process_group_id=process_group_id)
+        process_model_identifier = f"{process_group_id}/{process_model_id}"
+        load_test_spec(
+            process_model_identifier,
+            bpmn_file_name=process_model_id,
+            process_model_source_directory=process_model_id,
+        )

         bpmn_process_instance = (
             ProcessInstanceProcessor.get_bpmn_process_instance_from_process_model(
-                process_model_id, process_group_id
+                process_model_identifier
             )
         )
         task = ProcessInstanceProcessor.get_task_by_bpmn_identifier(
-            "Activity_RunScript", bpmn_process_instance
+            "Activity_CalculateNewData", bpmn_process_instance
         )
         assert task is not None

@@ -48,21 +60,32 @@ class TestScriptUnitTestRunner(BaseTest):
     def test_fails_when_expected_output_does_not_match_actual_output(
         self,
         app: Flask,
+        client: FlaskClient,
         with_db_and_bpmn_file_cleanup: None,
+        with_super_admin_user: UserModel,
     ) -> None:
         """Test_fails_when_expected_output_does_not_match_actual_output."""
         app.config["THREAD_LOCAL_DATA"].process_instance_id = None
         process_group_id = "test_logging_spiff_logger"
+        self.create_process_group(
+            client, with_super_admin_user, process_group_id, process_group_id
+        )
         process_model_id = "simple_script"
-        load_test_spec(process_model_id, process_group_id=process_group_id)
+        process_model_identifier = f"{process_group_id}/{process_model_id}"
+        load_test_spec(
+            process_model_identifier,
+            bpmn_file_name=process_model_id,
+            process_model_source_directory=process_model_id,
+        )

         bpmn_process_instance = (
             ProcessInstanceProcessor.get_bpmn_process_instance_from_process_model(
-                process_model_id, process_group_id
+                process_model_identifier
             )
         )
         task = ProcessInstanceProcessor.get_task_by_bpmn_identifier(
-            "Activity_RunScript", bpmn_process_instance
+            "Activity_CalculateNewData", bpmn_process_instance
         )
         assert task is not None

@@ -80,17 +103,28 @@ class TestScriptUnitTestRunner(BaseTest):
     def test_script_with_unit_tests_when_hey_is_passed_in(
         self,
         app: Flask,
+        client: FlaskClient,
         with_db_and_bpmn_file_cleanup: None,
+        with_super_admin_user: UserModel,
     ) -> None:
         """Test_script_with_unit_tests_when_hey_is_passed_in."""
         app.config["THREAD_LOCAL_DATA"].process_instance_id = None
         process_group_id = "script_with_unit_tests"
+        self.create_process_group(
+            client, with_super_admin_user, process_group_id, process_group_id
+        )
         process_model_id = "script_with_unit_tests"
-        load_test_spec(process_model_id, process_group_id=process_group_id)
+        process_model_identifier = f"{process_group_id}/{process_model_id}"
+        load_test_spec(
+            process_model_identifier,
+            bpmn_file_name=process_model_id,
+            process_model_source_directory=process_model_id,
+        )

         bpmn_process_instance = (
             ProcessInstanceProcessor.get_bpmn_process_instance_from_process_model(
-                process_model_id, process_group_id
+                process_model_identifier
             )
         )
         task = ProcessInstanceProcessor.get_task_by_bpmn_identifier(
@@ -110,17 +144,29 @@ class TestScriptUnitTestRunner(BaseTest):
     def test_script_with_unit_tests_when_hey_is_not_passed_in(
         self,
         app: Flask,
+        client: FlaskClient,
         with_db_and_bpmn_file_cleanup: None,
+        with_super_admin_user: UserModel,
     ) -> None:
         """Test_script_with_unit_tests_when_hey_is_not_passed_in."""
         app.config["THREAD_LOCAL_DATA"].process_instance_id = None
         process_group_id = "script_with_unit_tests"
+        self.create_process_group(
+            client, with_super_admin_user, process_group_id, process_group_id
+        )
         process_model_id = "script_with_unit_tests"
-        load_test_spec(process_model_id, process_group_id=process_group_id)
+        process_model_identifier = f"{process_group_id}/{process_model_id}"
+        load_test_spec(
+            process_model_identifier,
+            bpmn_file_name=process_model_id,
+            process_model_source_directory=process_model_id,
+        )

         bpmn_process_instance = (
             ProcessInstanceProcessor.get_bpmn_process_instance_from_process_model(
-                process_model_id, process_group_id
+                process_model_identifier
             )
         )
         task = ProcessInstanceProcessor.get_task_by_bpmn_identifier(
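For quick reference, a short sketch of the processor-helper calling convention after these hunks: a single full-path identifier is built once and passed where the old (process_model_id, process_group_id) pair used to go, and the lookups now target the Activity_CalculateNewData task. The values are taken from the simple_script test bodies above; the surrounding fixtures and imports are assumed from this file's diff.

    process_group_id = "test_logging_spiff_logger"
    process_model_id = "simple_script"
    process_model_identifier = f"{process_group_id}/{process_model_id}"
    bpmn_process_instance = (
        ProcessInstanceProcessor.get_bpmn_process_instance_from_process_model(
            process_model_identifier  # previously: process_model_id, process_group_id
        )
    )
    task = ProcessInstanceProcessor.get_task_by_bpmn_identifier(
        "Activity_CalculateNewData", bpmn_process_instance
    )
    assert task is not None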
View File
@@ -3,6 +3,7 @@ import os

 import pytest
 from flask import Flask
+from flask.testing import FlaskClient
 from flask_bpmn.api.api_error import ApiError
 from flask_bpmn.models.db import db
 from SpiffWorkflow.dmn.parser.BpmnDmnParser import BpmnDmnParser  # type: ignore
@@ -10,6 +11,7 @@ from tests.spiffworkflow_backend.helpers.base_test import BaseTest
 from tests.spiffworkflow_backend.helpers.test_data import load_test_spec

 from spiffworkflow_backend.models.bpmn_process_id_lookup import BpmnProcessIdLookup
+from spiffworkflow_backend.models.user import UserModel
 from spiffworkflow_backend.services.process_model_service import ProcessModelService
 from spiffworkflow_backend.services.spec_file_service import SpecFileService

@@ -17,18 +19,29 @@ from spiffworkflow_backend.services.spec_file_service import SpecFileService
 class TestSpecFileService(BaseTest):
     """TestSpecFileService."""

+    process_group_id = "test_process_group_id"
+    process_model_id = "call_activity_nested"
+    bpmn_file_name = "call_activity_nested.bpmn"
     call_activity_nested_relative_file_path = os.path.join(
-        "test_process_group_id", "call_activity_nested", "call_activity_nested.bpmn"
+        process_group_id, process_model_id, bpmn_file_name
     )

     def test_can_store_process_ids_for_lookup(
-        self, app: Flask, with_db_and_bpmn_file_cleanup: None
+        self,
+        app: Flask,
+        client: FlaskClient,
+        with_db_and_bpmn_file_cleanup: None,
+        with_super_admin_user: UserModel,
     ) -> None:
         """Test_can_store_process_ids_for_lookup."""
-        load_test_spec(
-            "call_activity_nested",
-            process_model_source_directory="call_activity_nested",
-            bpmn_file_name="call_activity_nested",
+        self.basic_test_setup(
+            client=client,
+            user=with_super_admin_user,
+            process_group_id=self.process_group_id,
+            process_model_id=self.process_model_id,
+            bpmn_file_name=self.bpmn_file_name,
+            bpmn_file_location="call_activity_nested",
         )
         bpmn_process_id_lookups = BpmnProcessIdLookup.query.all()
         assert len(bpmn_process_id_lookups) == 1
@@ -39,14 +52,21 @@ class TestSpecFileService(BaseTest):
         )

     def test_fails_to_save_duplicate_process_id(
-        self, app: Flask, with_db_and_bpmn_file_cleanup: None
+        self,
+        app: Flask,
+        client: FlaskClient,
+        with_db_and_bpmn_file_cleanup: None,
+        with_super_admin_user: UserModel,
     ) -> None:
         """Test_fails_to_save_duplicate_process_id."""
         bpmn_process_identifier = "Level1"
-        load_test_spec(
-            "call_activity_nested",
-            process_model_source_directory="call_activity_nested",
-            bpmn_file_name="call_activity_nested",
+        self.basic_test_setup(
+            client=client,
+            user=with_super_admin_user,
+            process_group_id=self.process_group_id,
+            process_model_id=self.process_model_id,
+            bpmn_file_name=self.bpmn_file_name,
+            bpmn_file_location=self.process_model_id,
         )
         bpmn_process_id_lookups = BpmnProcessIdLookup.query.all()
         assert len(bpmn_process_id_lookups) == 1
@@ -69,25 +89,30 @@ class TestSpecFileService(BaseTest):
         )

     def test_updates_relative_file_path_when_appropriate(
-        self, app: Flask, with_db_and_bpmn_file_cleanup: None
+        self,
+        app: Flask,
+        client: FlaskClient,
+        with_db_and_bpmn_file_cleanup: None,
+        with_super_admin_user: UserModel,
     ) -> None:
         """Test_updates_relative_file_path_when_appropriate."""
         bpmn_process_identifier = "Level1"
-        bpmn_file_relative_path = os.path.join(
-            "test_process_group_id", "call_activity_nested", "new_bpmn_file.bpmn"
-        )
         process_id_lookup = BpmnProcessIdLookup(
             bpmn_process_identifier=bpmn_process_identifier,
-            bpmn_file_relative_path=bpmn_file_relative_path,
+            bpmn_file_relative_path=self.call_activity_nested_relative_file_path,
         )
         db.session.add(process_id_lookup)
         db.session.commit()

-        load_test_spec(
-            "call_activity_nested",
-            process_model_source_directory="call_activity_nested",
-            bpmn_file_name="call_activity_nested",
+        self.basic_test_setup(
+            client=client,
+            user=with_super_admin_user,
+            process_group_id=self.process_group_id,
+            process_model_id=self.process_model_id,
+            bpmn_file_name=self.bpmn_file_name,
+            bpmn_file_location=self.process_model_id,
         )
         bpmn_process_id_lookups = BpmnProcessIdLookup.query.all()
         assert len(bpmn_process_id_lookups) == 1
         assert (
@@ -100,7 +125,11 @@ class TestSpecFileService(BaseTest):
         )

     def test_load_reference_information(
-        self, app: Flask, with_db_and_bpmn_file_cleanup: None
+        self,
+        app: Flask,
+        client: FlaskClient,
+        with_db_and_bpmn_file_cleanup: None,
+        with_super_admin_user: UserModel,
     ) -> None:
         """Test_load_reference_information.

@@ -113,12 +142,22 @@ class TestSpecFileService(BaseTest):
         a DMN file can (theoretically) contain many decisions. So this
         is an array.
         """
-        load_test_spec(
-            "call_activity_nested",
-            process_model_source_directory="call_activity_nested",
+        process_group_id = "test_group"
+        process_model_id = "call_activity_nested"
+        process_model_identifier = self.basic_test_setup(
+            client=client,
+            user=with_super_admin_user,
+            process_group_id=process_group_id,
+            process_model_id=process_model_id,
+            # bpmn_file_name=bpmn_file_name,
+            bpmn_file_location=process_model_id,
         )
+        # load_test_spec(
+        #     ,
+        #     process_model_source_directory="call_activity_nested",
+        # )

         process_model_info = ProcessModelService().get_process_model(
-            "call_activity_nested"
+            process_model_identifier
         )
         files = SpecFileService.get_files(process_model_info)
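A sketch of the shared basic_test_setup call these spec-file tests now use. The keyword names and class attributes are taken from the hunks above; the helper is presumably defined on BaseTest, and its return value is treated as the full process model identifier, as in test_load_reference_information.

    process_model_identifier = self.basic_test_setup(
        client=client,
        user=with_super_admin_user,
        process_group_id=self.process_group_id,  # "test_process_group_id"
        process_model_id=self.process_model_id,  # "call_activity_nested"
        bpmn_file_name=self.bpmn_file_name,  # "call_activity_nested.bpmn"
        bpmn_file_location=self.process_model_id,
    )
    process_model_info = ProcessModelService().get_process_model(
        process_model_identifier
    )
    files = SpecFileService.get_files(process_model_info)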
View File
@@ -1,23 +1,35 @@
 """Test_various_bpmn_constructs."""
 from flask.app import Flask
+from flask.testing import FlaskClient
 from tests.spiffworkflow_backend.helpers.base_test import BaseTest
-from tests.spiffworkflow_backend.helpers.test_data import load_test_spec

+from spiffworkflow_backend.models.user import UserModel
 from spiffworkflow_backend.services.process_instance_processor import (
     ProcessInstanceProcessor,
 )
+from spiffworkflow_backend.services.process_model_service import ProcessModelService


 class TestVariousBpmnConstructs(BaseTest):
     """TestVariousBpmnConstructs."""

     def test_running_process_with_timer_intermediate_catch_event(
-        self, app: Flask, with_db_and_bpmn_file_cleanup: None
+        self,
+        app: Flask,
+        client: FlaskClient,
+        with_db_and_bpmn_file_cleanup: None,
+        with_super_admin_user: UserModel,
     ) -> None:
         """Test_running_process_with_timer_intermediate_catch_event."""
-        process_model = load_test_spec(
-            "timers_intermediate_catch_event",
-            process_model_source_directory="timer_intermediate_catch_event",
+        process_model_identifier = self.basic_test_setup(
+            client,
+            with_super_admin_user,
+            "test_group",
+            "timer_intermediate_catch_event",
+        )
+        process_model = ProcessModelService().get_process_model(
+            process_model_id=process_model_identifier
         )
         process_instance = self.create_process_instance_from_process_model(