diff --git a/.tool-versions b/.tool-versions
index 7e78d9af5..a7b6ef2e7 100644
--- a/.tool-versions
+++ b/.tool-versions
@@ -1 +1 @@
-python 3.10.4
+python 3.11.0
diff --git a/Dockerfile b/Dockerfile
index 6c3fabcf6..6c533206f 100644
--- a/Dockerfile
+++ b/Dockerfile
@@ -1,4 +1,4 @@
-FROM ghcr.io/sartography/python:3.10
+FROM ghcr.io/sartography/python:3.11
RUN pip install poetry
RUN useradd _gunicorn --no-create-home --user-group
diff --git a/bin/update_all_json.py b/bin/update_all_json.py
new file mode 100644
index 000000000..4e6b1b8aa
--- /dev/null
+++ b/bin/update_all_json.py
@@ -0,0 +1,22 @@
+"""Updates all JSON files, based on the current state of BPMN_SPEC_ABSOLUTE_DIR."""
+from spiffworkflow_backend import get_hacked_up_app_for_script
+from spiffworkflow_backend.services.process_model_service import ProcessModelService
+
+
+def main() -> None:
+ """Main."""
+ app = get_hacked_up_app_for_script()
+ with app.app_context():
+
+ groups = ProcessModelService().get_process_groups()
+ for group in groups:
+ for process_model in group.process_models:
+ update_items = {
+ "process_group_id": "",
+ "id": f"{group.id}/{process_model.id}",
+ }
+ ProcessModelService().update_spec(process_model, update_items)
+
+
+if __name__ == "__main__":
+ main()
diff --git a/conftest.py b/conftest.py
index 87da7c8d9..d73693c23 100644
--- a/conftest.py
+++ b/conftest.py
@@ -4,10 +4,10 @@ import shutil
import pytest
from flask.app import Flask
+from flask.testing import FlaskClient
from flask_bpmn.models.db import db
from flask_bpmn.models.db import SpiffworkflowBaseDBModel
from tests.spiffworkflow_backend.helpers.base_test import BaseTest
-from tests.spiffworkflow_backend.helpers.test_data import load_test_spec
from spiffworkflow_backend.models.active_task_user import ActiveTaskUserModel
from spiffworkflow_backend.models.process_instance import ProcessInstanceModel
@@ -20,6 +20,8 @@ from spiffworkflow_backend.services.process_instance_service import (
)
from spiffworkflow_backend.services.process_model_service import ProcessModelService
+# from tests.spiffworkflow_backend.helpers.test_data import load_test_spec
+
# We need to call this before importing spiffworkflow_backend
# otherwise typeguard cannot work. hence the noqa: E402
@@ -66,17 +68,37 @@ def with_super_admin_user() -> UserModel:
@pytest.fixture()
-def setup_process_instances_for_reports() -> list[ProcessInstanceModel]:
+def setup_process_instances_for_reports(
+ client: FlaskClient, with_super_admin_user: UserModel
+) -> list[ProcessInstanceModel]:
"""Setup_process_instances_for_reports."""
- user = BaseTest.find_or_create_user()
+ user = with_super_admin_user
process_group_id = "runs_without_input"
process_model_id = "sample"
- load_test_spec(process_group_id=process_group_id, process_model_id=process_model_id)
+ # bpmn_file_name = "sample.bpmn"
+ bpmn_file_location = "sample"
+ process_model_identifier = BaseTest().basic_test_setup(
+ client,
+ with_super_admin_user,
+ process_group_id=process_group_id,
+ process_model_id=process_model_id,
+ # bpmn_file_name=bpmn_file_name,
+ bpmn_file_location=bpmn_file_location,
+ )
+
+ # BaseTest().create_process_group(
+ # client=client, user=user, process_group_id=process_group_id, display_name=process_group_id
+ # )
+ # process_model_id = "runs_without_input/sample"
+ # load_test_spec(
+ # process_model_id=f"{process_group_id}/{process_model_id}",
+ # process_model_source_directory="sample"
+ # )
process_instances = []
for data in [kay(), ray(), jay()]:
process_instance = ProcessInstanceService.create_process_instance(
- process_group_identifier=process_group_id,
- process_model_identifier=process_model_id,
+ # process_group_identifier=process_group_id,
+ process_model_identifier=process_model_identifier,
user=user,
)
processor = ProcessInstanceProcessor(process_instance)
diff --git a/migrations/versions/b1647eff45c9_.py b/migrations/versions/50dd2e016d94_.py
similarity index 94%
rename from migrations/versions/b1647eff45c9_.py
rename to migrations/versions/50dd2e016d94_.py
index d6ff25e3b..a702c5a4b 100644
--- a/migrations/versions/b1647eff45c9_.py
+++ b/migrations/versions/50dd2e016d94_.py
@@ -1,8 +1,8 @@
"""empty message
-Revision ID: b1647eff45c9
+Revision ID: 50dd2e016d94
Revises:
-Create Date: 2022-11-02 14:25:09.992800
+Create Date: 2022-11-08 16:28:18.991635
"""
from alembic import op
@@ -10,7 +10,7 @@ import sqlalchemy as sa
# revision identifiers, used by Alembic.
-revision = 'b1647eff45c9'
+revision = '50dd2e016d94'
down_revision = None
branch_labels = None
depends_on = None
@@ -95,7 +95,7 @@ def upgrade():
)
op.create_table('process_instance',
sa.Column('id', sa.Integer(), nullable=False),
- sa.Column('process_model_identifier', sa.String(length=50), nullable=False),
+ sa.Column('process_model_identifier', sa.String(length=255), nullable=False),
sa.Column('process_group_identifier', sa.String(length=50), nullable=False),
sa.Column('process_initiator_id', sa.Integer(), nullable=False),
sa.Column('bpmn_json', sa.JSON(), nullable=True),
@@ -115,19 +115,16 @@ def upgrade():
op.create_table('process_instance_report',
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('identifier', sa.String(length=50), nullable=False),
- sa.Column('process_model_identifier', sa.String(length=50), nullable=False),
- sa.Column('process_group_identifier', sa.String(length=50), nullable=False),
sa.Column('report_metadata', sa.JSON(), nullable=True),
sa.Column('created_by_id', sa.Integer(), nullable=False),
sa.Column('created_at_in_seconds', sa.Integer(), nullable=True),
sa.Column('updated_at_in_seconds', sa.Integer(), nullable=True),
sa.ForeignKeyConstraint(['created_by_id'], ['user.id'], ),
sa.PrimaryKeyConstraint('id'),
- sa.UniqueConstraint('process_group_identifier', 'process_model_identifier', 'identifier', name='process_instance_report_unique')
+ sa.UniqueConstraint('created_by_id', 'identifier', name='process_instance_report_unique')
)
+ op.create_index(op.f('ix_process_instance_report_created_by_id'), 'process_instance_report', ['created_by_id'], unique=False)
op.create_index(op.f('ix_process_instance_report_identifier'), 'process_instance_report', ['identifier'], unique=False)
- op.create_index(op.f('ix_process_instance_report_process_group_identifier'), 'process_instance_report', ['process_group_identifier'], unique=False)
- op.create_index(op.f('ix_process_instance_report_process_model_identifier'), 'process_instance_report', ['process_model_identifier'], unique=False)
op.create_table('refresh_token',
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('user_id', sa.Integer(), nullable=False),
@@ -292,9 +289,8 @@ def downgrade():
op.drop_table('user_group_assignment')
op.drop_table('secret')
op.drop_table('refresh_token')
- op.drop_index(op.f('ix_process_instance_report_process_model_identifier'), table_name='process_instance_report')
- op.drop_index(op.f('ix_process_instance_report_process_group_identifier'), table_name='process_instance_report')
op.drop_index(op.f('ix_process_instance_report_identifier'), table_name='process_instance_report')
+ op.drop_index(op.f('ix_process_instance_report_created_by_id'), table_name='process_instance_report')
op.drop_table('process_instance_report')
op.drop_index(op.f('ix_process_instance_process_model_identifier'), table_name='process_instance')
op.drop_index(op.f('ix_process_instance_process_group_identifier'), table_name='process_instance')
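
The reworked process_instance_report schema above drops the per-group/per-model scoping, so report uniqueness is now per creator: two users may each own a report named "default", but one user cannot own two reports with the same identifier. A minimal sketch of that constraint in SQLAlchemy terms, standalone and using only the column and constraint names from the migration:

import sqlalchemy as sa

# Mirrors the unique constraint created in the migration above.
process_instance_report_unique = sa.UniqueConstraint(
    "created_by_id",
    "identifier",
    name="process_instance_report_unique",
)
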
diff --git a/poetry.lock b/poetry.lock
index 8c9186309..cf161845e 100644
--- a/poetry.lock
+++ b/poetry.lock
@@ -80,8 +80,7 @@ python-versions = ">=3.7.2"
[package.dependencies]
lazy-object-proxy = ">=1.4.0"
-typing-extensions = {version = ">=3.10", markers = "python_version < \"3.10\""}
-wrapt = {version = ">=1.11,<2", markers = "python_version < \"3.11\""}
+wrapt = {version = ">=1.14,<2", markers = "python_version >= \"3.11\""}
[[package]]
name = "attrs"
@@ -95,7 +94,7 @@ python-versions = ">=3.5"
dev = ["cloudpickle", "coverage[toml] (>=5.0.2)", "furo", "hypothesis", "mypy (>=0.900,!=0.940)", "pre-commit", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins", "sphinx", "sphinx-notfound-page", "zope.interface"]
docs = ["furo", "sphinx", "sphinx-notfound-page", "zope.interface"]
tests = ["cloudpickle", "coverage[toml] (>=5.0.2)", "hypothesis", "mypy (>=0.900,!=0.940)", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins", "zope.interface"]
-tests_no_zope = ["cloudpickle", "coverage[toml] (>=5.0.2)", "hypothesis", "mypy (>=0.900,!=0.940)", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins"]
+tests-no-zope = ["cloudpickle", "coverage[toml] (>=5.0.2)", "hypothesis", "mypy (>=0.900,!=0.940)", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins"]
[[package]]
name = "Babel"
@@ -175,8 +174,6 @@ click = ">=8.0.0"
mypy-extensions = ">=0.4.3"
pathspec = ">=0.9.0"
platformdirs = ">=2"
-tomli = {version = ">=1.1.0", markers = "python_full_version < \"3.11.0a7\""}
-typing-extensions = {version = ">=3.10.0.0", markers = "python_version < \"3.10\""}
[package.extras]
colorama = ["colorama (>=0.4.3)"]
@@ -268,7 +265,7 @@ optional = false
python-versions = ">=3.6.0"
[package.extras]
-unicode_backport = ["unicodedata2"]
+unicode-backport = ["unicodedata2"]
[[package]]
name = "classify-imports"
@@ -394,9 +391,6 @@ category = "dev"
optional = false
python-versions = ">=3.7"
-[package.dependencies]
-tomli = {version = "*", optional = true, markers = "python_full_version <= \"3.11.0a6\" and extra == \"toml\""}
-
[package.extras]
toml = ["tomli"]
@@ -574,7 +568,6 @@ python-versions = ">=3.7"
[package.dependencies]
click = ">=8.0"
-importlib-metadata = {version = ">=3.6.0", markers = "python_version < \"3.10\""}
itsdangerous = ">=2.0"
Jinja2 = ">=3.0"
Werkzeug = ">=2.2.2"
@@ -639,7 +632,7 @@ werkzeug = "*"
type = "git"
url = "https://github.com/sartography/flask-bpmn"
reference = "main"
-resolved_reference = "191f0f32798720c9ce1e5307732c90ac26433298"
+resolved_reference = "17434e0907cc35914d013614bb79288eed1bd437"
[[package]]
name = "Flask-Cors"
@@ -818,22 +811,6 @@ category = "main"
optional = false
python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*"
-[[package]]
-name = "importlib-metadata"
-version = "4.13.0"
-description = "Read metadata from Python packages"
-category = "main"
-optional = false
-python-versions = ">=3.7"
-
-[package.dependencies]
-zipp = ">=0.5"
-
-[package.extras]
-docs = ["furo", "jaraco.packaging (>=9)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)"]
-perf = ["ipython"]
-testing = ["flake8 (<5)", "flufl.flake8", "importlib-resources (>=1.3)", "packaging", "pyfakefs", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=1.3)", "pytest-flake8", "pytest-mypy (>=0.9.1)", "pytest-perf (>=0.9.2)"]
-
[[package]]
name = "inflection"
version = "0.5.1"
@@ -1067,7 +1044,6 @@ python-versions = ">=3.7"
[package.dependencies]
mypy-extensions = ">=0.4.3"
-tomli = {version = ">=1.1.0", markers = "python_version < \"3.11\""}
typing-extensions = ">=3.10"
[package.extras]
@@ -1206,7 +1182,6 @@ python-versions = ">=3.7"
[package.dependencies]
"ruamel.yaml" = ">=0.15"
-tomli = {version = ">=1.1.0", markers = "python_version < \"3.11\""}
[[package]]
name = "prompt-toolkit"
@@ -1512,7 +1487,7 @@ urllib3 = ">=1.21.1,<1.27"
[package.extras]
socks = ["PySocks (>=1.5.6,!=1.5.7)"]
-use_chardet_on_py3 = ["chardet (>=3.0.2,<6)"]
+use-chardet-on-py3 = ["chardet (>=3.0.2,<6)"]
[[package]]
name = "requests-toolbelt"
@@ -1526,12 +1501,12 @@ python-versions = "*"
requests = ">=2.0.1,<3.0.0"
[[package]]
-name = "RestrictedPython"
-version = "5.2"
+name = "restrictedpython"
+version = "6.0"
description = "RestrictedPython is a defined subset of the Python language which allows to provide a program input into a trusted environment."
category = "main"
optional = false
-python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, <3.11"
+python-versions = ">=3.6, <3.12"
[package.extras]
docs = ["Sphinx", "sphinx-rtd-theme"]
@@ -1567,21 +1542,10 @@ category = "dev"
optional = false
python-versions = ">=3"
-[package.dependencies]
-"ruamel.yaml.clib" = {version = ">=0.2.6", markers = "platform_python_implementation == \"CPython\" and python_version < \"3.11\""}
-
[package.extras]
docs = ["ryd"]
jinja2 = ["ruamel.yaml.jinja2 (>=0.2)"]
-[[package]]
-name = "ruamel.yaml.clib"
-version = "0.2.6"
-description = "C version of reader, parser and emitter for ruamel.yaml derived from libyaml"
-category = "dev"
-optional = false
-python-versions = ">=3.5"
-
[[package]]
name = "safety"
version = "2.3.1"
@@ -1625,7 +1589,7 @@ falcon = ["falcon (>=1.4)"]
fastapi = ["fastapi (>=0.79.0)"]
flask = ["blinker (>=1.1)", "flask (>=0.11)"]
httpx = ["httpx (>=0.16.0)"]
-pure_eval = ["asttokens", "executing", "pure-eval"]
+pure-eval = ["asttokens", "executing", "pure-eval"]
pyspark = ["pyspark (>=2.4.4)"]
quart = ["blinker (>=1.1)", "quart (>=0.16.1)"]
rq = ["rq (>=0.6)"]
@@ -1701,7 +1665,6 @@ babel = ">=2.9"
colorama = {version = ">=0.4.5", markers = "sys_platform == \"win32\""}
docutils = ">=0.14,<0.20"
imagesize = ">=1.3"
-importlib-metadata = {version = ">=4.8", markers = "python_version < \"3.10\""}
Jinja2 = ">=3.0"
packaging = ">=21.0"
Pygments = ">=2.12"
@@ -1889,19 +1852,19 @@ aiomysql = ["aiomysql", "greenlet (!=0.4.17)"]
aiosqlite = ["aiosqlite", "greenlet (!=0.4.17)", "typing_extensions (!=3.10.0.1)"]
asyncio = ["greenlet (!=0.4.17)"]
asyncmy = ["asyncmy (>=0.2.3,!=0.2.4)", "greenlet (!=0.4.17)"]
-mariadb_connector = ["mariadb (>=1.0.1,!=1.1.2)"]
+mariadb-connector = ["mariadb (>=1.0.1,!=1.1.2)"]
mssql = ["pyodbc"]
-mssql_pymssql = ["pymssql"]
-mssql_pyodbc = ["pyodbc"]
+mssql-pymssql = ["pymssql"]
+mssql-pyodbc = ["pyodbc"]
mypy = ["mypy (>=0.910)", "sqlalchemy2-stubs"]
mysql = ["mysqlclient (>=1.4.0)", "mysqlclient (>=1.4.0,<2)"]
-mysql_connector = ["mysql-connector-python"]
+mysql-connector = ["mysql-connector-python"]
oracle = ["cx_oracle (>=7)", "cx_oracle (>=7,<8)"]
postgresql = ["psycopg2 (>=2.7)"]
-postgresql_asyncpg = ["asyncpg", "greenlet (!=0.4.17)"]
-postgresql_pg8000 = ["pg8000 (>=1.16.6,!=1.29.0)"]
-postgresql_psycopg2binary = ["psycopg2-binary"]
-postgresql_psycopg2cffi = ["psycopg2cffi"]
+postgresql-asyncpg = ["asyncpg", "greenlet (!=0.4.17)"]
+postgresql-pg8000 = ["pg8000 (>=1.16.6,!=1.29.0)"]
+postgresql-psycopg2binary = ["psycopg2-binary"]
+postgresql-psycopg2cffi = ["psycopg2cffi"]
pymysql = ["pymysql", "pymysql (<1)"]
sqlcipher = ["sqlcipher3_binary"]
@@ -2239,22 +2202,10 @@ runtime-strict = ["six (==1.11.0)"]
tests = ["cmake", "codecov", "ninja", "pybind11", "pytest", "pytest", "pytest", "pytest", "pytest", "pytest", "pytest-cov", "pytest-cov", "pytest-cov", "pytest-cov", "scikit-build", "typing"]
tests-strict = ["cmake (==3.21.2)", "codecov (==2.0.15)", "ninja (==1.10.2)", "pybind11 (==2.7.1)", "pytest (==4.6.0)", "pytest (==4.6.0)", "pytest (==4.6.0)", "pytest (==4.6.0)", "pytest (==4.6.0)", "pytest (==6.2.5)", "pytest-cov (==2.8.1)", "pytest-cov (==2.8.1)", "pytest-cov (==2.9.0)", "pytest-cov (==3.0.0)", "scikit-build (==0.11.1)", "typing (==3.7.4)"]
-[[package]]
-name = "zipp"
-version = "3.9.0"
-description = "Backport of pathlib-compatible object wrapper for zip files"
-category = "main"
-optional = false
-python-versions = ">=3.7"
-
-[package.extras]
-docs = ["furo", "jaraco.packaging (>=9)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)"]
-testing = ["flake8 (<5)", "func-timeout", "jaraco.functools", "jaraco.itertools", "more-itertools", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=1.3)", "pytest-flake8", "pytest-mypy (>=0.9.1)"]
-
[metadata]
lock-version = "1.1"
-python-versions = ">=3.9,<3.11"
-content-hash = "995be3a9a60b515b281f017ff32ff27a52ca178b1980611b348dccac6afb6b89"
+python-versions = ">=3.11,<3.12"
+content-hash = "1ba9277969015f0ef348dccb79e9977e20665720958f7ba22360398fba9da092"
[metadata.files]
alabaster = [
@@ -2636,10 +2587,6 @@ imagesize = [
{file = "imagesize-1.4.1-py2.py3-none-any.whl", hash = "sha256:0d8d18d08f840c19d0ee7ca1fd82490fdc3729b7ac93f49870406ddde8ef8d8b"},
{file = "imagesize-1.4.1.tar.gz", hash = "sha256:69150444affb9cb0d5cc5a92b3676f0b2fb7cd9ae39e947a5e11a36b4497cd4a"},
]
-importlib-metadata = [
- {file = "importlib_metadata-4.13.0-py3-none-any.whl", hash = "sha256:8a8a81bcf996e74fee46f0d16bd3eaa382a7eb20fd82445c3ad11f4090334116"},
- {file = "importlib_metadata-4.13.0.tar.gz", hash = "sha256:dd0173e8f150d6815e098fd354f6414b0f079af4644ddfe90c71e2fc6174346d"},
-]
inflection = [
{file = "inflection-0.5.1-py2.py3-none-any.whl", hash = "sha256:f38b2b640938a4f35ade69ac3d053042959b62a0f1076a5bbaa1b9526605a8a2"},
{file = "inflection-0.5.1.tar.gz", hash = "sha256:1a29730d366e996aaacffb2f1f1cb9593dc38e2ddd30c91250c6dde09ea9b417"},
@@ -3287,9 +3234,9 @@ requests-toolbelt = [
{file = "requests-toolbelt-0.9.1.tar.gz", hash = "sha256:968089d4584ad4ad7c171454f0a5c6dac23971e9472521ea3b6d49d610aa6fc0"},
{file = "requests_toolbelt-0.9.1-py2.py3-none-any.whl", hash = "sha256:380606e1d10dc85c3bd47bf5a6095f815ec007be7a8b69c878507068df059e6f"},
]
-RestrictedPython = [
- {file = "RestrictedPython-5.2-py2.py3-none-any.whl", hash = "sha256:fdf8621034c5dcb990a2a198f232f66b2d48866dd16d848e00ac7d187ae452ba"},
- {file = "RestrictedPython-5.2.tar.gz", hash = "sha256:634da1f6c5c122a262f433b083ee3d17a9a039f8f1b3778597efb47461cd361b"},
+restrictedpython = [
+ {file = "RestrictedPython-6.0-py3-none-any.whl", hash = "sha256:3479303f7bff48a7dedad76f96e7704993c5e86c5adbd67f607295d5352f0fb8"},
+ {file = "RestrictedPython-6.0.tar.gz", hash = "sha256:405cf0bd9eec2f19b1326b5f48228efe56d6590b4e91826b8cc3b2cd400a96ad"},
]
restructuredtext-lint = [
{file = "restructuredtext_lint-1.4.0.tar.gz", hash = "sha256:1b235c0c922341ab6c530390892eb9e92f90b9b75046063e047cacfb0f050c45"},
@@ -3302,38 +3249,6 @@ rsa = [
{file = "ruamel.yaml-0.17.21-py3-none-any.whl", hash = "sha256:742b35d3d665023981bd6d16b3d24248ce5df75fdb4e2924e93a05c1f8b61ca7"},
{file = "ruamel.yaml-0.17.21.tar.gz", hash = "sha256:8b7ce697a2f212752a35c1ac414471dc16c424c9573be4926b56ff3f5d23b7af"},
]
-"ruamel.yaml.clib" = [
- {file = "ruamel.yaml.clib-0.2.6-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:6e7be2c5bcb297f5b82fee9c665eb2eb7001d1050deaba8471842979293a80b0"},
- {file = "ruamel.yaml.clib-0.2.6-cp310-cp310-manylinux2014_aarch64.whl", hash = "sha256:066f886bc90cc2ce44df8b5f7acfc6a7e2b2e672713f027136464492b0c34d7c"},
- {file = "ruamel.yaml.clib-0.2.6-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:221eca6f35076c6ae472a531afa1c223b9c29377e62936f61bc8e6e8bdc5f9e7"},
- {file = "ruamel.yaml.clib-0.2.6-cp310-cp310-win32.whl", hash = "sha256:1070ba9dd7f9370d0513d649420c3b362ac2d687fe78c6e888f5b12bf8bc7bee"},
- {file = "ruamel.yaml.clib-0.2.6-cp310-cp310-win_amd64.whl", hash = "sha256:77df077d32921ad46f34816a9a16e6356d8100374579bc35e15bab5d4e9377de"},
- {file = "ruamel.yaml.clib-0.2.6-cp35-cp35m-macosx_10_6_intel.whl", hash = "sha256:cfdb9389d888c5b74af297e51ce357b800dd844898af9d4a547ffc143fa56751"},
- {file = "ruamel.yaml.clib-0.2.6-cp35-cp35m-manylinux1_x86_64.whl", hash = "sha256:7b2927e92feb51d830f531de4ccb11b320255ee95e791022555971c466af4527"},
- {file = "ruamel.yaml.clib-0.2.6-cp35-cp35m-win32.whl", hash = "sha256:ada3f400d9923a190ea8b59c8f60680c4ef8a4b0dfae134d2f2ff68429adfab5"},
- {file = "ruamel.yaml.clib-0.2.6-cp35-cp35m-win_amd64.whl", hash = "sha256:de9c6b8a1ba52919ae919f3ae96abb72b994dd0350226e28f3686cb4f142165c"},
- {file = "ruamel.yaml.clib-0.2.6-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:d67f273097c368265a7b81e152e07fb90ed395df6e552b9fa858c6d2c9f42502"},
- {file = "ruamel.yaml.clib-0.2.6-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:72a2b8b2ff0a627496aad76f37a652bcef400fd861721744201ef1b45199ab78"},
- {file = "ruamel.yaml.clib-0.2.6-cp36-cp36m-manylinux2014_aarch64.whl", hash = "sha256:d3c620a54748a3d4cf0bcfe623e388407c8e85a4b06b8188e126302bcab93ea8"},
- {file = "ruamel.yaml.clib-0.2.6-cp36-cp36m-win32.whl", hash = "sha256:9efef4aab5353387b07f6b22ace0867032b900d8e91674b5d8ea9150db5cae94"},
- {file = "ruamel.yaml.clib-0.2.6-cp36-cp36m-win_amd64.whl", hash = "sha256:846fc8336443106fe23f9b6d6b8c14a53d38cef9a375149d61f99d78782ea468"},
- {file = "ruamel.yaml.clib-0.2.6-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:0847201b767447fc33b9c235780d3aa90357d20dd6108b92be544427bea197dd"},
- {file = "ruamel.yaml.clib-0.2.6-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:78988ed190206672da0f5d50c61afef8f67daa718d614377dcd5e3ed85ab4a99"},
- {file = "ruamel.yaml.clib-0.2.6-cp37-cp37m-manylinux2014_aarch64.whl", hash = "sha256:210c8fcfeff90514b7133010bf14e3bad652c8efde6b20e00c43854bf94fa5a6"},
- {file = "ruamel.yaml.clib-0.2.6-cp37-cp37m-win32.whl", hash = "sha256:a49e0161897901d1ac9c4a79984b8410f450565bbad64dbfcbf76152743a0cdb"},
- {file = "ruamel.yaml.clib-0.2.6-cp37-cp37m-win_amd64.whl", hash = "sha256:bf75d28fa071645c529b5474a550a44686821decebdd00e21127ef1fd566eabe"},
- {file = "ruamel.yaml.clib-0.2.6-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:a32f8d81ea0c6173ab1b3da956869114cae53ba1e9f72374032e33ba3118c233"},
- {file = "ruamel.yaml.clib-0.2.6-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:7f7ecb53ae6848f959db6ae93bdff1740e651809780822270eab111500842a84"},
- {file = "ruamel.yaml.clib-0.2.6-cp38-cp38-manylinux2014_aarch64.whl", hash = "sha256:61bc5e5ca632d95925907c569daa559ea194a4d16084ba86084be98ab1cec1c6"},
- {file = "ruamel.yaml.clib-0.2.6-cp38-cp38-win32.whl", hash = "sha256:89221ec6d6026f8ae859c09b9718799fea22c0e8da8b766b0b2c9a9ba2db326b"},
- {file = "ruamel.yaml.clib-0.2.6-cp38-cp38-win_amd64.whl", hash = "sha256:31ea73e564a7b5fbbe8188ab8b334393e06d997914a4e184975348f204790277"},
- {file = "ruamel.yaml.clib-0.2.6-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:dc6a613d6c74eef5a14a214d433d06291526145431c3b964f5e16529b1842bed"},
- {file = "ruamel.yaml.clib-0.2.6-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:1866cf2c284a03b9524a5cc00daca56d80057c5ce3cdc86a52020f4c720856f0"},
- {file = "ruamel.yaml.clib-0.2.6-cp39-cp39-manylinux2014_aarch64.whl", hash = "sha256:1b4139a6ffbca8ef60fdaf9b33dec05143ba746a6f0ae0f9d11d38239211d335"},
- {file = "ruamel.yaml.clib-0.2.6-cp39-cp39-win32.whl", hash = "sha256:3fb9575a5acd13031c57a62cc7823e5d2ff8bc3835ba4d94b921b4e6ee664104"},
- {file = "ruamel.yaml.clib-0.2.6-cp39-cp39-win_amd64.whl", hash = "sha256:825d5fccef6da42f3c8eccd4281af399f21c02b32d98e113dbc631ea6a6ecbc7"},
- {file = "ruamel.yaml.clib-0.2.6.tar.gz", hash = "sha256:4ff604ce439abb20794f05613c374759ce10e3595d1867764dd1ae675b85acbd"},
-]
safety = [
{file = "safety-2.3.1-py3-none-any.whl", hash = "sha256:8f098d12b607db2756886280e85c28ece8db1bba4f45fc5f981f4663217bd619"},
{file = "safety-2.3.1.tar.gz", hash = "sha256:6e6fcb7d4e8321098cf289f59b65051cafd3467f089c6e57c9f894ae32c23b71"},
@@ -3705,7 +3620,3 @@ xdoctest = [
{file = "xdoctest-1.1.0-py3-none-any.whl", hash = "sha256:da330c4dacee51f3c785820bc743188fb6f7c64c5fa1c54bff8836b3cf23d69b"},
{file = "xdoctest-1.1.0.tar.gz", hash = "sha256:0fd4fad7932f0a2f082dfdfb857dd6ca41603757586c39b1e5b4d333fc389f8a"},
]
-zipp = [
- {file = "zipp-3.9.0-py3-none-any.whl", hash = "sha256:972cfa31bc2fedd3fa838a51e9bc7e64b7fb725a8c00e7431554311f180e9980"},
- {file = "zipp-3.9.0.tar.gz", hash = "sha256:3a7af91c3db40ec72dd9d154ae18e008c69efe8ca88dde4f9a731bb82fe2f9eb"},
-]
diff --git a/pyproject.toml b/pyproject.toml
index 7f2d09a40..abb6d175f 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -16,7 +16,7 @@ classifiers = [
Changelog = "https://github.com/sartography/spiffworkflow-backend/releases"
[tool.poetry.dependencies]
-python = ">=3.9,<3.11"
+python = ">=3.11,<3.12"
click = "^8.0.1"
flask = "2.2.2"
flask-admin = "*"
@@ -47,7 +47,7 @@ gunicorn = "^20.1.0"
python-keycloak = "^2.5.0"
APScheduler = "^3.9.1"
Jinja2 = "^3.1.2"
-RestrictedPython = "^5.2"
+RestrictedPython = "^6.0"
Flask-SQLAlchemy = "^3"
orjson = "^3.8.0"
diff --git a/src/spiffworkflow_backend/api.yml b/src/spiffworkflow_backend/api.yml
index 1ef9cf674..7f2fbc91a 100755
--- a/src/spiffworkflow_backend/api.yml
+++ b/src/spiffworkflow_backend/api.yml
@@ -286,21 +286,14 @@ paths:
schema:
$ref: "#/components/schemas/ProcessModel"
- /process-models/{process_group_id}/{process_model_id}/files:
+ /process-models/{modified_process_model_id}/files:
parameters:
- - name: process_group_id
+ - name: modified_process_model_id
in: path
required: true
- description: The group containing the models we want to return
+      description: The process model id, with slashes (/) replaced by colons (:)
schema:
type: string
- - name: process_model_id
- in: path
- required: true
- description: The unique id of an existing process model to validate.
- schema:
- type: string
- # add_file
post:
operationId: spiffworkflow_backend.routes.process_api_blueprint.add_file
summary: Add a new workflow spec file
@@ -322,36 +315,15 @@ paths:
application/json:
schema:
$ref: "#/components/schemas/File"
- # get:
- # operationId: spiffworkflow_backend.api.process_api_blueprint.get_files
- # summary: Provide a list of workflow spec files for the given workflow_spec_id. IMPORTANT, only includes metadata, not the file content.
- # tags:
- # - Process Model Files
- # responses:
- # '200':
- # description: An array of file descriptions (not the file content)
- # content:
- # application/json:
- # schema:
- # type: array
- # items:
- # $ref: "#/components/schemas/File"
- /process-models/{process_group_id}/{process_model_id}:
+ /process-models/{modified_process_model_identifier}:
parameters:
- - name: process_group_id
+ - name: modified_process_model_identifier
in: path
required: true
- description: The unique id of an existing process group
+      description: The process model id, modified to replace slashes (/) with colons (:)
schema:
type: string
- - name: process_model_id
- in: path
- required: true
- description: The unique id of an existing process model.
- schema:
- type: string
- # process_model_show
get:
operationId: spiffworkflow_backend.routes.process_api_blueprint.process_model_show
summary: Returns a single process model
@@ -364,22 +336,9 @@ paths:
application/json:
schema:
$ref: "#/components/schemas/ProcessModel"
- # process_model_delete
- delete:
- operationId: spiffworkflow_backend.routes.process_api_blueprint.process_model_delete
- summary: Removes an existing process model
- tags:
- - Process Models
- responses:
- "200":
- description: The process model has been removed.
- content:
- application/json:
- schema:
- $ref: "#/components/schemas/OkTrue"
put:
operationId: spiffworkflow_backend.routes.process_api_blueprint.process_model_update
- summary: Modifies an existing process mosel with the given parameters.
+ summary: Modifies an existing process model with the given parameters.
tags:
- Process Models
requestBody:
@@ -394,15 +353,21 @@ paths:
application/json:
schema:
$ref: "#/components/schemas/ProcessModel"
+ delete:
+ operationId: spiffworkflow_backend.routes.process_api_blueprint.process_model_delete
+ summary: Removes an existing process model
+ tags:
+ - Process Models
+ responses:
+ "200":
+ description: The process model has been removed.
+ content:
+ application/json:
+ schema:
+ $ref: "#/components/schemas/OkTrue"
/process-instances:
parameters:
- - name: process_group_identifier
- in: query
- required: false
- description: The unique id of an existing process group
- schema:
- type: string
- name: process_model_identifier
in: query
required: false
@@ -548,15 +513,9 @@ paths:
schema:
$ref: "#/components/schemas/Workflow"
- /process-models/{process_group_id}/{process_model_id}/process-instances:
+ /process-models/{modified_process_model_id}/process-instances:
parameters:
- - name: process_group_id
- in: path
- required: true
- description: The unique id of an existing process group
- schema:
- type: string
- - name: process_model_id
+ - name: modified_process_model_id
in: path
required: true
description: The unique id of an existing process model.
@@ -576,18 +535,33 @@ paths:
schema:
$ref: "#/components/schemas/Workflow"
- /process-models/{process_group_id}/{process_model_id}/process-instances/{process_instance_id}:
+ /process-instances/{process_instance_id}:
parameters:
- - name: process_group_id
+ - name: process_instance_id
in: path
required: true
- description: The unique id of an existing process group
+ description: The unique id of an existing process instance.
schema:
- type: string
- - name: process_model_id
+ type: integer
+ delete:
+ operationId: spiffworkflow_backend.routes.process_api_blueprint.process_instance_delete
+ summary: Deletes a single process instance
+ tags:
+ - Process Instances
+ responses:
+ "200":
+ description: The process instance was deleted.
+ content:
+ application/json:
+ schema:
+ $ref: "#/components/schemas/OkTrue"
+
+ /process-models/{modified_process_model_identifier}/process-instances/{process_instance_id}:
+ parameters:
+ - name: modified_process_model_identifier
in: path
required: true
- description: The unique id of an existing process model.
+      description: The process model id, modified to replace slashes (/) with colons (:)
schema:
type: string
- name: process_instance_id
@@ -608,34 +582,9 @@ paths:
application/json:
schema:
$ref: "#/components/schemas/Workflow"
- # process_instance_delete
- delete:
- operationId: spiffworkflow_backend.routes.process_api_blueprint.process_instance_delete
- summary: Deletes a single process instance
- tags:
- - Process Instances
- responses:
- "200":
- description: The process instance was deleted.
- content:
- application/json:
- schema:
- $ref: "#/components/schemas/OkTrue"
- /process-models/{process_group_id}/{process_model_id}/process-instances/{process_instance_id}/run:
+ /process-instances/{process_instance_id}/run:
parameters:
- - name: process_group_id
- in: path
- required: true
- description: The unique id of an existing process group
- schema:
- type: string
- - name: process_model_id
- in: path
- required: true
- description: The unique id of an existing process model.
- schema:
- type: string
- name: process_instance_id
in: path
required: true
@@ -662,20 +611,8 @@ paths:
schema:
$ref: "#/components/schemas/Workflow"
- /process-models/{process_group_id}/{process_model_id}/process-instances/{process_instance_id}/terminate:
+ /process-instances/{process_instance_id}/terminate:
parameters:
- - name: process_group_id
- in: path
- required: true
- description: The unique id of an existing process group
- schema:
- type: string
- - name: process_model_id
- in: path
- required: true
- description: The unique id of an existing process model.
- schema:
- type: string
- name: process_instance_id
in: path
required: true
@@ -695,20 +632,8 @@ paths:
schema:
$ref: "#/components/schemas/OkTrue"
- /process-models/{process_group_id}/{process_model_id}/process-instances/{process_instance_id}/suspend:
+ /process-instances/{process_instance_id}/suspend:
parameters:
- - name: process_group_id
- in: path
- required: true
- description: The unique id of an existing process group
- schema:
- type: string
- - name: process_model_id
- in: path
- required: true
- description: The unique id of an existing process model.
- schema:
- type: string
- name: process_instance_id
in: path
required: true
@@ -728,20 +653,8 @@ paths:
schema:
$ref: "#/components/schemas/OkTrue"
- /process-models/{process_group_id}/{process_model_id}/process-instances/{process_instance_id}/resume:
+ /process-instances/{process_instance_id}/resume:
parameters:
- - name: process_group_id
- in: path
- required: true
- description: The unique id of an existing process group
- schema:
- type: string
- - name: process_model_id
- in: path
- required: true
- description: The unique id of an existing process model.
- schema:
- type: string
- name: process_instance_id
in: path
required: true
@@ -761,6 +674,35 @@ paths:
schema:
$ref: "#/components/schemas/OkTrue"
+ /process-instances/reports:
+ parameters:
+ - name: page
+ in: query
+ required: false
+ description: The page number to return. Defaults to page 1.
+ schema:
+ type: integer
+ - name: per_page
+ in: query
+ required: false
+        description: The number of items to return per page. Defaults to 100.
+ schema:
+ type: integer
+ get:
+ operationId: spiffworkflow_backend.routes.process_api_blueprint.process_instance_report_list
+      summary: Returns all process instance reports for the current user
+ tags:
+ - Process Instances
+ responses:
+ "200":
+ description: Workflow.
+ content:
+ application/json:
+ schema:
+ type: array
+ items:
+ $ref: "#/components/schemas/Workflow"
+
/process-models/{process_group_id}/{process_model_id}/process-instances/reports:
parameters:
- name: process_group_id
@@ -787,20 +729,6 @@ paths:
description: The page number to return. Defaults to page 1.
schema:
type: integer
- get:
- operationId: spiffworkflow_backend.routes.process_api_blueprint.process_instance_report_list
- summary: Returns all process instance reports for process model
- tags:
- - Process Instances
- responses:
- "200":
- description: Workflow.
- content:
- application/json:
- schema:
- type: array
- items:
- $ref: "#/components/schemas/Workflow"
post:
operationId: spiffworkflow_backend.routes.process_api_blueprint.process_instance_report_create
summary: Returns all process instance reports for process model
@@ -814,6 +742,41 @@ paths:
schema:
$ref: "#/components/schemas/OkTrue"
+ /process-instances/reports/{report_identifier}:
+ parameters:
+ - name: report_identifier
+ in: path
+ required: true
+ description: The unique id of an existing report
+ schema:
+ type: string
+ - name: page
+ in: query
+ required: false
+ description: The page number to return. Defaults to page 1.
+ schema:
+ type: integer
+ - name: per_page
+ in: query
+ required: false
+        description: The number of items to return per page. Defaults to 100.
+ schema:
+ type: integer
+ get:
+ operationId: spiffworkflow_backend.routes.process_api_blueprint.process_instance_report_show
+      summary: Returns a report of process instances for a given report identifier
+ tags:
+ - Process Instances
+ responses:
+ "200":
+ description: Workflow.
+ content:
+ application/json:
+ schema:
+ type: array
+ items:
+ $ref: "#/components/schemas/Workflow"
+
/process-models/{process_group_id}/{process_model_id}/process-instances/reports/{report_identifier}:
parameters:
- name: process_group_id
@@ -846,20 +809,6 @@ paths:
description: The page number to return. Defaults to page 1.
schema:
type: integer
- get:
- operationId: spiffworkflow_backend.routes.process_api_blueprint.process_instance_report_show
- summary: Returns a report of process instances for a given process model
- tags:
- - Process Instances
- responses:
- "200":
- description: Workflow.
- content:
- application/json:
- schema:
- type: array
- items:
- $ref: "#/components/schemas/Workflow"
put:
operationId: spiffworkflow_backend.routes.process_api_blueprint.process_instance_report_update
summary: Updates a process instance report
@@ -885,18 +834,12 @@ paths:
schema:
$ref: "#/components/schemas/OkTrue"
- /process-models/{process_group_id}/{process_model_id}/files/{file_name}:
+ /process-models/{modified_process_model_id}/files/{file_name}:
parameters:
- - name: process_group_id
+ - name: modified_process_model_id
in: path
required: true
- description: The unique id of an existing process group
- schema:
- type: string
- - name: process_model_id
- in: path
- required: true
- description: The unique id of an existing process model to validate.
+      description: The process model id, modified to replace slashes (/) with colons (:)
schema:
type: string
- name: file_name
@@ -905,7 +848,6 @@ paths:
description: The id of the spec file
schema:
type: string
- # get_file
get:
operationId: spiffworkflow_backend.routes.process_api_blueprint.get_file
summary: Returns metadata about the file
@@ -1203,20 +1145,8 @@ paths:
schema:
$ref: "#/components/schemas/Workflow"
- /process-models/{process_group_id}/{process_model_id}/process-instances/{process_instance_id}/logs:
+ /process-instances/{process_instance_id}/logs:
parameters:
- - name: process_group_id
- in: path
- required: true
- description: The unique id of an existing process group
- schema:
- type: string
- - name: process_model_id
- in: path
- required: true
- description: The unique id of an existing process model.
- schema:
- type: string
- name: process_instance_id
in: path
required: true
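
The api.yml changes above all follow one convention: a full process model id such as some-group/some-model is flattened into a single path segment by replacing slashes with colons, and expanded again server side. A minimal sketch of that round trip, mirroring the modify_process_model_id and un_modify_modified_process_model_id helpers added in process_api_blueprint.py below (the example id is hypothetical):

def modify_process_model_id(process_model_id: str) -> str:
    # Flatten "group/model" so it fits in one URL path segment.
    return process_model_id.replace("/", ":")


def un_modify_modified_process_model_id(modified_process_model_id: str) -> str:
    # Reverse the flattening on the server side.
    return modified_process_model_id.replace(":", "/")


modified = modify_process_model_id("some-group/some-model")  # hypothetical id
assert modified == "some-group:some-model"
assert un_modify_modified_process_model_id(modified) == "some-group/some-model"
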
diff --git a/src/spiffworkflow_backend/config/permissions/testing.yml b/src/spiffworkflow_backend/config/permissions/testing.yml
index 333ced14b..c678205df 100644
--- a/src/spiffworkflow_backend/config/permissions/testing.yml
+++ b/src/spiffworkflow_backend/config/permissions/testing.yml
@@ -41,3 +41,15 @@ permissions:
users: [testuser4]
allowed_permissions: [create, read, update, delete]
uri: /v1.0/process-models/finance/*
+
+ finance-admin-model-lanes:
+ groups: ["Finance Team"]
+ users: [testuser4]
+ allowed_permissions: [create, read, update, delete]
+ uri: /v1.0/process-models/finance:model_with_lanes/*
+
+ finance-admin-instance-run:
+ groups: ["Finance Team"]
+ users: [testuser4]
+ allowed_permissions: [create, read, update, delete]
+ uri: /v1.0/process-instances/*
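
The new permission targets above show that permission URIs use the same colon-modified model id as the REST paths. A hedged sketch of deriving such a URI; the build_model_permission_uri helper is invented here for illustration:

def build_model_permission_uri(process_model_id: str) -> str:
    """Illustrative only: permission URI for a model, using the colon-modified id."""
    return f"/v1.0/process-models/{process_model_id.replace('/', ':')}/*"


assert (
    build_model_permission_uri("finance/model_with_lanes")
    == "/v1.0/process-models/finance:model_with_lanes/*"
)
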
diff --git a/src/spiffworkflow_backend/config/testing.py b/src/spiffworkflow_backend/config/testing.py
index ce422587d..bbda9db9a 100644
--- a/src/spiffworkflow_backend/config/testing.py
+++ b/src/spiffworkflow_backend/config/testing.py
@@ -20,6 +20,9 @@ SPIFFWORKFLOW_BACKEND_LOG_LEVEL = environ.get(
# different places and this allows us to know exactly where we are at the start
BPMN_SPEC_ABSOLUTE_DIR = os.path.join(
os.path.dirname(__file__),
+ "..",
+ "..",
+ "..",
"tests",
"spiffworkflow_backend",
"files",
diff --git a/src/spiffworkflow_backend/models/process_instance.py b/src/spiffworkflow_backend/models/process_instance.py
index 50c3c9f76..8d4cb5387 100644
--- a/src/spiffworkflow_backend/models/process_instance.py
+++ b/src/spiffworkflow_backend/models/process_instance.py
@@ -72,7 +72,9 @@ class ProcessInstanceModel(SpiffworkflowBaseDBModel):
__tablename__ = "process_instance"
id: int = db.Column(db.Integer, primary_key=True)
- process_model_identifier: str = db.Column(db.String(50), nullable=False, index=True)
+ process_model_identifier: str = db.Column(
+ db.String(255), nullable=False, index=True
+ )
process_group_identifier: str = db.Column(db.String(50), nullable=False, index=True)
process_initiator_id: int = db.Column(ForeignKey(UserModel.id), nullable=False)
process_initiator = relationship("UserModel")
@@ -265,7 +267,7 @@ class ProcessInstanceMetadata:
id=process_instance.id,
display_name=process_model.display_name,
description=process_model.description,
- process_group_id=process_model.process_group_id,
+ process_group_id=process_model.process_group,
state_message=process_instance.state_message,
status=process_instance.status,
completed_tasks=process_instance.completed_tasks,
diff --git a/src/spiffworkflow_backend/models/process_instance_report.py b/src/spiffworkflow_backend/models/process_instance_report.py
index 8f8886bfb..b6f16288f 100644
--- a/src/spiffworkflow_backend/models/process_instance_report.py
+++ b/src/spiffworkflow_backend/models/process_instance_report.py
@@ -21,7 +21,6 @@ from spiffworkflow_backend.models.user import UserModel
from spiffworkflow_backend.services.process_instance_processor import (
ProcessInstanceProcessor,
)
-from spiffworkflow_backend.services.process_model_service import ProcessModelService
ReportMetadata = dict[str, Any]
@@ -58,8 +57,7 @@ class ProcessInstanceReportModel(SpiffworkflowBaseDBModel):
__tablename__ = "process_instance_report"
__table_args__ = (
db.UniqueConstraint(
- "process_group_identifier",
- "process_model_identifier",
+ "created_by_id",
"identifier",
name="process_instance_report_unique",
),
@@ -67,21 +65,53 @@ class ProcessInstanceReportModel(SpiffworkflowBaseDBModel):
id = db.Column(db.Integer, primary_key=True)
identifier: str = db.Column(db.String(50), nullable=False, index=True)
- process_model_identifier: str = db.Column(db.String(50), nullable=False, index=True)
- process_group_identifier = db.Column(db.String(50), nullable=False, index=True)
report_metadata: dict = deferred(db.Column(db.JSON)) # type: ignore
- created_by_id = db.Column(ForeignKey(UserModel.id), nullable=False)
+ created_by_id = db.Column(ForeignKey(UserModel.id), nullable=False, index=True)
created_by = relationship("UserModel")
created_at_in_seconds = db.Column(db.Integer)
updated_at_in_seconds = db.Column(db.Integer)
+ @classmethod
+ def default_report(cls, user: UserModel) -> ProcessInstanceReportModel:
+ """Default_report."""
+ identifier = "default"
+ process_instance_report = ProcessInstanceReportModel.query.filter_by(
+ identifier=identifier, created_by_id=user.id
+ ).first()
+
+ if process_instance_report is None:
+ report_metadata = {
+ "columns": [
+ {"Header": "id", "accessor": "id"},
+ {
+ "Header": "process_group_identifier",
+ "accessor": "process_group_identifier",
+ },
+ {
+ "Header": "process_model_identifier",
+ "accessor": "process_model_identifier",
+ },
+ {"Header": "start_in_seconds", "accessor": "start_in_seconds"},
+ {"Header": "end_in_seconds", "accessor": "end_in_seconds"},
+ {"Header": "status", "accessor": "status"},
+ ],
+ }
+
+ process_instance_report = cls(
+ identifier=identifier,
+ created_by_id=user.id,
+ report_metadata=report_metadata,
+ )
+
+ return process_instance_report # type: ignore
+
@classmethod
def add_fixtures(cls) -> None:
"""Add_fixtures."""
try:
- process_model = ProcessModelService().get_process_model(
- group_id="sartography-admin", process_model_id="ticket"
- )
+ # process_model = ProcessModelService().get_process_model(
+ # process_model_id="sartography-admin/ticket"
+ # )
user = UserModel.query.first()
columns = [
{"Header": "id", "accessor": "id"},
@@ -96,29 +126,21 @@ class ProcessInstanceReportModel(SpiffworkflowBaseDBModel):
cls.create_report(
identifier="standard",
- process_group_identifier=process_model.process_group_id,
- process_model_identifier=process_model.id,
user=user,
report_metadata=json,
)
cls.create_report(
identifier="for-month",
- process_group_identifier="sartography-admin",
- process_model_identifier="ticket",
user=user,
report_metadata=cls.ticket_for_month_report(),
)
cls.create_report(
identifier="for-month-3",
- process_group_identifier="sartography-admin",
- process_model_identifier="ticket",
user=user,
report_metadata=cls.ticket_for_month_3_report(),
)
cls.create_report(
identifier="hot-report",
- process_group_identifier="category_number_one",
- process_model_identifier="process-model-with-form",
user=user,
report_metadata=cls.process_model_with_form_report_fixture(),
)
@@ -130,23 +152,18 @@ class ProcessInstanceReportModel(SpiffworkflowBaseDBModel):
def create_report(
cls,
identifier: str,
- process_group_identifier: str,
- process_model_identifier: str,
user: UserModel,
report_metadata: ReportMetadata,
) -> None:
"""Make_fixture_report."""
process_instance_report = ProcessInstanceReportModel.query.filter_by(
identifier=identifier,
- process_group_identifier=process_group_identifier,
- process_model_identifier=process_model_identifier,
+ created_by_id=user.id,
).first()
if process_instance_report is None:
process_instance_report = cls(
identifier=identifier,
- process_group_identifier=process_group_identifier,
- process_model_identifier=process_model_identifier,
created_by_id=user.id,
report_metadata=report_metadata,
)
@@ -217,19 +234,22 @@ class ProcessInstanceReportModel(SpiffworkflowBaseDBModel):
def create_with_attributes(
cls,
identifier: str,
- process_group_identifier: str,
- process_model_identifier: str,
report_metadata: dict,
user: UserModel,
) -> ProcessInstanceReportModel:
"""Create_with_attributes."""
- process_model = ProcessModelService().get_process_model(
- group_id=process_group_identifier, process_model_id=process_model_identifier
- )
+ # <<<<<<< HEAD
+ # process_model = ProcessModelService().get_process_model(
+ # process_model_id=f"{process_model_identifier}"
+ # )
+ # process_instance_report = cls(
+ # identifier=identifier,
+ # process_group_identifier="process_model.process_group_id",
+ # process_model_identifier=process_model.id,
+ # =======
process_instance_report = cls(
identifier=identifier,
- process_group_identifier=process_model.process_group_id,
- process_model_identifier=process_model.id,
+ # >>>>>>> main
created_by_id=user.id,
report_metadata=report_metadata,
)
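
With reports keyed by (created_by_id, identifier) rather than by process group and model, a lookup only needs the requesting user. A hedged usage sketch of the new default_report classmethod; it assumes an active Flask app context and an existing user row, and the helper function name is invented:

from spiffworkflow_backend.models.process_instance_report import (
    ProcessInstanceReportModel,
)
from spiffworkflow_backend.models.user import UserModel


def default_report_accessors(user: UserModel) -> list[str]:
    """Illustrative helper: list the column accessors of the user's default report."""
    report = ProcessInstanceReportModel.default_report(user)
    # report_metadata["columns"] holds {"Header": ..., "accessor": ...} entries,
    # as built in default_report above.
    return [column["accessor"] for column in report.report_metadata["columns"]]
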
diff --git a/src/spiffworkflow_backend/models/process_model.py b/src/spiffworkflow_backend/models/process_model.py
index 1928f3124..d63197be2 100644
--- a/src/spiffworkflow_backend/models/process_model.py
+++ b/src/spiffworkflow_backend/models/process_model.py
@@ -29,7 +29,7 @@ class ProcessModelInfo:
id: str
display_name: str
description: str
- process_group_id: str = ""
+ process_group: Any | None = None
primary_file_name: str | None = None
primary_process_id: str | None = None
display_order: int | None = 0
@@ -40,7 +40,7 @@ class ProcessModelInfo:
def __post_init__(self) -> None:
"""__post_init__."""
- self.sort_index = f"{self.process_group_id}:{self.id}"
+ self.sort_index = self.id
def __eq__(self, other: Any) -> bool:
"""__eq__."""
@@ -66,7 +66,6 @@ class ProcessModelInfoSchema(Schema):
primary_file_name = marshmallow.fields.String(allow_none=True)
primary_process_id = marshmallow.fields.String(allow_none=True)
is_review = marshmallow.fields.Boolean(allow_none=True)
- process_group_id = marshmallow.fields.String(allow_none=True)
files = marshmallow.fields.List(marshmallow.fields.Nested("FileSchema"))
fault_or_suspend_on_exception = marshmallow.fields.String()
exception_notification_addresses = marshmallow.fields.List(
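
With process_group_id gone from ProcessModelInfo, the model id is expected to carry the full group-qualified path and sort_index simply mirrors it. A small sketch under that assumption (the ids and display text are invented for illustration):

from spiffworkflow_backend.models.process_model import ProcessModelInfo

# Hypothetical model, used only to show the new id and sort_index behavior.
model = ProcessModelInfo(
    id="finance/invoice-approval",
    display_name="Invoice Approval",
    description="Illustrative process model with a group-qualified id.",
)
assert model.sort_index == "finance/invoice-approval"  # __post_init__ now uses id alone
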
diff --git a/src/spiffworkflow_backend/routes/process_api_blueprint.py b/src/spiffworkflow_backend/routes/process_api_blueprint.py
index f0b9065c7..2f60e994d 100644
--- a/src/spiffworkflow_backend/routes/process_api_blueprint.py
+++ b/src/spiffworkflow_backend/routes/process_api_blueprint.py
@@ -137,6 +137,16 @@ def permissions_check(body: Dict[str, Dict[str, list[str]]]) -> flask.wrappers.R
return make_response(jsonify({"results": response_dict}), 200)
+def modify_process_model_id(process_model_id: str) -> str:
+ """Modify_process_model_id."""
+ return process_model_id.replace("/", ":")
+
+
+def un_modify_modified_process_model_id(modified_process_model_id: str) -> str:
+ """Un_modify_modified_process_model_id."""
+ return modified_process_model_id.replace(":", "/")
+
+
def process_group_add(body: dict) -> flask.wrappers.Response:
"""Add_process_group."""
process_model_service = ProcessModelService()
@@ -216,10 +226,9 @@ def process_model_add(
status_code=400,
)
+ process_group_id, _ = os.path.split(process_model_info.id)
process_model_service = ProcessModelService()
- process_group = process_model_service.get_process_group(
- process_model_info.process_group_id
- )
+ process_group = process_model_service.get_process_group(process_group_id)
if process_group is None:
raise ApiError(
error_code="process_model_could_not_be_created",
@@ -236,32 +245,45 @@ def process_model_add(
def process_model_delete(
- process_group_id: str, process_model_id: str
+ modified_process_model_identifier: str,
) -> flask.wrappers.Response:
"""Process_model_delete."""
- ProcessModelService().process_model_delete(process_model_id)
+ process_model_identifier = modified_process_model_identifier.replace(":", "/")
+ # process_model_identifier = f"{process_group_id}/{process_model_id}"
+ ProcessModelService().process_model_delete(process_model_identifier)
return Response(json.dumps({"ok": True}), status=200, mimetype="application/json")
def process_model_update(
- process_group_id: str, process_model_id: str, body: Dict[str, Union[str, bool, int]]
+ modified_process_model_identifier: str, body: Dict[str, Union[str, bool, int]]
) -> Any:
"""Process_model_update."""
- body_include_list = ["display_name", "primary_file_name", "primary_process_id"]
+ process_model_identifier = modified_process_model_identifier.replace(":", "/")
+ body_include_list = [
+ "display_name",
+ "primary_file_name",
+ "primary_process_id",
+ "description",
+ ]
body_filtered = {
include_item: body[include_item]
for include_item in body_include_list
if include_item in body
}
- process_model = get_process_model(process_model_id, process_group_id)
+ # process_model_identifier = f"{process_group_id}/{process_model_id}"
+ process_model = get_process_model(process_model_identifier)
ProcessModelService().update_spec(process_model, body_filtered)
return ProcessModelInfoSchema().dump(process_model)
-def process_model_show(process_group_id: str, process_model_id: str) -> Any:
+def process_model_show(modified_process_model_identifier: str) -> Any:
"""Process_model_show."""
- process_model = get_process_model(process_model_id, process_group_id)
+ process_model_identifier = modified_process_model_identifier.replace(":", "/")
+ # process_model_identifier = f"{process_group_id}/{process_model_id}"
+ process_model = get_process_model(process_model_identifier)
+ # TODO: Temporary. Should not need the next line once models have correct ids
+ # process_model.id = process_model_identifier
files = sorted(SpecFileService.get_files(process_model))
process_model.files = files
for file in process_model.files:
@@ -298,15 +320,16 @@ def process_model_list(
return Response(json.dumps(response_json), status=200, mimetype="application/json")
-def get_file(process_group_id: str, process_model_id: str, file_name: str) -> Any:
+def get_file(modified_process_model_id: str, file_name: str) -> Any:
"""Get_file."""
- process_model = get_process_model(process_model_id, process_group_id)
+ process_model_identifier = modified_process_model_id.replace(":", "/")
+ process_model = get_process_model(process_model_identifier)
files = SpecFileService.get_files(process_model, file_name)
if len(files) == 0:
raise ApiError(
error_code="unknown file",
message=f"No information exists for file {file_name}"
- f" it does not exist in workflow {process_model_id}.",
+ f" it does not exist in workflow {process_model_identifier}.",
status_code=404,
)
@@ -314,15 +337,17 @@ def get_file(process_group_id: str, process_model_id: str, file_name: str) -> An
file_contents = SpecFileService.get_data(process_model, file.name)
file.file_contents = file_contents
file.process_model_id = process_model.id
- file.process_group_id = process_model.process_group_id
+ # file.process_group_id = process_model.process_group_id
return FileSchema().dump(file)
def process_model_file_update(
- process_group_id: str, process_model_id: str, file_name: str
+ modified_process_model_id: str, file_name: str
) -> flask.wrappers.Response:
"""Process_model_file_update."""
- process_model = get_process_model(process_model_id, process_group_id)
+ process_model_identifier = modified_process_model_id.replace(":", "/")
+ # process_model_identifier = f"{process_group_id}/{process_model_id}"
+ process_model = get_process_model(process_model_identifier)
request_file = get_file_from_request()
request_file_contents = request_file.stream.read()
@@ -337,7 +362,7 @@ def process_model_file_update(
if current_app.config["GIT_COMMIT_ON_SAVE"]:
git_output = GitService.commit(
- message=f"User: {g.user.username} clicked save for {process_group_id}/{process_model_id}/{file_name}"
+ message=f"User: {g.user.username} clicked save for {process_model_identifier}/{file_name}"
)
current_app.logger.info(f"git output: {git_output}")
else:
@@ -347,10 +372,11 @@ def process_model_file_update(
def process_model_file_delete(
- process_group_id: str, process_model_id: str, file_name: str
+ modified_process_model_id: str, file_name: str
) -> flask.wrappers.Response:
"""Process_model_file_delete."""
- process_model = get_process_model(process_model_id, process_group_id)
+ process_model_identifier = modified_process_model_id.replace(":", "/")
+ process_model = get_process_model(process_model_identifier)
try:
SpecFileService.delete_file(process_model, file_name)
except FileNotFoundError as exception:
@@ -365,9 +391,10 @@ def process_model_file_delete(
return Response(json.dumps({"ok": True}), status=200, mimetype="application/json")
-def add_file(process_group_id: str, process_model_id: str) -> flask.wrappers.Response:
+def add_file(modified_process_model_id: str) -> flask.wrappers.Response:
"""Add_file."""
- process_model = get_process_model(process_model_id, process_group_id)
+ process_model_identifier = modified_process_model_id.replace(":", "/")
+ process_model = get_process_model(process_model_identifier)
request_file = get_file_from_request()
if not request_file.filename:
raise ApiError(
@@ -382,18 +409,18 @@ def add_file(process_group_id: str, process_model_id: str) -> flask.wrappers.Res
file_contents = SpecFileService.get_data(process_model, file.name)
file.file_contents = file_contents
file.process_model_id = process_model.id
- file.process_group_id = process_model.process_group_id
return Response(
json.dumps(FileSchema().dump(file)), status=201, mimetype="application/json"
)
-def process_instance_create(
- process_group_id: str, process_model_id: str
-) -> flask.wrappers.Response:
+def process_instance_create(modified_process_model_id: str) -> flask.wrappers.Response:
"""Create_process_instance."""
+ process_model_identifier = un_modify_modified_process_model_id(
+ modified_process_model_id
+ )
process_instance = ProcessInstanceService.create_process_instance(
- process_model_id, g.user, process_group_identifier=process_group_id
+ process_model_identifier, g.user
)
return Response(
json.dumps(ProcessInstanceModelSchema().dump(process_instance)),
@@ -403,8 +430,6 @@ def process_instance_create(
def process_instance_run(
- process_group_id: str,
- process_model_id: str,
process_instance_id: int,
do_engine_steps: bool = True,
) -> flask.wrappers.Response:
@@ -446,10 +471,7 @@ def process_instance_run(
def process_instance_terminate(
- process_group_id: str,
- process_model_id: str,
process_instance_id: int,
- do_engine_steps: bool = True,
) -> flask.wrappers.Response:
"""Process_instance_run."""
process_instance = ProcessInstanceService().get_process_instance(
@@ -461,8 +483,6 @@ def process_instance_terminate(
def process_instance_suspend(
- process_group_id: str,
- process_model_id: str,
process_instance_id: int,
) -> flask.wrappers.Response:
"""Process_instance_suspend."""
@@ -475,8 +495,6 @@ def process_instance_suspend(
def process_instance_resume(
- process_group_id: str,
- process_model_id: str,
process_instance_id: int,
) -> flask.wrappers.Response:
"""Process_instance_resume."""
@@ -489,8 +507,6 @@ def process_instance_resume(
def process_instance_log_list(
- process_group_id: str,
- process_model_id: str,
process_instance_id: int,
page: int = 1,
per_page: int = 100,
@@ -651,7 +667,6 @@ def message_start(
def process_instance_list(
- process_group_identifier: Optional[str] = None,
process_model_identifier: Optional[str] = None,
page: int = 1,
per_page: int = 100,
@@ -662,10 +677,11 @@ def process_instance_list(
process_status: Optional[str] = None,
) -> flask.wrappers.Response:
"""Process_instance_list."""
+ # process_model_identifier = un_modify_modified_process_model_id(modified_process_model_identifier)
process_instance_query = ProcessInstanceModel.query
- if process_model_identifier is not None and process_group_identifier is not None:
+ if process_model_identifier is not None:
process_model = get_process_model(
- process_model_identifier, process_group_identifier
+ f"{process_model_identifier}",
)
process_instance_query = process_instance_query.filter_by(
@@ -711,10 +727,29 @@ def process_instance_list(
ProcessInstanceModel.start_in_seconds.desc(), ProcessInstanceModel.id.desc() # type: ignore
).paginate(page=page, per_page=per_page, error_out=False)
+ process_instance_report = ProcessInstanceReportModel.default_report(g.user)
+
+ # TODO need to look into this more - how the filter here interacts with the
+ # one defined in the report.
+ # TODO need to look into test failures when the results from result_dict is
+ # used instead of the process instances
+
+ # substitution_variables = request.args.to_dict()
+ # result_dict = process_instance_report.generate_report(
+ # process_instances.items, substitution_variables
+ # )
+
+ # results = result_dict["results"]
+ # report_metadata = result_dict["report_metadata"]
+
+ results = process_instances.items
+ report_metadata = process_instance_report.report_metadata
+
response_json = {
- "results": process_instances.items,
+ "report_metadata": report_metadata,
+ "results": results,
"pagination": {
- "count": len(process_instances.items),
+ "count": len(results),
"total": process_instances.total,
"pages": process_instances.pages,
},
@@ -724,12 +759,13 @@ def process_instance_list(
def process_instance_show(
- process_group_id: str, process_model_id: str, process_instance_id: int
+ modified_process_model_identifier: str, process_instance_id: int
) -> flask.wrappers.Response:
"""Create_process_instance."""
+ process_model_identifier = modified_process_model_identifier.replace(":", "/")
process_instance = find_process_instance_by_id_or_raise(process_instance_id)
current_version_control_revision = GitService.get_current_revision()
- process_model = get_process_model(process_model_id, process_group_id)
+ process_model = get_process_model(process_model_identifier)
if process_model.primary_file_name:
if (
@@ -748,9 +784,7 @@ def process_instance_show(
return make_response(jsonify(process_instance), 200)
-def process_instance_delete(
- process_group_id: str, process_model_id: str, process_instance_id: int
-) -> flask.wrappers.Response:
+def process_instance_delete(process_instance_id: int) -> flask.wrappers.Response:
"""Create_process_instance."""
process_instance = find_process_instance_by_id_or_raise(process_instance_id)
@@ -762,27 +796,20 @@ def process_instance_delete(
def process_instance_report_list(
- process_group_id: str, process_model_id: str, page: int = 1, per_page: int = 100
+ page: int = 1, per_page: int = 100
) -> flask.wrappers.Response:
"""Process_instance_report_list."""
- process_model = get_process_model(process_model_id, process_group_id)
-
process_instance_reports = ProcessInstanceReportModel.query.filter_by(
- process_group_identifier=process_group_id,
- process_model_identifier=process_model.id,
+ created_by_id=g.user.id,
).all()
return make_response(jsonify(process_instance_reports), 200)
-def process_instance_report_create(
- process_group_id: str, process_model_id: str, body: Dict[str, Any]
-) -> flask.wrappers.Response:
+def process_instance_report_create(body: Dict[str, Any]) -> flask.wrappers.Response:
"""Process_instance_report_create."""
ProcessInstanceReportModel.create_report(
identifier=body["identifier"],
- process_group_identifier=process_group_id,
- process_model_identifier=process_model_id,
user=g.user,
report_metadata=body["report_metadata"],
)
@@ -791,16 +818,13 @@ def process_instance_report_create(
def process_instance_report_update(
- process_group_id: str,
- process_model_id: str,
report_identifier: str,
body: Dict[str, Any],
) -> flask.wrappers.Response:
"""Process_instance_report_create."""
process_instance_report = ProcessInstanceReportModel.query.filter_by(
identifier=report_identifier,
- process_group_identifier=process_group_id,
- process_model_identifier=process_model_id,
+ created_by_id=g.user.id,
).first()
if process_instance_report is None:
raise ApiError(
@@ -816,15 +840,12 @@ def process_instance_report_update(
def process_instance_report_delete(
- process_group_id: str,
- process_model_id: str,
report_identifier: str,
) -> flask.wrappers.Response:
"""Process_instance_report_create."""
process_instance_report = ProcessInstanceReportModel.query.filter_by(
identifier=report_identifier,
- process_group_identifier=process_group_id,
- process_model_identifier=process_model_id,
+ created_by_id=g.user.id,
).first()
if process_instance_report is None:
raise ApiError(
@@ -877,25 +898,20 @@ def authentication_callback(
def process_instance_report_show(
- process_group_id: str,
- process_model_id: str,
report_identifier: str,
page: int = 1,
per_page: int = 100,
) -> flask.wrappers.Response:
"""Process_instance_list."""
- process_model = get_process_model(process_model_id, process_group_id)
-
- process_instances = (
- ProcessInstanceModel.query.filter_by(process_model_identifier=process_model.id)
- .order_by(
- ProcessInstanceModel.start_in_seconds.desc(), ProcessInstanceModel.id.desc() # type: ignore
- )
- .paginate(page=page, per_page=per_page, error_out=False)
+ process_instances = ProcessInstanceModel.query.order_by( # .filter_by(process_model_identifier=process_model.id)
+ ProcessInstanceModel.start_in_seconds.desc(), ProcessInstanceModel.id.desc() # type: ignore
+ ).paginate(
+ page=page, per_page=per_page, error_out=False
)
process_instance_report = ProcessInstanceReportModel.query.filter_by(
- identifier=report_identifier
+ identifier=report_identifier,
+ created_by_id=g.user.id,
).first()
if process_instance_report is None:
raise ApiError(
@@ -1007,7 +1023,6 @@ def task_show(process_instance_id: int, task_id: str) -> flask.wrappers.Response
process_model = get_process_model(
process_instance.process_model_identifier,
- process_instance.process_group_identifier,
)
form_schema_file_name = ""
@@ -1159,7 +1174,7 @@ def task_submit(
def script_unit_test_create(
process_group_id: str, process_model_id: str, body: Dict[str, Union[str, bool, int]]
) -> flask.wrappers.Response:
- """Script_unit_test_run."""
+ """Script_unit_test_create."""
bpmn_task_identifier = _get_required_parameter_or_raise(
"bpmn_task_identifier", body
)
@@ -1168,7 +1183,8 @@ def script_unit_test_create(
"expected_output_json", body
)
- process_model = get_process_model(process_model_id, process_group_id)
+ process_model_identifier = f"{process_group_id}/{process_model_id}"
+ process_model = get_process_model(process_model_identifier)
file = SpecFileService.get_files(process_model, process_model.primary_file_name)[0]
if file is None:
raise ApiError(
@@ -1278,13 +1294,11 @@ def get_file_from_request() -> Any:
return request_file
-def get_process_model(process_model_id: str, process_group_id: str) -> ProcessModelInfo:
+def get_process_model(process_model_id: str) -> ProcessModelInfo:
"""Get_process_model."""
process_model = None
try:
- process_model = ProcessModelService().get_process_model(
- process_model_id, group_id=process_group_id
- )
+ process_model = ProcessModelService().get_process_model(process_model_id)
except ProcessEntityNotFoundError as exception:
raise (
ApiError(
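# Illustrative sketch (not part of the patch): the routes above now take a single
# path-style process model identifier, and URLs carry it with ":" in place of "/" so it
# fits one path segment; process_instance_show converts it back with .replace(":", "/"),
# and the test helpers later in this diff build URLs with .replace("/", ":"). The helper
# names here are chosen for the sketch; the diff only shows the replace() calls and a
# commented-out un_modify_modified_process_model_id().


def modify_process_model_id(process_model_id: str) -> str:
    """Turn 'group_a/group_b/sample' into 'group_a:group_b:sample' for use in a URL."""
    return process_model_id.replace("/", ":")


def un_modify_modified_process_model_id(modified_id: str) -> str:
    """Turn 'group_a:group_b:sample' from a URL back into the on-disk id."""
    return modified_id.replace(":", "/")


if __name__ == "__main__":
    process_model_id = "group_a/group_b/sample"
    url_segment = modify_process_model_id(process_model_id)
    assert un_modify_modified_process_model_id(url_segment) == process_model_id
    print(f"/v1.0/process-models/{url_segment}/process-instances")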
diff --git a/src/spiffworkflow_backend/routes/user.py b/src/spiffworkflow_backend/routes/user.py
index aa5bcdd8b..662992721 100644
--- a/src/spiffworkflow_backend/routes/user.py
+++ b/src/spiffworkflow_backend/routes/user.py
@@ -46,6 +46,7 @@ def verify_token(
ApiError: If not on production and token is not valid, returns an 'invalid_token' 403 error.
If on production and user is not authenticated, returns a 'no_user' 403 error.
"""
+ user_info = None
if not force_run and AuthorizationService.should_disable_auth_for_request():
return None
@@ -104,6 +105,7 @@ def verify_token(
raise ApiError(
error_code="fail_get_user_info",
message="Cannot get user info from token",
+ status_code=401,
) from e
if (
diff --git a/src/spiffworkflow_backend/services/acceptance_test_fixtures.py b/src/spiffworkflow_backend/services/acceptance_test_fixtures.py
index 144c5ef27..c6c1b578a 100644
--- a/src/spiffworkflow_backend/services/acceptance_test_fixtures.py
+++ b/src/spiffworkflow_backend/services/acceptance_test_fixtures.py
@@ -13,8 +13,8 @@ from spiffworkflow_backend.models.process_instance import ProcessInstanceStatus
def load_acceptance_test_fixtures() -> list[ProcessInstanceModel]:
"""Load_fixtures."""
current_app.logger.debug("load_acceptance_test_fixtures() start")
- test_process_group_id = "acceptance-tests-group-one"
- test_process_model_id = "acceptance-tests-model-1"
+ test_process_group_id = ""
+ test_process_model_id = "acceptance-tests-group-one/acceptance-tests-model-1"
user = BaseTest.find_or_create_user()
statuses = ProcessInstanceStatus.list()
current_time = round(time.time())
diff --git a/src/spiffworkflow_backend/services/data_setup_service.py b/src/spiffworkflow_backend/services/data_setup_service.py
index 88bd60480..23df25f34 100644
--- a/src/spiffworkflow_backend/services/data_setup_service.py
+++ b/src/spiffworkflow_backend/services/data_setup_service.py
@@ -48,7 +48,7 @@ class DataSetupService:
except Exception as ex:
failing_process_models.append(
(
- f"{process_model.process_group_id}/{process_model.id}/{process_model_file.name}",
+ f"{process_model.process_group}/{process_model.id}/{process_model_file.name}",
str(ex),
)
)
@@ -87,7 +87,7 @@ class DataSetupService:
else:
failing_process_models.append(
(
- f"{process_model.process_group_id}/{process_model.id}",
+ f"{process_model.process_group}/{process_model.id}",
"primary_file_name not set",
)
)
diff --git a/src/spiffworkflow_backend/services/error_handling_service.py b/src/spiffworkflow_backend/services/error_handling_service.py
index 36c66d93e..3f1622a47 100644
--- a/src/spiffworkflow_backend/services/error_handling_service.py
+++ b/src/spiffworkflow_backend/services/error_handling_service.py
@@ -35,7 +35,7 @@ class ErrorHandlingService:
) -> None:
"""On unhandled exceptions, set instance.status based on model.fault_or_suspend_on_exception."""
process_model = ProcessModelService().get_process_model(
- _processor.process_model_identifier, _processor.process_group_identifier
+ _processor.process_model_identifier
)
if process_model.fault_or_suspend_on_exception == "suspend":
self.set_instance_status(
diff --git a/src/spiffworkflow_backend/services/file_system_service.py b/src/spiffworkflow_backend/services/file_system_service.py
index 3b23ce1b4..a4a01b836 100644
--- a/src/spiffworkflow_backend/services/file_system_service.py
+++ b/src/spiffworkflow_backend/services/file_system_service.py
@@ -54,18 +54,22 @@ class FileSystemService:
@staticmethod
def process_group_path_for_spec(spec: ProcessModelInfo) -> str:
"""Category_path_for_spec."""
- return FileSystemService.process_group_path(spec.process_group_id)
+ process_group_id, _ = os.path.split(spec.id)
+ return FileSystemService.process_group_path(process_group_id)
@staticmethod
def workflow_path(spec: ProcessModelInfo) -> str:
"""Workflow_path."""
- process_group_path = FileSystemService.process_group_path_for_spec(spec)
- return os.path.join(process_group_path, spec.id)
+ process_model_path = os.path.join(FileSystemService.root_path(), spec.id)
+ # process_group_path = FileSystemService.process_group_path_for_spec(spec)
+ return process_model_path
@staticmethod
- def full_path_to_process_model_file(spec: ProcessModelInfo, file_name: str) -> str:
+ def full_path_to_process_model_file(spec: ProcessModelInfo) -> str:
"""Full_path_to_process_model_file."""
- return os.path.join(FileSystemService.workflow_path(spec), file_name)
+ return os.path.join(
+ FileSystemService.workflow_path(spec), spec.primary_file_name # type: ignore
+ )
def next_display_order(self, spec: ProcessModelInfo) -> int:
"""Next_display_order."""
diff --git a/src/spiffworkflow_backend/services/message_service.py b/src/spiffworkflow_backend/services/message_service.py
index da1e62245..216a66a58 100644
--- a/src/spiffworkflow_backend/services/message_service.py
+++ b/src/spiffworkflow_backend/services/message_service.py
@@ -120,7 +120,6 @@ class MessageService:
process_instance_receive = ProcessInstanceService.create_process_instance(
message_triggerable_process_model.process_model_identifier,
user,
- process_group_identifier=message_triggerable_process_model.process_group_identifier,
)
processor_receive = ProcessInstanceProcessor(process_instance_receive)
processor_receive.do_engine_steps(save=False)
diff --git a/src/spiffworkflow_backend/services/process_instance_processor.py b/src/spiffworkflow_backend/services/process_instance_processor.py
index 35320f652..0ed85511a 100644
--- a/src/spiffworkflow_backend/services/process_instance_processor.py
+++ b/src/spiffworkflow_backend/services/process_instance_processor.py
@@ -293,8 +293,9 @@ class ProcessInstanceProcessor:
tld.spiff_step = process_instance_model.spiff_step
# we want this to be the fully qualified path to the process model including all group subcomponents
- current_app.config["THREAD_LOCAL_DATA"].process_model_identifier = (
- f"{process_instance_model.process_group_identifier}/"
+ current_app.config[
+ "THREAD_LOCAL_DATA"
+ ].process_model_identifier = (
f"{process_instance_model.process_model_identifier}"
)
@@ -307,8 +308,7 @@ class ProcessInstanceProcessor:
bpmn_process_spec,
subprocesses,
) = ProcessInstanceProcessor.get_process_model_and_subprocesses(
- process_instance_model.process_model_identifier,
- process_instance_model.process_group_identifier,
+ process_instance_model.process_model_identifier
)
else:
bpmn_json_length = len(process_instance_model.bpmn_json.encode("utf-8"))
@@ -359,7 +359,7 @@ class ProcessInstanceProcessor:
check_sub_specs(test_spec, 5)
self.process_model_identifier = process_instance_model.process_model_identifier
- self.process_group_identifier = process_instance_model.process_group_identifier
+ # self.process_group_identifier = process_instance_model.process_group_identifier
try:
self.bpmn_process_instance = self.__get_bpmn_process_instance(
@@ -394,17 +394,17 @@ class ProcessInstanceProcessor:
@classmethod
def get_process_model_and_subprocesses(
- cls, process_model_identifier: str, process_group_identifier: str
+ cls, process_model_identifier: str
) -> Tuple[BpmnProcessSpec, IdToBpmnProcessSpecMapping]:
"""Get_process_model_and_subprocesses."""
process_model_info = ProcessModelService().get_process_model(
- process_model_identifier, process_group_identifier
+ process_model_identifier
)
if process_model_info is None:
raise (
ApiError(
"process_model_not_found",
- f"The given process model was not found: {process_group_identifier}/{process_model_identifier}.",
+ f"The given process model was not found: {process_model_identifier}.",
)
)
spec_files = SpecFileService.get_files(process_model_info)
@@ -412,12 +412,11 @@ class ProcessInstanceProcessor:
@classmethod
def get_bpmn_process_instance_from_process_model(
- cls, process_model_identifier: str, process_group_identifier: str
+ cls, process_model_identifier: str
) -> BpmnWorkflow:
"""Get_all_bpmn_process_identifiers_for_process_model."""
(bpmn_process_spec, subprocesses) = cls.get_process_model_and_subprocesses(
process_model_identifier,
- process_group_identifier,
)
return cls.get_bpmn_process_instance_from_workflow_spec(
bpmn_process_spec, subprocesses
@@ -698,7 +697,7 @@ class ProcessInstanceProcessor:
etree_element,
)
return FileSystemService.full_path_to_process_model_file(
- process_model, process_model.primary_file_name
+ process_model
)
return None
diff --git a/src/spiffworkflow_backend/services/process_instance_service.py b/src/spiffworkflow_backend/services/process_instance_service.py
index 062d0ef7c..7854537e3 100644
--- a/src/spiffworkflow_backend/services/process_instance_service.py
+++ b/src/spiffworkflow_backend/services/process_instance_service.py
@@ -2,7 +2,6 @@
import time
from typing import Any
from typing import List
-from typing import Optional
from flask import current_app
from flask_bpmn.api.api_error import ApiError
@@ -32,7 +31,6 @@ class ProcessInstanceService:
def create_process_instance(
process_model_identifier: str,
user: UserModel,
- process_group_identifier: Optional[str] = None,
) -> ProcessInstanceModel:
"""Get_process_instance_from_spec."""
current_git_revision = GitService.get_current_revision()
@@ -40,7 +38,7 @@ class ProcessInstanceService:
status=ProcessInstanceStatus.not_started.value,
process_initiator=user,
process_model_identifier=process_model_identifier,
- process_group_identifier=process_group_identifier,
+ process_group_identifier="",
start_in_seconds=round(time.time()),
bpmn_version_control_type="git",
bpmn_version_control_identifier=current_git_revision,
@@ -97,7 +95,7 @@ class ProcessInstanceService:
next_task=None,
# navigation=navigation,
process_model_identifier=processor.process_model_identifier,
- process_group_identifier=processor.process_group_identifier,
+ process_group_identifier="",
# total_tasks=len(navigation),
completed_tasks=processor.process_instance_model.completed_tasks,
updated_at_in_seconds=processor.process_instance_model.updated_at_in_seconds,
@@ -105,6 +103,20 @@ class ProcessInstanceService:
title=title_value,
)
+ next_task_trying_again = next_task
+ if (
+ not next_task
+ ): # The Next Task can be requested to be a certain task, useful for parallel tasks.
+ # This may or may not work; sometimes there is no next task to complete.
+ next_task_trying_again = processor.next_task()
+
+ if next_task_trying_again is not None:
+ process_instance_api.next_task = (
+ ProcessInstanceService.spiff_task_to_api_task(
+ next_task_trying_again, add_docs_and_forms=True
+ )
+ )
+
return process_instance_api
def get_process_instance(self, process_instance_id: int) -> Any:
diff --git a/src/spiffworkflow_backend/services/process_model_service.py b/src/spiffworkflow_backend/services/process_model_service.py
index e10092d50..c650e1f55 100644
--- a/src/spiffworkflow_backend/services/process_model_service.py
+++ b/src/spiffworkflow_backend/services/process_model_service.py
@@ -34,6 +34,20 @@ class ProcessModelService(FileSystemService):
GROUP_SCHEMA = ProcessGroupSchema()
WF_SCHEMA = ProcessModelInfoSchema()
+ def is_group(self, path: str) -> bool:
+ """Is_group."""
+ group_json_path = os.path.join(path, self.CAT_JSON_FILE)
+ if os.path.exists(group_json_path):
+ return True
+ return False
+
+ def is_model(self, path: str) -> bool:
+ """Is_model."""
+ model_json_path = os.path.join(path, self.WF_JSON_FILE)
+ if os.path.exists(model_json_path):
+ return True
+ return False
+
@staticmethod
def get_batch(
items: list[T],
@@ -62,7 +76,7 @@ class ProcessModelService(FileSystemService):
def save_process_model(self, process_model: ProcessModelInfo) -> None:
"""Save_process_model."""
- spec_path = self.workflow_path(process_model)
+ spec_path = os.path.join(FileSystemService.root_path(), process_model.id)
os.makedirs(spec_path, exist_ok=True)
json_path = os.path.join(spec_path, self.WF_JSON_FILE)
with open(json_path, "w") as wf_json:
@@ -80,8 +94,9 @@ class ProcessModelService(FileSystemService):
error_code="existing_instances",
message=f"We cannot delete the model `{process_model_id}`, there are existing instances that depend on it.",
)
- process_model = self.get_process_model(process_model_id)
- path = self.workflow_path(process_model)
+ self.get_process_model(process_model_id)
+ # path = self.workflow_path(process_model)
+ path = f"{FileSystemService.root_path()}/{process_model_id}"
shutil.rmtree(path)
@classmethod
@@ -89,36 +104,43 @@ class ProcessModelService(FileSystemService):
cls, relative_path: str
) -> ProcessModelInfo:
"""Get_process_model_from_relative_path."""
- process_group_identifier = os.path.dirname(relative_path)
+ process_group_identifier, _ = os.path.split(relative_path)
process_group = cls().get_process_group(process_group_identifier)
path = os.path.join(FileSystemService.root_path(), relative_path)
return cls().__scan_spec(path, process_group=process_group)
- def get_process_model(
- self, process_model_id: str, group_id: Optional[str] = None
- ) -> ProcessModelInfo:
- """Get a process model from a model and group id."""
- if not os.path.exists(FileSystemService.root_path()):
- raise ProcessEntityNotFoundError("process_model_not_found")
+ def get_process_model(self, process_model_id: str) -> ProcessModelInfo:
+ """Get a process model from a model and group id.
- if group_id is not None:
- process_group = self.get_process_group(group_id)
- if process_group is not None:
- for process_model in process_group.process_models:
- if process_model_id == process_model.id:
- return process_model
- with os.scandir(FileSystemService.root_path()) as process_group_dirs:
- for item in process_group_dirs:
- process_group_dir = item
- if item.is_dir():
- with os.scandir(item.path) as spec_dirs:
- for sd in spec_dirs:
- if sd.name == process_model_id:
- # Now we have the process_group direcotry, and spec directory
- process_group = self.__scan_process_group(
- process_group_dir
- )
- return self.__scan_spec(sd.path, sd.name, process_group)
+ process_model_id is the full path to the model--including groups.
+ """
+ if not os.path.exists(FileSystemService.root_path()):
+ raise ProcessEntityNotFoundError("process_model_root_not_found")
+
+ model_path = os.path.join(FileSystemService.root_path(), process_model_id)
+ if self.is_model(model_path):
+ process_model = self.get_process_model_from_relative_path(process_model_id)
+ return process_model
+
+ # group_path, model_id = os.path.split(process_model_id)
+ # if group_path is not None:
+ # process_group = self.get_process_group(group_path)
+ # if process_group is not None:
+ # for process_model in process_group.process_models:
+ # if process_model_id == process_model.id:
+ # return process_model
+ # with os.scandir(FileSystemService.root_path()) as process_group_dirs:
+ # for item in process_group_dirs:
+ # process_group_dir = item
+ # if item.is_dir():
+ # with os.scandir(item.path) as spec_dirs:
+ # for sd in spec_dirs:
+ # if sd.name == process_model_id:
+ # # Now we have the process_group directory, and spec directory
+ # process_group = self.__scan_process_group(
+ # process_group_dir
+ # )
+ # return self.__scan_spec(sd.path, sd.name, process_group)
raise ProcessEntityNotFoundError("process_model_not_found")
def get_process_models(
@@ -148,10 +170,24 @@ class ProcessModelService(FileSystemService):
def get_process_group(self, process_group_id: str) -> ProcessGroup:
"""Look for a given process_group, and return it."""
if os.path.exists(FileSystemService.root_path()):
- with os.scandir(FileSystemService.root_path()) as directory_items:
- for item in directory_items:
- if item.is_dir() and item.name == process_group_id:
- return self.__scan_process_group(item)
+ process_group_path = os.path.join(
+ FileSystemService.root_path(), process_group_id
+ )
+ if self.is_group(process_group_path):
+ return self.__scan_process_group(process_group_path)
+ # nested_groups = []
+ # process_group_dir = os.scandir(process_group_path)
+ # for item in process_group_dir:
+ # if self.is_group(item.path):
+ # nested_group = self.get_process_group(os.path.join(process_group_path, item.path))
+ # nested_groups.append(nested_group)
+ # elif self.is_model(item.path):
+ # print("get_process_group: ")
+ # return self.__scan_process_group(process_group_path)
+ # with os.scandir(FileSystemService.root_path()) as directory_items:
+ # for item in directory_items:
+ # if item.is_dir() and item.name == process_group_id:
+ # return self.__scan_process_group(item)
raise ProcessEntityNotFoundError(
"process_group_not_found", f"Process Group Id: {process_group_id}"
@@ -202,13 +238,15 @@ class ProcessModelService(FileSystemService):
with os.scandir(FileSystemService.root_path()) as directory_items:
process_groups = []
for item in directory_items:
- if item.is_dir() and not item.name[0] == ".":
- process_groups.append(self.__scan_process_group(item))
+ # if item.is_dir() and not item.name[0] == ".":
+ if item.is_dir() and self.is_group(item): # type: ignore
+ scanned_process_group = self.__scan_process_group(item.path)
+ process_groups.append(scanned_process_group)
return process_groups
- def __scan_process_group(self, dir_item: os.DirEntry) -> ProcessGroup:
- """Reads the process_group.json file, and any workflow directories."""
- cat_path = os.path.join(dir_item.path, self.CAT_JSON_FILE)
+ def __scan_process_group(self, dir_path: str) -> ProcessGroup:
+ """Reads the process_group.json file, and any nested directories."""
+ cat_path = os.path.join(dir_path, self.CAT_JSON_FILE)
if os.path.exists(cat_path):
with open(cat_path) as cat_json:
data = json.load(cat_json)
@@ -216,26 +254,34 @@ class ProcessModelService(FileSystemService):
if process_group is None:
raise ApiError(
error_code="process_group_could_not_be_loaded_from_disk",
- message=f"We could not load the process_group from disk from: {dir_item}",
+ message=f"We could not load the process_group from disk from: {dir_path}",
)
else:
+ process_group_id = dir_path.replace(FileSystemService.root_path(), "")
process_group = ProcessGroup(
- id=dir_item.name,
- display_name=dir_item.name,
+ id=process_group_id,
+ display_name=process_group_id,
display_order=10000,
admin=False,
)
with open(cat_path, "w") as wf_json:
json.dump(self.GROUP_SCHEMA.dump(process_group), wf_json, indent=4)
- with os.scandir(dir_item.path) as workflow_dirs:
+ with os.scandir(dir_path) as nested_items:
process_group.process_models = []
- for item in workflow_dirs:
- if item.is_dir():
- process_group.process_models.append(
- self.__scan_spec(
- item.path, item.name, process_group=process_group
+ for nested_item in nested_items:
+ if nested_item.is_dir():
+ # TODO: check whether this is a group or model
+ if self.is_group(nested_item.path):
+ # This is a nested group
+ ...
+ elif self.is_model(nested_item.path):
+ process_group.process_models.append(
+ self.__scan_spec(
+ nested_item.path,
+ nested_item.name,
+ process_group=process_group,
+ )
)
- )
process_group.process_models.sort()
return process_group
@@ -251,6 +297,8 @@ class ProcessModelService(FileSystemService):
if os.path.exists(spec_path):
with open(spec_path) as wf_json:
data = json.load(wf_json)
+ if "process_group_id" in data:
+ data.pop("process_group_id")
spec = ProcessModelInfo(**data)
if spec is None:
raise ApiError(
@@ -274,5 +322,5 @@ class ProcessModelService(FileSystemService):
with open(spec_path, "w") as wf_json:
json.dump(self.WF_SCHEMA.dump(spec), wf_json, indent=4)
if process_group:
- spec.process_group_id = process_group.id
+ spec.process_group = process_group.id
return spec
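# Stand-alone sketch (not part of the patch) of the directory probing that the new
# is_group()/is_model() helpers perform: a directory is a process group if it contains
# the group JSON file and a process model if it contains the model JSON file. The file
# names below are assumed stand-ins; the service reads them from its CAT_JSON_FILE and
# WF_JSON_FILE constants.
import os

GROUP_JSON = "process_group.json"  # assumed value of ProcessModelService.CAT_JSON_FILE
MODEL_JSON = "process_model.json"  # assumed value of ProcessModelService.WF_JSON_FILE


def is_group(path: str) -> bool:
    return os.path.exists(os.path.join(path, GROUP_JSON))


def is_model(path: str) -> bool:
    return os.path.exists(os.path.join(path, MODEL_JSON))


def classify(root: str, relative_id: str) -> str:
    """Resolve an id like 'group_a/group_b/sample' the way get_process_model() now does."""
    path = os.path.join(root, relative_id)
    if is_model(path):
        return "model"
    if is_group(path):
        return "group"
    return "not found"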
diff --git a/src/spiffworkflow_backend/services/spec_file_service.py b/src/spiffworkflow_backend/services/spec_file_service.py
index c6b86d3e9..92e905bf3 100644
--- a/src/spiffworkflow_backend/services/spec_file_service.py
+++ b/src/spiffworkflow_backend/services/spec_file_service.py
@@ -48,7 +48,8 @@ class SpecFileService(FileSystemService):
extension_filter: str = "",
) -> List[File]:
"""Return all files associated with a workflow specification."""
- path = SpecFileService.workflow_path(process_model_info)
+ # path = SpecFileService.workflow_path(process_model_info)
+ path = os.path.join(FileSystemService.root_path(), process_model_info.id)
files = SpecFileService._get_files(path, file_name)
if extension_filter != "":
files = list(
@@ -105,7 +106,10 @@ class SpecFileService(FileSystemService):
) -> File:
"""Update_file."""
SpecFileService.assert_valid_file_name(file_name)
- file_path = SpecFileService.file_path(process_model_info, file_name)
+ # file_path = SpecFileService.file_path(process_model_info, file_name)
+ file_path = os.path.join(
+ FileSystemService.root_path(), process_model_info.id, file_name
+ )
SpecFileService.write_file_data_to_system(file_path, binary_data)
file = SpecFileService.to_file_object(file_name, file_path)
@@ -129,7 +133,10 @@ class SpecFileService(FileSystemService):
@staticmethod
def get_data(process_model_info: ProcessModelInfo, file_name: str) -> bytes:
"""Get_data."""
- file_path = SpecFileService.file_path(process_model_info, file_name)
+ # file_path = SpecFileService.file_path(process_model_info, file_name)
+ file_path = os.path.join(
+ FileSystemService.root_path(), process_model_info.id, file_name
+ )
if not os.path.exists(file_path):
raise ProcessModelFileNotFoundError(
f"No file found with name {file_name} in {process_model_info.display_name}"
@@ -163,7 +170,8 @@ class SpecFileService(FileSystemService):
# for lf in lookup_files:
# session.query(LookupDataModel).filter_by(lookup_file_model_id=lf.id).delete()
# session.query(LookupFileModel).filter_by(id=lf.id).delete()
- file_path = SpecFileService.file_path(spec, file_name)
+ # file_path = SpecFileService.file_path(spec, file_name)
+ file_path = os.path.join(FileSystemService.root_path(), spec.id, file_name)
os.remove(file_path)
@staticmethod
@@ -367,9 +375,8 @@ class SpecFileService(FileSystemService):
process_model_info: ProcessModelInfo, bpmn_file_name: str, et_root: _Element
) -> None:
"""Store_bpmn_process_identifiers."""
- relative_process_model_path = SpecFileService.process_model_relative_path(
- process_model_info
- )
+ relative_process_model_path = process_model_info.id
+
relative_bpmn_file_path = os.path.join(
relative_process_model_path, bpmn_file_name
)
@@ -462,10 +469,12 @@ class SpecFileService(FileSystemService):
)
if message_triggerable_process_model is None:
- message_triggerable_process_model = MessageTriggerableProcessModel(
- message_model_id=message_model.id,
- process_model_identifier=process_model_info.id,
- process_group_identifier=process_model_info.process_group_id,
+ message_triggerable_process_model = (
+ MessageTriggerableProcessModel(
+ message_model_id=message_model.id,
+ process_model_identifier=process_model_info.id,
+ process_group_identifier="process_group_identifier",
+ )
)
db.session.add(message_triggerable_process_model)
db.session.commit()
@@ -473,12 +482,11 @@ class SpecFileService(FileSystemService):
if (
message_triggerable_process_model.process_model_identifier
!= process_model_info.id
- or message_triggerable_process_model.process_group_identifier
- != process_model_info.process_group_id
+ # or message_triggerable_process_model.process_group_identifier
+ # != process_model_info.process_group_id
):
raise ValidationException(
- "Message model is already used to start process model"
- f"'{process_model_info.process_group_id}/{process_model_info.id}'"
+ f"Message model is already used to start process model {process_model_info.id}"
)
for child in et_root:
diff --git a/tests/data/manual_task/manual_task.bpmn b/tests/data/manual_task/manual_task.bpmn
new file mode 100644
index 000000000..aefbb376b
--- /dev/null
+++ b/tests/data/manual_task/manual_task.bpmn
@@ -0,0 +1,41 @@
+  [BPMN 2.0 XML, 41 lines; markup omitted. A start event feeds Flow_1xlck7g into a
+   manual task whose instructions read "## Hello", which flows via Flow_0nnh2x9 to an
+   end event, followed by the matching BPMN diagram layout elements.]
diff --git a/tests/data/simple_script/simple_script.bpmn b/tests/data/simple_script/simple_script.bpmn
index ab573ce41..6e14807fa 100644
--- a/tests/data/simple_script/simple_script.bpmn
+++ b/tests/data/simple_script/simple_script.bpmn
@@ -1,67 +1,87 @@
  [BPMN 2.0 XML diff; markup omitted. The single script task that set a = 1, b = 2,
   c = a + b and norris=fact_service(type='norris') is split into two script tasks
   (one sets a = 1 and b = 2, the second sets c = a + b), a unit test is attached with
   input {'a': 1, 'b': 2} and expected output {'a': 1, 'b': 2, 'c': 3}, the manual
   task's markdown changes from "## Display Data" with "### a", "### b", "### c"
   sections to "## Data" with "### A", "### B", "### C" sections rendering {{ a }},
   {{ b }} and {{ c }}, sequence flows are renamed (Flow_0r3ua0i, Flow_19g4f88,
   Flow_152cqfw, Flow_1vqk60p), and the BPMN diagram layout is updated.]
diff --git a/tests/spiffworkflow_backend/helpers/base_test.py b/tests/spiffworkflow_backend/helpers/base_test.py
index a573e8a3e..794e1cea6 100644
--- a/tests/spiffworkflow_backend/helpers/base_test.py
+++ b/tests/spiffworkflow_backend/helpers/base_test.py
@@ -25,6 +25,7 @@ from spiffworkflow_backend.models.process_model import ProcessModelInfo
from spiffworkflow_backend.models.process_model import ProcessModelInfoSchema
from spiffworkflow_backend.models.user import UserModel
from spiffworkflow_backend.services.authorization_service import AuthorizationService
+from spiffworkflow_backend.services.file_system_service import FileSystemService
from spiffworkflow_backend.services.process_model_service import ProcessModelService
from spiffworkflow_backend.services.user_service import UserService
@@ -34,6 +35,46 @@ from spiffworkflow_backend.services.user_service import UserService
class BaseTest:
"""BaseTest."""
+ def basic_test_setup(
+ self,
+ client: FlaskClient,
+ user: UserModel,
+ process_group_id: Optional[str] = "test_group",
+ process_model_id: Optional[str] = "random_fact",
+ bpmn_file_name: Optional[str] = None,
+ bpmn_file_location: Optional[str] = None,
+ ) -> str:
+ """Creates a process group.
+
+ Creates a process model within the group.
+ Adds a bpmn file to the model.
+ """
+ process_group_display_name = process_group_id or ""
+ process_group_description = process_group_id or ""
+ process_model_identifier = f"{process_group_id}/{process_model_id}"
+ if bpmn_file_location is None:
+ bpmn_file_location = process_model_id
+
+ self.create_process_group(
+ client, user, process_group_description, process_group_display_name
+ )
+
+ self.create_process_model_with_api(
+ client,
+ process_model_id=process_model_identifier,
+ process_model_display_name=process_group_display_name,
+ process_model_description=process_group_description,
+ user=user,
+ )
+
+ load_test_spec(
+ process_model_id=process_model_identifier,
+ bpmn_file_name=bpmn_file_name,
+ process_model_source_directory=bpmn_file_location,
+ )
+
+ return process_model_identifier
+
@staticmethod
def find_or_create_user(username: str = "test_user_1") -> UserModel:
"""Find_or_create_user."""
@@ -67,17 +108,19 @@ class BaseTest:
open_id_client_secret_key,
)
+ @staticmethod
def create_process_instance(
- self,
client: FlaskClient,
- test_process_group_id: str,
test_process_model_id: str,
headers: Dict[str, str],
) -> TestResponse:
- """Create_process_instance."""
- load_test_spec(test_process_model_id, process_group_id=test_process_group_id)
+ """Create_process_instance.
+
+ There must be an existing process model to instantiate.
+ """
+ modified_process_model_id = test_process_model_id.replace("/", ":")
response = client.post(
- f"/v1.0/process-models/{test_process_group_id}/{test_process_model_id}/process-instances",
+ f"/v1.0/process-models/{modified_process_model_id}/process-instances",
headers=headers,
)
assert response.status_code == 201
@@ -86,8 +129,7 @@ class BaseTest:
def create_process_model_with_api(
self,
client: FlaskClient,
- process_group_id: Optional[str] = None,
- process_model_id: str = "make_cookies",
+ process_model_id: Optional[str] = None,
process_model_display_name: str = "Cooooookies",
process_model_description: str = "Om nom nom delicious cookies",
fault_or_suspend_on_exception: str = NotificationType.suspend.value,
@@ -97,65 +139,77 @@ class BaseTest:
user: Optional[UserModel] = None,
) -> TestResponse:
"""Create_process_model."""
- process_model_service = ProcessModelService()
+ if process_model_id is not None:
- # make sure we have a group
- if process_group_id is None:
- process_group_tmp = ProcessGroup(
- id="test_cat",
- display_name="Test Category",
- display_order=0,
- admin=False,
- )
- process_group = process_model_service.add_process_group(process_group_tmp)
+ # make sure we have a group
+ process_group_id, _ = os.path.split(process_model_id)
+ process_group_path = f"{FileSystemService.root_path()}/{process_group_id}"
+ if ProcessModelService().is_group(process_group_path):
+
+ if exception_notification_addresses is None:
+ exception_notification_addresses = []
+
+ model = ProcessModelInfo(
+ id=process_model_id,
+ display_name=process_model_display_name,
+ description=process_model_description,
+ is_review=False,
+ primary_process_id=primary_process_id,
+ primary_file_name=primary_file_name,
+ fault_or_suspend_on_exception=fault_or_suspend_on_exception,
+ exception_notification_addresses=exception_notification_addresses,
+ )
+ if user is None:
+ user = self.find_or_create_user()
+
+ response = client.post(
+ "/v1.0/process-models",
+ content_type="application/json",
+ data=json.dumps(ProcessModelInfoSchema().dump(model)),
+ headers=self.logged_in_headers(user),
+ )
+ assert response.status_code == 201
+ return response
+
+ else:
+ raise Exception("You must create the group first")
else:
- process_group = ProcessModelService().get_process_group(process_group_id)
-
- if exception_notification_addresses is None:
- exception_notification_addresses = []
- model = ProcessModelInfo(
- id=process_model_id,
- display_name=process_model_display_name,
- description=process_model_description,
- process_group_id=process_group.id,
- is_review=False,
- primary_process_id=primary_process_id,
- primary_file_name=primary_file_name,
- fault_or_suspend_on_exception=fault_or_suspend_on_exception,
- exception_notification_addresses=exception_notification_addresses,
- )
- if user is None:
- user = self.find_or_create_user()
-
- response = client.post(
- "/v1.0/process-models",
- content_type="application/json",
- data=json.dumps(ProcessModelInfoSchema().dump(model)),
- headers=self.logged_in_headers(user),
- )
- assert response.status_code == 201
- return response
+ raise Exception(
+ "You must include the process_model_id, which must be a path to the model"
+ )
def create_spec_file(
self,
client: FlaskClient,
- process_group_id: str = "random_fact",
- process_model_id: str = "random_fact",
+ process_model_id: str,
+ process_model_location: Optional[str] = None,
process_model: Optional[ProcessModelInfo] = None,
file_name: str = "random_fact.svg",
file_data: bytes = b"abcdef",
user: Optional[UserModel] = None,
) -> Any:
- """Test_create_spec_file."""
+ """Test_create_spec_file.
+
+ Adds a bpmn file to the model.
+ process_model_id is the destination path.
+ process_model_location is the source path.
+
+ Because of permissions, a user might be required now; not sure yet.
+ """
+ if process_model_location is None:
+ process_model_location = file_name.split(".")[0]
if process_model is None:
process_model = load_test_spec(
- process_model_id, process_group_id=process_group_id
+ process_model_id=process_model_id,
+ bpmn_file_name=file_name,
+ process_model_source_directory=process_model_location,
)
data = {"file": (io.BytesIO(file_data), file_name)}
if user is None:
user = self.find_or_create_user()
+ modified_process_model_id = process_model.id.replace("/", ":")
response = client.post(
- f"/v1.0/process-models/{process_model.process_group_id}/{process_model.id}/files",
+ f"/v1.0/process-models/{modified_process_model_id}/files",
data=data,
follow_redirects=True,
content_type="multipart/form-data",
@@ -168,7 +222,7 @@ class BaseTest:
# assert "image/svg+xml" == file["content_type"]
response = client.get(
- f"/v1.0/process-models/{process_model.process_group_id}/{process_model.id}/files/{file_name}",
+ f"/v1.0/process-models/{modified_process_model_id}/files/{file_name}",
headers=self.logged_in_headers(user),
)
assert response.status_code == 200
@@ -221,7 +275,7 @@ class BaseTest:
status=status,
process_initiator=user,
process_model_identifier=process_model.id,
- process_group_identifier=process_model.process_group_id,
+ process_group_identifier="",
updated_at_in_seconds=round(time.time()),
start_in_seconds=current_time - (3600 * 1),
end_in_seconds=current_time - (3600 * 1 - 20),
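# Hedged usage sketch (not part of the patch) for BaseTest.basic_test_setup() defined
# above: one call creates the process group, creates the model through the API, and
# loads the BPMN file, returning the path-style identifier the other helpers expect.
# Fixture and import names follow the tests elsewhere in this diff; the group, model,
# and source-directory names are invented examples.
from flask.app import Flask
from flask.testing import FlaskClient
from tests.spiffworkflow_backend.helpers.base_test import BaseTest

from spiffworkflow_backend.models.user import UserModel


class TestWithBasicSetup(BaseTest):
    def test_can_start_an_instance(
        self,
        app: Flask,
        client: FlaskClient,
        with_db_and_bpmn_file_cleanup: None,
        with_super_admin_user: UserModel,
    ) -> None:
        process_model_identifier = self.basic_test_setup(
            client,
            with_super_admin_user,
            process_group_id="example_group",
            process_model_id="example_model",
            bpmn_file_location="sample",  # directory under tests/data holding the bpmn file
        )
        headers = self.logged_in_headers(with_super_admin_user)
        response = self.create_process_instance(
            client, process_model_identifier, headers
        )
        assert response.status_code == 201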
diff --git a/tests/spiffworkflow_backend/helpers/example_data.py b/tests/spiffworkflow_backend/helpers/example_data.py
index a94be48e1..ac1e8dc8b 100644
--- a/tests/spiffworkflow_backend/helpers/example_data.py
+++ b/tests/spiffworkflow_backend/helpers/example_data.py
@@ -13,27 +13,30 @@ from spiffworkflow_backend.services.spec_file_service import SpecFileService
class ExampleDataLoader:
"""ExampleDataLoader."""
+ @staticmethod
def create_spec(
- self,
process_model_id: str,
display_name: str = "",
description: str = "",
- process_group_id: str = "",
display_order: int = 0,
- from_tests: bool = False,
+ # from_tests: bool = False,
bpmn_file_name: Optional[str] = None,
process_model_source_directory: Optional[str] = None,
) -> ProcessModelInfo:
- """Assumes that a directory exists in static/bpmn with the same name as the given process_model_id.
+ """Assumes that process_model_source_directory exists in static/bpmn and contains bpmn_file_name.
- further assumes that the [process_model_id].bpmn is the primary file for the process model.
- returns an array of data models to be added to the database.
+ further assumes that bpmn_file_name is the primary file for the process model.
+
+ If bpmn_file_name is None we load all files in process_model_source_directory;
+ otherwise, we only load bpmn_file_name.
"""
+ if process_model_source_directory is None:
+ raise Exception("You must include `process_model_source_directory`.")
+
spec = ProcessModelInfo(
id=process_model_id,
display_name=display_name,
description=description,
- process_group_id=process_group_id,
display_order=display_order,
is_review=False,
)
@@ -55,25 +58,16 @@ class ExampleDataLoader:
if bpmn_file_name:
file_name_matcher = bpmn_file_name_with_extension
- file_glob = ""
- if from_tests:
- file_glob = os.path.join(
- current_app.instance_path,
- "..",
- "..",
- "tests",
- "data",
- process_model_source_directory_to_use,
- file_name_matcher,
- )
- else:
- file_glob = os.path.join(
- current_app.root_path,
- "static",
- "bpmn",
- process_model_source_directory_to_use,
- file_name_matcher,
- )
+ # file_glob = ""
+ file_glob = os.path.join(
+ current_app.root_path,
+ "..",
+ "..",
+ "tests",
+ "data",
+ process_model_source_directory_to_use,
+ file_name_matcher,
+ )
files = glob.glob(file_glob)
for file_path in files:
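# Illustrative sketch (not part of the patch) of where ExampleDataLoader.create_spec()
# now resolves BPMN sources: always tests/data/<process_model_source_directory>, with the
# old from_tests branch gone. app_root stands in for current_app.root_path, and the
# default matcher shown here is an assumption; the diff only shows it being replaced
# when bpmn_file_name is given.
import glob
import os

app_root = "/workspace/spiffworkflow-backend/src/spiffworkflow_backend"  # example value
process_model_source_directory = "simple_script"
file_name_matcher = "*.bpmn"  # assumed default; becomes the bpmn file name when provided

file_glob = os.path.join(
    app_root, "..", "..", "tests", "data",
    process_model_source_directory, file_name_matcher,
)
print(glob.glob(file_glob))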
diff --git a/tests/spiffworkflow_backend/helpers/test_data.py b/tests/spiffworkflow_backend/helpers/test_data.py
index fd418aa22..d6b4f730c 100644
--- a/tests/spiffworkflow_backend/helpers/test_data.py
+++ b/tests/spiffworkflow_backend/helpers/test_data.py
@@ -37,40 +37,17 @@ def assure_process_group_exists(process_group_id: Optional[str] = None) -> Proce
def load_test_spec(
process_model_id: str,
- process_group_id: Optional[str] = None,
bpmn_file_name: Optional[str] = None,
process_model_source_directory: Optional[str] = None,
) -> ProcessModelInfo:
- """Loads a process model into the bpmn dir based on a directory in tests/data."""
- process_group = None
- process_model_service = ProcessModelService()
- if process_group_id is None:
- process_group_id = "test_process_group_id"
- process_group = assure_process_group_exists(process_group_id)
- process_group_id = process_group.id
+ """Loads a bpmn file into the process model dir based on a directory in tests/data."""
+ if process_model_source_directory is None:
+ raise Exception("You must inclode a `process_model_source_directory`.")
- try:
- return process_model_service.get_process_model(
- process_model_id, group_id=process_group_id
- )
- except ProcessEntityNotFoundError:
- spec = ExampleDataLoader().create_spec(
- process_model_id=process_model_id,
- from_tests=True,
- display_name=process_model_id,
- process_group_id=process_group_id,
- bpmn_file_name=bpmn_file_name,
- process_model_source_directory=process_model_source_directory,
- )
- return spec
-
-
-# def user_info_to_query_string(user_info, redirect_url):
-# query_string_list = []
-# items = user_info.items()
-# for key, value in items:
-# query_string_list.append('%s=%s' % (key, urllib.parse.quote(value)))
-#
-# query_string_list.append('redirect_url=%s' % redirect_url)
-#
-# return '?%s' % '&'.join(query_string_list)
+ spec = ExampleDataLoader.create_spec(
+ process_model_id=process_model_id,
+ display_name=process_model_id,
+ bpmn_file_name=bpmn_file_name,
+ process_model_source_directory=process_model_source_directory,
+ )
+ return spec
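# Minimal usage sketch (not part of the patch) for the reworked load_test_spec() above:
# callers now pass the full path-style model id plus the tests/data directory to copy
# from, and omitting process_model_source_directory raises. The ids below are examples.
from tests.spiffworkflow_backend.helpers.test_data import load_test_spec

process_model = load_test_spec(
    process_model_id="runs_without_input/sample",  # "<group path>/<model id>"
    bpmn_file_name="sample.bpmn",                  # or None to load every file in the directory
    process_model_source_directory="sample",       # directory under tests/data
)
assert process_model.id == "runs_without_input/sample"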
diff --git a/tests/spiffworkflow_backend/integration/test_logging_service.py b/tests/spiffworkflow_backend/integration/test_logging_service.py
index a8d2720f6..02d8ade17 100644
--- a/tests/spiffworkflow_backend/integration/test_logging_service.py
+++ b/tests/spiffworkflow_backend/integration/test_logging_service.py
@@ -19,20 +19,45 @@ class TestLoggingService(BaseTest):
"""Test_process_instance_run."""
process_group_id = "test_logging_spiff_logger"
process_model_id = "simple_script"
+ self.create_process_group(
+ client=client, user=with_super_admin_user, process_group_id=process_group_id
+ )
+ process_model_identifier = f"{process_group_id}/{process_model_id}"
+ # create the model
+ self.create_process_model_with_api(
+ client=client,
+ process_model_id=process_model_identifier,
+ process_model_display_name="Simple Script",
+ process_model_description="Simple Script",
+ user=with_super_admin_user,
+ )
+
+ bpmn_file_name = "simple_script.bpmn"
+ bpmn_file_data_bytes = self.get_test_data_file_contents(
+ bpmn_file_name, "simple_script"
+ )
+ # add bpmn to the model
+ self.create_spec_file(
+ client=client,
+ process_model_id=process_model_identifier,
+ file_name=bpmn_file_name,
+ file_data=bpmn_file_data_bytes,
+ user=with_super_admin_user,
+ )
headers = self.logged_in_headers(with_super_admin_user)
response = self.create_process_instance(
- client, process_group_id, process_model_id, headers
+ client, process_model_identifier, headers
)
assert response.json is not None
process_instance_id = response.json["id"]
response = client.post(
- f"/v1.0/process-models/{process_group_id}/{process_model_id}/process-instances/{process_instance_id}/run",
+ f"/v1.0/process-instances/{process_instance_id}/run",
headers=headers,
)
assert response.status_code == 200
log_response = client.get(
- f"/v1.0/process-models/{process_group_id}/{process_model_id}/process-instances/{process_instance_id}/logs",
+ f"/v1.0/process-instances/{process_instance_id}/logs",
headers=headers,
)
assert log_response.status_code == 200
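# Sketch (not part of the patch) of the flattened instance endpoints exercised above:
# running an instance and listing its logs are now keyed only by process_instance_id,
# with no group or model segment in the URL. Written against Flask's test client as in
# the surrounding tests; the helper name is illustrative.
from typing import Dict

from flask.testing import FlaskClient


def run_instance_and_fetch_logs(
    client: FlaskClient, headers: Dict[str, str], process_instance_id: int
) -> None:
    run_response = client.post(
        f"/v1.0/process-instances/{process_instance_id}/run",
        headers=headers,
    )
    assert run_response.status_code == 200

    log_response = client.get(
        f"/v1.0/process-instances/{process_instance_id}/logs",
        headers=headers,
    )
    assert log_response.status_code == 200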
diff --git a/tests/spiffworkflow_backend/integration/test_nested_groups.py b/tests/spiffworkflow_backend/integration/test_nested_groups.py
new file mode 100644
index 000000000..ef89a561a
--- /dev/null
+++ b/tests/spiffworkflow_backend/integration/test_nested_groups.py
@@ -0,0 +1,174 @@
+"""Test_nested_groups."""
+import json
+
+from flask.app import Flask
+from flask.testing import FlaskClient
+from tests.spiffworkflow_backend.helpers.base_test import BaseTest
+
+from spiffworkflow_backend.models.process_group import ProcessGroup
+from spiffworkflow_backend.models.process_group import ProcessGroupSchema
+from spiffworkflow_backend.models.process_model import ProcessModelInfo
+from spiffworkflow_backend.models.process_model import ProcessModelInfoSchema
+from spiffworkflow_backend.models.user import UserModel
+
+
+class TestNestedGroups(BaseTest):
+ """TestNestedGroups."""
+
+ def test_nested_groups(
+ self,
+ app: Flask,
+ client: FlaskClient,
+ with_db_and_bpmn_file_cleanup: None,
+ ) -> None:
+ """Test_nested_groups."""
+ # /process-groups/{process_group_path}/show
+ target_uri = "/v1.0/process-groups/group_a,group_b"
+ user = self.find_or_create_user()
+ self.add_permissions_to_user(
+ user, target_uri=target_uri, permission_names=["read"]
+ )
+ response = client.get( # noqa: F841
+ target_uri, headers=self.logged_in_headers(user)
+ )
+ print("test_nested_groups")
+
+ def test_add_nested_group(
+ self,
+ app: Flask,
+ client: FlaskClient,
+ with_db_and_bpmn_file_cleanup: None,
+ with_super_admin_user: UserModel,
+ ) -> None:
+ """Test_add_nested_group."""
+ # user = self.find_or_create_user()
+ # self.add_permissions_to_user(
+ # user, target_uri=target_uri, permission_names=["read", "create"]
+ # )
+ process_group_a = ProcessGroup(
+ id="group_a",
+ display_name="Group A",
+ display_order=0,
+ admin=False,
+ )
+ response_a = client.post( # noqa: F841
+ "/v1.0/process-groups",
+ headers=self.logged_in_headers(with_super_admin_user),
+ content_type="application/json",
+ data=json.dumps(ProcessGroupSchema().dump(process_group_a)),
+ )
+ process_group_b = ProcessGroup(
+ id="group_a/group_b",
+ display_name="Group B",
+ display_order=0,
+ admin=False,
+ )
+ response_b = client.post( # noqa: F841
+ "/v1.0/process-groups",
+ headers=self.logged_in_headers(with_super_admin_user),
+ content_type="application/json",
+ data=json.dumps(ProcessGroupSchema().dump(process_group_b)),
+ )
+ process_group_c = ProcessGroup(
+ id="group_a/group_b/group_c",
+ display_name="Group C",
+ display_order=0,
+ admin=False,
+ )
+ response_c = client.post( # noqa: F841
+ "/v1.0/process-groups",
+ headers=self.logged_in_headers(with_super_admin_user),
+ content_type="application/json",
+ data=json.dumps(ProcessGroupSchema().dump(process_group_c)),
+ )
+
+ print("test_add_nested_group")
+
+ def test_process_model_add(
+ self,
+ app: Flask,
+ client: FlaskClient,
+ with_db_and_bpmn_file_cleanup: None,
+ with_super_admin_user: UserModel,
+ ) -> None:
+ """Test_process_model_add."""
+ process_group_a = ProcessGroup(
+ id="group_a",
+ display_name="Group A",
+ display_order=0,
+ admin=False,
+ )
+ response_a = client.post( # noqa: F841
+ "/v1.0/process-groups",
+ headers=self.logged_in_headers(with_super_admin_user),
+ content_type="application/json",
+ data=json.dumps(ProcessGroupSchema().dump(process_group_a)),
+ )
+ process_group_b = ProcessGroup(
+ id="group_a/group_b",
+ display_name="Group B",
+ display_order=0,
+ admin=False,
+ )
+ response_b = client.post( # noqa: F841
+ "/v1.0/process-groups",
+ headers=self.logged_in_headers(with_super_admin_user),
+ content_type="application/json",
+ data=json.dumps(ProcessGroupSchema().dump(process_group_b)),
+ )
+ process_model = ProcessModelInfo(
+ id="process_model",
+ display_name="Process Model",
+ description="Process Model",
+ primary_file_name="primary_file.bpmn",
+ primary_process_id="primary_process_id",
+ display_order=0,
+ )
+ model_response = client.post( # noqa: F841
+ "v1.0/process-models",
+ headers=self.logged_in_headers(with_super_admin_user),
+ content_type="application/json",
+ data=json.dumps(ProcessModelInfoSchema().dump(process_model)),
+ )
+ print("test_process_model_add")
+
+ def test_process_group_show(
+ self,
+ app: Flask,
+ client: FlaskClient,
+ with_db_and_bpmn_file_cleanup: None,
+ with_super_admin_user: UserModel,
+ ) -> None:
+ """Test_process_group_show."""
+ # target_uri = "/process-groups/{process_group_id}"
+ # user = self.find_or_create_user("testadmin1")
+ # self.add_permissions_to_user(
+ # user, target_uri="v1.0/process-groups", permission_names=["read", "create"]
+ # )
+ # self.add_permissions_to_user(
+ # user, target_uri="/process-groups/{process_group_id}", permission_names=["read", "create"]
+ # )
+
+ process_group_a = ProcessGroup(
+ id="group_a",
+ display_name="Group A",
+ display_order=0,
+ admin=False,
+ )
+ response_create_a = client.post( # noqa: F841
+ "/v1.0/process-groups",
+ headers=self.logged_in_headers(with_super_admin_user),
+ content_type="application/json",
+ data=json.dumps(ProcessGroupSchema().dump(process_group_a)),
+ )
+
+ target_uri = "/v1.0/process-groups/group_a"
+ user = self.find_or_create_user()
+ self.add_permissions_to_user(
+ user, target_uri=target_uri, permission_names=["read"]
+ )
+ response = client.get( # noqa: F841
+ target_uri, headers=self.logged_in_headers(user)
+ )
+
+ print("test_process_group_show: ")
diff --git a/tests/spiffworkflow_backend/integration/test_process_api.py b/tests/spiffworkflow_backend/integration/test_process_api.py
index bad47ce9f..91355e0e0 100644
--- a/tests/spiffworkflow_backend/integration/test_process_api.py
+++ b/tests/spiffworkflow_backend/integration/test_process_api.py
@@ -1,6 +1,7 @@
"""Test Process Api Blueprint."""
import io
import json
+import os
import time
from typing import Any
@@ -30,6 +31,9 @@ from spiffworkflow_backend.services.file_system_service import FileSystemService
from spiffworkflow_backend.services.process_instance_processor import (
ProcessInstanceProcessor,
)
+from spiffworkflow_backend.services.process_instance_service import (
+ ProcessInstanceService,
+)
from spiffworkflow_backend.services.process_model_service import ProcessModelService
# from spiffworkflow_backend.services.git_service import GitService
@@ -108,10 +112,19 @@ class TestProcessApi(BaseTest):
with_super_admin_user: UserModel,
) -> None:
"""Test_add_new_process_model."""
- process_model_identifier = "sample"
- model_display_name = "Sample"
- model_description = "The sample"
+ process_group_id = "test_process_group"
+ process_group_display_name = "Test Process Group"
+ # creates the group directory, and the json file
+ self.create_process_group(
+ client, with_super_admin_user, process_group_id, process_group_display_name
+ )
+ process_model_id = "sample"
+ model_display_name = "Sample"
+ model_description = "The Sample"
+ process_model_identifier = f"{process_group_id}/{process_model_id}"
+
+ # creates the model directory, and adds the json file
self.create_process_model_with_api(
client,
process_model_id=process_model_identifier,
@@ -120,23 +133,27 @@ class TestProcessApi(BaseTest):
user=with_super_admin_user,
)
process_model = ProcessModelService().get_process_model(
- process_model_identifier
+ process_model_identifier,
)
assert model_display_name == process_model.display_name
assert 0 == process_model.display_order
assert 1 == len(ProcessModelService().get_process_groups())
+ # add bpmn file to the model
bpmn_file_name = "sample.bpmn"
bpmn_file_data_bytes = self.get_test_data_file_contents(
bpmn_file_name, "sample"
)
self.create_spec_file(
client,
+ process_model_id=process_model.id,
+ process_model_location="sample",
+ process_model=process_model,
file_name=bpmn_file_name,
file_data=bpmn_file_data_bytes,
- process_model=process_model,
user=with_super_admin_user,
)
+ # get the model, assert that primary is set
process_model = ProcessModelService().get_process_model(
process_model_identifier
)
@@ -151,16 +168,26 @@ class TestProcessApi(BaseTest):
with_super_admin_user: UserModel,
) -> None:
"""Test_primary_process_id_updates_via_xml."""
- process_model_identifier = "sample"
+ process_group_id = "test_group"
+ process_model_id = "sample"
+ process_model_identifier = f"{process_group_id}/{process_model_id}"
initial_primary_process_id = "sample"
terminal_primary_process_id = "new_process_id"
+ self.create_process_group(
+ client=client, user=with_super_admin_user, process_group_id=process_group_id
+ )
- process_model = load_test_spec(process_model_id=process_model_identifier)
+ bpmn_file_name = f"{process_model_id}.bpmn"
+ bpmn_file_source_directory = process_model_id
+ process_model = load_test_spec(
+ process_model_id=process_model_identifier,
+ bpmn_file_name=bpmn_file_name,
+ process_model_source_directory=process_model_id,
+ )
assert process_model.primary_process_id == initial_primary_process_id
- bpmn_file_name = "sample.bpmn"
bpmn_file_data_bytes = self.get_test_data_file_contents(
- bpmn_file_name, "sample"
+ bpmn_file_name, bpmn_file_source_directory
)
bpmn_file_data_string = bpmn_file_data_bytes.decode("utf-8")
old_string = f'bpmn:process id="{initial_primary_process_id}"'
@@ -171,8 +198,9 @@ class TestProcessApi(BaseTest):
updated_bpmn_file_data_bytes = bytearray(updated_bpmn_file_data_string, "utf-8")
data = {"file": (io.BytesIO(updated_bpmn_file_data_bytes), bpmn_file_name)}
+ modified_process_model_id = process_model_identifier.replace("/", ":")
response = client.put(
- f"/v1.0/process-models/{process_model.process_group_id}/{process_model.id}/files/{bpmn_file_name}",
+ f"/v1.0/process-models/{modified_process_model_id}/files/{bpmn_file_name}",
data=data,
follow_redirects=True,
content_type="multipart/form-data",
@@ -193,19 +221,30 @@ class TestProcessApi(BaseTest):
with_super_admin_user: UserModel,
) -> None:
"""Test_process_model_delete."""
+ process_group_id = "test_process_group"
+ process_group_description = "Test Process Group"
+ process_model_id = "sample"
+ process_model_identifier = f"{process_group_id}/{process_model_id}"
+ self.create_process_group(
+ client, with_super_admin_user, process_group_id, process_group_description
+ )
self.create_process_model_with_api(
client,
+ process_model_id=process_model_identifier,
user=with_super_admin_user,
)
# assert we have a model
- process_model = ProcessModelService().get_process_model("make_cookies")
+ process_model = ProcessModelService().get_process_model(
+ process_model_identifier
+ )
assert process_model is not None
- assert process_model.id == "make_cookies"
+ assert process_model.id == process_model_identifier
# delete the model
+ modified_process_model_identifier = process_model_identifier.replace("/", ":")
response = client.delete(
- f"/v1.0/process-models/{process_model.process_group_id}/{process_model.id}",
+ f"/v1.0/process-models/{modified_process_model_identifier}",
headers=self.logged_in_headers(with_super_admin_user),
)
assert response.status_code == 200
@@ -214,7 +253,7 @@ class TestProcessApi(BaseTest):
# assert we no longer have a model
with pytest.raises(ProcessEntityNotFoundError):
- ProcessModelService().get_process_model("make_cookies")
+ ProcessModelService().get_process_model(process_model_identifier)
def test_process_model_delete_with_instances(
self,
@@ -226,19 +265,38 @@ class TestProcessApi(BaseTest):
"""Test_process_model_delete_with_instances."""
test_process_group_id = "runs_without_input"
test_process_model_id = "sample"
+ bpmn_file_name = "sample.bpmn"
+ bpmn_file_location = "sample"
+ process_model_identifier = f"{test_process_group_id}/{test_process_model_id}"
+ modified_process_model_identifier = process_model_identifier.replace("/", ":")
+ self.create_process_group(client, with_super_admin_user, test_process_group_id)
+ self.create_process_model_with_api(
+ client, process_model_identifier, user=with_super_admin_user
+ )
+ bpmn_file_data_bytes = self.get_test_data_file_contents(
+ bpmn_file_name, bpmn_file_location
+ )
+ self.create_spec_file(
+ client=client,
+ process_model_id=process_model_identifier,
+ process_model_location=test_process_model_id,
+ file_name=bpmn_file_name,
+ file_data=bpmn_file_data_bytes,
+ user=with_super_admin_user,
+ )
headers = self.logged_in_headers(with_super_admin_user)
# create an instance from a model
response = self.create_process_instance(
- client, test_process_group_id, test_process_model_id, headers
+ client, process_model_identifier, headers
)
data = json.loads(response.get_data(as_text=True))
# make sure the instance has the correct model
- assert data["process_model_identifier"] == test_process_model_id
+ assert data["process_model_identifier"] == process_model_identifier
# try to delete the model
response = client.delete(
- f"/v1.0/process-models/{test_process_group_id}/{test_process_model_id}",
+ f"/v1.0/process-models/{modified_process_model_identifier}",
headers=self.logged_in_headers(with_super_admin_user),
)
@@ -248,7 +306,7 @@ class TestProcessApi(BaseTest):
assert data["error_code"] == "existing_instances"
assert (
data["message"]
- == "We cannot delete the model `sample`, there are existing instances that depend on it."
+ == f"We cannot delete the model `{process_model_identifier}`, there are existing instances that depend on it."
)
def test_process_model_update(
@@ -259,12 +317,19 @@ class TestProcessApi(BaseTest):
with_super_admin_user: UserModel,
) -> None:
"""Test_process_model_update."""
+ self.create_process_group(
+ client, with_super_admin_user, "test_process_group", "Test Process Group"
+ )
+ process_model_identifier = "test_process_group/make_cookies"
self.create_process_model_with_api(
client,
+ process_model_id=process_model_identifier,
user=with_super_admin_user,
)
- process_model = ProcessModelService().get_process_model("make_cookies")
- assert process_model.id == "make_cookies"
+ process_model = ProcessModelService().get_process_model(
+ process_model_identifier
+ )
+ assert process_model.id == process_model_identifier
assert process_model.display_name == "Cooooookies"
assert process_model.is_review is False
assert process_model.primary_file_name is None
@@ -275,8 +340,9 @@ class TestProcessApi(BaseTest):
process_model.primary_process_id = "superduper"
process_model.is_review = True # not in the include list, so get ignored
+ modified_process_model_identifier = process_model_identifier.replace("/", ":")
response = client.put(
- f"/v1.0/process-models/{process_model.process_group_id}/{process_model.id}",
+ f"/v1.0/process-models/{modified_process_model_identifier}",
headers=self.logged_in_headers(with_super_admin_user),
content_type="application/json",
data=json.dumps(ProcessModelInfoSchema().dump(process_model)),
@@ -302,12 +368,11 @@ class TestProcessApi(BaseTest):
# add 5 models to the group
for i in range(5):
- process_model_identifier = f"test_model_{i}"
+ process_model_identifier = f"{group_id}/test_model_{i}"
model_display_name = f"Test Model {i}"
model_description = f"Test Model {i} Description"
self.create_process_model_with_api(
client,
- process_group_id=group_id,
process_model_id=process_model_identifier,
process_model_display_name=model_display_name,
process_model_description=model_description,
@@ -332,7 +397,7 @@ class TestProcessApi(BaseTest):
)
assert response.json is not None
assert len(response.json["results"]) == 1
- assert response.json["results"][0]["id"] == "test_model_0"
+ assert response.json["results"][0]["id"] == "test_group/test_model_0"
assert response.json["pagination"]["count"] == 1
assert response.json["pagination"]["total"] == 5
assert response.json["pagination"]["pages"] == 5
@@ -344,7 +409,7 @@ class TestProcessApi(BaseTest):
)
assert response.json is not None
assert len(response.json["results"]) == 1
- assert response.json["results"][0]["id"] == "test_model_1"
+ assert response.json["results"][0]["id"] == "test_group/test_model_1"
assert response.json["pagination"]["count"] == 1
assert response.json["pagination"]["total"] == 5
assert response.json["pagination"]["pages"] == 5
@@ -356,7 +421,7 @@ class TestProcessApi(BaseTest):
)
assert response.json is not None
assert len(response.json["results"]) == 3
- assert response.json["results"][0]["id"] == "test_model_0"
+ assert response.json["results"][0]["id"] == "test_group/test_model_0"
assert response.json["pagination"]["count"] == 3
assert response.json["pagination"]["total"] == 5
assert response.json["pagination"]["pages"] == 2
@@ -369,7 +434,7 @@ class TestProcessApi(BaseTest):
# there should only be 2 left
assert response.json is not None
assert len(response.json["results"]) == 2
- assert response.json["results"][0]["id"] == "test_model_3"
+ assert response.json["results"][0]["id"] == "test_group/test_model_3"
assert response.json["pagination"]["count"] == 2
assert response.json["pagination"]["total"] == 5
assert response.json["pagination"]["pages"] == 2
@@ -558,12 +623,12 @@ class TestProcessApi(BaseTest):
with_super_admin_user: UserModel,
) -> None:
"""Test_process_model_file_update."""
- self.create_spec_file(client, user=with_super_admin_user)
+ process_model_identifier = self.basic_test_setup(client, with_super_admin_user)
+ modified_process_model_id = process_model_identifier.replace("/", ":")
- process_model = load_test_spec("random_fact")
data = {"key1": "THIS DATA"}
response = client.put(
- f"/v1.0/process-models/{process_model.process_group_id}/{process_model.id}/files/random_fact.svg",
+ f"/v1.0/process-models/{modified_process_model_id}/files/random_fact.svg",
data=data,
follow_redirects=True,
content_type="multipart/form-data",
@@ -582,12 +647,12 @@ class TestProcessApi(BaseTest):
with_super_admin_user: UserModel,
) -> None:
"""Test_process_model_file_update."""
- self.create_spec_file(client, user=with_super_admin_user)
+ process_model_identifier = self.basic_test_setup(client, with_super_admin_user)
+ modified_process_model_id = process_model_identifier.replace("/", ":")
- process_model = load_test_spec("random_fact")
data = {"file": (io.BytesIO(b""), "random_fact.svg")}
response = client.put(
- f"/v1.0/process-models/{process_model.process_group_id}/{process_model.id}/files/random_fact.svg",
+ f"/v1.0/process-models/{modified_process_model_id}/files/random_fact.svg",
data=data,
follow_redirects=True,
content_type="multipart/form-data",
@@ -606,13 +671,31 @@ class TestProcessApi(BaseTest):
with_super_admin_user: UserModel,
) -> None:
"""Test_process_model_file_update."""
- original_file = self.create_spec_file(client, user=with_super_admin_user)
+ process_group_id = "test_group"
+ process_group_description = "Test Group"
+ process_model_id = "random_fact"
+ process_model_identifier = f"{process_group_id}/{process_model_id}"
+ self.create_process_group(
+ client, with_super_admin_user, process_group_id, process_group_description
+ )
+ self.create_process_model_with_api(
+ client,
+ process_model_id=process_model_identifier,
+ user=with_super_admin_user,
+ )
- process_model = load_test_spec("random_fact")
+ bpmn_file_name = "random_fact.bpmn"
+ original_file = load_test_spec(
+            process_model_id=process_model_identifier,
+ bpmn_file_name=bpmn_file_name,
+ process_model_source_directory="random_fact",
+ )
+
+ modified_process_model_id = process_model_identifier.replace("/", ":")
new_file_contents = b"THIS_IS_NEW_DATA"
data = {"file": (io.BytesIO(new_file_contents), "random_fact.svg")}
response = client.put(
- f"/v1.0/process-models/{process_model.process_group_id}/{process_model.id}/files/random_fact.svg",
+ f"/v1.0/process-models/{modified_process_model_id}/files/random_fact.svg",
data=data,
follow_redirects=True,
content_type="multipart/form-data",
@@ -624,7 +707,7 @@ class TestProcessApi(BaseTest):
assert response.json["ok"]
response = client.get(
- f"/v1.0/process-models/{process_model.process_group_id}/{process_model.id}/files/random_fact.svg",
+ f"/v1.0/process-models/{modified_process_model_id}/files/random_fact.svg",
headers=self.logged_in_headers(with_super_admin_user),
)
assert response.status_code == 200
@@ -640,11 +723,16 @@ class TestProcessApi(BaseTest):
with_super_admin_user: UserModel,
) -> None:
"""Test_process_model_file_update."""
- self.create_spec_file(client, user=with_super_admin_user)
+ process_model_identifier = self.basic_test_setup(client, with_super_admin_user)
+ # self.create_spec_file(client, user=with_super_admin_user)
- process_model = load_test_spec("random_fact")
+ # process_model = load_test_spec("random_fact")
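+        # prefix the identifier with "x" so it refers to a model that does not exist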
+ bad_process_model_identifier = f"x{process_model_identifier}"
+ modified_bad_process_model_identifier = bad_process_model_identifier.replace(
+ "/", ":"
+ )
response = client.delete(
- f"/v1.0/process-models/INCORRECT-NON-EXISTENT-GROUP/{process_model.id}/files/random_fact.svg",
+ f"/v1.0/process-models/{modified_bad_process_model_identifier}/files/random_fact.svg",
follow_redirects=True,
headers=self.logged_in_headers(with_super_admin_user),
)
@@ -661,11 +749,11 @@ class TestProcessApi(BaseTest):
with_super_admin_user: UserModel,
) -> None:
"""Test_process_model_file_update."""
- self.create_spec_file(client, user=with_super_admin_user)
+ process_model_identifier = self.basic_test_setup(client, with_super_admin_user)
+ modified_process_model_identifier = process_model_identifier.replace("/", ":")
- process_model = load_test_spec("random_fact")
response = client.delete(
- f"/v1.0/process-models/{process_model.process_group_id}/{process_model.id}/files/random_fact_DOES_NOT_EXIST.svg",
+ f"/v1.0/process-models/{modified_process_model_identifier}/files/random_fact_DOES_NOT_EXIST.svg",
follow_redirects=True,
headers=self.logged_in_headers(with_super_admin_user),
)
@@ -682,11 +770,11 @@ class TestProcessApi(BaseTest):
with_super_admin_user: UserModel,
) -> None:
"""Test_process_model_file_update."""
- self.create_spec_file(client, user=with_super_admin_user)
+ process_model_identifier = self.basic_test_setup(client, with_super_admin_user)
+ modified_process_model_identifier = process_model_identifier.replace("/", ":")
- process_model = load_test_spec("random_fact")
response = client.delete(
- f"/v1.0/process-models/{process_model.process_group_id}/{process_model.id}/files/random_fact.svg",
+ f"/v1.0/process-models/{modified_process_model_identifier}/files/random_fact.bpmn",
follow_redirects=True,
headers=self.logged_in_headers(with_super_admin_user),
)
@@ -696,7 +784,7 @@ class TestProcessApi(BaseTest):
assert response.json["ok"]
response = client.get(
- f"/v1.0/process-models/{process_model.process_group_id}/{process_model.id}/files/random_fact.svg",
+ f"/v1.0/process-models/{modified_process_model_identifier}/files/random_fact.svg",
headers=self.logged_in_headers(with_super_admin_user),
)
assert response.status_code == 404
@@ -709,18 +797,17 @@ class TestProcessApi(BaseTest):
with_super_admin_user: UserModel,
) -> None:
"""Test_get_file."""
- test_process_group_id = "group_id1"
- process_model_dir_name = "hello_world"
- load_test_spec(process_model_dir_name, process_group_id=test_process_group_id)
+ process_model_identifier = self.basic_test_setup(client, with_super_admin_user)
+ modified_process_model_identifier = process_model_identifier.replace("/", ":")
+
response = client.get(
- f"/v1.0/process-models/{test_process_group_id}/{process_model_dir_name}/files/hello_world.bpmn",
+ f"/v1.0/process-models/{modified_process_model_identifier}/files/random_fact.bpmn",
headers=self.logged_in_headers(with_super_admin_user),
)
assert response.status_code == 200
assert response.json is not None
- assert response.json["name"] == "hello_world.bpmn"
- assert response.json["process_group_id"] == "group_id1"
- assert response.json["process_model_id"] == "hello_world"
+ assert response.json["name"] == "random_fact.bpmn"
+ assert response.json["process_model_id"] == "test_group/random_fact"
def test_get_workflow_from_workflow_spec(
self,
@@ -730,15 +817,16 @@ class TestProcessApi(BaseTest):
with_super_admin_user: UserModel,
) -> None:
"""Test_get_workflow_from_workflow_spec."""
- process_model = load_test_spec("hello_world")
+ process_model_identifier = self.basic_test_setup(client, with_super_admin_user)
+ modified_process_model_identifier = process_model_identifier.replace("/", ":")
+
response = client.post(
- f"/v1.0/process-models/{process_model.process_group_id}/{process_model.id}/process-instances",
+ f"/v1.0/process-models/{modified_process_model_identifier}/process-instances",
headers=self.logged_in_headers(with_super_admin_user),
)
assert response.status_code == 201
assert response.json is not None
- assert "hello_world" == response.json["process_model_identifier"]
- # assert('Task_GetName' == response.json['next_task']['name'])
+ assert "test_group/random_fact" == response.json["process_model_identifier"]
def test_get_process_groups_when_none(
self,
@@ -764,7 +852,7 @@ class TestProcessApi(BaseTest):
with_super_admin_user: UserModel,
) -> None:
"""Test_get_process_groups_when_there_are_some."""
- load_test_spec("hello_world")
+ self.basic_test_setup(client, with_super_admin_user)
response = client.get(
"/v1.0/process-groups",
headers=self.logged_in_headers(with_super_admin_user),
@@ -784,18 +872,18 @@ class TestProcessApi(BaseTest):
with_super_admin_user: UserModel,
) -> None:
"""Test_get_process_group_when_found."""
- test_process_group_id = "group_id1"
- process_model_dir_name = "hello_world"
- load_test_spec(process_model_dir_name, process_group_id=test_process_group_id)
+ process_model_identifier = self.basic_test_setup(client, with_super_admin_user)
+ process_group_id, process_model_id = os.path.split(process_model_identifier)
+
response = client.get(
- f"/v1.0/process-groups/{test_process_group_id}",
+ f"/v1.0/process-groups/{process_group_id}",
headers=self.logged_in_headers(with_super_admin_user),
)
assert response.status_code == 200
assert response.json is not None
- assert response.json["id"] == test_process_group_id
- assert response.json["process_models"][0]["id"] == process_model_dir_name
+ assert response.json["id"] == process_group_id
+ assert response.json["process_models"][0]["id"] == process_model_identifier
def test_get_process_model_when_found(
self,
@@ -805,18 +893,20 @@ class TestProcessApi(BaseTest):
with_super_admin_user: UserModel,
) -> None:
"""Test_get_process_model_when_found."""
- test_process_group_id = "group_id1"
- process_model_dir_name = "hello_world"
- load_test_spec(process_model_dir_name, process_group_id=test_process_group_id)
+ process_model_identifier = self.basic_test_setup(
+ client, with_super_admin_user, bpmn_file_name="random_fact.bpmn"
+ )
+ modified_process_model_identifier = process_model_identifier.replace("/", ":")
+
response = client.get(
- f"/v1.0/process-models/{test_process_group_id}/{process_model_dir_name}",
+ f"/v1.0/process-models/{modified_process_model_identifier}",
headers=self.logged_in_headers(with_super_admin_user),
)
assert response.status_code == 200
assert response.json is not None
- assert response.json["id"] == process_model_dir_name
+ assert response.json["id"] == process_model_identifier
assert len(response.json["files"]) == 1
- assert response.json["files"][0]["name"] == "hello_world.bpmn"
+ assert response.json["files"][0]["name"] == "random_fact.bpmn"
def test_get_process_model_when_not_found(
self,
@@ -828,8 +918,10 @@ class TestProcessApi(BaseTest):
"""Test_get_process_model_when_not_found."""
process_model_dir_name = "THIS_NO_EXISTS"
group_id = self.create_process_group(client, with_super_admin_user, "my_group")
+ bad_process_model_id = f"{group_id}/{process_model_dir_name}"
+ modified_bad_process_model_id = bad_process_model_id.replace("/", ":")
response = client.get(
- f"/v1.0/process-models/{group_id}/{process_model_dir_name}",
+ f"/v1.0/process-models/{modified_bad_process_model_id}",
headers=self.logged_in_headers(with_super_admin_user),
)
assert response.status_code == 400
@@ -844,12 +936,9 @@ class TestProcessApi(BaseTest):
with_super_admin_user: UserModel,
) -> None:
"""Test_process_instance_create."""
- test_process_group_id = "runs_without_input"
- test_process_model_id = "sample"
+ test_process_model_id = "runs_without_input/sample"
headers = self.logged_in_headers(with_super_admin_user)
- response = self.create_process_instance(
- client, test_process_group_id, test_process_model_id, headers
- )
+ response = self.create_process_instance(client, test_process_model_id, headers)
assert response.json is not None
assert response.json["updated_at_in_seconds"] is not None
assert response.json["status"] == "not_started"
@@ -866,16 +955,24 @@ class TestProcessApi(BaseTest):
with_super_admin_user: UserModel,
) -> None:
"""Test_process_instance_run."""
- process_group_id = "runs_without_input"
- process_model_id = "sample"
+ # process_model_id = "runs_without_input/sample"
+ process_model_identifier = self.basic_test_setup(
+ client=client,
+ user=with_super_admin_user,
+ process_group_id="runs_without_input",
+ process_model_id="sample",
+ bpmn_file_name=None,
+ bpmn_file_location="sample",
+ )
+
headers = self.logged_in_headers(with_super_admin_user)
response = self.create_process_instance(
- client, process_group_id, process_model_id, headers
+ client, process_model_identifier, headers
)
assert response.json is not None
process_instance_id = response.json["id"]
response = client.post(
- f"/v1.0/process-models/{process_group_id}/{process_model_id}/process-instances/{process_instance_id}/run",
+ f"/v1.0/process-instances/{process_instance_id}/run",
headers=self.logged_in_headers(with_super_admin_user),
)
@@ -883,7 +980,7 @@ class TestProcessApi(BaseTest):
assert type(response.json["updated_at_in_seconds"]) is int
assert response.json["updated_at_in_seconds"] > 0
assert response.json["status"] == "complete"
- assert response.json["process_model_identifier"] == process_model_id
+ assert response.json["process_model_identifier"] == process_model_identifier
assert (
response.json["data"]["current_user"]["username"]
== with_super_admin_user.username
@@ -901,23 +998,32 @@ class TestProcessApi(BaseTest):
"""Test_process_instance_show."""
process_group_id = "simple_script"
process_model_id = "simple_script"
+ process_model_identifier = self.basic_test_setup(
+ client,
+ with_super_admin_user,
+ process_group_id=process_group_id,
+ process_model_id=process_model_id,
+ )
+ modified_process_model_identifier = process_model_identifier.replace("/", ":")
headers = self.logged_in_headers(with_super_admin_user)
create_response = self.create_process_instance(
- client, process_group_id, process_model_id, headers
+ client, process_model_identifier, headers
)
assert create_response.json is not None
process_instance_id = create_response.json["id"]
client.post(
- f"/v1.0/process-models/{process_group_id}/{process_model_id}/process-instances/{process_instance_id}/run",
+ f"/v1.0/process-instances/{process_instance_id}/run",
headers=self.logged_in_headers(with_super_admin_user),
)
show_response = client.get(
- f"/v1.0/process-models/{process_group_id}/{process_model_id}/process-instances/{process_instance_id}",
+ f"/v1.0/process-models/{modified_process_model_identifier}/process-instances/{process_instance_id}",
headers=self.logged_in_headers(with_super_admin_user),
)
assert show_response.json is not None
file_system_root = FileSystemService.root_path()
- file_path = f"{file_system_root}/{process_group_id}/{process_model_id}/{process_model_id}.bpmn"
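+        # the model identifier doubles as the model's directory path under the spec root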
+ file_path = (
+ f"{file_system_root}/{process_model_identifier}/{process_model_id}.bpmn"
+ )
with open(file_path) as f_open:
xml_file_contents = f_open.read()
assert show_response.json["bpmn_xml_file_contents"] == xml_file_contents
@@ -930,12 +1036,20 @@ class TestProcessApi(BaseTest):
with_super_admin_user: UserModel,
) -> None:
"""Test_message_start_when_starting_process_instance."""
- # ensure process model is loaded in db
- load_test_spec(
- "message_receiver",
- process_model_source_directory="message_send_one_conversation",
- bpmn_file_name="message_receiver",
+ # ensure process model is loaded
+ process_group_id = "test_message_start"
+ process_model_id = "message_receiver"
+ bpmn_file_name = "message_receiver.bpmn"
+ bpmn_file_location = "message_send_one_conversation"
+ self.basic_test_setup(
+ client,
+ with_super_admin_user,
+ process_group_id=process_group_id,
+ process_model_id=process_model_id,
+ bpmn_file_name=bpmn_file_name,
+ bpmn_file_location=bpmn_file_location,
)
+
message_model_identifier = "message_send"
payload = {
"topica": "the_topica_string",
@@ -971,11 +1085,19 @@ class TestProcessApi(BaseTest):
with_super_admin_user: UserModel,
) -> None:
"""Test_message_start_when_providing_message_to_running_process_instance."""
- process_model = load_test_spec(
- "message_sender",
- process_model_source_directory="message_send_one_conversation",
- bpmn_file_name="message_sender",
+ process_group_id = "test_message_start"
+ process_model_id = "message_sender"
+ bpmn_file_name = "message_sender.bpmn"
+ bpmn_file_location = "message_send_one_conversation"
+ process_model_identifier = self.basic_test_setup(
+ client,
+ with_super_admin_user,
+ process_group_id=process_group_id,
+ process_model_id=process_model_id,
+ bpmn_file_name=bpmn_file_name,
+ bpmn_file_location=bpmn_file_location,
)
+
message_model_identifier = "message_response"
payload = {
"the_payload": {
@@ -986,16 +1108,14 @@ class TestProcessApi(BaseTest):
}
response = self.create_process_instance(
client,
- process_model.process_group_id,
- process_model.id,
+ process_model_identifier,
self.logged_in_headers(with_super_admin_user),
)
assert response.json is not None
process_instance_id = response.json["id"]
response = client.post(
- f"/v1.0/process-models/{process_model.process_group_id}/"
- f"{process_model.id}/process-instances/{process_instance_id}/run",
+ f"/v1.0/process-instances/{process_instance_id}/run",
headers=self.logged_in_headers(with_super_admin_user),
)
@@ -1033,31 +1153,36 @@ class TestProcessApi(BaseTest):
) -> None:
"""Test_message_start_when_providing_message_to_running_process_instance."""
# this task will wait on a catch event
- process_model = load_test_spec(
- "message_sender",
- process_model_source_directory="message_send_one_conversation",
- bpmn_file_name="message_sender",
+ process_group_id = "test_message_start"
+ process_model_id = "message_sender"
+ bpmn_file_name = "message_sender.bpmn"
+ bpmn_file_location = "message_send_one_conversation"
+ process_model_identifier = self.basic_test_setup(
+ client,
+ with_super_admin_user,
+ process_group_id=process_group_id,
+ process_model_id=process_model_id,
+ bpmn_file_name=bpmn_file_name,
+ bpmn_file_location=bpmn_file_location,
)
+
response = self.create_process_instance(
client,
- process_model.process_group_id,
- process_model.id,
+ process_model_identifier,
self.logged_in_headers(with_super_admin_user),
)
assert response.json is not None
process_instance_id = response.json["id"]
response = client.post(
- f"/v1.0/process-models/{process_model.process_group_id}/"
- f"{process_model.id}/process-instances/{process_instance_id}/run",
+ f"/v1.0/process-instances/{process_instance_id}/run",
headers=self.logged_in_headers(with_super_admin_user),
)
assert response.status_code == 200
assert response.json is not None
response = client.post(
- f"/v1.0/process-models/{process_model.process_group_id}/"
- f"{process_model.id}/process-instances/{process_instance_id}/terminate",
+ f"/v1.0/process-instances/{process_instance_id}/terminate",
headers=self.logged_in_headers(with_super_admin_user),
)
assert response.status_code == 200
@@ -1079,22 +1204,32 @@ class TestProcessApi(BaseTest):
"""Test_process_instance_delete."""
process_group_id = "my_process_group"
process_model_id = "user_task"
+ bpmn_file_name = "user_task.bpmn"
+ bpmn_file_location = "user_task"
+ process_model_identifier = self.basic_test_setup(
+ client,
+ with_super_admin_user,
+ process_group_id=process_group_id,
+ process_model_id=process_model_id,
+ bpmn_file_name=bpmn_file_name,
+ bpmn_file_location=bpmn_file_location,
+ )
headers = self.logged_in_headers(with_super_admin_user)
response = self.create_process_instance(
- client, process_group_id, process_model_id, headers
+ client, process_model_identifier, headers
)
assert response.json is not None
process_instance_id = response.json["id"]
response = client.post(
- f"/v1.0/process-models/{process_group_id}/{process_model_id}/process-instances/{process_instance_id}/run",
+ f"/v1.0/process-instances/{process_instance_id}/run",
headers=self.logged_in_headers(with_super_admin_user),
)
assert response.json is not None
delete_response = client.delete(
- f"/v1.0/process-models/{process_group_id}/{process_model_id}/process-instances/{process_instance_id}",
+ f"/v1.0/process-instances/{process_instance_id}",
headers=self.logged_in_headers(with_super_admin_user),
)
assert delete_response.status_code == 200
@@ -1109,21 +1244,30 @@ class TestProcessApi(BaseTest):
"""Test_process_instance_run_user_task."""
process_group_id = "my_process_group"
process_model_id = "dynamic_enum_select_fields"
+ bpmn_file_location = "dynamic_enum_select_fields"
+ process_model_identifier = self.basic_test_setup(
+ client,
+ with_super_admin_user,
+ process_group_id=process_group_id,
+ process_model_id=process_model_id,
+ # bpmn_file_name=bpmn_file_name,
+ bpmn_file_location=bpmn_file_location,
+ )
headers = self.logged_in_headers(with_super_admin_user)
response = self.create_process_instance(
- client, process_group_id, process_model_id, headers
+ client, process_model_identifier, headers
)
assert response.json is not None
process_instance_id = response.json["id"]
response = client.post(
- f"/v1.0/process-models/{process_group_id}/{process_model_id}/process-instances/{process_instance_id}/run",
+ f"/v1.0/process-instances/{process_instance_id}/run",
headers=self.logged_in_headers(with_super_admin_user),
)
assert response.json is not None
- # assert response.json['next_task'] is not None
+ assert response.json["next_task"] is not None
active_tasks = (
db.session.query(ActiveTaskModel)
@@ -1150,13 +1294,20 @@ class TestProcessApi(BaseTest):
with_super_admin_user: UserModel,
) -> None:
"""Test_process_instance_list_with_default_list."""
- test_process_group_id = "runs_without_input"
- process_model_dir_name = "sample"
- headers = self.logged_in_headers(with_super_admin_user)
- self.create_process_instance(
- client, test_process_group_id, process_model_dir_name, headers
+ process_group_id = "runs_without_input"
+ process_model_id = "sample"
+ bpmn_file_location = "sample"
+ process_model_identifier = self.basic_test_setup(
+ client,
+ with_super_admin_user,
+ process_group_id=process_group_id,
+ process_model_id=process_model_id,
+ bpmn_file_location=bpmn_file_location,
)
+ headers = self.logged_in_headers(with_super_admin_user)
+ self.create_process_instance(client, process_model_identifier, headers)
+
response = client.get(
"/v1.0/process-instances",
headers=self.logged_in_headers(with_super_admin_user),
@@ -1171,10 +1322,8 @@ class TestProcessApi(BaseTest):
process_instance_dict = response.json["results"][0]
assert type(process_instance_dict["id"]) is int
assert (
- process_instance_dict["process_model_identifier"] == process_model_dir_name
- )
- assert (
- process_instance_dict["process_group_identifier"] == test_process_group_id
+ process_instance_dict["process_model_identifier"]
+ == process_model_identifier
)
assert type(process_instance_dict["start_in_seconds"]) is int
assert process_instance_dict["start_in_seconds"] > 0
@@ -1189,24 +1338,24 @@ class TestProcessApi(BaseTest):
with_super_admin_user: UserModel,
) -> None:
"""Test_process_instance_list_with_paginated_items."""
- test_process_group_id = "runs_without_input"
- process_model_dir_name = "sample"
+ process_group_id = "runs_without_input"
+ process_model_id = "sample"
+ bpmn_file_name = "sample.bpmn"
+ bpmn_file_location = "sample"
+ process_model_identifier = self.basic_test_setup(
+ client,
+ with_super_admin_user,
+ process_group_id=process_group_id,
+ process_model_id=process_model_id,
+ bpmn_file_name=bpmn_file_name,
+ bpmn_file_location=bpmn_file_location,
+ )
headers = self.logged_in_headers(with_super_admin_user)
- self.create_process_instance(
- client, test_process_group_id, process_model_dir_name, headers
- )
- self.create_process_instance(
- client, test_process_group_id, process_model_dir_name, headers
- )
- self.create_process_instance(
- client, test_process_group_id, process_model_dir_name, headers
- )
- self.create_process_instance(
- client, test_process_group_id, process_model_dir_name, headers
- )
- self.create_process_instance(
- client, test_process_group_id, process_model_dir_name, headers
- )
+ self.create_process_instance(client, process_model_identifier, headers)
+ self.create_process_instance(client, process_model_identifier, headers)
+ self.create_process_instance(client, process_model_identifier, headers)
+ self.create_process_instance(client, process_model_identifier, headers)
+ self.create_process_instance(client, process_model_identifier, headers)
response = client.get(
"/v1.0/process-instances?per_page=2&page=3",
@@ -1238,9 +1387,18 @@ class TestProcessApi(BaseTest):
with_super_admin_user: UserModel,
) -> None:
"""Test_process_instance_list_filter."""
- test_process_group_id = "runs_without_input"
- test_process_model_id = "sample"
- load_test_spec(test_process_model_id, process_group_id=test_process_group_id)
+ process_group_id = "runs_without_input"
+ process_model_id = "sample"
+ bpmn_file_name = "sample.bpmn"
+ bpmn_file_location = "sample"
+ process_model_identifier = self.basic_test_setup(
+ client,
+ with_super_admin_user,
+ process_group_id=process_group_id,
+ process_model_id=process_model_id,
+ bpmn_file_name=bpmn_file_name,
+ bpmn_file_location=bpmn_file_location,
+ )
statuses = [status.value for status in ProcessInstanceStatus]
# create 5 instances with different status, and different start_in_seconds/end_in_seconds
@@ -1248,8 +1406,8 @@ class TestProcessApi(BaseTest):
process_instance = ProcessInstanceModel(
status=ProcessInstanceStatus[statuses[i]].value,
process_initiator=with_super_admin_user,
- process_model_identifier=test_process_model_id,
- process_group_identifier=test_process_group_id,
+ process_model_identifier=process_model_identifier,
+            process_group_identifier=process_group_id,
updated_at_in_seconds=round(time.time()),
start_in_seconds=(1000 * i) + 1000,
end_in_seconds=(1000 * i) + 2000,
@@ -1260,7 +1418,7 @@ class TestProcessApi(BaseTest):
# Without filtering we should get all 5 instances
response = client.get(
- f"/v1.0/process-instances?process_group_identifier={test_process_group_id}&process_model_identifier={test_process_model_id}",
+ f"/v1.0/process-instances?process_model_identifier={process_model_identifier}",
headers=self.logged_in_headers(with_super_admin_user),
)
assert response.json is not None
@@ -1271,7 +1429,7 @@ class TestProcessApi(BaseTest):
# we should get 1 instance each time
for i in range(5):
response = client.get(
- f"/v1.0/process-instances?process_status={ProcessInstanceStatus[statuses[i]].value}&process_group_identifier={test_process_group_id}&process_model_identifier={test_process_model_id}",
+ f"/v1.0/process-instances?process_status={ProcessInstanceStatus[statuses[i]].value}&process_model_identifier={process_model_identifier}",
headers=self.logged_in_headers(with_super_admin_user),
)
assert response.json is not None
@@ -1280,7 +1438,7 @@ class TestProcessApi(BaseTest):
assert results[0]["status"] == ProcessInstanceStatus[statuses[i]].value
response = client.get(
- f"/v1.0/process-instances?process_status=not_started,complete&process_group_identifier={test_process_group_id}&process_model_identifier={test_process_model_id}",
+ f"/v1.0/process-instances?process_status=not_started,complete&process_model_identifier={process_model_identifier}",
headers=self.logged_in_headers(with_super_admin_user),
)
assert response.json is not None
@@ -1342,23 +1500,29 @@ class TestProcessApi(BaseTest):
with_super_admin_user: UserModel,
) -> None:
"""Test_process_instance_report_list."""
- process_group_identifier = "runs_without_input"
- process_model_identifier = "sample"
- self.logged_in_headers(with_super_admin_user)
- load_test_spec(
- process_model_identifier, process_group_id=process_group_identifier
+ process_group_id = "runs_without_input"
+ process_model_id = "sample"
+ bpmn_file_name = "sample.bpmn"
+ bpmn_file_location = "sample"
+ process_model_identifier = self.basic_test_setup( # noqa: F841
+ client,
+ with_super_admin_user,
+ process_group_id=process_group_id,
+ process_model_id=process_model_id,
+ bpmn_file_name=bpmn_file_name,
+ bpmn_file_location=bpmn_file_location,
)
+ self.logged_in_headers(with_super_admin_user)
+
report_identifier = "testreport"
report_metadata = {"order_by": ["month"]}
ProcessInstanceReportModel.create_with_attributes(
identifier=report_identifier,
- process_group_identifier=process_group_identifier,
- process_model_identifier=process_model_identifier,
report_metadata=report_metadata,
user=with_super_admin_user,
)
response = client.get(
- f"/v1.0/process-models/{process_group_identifier}/{process_model_identifier}/process-instances/reports",
+ "/v1.0/process-instances/reports",
headers=self.logged_in_headers(with_super_admin_user),
)
assert response.status_code == 200
@@ -1376,8 +1540,19 @@ class TestProcessApi(BaseTest):
setup_process_instances_for_reports: list[ProcessInstanceModel],
) -> None:
"""Test_process_instance_report_show_with_default_list."""
- test_process_group_id = "runs_without_input"
- process_model_dir_name = "sample"
+ process_group_id = "runs_without_input"
+ process_model_id = "sample"
+ # bpmn_file_name = "sample.bpmn"
+ # bpmn_file_location = "sample"
+ # process_model_identifier = self.basic_test_setup(
+ # client,
+ # with_super_admin_user,
+ # process_group_id=process_group_id,
+ # process_model_id=process_model_id,
+ # bpmn_file_name=bpmn_file_name,
+ # bpmn_file_location=bpmn_file_location
+ # )
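+        # the setup_process_instances_for_reports fixture already creates this model; only its identifier is needed here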
+ process_model_identifier = f"{process_group_id}/{process_model_id}"
report_metadata = {
"columns": [
@@ -1400,14 +1575,12 @@ class TestProcessApi(BaseTest):
ProcessInstanceReportModel.create_with_attributes(
identifier="sure",
- process_group_identifier=test_process_group_id,
- process_model_identifier=process_model_dir_name,
report_metadata=report_metadata,
user=with_super_admin_user,
)
response = client.get(
- f"/v1.0/process-models/{test_process_group_id}/{process_model_dir_name}/process-instances/reports/sure",
+ "/v1.0/process-instances/reports/sure",
headers=self.logged_in_headers(with_super_admin_user),
)
assert response.status_code == 200
@@ -1420,10 +1593,8 @@ class TestProcessApi(BaseTest):
process_instance_dict = response.json["results"][0]
assert type(process_instance_dict["id"]) is int
assert (
- process_instance_dict["process_model_identifier"] == process_model_dir_name
- )
- assert (
- process_instance_dict["process_group_identifier"] == test_process_group_id
+ process_instance_dict["process_model_identifier"]
+ == process_model_identifier
)
assert type(process_instance_dict["start_in_seconds"]) is int
assert process_instance_dict["start_in_seconds"] > 0
@@ -1438,9 +1609,6 @@ class TestProcessApi(BaseTest):
setup_process_instances_for_reports: list[ProcessInstanceModel],
) -> None:
"""Test_process_instance_report_show_with_default_list."""
- test_process_group_id = "runs_without_input"
- process_model_dir_name = "sample"
-
report_metadata = {
"filter_by": [
{
@@ -1453,14 +1621,12 @@ class TestProcessApi(BaseTest):
ProcessInstanceReportModel.create_with_attributes(
identifier="sure",
- process_group_identifier=test_process_group_id,
- process_model_identifier=process_model_dir_name,
report_metadata=report_metadata,
user=with_super_admin_user,
)
response = client.get(
- f"/v1.0/process-models/{test_process_group_id}/{process_model_dir_name}/process-instances/reports/sure?grade_level=1",
+ "/v1.0/process-instances/reports/sure?grade_level=1",
headers=self.logged_in_headers(with_super_admin_user),
)
assert response.status_code == 200
@@ -1476,11 +1642,8 @@ class TestProcessApi(BaseTest):
setup_process_instances_for_reports: list[ProcessInstanceModel],
) -> None:
"""Test_process_instance_report_show_with_default_list."""
- test_process_group_id = "runs_without_input"
- process_model_dir_name = "sample"
-
response = client.get(
- f"/v1.0/process-models/{test_process_group_id}/{process_model_dir_name}/process-instances/reports/sure?grade_level=1",
+ "/v1.0/process-instances/reports/sure?grade_level=1",
headers=self.logged_in_headers(with_super_admin_user),
)
assert response.status_code == 404
@@ -1490,15 +1653,12 @@ class TestProcessApi(BaseTest):
def setup_testing_instance(
self,
client: FlaskClient,
- process_group_id: str,
process_model_id: str,
with_super_admin_user: UserModel,
) -> Any:
"""Setup_testing_instance."""
headers = self.logged_in_headers(with_super_admin_user)
- response = self.create_process_instance(
- client, process_group_id, process_model_id, headers
- )
+ response = self.create_process_instance(client, process_model_id, headers)
process_instance = response.json
assert isinstance(process_instance, dict)
process_instance_id = process_instance["id"]
@@ -1514,9 +1674,19 @@ class TestProcessApi(BaseTest):
"""Test_error_handler."""
process_group_id = "data"
process_model_id = "error"
+ bpmn_file_name = "error.bpmn"
+ bpmn_file_location = "error"
+ process_model_identifier = self.basic_test_setup(
+ client,
+ with_super_admin_user,
+ process_group_id=process_group_id,
+ process_model_id=process_model_id,
+ bpmn_file_name=bpmn_file_name,
+ bpmn_file_location=bpmn_file_location,
+ )
process_instance_id = self.setup_testing_instance(
- client, process_group_id, process_model_id, with_super_admin_user
+ client, process_model_identifier, with_super_admin_user
)
process = (
@@ -1528,7 +1698,7 @@ class TestProcessApi(BaseTest):
assert process.status == "not_started"
response = client.post(
- f"/v1.0/process-models/{process_group_id}/{process_model_id}/process-instances/{process_instance_id}/run",
+ f"/v1.0/process-instances/{process_instance_id}/run",
headers=self.logged_in_headers(with_super_admin_user),
)
assert response.status_code == 400
@@ -1558,12 +1728,22 @@ class TestProcessApi(BaseTest):
"""Test_error_handler_suspend."""
process_group_id = "data"
process_model_id = "error"
+ bpmn_file_name = "error.bpmn"
+ bpmn_file_location = "error"
+ process_model_identifier = self.basic_test_setup(
+ client,
+ with_super_admin_user,
+ process_group_id=process_group_id,
+ process_model_id=process_model_id,
+ bpmn_file_name=bpmn_file_name,
+ bpmn_file_location=bpmn_file_location,
+ )
process_instance_id = self.setup_testing_instance(
- client, process_group_id, process_model_id, with_super_admin_user
+ client, process_model_identifier, with_super_admin_user
)
process_model = ProcessModelService().get_process_model(
- process_model_id, process_group_id
+ process_model_identifier
)
ProcessModelService().update_spec(
process_model,
@@ -1579,7 +1759,7 @@ class TestProcessApi(BaseTest):
assert process.status == "not_started"
response = client.post(
- f"/v1.0/process-models/{process_group_id}/{process_model_id}/process-instances/{process_instance_id}/run",
+ f"/v1.0/process-instances/{process_instance_id}/run",
headers=self.logged_in_headers(with_super_admin_user),
)
assert response.status_code == 400
@@ -1602,13 +1782,23 @@ class TestProcessApi(BaseTest):
"""Test_error_handler."""
process_group_id = "data"
process_model_id = "error"
+ bpmn_file_name = "error.bpmn"
+ bpmn_file_location = "error"
+ process_model_identifier = self.basic_test_setup(
+ client,
+ with_super_admin_user,
+ process_group_id=process_group_id,
+ process_model_id=process_model_id,
+ bpmn_file_name=bpmn_file_name,
+ bpmn_file_location=bpmn_file_location,
+ )
process_instance_id = self.setup_testing_instance(
- client, process_group_id, process_model_id, with_super_admin_user
+ client, process_model_identifier, with_super_admin_user
)
process_model = ProcessModelService().get_process_model(
- process_model_id, process_group_id
+ process_model_identifier
)
ProcessModelService().update_spec(
process_model,
@@ -1619,7 +1809,7 @@ class TestProcessApi(BaseTest):
with mail.record_messages() as outbox:
response = client.post(
- f"/v1.0/process-models/{process_group_id}/{process_model_id}/process-instances/{process_instance_id}/run",
+ f"/v1.0/process-instances/{process_instance_id}/run",
headers=self.logged_in_headers(with_super_admin_user),
)
assert response.status_code == 400
@@ -1651,17 +1841,26 @@ class TestProcessApi(BaseTest):
process_model_id = "hello_world"
file_name = "hello_world.svg"
file_data = b"abc123"
+ bpmn_file_name = "hello_world.bpmn"
+ bpmn_file_location = "hello_world"
+ process_model_identifier = self.basic_test_setup(
+ client,
+ with_super_admin_user,
+ process_group_id=process_group_id,
+ process_model_id=process_model_id,
+ bpmn_file_name=bpmn_file_name,
+ bpmn_file_location=bpmn_file_location,
+ )
result = self.create_spec_file(
client,
- process_group_id=process_group_id,
- process_model_id=process_model_id,
+ process_model_id=process_model_identifier,
file_name=file_name,
file_data=file_data,
user=with_super_admin_user,
)
- assert result["process_group_id"] == process_group_id
- assert result["process_model_id"] == process_model_id
+
+ assert result["process_model_id"] == process_model_identifier
assert result["name"] == file_name
assert bytes(str(result["file_contents"]), "utf-8") == file_data
@@ -1673,11 +1872,23 @@ class TestProcessApi(BaseTest):
with_super_admin_user: UserModel,
) -> None:
"""Test_can_get_message_instances_by_process_instance_id."""
- load_test_spec(
- "message_receiver",
- process_model_source_directory="message_send_one_conversation",
- bpmn_file_name="message_receiver",
+ process_group_id = "test_message_start"
+ process_model_id = "message_receiver"
+ bpmn_file_name = "message_receiver.bpmn"
+ bpmn_file_location = "message_send_one_conversation"
+ self.basic_test_setup(
+ client,
+ with_super_admin_user,
+ process_group_id=process_group_id,
+ process_model_id=process_model_id,
+ bpmn_file_name=bpmn_file_name,
+ bpmn_file_location=bpmn_file_location,
)
+ # load_test_spec(
+ # "message_receiver",
+ # process_model_source_directory="message_send_one_conversation",
+ # bpmn_file_name="message_receiver",
+ # )
message_model_identifier = "message_send"
payload = {
"topica": "the_topica_string",
@@ -1753,16 +1964,29 @@ class TestProcessApi(BaseTest):
finance_group = GroupModel.query.filter_by(identifier="Finance Team").first()
assert finance_group is not None
- process_model = load_test_spec(
- process_model_id="model_with_lanes",
- bpmn_file_name="lanes.bpmn",
- process_group_id="finance",
+ process_group_id = "finance"
+ process_model_id = "model_with_lanes"
+ bpmn_file_name = "lanes.bpmn"
+ bpmn_file_location = "model_with_lanes"
+ process_model_identifier = self.basic_test_setup(
+ client,
+ with_super_admin_user,
+ process_group_id=process_group_id,
+ process_model_id=process_model_id,
+ bpmn_file_name=bpmn_file_name,
+ bpmn_file_location=bpmn_file_location,
)
+ # process_model = load_test_spec(
+ # process_model_id="model_with_lanes",
+ # bpmn_file_name="lanes.bpmn",
+ # process_group_id="finance",
+ # )
+
response = self.create_process_instance(
client,
- process_model.process_group_id,
- process_model.id,
+ # process_model.process_group_id,
+ process_model_identifier,
headers=self.logged_in_headers(initiator_user),
)
assert response.status_code == 201
@@ -1770,7 +1994,7 @@ class TestProcessApi(BaseTest):
assert response.json is not None
process_instance_id = response.json["id"]
response = client.post(
- f"/v1.0/process-models/{process_model.process_group_id}/{process_model.id}/process-instances/{process_instance_id}/run",
+ f"/v1.0/process-instances/{process_instance_id}/run",
headers=self.logged_in_headers(initiator_user),
)
assert response.status_code == 200
@@ -1936,3 +2160,127 @@ class TestProcessApi(BaseTest):
# assert "pagingInfo" in rpc_json_data["result"]
#
# print("get_waku_messages")
+
+ def test_process_instance_suspend(
+ self,
+ app: Flask,
+ client: FlaskClient,
+ with_db_and_bpmn_file_cleanup: None,
+ with_super_admin_user: UserModel,
+ ) -> None:
+ """Test_process_instance_suspend."""
+ bpmn_file_name = "manual_task.bpmn"
+ bpmn_file_location = "manual_task"
+ process_model_identifier = self.basic_test_setup(
+ client=client,
+ user=with_super_admin_user,
+ process_model_id="manual_task",
+ bpmn_file_name=bpmn_file_name,
+ bpmn_file_location=bpmn_file_location,
+ )
+
+ bpmn_file_data_bytes = self.get_test_data_file_contents(
+ bpmn_file_name, bpmn_file_location
+ )
+ self.create_spec_file(
+ client=client,
+ process_model_id=process_model_identifier,
+ process_model_location=process_model_identifier,
+ file_name=bpmn_file_name,
+ file_data=bpmn_file_data_bytes,
+ user=with_super_admin_user,
+ )
+
+ headers = self.logged_in_headers(with_super_admin_user)
+ response = self.create_process_instance(
+ client, process_model_identifier, headers
+ )
+ assert response.json is not None
+ process_instance_id = response.json["id"]
+
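+        # manual_task stops at a user task, so after run the instance should be waiting for user input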
+ client.post(
+ f"/v1.0/process-instances/{process_instance_id}/run",
+ headers=self.logged_in_headers(with_super_admin_user),
+ )
+
+ process_instance = ProcessInstanceService().get_process_instance(
+ process_instance_id
+ )
+ assert process_instance.status == "user_input_required"
+
+ client.post(
+ f"/v1.0/process-instances/{process_instance_id}/suspend",
+ headers=self.logged_in_headers(with_super_admin_user),
+ )
+ process_instance = ProcessInstanceService().get_process_instance(
+ process_instance_id
+ )
+ assert process_instance.status == "suspended"
+
+ # TODO: Why can I run a suspended process instance?
+ response = client.post(
+ f"/v1.0/process-instances/{process_instance_id}/run",
+ headers=self.logged_in_headers(with_super_admin_user),
+ )
+
+ # task = response.json['next_task']
+
+ print("test_process_instance_suspend")
+
+ def test_script_unit_test_run(
+ self,
+ app: Flask,
+ client: FlaskClient,
+ with_db_and_bpmn_file_cleanup: None,
+ with_super_admin_user: UserModel,
+ ) -> None:
+ """Test_script_unit_test_run."""
+ process_group_id = "test_group"
+ process_model_id = "simple_script"
+ bpmn_file_name = "simple_script.bpmn"
+ bpmn_file_location = "simple_script"
+ process_model_identifier = self.basic_test_setup(
+ client=client,
+ user=with_super_admin_user,
+ process_group_id=process_group_id,
+ process_model_id=process_model_id,
+ bpmn_file_name=bpmn_file_name,
+ bpmn_file_location=bpmn_file_location,
+ )
+
+ bpmn_file_data_bytes = self.get_test_data_file_contents(
+ bpmn_file_name, bpmn_file_location
+ )
+ self.create_spec_file(
+ client=client,
+ process_model_id=process_model_identifier,
+ process_model_location=process_model_identifier,
+ file_name=bpmn_file_name,
+ file_data=bpmn_file_data_bytes,
+ user=with_super_admin_user,
+ )
+
+ # python_script = _get_required_parameter_or_raise("python_script", body)
+ # input_json = _get_required_parameter_or_raise("input_json", body)
+ # expected_output_json = _get_required_parameter_or_raise(
+ # "expected_output_json", body
+ # )
+ python_script = "c = a + b"
+ input_json = {"a": 1, "b": 2}
+ expected_output_json = {"a": 1, "b": 2, "c": 3}
+ # bpmn_task_identifier = "Activity_CalculateNewData"
+
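+        # a trivial script, the input to run it against, and the output we expect back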
+ data = {
+ "python_script": python_script,
+ "input_json": input_json,
+ "expected_output_json": expected_output_json,
+ }
+
+        modified_process_model_identifier = process_model_identifier.replace("/", ":")
+        response = client.post(  # noqa: F841
+            f"/v1.0/process-models/{modified_process_model_identifier}/script-unit-tests/run",
+ headers=self.logged_in_headers(with_super_admin_user),
+ content_type="application/json",
+ data=json.dumps(data),
+ )
+
+ print("test_script_unit_test_run")
diff --git a/tests/spiffworkflow_backend/integration/test_secret_service.py b/tests/spiffworkflow_backend/integration/test_secret_service.py
index 3735ebc56..071ef6ccd 100644
--- a/tests/spiffworkflow_backend/integration/test_secret_service.py
+++ b/tests/spiffworkflow_backend/integration/test_secret_service.py
@@ -42,16 +42,18 @@ class SecretServiceTestHelpers(BaseTest):
self.test_process_group_id,
display_name=self.test_process_group_display_name,
)
+ process_model_identifier = (
+ f"{self.test_process_group_id}/{self.test_process_model_id}"
+ )
self.create_process_model_with_api(
client,
- process_group_id=self.test_process_group_id,
- process_model_id=self.test_process_model_id,
+ process_model_id=process_model_identifier,
process_model_display_name=self.test_process_model_display_name,
process_model_description=self.test_process_model_description,
user=user,
)
process_model_info = ProcessModelService().get_process_model(
- self.test_process_model_id, self.test_process_group_id
+ process_model_identifier
)
return process_model_info
diff --git a/tests/spiffworkflow_backend/scripts/test_get_group_members.py b/tests/spiffworkflow_backend/scripts/test_get_group_members.py
index 34a144db8..8a6046b5b 100644
--- a/tests/spiffworkflow_backend/scripts/test_get_group_members.py
+++ b/tests/spiffworkflow_backend/scripts/test_get_group_members.py
@@ -1,10 +1,12 @@
"""Test_get_localtime."""
from flask.app import Flask
+from flask.testing import FlaskClient
from flask_bpmn.models.db import db
from tests.spiffworkflow_backend.helpers.base_test import BaseTest
from tests.spiffworkflow_backend.helpers.test_data import load_test_spec
from spiffworkflow_backend.models.group import GroupModel
+from spiffworkflow_backend.models.user import UserModel
from spiffworkflow_backend.services.process_instance_processor import (
ProcessInstanceProcessor,
)
@@ -17,7 +19,9 @@ class TestGetGroupMembers(BaseTest):
def test_can_get_members_of_a_group(
self,
app: Flask,
+ client: FlaskClient,
with_db_and_bpmn_file_cleanup: None,
+ with_super_admin_user: UserModel,
) -> None:
"""Test_can_get_members_of_a_group."""
initiator_user = self.find_or_create_user("initiator_user")
@@ -34,9 +38,13 @@ class TestGetGroupMembers(BaseTest):
UserService.add_user_to_group(testuser2, group_a)
UserService.add_user_to_group(testuser3, group_b)
+ self.create_process_group(
+ client, with_super_admin_user, "test_group", "test_group"
+ )
process_model = load_test_spec(
- process_model_id="get_group_members",
+ process_model_id="test_group/get_group_members",
bpmn_file_name="get_group_members.bpmn",
+ process_model_source_directory="get_group_members",
)
process_instance = self.create_process_instance_from_process_model(
process_model=process_model, user=initiator_user
diff --git a/tests/spiffworkflow_backend/scripts/test_get_localtime.py b/tests/spiffworkflow_backend/scripts/test_get_localtime.py
index e3d1e50cc..9e65b9707 100644
--- a/tests/spiffworkflow_backend/scripts/test_get_localtime.py
+++ b/tests/spiffworkflow_backend/scripts/test_get_localtime.py
@@ -49,8 +49,18 @@ class TestGetLocaltime(BaseTest):
) -> None:
"""Test_process_instance_run."""
initiator_user = self.find_or_create_user("initiator_user")
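+        # grant initiator_user the permissions it needs to create the process group below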
+ self.add_permissions_to_user(
+ initiator_user,
+ target_uri="/v1.0/process-groups",
+ permission_names=["read", "create"],
+ )
+ self.create_process_group(
+ client=client, user=initiator_user, process_group_id="test_group"
+ )
process_model = load_test_spec(
- process_model_id="get_localtime", bpmn_file_name="get_localtime.bpmn"
+ process_model_id="test_group/get_localtime",
+ bpmn_file_name="get_localtime.bpmn",
+ process_model_source_directory="get_localtime",
)
process_instance = self.create_process_instance_from_process_model(
process_model=process_model, user=initiator_user
diff --git a/tests/spiffworkflow_backend/unit/test_authorization_service.py b/tests/spiffworkflow_backend/unit/test_authorization_service.py
index ff2ac9bcf..5d0a10be1 100644
--- a/tests/spiffworkflow_backend/unit/test_authorization_service.py
+++ b/tests/spiffworkflow_backend/unit/test_authorization_service.py
@@ -1,9 +1,10 @@
"""Test_message_service."""
import pytest
from flask import Flask
+from flask.testing import FlaskClient
from tests.spiffworkflow_backend.helpers.base_test import BaseTest
-from tests.spiffworkflow_backend.helpers.test_data import load_test_spec
+from spiffworkflow_backend.models.user import UserModel
from spiffworkflow_backend.models.user import UserNotFoundError
from spiffworkflow_backend.services.authorization_service import AuthorizationService
from spiffworkflow_backend.services.process_instance_processor import (
@@ -12,6 +13,7 @@ from spiffworkflow_backend.services.process_instance_processor import (
from spiffworkflow_backend.services.process_instance_service import (
ProcessInstanceService,
)
+from spiffworkflow_backend.services.process_model_service import ProcessModelService
class TestAuthorizationService(BaseTest):
@@ -89,7 +91,11 @@ class TestAuthorizationService(BaseTest):
)
def test_user_can_be_added_to_active_task_on_first_login(
- self, app: Flask, with_db_and_bpmn_file_cleanup: None
+ self,
+ app: Flask,
+ client: FlaskClient,
+ with_db_and_bpmn_file_cleanup: None,
+ with_super_admin_user: UserModel,
) -> None:
"""Test_user_can_be_added_to_active_task_on_first_login."""
initiator_user = self.find_or_create_user("initiator_user")
@@ -98,8 +104,17 @@ class TestAuthorizationService(BaseTest):
self.find_or_create_user("testuser1")
AuthorizationService.import_permissions_from_yaml_file()
- process_model = load_test_spec(
- process_model_id="model_with_lanes", bpmn_file_name="lanes.bpmn"
+ process_model_identifier = self.basic_test_setup(
+ client=client,
+ user=with_super_admin_user,
+ process_group_id="test_group",
+ process_model_id="model_with_lanes",
+ bpmn_file_name="lanes.bpmn",
+ bpmn_file_location="model_with_lanes",
+ )
+
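+        # fetch the model back through the service since basic_test_setup only returns its identifier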
+ process_model = ProcessModelService().get_process_model(
+ process_model_id=process_model_identifier
)
process_instance = self.create_process_instance_from_process_model(
process_model=process_model, user=initiator_user
diff --git a/tests/spiffworkflow_backend/unit/test_dot_notation.py b/tests/spiffworkflow_backend/unit/test_dot_notation.py
index 4446d4d96..ff37c3b5f 100644
--- a/tests/spiffworkflow_backend/unit/test_dot_notation.py
+++ b/tests/spiffworkflow_backend/unit/test_dot_notation.py
@@ -1,8 +1,9 @@
"""Test_various_bpmn_constructs."""
from flask.app import Flask
+from flask.testing import FlaskClient
from tests.spiffworkflow_backend.helpers.base_test import BaseTest
-from tests.spiffworkflow_backend.helpers.test_data import load_test_spec
+from spiffworkflow_backend.models.user import UserModel
from spiffworkflow_backend.services.process_instance_processor import (
ProcessInstanceProcessor,
)
@@ -15,21 +16,36 @@ class TestDotNotation(BaseTest):
"""TestVariousBpmnConstructs."""
def test_dot_notation(
- self, app: Flask, with_db_and_bpmn_file_cleanup: None
+ self,
+ app: Flask,
+ client: FlaskClient,
+ with_db_and_bpmn_file_cleanup: None,
+ with_super_admin_user: UserModel,
) -> None:
"""Test_form_data_conversion_to_dot_dict."""
- process_model = load_test_spec(
- "test_dot_notation",
- bpmn_file_name="diagram.bpmn",
- process_model_source_directory="dot_notation",
+ process_group_id = "dot_notation_group"
+ process_model_id = "test_dot_notation"
+ bpmn_file_name = "diagram.bpmn"
+ bpmn_file_location = "dot_notation"
+ process_model_identifier = self.basic_test_setup(
+ client,
+ with_super_admin_user,
+ process_group_id=process_group_id,
+ process_model_id=process_model_id,
+ bpmn_file_name=bpmn_file_name,
+ bpmn_file_location=bpmn_file_location,
)
- current_user = self.find_or_create_user()
- process_instance = self.create_process_instance_from_process_model(
- process_model
+ headers = self.logged_in_headers(with_super_admin_user)
+ response = self.create_process_instance(
+ client, process_model_identifier, headers
)
+        assert response.json is not None
+        process_instance_id = response.json["id"]
+ process_instance = ProcessInstanceService().get_process_instance(
+ process_instance_id
+ )
+
processor = ProcessInstanceProcessor(process_instance)
-
processor.do_engine_steps(save=True)
user_task = processor.get_ready_user_tasks()[0]
@@ -41,7 +57,7 @@ class TestDotNotation(BaseTest):
"invoice.dueDate": "09/30/2022",
}
ProcessInstanceService.complete_form_task(
- processor, user_task, form_data, current_user
+ processor, user_task, form_data, with_super_admin_user
)
expected = {
diff --git a/tests/spiffworkflow_backend/unit/test_message_instance.py b/tests/spiffworkflow_backend/unit/test_message_instance.py
index 842d5ff4b..39b37f2c2 100644
--- a/tests/spiffworkflow_backend/unit/test_message_instance.py
+++ b/tests/spiffworkflow_backend/unit/test_message_instance.py
@@ -1,24 +1,52 @@
"""Test_message_instance."""
import pytest
from flask import Flask
+from flask.testing import FlaskClient
from flask_bpmn.models.db import db
from tests.spiffworkflow_backend.helpers.base_test import BaseTest
-from tests.spiffworkflow_backend.helpers.test_data import load_test_spec
from spiffworkflow_backend.models.message_instance import MessageInstanceModel
from spiffworkflow_backend.models.message_model import MessageModel
+from spiffworkflow_backend.models.user import UserModel
+from spiffworkflow_backend.services.process_model_service import ProcessModelService
class TestMessageInstance(BaseTest):
"""TestMessageInstance."""
+ def setup_message_tests(self, client: FlaskClient, user: UserModel) -> str:
+ """Setup_message_tests."""
+ process_group_id = "test_group"
+ process_model_id = "hello_world"
+ bpmn_file_name = "hello_world.bpmn"
+ bpmn_file_location = "hello_world"
+ process_model_identifier = self.basic_test_setup(
+ client,
+ user,
+ process_group_id=process_group_id,
+ process_model_id=process_model_id,
+ bpmn_file_name=bpmn_file_name,
+ bpmn_file_location=bpmn_file_location,
+ )
+ return process_model_identifier
+
def test_can_create_message_instance(
- self, app: Flask, with_db_and_bpmn_file_cleanup: None
+ self,
+ app: Flask,
+ client: FlaskClient,
+ with_db_and_bpmn_file_cleanup: None,
+ with_super_admin_user: UserModel,
) -> None:
"""Test_can_create_message_instance."""
message_model_identifier = "message_model_one"
message_model = self.create_message_model(message_model_identifier)
- process_model = load_test_spec("hello_world")
+ process_model_identifier = self.setup_message_tests(
+ client, with_super_admin_user
+ )
+
+ process_model = ProcessModelService().get_process_model(
+ process_model_id=process_model_identifier
+ )
process_instance = self.create_process_instance_from_process_model(
process_model, "waiting"
)
@@ -40,12 +68,22 @@ class TestMessageInstance(BaseTest):
assert queued_message_from_query is not None
def test_cannot_set_invalid_status(
- self, app: Flask, with_db_and_bpmn_file_cleanup: None
+ self,
+ app: Flask,
+ client: FlaskClient,
+ with_db_and_bpmn_file_cleanup: None,
+ with_super_admin_user: UserModel,
) -> None:
"""Test_cannot_set_invalid_status."""
message_model_identifier = "message_model_one"
message_model = self.create_message_model(message_model_identifier)
- process_model = load_test_spec("hello_world")
+ process_model_identifier = self.setup_message_tests(
+ client, with_super_admin_user
+ )
+
+ process_model = ProcessModelService().get_process_model(
+ process_model_id=process_model_identifier
+ )
process_instance = self.create_process_instance_from_process_model(
process_model, "waiting"
)
@@ -76,12 +114,22 @@ class TestMessageInstance(BaseTest):
)
def test_cannot_set_invalid_message_type(
- self, app: Flask, with_db_and_bpmn_file_cleanup: None
+ self,
+ app: Flask,
+ client: FlaskClient,
+ with_db_and_bpmn_file_cleanup: None,
+ with_super_admin_user: UserModel,
) -> None:
"""Test_cannot_set_invalid_message_type."""
message_model_identifier = "message_model_one"
message_model = self.create_message_model(message_model_identifier)
- process_model = load_test_spec("hello_world")
+ process_model_identifier = self.setup_message_tests(
+ client, with_super_admin_user
+ )
+
+ process_model = ProcessModelService().get_process_model(
+ process_model_id=process_model_identifier
+ )
process_instance = self.create_process_instance_from_process_model(
process_model, "waiting"
)
@@ -113,12 +161,22 @@ class TestMessageInstance(BaseTest):
)
def test_force_failure_cause_if_status_is_failure(
- self, app: Flask, with_db_and_bpmn_file_cleanup: None
+ self,
+ app: Flask,
+ client: FlaskClient,
+ with_db_and_bpmn_file_cleanup: None,
+ with_super_admin_user: UserModel,
) -> None:
"""Test_force_failure_cause_if_status_is_failure."""
message_model_identifier = "message_model_one"
message_model = self.create_message_model(message_model_identifier)
- process_model = load_test_spec("hello_world")
+ process_model_identifier = self.setup_message_tests(
+ client, with_super_admin_user
+ )
+
+ process_model = ProcessModelService().get_process_model(
+ process_model_id=process_model_identifier
+ )
process_instance = self.create_process_instance_from_process_model(
process_model, "waiting"
)
@@ -154,7 +212,8 @@ class TestMessageInstance(BaseTest):
assert queued_message.id is not None
assert queued_message.failure_cause == "THIS TEST FAILURE"
- def create_message_model(self, message_model_identifier: str) -> MessageModel:
+ @staticmethod
+ def create_message_model(message_model_identifier: str) -> MessageModel:
"""Create_message_model."""
message_model = MessageModel(identifier=message_model_identifier)
db.session.add(message_model)
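A note on the @staticmethod change above: the other tests in this file keep calling self.create_message_model(...), which still works because Python resolves a staticmethod through an instance exactly as it does through the class. A tiny standalone illustration (Demo is made up for this example, not a backend class):

class Demo:
    @staticmethod
    def create_message_model(identifier: str) -> str:
        # Stand-in for the real method; just returns the identifier it was given.
        return identifier


demo = Demo()
# Both forms call the same underlying function.
assert demo.create_message_model("message_model_one") == "message_model_one"
assert Demo.create_message_model("message_model_one") == "message_model_one"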
diff --git a/tests/spiffworkflow_backend/unit/test_message_service.py b/tests/spiffworkflow_backend/unit/test_message_service.py
index 38079c960..aa1f28053 100644
--- a/tests/spiffworkflow_backend/unit/test_message_service.py
+++ b/tests/spiffworkflow_backend/unit/test_message_service.py
@@ -1,5 +1,6 @@
"""Test_message_service."""
from flask import Flask
+from flask.testing import FlaskClient
from tests.spiffworkflow_backend.helpers.base_test import BaseTest
from tests.spiffworkflow_backend.helpers.test_data import load_test_spec
@@ -9,6 +10,7 @@ from spiffworkflow_backend.models.message_correlation_message_instance import (
)
from spiffworkflow_backend.models.message_instance import MessageInstanceModel
from spiffworkflow_backend.models.process_instance import ProcessInstanceModel
+from spiffworkflow_backend.models.user import UserModel
from spiffworkflow_backend.services.message_service import MessageService
from spiffworkflow_backend.services.process_instance_processor import (
ProcessInstanceProcessor,
@@ -22,25 +24,32 @@ class TestMessageService(BaseTest):
"""TestMessageService."""
def test_can_send_message_to_waiting_message(
- self, app: Flask, with_db_and_bpmn_file_cleanup: None
+ self,
+ app: Flask,
+ client: FlaskClient,
+ with_db_and_bpmn_file_cleanup: None,
+ with_super_admin_user: UserModel,
) -> None:
"""Test_can_send_message_to_waiting_message."""
- process_model_sender = load_test_spec(
- "message_sender",
- process_model_source_directory="message_send_one_conversation",
- bpmn_file_name="message_sender",
+ process_group_id = "test_group"
+ self.create_process_group(
+ client, with_super_admin_user, process_group_id, process_group_id
)
+
load_test_spec(
- "message_receiver",
+ "test_group/message_receiver",
process_model_source_directory="message_send_one_conversation",
- bpmn_file_name="message_receiver",
+ bpmn_file_name="message_receiver.bpmn",
+ )
+ process_model_sender = load_test_spec(
+ "test_group/message_sender",
+ process_model_source_directory="message_send_one_conversation",
+ bpmn_file_name="message_sender.bpmn",
)
- user = self.find_or_create_user()
process_instance_sender = ProcessInstanceService.create_process_instance(
process_model_sender.id,
- user,
- process_group_identifier=process_model_sender.process_group_id,
+ with_super_admin_user,
)
processor_sender = ProcessInstanceProcessor(process_instance_sender)
@@ -115,21 +124,30 @@ class TestMessageService(BaseTest):
assert process_instance.status == "complete"
def test_can_send_message_to_multiple_process_models(
- self, app: Flask, with_db_and_bpmn_file_cleanup: None
+ self,
+ app: Flask,
+ client: FlaskClient,
+ with_db_and_bpmn_file_cleanup: None,
+ with_super_admin_user: UserModel,
) -> None:
"""Test_can_send_message_to_multiple_process_models."""
+ process_group_id = "test_group"
+ self.create_process_group(
+ client, with_super_admin_user, process_group_id, process_group_id
+ )
+
process_model_sender = load_test_spec(
- "message_sender",
+ "test_group/message_sender",
process_model_source_directory="message_send_two_conversations",
bpmn_file_name="message_sender",
)
load_test_spec(
- "message_receiver_one",
+ "test_group/message_receiver_one",
process_model_source_directory="message_send_two_conversations",
bpmn_file_name="message_receiver_one",
)
load_test_spec(
- "message_receiver_two",
+ "test_group/message_receiver_two",
process_model_source_directory="message_send_two_conversations",
bpmn_file_name="message_receiver_two",
)
@@ -139,7 +157,7 @@ class TestMessageService(BaseTest):
process_instance_sender = ProcessInstanceService.create_process_instance(
process_model_sender.id,
user,
- process_group_identifier=process_model_sender.process_group_id,
+ # process_group_identifier=process_model_sender.process_group_id,
)
processor_sender = ProcessInstanceProcessor(process_instance_sender)
@@ -189,24 +207,24 @@ class TestMessageService(BaseTest):
assert len(process_instance_result) == 3
process_instance_receiver_one = ProcessInstanceModel.query.filter_by(
- process_model_identifier="message_receiver_one"
+ process_model_identifier="test_group/message_receiver_one"
).first()
assert process_instance_receiver_one is not None
process_instance_receiver_two = ProcessInstanceModel.query.filter_by(
- process_model_identifier="message_receiver_two"
+ process_model_identifier="test_group/message_receiver_two"
).first()
assert process_instance_receiver_two is not None
# just make sure it's a different process instance
assert (
process_instance_receiver_one.process_model_identifier
- == "message_receiver_one"
+ == "test_group/message_receiver_one"
)
assert process_instance_receiver_one.id != process_instance_sender.id
assert process_instance_receiver_one.status == "complete"
assert (
process_instance_receiver_two.process_model_identifier
- == "message_receiver_two"
+ == "test_group/message_receiver_two"
)
assert process_instance_receiver_two.id != process_instance_sender.id
assert process_instance_receiver_two.status == "complete"
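The pattern in this file repeats throughout the rest of these tests: process model identifiers now carry their group prefix (for example "test_group/message_sender"), the group is created through the API up front, and create_process_instance no longer takes a separate process_group_identifier. Condensed into one rough sketch that reuses the helper names visible at the call sites; their exact signatures live in BaseTest and the test_data helpers and may differ:

from tests.spiffworkflow_backend.helpers.test_data import load_test_spec


def setup_group_scoped_model(test, client, admin_user, group_id, model_id, source_dir, bpmn_file):
    """Create the group via the API, then register the model under '<group>/<model>'."""
    test.create_process_group(client, admin_user, group_id, group_id)
    return load_test_spec(
        f"{group_id}/{model_id}",
        process_model_source_directory=source_dir,
        bpmn_file_name=bpmn_file,
    )

With the group encoded in the identifier, ProcessInstanceService.create_process_instance(process_model.id, user) is all the caller needs afterwards, as the hunks above show.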
diff --git a/tests/spiffworkflow_backend/unit/test_permissions.py b/tests/spiffworkflow_backend/unit/test_permissions.py
index 39f857e2f..117fd0af5 100644
--- a/tests/spiffworkflow_backend/unit/test_permissions.py
+++ b/tests/spiffworkflow_backend/unit/test_permissions.py
@@ -1,5 +1,6 @@
"""Test Permissions."""
from flask.app import Flask
+from flask.testing import FlaskClient
from flask_bpmn.models.db import db
from tests.spiffworkflow_backend.helpers.base_test import BaseTest
from tests.spiffworkflow_backend.helpers.test_data import load_test_spec
@@ -8,6 +9,7 @@ from spiffworkflow_backend.models.group import GroupModel
from spiffworkflow_backend.models.permission_assignment import PermissionAssignmentModel
from spiffworkflow_backend.models.permission_target import PermissionTargetModel
from spiffworkflow_backend.models.principal import PrincipalModel
+from spiffworkflow_backend.models.user import UserModel
from spiffworkflow_backend.services.user_service import UserService
@@ -22,13 +24,21 @@ class TestPermissions(BaseTest):
"""TestPermissions."""
def test_user_can_be_given_permission_to_administer_process_group(
- self, app: Flask, with_db_and_bpmn_file_cleanup: None
+ self,
+ app: Flask,
+ client: FlaskClient,
+ with_db_and_bpmn_file_cleanup: None,
+ with_super_admin_user: UserModel,
) -> None:
"""Test_user_can_be_given_permission_to_administer_process_group."""
process_group_id = "group-a"
+ self.create_process_group(
+ client, with_super_admin_user, process_group_id, process_group_id
+ )
load_test_spec(
- "timers_intermediate_catch_event",
- process_group_id=process_group_id,
+ "group-a/timers_intermediate_catch_event",
+ bpmn_file_name="timers_intermediate_catch_event.bpmn",
+ process_model_source_directory="timers_intermediate_catch_event",
)
dan = self.find_or_create_user()
principal = dan.principal
@@ -55,8 +65,9 @@ class TestPermissions(BaseTest):
process_group_b_id = process_group_ids[1]
for process_group_id in process_group_ids:
load_test_spec(
- "timers_intermediate_catch_event",
- process_group_id=process_group_id,
+ f"{process_group_id}/timers_intermediate_catch_event",
+ bpmn_file_name="timers_intermediate_catch_event",
+ process_model_source_directory="timers_intermediate_catch_event",
)
group_a_admin = self.find_or_create_user()
@@ -86,11 +97,11 @@ class TestPermissions(BaseTest):
"""Test_user_can_be_granted_access_through_a_group."""
process_group_ids = ["group-a", "group-b"]
process_group_a_id = process_group_ids[0]
- process_group_ids[1]
for process_group_id in process_group_ids:
load_test_spec(
- "timers_intermediate_catch_event",
- process_group_id=process_group_id,
+ f"{process_group_id}/timers_intermediate_catch_event",
+ bpmn_file_name="timers_intermediate_catch_event.bpmn",
+ process_model_source_directory="timers_intermediate_catch_event",
)
user = self.find_or_create_user()
group = GroupModel(identifier="groupA")
@@ -127,8 +138,9 @@ class TestPermissions(BaseTest):
process_group_b_id = process_group_ids[1]
for process_group_id in process_group_ids:
load_test_spec(
- "timers_intermediate_catch_event",
- process_group_id=process_group_id,
+ f"{process_group_id}/timers_intermediate_catch_event",
+ bpmn_file_name="timers_intermediate_catch_event.bpmn",
+ process_model_source_directory="timers_intermediate_catch_event",
)
group_a_admin = self.find_or_create_user()
diff --git a/tests/spiffworkflow_backend/unit/test_process_instance_processor.py b/tests/spiffworkflow_backend/unit/test_process_instance_processor.py
index 543b99c02..ad7aefe34 100644
--- a/tests/spiffworkflow_backend/unit/test_process_instance_processor.py
+++ b/tests/spiffworkflow_backend/unit/test_process_instance_processor.py
@@ -2,11 +2,13 @@
import pytest
from flask import g
from flask.app import Flask
+from flask.testing import FlaskClient
from tests.spiffworkflow_backend.helpers.base_test import BaseTest
from tests.spiffworkflow_backend.helpers.test_data import load_test_spec
from spiffworkflow_backend.models.group import GroupModel
from spiffworkflow_backend.models.process_instance import ProcessInstanceStatus
+from spiffworkflow_backend.models.user import UserModel
from spiffworkflow_backend.services.authorization_service import AuthorizationService
from spiffworkflow_backend.services.authorization_service import (
UserDoesNotHaveAccessToTaskError,
@@ -50,9 +52,14 @@ class TestProcessInstanceProcessor(BaseTest):
def test_sets_permission_correctly_on_active_task(
self,
app: Flask,
+ client: FlaskClient,
with_db_and_bpmn_file_cleanup: None,
+ with_super_admin_user: UserModel,
) -> None:
"""Test_sets_permission_correctly_on_active_task."""
+ self.create_process_group(
+ client, with_super_admin_user, "test_group", "test_group"
+ )
initiator_user = self.find_or_create_user("initiator_user")
finance_user = self.find_or_create_user("testuser2")
assert initiator_user.principal is not None
@@ -63,7 +70,9 @@ class TestProcessInstanceProcessor(BaseTest):
assert finance_group is not None
process_model = load_test_spec(
- process_model_id="model_with_lanes", bpmn_file_name="lanes.bpmn"
+ process_model_id="test_group/model_with_lanes",
+ bpmn_file_name="lanes.bpmn",
+ process_model_source_directory="model_with_lanes",
)
process_instance = self.create_process_instance_from_process_model(
process_model=process_model, user=initiator_user
@@ -123,9 +132,14 @@ class TestProcessInstanceProcessor(BaseTest):
def test_sets_permission_correctly_on_active_task_when_using_dict(
self,
app: Flask,
+ client: FlaskClient,
with_db_and_bpmn_file_cleanup: None,
+ with_super_admin_user: UserModel,
) -> None:
"""Test_sets_permission_correctly_on_active_task_when_using_dict."""
+ self.create_process_group(
+ client, with_super_admin_user, "test_group", "test_group"
+ )
initiator_user = self.find_or_create_user("initiator_user")
finance_user_three = self.find_or_create_user("testuser3")
finance_user_four = self.find_or_create_user("testuser4")
@@ -138,8 +152,9 @@ class TestProcessInstanceProcessor(BaseTest):
assert finance_group is not None
process_model = load_test_spec(
- process_model_id="model_with_lanes",
+ process_model_id="test_group/model_with_lanes",
bpmn_file_name="lanes_with_owner_dict.bpmn",
+ process_model_source_directory="model_with_lanes",
)
process_instance = self.create_process_instance_from_process_model(
process_model=process_model, user=initiator_user
diff --git a/tests/spiffworkflow_backend/unit/test_process_instance_report.py b/tests/spiffworkflow_backend/unit/test_process_instance_report.py
index acfac1380..482395076 100644
--- a/tests/spiffworkflow_backend/unit/test_process_instance_report.py
+++ b/tests/spiffworkflow_backend/unit/test_process_instance_report.py
@@ -128,8 +128,6 @@ def do_report_with_metadata_and_instances(
"""Do_report_with_metadata_and_instances."""
process_instance_report = ProcessInstanceReportModel.create_with_attributes(
identifier="sure",
- process_group_identifier=process_instances[0].process_group_identifier,
- process_model_identifier=process_instances[0].process_model_identifier,
report_metadata=report_metadata,
user=BaseTest.find_or_create_user(),
)
diff --git a/tests/spiffworkflow_backend/unit/test_process_model.py b/tests/spiffworkflow_backend/unit/test_process_model.py
index 479db44a5..5b5b9f256 100644
--- a/tests/spiffworkflow_backend/unit/test_process_model.py
+++ b/tests/spiffworkflow_backend/unit/test_process_model.py
@@ -1,11 +1,13 @@
"""Process Model."""
from flask.app import Flask
+from flask.testing import FlaskClient
from flask_bpmn.models.db import db
from tests.spiffworkflow_backend.helpers.base_test import BaseTest
from tests.spiffworkflow_backend.helpers.test_data import load_test_spec
from spiffworkflow_backend.models.bpmn_process_id_lookup import BpmnProcessIdLookup
from spiffworkflow_backend.models.process_model import ProcessModelInfo
+from spiffworkflow_backend.models.user import UserModel
from spiffworkflow_backend.services.process_instance_processor import (
ProcessInstanceProcessor,
)
@@ -22,11 +24,19 @@ class TestProcessModel(BaseTest):
assert process_model_one.files == []
def test_can_run_process_model_with_call_activities_when_in_same_process_model_directory(
- self, app: Flask, with_db_and_bpmn_file_cleanup: None
+ self,
+ app: Flask,
+ client: FlaskClient,
+ with_db_and_bpmn_file_cleanup: None,
+ with_super_admin_user: UserModel,
) -> None:
"""Test_can_run_process_model_with_call_activities."""
+ self.create_process_group(
+ client, with_super_admin_user, "test_group", "test_group"
+ )
process_model = load_test_spec(
- "call_activity_test",
+ "test_group/call_activity_test",
+ # bpmn_file_name="call_activity_test.bpmn",
process_model_source_directory="call_activity_same_directory",
)
@@ -38,11 +48,18 @@ class TestProcessModel(BaseTest):
assert process_instance.status == "complete"
def test_can_run_process_model_with_call_activities_when_not_in_same_directory(
- self, app: Flask, with_db_and_bpmn_file_cleanup: None
+ self,
+ app: Flask,
+ client: FlaskClient,
+ with_db_and_bpmn_file_cleanup: None,
+ with_super_admin_user: UserModel,
) -> None:
"""Test_can_run_process_model_with_call_activities."""
+ self.create_process_group(
+ client, with_super_admin_user, "test_group", "test_group"
+ )
process_model = load_test_spec(
- "call_activity_nested",
+ "test_group/call_activity_nested",
process_model_source_directory="call_activity_nested",
bpmn_file_name="call_activity_nested",
)
@@ -54,7 +71,7 @@ class TestProcessModel(BaseTest):
]
for bpmn_file_name in bpmn_file_names:
load_test_spec(
- bpmn_file_name,
+ f"test_group/{bpmn_file_name}",
process_model_source_directory="call_activity_nested",
bpmn_file_name=bpmn_file_name,
)
@@ -66,11 +83,18 @@ class TestProcessModel(BaseTest):
assert process_instance.status == "complete"
def test_can_run_process_model_with_call_activities_when_process_identifier_is_not_in_database(
- self, app: Flask, with_db_and_bpmn_file_cleanup: None
+ self,
+ app: Flask,
+ client: FlaskClient,
+ with_db_and_bpmn_file_cleanup: None,
+ with_super_admin_user: UserModel,
) -> None:
"""Test_can_run_process_model_with_call_activities."""
+ self.create_process_group(
+ client, with_super_admin_user, "test_group", "test_group"
+ )
process_model = load_test_spec(
- "call_activity_nested",
+ "test_group/call_activity_nested",
process_model_source_directory="call_activity_nested",
bpmn_file_name="call_activity_nested",
)
@@ -82,7 +106,7 @@ class TestProcessModel(BaseTest):
]
for bpmn_file_name in bpmn_file_names:
load_test_spec(
- bpmn_file_name,
+ f"test_group/{bpmn_file_name}",
process_model_source_directory="call_activity_nested",
bpmn_file_name=bpmn_file_name,
)
@@ -93,6 +117,7 @@ class TestProcessModel(BaseTest):
# delete all of the id lookup items to force the processor to find the correct
# process model when running the process
db.session.query(BpmnProcessIdLookup).delete()
+ db.session.commit()
processor = ProcessInstanceProcessor(process_instance)
processor.do_engine_steps(save=True)
assert process_instance.status == "complete"
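The easy-to-miss addition above is the db.session.commit() after the bulk delete. Query.delete() emits the DELETE right away but leaves it pending in the open transaction; the commit was presumably added so the cleared lookup table is actually persisted before the processor runs and has to re-resolve process ids from the spec files. In isolation, using the imports this test file already has and assuming the test's app/db context:

from flask_bpmn.models.db import db
from spiffworkflow_backend.models.bpmn_process_id_lookup import BpmnProcessIdLookup

# Clear every cached process-id lookup row, then commit so nothing is left
# pending in the session's transaction before the engine runs again.
db.session.query(BpmnProcessIdLookup).delete()
db.session.commit()
assert db.session.query(BpmnProcessIdLookup).count() == 0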
diff --git a/tests/spiffworkflow_backend/unit/test_process_model_service.py b/tests/spiffworkflow_backend/unit/test_process_model_service.py
index 535dc03d3..438ef89d9 100644
--- a/tests/spiffworkflow_backend/unit/test_process_model_service.py
+++ b/tests/spiffworkflow_backend/unit/test_process_model_service.py
@@ -1,8 +1,10 @@
"""Test_process_model_service."""
from flask import Flask
+from flask.testing import FlaskClient
from tests.spiffworkflow_backend.helpers.base_test import BaseTest
from tests.spiffworkflow_backend.helpers.test_data import load_test_spec
+from spiffworkflow_backend.models.user import UserModel
from spiffworkflow_backend.services.process_model_service import ProcessModelService
@@ -10,11 +12,22 @@ class TestProcessModelService(BaseTest):
"""TestProcessModelService."""
def test_can_update_specified_attributes(
- self, app: Flask, with_db_and_bpmn_file_cleanup: None
+ self,
+ app: Flask,
+ client: FlaskClient,
+ with_db_and_bpmn_file_cleanup: None,
+ with_super_admin_user: UserModel,
) -> None:
"""Test_can_update_specified_attributes."""
- process_model = load_test_spec("hello_world")
- assert process_model.display_name == "hello_world"
+ self.create_process_group(
+ client, with_super_admin_user, "test_group", "test_group"
+ )
+ process_model = load_test_spec(
+ "test_group/hello_world",
+ bpmn_file_name="hello_world.bpmn",
+ process_model_source_directory="hello_world",
+ )
+ assert process_model.display_name == "test_group/hello_world"
primary_process_id = process_model.primary_process_id
assert primary_process_id == "Process_HelloWorld"
diff --git a/tests/spiffworkflow_backend/unit/test_restricted_script_engine.py b/tests/spiffworkflow_backend/unit/test_restricted_script_engine.py
index 9b6f1bb36..d31ea424f 100644
--- a/tests/spiffworkflow_backend/unit/test_restricted_script_engine.py
+++ b/tests/spiffworkflow_backend/unit/test_restricted_script_engine.py
@@ -1,10 +1,12 @@
"""Test_various_bpmn_constructs."""
import pytest
from flask.app import Flask
+from flask.testing import FlaskClient
from flask_bpmn.api.api_error import ApiError
from tests.spiffworkflow_backend.helpers.base_test import BaseTest
from tests.spiffworkflow_backend.helpers.test_data import load_test_spec
+from spiffworkflow_backend.models.user import UserModel
from spiffworkflow_backend.services.process_instance_processor import (
ProcessInstanceProcessor,
)
@@ -14,11 +16,18 @@ class TestOpenFile(BaseTest):
"""TestVariousBpmnConstructs."""
def test_dot_notation(
- self, app: Flask, with_db_and_bpmn_file_cleanup: None
+ self,
+ app: Flask,
+ client: FlaskClient,
+ with_db_and_bpmn_file_cleanup: None,
+ with_super_admin_user: UserModel,
) -> None:
"""Test_form_data_conversion_to_dot_dict."""
+ self.create_process_group(
+ client, with_super_admin_user, "test_group", "test_group"
+ )
process_model = load_test_spec(
- "dangerous",
+ "test_group/dangerous",
bpmn_file_name="read_etc_passwd.bpmn",
process_model_source_directory="dangerous-scripts",
)
@@ -38,11 +47,18 @@ class TestImportModule(BaseTest):
"""TestVariousBpmnConstructs."""
def test_dot_notation(
- self, app: Flask, with_db_and_bpmn_file_cleanup: None
+ self,
+ app: Flask,
+ client: FlaskClient,
+ with_db_and_bpmn_file_cleanup: None,
+ with_super_admin_user: UserModel,
) -> None:
"""Test_form_data_conversion_to_dot_dict."""
+ self.create_process_group(
+ client, with_super_admin_user, "test_group", "test_group"
+ )
process_model = load_test_spec(
- "dangerous",
+ "test_group/dangerous",
bpmn_file_name="read_env.bpmn",
process_model_source_directory="dangerous-scripts",
)
diff --git a/tests/spiffworkflow_backend/unit/test_script_unit_test_runner.py b/tests/spiffworkflow_backend/unit/test_script_unit_test_runner.py
index 69c548516..9ece043a5 100644
--- a/tests/spiffworkflow_backend/unit/test_script_unit_test_runner.py
+++ b/tests/spiffworkflow_backend/unit/test_script_unit_test_runner.py
@@ -1,8 +1,10 @@
"""Test Permissions."""
from flask.app import Flask
+from flask.testing import FlaskClient
from tests.spiffworkflow_backend.helpers.base_test import BaseTest
from tests.spiffworkflow_backend.helpers.test_data import load_test_spec
+from spiffworkflow_backend.models.user import UserModel
from spiffworkflow_backend.services.process_instance_processor import (
ProcessInstanceProcessor,
)
@@ -16,21 +18,31 @@ class TestScriptUnitTestRunner(BaseTest):
def test_takes_data_and_returns_expected_result(
self,
app: Flask,
+ client: FlaskClient,
with_db_and_bpmn_file_cleanup: None,
+ with_super_admin_user: UserModel,
) -> None:
"""Test_takes_data_and_returns_expected_result."""
app.config["THREAD_LOCAL_DATA"].process_instance_id = None
process_group_id = "test_logging_spiff_logger"
+ self.create_process_group(
+ client, with_super_admin_user, process_group_id, process_group_id
+ )
process_model_id = "simple_script"
- load_test_spec(process_model_id, process_group_id=process_group_id)
+ process_model_identifier = f"{process_group_id}/{process_model_id}"
+ load_test_spec(
+ process_model_identifier,
+ bpmn_file_name=process_model_id,
+ process_model_source_directory=process_model_id,
+ )
bpmn_process_instance = (
ProcessInstanceProcessor.get_bpmn_process_instance_from_process_model(
- process_model_id, process_group_id
+ process_model_identifier
)
)
task = ProcessInstanceProcessor.get_task_by_bpmn_identifier(
- "Activity_RunScript", bpmn_process_instance
+ "Activity_CalculateNewData", bpmn_process_instance
)
assert task is not None
@@ -48,21 +60,32 @@ class TestScriptUnitTestRunner(BaseTest):
def test_fails_when_expected_output_does_not_match_actual_output(
self,
app: Flask,
+ client: FlaskClient,
with_db_and_bpmn_file_cleanup: None,
+ with_super_admin_user: UserModel,
) -> None:
"""Test_fails_when_expected_output_does_not_match_actual_output."""
app.config["THREAD_LOCAL_DATA"].process_instance_id = None
process_group_id = "test_logging_spiff_logger"
+ self.create_process_group(
+ client, with_super_admin_user, process_group_id, process_group_id
+ )
+
process_model_id = "simple_script"
- load_test_spec(process_model_id, process_group_id=process_group_id)
+ process_model_identifier = f"{process_group_id}/{process_model_id}"
+ load_test_spec(
+ process_model_identifier,
+ bpmn_file_name=process_model_id,
+ process_model_source_directory=process_model_id,
+ )
bpmn_process_instance = (
ProcessInstanceProcessor.get_bpmn_process_instance_from_process_model(
- process_model_id, process_group_id
+ process_model_identifier
)
)
task = ProcessInstanceProcessor.get_task_by_bpmn_identifier(
- "Activity_RunScript", bpmn_process_instance
+ "Activity_CalculateNewData", bpmn_process_instance
)
assert task is not None
@@ -80,17 +103,28 @@ class TestScriptUnitTestRunner(BaseTest):
def test_script_with_unit_tests_when_hey_is_passed_in(
self,
app: Flask,
+ client: FlaskClient,
with_db_and_bpmn_file_cleanup: None,
+ with_super_admin_user: UserModel,
) -> None:
"""Test_script_with_unit_tests_when_hey_is_passed_in."""
app.config["THREAD_LOCAL_DATA"].process_instance_id = None
process_group_id = "script_with_unit_tests"
+ self.create_process_group(
+ client, with_super_admin_user, process_group_id, process_group_id
+ )
+
process_model_id = "script_with_unit_tests"
- load_test_spec(process_model_id, process_group_id=process_group_id)
+ process_model_identifier = f"{process_group_id}/{process_model_id}"
+ load_test_spec(
+ process_model_identifier,
+ bpmn_file_name=process_model_id,
+ process_model_source_directory=process_model_id,
+ )
bpmn_process_instance = (
ProcessInstanceProcessor.get_bpmn_process_instance_from_process_model(
- process_model_id, process_group_id
+ process_model_identifier
)
)
task = ProcessInstanceProcessor.get_task_by_bpmn_identifier(
@@ -110,17 +144,29 @@ class TestScriptUnitTestRunner(BaseTest):
def test_script_with_unit_tests_when_hey_is_not_passed_in(
self,
app: Flask,
+ client: FlaskClient,
with_db_and_bpmn_file_cleanup: None,
+ with_super_admin_user: UserModel,
) -> None:
"""Test_script_with_unit_tests_when_hey_is_not_passed_in."""
app.config["THREAD_LOCAL_DATA"].process_instance_id = None
process_group_id = "script_with_unit_tests"
+ self.create_process_group(
+ client, with_super_admin_user, process_group_id, process_group_id
+ )
+
process_model_id = "script_with_unit_tests"
- load_test_spec(process_model_id, process_group_id=process_group_id)
+ process_model_identifier = f"{process_group_id}/{process_model_id}"
+
+ load_test_spec(
+ process_model_identifier,
+ bpmn_file_name=process_model_id,
+ process_model_source_directory=process_model_id,
+ )
bpmn_process_instance = (
ProcessInstanceProcessor.get_bpmn_process_instance_from_process_model(
- process_model_id, process_group_id
+ process_model_identifier
)
)
task = ProcessInstanceProcessor.get_task_by_bpmn_identifier(
diff --git a/tests/spiffworkflow_backend/unit/test_spec_file_service.py b/tests/spiffworkflow_backend/unit/test_spec_file_service.py
index 85adb298d..d74acb47b 100644
--- a/tests/spiffworkflow_backend/unit/test_spec_file_service.py
+++ b/tests/spiffworkflow_backend/unit/test_spec_file_service.py
@@ -3,6 +3,7 @@ import os
import pytest
from flask import Flask
+from flask.testing import FlaskClient
from flask_bpmn.api.api_error import ApiError
from flask_bpmn.models.db import db
from SpiffWorkflow.dmn.parser.BpmnDmnParser import BpmnDmnParser # type: ignore
@@ -10,6 +11,7 @@ from tests.spiffworkflow_backend.helpers.base_test import BaseTest
from tests.spiffworkflow_backend.helpers.test_data import load_test_spec
from spiffworkflow_backend.models.bpmn_process_id_lookup import BpmnProcessIdLookup
+from spiffworkflow_backend.models.user import UserModel
from spiffworkflow_backend.services.process_model_service import ProcessModelService
from spiffworkflow_backend.services.spec_file_service import SpecFileService
@@ -17,18 +19,29 @@ from spiffworkflow_backend.services.spec_file_service import SpecFileService
class TestSpecFileService(BaseTest):
"""TestSpecFileService."""
+ process_group_id = "test_process_group_id"
+ process_model_id = "call_activity_nested"
+ bpmn_file_name = "call_activity_nested.bpmn"
+
call_activity_nested_relative_file_path = os.path.join(
- "test_process_group_id", "call_activity_nested", "call_activity_nested.bpmn"
+ process_group_id, process_model_id, bpmn_file_name
)
def test_can_store_process_ids_for_lookup(
- self, app: Flask, with_db_and_bpmn_file_cleanup: None
+ self,
+ app: Flask,
+ client: FlaskClient,
+ with_db_and_bpmn_file_cleanup: None,
+ with_super_admin_user: UserModel,
) -> None:
"""Test_can_store_process_ids_for_lookup."""
- load_test_spec(
- "call_activity_nested",
- process_model_source_directory="call_activity_nested",
- bpmn_file_name="call_activity_nested",
+ self.basic_test_setup(
+ client=client,
+ user=with_super_admin_user,
+ process_group_id=self.process_group_id,
+ process_model_id=self.process_model_id,
+ bpmn_file_name=self.bpmn_file_name,
+ bpmn_file_location="call_activity_nested",
)
bpmn_process_id_lookups = BpmnProcessIdLookup.query.all()
assert len(bpmn_process_id_lookups) == 1
@@ -39,14 +52,21 @@ class TestSpecFileService(BaseTest):
)
def test_fails_to_save_duplicate_process_id(
- self, app: Flask, with_db_and_bpmn_file_cleanup: None
+ self,
+ app: Flask,
+ client: FlaskClient,
+ with_db_and_bpmn_file_cleanup: None,
+ with_super_admin_user: UserModel,
) -> None:
"""Test_fails_to_save_duplicate_process_id."""
bpmn_process_identifier = "Level1"
- load_test_spec(
- "call_activity_nested",
- process_model_source_directory="call_activity_nested",
- bpmn_file_name="call_activity_nested",
+ self.basic_test_setup(
+ client=client,
+ user=with_super_admin_user,
+ process_group_id=self.process_group_id,
+ process_model_id=self.process_model_id,
+ bpmn_file_name=self.bpmn_file_name,
+ bpmn_file_location=self.process_model_id,
)
bpmn_process_id_lookups = BpmnProcessIdLookup.query.all()
assert len(bpmn_process_id_lookups) == 1
@@ -69,25 +89,30 @@ class TestSpecFileService(BaseTest):
)
def test_updates_relative_file_path_when_appropriate(
- self, app: Flask, with_db_and_bpmn_file_cleanup: None
+ self,
+ app: Flask,
+ client: FlaskClient,
+ with_db_and_bpmn_file_cleanup: None,
+ with_super_admin_user: UserModel,
) -> None:
"""Test_updates_relative_file_path_when_appropriate."""
bpmn_process_identifier = "Level1"
- bpmn_file_relative_path = os.path.join(
- "test_process_group_id", "call_activity_nested", "new_bpmn_file.bpmn"
- )
process_id_lookup = BpmnProcessIdLookup(
bpmn_process_identifier=bpmn_process_identifier,
- bpmn_file_relative_path=bpmn_file_relative_path,
+ bpmn_file_relative_path=self.call_activity_nested_relative_file_path,
)
db.session.add(process_id_lookup)
db.session.commit()
- load_test_spec(
- "call_activity_nested",
- process_model_source_directory="call_activity_nested",
- bpmn_file_name="call_activity_nested",
+ self.basic_test_setup(
+ client=client,
+ user=with_super_admin_user,
+ process_group_id=self.process_group_id,
+ process_model_id=self.process_model_id,
+ bpmn_file_name=self.bpmn_file_name,
+ bpmn_file_location=self.process_model_id,
)
+
bpmn_process_id_lookups = BpmnProcessIdLookup.query.all()
assert len(bpmn_process_id_lookups) == 1
assert (
@@ -100,7 +125,11 @@ class TestSpecFileService(BaseTest):
)
def test_load_reference_information(
- self, app: Flask, with_db_and_bpmn_file_cleanup: None
+ self,
+ app: Flask,
+ client: FlaskClient,
+ with_db_and_bpmn_file_cleanup: None,
+ with_super_admin_user: UserModel,
) -> None:
"""Test_load_reference_information.
@@ -113,12 +142,22 @@ class TestSpecFileService(BaseTest):
a DMN file can (theoretically) contain many decisions. So this
is an array.
"""
- load_test_spec(
- "call_activity_nested",
- process_model_source_directory="call_activity_nested",
+ process_group_id = "test_group"
+ process_model_id = "call_activity_nested"
+ process_model_identifier = self.basic_test_setup(
+ client=client,
+ user=with_super_admin_user,
+ process_group_id=process_group_id,
+ process_model_id=process_model_id,
+ # bpmn_file_name=bpmn_file_name,
+ bpmn_file_location=process_model_id,
)
+ # load_test_spec(
+ # ,
+ # process_model_source_directory="call_activity_nested",
+ # )
process_model_info = ProcessModelService().get_process_model(
- "call_activity_nested"
+ process_model_identifier
)
files = SpecFileService.get_files(process_model_info)
diff --git a/tests/spiffworkflow_backend/unit/test_various_bpmn_constructs.py b/tests/spiffworkflow_backend/unit/test_various_bpmn_constructs.py
index c97803d83..c655d3ffd 100644
--- a/tests/spiffworkflow_backend/unit/test_various_bpmn_constructs.py
+++ b/tests/spiffworkflow_backend/unit/test_various_bpmn_constructs.py
@@ -1,23 +1,35 @@
"""Test_various_bpmn_constructs."""
from flask.app import Flask
+from flask.testing import FlaskClient
from tests.spiffworkflow_backend.helpers.base_test import BaseTest
-from tests.spiffworkflow_backend.helpers.test_data import load_test_spec
+from spiffworkflow_backend.models.user import UserModel
from spiffworkflow_backend.services.process_instance_processor import (
ProcessInstanceProcessor,
)
+from spiffworkflow_backend.services.process_model_service import ProcessModelService
class TestVariousBpmnConstructs(BaseTest):
"""TestVariousBpmnConstructs."""
def test_running_process_with_timer_intermediate_catch_event(
- self, app: Flask, with_db_and_bpmn_file_cleanup: None
+ self,
+ app: Flask,
+ client: FlaskClient,
+ with_db_and_bpmn_file_cleanup: None,
+ with_super_admin_user: UserModel,
) -> None:
"""Test_running_process_with_timer_intermediate_catch_event."""
- process_model = load_test_spec(
- "timers_intermediate_catch_event",
- process_model_source_directory="timer_intermediate_catch_event",
+ process_model_identifier = self.basic_test_setup(
+ client,
+ with_super_admin_user,
+ "test_group",
+ "timer_intermediate_catch_event",
+ )
+
+ process_model = ProcessModelService().get_process_model(
+ process_model_id=process_model_identifier
)
process_instance = self.create_process_instance_from_process_model(