Mirror of https://github.com/sartography/spiffworkflow-backend.git (synced 2025-02-24 13:28:31 +00:00)

Merge pull request #75 from sartography/feature/basic_message_queue_support
Feature/basic message queue support

This commit is contained in commit df7bc9de7f
@@ -39,6 +39,7 @@ if [[ "${SPIFFWORKFLOW_BACKEND_LOAD_FIXTURE_DATA:-}" == "true" ]]; then
fi

export IS_GUNICORN="true"
export PROCESS_WAITING_MESSAGES="true"

# THIS MUST BE THE LAST COMMAND!
exec poetry run gunicorn ${additional_args} --bind "0.0.0.0:$port" --workers="$workers" --timeout 90 --capture-output --access-logfile '-' --log-level debug wsgi:app
@@ -22,5 +22,5 @@ export APPLICATION_ROOT="/"
if [[ -n "${SPIFFWORKFLOW_BACKEND_LOAD_FIXTURE_DATA:-}" ]]; then
  ./bin/boot_server_in_docker
else
  FLASK_APP=src/spiffworkflow_backend poetry run flask run -p 7000
  PROCESS_WAITING_MESSAGES="true" FLASK_APP=src/spiffworkflow_backend poetry run flask run -p 7000
fi
24 bin/run_sql Executable file
@@ -0,0 +1,24 @@
#!/usr/bin/env bash

function error_handler() {
  >&2 echo "Exited with BAD EXIT CODE '${2}' in ${0} script at line: ${1}."
  exit "$2"
}
trap 'error_handler ${LINENO} $?' ERR
set -o errtrace -o errexit -o nounset -o pipefail


tables=(
  message_model
  message_instance
  message_correlation
  message_correlation_property
  message_correlation_message_instance
)

for i in "${tables[@]}" ;do
  echo "$i"
  mysql -uroot -e "select * from spiffworkflow_backend_testing.${i}"
done

mysql -uroot -e "select id,process_model_identifier,process_group_identifier,status from spiffworkflow_backend_testing.process_instance"
@@ -62,6 +62,7 @@ services:
      - SPIFFWORKFLOW_BACKEND_DATABASE_URI=mysql+mysqlconnector://root:${SPIFFWORKFLOW_BACKEND_MYSQL_ROOT_DATABASE:-my-secret-pw}@localhost:7003/${SPIFFWORKFLOW_BACKEND_DATABASE_NAME:-spiffworkflow_backend_development}
      - BPMN_SPEC_ABSOLUTE_DIR=/app/process_models
      - SPIFFWORKFLOW_BACKEND_LOAD_FIXTURE_DATA=${SPIFFWORKFLOW_BACKEND_LOAD_FIXTURE_DATA:-false}
      - PROCESS_WAITING_MESSAGES=true
    ports:
      - "7000:7000"
    network_mode: host
@@ -1,8 +1,8 @@
"""empty message

Revision ID: b4f678040235
Revision ID: 0c8e71337903
Revises:
Create Date: 2022-07-25 09:46:39.406847
Create Date: 2022-08-23 12:26:12.767126

"""
from alembic import op
@@ -10,7 +10,7 @@ import sqlalchemy as sa


# revision identifiers, used by Alembic.
revision = 'b4f678040235'
revision = '0c8e71337903'
down_revision = None
branch_labels = None
depends_on = None
@@ -31,19 +31,51 @@ def upgrade():
    sa.Column('new_name_two', sa.String(length=255), nullable=True),
    sa.PrimaryKeyConstraint('id')
    )
    op.create_table('message_model',
    sa.Column('id', sa.Integer(), nullable=False),
    sa.Column('identifier', sa.String(length=50), nullable=True),
    sa.Column('name', sa.String(length=50), nullable=True),
    sa.PrimaryKeyConstraint('id')
    )
    op.create_index(op.f('ix_message_model_identifier'), 'message_model', ['identifier'], unique=True)
    op.create_index(op.f('ix_message_model_name'), 'message_model', ['name'], unique=True)
    op.create_table('user',
    sa.Column('id', sa.Integer(), nullable=False),
    sa.Column('username', sa.String(length=50), nullable=False),
    sa.Column('username', sa.String(length=255), nullable=False),
    sa.Column('uid', sa.String(length=50), nullable=True),
    sa.Column('service', sa.String(length=50), nullable=False),
    sa.Column('service_id', sa.String(length=100), nullable=False),
    sa.Column('name', sa.String(length=50), nullable=True),
    sa.Column('email', sa.String(length=50), nullable=True),
    sa.Column('service_id', sa.String(length=255), nullable=False),
    sa.Column('name', sa.String(length=255), nullable=True),
    sa.Column('email', sa.String(length=255), nullable=True),
    sa.PrimaryKeyConstraint('id'),
    sa.UniqueConstraint('service', 'service_id', name='service_key'),
    sa.UniqueConstraint('uid'),
    sa.UniqueConstraint('username'),
    sa.UniqueConstraint('service', 'service_id')
    sa.UniqueConstraint('username')
    )
    op.create_table('message_correlation_property',
    sa.Column('id', sa.Integer(), nullable=False),
    sa.Column('identifier', sa.String(length=50), nullable=True),
    sa.Column('message_model_id', sa.Integer(), nullable=False),
    sa.Column('updated_at_in_seconds', sa.Integer(), nullable=True),
    sa.Column('created_at_in_seconds', sa.Integer(), nullable=True),
    sa.ForeignKeyConstraint(['message_model_id'], ['message_model.id'], ),
    sa.PrimaryKeyConstraint('id'),
    sa.UniqueConstraint('identifier', 'message_model_id', name='message_correlation_property_unique')
    )
    op.create_index(op.f('ix_message_correlation_property_identifier'), 'message_correlation_property', ['identifier'], unique=False)
    op.create_table('message_triggerable_process_model',
    sa.Column('id', sa.Integer(), nullable=False),
    sa.Column('message_model_id', sa.Integer(), nullable=False),
    sa.Column('process_model_identifier', sa.String(length=50), nullable=False),
    sa.Column('process_group_identifier', sa.String(length=50), nullable=False),
    sa.Column('updated_at_in_seconds', sa.Integer(), nullable=True),
    sa.Column('created_at_in_seconds', sa.Integer(), nullable=True),
    sa.ForeignKeyConstraint(['message_model_id'], ['message_model.id'], ),
    sa.PrimaryKeyConstraint('id'),
    sa.UniqueConstraint('message_model_id')
    )
    op.create_index(op.f('ix_message_triggerable_process_model_process_group_identifier'), 'message_triggerable_process_model', ['process_group_identifier'], unique=False)
    op.create_index(op.f('ix_message_triggerable_process_model_process_model_identifier'), 'message_triggerable_process_model', ['process_model_identifier'], unique=False)
    op.create_table('principal',
    sa.Column('id', sa.Integer(), nullable=False),
    sa.Column('user_id', sa.Integer(), nullable=True),
@@ -129,11 +161,40 @@ def upgrade():
    sa.Column('updated_at_in_seconds', sa.Integer(), nullable=True),
    sa.Column('created_at_in_seconds', sa.Integer(), nullable=True),
    sa.Column('user_uid', sa.String(length=50), nullable=True),
    sa.Column('user_id', sa.Integer(), nullable=True),
    sa.Column('archived', sa.Boolean(), nullable=True),
    sa.ForeignKeyConstraint(['process_instance_id'], ['process_instance.id'], ),
    sa.ForeignKeyConstraint(['user_uid'], ['user.uid'], ),
    sa.ForeignKeyConstraint(['user_id'], ['user.id'], ),
    sa.PrimaryKeyConstraint('id')
    )
    op.create_table('message_correlation',
    sa.Column('id', sa.Integer(), nullable=False),
    sa.Column('process_instance_id', sa.Integer(), nullable=False),
    sa.Column('message_correlation_property_id', sa.Integer(), nullable=False),
    sa.Column('name', sa.String(length=255), nullable=False),
    sa.Column('value', sa.String(length=255), nullable=False),
    sa.Column('updated_at_in_seconds', sa.Integer(), nullable=True),
    sa.Column('created_at_in_seconds', sa.Integer(), nullable=True),
    sa.ForeignKeyConstraint(['message_correlation_property_id'], ['message_correlation_property.id'], ),
    sa.ForeignKeyConstraint(['process_instance_id'], ['process_instance.id'], ),
    sa.PrimaryKeyConstraint('id'),
    sa.UniqueConstraint('process_instance_id', 'message_correlation_property_id', 'name', name='message_instance_id_name_unique')
    )
    op.create_index(op.f('ix_message_correlation_message_correlation_property_id'), 'message_correlation', ['message_correlation_property_id'], unique=False)
    op.create_index(op.f('ix_message_correlation_name'), 'message_correlation', ['name'], unique=False)
    op.create_index(op.f('ix_message_correlation_process_instance_id'), 'message_correlation', ['process_instance_id'], unique=False)
    op.create_index(op.f('ix_message_correlation_value'), 'message_correlation', ['value'], unique=False)
    op.create_table('message_instance',
    sa.Column('id', sa.Integer(), nullable=False),
    sa.Column('process_instance_id', sa.Integer(), nullable=False),
    sa.Column('message_model_id', sa.Integer(), nullable=False),
    sa.Column('message_type', sa.String(length=20), nullable=False),
    sa.Column('payload', sa.JSON(), nullable=True),
    sa.Column('status', sa.String(length=20), nullable=False),
    sa.Column('failure_cause', sa.Text(), nullable=True),
    sa.Column('updated_at_in_seconds', sa.Integer(), nullable=True),
    sa.Column('created_at_in_seconds', sa.Integer(), nullable=True),
    sa.ForeignKeyConstraint(['message_model_id'], ['message_model.id'], ),
    sa.ForeignKeyConstraint(['process_instance_id'], ['process_instance.id'], ),
    sa.PrimaryKeyConstraint('id')
    )
    op.create_table('task_event',
@@ -171,13 +232,33 @@ def upgrade():
    sa.ForeignKeyConstraint(['file_id'], ['file.id'], ),
    sa.PrimaryKeyConstraint('id')
    )
    op.create_table('message_correlation_message_instance',
    sa.Column('id', sa.Integer(), nullable=False),
    sa.Column('message_instance_id', sa.Integer(), nullable=False),
    sa.Column('message_correlation_id', sa.Integer(), nullable=False),
    sa.ForeignKeyConstraint(['message_correlation_id'], ['message_correlation.id'], ),
    sa.ForeignKeyConstraint(['message_instance_id'], ['message_instance.id'], ),
    sa.PrimaryKeyConstraint('id'),
    sa.UniqueConstraint('message_instance_id', 'message_correlation_id', name='message_correlation_message_instance_unique')
    )
    op.create_index(op.f('ix_message_correlation_message_instance_message_correlation_id'), 'message_correlation_message_instance', ['message_correlation_id'], unique=False)
    op.create_index(op.f('ix_message_correlation_message_instance_message_instance_id'), 'message_correlation_message_instance', ['message_instance_id'], unique=False)
    # ### end Alembic commands ###


def downgrade():
    # ### commands auto generated by Alembic - please adjust! ###
    op.drop_index(op.f('ix_message_correlation_message_instance_message_instance_id'), table_name='message_correlation_message_instance')
    op.drop_index(op.f('ix_message_correlation_message_instance_message_correlation_id'), table_name='message_correlation_message_instance')
    op.drop_table('message_correlation_message_instance')
    op.drop_table('data_store')
    op.drop_table('task_event')
    op.drop_table('message_instance')
    op.drop_index(op.f('ix_message_correlation_value'), table_name='message_correlation')
    op.drop_index(op.f('ix_message_correlation_process_instance_id'), table_name='message_correlation')
    op.drop_index(op.f('ix_message_correlation_name'), table_name='message_correlation')
    op.drop_index(op.f('ix_message_correlation_message_correlation_property_id'), table_name='message_correlation')
    op.drop_table('message_correlation')
    op.drop_table('file')
    op.drop_table('active_task')
    op.drop_table('user_group_assignment')
@@ -189,7 +270,15 @@ def downgrade():
    op.drop_index(op.f('ix_process_instance_process_group_identifier'), table_name='process_instance')
    op.drop_table('process_instance')
    op.drop_table('principal')
    op.drop_index(op.f('ix_message_triggerable_process_model_process_model_identifier'), table_name='message_triggerable_process_model')
    op.drop_index(op.f('ix_message_triggerable_process_model_process_group_identifier'), table_name='message_triggerable_process_model')
    op.drop_table('message_triggerable_process_model')
    op.drop_index(op.f('ix_message_correlation_property_identifier'), table_name='message_correlation_property')
    op.drop_table('message_correlation_property')
    op.drop_table('user')
    op.drop_index(op.f('ix_message_model_name'), table_name='message_model')
    op.drop_index(op.f('ix_message_model_identifier'), table_name='message_model')
    op.drop_table('message_model')
    op.drop_table('group')
    op.drop_table('admin_session')
    # ### end Alembic commands ###
183 poetry.lock generated
@@ -43,6 +43,32 @@ python-versions = "*"
[package.extras]
dev = ["black", "coverage", "isort", "pre-commit", "pyenchant", "pylint"]

[[package]]
name = "apscheduler"
version = "3.9.1"
description = "In-process task scheduler with Cron-like capabilities"
category = "main"
optional = false
python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, <4"

[package.dependencies]
pytz = "*"
six = ">=1.4.0"
tzlocal = ">=2.0,<3.0.0 || >=4.0.0"

[package.extras]
asyncio = ["trollius"]
doc = ["sphinx", "sphinx-rtd-theme"]
gevent = ["gevent"]
mongodb = ["pymongo (>=3.0)"]
redis = ["redis (>=3.0)"]
rethinkdb = ["rethinkdb (>=2.4.0)"]
sqlalchemy = ["sqlalchemy (>=0.8)"]
testing = ["pytest", "pytest-cov", "pytest-tornado5", "mock", "pytest-asyncio (<0.6)", "pytest-asyncio"]
tornado = ["tornado (>=4.3)"]
twisted = ["twisted"]
zookeeper = ["kazoo"]

[[package]]
name = "astroid"
version = "2.11.6"
@@ -383,7 +409,7 @@ tests = ["decorator (>=5,<6)", "pytest (>=6,<7)", "pytest-cov (>=2,<3)", "testfi

[[package]]
name = "coverage"
version = "6.4.2"
version = "6.4.4"
description = "Code coverage measurement for Python"
category = "dev"
optional = false
@@ -618,18 +644,23 @@ develop = false
[package.dependencies]
click = "^8.0.1"
flask = "*"
flask-admin = "*"
flask-bcrypt = "*"
flask-cors = "*"
flask-mail = "*"
flask-marshmallow = "*"
flask-migrate = "*"
sentry-sdk = "1.7.1"
sphinx-autoapi = "^1.8.4"
flask-restful = "*"
sentry-sdk = "1.9.0"
sphinx-autoapi = "^1.9.0"
spiffworkflow = "*"
werkzeug = "*"

[package.source]
type = "git"
url = "https://github.com/sartography/flask-bpmn"
reference = "feature/with-spiff-properties"
resolved_reference = "c7497aabb039420d9e7c68a50d7a4b3e74100e81"
reference = "main"
resolved_reference = "bf533cbea4b5efa8a520718937d8d1f29b7d09d9"

[[package]]
name = "flask-cors"
@@ -1556,7 +1587,7 @@ requests = "*"

[[package]]
name = "sentry-sdk"
version = "1.7.1"
version = "1.9.0"
description = "Python client for Sentry (https://sentry.io)"
category = "main"
optional = false
@@ -1574,6 +1605,7 @@ celery = ["celery (>=3)"]
chalice = ["chalice (>=1.16.0)"]
django = ["django (>=1.8)"]
falcon = ["falcon (>=1.4)"]
fastapi = ["fastapi (>=0.79.0)"]
flask = ["flask (>=0.11)", "blinker (>=1.1)"]
httpx = ["httpx (>=0.16.0)"]
pure_eval = ["pure-eval", "executing", "asttokens"]
@@ -1582,6 +1614,7 @@ quart = ["quart (>=0.16.1)", "blinker (>=1.1)"]
rq = ["rq (>=0.6)"]
sanic = ["sanic (>=0.8)"]
sqlalchemy = ["sqlalchemy (>=1.2)"]
starlette = ["starlette (>=0.19.1)"]
tornado = ["tornado (>=5)"]

[[package]]
@@ -1650,11 +1683,11 @@ test = ["pytest (>=4.6)", "html5lib", "cython", "typed-ast"]

[[package]]
name = "sphinx-autoapi"
version = "1.8.4"
version = "1.9.0"
description = "Sphinx API documentation generator"
category = "main"
optional = false
python-versions = ">=3.6"
python-versions = ">=3.7"

[package.dependencies]
astroid = ">=2.7"
@@ -1801,8 +1834,8 @@ pytz = "*"
[package.source]
type = "git"
url = "https://github.com/sartography/SpiffWorkflow"
reference = "feature/spiff_properties"
resolved_reference = "e108aa12da008bdd8d0319e182d28fbd3afb4c67"
reference = "feature/message-correlations"
resolved_reference = "d93093386730742fbc38fd2dab923bf0e44e0176"

[[package]]
name = "sqlalchemy"
@@ -2075,7 +2108,7 @@ email = ["email-validator"]

[[package]]
name = "xdoctest"
version = "1.0.1"
version = "1.0.2"
description = "A rewrite of the builtin doctest module"
category = "dev"
optional = false
@@ -2090,12 +2123,12 @@ six = "*"
tests = ["pytest-cov", "pytest", "typing", "pytest", "pytest-cov", "pytest", "pytest-cov", "pytest", "pytest", "pytest-cov", "pytest", "scikit-build", "pybind11", "ninja", "codecov", "cmake"]
tests-strict = ["pytest-cov (==3.0.0)", "pytest (==6.2.5)", "typing (==3.7.4)", "pytest (==4.6.0)", "pytest (==4.6.0)", "pytest-cov (==2.9.0)", "pytest (==4.6.0)", "pytest-cov (==2.8.1)", "pytest (==4.6.0)", "pytest (==4.6.0)", "pytest-cov (==2.8.1)", "scikit-build (==0.11.1)", "pybind11 (==2.7.1)", "ninja (==1.10.2)", "codecov (==2.0.15)", "cmake (==3.21.2)"]
runtime-strict = ["six (==1.11.0)"]
optional = ["ipykernel", "ipython", "jupyter-client", "nbconvert", "jupyter-core", "jinja2", "jedi", "attrs", "pygments", "ipython-genutils", "debugpy", "debugpy", "debugpy", "ipykernel", "debugpy", "ipython", "jupyter-client", "pygments", "tomli", "debugpy", "colorama"]
optional-strict = ["ipykernel (==6.0.0)", "IPython (==7.23.1)", "jupyter-client (==7.0.0)", "nbconvert (==6.0.0)", "jupyter-core (==4.7.0)", "jinja2 (==3.0.0)", "jedi (==0.16)", "attrs (==19.2.0)", "Pygments (==2.4.1)", "ipython-genutils (==0.2.0)", "debugpy (==1.6.0)", "debugpy (==1.0.0)", "debugpy (==1.0.0)", "ipykernel (==5.2.0)", "debugpy (==1.0.0)", "IPython (==7.10.0)", "jupyter-client (==6.1.5)", "Pygments (==2.0.0)", "tomli (==0.2.0)", "debugpy (==1.3.0)", "colorama (==0.4.1)"]
jupyter = ["ipykernel", "ipython", "jupyter-client", "nbconvert", "jupyter-core", "jinja2", "jedi", "attrs", "ipython-genutils", "debugpy", "debugpy", "debugpy", "ipykernel", "debugpy", "ipython", "jupyter-client", "debugpy"]
optional = ["ipykernel", "ipython", "jupyter-client", "nbconvert", "jinja2", "jupyter-core", "jedi", "attrs", "pygments", "ipython-genutils", "debugpy", "debugpy", "debugpy", "ipykernel", "debugpy", "ipython", "jupyter-client", "pygments", "tomli", "debugpy", "colorama"]
optional-strict = ["ipykernel (==6.0.0)", "IPython (==7.23.1)", "jupyter-client (==7.0.0)", "nbconvert (==6.0.0)", "jinja2 (==3.0.0)", "jupyter-core (==4.7.0)", "jedi (==0.16)", "attrs (==19.2.0)", "Pygments (==2.4.1)", "ipython-genutils (==0.2.0)", "debugpy (==1.6.0)", "debugpy (==1.0.0)", "debugpy (==1.0.0)", "ipykernel (==5.2.0)", "debugpy (==1.0.0)", "IPython (==7.10.0)", "jupyter-client (==6.1.5)", "Pygments (==2.0.0)", "tomli (==0.2.0)", "debugpy (==1.3.0)", "colorama (==0.4.1)"]
jupyter = ["ipykernel", "ipython", "jupyter-client", "nbconvert", "jinja2", "jupyter-core", "jedi", "attrs", "ipython-genutils", "debugpy", "debugpy", "debugpy", "ipykernel", "debugpy", "ipython", "jupyter-client", "debugpy"]
colors = ["pygments", "pygments", "colorama"]
all = ["ipykernel", "ipython", "jupyter-client", "pytest-cov", "nbconvert", "jupyter-core", "jinja2", "jedi", "attrs", "pygments", "pytest", "ipython-genutils", "debugpy", "typing", "debugpy", "debugpy", "pytest", "ipykernel", "debugpy", "ipython", "jupyter-client", "pytest-cov", "pytest", "pytest-cov", "pytest", "pygments", "pytest", "debugpy", "pytest-cov", "pytest", "colorama", "six", "scikit-build", "pybind11", "ninja", "codecov", "cmake"]
all-strict = ["ipykernel (==6.0.0)", "IPython (==7.23.1)", "jupyter-client (==7.0.0)", "pytest-cov (==3.0.0)", "nbconvert (==6.0.0)", "jupyter-core (==4.7.0)", "jinja2 (==3.0.0)", "jedi (==0.16)", "attrs (==19.2.0)", "Pygments (==2.4.1)", "pytest (==6.2.5)", "ipython-genutils (==0.2.0)", "debugpy (==1.6.0)", "typing (==3.7.4)", "debugpy (==1.0.0)", "debugpy (==1.0.0)", "pytest (==4.6.0)", "ipykernel (==5.2.0)", "debugpy (==1.0.0)", "IPython (==7.10.0)", "jupyter-client (==6.1.5)", "pytest (==4.6.0)", "pytest-cov (==2.9.0)", "pytest (==4.6.0)", "pytest-cov (==2.8.1)", "Pygments (==2.0.0)", "pytest (==4.6.0)", "debugpy (==1.3.0)", "pytest (==4.6.0)", "pytest-cov (==2.8.1)", "colorama (==0.4.1)", "six (==1.11.0)", "scikit-build (==0.11.1)", "pybind11 (==2.7.1)", "ninja (==1.10.2)", "codecov (==2.0.15)", "cmake (==3.21.2)"]
all = ["ipykernel", "ipython", "jupyter-client", "nbconvert", "pytest-cov", "jinja2", "jupyter-core", "jedi", "attrs", "pygments", "pytest", "ipython-genutils", "debugpy", "typing", "debugpy", "debugpy", "pytest", "ipykernel", "debugpy", "ipython", "jupyter-client", "pytest-cov", "pytest", "pytest-cov", "pytest", "pygments", "pytest", "debugpy", "pytest-cov", "pytest", "colorama", "six", "scikit-build", "pybind11", "ninja", "codecov", "cmake"]
all-strict = ["ipykernel (==6.0.0)", "IPython (==7.23.1)", "jupyter-client (==7.0.0)", "nbconvert (==6.0.0)", "pytest-cov (==3.0.0)", "jinja2 (==3.0.0)", "jupyter-core (==4.7.0)", "jedi (==0.16)", "attrs (==19.2.0)", "Pygments (==2.4.1)", "pytest (==6.2.5)", "ipython-genutils (==0.2.0)", "debugpy (==1.6.0)", "typing (==3.7.4)", "debugpy (==1.0.0)", "debugpy (==1.0.0)", "pytest (==4.6.0)", "ipykernel (==5.2.0)", "debugpy (==1.0.0)", "IPython (==7.10.0)", "jupyter-client (==6.1.5)", "pytest (==4.6.0)", "pytest-cov (==2.9.0)", "pytest (==4.6.0)", "pytest-cov (==2.8.1)", "Pygments (==2.0.0)", "pytest (==4.6.0)", "debugpy (==1.3.0)", "pytest (==4.6.0)", "pytest-cov (==2.8.1)", "colorama (==0.4.1)", "six (==1.11.0)", "scikit-build (==0.11.1)", "pybind11 (==2.7.1)", "ninja (==1.10.2)", "codecov (==2.0.15)", "cmake (==3.21.2)"]
[[package]]
name = "zipp"
@@ -2112,7 +2145,7 @@ testing = ["pytest (>=6)", "pytest-checkdocs (>=2.4)", "pytest-flake8", "pytest-
[metadata]
lock-version = "1.1"
python-versions = "^3.9"
content-hash = "d5a3a36e18fc130235b2610426058283487b25632c523c11aea97879cdda6172"
content-hash = "e4053464943f4d60140841fa7f538a1d3dfc3f7c99fb2e459eb682a4508fd372"

[metadata.files]
alabaster = [
@@ -2131,6 +2164,7 @@ aniso8601 = [
    {file = "aniso8601-9.0.1-py2.py3-none-any.whl", hash = "sha256:1d2b7ef82963909e93c4f24ce48d4de9e66009a21bf1c1e1c85bdd0812fe412f"},
    {file = "aniso8601-9.0.1.tar.gz", hash = "sha256:72e3117667eedf66951bb2d93f4296a56b94b078a8a95905a052611fb3f1b973"},
]
apscheduler = []
astroid = [
    {file = "astroid-2.11.6-py3-none-any.whl", hash = "sha256:ba33a82a9a9c06a5ceed98180c5aab16e29c285b828d94696bf32d6015ea82a9"},
    {file = "astroid-2.11.6.tar.gz", hash = "sha256:4f933d0bf5e408b03a6feb5d23793740c27e07340605f236496cd6ce552043d6"},
@@ -2313,7 +2347,10 @@ flake8 = [
flake8-bandit = [
    {file = "flake8_bandit-2.1.2.tar.gz", hash = "sha256:687fc8da2e4a239b206af2e54a90093572a60d0954f3054e23690739b0b0de3b"},
]
flake8-bugbear = []
flake8-bugbear = [
    {file = "flake8-bugbear-22.7.1.tar.gz", hash = "sha256:e450976a07e4f9d6c043d4f72b17ec1baf717fe37f7997009c8ae58064f88305"},
    {file = "flake8_bugbear-22.7.1-py3-none-any.whl", hash = "sha256:db5d7a831ef4412a224b26c708967ff816818cabae415e76b8c58df156c4b8e5"},
]
flake8-docstrings = [
    {file = "flake8-docstrings-1.6.0.tar.gz", hash = "sha256:9fe7c6a306064af8e62a055c2f61e9eb1da55f84bb39caef2b84ce53708ac34b"},
    {file = "flake8_docstrings-1.6.0-py2.py3-none-any.whl", hash = "sha256:99cac583d6c7e32dd28bbfbef120a7c0d1b6dde4adb5a9fd441c4227a6534bde"},
@@ -2542,7 +2579,79 @@ libcst = [
livereload = [
    {file = "livereload-2.6.3.tar.gz", hash = "sha256:776f2f865e59fde56490a56bcc6773b6917366bce0c267c60ee8aaf1a0959869"},
]
lxml = []
lxml = [
{file = "lxml-4.9.1-cp27-cp27m-macosx_10_15_x86_64.whl", hash = "sha256:98cafc618614d72b02185ac583c6f7796202062c41d2eeecdf07820bad3295ed"},
{file = "lxml-4.9.1-cp27-cp27m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:c62e8dd9754b7debda0c5ba59d34509c4688f853588d75b53c3791983faa96fc"},
{file = "lxml-4.9.1-cp27-cp27m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:21fb3d24ab430fc538a96e9fbb9b150029914805d551deeac7d7822f64631dfc"},
{file = "lxml-4.9.1-cp27-cp27m-win32.whl", hash = "sha256:86e92728ef3fc842c50a5cb1d5ba2bc66db7da08a7af53fb3da79e202d1b2cd3"},
{file = "lxml-4.9.1-cp27-cp27m-win_amd64.whl", hash = "sha256:4cfbe42c686f33944e12f45a27d25a492cc0e43e1dc1da5d6a87cbcaf2e95627"},
{file = "lxml-4.9.1-cp27-cp27mu-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:dad7b164905d3e534883281c050180afcf1e230c3d4a54e8038aa5cfcf312b84"},
{file = "lxml-4.9.1-cp27-cp27mu-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:a614e4afed58c14254e67862456d212c4dcceebab2eaa44d627c2ca04bf86837"},
{file = "lxml-4.9.1-cp310-cp310-macosx_10_15_universal2.whl", hash = "sha256:49a866923e69bc7da45a0565636243707c22752fc38f6b9d5c8428a86121022c"},
{file = "lxml-4.9.1-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = "sha256:f9ced82717c7ec65a67667bb05865ffe38af0e835cdd78728f1209c8fffe0cad"},
{file = "lxml-4.9.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_24_aarch64.whl", hash = "sha256:d9fc0bf3ff86c17348dfc5d322f627d78273eba545db865c3cd14b3f19e57fa5"},
{file = "lxml-4.9.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:e5f66bdf0976ec667fc4594d2812a00b07ed14d1b44259d19a41ae3fff99f2b8"},
{file = "lxml-4.9.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:fe17d10b97fdf58155f858606bddb4e037b805a60ae023c009f760d8361a4eb8"},
{file = "lxml-4.9.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:8caf4d16b31961e964c62194ea3e26a0e9561cdf72eecb1781458b67ec83423d"},
{file = "lxml-4.9.1-cp310-cp310-win32.whl", hash = "sha256:4780677767dd52b99f0af1f123bc2c22873d30b474aa0e2fc3fe5e02217687c7"},
{file = "lxml-4.9.1-cp310-cp310-win_amd64.whl", hash = "sha256:b122a188cd292c4d2fcd78d04f863b789ef43aa129b233d7c9004de08693728b"},
{file = "lxml-4.9.1-cp311-cp311-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = "sha256:be9eb06489bc975c38706902cbc6888f39e946b81383abc2838d186f0e8b6a9d"},
{file = "lxml-4.9.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:f1be258c4d3dc609e654a1dc59d37b17d7fef05df912c01fc2e15eb43a9735f3"},
{file = "lxml-4.9.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:927a9dd016d6033bc12e0bf5dee1dde140235fc8d0d51099353c76081c03dc29"},
{file = "lxml-4.9.1-cp35-cp35m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:9232b09f5efee6a495a99ae6824881940d6447debe272ea400c02e3b68aad85d"},
{file = "lxml-4.9.1-cp35-cp35m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:04da965dfebb5dac2619cb90fcf93efdb35b3c6994fea58a157a834f2f94b318"},
{file = "lxml-4.9.1-cp35-cp35m-win32.whl", hash = "sha256:4d5bae0a37af799207140652a700f21a85946f107a199bcb06720b13a4f1f0b7"},
{file = "lxml-4.9.1-cp35-cp35m-win_amd64.whl", hash = "sha256:4878e667ebabe9b65e785ac8da4d48886fe81193a84bbe49f12acff8f7a383a4"},
{file = "lxml-4.9.1-cp36-cp36m-macosx_10_15_x86_64.whl", hash = "sha256:1355755b62c28950f9ce123c7a41460ed9743c699905cbe664a5bcc5c9c7c7fb"},
{file = "lxml-4.9.1-cp36-cp36m-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = "sha256:bcaa1c495ce623966d9fc8a187da80082334236a2a1c7e141763ffaf7a405067"},
{file = "lxml-4.9.1-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6eafc048ea3f1b3c136c71a86db393be36b5b3d9c87b1c25204e7d397cee9536"},
{file = "lxml-4.9.1-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:13c90064b224e10c14dcdf8086688d3f0e612db53766e7478d7754703295c7c8"},
{file = "lxml-4.9.1-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:206a51077773c6c5d2ce1991327cda719063a47adc02bd703c56a662cdb6c58b"},
{file = "lxml-4.9.1-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:e8f0c9d65da595cfe91713bc1222af9ecabd37971762cb830dea2fc3b3bb2acf"},
{file = "lxml-4.9.1-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:8f0a4d179c9a941eb80c3a63cdb495e539e064f8054230844dcf2fcb812b71d3"},
{file = "lxml-4.9.1-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:830c88747dce8a3e7525defa68afd742b4580df6aa2fdd6f0855481e3994d391"},
{file = "lxml-4.9.1-cp36-cp36m-win32.whl", hash = "sha256:1e1cf47774373777936c5aabad489fef7b1c087dcd1f426b621fda9dcc12994e"},
{file = "lxml-4.9.1-cp36-cp36m-win_amd64.whl", hash = "sha256:5974895115737a74a00b321e339b9c3f45c20275d226398ae79ac008d908bff7"},
{file = "lxml-4.9.1-cp37-cp37m-macosx_10_15_x86_64.whl", hash = "sha256:1423631e3d51008871299525b541413c9b6c6423593e89f9c4cfbe8460afc0a2"},
{file = "lxml-4.9.1-cp37-cp37m-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = "sha256:2aaf6a0a6465d39b5ca69688fce82d20088c1838534982996ec46633dc7ad6cc"},
{file = "lxml-4.9.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_24_aarch64.whl", hash = "sha256:9f36de4cd0c262dd9927886cc2305aa3f2210db437aa4fed3fb4940b8bf4592c"},
{file = "lxml-4.9.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:ae06c1e4bc60ee076292e582a7512f304abdf6c70db59b56745cca1684f875a4"},
{file = "lxml-4.9.1-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:57e4d637258703d14171b54203fd6822fda218c6c2658a7d30816b10995f29f3"},
{file = "lxml-4.9.1-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:6d279033bf614953c3fc4a0aa9ac33a21e8044ca72d4fa8b9273fe75359d5cca"},
{file = "lxml-4.9.1-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:a60f90bba4c37962cbf210f0188ecca87daafdf60271f4c6948606e4dabf8785"},
{file = "lxml-4.9.1-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:6ca2264f341dd81e41f3fffecec6e446aa2121e0b8d026fb5130e02de1402785"},
{file = "lxml-4.9.1-cp37-cp37m-win32.whl", hash = "sha256:27e590352c76156f50f538dbcebd1925317a0f70540f7dc8c97d2931c595783a"},
{file = "lxml-4.9.1-cp37-cp37m-win_amd64.whl", hash = "sha256:eea5d6443b093e1545ad0210e6cf27f920482bfcf5c77cdc8596aec73523bb7e"},
{file = "lxml-4.9.1-cp38-cp38-macosx_10_15_x86_64.whl", hash = "sha256:f05251bbc2145349b8d0b77c0d4e5f3b228418807b1ee27cefb11f69ed3d233b"},
{file = "lxml-4.9.1-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = "sha256:487c8e61d7acc50b8be82bda8c8d21d20e133c3cbf41bd8ad7eb1aaeb3f07c97"},
{file = "lxml-4.9.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_24_aarch64.whl", hash = "sha256:8d1a92d8e90b286d491e5626af53afef2ba04da33e82e30744795c71880eaa21"},
{file = "lxml-4.9.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:b570da8cd0012f4af9fa76a5635cd31f707473e65a5a335b186069d5c7121ff2"},
{file = "lxml-4.9.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:5ef87fca280fb15342726bd5f980f6faf8b84a5287fcc2d4962ea8af88b35130"},
{file = "lxml-4.9.1-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:93e414e3206779ef41e5ff2448067213febf260ba747fc65389a3ddaa3fb8715"},
{file = "lxml-4.9.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:6653071f4f9bac46fbc30f3c7838b0e9063ee335908c5d61fb7a4a86c8fd2036"},
{file = "lxml-4.9.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:32a73c53783becdb7eaf75a2a1525ea8e49379fb7248c3eeefb9412123536387"},
{file = "lxml-4.9.1-cp38-cp38-win32.whl", hash = "sha256:1a7c59c6ffd6ef5db362b798f350e24ab2cfa5700d53ac6681918f314a4d3b94"},
{file = "lxml-4.9.1-cp38-cp38-win_amd64.whl", hash = "sha256:1436cf0063bba7888e43f1ba8d58824f085410ea2025befe81150aceb123e345"},
{file = "lxml-4.9.1-cp39-cp39-macosx_10_15_x86_64.whl", hash = "sha256:4beea0f31491bc086991b97517b9683e5cfb369205dac0148ef685ac12a20a67"},
{file = "lxml-4.9.1-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = "sha256:41fb58868b816c202e8881fd0f179a4644ce6e7cbbb248ef0283a34b73ec73bb"},
{file = "lxml-4.9.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_24_aarch64.whl", hash = "sha256:bd34f6d1810d9354dc7e35158aa6cc33456be7706df4420819af6ed966e85448"},
{file = "lxml-4.9.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:edffbe3c510d8f4bf8640e02ca019e48a9b72357318383ca60e3330c23aaffc7"},
{file = "lxml-4.9.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:6d949f53ad4fc7cf02c44d6678e7ff05ec5f5552b235b9e136bd52e9bf730b91"},
{file = "lxml-4.9.1-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:079b68f197c796e42aa80b1f739f058dcee796dc725cc9a1be0cdb08fc45b000"},
{file = "lxml-4.9.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:9c3a88d20e4fe4a2a4a84bf439a5ac9c9aba400b85244c63a1ab7088f85d9d25"},
{file = "lxml-4.9.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:4e285b5f2bf321fc0857b491b5028c5f276ec0c873b985d58d7748ece1d770dd"},
{file = "lxml-4.9.1-cp39-cp39-win32.whl", hash = "sha256:ef72013e20dd5ba86a8ae1aed7f56f31d3374189aa8b433e7b12ad182c0d2dfb"},
{file = "lxml-4.9.1-cp39-cp39-win_amd64.whl", hash = "sha256:10d2017f9150248563bb579cd0d07c61c58da85c922b780060dcc9a3aa9f432d"},
{file = "lxml-4.9.1-pp37-pypy37_pp73-macosx_10_15_x86_64.whl", hash = "sha256:0538747a9d7827ce3e16a8fdd201a99e661c7dee3c96c885d8ecba3c35d1032c"},
{file = "lxml-4.9.1-pp37-pypy37_pp73-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = "sha256:0645e934e940107e2fdbe7c5b6fb8ec6232444260752598bc4d09511bd056c0b"},
{file = "lxml-4.9.1-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:6daa662aba22ef3258934105be2dd9afa5bb45748f4f702a3b39a5bf53a1f4dc"},
{file = "lxml-4.9.1-pp38-pypy38_pp73-macosx_10_15_x86_64.whl", hash = "sha256:603a464c2e67d8a546ddaa206d98e3246e5db05594b97db844c2f0a1af37cf5b"},
{file = "lxml-4.9.1-pp38-pypy38_pp73-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = "sha256:c4b2e0559b68455c085fb0f6178e9752c4be3bba104d6e881eb5573b399d1eb2"},
{file = "lxml-4.9.1-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:0f3f0059891d3254c7b5fb935330d6db38d6519ecd238ca4fce93c234b4a0f73"},
{file = "lxml-4.9.1-pp39-pypy39_pp73-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = "sha256:c852b1530083a620cb0de5f3cd6826f19862bafeaf77586f1aef326e49d95f0c"},
{file = "lxml-4.9.1-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:287605bede6bd36e930577c5925fcea17cb30453d96a7b4c63c14a257118dbb9"},
{file = "lxml-4.9.1.tar.gz", hash = "sha256:fe749b052bb7233fe5d072fcb549221a8cb1a16725c47c37e42b0b9cb3ff2c3f"},
]
mako = [
    {file = "Mako-1.2.0-py3-none-any.whl", hash = "sha256:23aab11fdbbb0f1051b93793a58323ff937e98e34aece1c4219675122e57e4ba"},
    {file = "Mako-1.2.0.tar.gz", hash = "sha256:9a7c7e922b87db3686210cf49d5d767033a41d4010b284e747682c92bddd8b39"},
@@ -2728,21 +2837,7 @@ py = [
    {file = "py-1.11.0-py2.py3-none-any.whl", hash = "sha256:607c53218732647dff4acdfcd50cb62615cedf612e72d1724fb1a0cc6405b378"},
    {file = "py-1.11.0.tar.gz", hash = "sha256:51c75c4126074b472f746a24399ad32f6053d1b34b68d2fa41e558e6f4a98719"},
]
pyasn1 = [
{file = "pyasn1-0.4.8-py2.4.egg", hash = "sha256:fec3e9d8e36808a28efb59b489e4528c10ad0f480e57dcc32b4de5c9d8c9fdf3"},
{file = "pyasn1-0.4.8-py2.5.egg", hash = "sha256:0458773cfe65b153891ac249bcf1b5f8f320b7c2ce462151f8fa74de8934becf"},
{file = "pyasn1-0.4.8-py2.6.egg", hash = "sha256:5c9414dcfede6e441f7e8f81b43b34e834731003427e5b09e4e00e3172a10f00"},
{file = "pyasn1-0.4.8-py2.7.egg", hash = "sha256:6e7545f1a61025a4e58bb336952c5061697da694db1cae97b116e9c46abcf7c8"},
{file = "pyasn1-0.4.8-py2.py3-none-any.whl", hash = "sha256:39c7e2ec30515947ff4e87fb6f456dfc6e84857d34be479c9d4a4ba4bf46aa5d"},
{file = "pyasn1-0.4.8-py3.1.egg", hash = "sha256:78fa6da68ed2727915c4767bb386ab32cdba863caa7dbe473eaae45f9959da86"},
{file = "pyasn1-0.4.8-py3.2.egg", hash = "sha256:08c3c53b75eaa48d71cf8c710312316392ed40899cb34710d092e96745a358b7"},
{file = "pyasn1-0.4.8-py3.3.egg", hash = "sha256:03840c999ba71680a131cfaee6fab142e1ed9bbd9c693e285cc6aca0d555e576"},
{file = "pyasn1-0.4.8-py3.4.egg", hash = "sha256:7ab8a544af125fb704feadb008c99a88805126fb525280b2270bb25cc1d78a12"},
{file = "pyasn1-0.4.8-py3.5.egg", hash = "sha256:e89bf84b5437b532b0803ba5c9a5e054d21fec423a89952a74f87fa2c9b7bce2"},
{file = "pyasn1-0.4.8-py3.6.egg", hash = "sha256:014c0e9976956a08139dc0712ae195324a75e142284d5f87f1a87ee1b068a359"},
{file = "pyasn1-0.4.8-py3.7.egg", hash = "sha256:99fcc3c8d804d1bc6d9a099921e39d827026409a58f2a720dcdb89374ea0c776"},
{file = "pyasn1-0.4.8.tar.gz", hash = "sha256:aef77c9fb94a3ac588e87841208bdec464471d9871bd5050a287cc9a475cd0ba"},
]
pyasn1 = []
pycodestyle = [
    {file = "pycodestyle-2.8.0-py2.py3-none-any.whl", hash = "sha256:720f8b39dde8b293825e7ff02c475f3077124006db4f440dcbc9a20b76548a20"},
    {file = "pycodestyle-2.8.0.tar.gz", hash = "sha256:eddd5847ef438ea1c7870ca7eb78a9d47ce0cdb4851a5523949f2601d0cbbe7f"},
@@ -2947,10 +3042,7 @@ requests = [
restructuredtext-lint = [
    {file = "restructuredtext_lint-1.4.0.tar.gz", hash = "sha256:1b235c0c922341ab6c530390892eb9e92f90b9b75046063e047cacfb0f050c45"},
]
rsa = [
    {file = "rsa-4.8-py3-none-any.whl", hash = "sha256:95c5d300c4e879ee69708c428ba566c59478fd653cc3a22243eeb8ed846950bb"},
    {file = "rsa-4.8.tar.gz", hash = "sha256:5c6bd9dc7a543b7fe4304a631f8a8a3b674e2bbfc49c2ae96200cdbe55df6b17"},
]
rsa = []
"ruamel.yaml" = [
    {file = "ruamel.yaml-0.17.21-py3-none-any.whl", hash = "sha256:742b35d3d665023981bd6d16b3d24248ce5df75fdb4e2924e93a05c1f8b61ca7"},
    {file = "ruamel.yaml-0.17.21.tar.gz", hash = "sha256:8b7ce697a2f212752a35c1ac414471dc16c424c9573be4926b56ff3f5d23b7af"},
@@ -3007,10 +3099,7 @@ sphinx = [
    {file = "Sphinx-5.0.2-py3-none-any.whl", hash = "sha256:d3e57663eed1d7c5c50895d191fdeda0b54ded6f44d5621b50709466c338d1e8"},
    {file = "Sphinx-5.0.2.tar.gz", hash = "sha256:b18e978ea7565720f26019c702cd85c84376e948370f1cd43d60265010e1c7b0"},
]
sphinx-autoapi = [
    {file = "sphinx-autoapi-1.8.4.tar.gz", hash = "sha256:8c4ec5fbedc1e6e8f4692bcc4fcd1abcfb9e8dfca8a4ded60ad811a743c22ccc"},
    {file = "sphinx_autoapi-1.8.4-py2.py3-none-any.whl", hash = "sha256:007bf9e24cd2aa0ac0561f67e8bcd6a6e2e8911ef4b4fd54aaba799d8832c8d0"},
]
sphinx-autoapi = []
sphinx-autobuild = [
    {file = "sphinx-autobuild-2021.3.14.tar.gz", hash = "sha256:de1ca3b66e271d2b5b5140c35034c89e47f263f2cd5db302c9217065f7443f05"},
    {file = "sphinx_autobuild-2021.3.14-py3-none-any.whl", hash = "sha256:8fe8cbfdb75db04475232f05187c776f46f6e9e04cacf1e49ce81bdac649ccac"},
@@ -3019,7 +3108,10 @@ sphinx-basic-ng = [
    {file = "sphinx_basic_ng-0.0.1a11-py3-none-any.whl", hash = "sha256:9aecb5345816998789ef76658a83e3c0a12aafa14b17d40e28cd4aaeb94d1517"},
    {file = "sphinx_basic_ng-0.0.1a11.tar.gz", hash = "sha256:bf9a8fda0379c7d2ab51c9543f2b18e014b77fb295b49d64f3c1a910c863b34f"},
]
sphinx-click = []
sphinx-click = [
    {file = "sphinx-click-4.3.0.tar.gz", hash = "sha256:bd4db5d3c1bec345f07af07b8e28a76cfc5006d997984e38ae246bbf8b9a3b38"},
    {file = "sphinx_click-4.3.0-py3-none-any.whl", hash = "sha256:23e85a3cb0b728a421ea773699f6acadefae171d1a764a51dd8ec5981503ccbe"},
]
sphinxcontrib-applehelp = [
    {file = "sphinxcontrib-applehelp-1.0.2.tar.gz", hash = "sha256:a072735ec80e7675e3f432fcae8610ecf509c5f1869d17e2eecff44389cdbc58"},
    {file = "sphinxcontrib_applehelp-1.0.2-py2.py3-none-any.whl", hash = "sha256:806111e5e962be97c29ec4c1e7fe277bfd19e9652fb1a4392105b43e01af885a"},
@@ -3160,7 +3252,10 @@ types-pytz = [
]
types-requests = []
types-urllib3 = []
typing-extensions = []
typing-extensions = [
    {file = "typing_extensions-4.3.0-py3-none-any.whl", hash = "sha256:25642c956049920a5aa49edcdd6ab1e06d7e5d467fc00e0506c44ac86fbfca02"},
    {file = "typing_extensions-4.3.0.tar.gz", hash = "sha256:e6d2677a32f47fc7eb2795db1dd15c1f34eff616bcaf2cfb5e997f854fa1c4a6"},
]
typing-inspect = [
    {file = "typing_inspect-0.7.1-py2-none-any.whl", hash = "sha256:b1f56c0783ef0f25fb064a01be6e5407e54cf4a4bf4f3ba3fe51e0bd6dcea9e5"},
    {file = "typing_inspect-0.7.1-py3-none-any.whl", hash = "sha256:3cd7d4563e997719a710a3bfe7ffb544c6b72069b6812a02e9b414a8fa3aaa6b"},
@@ -27,13 +27,13 @@ flask-marshmallow = "*"
flask-migrate = "*"
flask-restful = "*"
werkzeug = "*"
spiffworkflow = {git = "https://github.com/sartography/SpiffWorkflow", rev = "feature/spiff_properties"}
# spiffworkflow = {develop = true, path = "/Users/kevin/projects/github/sartography/SpiffWorkflow"}
sentry-sdk = "1.7.1"
spiffworkflow = {git = "https://github.com/sartography/SpiffWorkflow", rev = "feature/message-correlations"}
# spiffworkflow = {develop = true, path = "/home/jason/projects/github/sartography/SpiffWorkflow"}
sentry-sdk = "1.9.0"
sphinx-autoapi = "^1.8.4"
# flask-bpmn = {develop = true, path = "/home/jason/projects/github/sartography/flask-bpmn"}
# flask-bpmn = {develop = true, path = "/Users/kevin/projects/github/sartography/flask-bpmn"}
flask-bpmn = {git = "https://github.com/sartography/flask-bpmn", rev = "feature/with-spiff-properties"}
flask-bpmn = {git = "https://github.com/sartography/flask-bpmn", rev = "main"}
mysql-connector-python = "^8.0.29"
pytest-flask = "^1.2.0"
pytest-flask-sqlalchemy = "^1.1.0"
@@ -47,6 +47,7 @@ PyJWT = "^2.4.0"
gunicorn = "^20.1.0"
types-pytz = "^2022.1.1"
python-keycloak = "^1.9.1"
APScheduler = "^3.9.1"
types-requests = "^2.28.6"
@@ -5,6 +5,7 @@ from typing import Any
import connexion  # type: ignore
import flask.app
import flask.json
from apscheduler.schedulers.background import BackgroundScheduler  # type: ignore
from flask_bpmn.api.api_error import api_error_blueprint
from flask_bpmn.models.db import db
from flask_bpmn.models.db import migrate
@@ -16,6 +17,7 @@ from spiffworkflow_backend.config import setup_config
from spiffworkflow_backend.routes.admin_blueprint.admin_blueprint import admin_blueprint
from spiffworkflow_backend.routes.process_api_blueprint import process_api_blueprint
from spiffworkflow_backend.routes.user_blueprint import user_blueprint
from spiffworkflow_backend.services.message_service import MessageServiceWithAppContext


class MyJSONEncoder(flask.json.JSONEncoder):
@@ -28,6 +30,17 @@ class MyJSONEncoder(flask.json.JSONEncoder):
        return super().default(obj)


def start_scheduler(app: flask.app.Flask) -> None:
    """Start_scheduler."""
    scheduler = BackgroundScheduler()
    scheduler.add_job(
        MessageServiceWithAppContext(app).process_message_instances_with_app_context,
        "interval",
        minutes=1,
    )
    scheduler.start()


def create_app() -> flask.app.Flask:
    """Create_app."""
    # We need to create the sqlite database in a known location.
@@ -71,4 +84,7 @@ def create_app() -> flask.app.Flask:

    app.json_encoder = MyJSONEncoder

    if app.config["PROCESS_WAITING_MESSAGES"]:
        start_scheduler(app)

    return app  # type: ignore
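A minimal usage sketch, not part of this commit: it assumes the package exposes create_app from its __init__ module (as the FLASK_APP value above suggests) and that the config flag is read from the environment, as shown in the config hunk below. With the flag set, create_app() starts the APScheduler job that polls for queued messages once per minute.

import os

# Assumption: the flag must be set before the app is created so setup_config picks it up.
os.environ["PROCESS_WAITING_MESSAGES"] = "true"

from spiffworkflow_backend import create_app  # assumed import path

app = create_app()  # start_scheduler(app) runs because PROCESS_WAITING_MESSAGES is truthy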
@@ -13,10 +13,12 @@ CORS_ALLOW_ORIGINS = re.split(
    r",\s*", environ.get("CORS_ALLOW_ORIGINS", default=CORS_DEFAULT)
)

PROCESS_WAITING_MESSAGES = (
    environ.get("PROCESS_WAITING_MESSAGES", default="false") == "true"
)
SPIFFWORKFLOW_FRONTEND_URL = environ.get(
    "SPIFFWORKFLOW_FRONTEND_URL", default="http://localhost:7001"
)

SPIFFWORKFLOW_BACKEND_URL = environ.get(
    "SPIFFWORKFLOW_BACKEND_URL", default="http://localhost:7000"
)
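A small behavior sketch of the flag parsing above (illustrative, not from the commit): the comparison is against the exact lowercase string "true", so any other value leaves message processing disabled.

from os import environ

environ["PROCESS_WAITING_MESSAGES"] = "TRUE"  # note the capitalisation
flag = environ.get("PROCESS_WAITING_MESSAGES", default="false") == "true"
assert flag is False  # only the exact string "true" enables the scheduler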
@@ -7,6 +7,16 @@ from flask_bpmn.models.db import add_listeners
from spiffworkflow_backend.models.active_task import ActiveTaskModel  # noqa: F401
from spiffworkflow_backend.models.data_store import DataStoreModel  # noqa: F401
from spiffworkflow_backend.models.file import FileModel  # noqa: F401
from spiffworkflow_backend.models.message_correlation_property import (
    MessageCorrelationPropertyModel,
)  # noqa: F401
from spiffworkflow_backend.models.message_instance import (
    MessageInstanceModel,
)  # noqa: F401
from spiffworkflow_backend.models.message_model import MessageModel  # noqa: F401
from spiffworkflow_backend.models.message_triggerable_process_model import (
    MessageTriggerableProcessModel,
)  # noqa: F401
from spiffworkflow_backend.models.principal import PrincipalModel  # noqa: F401
from spiffworkflow_backend.models.process_instance import (
    ProcessInstanceModel,
38 src/spiffworkflow_backend/models/message_correlation.py Normal file
@@ -0,0 +1,38 @@
"""Message_correlation."""
from dataclasses import dataclass

from flask_bpmn.models.db import db
from flask_bpmn.models.db import SpiffworkflowBaseDBModel
from sqlalchemy import ForeignKey

from spiffworkflow_backend.models.message_correlation_property import (
    MessageCorrelationPropertyModel,
)
from spiffworkflow_backend.models.process_instance import ProcessInstanceModel


@dataclass
class MessageCorrelationModel(SpiffworkflowBaseDBModel):
    """Message Correlations to relate queued messages together."""

    __tablename__ = "message_correlation"
    __table_args__ = (
        db.UniqueConstraint(
            "process_instance_id",
            "message_correlation_property_id",
            "name",
            name="message_instance_id_name_unique",
        ),
    )

    id = db.Column(db.Integer, primary_key=True)
    process_instance_id = db.Column(
        ForeignKey(ProcessInstanceModel.id), nullable=False, index=True  # type: ignore
    )
    message_correlation_property_id = db.Column(
        ForeignKey(MessageCorrelationPropertyModel.id), nullable=False, index=True
    )
    name = db.Column(db.String(255), nullable=False, index=True)
    value = db.Column(db.String(255), nullable=False, index=True)
    updated_at_in_seconds: int = db.Column(db.Integer)
    created_at_in_seconds: int = db.Column(db.Integer)
@@ -0,0 +1,32 @@
"""Message_correlation_message_instance."""
from dataclasses import dataclass

from flask_bpmn.models.db import db
from flask_bpmn.models.db import SpiffworkflowBaseDBModel
from sqlalchemy import ForeignKey

from spiffworkflow_backend.models.message_correlation import MessageCorrelationModel
from spiffworkflow_backend.models.message_instance import MessageInstanceModel


@dataclass
class MessageCorrelationMessageInstanceModel(SpiffworkflowBaseDBModel):
    """MessageCorrelationMessageInstanceModel."""

    __tablename__ = "message_correlation_message_instance"

    __table_args__ = (
        db.UniqueConstraint(
            "message_instance_id",
            "message_correlation_id",
            name="message_correlation_message_instance_unique",
        ),
    )

    id = db.Column(db.Integer, primary_key=True)
    message_instance_id = db.Column(
        ForeignKey(MessageInstanceModel.id), nullable=False, index=True
    )
    message_correlation_id = db.Column(
        ForeignKey(MessageCorrelationModel.id), nullable=False, index=True
    )
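A hypothetical query sketch, not part of the commit, showing how the join table above ties correlations to message instances; the variable some_correlation is made up and stands for an existing MessageCorrelationModel row.

matches = (
    db.session.query(MessageInstanceModel)
    .join(
        MessageCorrelationMessageInstanceModel,
        MessageCorrelationMessageInstanceModel.message_instance_id == MessageInstanceModel.id,
    )
    .filter(
        MessageCorrelationMessageInstanceModel.message_correlation_id == some_correlation.id
    )
    .all()
)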
@@ -0,0 +1,25 @@
"""Message_correlation_property."""
from flask_bpmn.models.db import db
from flask_bpmn.models.db import SpiffworkflowBaseDBModel
from sqlalchemy import ForeignKey

from spiffworkflow_backend.models.message_model import MessageModel


class MessageCorrelationPropertyModel(SpiffworkflowBaseDBModel):
    """MessageCorrelationPropertyModel."""

    __tablename__ = "message_correlation_property"
    __table_args__ = (
        db.UniqueConstraint(
            "identifier",
            "message_model_id",
            name="message_correlation_property_unique",
        ),
    )

    id = db.Column(db.Integer, primary_key=True)
    identifier = db.Column(db.String(50), index=True)
    message_model_id = db.Column(ForeignKey(MessageModel.id), nullable=False)
    updated_at_in_seconds: int = db.Column(db.Integer)
    created_at_in_seconds: int = db.Column(db.Integer)
95 src/spiffworkflow_backend/models/message_instance.py Normal file
@@ -0,0 +1,95 @@
"""Message_instance."""
import enum
from dataclasses import dataclass
from typing import Any
from typing import Optional

from flask_bpmn.models.db import db
from flask_bpmn.models.db import SpiffworkflowBaseDBModel
from sqlalchemy import ForeignKey
from sqlalchemy.orm import relationship
from sqlalchemy.orm import Session
from sqlalchemy.orm import validates
from sqlalchemy.orm.events import event

from spiffworkflow_backend.models.message_model import MessageModel
from spiffworkflow_backend.models.process_instance import ProcessInstanceModel


class MessageTypes(enum.Enum):
    """MessageTypes."""

    send = "send"
    receive = "receive"


class MessageStatuses(enum.Enum):
    """MessageStatuses."""

    ready = "ready"
    running = "running"
    completed = "completed"
    failed = "failed"


@dataclass
class MessageInstanceModel(SpiffworkflowBaseDBModel):
    """Messages from a process instance that are ready to send to a receiving task."""

    __tablename__ = "message_instance"

    id = db.Column(db.Integer, primary_key=True)
    process_instance_id = db.Column(ForeignKey(ProcessInstanceModel.id), nullable=False)  # type: ignore
    message_model_id = db.Column(ForeignKey(MessageModel.id), nullable=False)
    message_model = relationship("MessageModel")

    message_type = db.Column(db.String(20), nullable=False)
    payload = db.Column(db.JSON)
    status = db.Column(db.String(20), nullable=False, default="ready")
    failure_cause = db.Column(db.Text())
    updated_at_in_seconds: int = db.Column(db.Integer)
    created_at_in_seconds: int = db.Column(db.Integer)

    def validate_enum_field(
        self, key: str, value: Any, enum_variable: enum.EnumMeta
    ) -> Any:
        """Validate_enum_field."""
        try:
            m_type = getattr(enum_variable, value, None)
        except Exception as e:
            raise ValueError(
                f"{self.__class__.__name__}: invalid {key}: {value}"
            ) from e

        if m_type is None:
            raise ValueError(f"{self.__class__.__name__}: invalid {key}: {value}")

        return m_type.value

    @validates("message_type")
    def validate_message_type(self, key: str, value: Any) -> Any:
        """Validate_message_type."""
        return self.validate_enum_field(key, value, MessageTypes)

    @validates("status")
    def validate_status(self, key: str, value: Any) -> Any:
        """Validate_status."""
        return self.validate_enum_field(key, value, MessageStatuses)


# This runs for ALL db flushes for ANY model, not just this one even if it's in the MessageInstanceModel class
# so this may not be worth it or there may be a better way to do it
#
# https://stackoverflow.com/questions/32555829/flask-validates-decorator-multiple-fields-simultaneously/33025472#33025472
# https://docs.sqlalchemy.org/en/14/orm/session_events.html#before-flush
@event.listens_for(Session, "before_flush")  # type: ignore
def ensure_failure_cause_is_set_if_message_instance_failed(
    session: Any, _flush_context: Optional[Any], _instances: Optional[Any]
) -> None:
    """Ensure_failure_cause_is_set_if_message_instance_failed."""
    for instance in session.new:
        if isinstance(instance, MessageInstanceModel):
            if instance.status == "failed" and instance.failure_cause is None:
                raise ValueError(
                    f"{instance.__class__.__name__}: failure_cause must be set if status is failed"
                )
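A behavior sketch for the validators and the before_flush listener above; it is not part of the commit, and the ids and payload values are made up (they assume existing process_instance and message_model rows and an application context).

message_instance = MessageInstanceModel(
    process_instance_id=1,
    message_model_id=1,
    message_type="send",   # checked against MessageTypes by validate_message_type
    status="ready",        # checked against MessageStatuses by validate_status
    payload={"order_id": 42},
)
db.session.add(message_instance)
db.session.commit()

# Invalid enum values are rejected as soon as the attribute is set:
#   message_instance.message_type = "bogus"  -> ValueError from validate_message_type
# Marking an instance failed without a failure_cause is rejected at flush time:
message_instance.status = "failed"
# db.session.commit() would now raise ValueError from the before_flush listener
# unless message_instance.failure_cause is set first.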
13 src/spiffworkflow_backend/models/message_model.py Normal file
@@ -0,0 +1,13 @@
"""Message_model."""
from flask_bpmn.models.db import db
from flask_bpmn.models.db import SpiffworkflowBaseDBModel


class MessageModel(SpiffworkflowBaseDBModel):
    """MessageModel."""

    __tablename__ = "message_model"

    id = db.Column(db.Integer, primary_key=True)
    identifier = db.Column(db.String(50), unique=True, index=True)
    name = db.Column(db.String(50), unique=True, index=True)
@@ -0,0 +1,22 @@
"""Message_correlation_property."""
from flask_bpmn.models.db import db
from flask_bpmn.models.db import SpiffworkflowBaseDBModel
from sqlalchemy import ForeignKey

from spiffworkflow_backend.models.message_model import MessageModel


class MessageTriggerableProcessModel(SpiffworkflowBaseDBModel):
    """MessageTriggerableProcessModel."""

    __tablename__ = "message_triggerable_process_model"

    id = db.Column(db.Integer, primary_key=True)
    message_model_id = db.Column(
        ForeignKey(MessageModel.id), nullable=False, unique=True
    )
    process_model_identifier: str = db.Column(db.String(50), nullable=False, index=True)
    process_group_identifier: str = db.Column(db.String(50), nullable=False, index=True)

    updated_at_in_seconds: int = db.Column(db.Integer)
    created_at_in_seconds: int = db.Column(db.Integer)
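A hypothetical lookup sketch, not from the commit, for how an incoming message identifier could be matched to the process model it is allowed to start; the identifier "order_received" is invented for illustration.

message_model = MessageModel.query.filter_by(identifier="order_received").first()
triggerable = MessageTriggerableProcessModel.query.filter_by(
    message_model_id=message_model.id
).first()
# triggerable.process_group_identifier and triggerable.process_model_identifier
# tell the caller which process model to instantiate for this message.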
@@ -7,8 +7,6 @@ import marshmallow
from marshmallow import Schema
from marshmallow_enum import EnumField  # type: ignore

# from SpiffWorkflow.camunda.specs.UserTask import Form  # type: ignore


class MultiInstanceType(enum.Enum):
    """MultiInstanceType."""
@@ -24,12 +24,12 @@ class UserModel(SpiffworkflowBaseDBModel):
    __tablename__ = "user"
    __table_args__ = (db.UniqueConstraint("service", "service_id", name="service_key"),)
    id = db.Column(db.Integer, primary_key=True)
    username = db.Column(db.String(50), nullable=False, unique=True)
    username = db.Column(db.String(255), nullable=False, unique=True)
    uid = db.Column(db.String(50), unique=True)
    service = db.Column(db.String(50), nullable=False, unique=False)
    service_id = db.Column(db.String(), nullable=False, unique=False)
    name = db.Column(db.String(50))
    email = db.Column(db.String(50))
    service_id = db.Column(db.String(255), nullable=False, unique=False)
    name = db.Column(db.String(255))
    email = db.Column(db.String(255))
    user_group_assignments = relationship(UserGroupAssignmentModel, cascade="delete")
    groups = relationship(  # type: ignore
        GroupModel,
@ -46,10 +46,6 @@ from spiffworkflow_backend.services.process_instance_service import (
|
||||
from spiffworkflow_backend.services.process_model_service import ProcessModelService
|
||||
from spiffworkflow_backend.services.spec_file_service import SpecFileService
|
||||
|
||||
# from SpiffWorkflow.bpmn.serializer.workflow import BpmnWorkflowSerializer # type: ignore
|
||||
# from SpiffWorkflow.camunda.serializer.task_spec_converters import UserTaskConverter # type: ignore
|
||||
# from SpiffWorkflow.dmn.serializer.task_spec_converters import BusinessRuleTaskConverter # type: ignore
|
||||
|
||||
process_api_blueprint = Blueprint("process_api", __name__)
|
||||
|
||||
|
||||
@ -596,69 +592,53 @@ def process_instance_task_list(
|
||||
|
||||
def task_show(process_instance_id: int, task_id: str) -> flask.wrappers.Response:
|
||||
"""Task_show."""
|
||||
principal = find_principal_or_raise()
|
||||
|
||||
process_instance = find_process_instance_by_id_or_raise(process_instance_id)
|
||||
process_model = get_process_model(
|
||||
process_instance.process_model_identifier,
|
||||
process_instance.process_group_identifier,
|
||||
)
|
||||
|
||||
active_task_assigned_to_me = ActiveTaskModel.query.filter_by(
|
||||
process_instance_id=process_instance_id,
|
||||
task_id=task_id,
|
||||
assigned_principal_id=principal.id,
|
||||
).first()
|
||||
|
||||
form_schema_file_name = ""
|
||||
form_ui_schema_file_name = ""
|
||||
task = None
|
||||
if active_task_assigned_to_me:
|
||||
form_schema_file_name = active_task_assigned_to_me.form_file_name
|
||||
form_ui_schema_file_name = active_task_assigned_to_me.ui_form_file_name
|
||||
active_task_assigned_to_me.process_model_identifier = (
|
||||
process_instance.process_model_identifier
|
||||
)
|
||||
task = ActiveTaskModel.to_task(active_task_assigned_to_me)
|
||||
else:
|
||||
spiff_task = get_spiff_task_from_process_instance(task_id, process_instance)
|
||||
extensions = spiff_task.task_spec.extensions
|
||||
spiff_task = get_spiff_task_from_process_instance(task_id, process_instance)
|
||||
extensions = spiff_task.task_spec.extensions
|
||||
|
||||
if "properties" in extensions:
|
||||
properties = extensions["properties"]
|
||||
if "formJsonSchemaFilename" in properties:
|
||||
form_schema_file_name = properties["formJsonSchemaFilename"]
|
||||
if "formUiSchemaFilename" in properties:
|
||||
form_ui_schema_file_name = properties["formUiSchemaFilename"]
|
||||
task = ProcessInstanceService.spiff_task_to_api_task(spiff_task)
|
||||
task.data = spiff_task.data
|
||||
if "properties" in extensions:
|
||||
properties = extensions["properties"]
|
||||
if "formJsonSchemaFilename" in properties:
|
||||
form_schema_file_name = properties["formJsonSchemaFilename"]
|
||||
if "formUiSchemaFilename" in properties:
|
||||
form_ui_schema_file_name = properties["formUiSchemaFilename"]
|
||||
task = ProcessInstanceService.spiff_task_to_api_task(spiff_task)
|
||||
task.data = spiff_task.data
|
||||
|
||||
if form_schema_file_name is None:
|
||||
raise (
|
||||
ApiError(
|
||||
code="missing_form_file",
|
||||
message=f"Cannot find a form file for process_instance_id: {process_instance_id}, task_id: {task_id}",
|
||||
status_code=500,
|
||||
if task.type == "UserTask":
|
||||
if not form_schema_file_name:
|
||||
raise (
|
||||
ApiError(
|
||||
code="missing_form_file",
|
||||
message=f"Cannot find a form file for process_instance_id: {process_instance_id}, task_id: {task_id}",
|
||||
status_code=500,
|
||||
)
|
||||
)
|
||||
)
|
||||
|
||||
form_contents = prepare_form_data(
|
||||
form_schema_file_name,
|
||||
task.data,
|
||||
process_model,
|
||||
)
|
||||
|
||||
if form_contents:
|
||||
task.form_schema = form_contents
|
||||
|
||||
if form_ui_schema_file_name:
|
||||
ui_form_contents = prepare_form_data(
|
||||
form_ui_schema_file_name,
|
||||
form_contents = prepare_form_data(
|
||||
form_schema_file_name,
|
||||
task.data,
|
||||
process_model,
|
||||
)
|
||||
if ui_form_contents:
|
||||
task.form_ui_schema = ui_form_contents
|
||||
|
||||
if form_contents:
|
||||
task.form_schema = form_contents
|
||||
|
||||
if form_ui_schema_file_name:
|
||||
ui_form_contents = prepare_form_data(
|
||||
form_ui_schema_file_name,
|
||||
task.data,
|
||||
process_model,
|
||||
)
|
||||
if ui_form_contents:
|
||||
task.form_ui_schema = ui_form_contents
|
||||
|
||||
return make_response(jsonify(task), 200)
|
||||
|
||||
|
src/spiffworkflow_backend/services/message_service.py (new file, 230 lines)
@ -0,0 +1,230 @@
|
||||
"""Message_service."""
|
||||
from typing import Any
|
||||
from typing import Optional
|
||||
|
||||
import flask
|
||||
from flask_bpmn.models.db import db
|
||||
from sqlalchemy import and_
|
||||
from sqlalchemy import or_
|
||||
from sqlalchemy import select
|
||||
|
||||
from spiffworkflow_backend.models.message_correlation import MessageCorrelationModel
|
||||
from spiffworkflow_backend.models.message_correlation_message_instance import (
|
||||
MessageCorrelationMessageInstanceModel,
|
||||
)
|
||||
from spiffworkflow_backend.models.message_instance import MessageInstanceModel
|
||||
from spiffworkflow_backend.models.message_triggerable_process_model import (
|
||||
MessageTriggerableProcessModel,
|
||||
)
|
||||
from spiffworkflow_backend.models.process_instance import ProcessInstanceModel
|
||||
from spiffworkflow_backend.services.process_instance_processor import (
|
||||
ProcessInstanceProcessor,
|
||||
)
|
||||
from spiffworkflow_backend.services.process_instance_service import (
|
||||
ProcessInstanceService,
|
||||
)
|
||||
from spiffworkflow_backend.services.user_service import UserService
|
||||
|
||||
|
||||
class MessageServiceWithAppContext:
|
||||
"""Wrapper for Message Service.
|
||||
|
||||
This wrapper facilitates running the MessageService from the scheduler,
since we need to push the app context explicitly there.
|
||||
"""
|
||||
|
||||
def __init__(self, app: flask.app.Flask):
|
||||
"""__init__."""
|
||||
self.app = app
|
||||
|
||||
def process_message_instances_with_app_context(self) -> None:
|
||||
"""Since this runs in a scheduler, we need to specify the app context as well."""
|
||||
with self.app.app_context():
|
||||
MessageService().process_message_instances()
|
||||
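A hedged sketch of how this wrapper might be wired into a background scheduler (the APScheduler usage and the 10-second interval are illustrative assumptions, not taken from this diff; only the PROCESS_WAITING_MESSAGES flag and MessageServiceWithAppContext come from the changeset):

import os
from apscheduler.schedulers.background import BackgroundScheduler

def start_message_polling(app):
    # Only poll when the feature flag introduced in this PR is enabled.
    if os.environ.get("PROCESS_WAITING_MESSAGES") != "true":
        return None
    scheduler = BackgroundScheduler()
    wrapper = MessageServiceWithAppContext(app)
    # The wrapper pushes an app context so db.session and config work inside the job.
    scheduler.add_job(
        wrapper.process_message_instances_with_app_context,
        "interval",
        seconds=10,
    )
    scheduler.start()
    return scheduler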
|
||||
|
||||
class MessageServiceError(Exception):
|
||||
"""MessageServiceError."""
|
||||
|
||||
|
||||
class MessageService:
|
||||
"""MessageService."""
|
||||
|
||||
def process_message_instances(self) -> None:
|
||||
"""Process_message_instances."""
|
||||
message_instances_send = MessageInstanceModel.query.filter_by(
|
||||
message_type="send", status="ready"
|
||||
).all()
|
||||
message_instances_receive = MessageInstanceModel.query.filter_by(
|
||||
message_type="receive"
|
||||
).all()
|
||||
for message_instance_send in message_instances_send:
|
||||
# check again in case another background process picked up the message
|
||||
# while the previous one was running
|
||||
if message_instance_send.status != "ready":
|
||||
continue
|
||||
|
||||
message_instance_send.status = "running"
|
||||
db.session.add(message_instance_send)
|
||||
db.session.commit()
|
||||
|
||||
message_instance_receive = None
|
||||
try:
|
||||
message_instance_receive = self.get_message_instance_receive(
|
||||
message_instance_send, message_instances_receive
|
||||
)
|
||||
if message_instance_receive is None:
|
||||
message_triggerable_process_model = (
|
||||
MessageTriggerableProcessModel.query.filter_by(
|
||||
message_model_id=message_instance_send.message_model_id
|
||||
).first()
|
||||
)
|
||||
if message_triggerable_process_model:
|
||||
system_user = UserService().find_or_create_user(
|
||||
service="internal",
|
||||
service_id="system_user",
|
||||
username="system_user",
|
||||
)
|
||||
process_instance_receive = ProcessInstanceService.create_process_instance(
|
||||
message_triggerable_process_model.process_model_identifier,
|
||||
system_user,
|
||||
process_group_identifier=message_triggerable_process_model.process_group_identifier,
|
||||
)
|
||||
processor_receive = ProcessInstanceProcessor(
|
||||
process_instance_receive
|
||||
)
|
||||
processor_receive.do_engine_steps()
|
||||
processor_receive.bpmn_process_instance.catch_bpmn_message(
|
||||
message_instance_send.message_model.name,
|
||||
message_instance_send.payload,
|
||||
correlations={},
|
||||
)
|
||||
processor_receive.save()
|
||||
processor_receive.do_engine_steps()
|
||||
processor_receive.save()
|
||||
message_instance_send.status = "completed"
|
||||
else:
|
||||
# no matching queued receive message and no triggerable process model, so put the send message back in the queue
|
||||
message_instance_send.status = "ready"
|
||||
|
||||
else:
|
||||
self.process_message_receive(
|
||||
message_instance_send, message_instance_receive
|
||||
)
|
||||
message_instance_receive.status = "completed"
|
||||
db.session.add(message_instance_receive)
|
||||
message_instance_send.status = "completed"
|
||||
|
||||
db.session.add(message_instance_send)
|
||||
db.session.commit()
|
||||
except Exception as exception:
|
||||
db.session.rollback()
|
||||
message_instance_send.status = "failed"
|
||||
message_instance_send.failure_cause = str(exception)
|
||||
db.session.add(message_instance_send)
|
||||
|
||||
if message_instance_receive:
|
||||
message_instance_receive.status = "failed"
|
||||
message_instance_receive.failure_cause = str(exception)
|
||||
db.session.add(message_instance_receive)
|
||||
|
||||
db.session.commit()
|
||||
raise exception
|
||||
|
||||
def process_message_receive(
|
||||
self,
|
||||
message_instance_send: MessageInstanceModel,
|
||||
message_instance_receive: MessageInstanceModel,
|
||||
) -> None:
|
||||
"""Process_message_receive."""
|
||||
process_instance_receive = ProcessInstanceModel.query.filter_by(
|
||||
id=message_instance_receive.process_instance_id
|
||||
).first()
|
||||
if process_instance_receive is None:
|
||||
raise MessageServiceError(
|
||||
(
|
||||
f"Process instance cannot be found for queued message: {message_instance_receive.id}."
|
||||
f"Tried with id {message_instance_receive.process_instance_id}",
|
||||
)
|
||||
)
|
||||
|
||||
processor_receive = ProcessInstanceProcessor(process_instance_receive)
|
||||
processor_receive.bpmn_process_instance.catch_bpmn_message(
|
||||
message_instance_send.message_model.name,
|
||||
message_instance_send.payload,
|
||||
correlations={},
|
||||
)
|
||||
processor_receive.do_engine_steps()
|
||||
processor_receive.save()
|
||||
|
||||
def get_message_instance_receive(
|
||||
self,
|
||||
message_instance_send: MessageInstanceModel,
|
||||
message_instances_receive: list[MessageInstanceModel],
|
||||
) -> Optional[MessageInstanceModel]:
|
||||
"""Get_message_instance_receive."""
|
||||
message_correlations_send = (
|
||||
MessageCorrelationModel.query.join(MessageCorrelationMessageInstanceModel)
|
||||
.filter_by(message_instance_id=message_instance_send.id)
|
||||
.all()
|
||||
)
|
||||
|
||||
message_correlation_filter = []
|
||||
for message_correlation_send in message_correlations_send:
|
||||
message_correlation_filter.append(
|
||||
and_(
|
||||
MessageCorrelationModel.name == message_correlation_send.name,
|
||||
MessageCorrelationModel.value == message_correlation_send.value,
|
||||
MessageCorrelationModel.message_correlation_property_id
|
||||
== message_correlation_send.message_correlation_property_id,
|
||||
)
|
||||
)
|
||||
|
||||
for message_instance_receive in message_instances_receive:
|
||||
|
||||
# sqlalchemy supports select / where statements like active record, apparently
|
||||
# https://docs.sqlalchemy.org/en/14/core/tutorial.html#conjunctions
|
||||
message_correlation_select = (
|
||||
select([db.func.count()])
|
||||
.select_from(MessageCorrelationModel) # type: ignore
|
||||
.where(
|
||||
and_(
|
||||
MessageCorrelationModel.process_instance_id
|
||||
== message_instance_receive.process_instance_id,
|
||||
or_(*message_correlation_filter),
|
||||
)
|
||||
)
|
||||
.join(MessageCorrelationMessageInstanceModel) # type: ignore
|
||||
.filter_by(
|
||||
message_instance_id=message_instance_receive.id,
|
||||
)
|
||||
)
|
||||
message_correlations_receive = db.session.execute(
|
||||
message_correlation_select
|
||||
)
|
||||
|
||||
# since the query matches on name, value, and message_instance_receive.id, if the count of
# message correlations found equals the count on the send side, then this should be the relevant message
|
||||
if (
|
||||
message_correlations_receive.scalar() == len(message_correlations_send)
|
||||
and message_instance_receive.message_model_id
|
||||
== message_instance_send.message_model_id
|
||||
):
|
||||
return message_instance_receive
|
||||
|
||||
return None
|
||||
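For intuition, a small in-memory sketch of the count-based matching heuristic used above (hypothetical correlation values, not the actual SQLAlchemy query):

# Send-side correlations gathered for message_instance_send.
send_correlations = {"topica": "first_conversation_a_1", "topicb": "first_conversation_b_1"}
# Receive-side correlations stored for a candidate message_instance_receive.
receive_correlations = {"topica": "first_conversation_a_1", "topicb": "first_conversation_b_1"}

matching = [k for k, v in send_correlations.items() if receive_correlations.get(k) == v]
# The candidate is considered relevant when every send-side property matched
# and both instances share the same message_model_id.
is_relevant = len(matching) == len(send_correlations)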
|
||||
def get_process_instance_for_message_instance(
|
||||
self, message_instance: MessageInstanceModel
|
||||
) -> Any:
|
||||
"""Get_process_instance_for_message_instance."""
|
||||
process_instance = ProcessInstanceModel.query.filter_by(
|
||||
id=message_instance.process_instance_id
|
||||
).first()
|
||||
if process_instance is None:
|
||||
raise MessageServiceError(
|
||||
f"Process instance cannot be found for message: {message_instance.id}."
|
||||
f"Tried with id {message_instance.process_instance_id}"
|
||||
)
|
||||
|
||||
return process_instance
|
@ -29,12 +29,33 @@ from SpiffWorkflow.dmn.serializer import BusinessRuleTaskConverter # type: igno
|
||||
from SpiffWorkflow.serializer.exceptions import MissingSpecError # type: ignore
|
||||
from SpiffWorkflow.specs import WorkflowSpec # type: ignore
|
||||
from SpiffWorkflow.spiff.parser.process import SpiffBpmnParser # type: ignore
|
||||
from SpiffWorkflow.spiff.serializer import UserTaskConverter # type: ignore
|
||||
from SpiffWorkflow.spiff.serializer import BoundaryEventConverter # type: ignore
|
||||
from SpiffWorkflow.spiff.serializer import CallActivityTaskConverter
|
||||
from SpiffWorkflow.spiff.serializer import EndEventConverter
|
||||
from SpiffWorkflow.spiff.serializer import IntermediateCatchEventConverter
|
||||
from SpiffWorkflow.spiff.serializer import IntermediateThrowEventConverter
|
||||
from SpiffWorkflow.spiff.serializer import ManualTaskConverter
|
||||
from SpiffWorkflow.spiff.serializer import NoneTaskConverter
|
||||
from SpiffWorkflow.spiff.serializer import ReceiveTaskConverter
|
||||
from SpiffWorkflow.spiff.serializer import SendTaskConverter
|
||||
from SpiffWorkflow.spiff.serializer import StartEventConverter
|
||||
from SpiffWorkflow.spiff.serializer import SubWorkflowTaskConverter
|
||||
from SpiffWorkflow.spiff.serializer import TransactionSubprocessConverter
|
||||
from SpiffWorkflow.spiff.serializer import UserTaskConverter
|
||||
from SpiffWorkflow.util.deep_merge import DeepMerge # type: ignore
|
||||
|
||||
from spiffworkflow_backend.models.active_task import ActiveTaskModel
|
||||
from spiffworkflow_backend.models.file import File
|
||||
from spiffworkflow_backend.models.file import FileType
|
||||
from spiffworkflow_backend.models.message_correlation import MessageCorrelationModel
|
||||
from spiffworkflow_backend.models.message_correlation_message_instance import (
|
||||
MessageCorrelationMessageInstanceModel,
|
||||
)
|
||||
from spiffworkflow_backend.models.message_correlation_property import (
|
||||
MessageCorrelationPropertyModel,
|
||||
)
|
||||
from spiffworkflow_backend.models.message_instance import MessageInstanceModel
|
||||
from spiffworkflow_backend.models.message_instance import MessageModel
|
||||
from spiffworkflow_backend.models.principal import PrincipalModel
|
||||
from spiffworkflow_backend.models.process_instance import ProcessInstanceModel
|
||||
from spiffworkflow_backend.models.process_instance import ProcessInstanceStatus
|
||||
@ -102,7 +123,22 @@ class ProcessInstanceProcessor:
|
||||
_script_engine = CustomBpmnScriptEngine()
|
||||
SERIALIZER_VERSION = "1.0-CRC"
|
||||
wf_spec_converter = BpmnWorkflowSerializer.configure_workflow_spec_converter(
|
||||
[UserTaskConverter, BusinessRuleTaskConverter]
|
||||
[
|
||||
BoundaryEventConverter,
|
||||
BusinessRuleTaskConverter,
|
||||
CallActivityTaskConverter,
|
||||
EndEventConverter,
|
||||
IntermediateCatchEventConverter,
|
||||
IntermediateThrowEventConverter,
|
||||
ManualTaskConverter,
|
||||
NoneTaskConverter,
|
||||
ReceiveTaskConverter,
|
||||
SendTaskConverter,
|
||||
StartEventConverter,
|
||||
SubWorkflowTaskConverter,
|
||||
TransactionSubprocessConverter,
|
||||
UserTaskConverter,
|
||||
]
|
||||
)
|
||||
_serializer = BpmnWorkflowSerializer(wf_spec_converter, version=SERIALIZER_VERSION)
|
||||
_old_serializer = BpmnSerializer()
|
||||
@ -384,7 +420,7 @@ class ProcessInstanceProcessor:
|
||||
task_name=ready_or_waiting_task.task_spec.name,
|
||||
task_title=ready_or_waiting_task.task_spec.description,
|
||||
task_type=ready_or_waiting_task.task_spec.__class__.__name__,
|
||||
task_status=ready_or_waiting_task.state.name,
|
||||
task_status=ready_or_waiting_task.get_state_name(),
|
||||
task_data=json.dumps(ready_or_waiting_task.data),
|
||||
)
|
||||
db.session.add(active_task)
|
||||
@ -454,11 +490,143 @@ class ProcessInstanceProcessor:
|
||||
"""Get_status."""
|
||||
return self.status_of(self.bpmn_process_instance)
|
||||
|
||||
# messages have one correlation key (possibly wrong)
|
||||
# correlation keys may have many correlation properties
|
||||
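# For illustration (a hedged assumption about shape, not taken verbatim from this diff),
# bpmn_message.correlations as consumed below looks roughly like:
#     {"message_correlation_key": {"topica": "first_conversation_a_...",
#                                  "topicb": "first_conversation_b_..."}}
# i.e. correlation key name -> {correlation property identifier: value}.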
def process_bpmn_messages(self) -> None:
|
||||
"""Process_bpmn_messages."""
|
||||
bpmn_messages = self.bpmn_process_instance.get_bpmn_messages()
|
||||
for bpmn_message in bpmn_messages:
|
||||
# only message sends are in get_bpmn_messages
|
||||
message_type = "send"
|
||||
message_model = MessageModel.query.filter_by(name=bpmn_message.name).first()
|
||||
if message_model is None:
|
||||
raise ApiError(
|
||||
"invalid_message_name",
|
||||
f"Invalid message name: {bpmn_message.name}.",
|
||||
)
|
||||
|
||||
if not bpmn_message.correlations:
|
||||
raise ApiError(
|
||||
"message_correlations_missing",
|
||||
f"Could not find any message correlations bpmn_message: {bpmn_message}",
|
||||
)
|
||||
|
||||
message_correlations = []
|
||||
for (
|
||||
message_correlation_key,
|
||||
message_correlation_properties,
|
||||
) in bpmn_message.correlations.items():
|
||||
for (
|
||||
message_correlation_property_identifier,
|
||||
message_correlation_property_value,
|
||||
) in message_correlation_properties.items():
|
||||
message_correlation_property = (
|
||||
MessageCorrelationPropertyModel.query.filter_by(
|
||||
identifier=message_correlation_property_identifier,
|
||||
).first()
|
||||
)
|
||||
if message_correlation_property is None:
|
||||
raise ApiError(
|
||||
"message_correlations_missing_from_process",
|
||||
"Could not find a known message correlation with identifier:"
|
||||
f"{message_correlation_property_identifier}",
|
||||
)
|
||||
message_correlations.append(
|
||||
{
|
||||
"message_correlation_property": message_correlation_property,
|
||||
"name": message_correlation_key,
|
||||
"value": message_correlation_property_value,
|
||||
}
|
||||
)
|
||||
|
||||
message_instance = MessageInstanceModel(
|
||||
process_instance_id=self.process_instance_model.id,
|
||||
message_type=message_type,
|
||||
message_model_id=message_model.id,
|
||||
payload=bpmn_message.payload,
|
||||
)
|
||||
db.session.add(message_instance)
|
||||
db.session.commit()
|
||||
|
||||
for message_correlation in message_correlations:
|
||||
message_correlation = MessageCorrelationModel(
|
||||
process_instance_id=self.process_instance_model.id,
|
||||
message_correlation_property_id=message_correlation[
|
||||
"message_correlation_property"
|
||||
].id,
|
||||
name=message_correlation["name"],
|
||||
value=message_correlation["value"],
|
||||
)
|
||||
db.session.add(message_correlation)
|
||||
db.session.commit()
|
||||
message_correlation_message_instance = (
|
||||
MessageCorrelationMessageInstanceModel(
|
||||
message_instance_id=message_instance.id,
|
||||
message_correlation_id=message_correlation.id,
|
||||
)
|
||||
)
|
||||
db.session.add(message_correlation_message_instance)
|
||||
db.session.commit()
|
||||
|
||||
def queue_waiting_receive_messages(self) -> None:
|
||||
"""Queue_waiting_receive_messages."""
|
||||
waiting_tasks = self.get_all_waiting_tasks()
|
||||
for waiting_task in waiting_tasks:
|
||||
if waiting_task.task_spec.__class__.__name__ in [
|
||||
"IntermediateCatchEvent",
|
||||
"ReceiveTask",
|
||||
]:
|
||||
message_model = MessageModel.query.filter_by(
|
||||
name=waiting_task.task_spec.event_definition.name
|
||||
).first()
|
||||
if message_model is None:
|
||||
raise ApiError(
|
||||
"invalid_message_name",
|
||||
f"Invalid message name: {waiting_task.task_spec.event_definition.name}.",
|
||||
)
|
||||
|
||||
message_instance = MessageInstanceModel(
|
||||
process_instance_id=self.process_instance_model.id,
|
||||
message_type="receive",
|
||||
message_model_id=message_model.id,
|
||||
)
|
||||
db.session.add(message_instance)
|
||||
|
||||
for (
|
||||
spiff_correlation_property
|
||||
) in waiting_task.task_spec.event_definition.correlation_properties:
|
||||
# NOTE: we may have to cycle through keys here
|
||||
# not sure yet if it's valid for a property to be associated with multiple keys
|
||||
correlation_key_name = spiff_correlation_property.correlation_keys[
|
||||
0
|
||||
]
|
||||
message_correlation = (
|
||||
MessageCorrelationModel.query.filter_by(
|
||||
process_instance_id=self.process_instance_model.id,
|
||||
name=correlation_key_name,
|
||||
)
|
||||
.join(MessageCorrelationPropertyModel)
|
||||
.filter_by(identifier=spiff_correlation_property.name)
|
||||
.first()
|
||||
)
|
||||
message_correlation_message_instance = (
|
||||
MessageCorrelationMessageInstanceModel(
|
||||
message_instance_id=message_instance.id,
|
||||
message_correlation_id=message_correlation.id,
|
||||
)
|
||||
)
|
||||
db.session.add(message_correlation_message_instance)
|
||||
|
||||
db.session.commit()
|
||||
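# A hedged illustration (hypothetical row values, not from this diff) of what
# queue_waiting_receive_messages leaves behind for a process stuck on an
# IntermediateCatchEvent or ReceiveTask:
#   message_instance: process_instance_id=42, message_type="receive", status="ready"
#   message_correlation_message_instance: one row per correlation property,
#       linking that message_instance to the matching MessageCorrelationModel row.
# The scheduled MessageService later pairs these rows with "send" instances.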
|
||||
def do_engine_steps(self, exit_at: None = None) -> None:
|
||||
"""Do_engine_steps."""
|
||||
try:
|
||||
self.bpmn_process_instance.refresh_waiting_tasks()
|
||||
self.bpmn_process_instance.do_engine_steps(exit_at=exit_at)
|
||||
self.process_bpmn_messages()
|
||||
self.queue_waiting_receive_messages()
|
||||
|
||||
except WorkflowTaskExecException as we:
|
||||
raise ApiError.from_workflow_exception("task_error", str(we), we) from we
|
||||
|
||||
@ -614,11 +782,24 @@ class ProcessInstanceProcessor:
|
||||
and t.state in [TaskState.COMPLETED, TaskState.CANCELLED]
|
||||
]
|
||||
|
||||
def get_all_waiting_tasks(self) -> list[SpiffTask]:
|
||||
"""Get_all_ready_or_waiting_tasks."""
|
||||
all_tasks = self.bpmn_process_instance.get_tasks(TaskState.ANY_MASK)
|
||||
return [t for t in all_tasks if t.state in [TaskState.WAITING]]
|
||||
|
||||
def get_all_ready_or_waiting_tasks(self) -> list[SpiffTask]:
|
||||
"""Get_all_ready_or_waiting_tasks."""
|
||||
all_tasks = self.bpmn_process_instance.get_tasks(TaskState.ANY_MASK)
|
||||
return [t for t in all_tasks if t.state in [TaskState.WAITING, TaskState.READY]]
|
||||
|
||||
def get_task_by_id(self, task_id: str) -> SpiffTask:
|
||||
"""Get_task_by_id."""
|
||||
all_tasks = self.bpmn_process_instance.get_tasks(TaskState.ANY_MASK)
|
||||
for task in all_tasks:
|
||||
if task.id == task_id:
|
||||
return task
|
||||
return None
|
||||
|
||||
def get_nav_item(self, task: SpiffTask) -> Any:
|
||||
"""Get_nav_item."""
|
||||
for nav_item in self.bpmn_process_instance.get_nav_list():
|
||||
|
@ -10,13 +10,12 @@ from flask_bpmn.api.api_error import ApiError
|
||||
from flask_bpmn.models.db import db
|
||||
from SpiffWorkflow.bpmn.specs.events import EndEvent # type: ignore
|
||||
from SpiffWorkflow.bpmn.specs.events import StartEvent
|
||||
from SpiffWorkflow.bpmn.specs.ManualTask import ManualTask # type: ignore
|
||||
from SpiffWorkflow.bpmn.specs.ScriptTask import ScriptTask # type: ignore
|
||||
from SpiffWorkflow.bpmn.specs.UserTask import UserTask # type: ignore
|
||||
from SpiffWorkflow.camunda.specs.UserTask import EnumFormField # type: ignore
|
||||
from SpiffWorkflow.dmn.specs.BusinessRuleTask import BusinessRuleTask # type: ignore
|
||||
from SpiffWorkflow.specs import CancelTask # type: ignore
|
||||
from SpiffWorkflow.specs import StartTask
|
||||
from SpiffWorkflow.spiff.specs.manual_task import ManualTask # type: ignore
|
||||
from SpiffWorkflow.spiff.specs.user_task import UserTask # type: ignore
|
||||
from SpiffWorkflow.task import Task as SpiffTask # type: ignore
|
||||
from SpiffWorkflow.util.deep_merge import DeepMerge # type: ignore
|
||||
|
||||
@ -443,218 +442,4 @@ class ProcessInstanceService:
|
||||
properties=props,
|
||||
)
|
||||
|
||||
# # Only process the form and documentation if requested.
|
||||
# # The task should be in a completed or a ready state, and should
|
||||
# # not be a previously completed MI Task.
|
||||
# if add_docs_and_forms:
|
||||
# task.data = spiff_task.data
|
||||
# if (
|
||||
# hasattr(spiff_task.task_spec, "form")
|
||||
# and spiff_task.task_spec.form is not None
|
||||
# ):
|
||||
# task.form = spiff_task.task_spec.form
|
||||
# for i, field in enumerate(task.form.fields):
|
||||
# task.form.fields[i] = ProcessInstanceService.process_options(
|
||||
# spiff_task, field
|
||||
# )
|
||||
# # If there is a default value, set it.
|
||||
# # if field.id not in task.data and ProcessInstanceService.get_default_value(field, spiff_task) is not None:
|
||||
# # task.data[field.id] = ProcessInstanceService.get_default_value(field, spiff_task)
|
||||
# # task.documentation = ProcessInstanceService._process_documentation(spiff_task)
|
||||
# task.documentation = (
|
||||
# spiff_task.task_spec.documentation
|
||||
# if hasattr(spiff_task.task_spec, "documentation")
|
||||
# else None
|
||||
# )
|
||||
|
||||
# All ready tasks should have a valid name, and this can be computed for
|
||||
# some tasks, particularly multi-instance tasks that all have the same spec
|
||||
# but need different labels.
|
||||
# if spiff_task.state == TaskState.READY:
|
||||
# task.properties = ProcessInstanceService._process_properties(spiff_task, props)
|
||||
#
|
||||
# task.title = ProcessInstanceService.__calculate_title(spiff_task)
|
||||
|
||||
# if task.properties and "clear_data" in task.properties:
|
||||
# if task.form and task.properties["clear_data"] == "True":
|
||||
# for i in range(len(task.form.fields)):
|
||||
# task.data.pop(task.form.fields[i].id, None)
|
||||
|
||||
# # Pass help text through the Jinja parser
|
||||
# if task.form and task.form.fields:
|
||||
# for field in task.form.fields:
|
||||
# if field.properties:
|
||||
# for field_property in field.properties:
|
||||
# if field_property.id == "help":
|
||||
# jinja_text = JinjaService().get_content(
|
||||
# field_property.value, task.data
|
||||
# )
|
||||
# field_property.value = jinja_text
|
||||
|
||||
return task
|
||||
|
||||
# @staticmethod
|
||||
# def _process_properties(spiff_task, props):
|
||||
# """Runs all the property values through the Jinja2 processor to inject data."""
|
||||
# for k, v in props.items():
|
||||
# try:
|
||||
# props[k] = JinjaService.get_content(v, spiff_task.data)
|
||||
# except jinja2.exceptions.TemplateError as ue:
|
||||
# app.logger.error(
|
||||
# f"Failed to process task property {str(ue)}", exc_info=True
|
||||
# )
|
||||
# return props
|
||||
|
||||
@staticmethod
|
||||
def process_options(spiff_task: SpiffTask, field: EnumFormField) -> EnumFormField:
|
||||
"""Process_options."""
|
||||
if field.type != Task.FIELD_TYPE_ENUM:
|
||||
return field
|
||||
|
||||
if hasattr(field, "options") and len(field.options) > 1:
|
||||
return field
|
||||
elif not (
|
||||
field.has_property(Task.FIELD_PROP_VALUE_COLUMN)
|
||||
or field.has_property(Task.FIELD_PROP_LABEL_COLUMN)
|
||||
):
|
||||
raise ApiError.from_task(
|
||||
"invalid_enum",
|
||||
f"For enumerations, you must include options, or a way to generate options from"
|
||||
f" a spreadsheet or data set. Please set either a spreadsheet name or data name,"
|
||||
f" along with the value and label columns to use from these sources. Valid params"
|
||||
f" include: "
|
||||
f"{Task.FIELD_PROP_SPREADSHEET_NAME}, "
|
||||
f"{Task.FIELD_PROP_DATA_NAME}, "
|
||||
f"{Task.FIELD_PROP_VALUE_COLUMN}, "
|
||||
f"{Task.FIELD_PROP_LABEL_COLUMN}",
|
||||
task=spiff_task,
|
||||
)
|
||||
|
||||
if field.has_property(Task.FIELD_PROP_SPREADSHEET_NAME):
|
||||
# lookup_model = LookupService.get_lookup_model(spiff_task, field)
|
||||
# data = (
|
||||
# db.session.query(LookupDataModel)
|
||||
# .filter(LookupDataModel.lookup_file_model == lookup_model)
|
||||
# .all()
|
||||
# )
|
||||
# for d in data:
|
||||
# field.add_option(d.value, d.label)
|
||||
...
|
||||
elif field.has_property(Task.FIELD_PROP_DATA_NAME):
|
||||
field.options = ProcessInstanceService.get_options_from_task_data(
|
||||
spiff_task, field
|
||||
)
|
||||
|
||||
return field
|
||||
|
||||
@staticmethod
|
||||
def get_options_from_task_data(spiff_task: SpiffTask, field: EnumFormField) -> List:
|
||||
"""Get_options_from_task_data."""
|
||||
prop = field.get_property(Task.FIELD_PROP_DATA_NAME)
|
||||
if prop not in spiff_task.data:
|
||||
raise ApiError.from_task(
|
||||
"invalid_enum",
|
||||
f"For enumerations based on task data, task data must have "
|
||||
f"a property called {prop}",
|
||||
task=spiff_task,
|
||||
)
|
||||
# Get the enum options from the task data
|
||||
data_model = spiff_task.data[prop]
|
||||
value_column = field.get_property(Task.FIELD_PROP_VALUE_COLUMN)
|
||||
label_column = field.get_property(Task.FIELD_PROP_LABEL_COLUMN)
|
||||
items = data_model.items() if isinstance(data_model, dict) else data_model
|
||||
options: List[Any] = []
|
||||
for item in items:
|
||||
if value_column not in item:
|
||||
raise ApiError.from_task(
|
||||
"invalid_enum",
|
||||
f"The value column '{value_column}' does not exist for item {item}",
|
||||
task=spiff_task,
|
||||
)
|
||||
if label_column not in item:
|
||||
raise ApiError.from_task(
|
||||
"invalid_enum",
|
||||
f"The label column '{label_column}' does not exist for item {item}",
|
||||
task=spiff_task,
|
||||
)
|
||||
|
||||
# options.append(
|
||||
# Box(
|
||||
# {"id": item[value_column], "name": item[label_column], "data": item}
|
||||
# )
|
||||
# )
|
||||
return options
|
||||
|
||||
# @staticmethod
|
||||
# def _process_documentation(spiff_task):
|
||||
# """Runs the given documentation string through the Jinja2 processor to inject data
|
||||
# create loops, etc... - If a markdown file exists with the same name as the task id,
|
||||
# it will use that file instead of the documentation."""
|
||||
# documentation = (
|
||||
# spiff_task.task_spec.documentation
|
||||
# if hasattr(spiff_task.task_spec, "documentation")
|
||||
# else ""
|
||||
# )
|
||||
#
|
||||
# try:
|
||||
# doc_file_name = spiff_task.task_spec.name + ".md"
|
||||
#
|
||||
# workflow_id = WorkflowService.workflow_id_from_spiff_task(spiff_task)
|
||||
# workflow = (
|
||||
# db.session.query(WorkflowModel)
|
||||
# .filter(WorkflowModel.id == workflow_id)
|
||||
# .first()
|
||||
# )
|
||||
# spec_service = WorkflowSpecService()
|
||||
# data = SpecFileService.get_data(
|
||||
# spec_service.get_spec(workflow.workflow_spec_id), doc_file_name
|
||||
# )
|
||||
# raw_doc = data.decode("utf-8")
|
||||
# except ApiError:
|
||||
# raw_doc = documentation
|
||||
#
|
||||
# if not raw_doc:
|
||||
# return ""
|
||||
#
|
||||
# try:
|
||||
# return JinjaService.get_content(raw_doc, spiff_task.data)
|
||||
# except jinja2.exceptions.TemplateSyntaxError as tse:
|
||||
# lines = tse.source.splitlines()
|
||||
# error_line = ""
|
||||
# if len(lines) >= tse.lineno - 1:
|
||||
# error_line = tse.source.splitlines()[tse.lineno - 1]
|
||||
# raise ApiError.from_task(
|
||||
# code="template_error",
|
||||
# message="Jinja Template Error: %s" % str(tse),
|
||||
# task=spiff_task,
|
||||
# line_number=tse.lineno,
|
||||
# error_line=error_line,
|
||||
# )
|
||||
# except jinja2.exceptions.TemplateError as te:
|
||||
# # Figure out the line number in the template that caused the error.
|
||||
# cl, exc, tb = sys.exc_info()
|
||||
# line_number = None
|
||||
# error_line = None
|
||||
# for frame_summary in traceback.extract_tb(tb):
|
||||
# if frame_summary.filename == "<template>":
|
||||
# line_number = frame_summary.lineno
|
||||
# lines = documentation.splitlines()
|
||||
# error_line = ""
|
||||
# if len(lines) > line_number:
|
||||
# error_line = lines[line_number - 1]
|
||||
# raise ApiError.from_task(
|
||||
# code="template_error",
|
||||
# message="Jinja Template Error: %s" % str(te),
|
||||
# task=spiff_task,
|
||||
# line_number=line_number,
|
||||
# error_line=error_line,
|
||||
# )
|
||||
# except TypeError as te:
|
||||
# raise ApiError.from_task(
|
||||
# code="template_error",
|
||||
# message="Jinja Template Error: %s" % str(te),
|
||||
# task=spiff_task,
|
||||
# ) from te
|
||||
# except Exception as e:
|
||||
# # app.logger.error(str(e), exc_info=True)
|
||||
# ...
|
||||
|
@ -4,9 +4,9 @@ import shutil
|
||||
from datetime import datetime
|
||||
from typing import List
|
||||
from typing import Optional
|
||||
from typing import Union
|
||||
|
||||
from flask_bpmn.api.api_error import ApiError
|
||||
from flask_bpmn.models.db import db
|
||||
from lxml import etree # type: ignore
|
||||
from lxml.etree import _Element # type: ignore
|
||||
from lxml.etree import Element as EtreeElement
|
||||
@ -14,6 +14,13 @@ from SpiffWorkflow.bpmn.parser.ValidationException import ValidationException #
|
||||
|
||||
from spiffworkflow_backend.models.file import File
|
||||
from spiffworkflow_backend.models.file import FileType
|
||||
from spiffworkflow_backend.models.message_correlation_property import (
|
||||
MessageCorrelationPropertyModel,
|
||||
)
|
||||
from spiffworkflow_backend.models.message_model import MessageModel
|
||||
from spiffworkflow_backend.models.message_triggerable_process_model import (
|
||||
MessageTriggerableProcessModel,
|
||||
)
|
||||
from spiffworkflow_backend.models.process_model import ProcessModelInfo
|
||||
from spiffworkflow_backend.services.file_system_service import FileSystemService
|
||||
|
||||
@ -28,16 +35,16 @@ class SpecFileService(FileSystemService):
|
||||
|
||||
@staticmethod
|
||||
def get_files(
|
||||
workflow_spec: ProcessModelInfo,
|
||||
process_model_info: ProcessModelInfo,
|
||||
file_name: Optional[str] = None,
|
||||
include_libraries: bool = False,
|
||||
extension_filter: str = "",
|
||||
) -> List[File]:
|
||||
"""Return all files associated with a workflow specification."""
|
||||
path = SpecFileService.workflow_path(workflow_spec)
|
||||
path = SpecFileService.workflow_path(process_model_info)
|
||||
files = SpecFileService._get_files(path, file_name)
|
||||
if include_libraries:
|
||||
for lib_name in workflow_spec.libraries:
|
||||
for lib_name in process_model_info.libraries:
|
||||
lib_path = SpecFileService.library_path(lib_name)
|
||||
files.extend(SpecFileService._get_files(lib_path, file_name))
|
||||
|
||||
@ -50,37 +57,38 @@ class SpecFileService(FileSystemService):
|
||||
|
||||
@staticmethod
|
||||
def add_file(
|
||||
workflow_spec: ProcessModelInfo, file_name: str, binary_data: bytes
|
||||
process_model_info: ProcessModelInfo, file_name: str, binary_data: bytes
|
||||
) -> File:
|
||||
"""Add_file."""
|
||||
# Same as update
|
||||
return SpecFileService.update_file(workflow_spec, file_name, binary_data)
|
||||
return SpecFileService.update_file(process_model_info, file_name, binary_data)
|
||||
|
||||
@staticmethod
|
||||
def update_file(
|
||||
workflow_spec: ProcessModelInfo, file_name: str, binary_data: bytes
|
||||
process_model_info: ProcessModelInfo, file_name: str, binary_data: bytes
|
||||
) -> File:
|
||||
"""Update_file."""
|
||||
SpecFileService.assert_valid_file_name(file_name)
|
||||
file_path = SpecFileService.file_path(workflow_spec, file_name)
|
||||
file_path = SpecFileService.file_path(process_model_info, file_name)
|
||||
SpecFileService.write_file_data_to_system(file_path, binary_data)
|
||||
file = SpecFileService.to_file_object(file_name, file_path)
|
||||
if file_name == workflow_spec.primary_file_name:
|
||||
SpecFileService.set_primary_bpmn(workflow_spec, file_name, binary_data)
|
||||
elif workflow_spec.primary_file_name is None and file.type == str(
|
||||
if file_name == process_model_info.primary_file_name:
|
||||
SpecFileService.set_primary_bpmn(process_model_info, file_name, binary_data)
|
||||
elif process_model_info.primary_file_name is None and file.type == str(
|
||||
FileType.bpmn
|
||||
):
|
||||
# If no primary process exists, make this the primary process.
|
||||
SpecFileService.set_primary_bpmn(workflow_spec, file_name, binary_data)
|
||||
SpecFileService.set_primary_bpmn(process_model_info, file_name, binary_data)
|
||||
|
||||
return file
|
||||
|
||||
@staticmethod
|
||||
def get_data(workflow_spec: ProcessModelInfo, file_name: str) -> bytes:
|
||||
def get_data(process_model_info: ProcessModelInfo, file_name: str) -> bytes:
|
||||
"""Get_data."""
|
||||
file_path = SpecFileService.file_path(workflow_spec, file_name)
|
||||
file_path = SpecFileService.file_path(process_model_info, file_name)
|
||||
if not os.path.exists(file_path):
|
||||
# If the file isn't here, it may be in a library
|
||||
for lib in workflow_spec.libraries:
|
||||
for lib in process_model_info.libraries:
|
||||
file_path = SpecFileService.library_path(lib)
|
||||
file_path = os.path.join(file_path, file_name)
|
||||
if os.path.exists(file_path):
|
||||
@ -88,7 +96,7 @@ class SpecFileService(FileSystemService):
|
||||
if not os.path.exists(file_path):
|
||||
raise ApiError(
|
||||
"unknown_file",
|
||||
f"No file found with name {file_name} in {workflow_spec.display_name}",
|
||||
f"No file found with name {file_name} in {process_model_info.display_name}",
|
||||
)
|
||||
with open(file_path, "rb") as f_handle:
|
||||
spec_file_data = f_handle.read()
|
||||
@ -131,9 +139,9 @@ class SpecFileService(FileSystemService):
|
||||
|
||||
@staticmethod
|
||||
def set_primary_bpmn(
|
||||
workflow_spec: ProcessModelInfo,
|
||||
process_model_info: ProcessModelInfo,
|
||||
file_name: str,
|
||||
binary_data: Union[bytes, None] = None,
|
||||
binary_data: Optional[bytes] = None,
|
||||
) -> None:
|
||||
"""Set_primary_bpmn."""
|
||||
# If this is a BPMN file, extract the process id and determine whether it contains swim lanes.
|
||||
@ -141,12 +149,15 @@ class SpecFileService(FileSystemService):
|
||||
file_type = FileType[extension]
|
||||
if file_type == FileType.bpmn:
|
||||
if not binary_data:
|
||||
binary_data = SpecFileService.get_data(workflow_spec, file_name)
|
||||
binary_data = SpecFileService.get_data(process_model_info, file_name)
|
||||
try:
|
||||
bpmn: EtreeElement = etree.fromstring(binary_data)
|
||||
workflow_spec.primary_process_id = SpecFileService.get_process_id(bpmn)
|
||||
workflow_spec.primary_file_name = file_name
|
||||
workflow_spec.is_review = SpecFileService.has_swimlane(bpmn)
|
||||
process_model_info.primary_process_id = SpecFileService.get_process_id(
|
||||
bpmn
|
||||
)
|
||||
process_model_info.primary_file_name = file_name
|
||||
process_model_info.is_review = SpecFileService.has_swimlane(bpmn)
|
||||
SpecFileService.check_for_message_models(bpmn, process_model_info)
|
||||
|
||||
except etree.XMLSyntaxError as xse:
|
||||
raise ApiError(
|
||||
@ -214,3 +225,123 @@ class SpecFileService(FileSystemService):
|
||||
)
|
||||
|
||||
return str(process_elements[0].attrib["id"])
|
||||
|
||||
@staticmethod
|
||||
def check_for_message_models(
|
||||
et_root: _Element, process_model_info: ProcessModelInfo
|
||||
) -> None:
|
||||
"""Check_for_message_models."""
|
||||
for child in et_root:
|
||||
if child.tag.endswith("message"):
|
||||
message_model_identifier = child.attrib.get("id")
|
||||
message_name = child.attrib.get("name")
|
||||
if message_model_identifier is None:
|
||||
raise ValidationException(
|
||||
"Message identifier is missing from bpmn xml"
|
||||
)
|
||||
|
||||
message_model = MessageModel.query.filter_by(
|
||||
identifier=message_model_identifier
|
||||
).first()
|
||||
if message_model is None:
|
||||
message_model = MessageModel(
|
||||
identifier=message_model_identifier, name=message_name
|
||||
)
|
||||
db.session.add(message_model)
|
||||
db.session.commit()
|
||||
|
||||
for child in et_root:
|
||||
if child.tag.endswith("}process"):
|
||||
message_event_definitions = child.xpath(
|
||||
"//bpmn:startEvent/bpmn:messageEventDefinition",
|
||||
namespaces={"bpmn": "http://www.omg.org/spec/BPMN/20100524/MODEL"},
|
||||
)
|
||||
if message_event_definitions:
|
||||
message_event_definition = message_event_definitions[0]
|
||||
message_model_identifier = message_event_definition.attrib.get(
|
||||
"messageRef"
|
||||
)
|
||||
if message_model_identifier is None:
|
||||
raise ValidationException(
|
||||
"Could not find messageRef from message event definition: {message_event_definition}"
|
||||
)
|
||||
|
||||
message_model = MessageModel.query.filter_by(
|
||||
identifier=message_model_identifier
|
||||
).first()
|
||||
if message_model is None:
|
||||
raise ValidationException(
|
||||
f"Could not find message model with identifier '{message_model_identifier}'"
|
||||
f"specified by message event definition: {message_event_definition}"
|
||||
)
|
||||
|
||||
message_triggerable_process_model = (
|
||||
MessageTriggerableProcessModel.query.filter_by(
|
||||
message_model_id=message_model.id,
|
||||
).first()
|
||||
)
|
||||
|
||||
if message_triggerable_process_model is None:
|
||||
message_triggerable_process_model = MessageTriggerableProcessModel(
|
||||
message_model_id=message_model.id,
|
||||
process_model_identifier=process_model_info.id,
|
||||
process_group_identifier=process_model_info.process_group_id,
|
||||
)
|
||||
db.session.add(message_triggerable_process_model)
|
||||
db.session.commit()
|
||||
else:
|
||||
if (
|
||||
message_triggerable_process_model.process_model_identifier
|
||||
!= process_model_info.id
|
||||
or message_triggerable_process_model.process_group_identifier
|
||||
!= process_model_info.process_group_id
|
||||
):
|
||||
raise ValidationException(
|
||||
"Message model is already used to start process model"
|
||||
f"'{process_model_info.process_group_id}/{process_model_info.id}'"
|
||||
)
|
||||
|
||||
for child in et_root:
|
||||
if child.tag.endswith("correlationProperty"):
|
||||
correlation_identifier = child.attrib.get("id")
|
||||
if correlation_identifier is None:
|
||||
raise ValidationException(
|
||||
"Correlation identifier is missing from bpmn xml"
|
||||
)
|
||||
correlation_property_retrieval_expressions = child.xpath(
|
||||
"//bpmn:correlationPropertyRetrievalExpression",
|
||||
namespaces={"bpmn": "http://www.omg.org/spec/BPMN/20100524/MODEL"},
|
||||
)
|
||||
if not correlation_property_retrieval_expressions:
|
||||
raise ValidationException(
|
||||
"Correlation is missing correlation property retrieval expressions: {correlation_identifier}"
|
||||
)
|
||||
|
||||
for cpre in correlation_property_retrieval_expressions:
|
||||
message_model_identifier = cpre.attrib.get("messageRef")
|
||||
if message_model_identifier is None:
|
||||
raise ValidationException(
|
||||
f"Message identifier is missing from correlation property: {correlation_identifier}"
|
||||
)
|
||||
message_model = MessageModel.query.filter_by(
|
||||
identifier=message_model_identifier
|
||||
).first()
|
||||
if message_model is None:
|
||||
raise ValidationException(
|
||||
f"Could not find message model with identifier '{message_model_identifier}'"
|
||||
f"specified by correlation property: {cpre}"
|
||||
)
|
||||
|
||||
message_correlation_property = (
|
||||
MessageCorrelationPropertyModel.query.filter_by(
|
||||
identifier=correlation_identifier,
|
||||
message_model_id=message_model.id,
|
||||
).first()
|
||||
)
|
||||
if message_correlation_property is None:
|
||||
message_correlation_property = MessageCorrelationPropertyModel(
|
||||
identifier=correlation_identifier,
|
||||
message_model_id=message_model.id,
|
||||
)
|
||||
db.session.add(message_correlation_property)
|
||||
db.session.commit()
|
||||
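# For reference (condensed from the message_receiver.bpmn test file added later in
# this changeset), check_for_message_models scans the BPMN XML for elements shaped like:
#   <bpmn:message id="message_send" name="Message Send"/>
#   <bpmn:startEvent id="receive_message">
#     <bpmn:messageEventDefinition messageRef="message_send"/>
#   </bpmn:startEvent>
#   <bpmn:correlationProperty id="message_correlation_property_topica">
#     <bpmn:correlationPropertyRetrievalExpression messageRef="message_send">
#       <bpmn:formalExpression>topica</bpmn:formalExpression>
#     </bpmn:correlationPropertyRetrievalExpression>
#   </bpmn:correlationProperty>
# and records MessageModel, MessageTriggerableProcessModel, and
# MessageCorrelationPropertyModel rows accordingly.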
|
@ -68,6 +68,32 @@ class UserService:
|
||||
)
|
||||
)
|
||||
|
||||
def find_or_create_user(
|
||||
self,
|
||||
service: str,
|
||||
service_id: str,
|
||||
name: Optional[str] = None,
|
||||
username: Optional[str] = None,
|
||||
email: Optional[str] = None,
|
||||
) -> UserModel:
|
||||
"""Find_or_create_user."""
|
||||
user_model: UserModel
|
||||
try:
|
||||
user_model = self.create_user(
|
||||
service=service,
|
||||
service_id=service_id,
|
||||
name=name,
|
||||
username=username,
|
||||
email=email,
|
||||
)
|
||||
except ApiError:
|
||||
user_model = (
|
||||
UserModel.query.filter(UserModel.service == service)
|
||||
.filter(UserModel.service_id == service_id)
|
||||
.first()
|
||||
)
|
||||
return user_model
|
||||
|
||||
# Returns true if the current user is logged in.
|
||||
@staticmethod
|
||||
def has_user() -> bool:
|
||||
|
tests/data/message_receiver/message_receiver.bpmn (new file, 99 lines)
@ -0,0 +1,99 @@
|
||||
<?xml version="1.0" encoding="UTF-8"?>
|
||||
<bpmn:definitions xmlns:bpmn="http://www.omg.org/spec/BPMN/20100524/MODEL" xmlns:bpmndi="http://www.omg.org/spec/BPMN/20100524/DI" xmlns:dc="http://www.omg.org/spec/DD/20100524/DC" xmlns:di="http://www.omg.org/spec/DD/20100524/DI" xmlns:spiffworkflow="http://spiffworkflow.org/bpmn/schema/1.0/core" id="Definitions_96f6665" targetNamespace="http://bpmn.io/schema/bpmn" exporter="Camunda Modeler" exporterVersion="3.0.0-dev">
|
||||
<bpmn:collaboration id="Collaboration_0oye1os">
|
||||
<bpmn:participant id="message_receiver" name="Message Receiver" processRef="message_receiver_process" />
|
||||
<bpmn:participant id="message_sender" name="Message Sender" />
|
||||
<bpmn:messageFlow id="message_send_flow" name="Message Send Flow" sourceRef="message_sender" targetRef="receive_message" />
|
||||
<bpmn:messageFlow id="Flow_0ds946g" sourceRef="send_message_response" targetRef="message_sender" />
|
||||
<bpmn:correlationKey name="message_correlation_key">
|
||||
<bpmn:correlationPropertyRef>message_correlation_property_topica</bpmn:correlationPropertyRef>
|
||||
<bpmn:correlationPropertyRef>message_correlation_property_topicb</bpmn:correlationPropertyRef>
|
||||
</bpmn:correlationKey>
|
||||
</bpmn:collaboration>
|
||||
<bpmn:correlationProperty id="message_correlation_property_topica" name="Message Correlation Property TopicA">
|
||||
<bpmn:correlationPropertyRetrievalExpression messageRef="message_send">
|
||||
<bpmn:formalExpression>topica</bpmn:formalExpression>
|
||||
</bpmn:correlationPropertyRetrievalExpression>
|
||||
<bpmn:correlationPropertyRetrievalExpression messageRef="message_response">
|
||||
<bpmn:formalExpression>the_payload.topica</bpmn:formalExpression>
|
||||
</bpmn:correlationPropertyRetrievalExpression>
|
||||
</bpmn:correlationProperty>
|
||||
<bpmn:correlationProperty id="message_correlation_property_topicb" name="Message Correlation Property TopicB">
|
||||
<bpmn:correlationPropertyRetrievalExpression messageRef="message_send">
|
||||
<bpmn:formalExpression>topicb</bpmn:formalExpression>
|
||||
</bpmn:correlationPropertyRetrievalExpression>
|
||||
<bpmn:correlationPropertyRetrievalExpression messageRef="message_response">
|
||||
<bpmn:formalExpression>the_payload.topicb</bpmn:formalExpression>
|
||||
</bpmn:correlationPropertyRetrievalExpression>
|
||||
</bpmn:correlationProperty>
|
||||
<bpmn:message id="message_send" name="Message Send">
|
||||
<bpmn:extensionElements>
|
||||
<spiffworkflow:messageVariable>the_payload</spiffworkflow:messageVariable>
|
||||
</bpmn:extensionElements>
|
||||
</bpmn:message>
|
||||
<bpmn:message id="message_response" name="Message Response">
|
||||
<bpmn:extensionElements>
|
||||
<spiffworkflow:messagePayload>{ "the_payload": {
|
||||
"topica": the_payload.topica,
|
||||
"topicb": the_payload.topicb,
|
||||
}}</spiffworkflow:messagePayload>
|
||||
</bpmn:extensionElements>
|
||||
</bpmn:message>
|
||||
<bpmn:process id="message_receiver_process" name="Message Receiver Process" isExecutable="true">
|
||||
<bpmn:sequenceFlow id="Flow_0fruoax" sourceRef="receive_message" targetRef="send_message_response" />
|
||||
<bpmn:sequenceFlow id="Flow_11r9uiw" sourceRef="send_message_response" targetRef="Event_0q5otqd" />
|
||||
<bpmn:endEvent id="Event_0q5otqd">
|
||||
<bpmn:incoming>Flow_11r9uiw</bpmn:incoming>
|
||||
</bpmn:endEvent>
|
||||
<bpmn:sendTask id="send_message_response" name="Send Message Reponse" messageRef="message_response">
|
||||
<bpmn:incoming>Flow_0fruoax</bpmn:incoming>
|
||||
<bpmn:outgoing>Flow_11r9uiw</bpmn:outgoing>
|
||||
</bpmn:sendTask>
|
||||
<bpmn:startEvent id="receive_message" name="Receive Message">
|
||||
<bpmn:outgoing>Flow_0fruoax</bpmn:outgoing>
|
||||
<bpmn:messageEventDefinition id="MessageEventDefinition_08u7ksn" messageRef="message_send" />
|
||||
</bpmn:startEvent>
|
||||
</bpmn:process>
|
||||
<bpmndi:BPMNDiagram id="BPMNDiagram_1">
|
||||
<bpmndi:BPMNPlane id="BPMNPlane_1" bpmnElement="Collaboration_0oye1os">
|
||||
<bpmndi:BPMNShape id="Participant_0mr0gg1_di" bpmnElement="message_receiver" isHorizontal="true">
|
||||
<dc:Bounds x="120" y="350" width="480" height="230" />
|
||||
</bpmndi:BPMNShape>
|
||||
<bpmndi:BPMNEdge id="Flow_11r9uiw_di" bpmnElement="Flow_11r9uiw">
|
||||
<di:waypoint x="480" y="480" />
|
||||
<di:waypoint x="512" y="480" />
|
||||
</bpmndi:BPMNEdge>
|
||||
<bpmndi:BPMNEdge id="Flow_0fruoax_di" bpmnElement="Flow_0fruoax">
|
||||
<di:waypoint x="208" y="480" />
|
||||
<di:waypoint x="380" y="480" />
|
||||
</bpmndi:BPMNEdge>
|
||||
<bpmndi:BPMNShape id="Event_0q5otqd_di" bpmnElement="Event_0q5otqd">
|
||||
<dc:Bounds x="512" y="462" width="36" height="36" />
|
||||
</bpmndi:BPMNShape>
|
||||
<bpmndi:BPMNShape id="Activity_148b9w9_di" bpmnElement="send_message_response">
|
||||
<dc:Bounds x="380" y="440" width="100" height="80" />
|
||||
</bpmndi:BPMNShape>
|
||||
<bpmndi:BPMNShape id="Event_1rgz6f0_di" bpmnElement="receive_message">
|
||||
<dc:Bounds x="172" y="462" width="36" height="36" />
|
||||
<bpmndi:BPMNLabel>
|
||||
<dc:Bounds x="149" y="505" width="88" height="14" />
|
||||
</bpmndi:BPMNLabel>
|
||||
</bpmndi:BPMNShape>
|
||||
<bpmndi:BPMNShape id="Participant_0xvqrmk_di" bpmnElement="message_sender" isHorizontal="true">
|
||||
<dc:Bounds x="130" y="250" width="360" height="60" />
|
||||
<bpmndi:BPMNLabel />
|
||||
</bpmndi:BPMNShape>
|
||||
<bpmndi:BPMNEdge id="Flow_1ueajoz_di" bpmnElement="message_send_flow">
|
||||
<di:waypoint x="190" y="310" />
|
||||
<di:waypoint x="190" y="462" />
|
||||
<bpmndi:BPMNLabel>
|
||||
<dc:Bounds x="193" y="408" width="74" height="27" />
|
||||
</bpmndi:BPMNLabel>
|
||||
</bpmndi:BPMNEdge>
|
||||
<bpmndi:BPMNEdge id="Flow_0ds946g_di" bpmnElement="Flow_0ds946g">
|
||||
<di:waypoint x="430" y="440" />
|
||||
<di:waypoint x="430" y="310" />
|
||||
</bpmndi:BPMNEdge>
|
||||
</bpmndi:BPMNPlane>
|
||||
</bpmndi:BPMNDiagram>
|
||||
</bpmn:definitions>
|
tests/data/message_sender/message_sender.bpmn (new file, 136 lines)
@ -0,0 +1,136 @@
|
||||
<?xml version="1.0" encoding="UTF-8"?>
|
||||
<bpmn:definitions xmlns:bpmn="http://www.omg.org/spec/BPMN/20100524/MODEL" xmlns:bpmndi="http://www.omg.org/spec/BPMN/20100524/DI" xmlns:dc="http://www.omg.org/spec/DD/20100524/DC" xmlns:di="http://www.omg.org/spec/DD/20100524/DI" xmlns:spiffworkflow="http://spiffworkflow.org/bpmn/schema/1.0/core" id="Definitions_96f6665" targetNamespace="http://bpmn.io/schema/bpmn" exporter="Camunda Modeler" exporterVersion="3.0.0-dev">
|
||||
<bpmn:collaboration id="Collaboration_0oye1os">
|
||||
<bpmn:participant id="message_initiator" name="Message Initiator" processRef="message_send_process" />
|
||||
<bpmn:participant id="message-receiver" name="Message Receiver" />
|
||||
<bpmn:messageFlow id="message_send_flow" name="Message Send Flow" sourceRef="send_message" targetRef="message-receiver" />
|
||||
<bpmn:messageFlow id="message_response_flow" name="Message Response Flow" sourceRef="message-receiver" targetRef="receive_message_response" />
|
||||
<bpmn:correlationKey name="message_correlation_key">
|
||||
<bpmn:correlationPropertyRef>message_correlation_property_topica</bpmn:correlationPropertyRef>
|
||||
<bpmn:correlationPropertyRef>message_correlation_property_topicb</bpmn:correlationPropertyRef>
|
||||
</bpmn:correlationKey>
|
||||
</bpmn:collaboration>
|
||||
<bpmn:correlationProperty id="message_correlation_property_topica" name="Message Correlation Property TopicA">
|
||||
<bpmn:correlationPropertyRetrievalExpression messageRef="message_send">
|
||||
<bpmn:formalExpression>topica</bpmn:formalExpression>
|
    </bpmn:correlationPropertyRetrievalExpression>
    <bpmn:correlationPropertyRetrievalExpression messageRef="message_response">
      <bpmn:formalExpression>the_payload.topica</bpmn:formalExpression>
    </bpmn:correlationPropertyRetrievalExpression>
  </bpmn:correlationProperty>
  <bpmn:correlationProperty id="message_correlation_property_topicb" name="Message Correlation Property TopicB">
    <bpmn:correlationPropertyRetrievalExpression messageRef="message_send">
      <bpmn:formalExpression>topicb</bpmn:formalExpression>
    </bpmn:correlationPropertyRetrievalExpression>
    <bpmn:correlationPropertyRetrievalExpression messageRef="message_response">
      <bpmn:formalExpression>the_payload.topicb</bpmn:formalExpression>
    </bpmn:correlationPropertyRetrievalExpression>
  </bpmn:correlationProperty>
  <bpmn:process id="message_send_process" name="Message Send Process" isExecutable="true">
    <bpmn:startEvent id="StartEvent_1">
      <bpmn:outgoing>Flow_10conab</bpmn:outgoing>
    </bpmn:startEvent>
    <bpmn:sequenceFlow id="Flow_037vpjk" sourceRef="send_message" targetRef="receive_message_response" />
    <bpmn:sequenceFlow id="Flow_1qgz6p0" sourceRef="receive_message_response" targetRef="Event_0kndoyu" />
    <bpmn:sequenceFlow id="Flow_10conab" sourceRef="StartEvent_1" targetRef="set_topic" />
    <bpmn:endEvent id="Event_0kndoyu">
      <bpmn:incoming>Flow_1qgz6p0</bpmn:incoming>
    </bpmn:endEvent>
    <bpmn:intermediateCatchEvent id="receive_message_response" name="Receive Message Response">
      <bpmn:incoming>Flow_037vpjk</bpmn:incoming>
      <bpmn:outgoing>Flow_1qgz6p0</bpmn:outgoing>
      <bpmn:messageEventDefinition id="MessageEventDefinition_1l3n0zr" messageRef="message_response" />
    </bpmn:intermediateCatchEvent>
    <bpmn:sendTask id="send_message" name="Send Message" messageRef="message_send">
      <bpmn:extensionElements>
        <spiffworkflow:preScript>the_topic = "first_conversation" </spiffworkflow:preScript>
      </bpmn:extensionElements>
      <bpmn:incoming>Flow_1ihr88m</bpmn:incoming>
      <bpmn:outgoing>Flow_037vpjk</bpmn:outgoing>
    </bpmn:sendTask>
    <bpmn:sequenceFlow id="Flow_1ihr88m" sourceRef="set_topic" targetRef="send_message" />
    <bpmn:scriptTask id="set_topic" name="Set Topic" scriptFormat="python">
      <bpmn:incoming>Flow_10conab</bpmn:incoming>
      <bpmn:outgoing>Flow_1ihr88m</bpmn:outgoing>
      <bpmn:script>import time
timestamp = time.time()
the_topica = f"first_conversation_a_{timestamp}"
the_topicb = f"first_conversation_b_{timestamp}"
del time</bpmn:script>
    </bpmn:scriptTask>
  </bpmn:process>
  <bpmn:message id="message_send" name="Message Send">
    <bpmn:extensionElements>
      <spiffworkflow:messagePayload>{
"topica": the_topica,
"topicb": the_topicb,
}</spiffworkflow:messagePayload>
    </bpmn:extensionElements>
  </bpmn:message>
  <bpmn:message id="message_response" name="Message Response">
    <bpmn:extensionElements>
      <spiffworkflow:messageVariable>the_payload</spiffworkflow:messageVariable>
    </bpmn:extensionElements>
  </bpmn:message>
  <bpmndi:BPMNDiagram id="BPMNDiagram_1">
    <bpmndi:BPMNPlane id="BPMNPlane_1" bpmnElement="Collaboration_0oye1os">
      <bpmndi:BPMNShape id="Participant_0bjh770_di" bpmnElement="message_initiator" isHorizontal="true">
        <dc:Bounds x="120" y="52" width="600" height="338" />
        <bpmndi:BPMNLabel />
      </bpmndi:BPMNShape>
      <bpmndi:BPMNEdge id="Flow_1ihr88m_di" bpmnElement="Flow_1ihr88m">
        <di:waypoint x="350" y="177" />
        <di:waypoint x="390" y="177" />
      </bpmndi:BPMNEdge>
      <bpmndi:BPMNEdge id="Flow_10conab_di" bpmnElement="Flow_10conab">
        <di:waypoint x="215" y="177" />
        <di:waypoint x="250" y="177" />
      </bpmndi:BPMNEdge>
      <bpmndi:BPMNEdge id="Flow_1qgz6p0_di" bpmnElement="Flow_1qgz6p0">
        <di:waypoint x="568" y="177" />
        <di:waypoint x="622" y="177" />
      </bpmndi:BPMNEdge>
      <bpmndi:BPMNEdge id="Flow_037vpjk_di" bpmnElement="Flow_037vpjk">
        <di:waypoint x="490" y="177" />
        <di:waypoint x="532" y="177" />
      </bpmndi:BPMNEdge>
      <bpmndi:BPMNShape id="_BPMNShape_StartEvent_2" bpmnElement="StartEvent_1">
        <dc:Bounds x="179" y="159" width="36" height="36" />
      </bpmndi:BPMNShape>
      <bpmndi:BPMNShape id="Event_0kndoyu_di" bpmnElement="Event_0kndoyu">
        <dc:Bounds x="622" y="159" width="36" height="36" />
      </bpmndi:BPMNShape>
      <bpmndi:BPMNShape id="Event_0yt48xb_di" bpmnElement="receive_message_response">
        <dc:Bounds x="532" y="159" width="36" height="36" />
        <bpmndi:BPMNLabel>
          <dc:Bounds x="507" y="129" width="88" height="27" />
        </bpmndi:BPMNLabel>
      </bpmndi:BPMNShape>
      <bpmndi:BPMNShape id="Activity_0vm33bu_di" bpmnElement="send_message">
        <dc:Bounds x="390" y="137" width="100" height="80" />
        <bpmndi:BPMNLabel />
      </bpmndi:BPMNShape>
      <bpmndi:BPMNShape id="Activity_1t3nq1h_di" bpmnElement="set_topic">
        <dc:Bounds x="250" y="137" width="100" height="80" />
        <bpmndi:BPMNLabel />
      </bpmndi:BPMNShape>
      <bpmndi:BPMNShape id="Participant_158b3ei_di" bpmnElement="message-receiver" isHorizontal="true">
        <dc:Bounds x="120" y="350" width="600" height="60" />
      </bpmndi:BPMNShape>
      <bpmndi:BPMNEdge id="Flow_1ueajoz_di" bpmnElement="message_send_flow">
        <di:waypoint x="410" y="217" />
        <di:waypoint x="410" y="350" />
        <bpmndi:BPMNLabel>
          <dc:Bounds x="413" y="302" width="74" height="27" />
        </bpmndi:BPMNLabel>
      </bpmndi:BPMNEdge>
      <bpmndi:BPMNEdge id="Flow_1n96n67_di" bpmnElement="message_response_flow">
        <di:waypoint x="550" y="350" />
        <di:waypoint x="550" y="195" />
        <bpmndi:BPMNLabel>
          <dc:Bounds x="552" y="294" width="76" height="27" />
        </bpmndi:BPMNLabel>
      </bpmndi:BPMNEdge>
    </bpmndi:BPMNPlane>
  </bpmndi:BPMNDiagram>
</bpmn:definitions>
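The two correlation properties above are what let both sides of this collaboration agree they are in the same conversation: on message_send the keys are read straight from topica/topicb, while on message_response the payload is first bound to the_payload (via the spiffworkflow:messageVariable extension) and the keys are read as the_payload.topica/the_payload.topicb. Below is a purely illustrative Python sketch of that evaluation, not SpiffWorkflow's actual expression engine; the timestamp value is made up.

# Illustrative only: the BPMN retrieval expressions above yield the same
# correlation keys on the send side and on the receive side.
from types import SimpleNamespace

# Values the "Set Topic" script task would produce (timestamp made up here).
the_topica = "first_conversation_a_1660000000.0"
the_topicb = "first_conversation_b_1660000000.0"

# message_send side: the payload exposes topica/topicb directly.
send_scope = {"topica": the_topica, "topicb": the_topicb}
# message_response side: the payload is bound to the_payload first.
receive_scope = {"the_payload": SimpleNamespace(topica=the_topica, topicb=the_topicb)}

expressions = [
    ("message_correlation_property_topica", "topica", "the_payload.topica"),
    ("message_correlation_property_topicb", "topicb", "the_payload.topicb"),
]
for property_id, send_expression, receive_expression in expressions:
    sent_key = eval(send_expression, {}, send_scope)  # noqa: S307 - sketch only
    received_key = eval(receive_expression, {}, receive_scope)  # noqa: S307
    assert sent_key == received_key, property_id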
@ -1,7 +1,12 @@
"""Base_test."""
import time

from flask.app import Flask
from flask_bpmn.api.api_error import ApiError
from flask_bpmn.models.db import db

from spiffworkflow_backend.models.process_instance import ProcessInstanceModel
from spiffworkflow_backend.models.process_model import ProcessModelInfo
from spiffworkflow_backend.models.user import UserModel
from spiffworkflow_backend.services.user_service import UserService

@ -49,3 +54,22 @@ class BaseTest:
    # username, password
    # )
    # return public_access_token

    def create_process_instance_from_process_model(
        self, process_model: ProcessModelInfo, status: str
    ) -> ProcessInstanceModel:
        """Create_process_instance_from_process_model."""
        user = self.find_or_create_user()
        current_time = round(time.time())
        process_instance = ProcessInstanceModel(
            status=status,
            process_initiator=user,
            process_model_identifier=process_model.id,
            process_group_identifier=process_model.process_group_id,
            updated_at_in_seconds=round(time.time()),
            start_in_seconds=current_time - (3600 * 1),
            end_in_seconds=current_time - (3600 * 1 - 20),
        )
        db.session.add(process_instance)
        db.session.commit()
        return process_instance
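A quick note on the backdated timestamps in this helper: the instance is recorded as having started an hour before the test runs and ended 20 seconds after that start, presumably just so it looks like a short run that happened in the past. A self-contained sketch of the arithmetic, with variable names mirroring the helper above:

import time

current_time = round(time.time())
start_in_seconds = current_time - (3600 * 1)      # one hour ago
end_in_seconds = current_time - (3600 * 1 - 20)   # 20 seconds after that start
assert end_in_seconds - start_in_seconds == 20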
@ -66,7 +66,7 @@ class ExampleDataLoader:
            file = open(file_path, "rb")
            data = file.read()
            SpecFileService.add_file(
                workflow_spec=spec, file_name=filename, binary_data=data
                process_model_info=spec, file_name=filename, binary_data=data
            )
            if is_primary:
                SpecFileService.set_primary_bpmn(spec, filename, data)
162
tests/spiffworkflow_backend/unit/test_message_instance.py
Normal file
@ -0,0 +1,162 @@
"""Test_message_instance."""
import pytest
from flask import Flask
from flask_bpmn.models.db import db
from tests.spiffworkflow_backend.helpers.base_test import BaseTest
from tests.spiffworkflow_backend.helpers.test_data import load_test_spec

from spiffworkflow_backend.models.message_instance import MessageInstanceModel
from spiffworkflow_backend.models.message_model import MessageModel


class TestMessageInstance(BaseTest):
    """TestMessageInstance."""

    def test_can_create_message_instance(
        self, app: Flask, with_db_and_bpmn_file_cleanup: None
    ) -> None:
        """Test_can_create_message_instance."""
        message_model_identifier = "message_model_one"
        message_model = self.create_message_model(message_model_identifier)
        process_model = load_test_spec("hello_world")
        process_instance = self.create_process_instance_from_process_model(
            process_model, "waiting"
        )

        queued_message = MessageInstanceModel(
            process_instance_id=process_instance.id,
            message_type="send",
            message_model_id=message_model.id,
        )
        db.session.add(queued_message)
        db.session.commit()

        assert queued_message.status == "ready"
        assert queued_message.failure_cause is None

        queued_message_from_query = MessageInstanceModel.query.filter_by(
            id=queued_message.id
        ).first()
        assert queued_message_from_query is not None

    def test_cannot_set_invalid_status(
        self, app: Flask, with_db_and_bpmn_file_cleanup: None
    ) -> None:
        """Test_cannot_set_invalid_status."""
        message_model_identifier = "message_model_one"
        message_model = self.create_message_model(message_model_identifier)
        process_model = load_test_spec("hello_world")
        process_instance = self.create_process_instance_from_process_model(
            process_model, "waiting"
        )

        with pytest.raises(ValueError) as exception:
            MessageInstanceModel(
                process_instance_id=process_instance.id,
                message_type="send",
                message_model_id=message_model.id,
                status="BAD_STATUS",
            )
        assert (
            str(exception.value) == "MessageInstanceModel: invalid status: BAD_STATUS"
        )

        queued_message = MessageInstanceModel(
            process_instance_id=process_instance.id,
            message_type="send",
            message_model_id=message_model.id,
        )
        db.session.add(queued_message)
        db.session.commit()

        with pytest.raises(ValueError) as exception:
            queued_message.status = "BAD_STATUS"
        assert (
            str(exception.value) == "MessageInstanceModel: invalid status: BAD_STATUS"
        )

    def test_cannot_set_invalid_message_type(
        self, app: Flask, with_db_and_bpmn_file_cleanup: None
    ) -> None:
        """Test_cannot_set_invalid_message_type."""
        message_model_identifier = "message_model_one"
        message_model = self.create_message_model(message_model_identifier)
        process_model = load_test_spec("hello_world")
        process_instance = self.create_process_instance_from_process_model(
            process_model, "waiting"
        )

        with pytest.raises(ValueError) as exception:
            MessageInstanceModel(
                process_instance_id=process_instance.id,
                message_type="BAD_MESSAGE_TYPE",
                message_model_id=message_model.id,
            )
        assert (
            str(exception.value)
            == "MessageInstanceModel: invalid message_type: BAD_MESSAGE_TYPE"
        )

        queued_message = MessageInstanceModel(
            process_instance_id=process_instance.id,
            message_type="send",
            message_model_id=message_model.id,
        )
        db.session.add(queued_message)
        db.session.commit()

        with pytest.raises(ValueError) as exception:
            queued_message.message_type = "BAD_MESSAGE_TYPE"
        assert (
            str(exception.value)
            == "MessageInstanceModel: invalid message_type: BAD_MESSAGE_TYPE"
        )

    def test_force_failure_cause_if_status_is_failure(
        self, app: Flask, with_db_and_bpmn_file_cleanup: None
    ) -> None:
        """Test_force_failure_cause_if_status_is_failure."""
        message_model_identifier = "message_model_one"
        message_model = self.create_message_model(message_model_identifier)
        process_model = load_test_spec("hello_world")
        process_instance = self.create_process_instance_from_process_model(
            process_model, "waiting"
        )

        queued_message = MessageInstanceModel(
            process_instance_id=process_instance.id,
            message_type="send",
            message_model_id=message_model.id,
            status="failed",
        )
        db.session.add(queued_message)
        with pytest.raises(ValueError) as exception:
            db.session.commit()
        assert (
            str(exception.value)
            == "MessageInstanceModel: failure_cause must be set if status is failed"
        )
        assert queued_message.id is None
        db.session.remove()

        queued_message = MessageInstanceModel(
            process_instance_id=process_instance.id,
            message_type="send",
            message_model_id=message_model.id,
        )
        db.session.add(queued_message)
        db.session.commit()

        queued_message.status = "failed"
        queued_message.failure_cause = "THIS TEST FAILURE"
        db.session.add(queued_message)
        db.session.commit()
        assert queued_message.id is not None
        assert queued_message.failure_cause == "THIS TEST FAILURE"

    def create_message_model(self, message_model_identifier: str) -> MessageModel:
        """Create_message_model."""
        message_model = MessageModel(identifier=message_model_identifier)
        db.session.add(message_model)
        db.session.commit()
        return message_model
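The tests above rely on two distinct failure modes: invalid status and message_type values raise ValueError as soon as the attribute is assigned, while a missing failure_cause is only caught when the session commits. The real model lives in spiffworkflow_backend.models.message_instance and is not shown in this diff; what follows is only a hedged sketch of one way SQLAlchemy can produce exactly that behavior (validates for per-attribute checks, a before_flush listener for the cross-field rule). The allowed value sets are guesses based on the strings the tests use.

# Hypothetical stand-in for MessageInstanceModel -- NOT the real model.
from sqlalchemy import Column, Integer, String, create_engine, event
from sqlalchemy.orm import Session, declarative_base, validates

Base = declarative_base()


class MessageInstanceSketch(Base):
    __tablename__ = "message_instance_sketch"
    id = Column(Integer, primary_key=True)
    message_type = Column(String(20), nullable=False)
    status = Column(String(20), nullable=False, default="ready")
    failure_cause = Column(String(255))

    @validates("status")
    def validate_status(self, key: str, value: str) -> str:
        # Assumed value set; the tests only prove "ready", "completed", "failed".
        if value not in ("ready", "running", "completed", "failed"):
            raise ValueError(f"MessageInstanceModel: invalid status: {value}")
        return value

    @validates("message_type")
    def validate_message_type(self, key: str, value: str) -> str:
        # Assumed value set; the tests only prove "send".
        if value not in ("send", "receive"):
            raise ValueError(f"MessageInstanceModel: invalid message_type: {value}")
        return value


@event.listens_for(Session, "before_flush")
def require_failure_cause(session, flush_context, instances) -> None:
    """Reject failed instances that do not explain why they failed."""
    for new_object in session.new:
        if isinstance(new_object, MessageInstanceSketch):
            if new_object.status == "failed" and new_object.failure_cause is None:
                raise ValueError(
                    "MessageInstanceModel: failure_cause must be set if status is failed"
                )


if __name__ == "__main__":
    engine = create_engine("sqlite://")
    Base.metadata.create_all(engine)
    with Session(engine) as session:
        session.add(MessageInstanceSketch(message_type="send", status="failed"))
        try:
            session.commit()
        except ValueError as error:
            print(error)  # failure_cause must be set if status is failed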
109
tests/spiffworkflow_backend/unit/test_message_service.py
Normal file
@ -0,0 +1,109 @@
"""Test_message_service."""
from flask import Flask
from tests.spiffworkflow_backend.helpers.base_test import BaseTest
from tests.spiffworkflow_backend.helpers.test_data import load_test_spec

from spiffworkflow_backend.models.message_correlation import MessageCorrelationModel
from spiffworkflow_backend.models.message_correlation_message_instance import (
    MessageCorrelationMessageInstanceModel,
)
from spiffworkflow_backend.models.message_instance import MessageInstanceModel
from spiffworkflow_backend.models.process_instance import ProcessInstanceModel
from spiffworkflow_backend.services.message_service import MessageService
from spiffworkflow_backend.services.process_instance_processor import (
    ProcessInstanceProcessor,
)
from spiffworkflow_backend.services.process_instance_service import (
    ProcessInstanceService,
)
from spiffworkflow_backend.services.user_service import UserService


class TestMessageService(BaseTest):
    """TestMessageService."""

    def test_can_send_message_to_waiting_message(
        self, app: Flask, with_db_and_bpmn_file_cleanup: None
    ) -> None:
        """Test_can_send_message_to_waiting_message."""
        process_model_sender = load_test_spec("message_sender")
        load_test_spec("message_receiver")
        system_user = UserService().find_or_create_user(
            service="internal", service_id="system_user"
        )

        process_instance_sender = ProcessInstanceService.create_process_instance(
            process_model_sender.id,
            system_user,
            process_group_identifier=process_model_sender.process_group_id,
        )
        processor_sender = ProcessInstanceProcessor(process_instance_sender)
        processor_sender.do_engine_steps()
        processor_sender.save()

        message_instance_result = MessageInstanceModel.query.all()
        assert len(message_instance_result) == 2
        # ensure both message instances are for the same process instance
        # it will be send_message and receive_message_response
        assert (
            message_instance_result[0].process_instance_id
            == message_instance_result[1].process_instance_id
        )

        message_instance_sender = message_instance_result[0]
        assert message_instance_sender.process_instance_id == process_instance_sender.id
        message_correlations = MessageCorrelationModel.query.all()
        assert len(message_correlations) == 2
        assert message_correlations[0].process_instance_id == process_instance_sender.id
        message_correlations_message_instances = (
            MessageCorrelationMessageInstanceModel.query.all()
        )
        assert len(message_correlations_message_instances) == 4
        assert (
            message_correlations_message_instances[0].message_instance_id
            == message_instance_sender.id
        )
        assert (
            message_correlations_message_instances[1].message_instance_id
            == message_instance_sender.id
        )
        assert (
            message_correlations_message_instances[2].message_instance_id
            == message_instance_result[1].id
        )
        assert (
            message_correlations_message_instances[3].message_instance_id
            == message_instance_result[1].id
        )

        # process first message
        MessageService().process_message_instances()
        assert message_instance_sender.status == "completed"

        process_instance_result = ProcessInstanceModel.query.all()

        assert len(process_instance_result) == 2
        process_instance_receiver = process_instance_result[1]

        # just make sure it's a different process instance
        assert process_instance_receiver.id != process_instance_sender.id
        assert process_instance_receiver.status == "complete"

        message_instance_result = MessageInstanceModel.query.all()
        assert len(message_instance_result) == 3
        message_instance_receiver = message_instance_result[1]
        assert message_instance_receiver.id != message_instance_sender.id
        assert message_instance_receiver.status == "ready"

        # process second message
        MessageService().process_message_instances()

        message_instance_result = MessageInstanceModel.query.all()
        assert len(message_instance_result) == 3
        for message_instance in message_instance_result:
            assert message_instance.status == "completed"

        process_instance_result = ProcessInstanceModel.query.all()
        assert len(process_instance_result) == 2
        for process_instance in process_instance_result:
            assert process_instance.status == "complete"
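For orientation, MessageService.process_message_instances() (called twice above) is the queue-draining step: judging by the assertions, each pass roughly takes the ready send instances, matches them against waiting receivers on their correlation keys, delivers the payload, and marks the matched instances completed, which is why it takes one pass to kick off the receiver process and a second pass to route its response back to the waiting sender. The service itself is in spiffworkflow_backend.services.message_service and is not shown in this diff; the following is only a toy, in-memory sketch of that matching idea, not the real implementation.

# Toy, in-memory sketch of send/receive matching -- not the real MessageService.
from typing import Optional


def correlates(send_keys: dict, receive_keys: dict) -> bool:
    """Two instances correlate when they agree on every shared correlation key."""
    shared = set(send_keys) & set(receive_keys)
    return bool(shared) and all(send_keys[key] == receive_keys[key] for key in shared)


def process_message_instances(queue: list) -> None:
    """One pass: deliver each ready send to the first matching ready receive."""
    for send in [m for m in queue if m["type"] == "send" and m["status"] == "ready"]:
        receive: Optional[dict] = next(
            (
                m
                for m in queue
                if m["type"] == "receive"
                and m["status"] == "ready"
                and correlates(send["keys"], m["keys"])
            ),
            None,
        )
        if receive is None:
            continue  # nobody is waiting yet; try again on a later pass
        receive["payload"] = send["payload"]
        send["status"] = receive["status"] = "completed"


queue = [
    {"type": "send", "status": "ready", "keys": {"topica": "a1"}, "payload": {"x": 1}},
    {"type": "receive", "status": "ready", "keys": {"topica": "a1"}, "payload": None},
]
process_message_instances(queue)
assert all(message["status"] == "completed" for message in queue)
assert queue[1]["payload"] == {"x": 1}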
13
tests/spiffworkflow_backend/unit/test_spec_file_service.py
Normal file
@ -0,0 +1,13 @@
"""Test_spec_file_service."""
from flask import Flask
from tests.spiffworkflow_backend.helpers.base_test import BaseTest


class TestSpecFileService(BaseTest):
    """TestSpecFileService."""

    def test_can_check_for_messages_in_bpmn_xml(
        self, app: Flask, with_db_and_bpmn_file_cleanup: None
    ) -> None:
        """Test_can_check_for_messages_in_bpmn_xml."""
        assert True
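This last test is still a placeholder (assert True). As a hint of the kind of assertion it could grow into, here is a small, standard-library-only sketch that pulls the bpmn:message ids out of a BPMN document; it deliberately avoids guessing at SpecFileService's actual API.

# Hypothetical helper: list the bpmn:message elements declared in a BPMN file.
from xml.etree import ElementTree

BPMN_NAMESPACES = {"bpmn": "http://www.omg.org/spec/BPMN/20100524/MODEL"}


def message_identifiers_in_bpmn(bpmn_xml: str) -> list:
    """Return the id of every bpmn:message element in the given BPMN XML."""
    definitions = ElementTree.fromstring(bpmn_xml)
    return [
        element.get("id")
        for element in definitions.findall(".//bpmn:message", BPMN_NAMESPACES)
    ]


# Against the message_send_process diagram earlier in this change, this would
# return ["message_send", "message_response"].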