Merge remote-tracking branch 'origin/main' into feature/script-unit-tests

This commit is contained in:
commit 556c9f1353
@@ -1,4 +1,4 @@
FROM ghcr.io/sartography/python:3.9
FROM ghcr.io/sartography/python:3.10

RUN pip install poetry
RUN useradd _gunicorn --no-create-home --user-group
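An aside on the hunk above: the only change is the base-image tag moving from python:3.9 to python:3.10. As a hedged sketch (the verification command is an assumption, not part of this commit), the new image can be sanity-checked locally before rebuilding:

    # Pull the bumped base image and confirm which interpreter it ships (assumed check).
    docker run --rm ghcr.io/sartography/python:3.10 python --version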
@@ -29,10 +29,10 @@ if [[ "${1:-}" == "clean" ]]; then
  if [[ "${SPIFF_DATABASE_TYPE:-}" == "postgres" ]]; then
    if ! docker exec -it postgres-spiff psql -U spiffworkflow_backend spiffworkflow_backend_testing -c "select 1"; then
      docker run --name postgres-spiff -p 5432:5432 -e POSTGRES_PASSWORD=spiffworkflow_backend -e POSTGRES_USER=spiffworkflow_backend -e POSTGRES_DB=spiffworkflow_backend_testing -d postgres

      sleep 4 # classy

      # create other db
    fi
    if ! docker exec -it postgres-spiff psql -U spiffworkflow_backend spiffworkflow_backend_development -c "select 1"; then
      # create other db. spiffworkflow_backend_testing came with the docker run.
      docker exec -it postgres-spiff psql -U spiffworkflow_backend spiffworkflow_backend_testing -c "create database spiffworkflow_backend_development;"
    fi
  fi
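The reworked block above makes the script idempotent: psql -c "select 1" doubles as a probe, so docker run only fires when the postgres-spiff container is missing, and the development database is only created when it does not answer. A hedged companion sketch (an assumed reset helper, not in this diff) that forces the next run back through both branches:

    # Assumed cleanup: drop the container so the script recreates postgres-spiff
    # and both databases from scratch on its next run.
    docker rm -f postgres-spiff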
@@ -21,4 +21,5 @@ for i in "${tables[@]}" ;do
  mysql -uroot -e "select * from spiffworkflow_backend_development.${i}"
done

mysql -uroot -e "select id,process_model_identifier,process_group_identifier,status from spiffworkflow_backend_testing.process_instance"
echo "process_instance"
mysql -uroot -e "select id,process_model_identifier,process_group_identifier,status from spiffworkflow_backend_development.process_instance"
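The added lines dump process_instance from both databases, but only a single echo labels the output. A hedged variant (an assumption, not the committed script) that labels each dump symmetrically:

    # Sketch: print which database each dump came from.
    for db in spiffworkflow_backend_testing spiffworkflow_backend_development; do
      echo "${db}.process_instance"
      mysql -uroot -e "select id,process_model_identifier,process_group_identifier,status from ${db}.process_instance"
    done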
@@ -1,8 +1,8 @@
"""empty message

Revision ID: 00a59d952198
Revision ID: 8fca9cdfb5be
Revises:
Create Date: 2022-09-19 09:01:56.805355
Create Date: 2022-09-26 10:38:30.015462

"""
from alembic import op

@@ -10,7 +10,7 @@ import sqlalchemy as sa


# revision identifiers, used by Alembic.
revision = '00a59d952198'
revision = '8fca9cdfb5be'
down_revision = None
branch_labels = None
depends_on = None

@@ -126,6 +126,15 @@ def upgrade():
    op.create_index(op.f('ix_process_instance_report_identifier'), 'process_instance_report', ['identifier'], unique=False)
    op.create_index(op.f('ix_process_instance_report_process_group_identifier'), 'process_instance_report', ['process_group_identifier'], unique=False)
    op.create_index(op.f('ix_process_instance_report_process_model_identifier'), 'process_instance_report', ['process_model_identifier'], unique=False)
    op.create_table('secret',
    sa.Column('id', sa.Integer(), nullable=False),
    sa.Column('key', sa.String(length=50), nullable=False),
    sa.Column('value', sa.String(length=255), nullable=False),
    sa.Column('creator_user_id', sa.Integer(), nullable=False),
    sa.ForeignKeyConstraint(['creator_user_id'], ['user.id'], ),
    sa.PrimaryKeyConstraint('id'),
    sa.UniqueConstraint('key')
    )
    op.create_table('user_group_assignment',
    sa.Column('id', sa.Integer(), nullable=False),
    sa.Column('user_id', sa.Integer(), nullable=False),

@@ -205,11 +214,21 @@ def upgrade():
    sa.ForeignKeyConstraint(['process_instance_id'], ['process_instance.id'], ),
    sa.PrimaryKeyConstraint('id')
    )
    op.create_table('secret_allowed_process',
    sa.Column('id', sa.Integer(), nullable=False),
    sa.Column('secret_id', sa.Integer(), nullable=False),
    sa.Column('allowed_relative_path', sa.String(length=500), nullable=False),
    sa.ForeignKeyConstraint(['secret_id'], ['secret.id'], ),
    sa.PrimaryKeyConstraint('id'),
    sa.UniqueConstraint('secret_id', 'allowed_relative_path', name='unique_secret_path')
    )
    op.create_table('spiff_logging',
    sa.Column('id', sa.Integer(), nullable=False),
    sa.Column('process_instance_id', sa.Integer(), nullable=False),
    sa.Column('bpmn_process_identifier', sa.String(length=255), nullable=False),
    sa.Column('bpmn_task_identifier', sa.String(length=255), nullable=False),
    sa.Column('bpmn_task_name', sa.String(length=255), nullable=True),
    sa.Column('bpmn_task_type', sa.String(length=255), nullable=True),
    sa.Column('spiff_task_guid', sa.String(length=50), nullable=False),
    sa.Column('timestamp', sa.DECIMAL(precision=17, scale=6), nullable=False),
    sa.Column('message', sa.String(length=255), nullable=True),

@@ -275,6 +294,7 @@ def downgrade():
    op.drop_table('data_store')
    op.drop_table('task_event')
    op.drop_table('spiff_logging')
    op.drop_table('secret_allowed_process')
    op.drop_table('message_instance')
    op.drop_index(op.f('ix_message_correlation_value'), table_name='message_correlation')
    op.drop_index(op.f('ix_message_correlation_process_instance_id'), table_name='message_correlation')

@@ -284,6 +304,7 @@ def downgrade():
    op.drop_table('file')
    op.drop_table('active_task')
    op.drop_table('user_group_assignment')
    op.drop_table('secret')
    op.drop_index(op.f('ix_process_instance_report_process_model_identifier'), table_name='process_instance_report')
    op.drop_index(op.f('ix_process_instance_report_process_group_identifier'), table_name='process_instance_report')
    op.drop_index(op.f('ix_process_instance_report_identifier'), table_name='process_instance_report')
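In the migration above, the revision ID and Create Date change while the schema operations largely stay put, which is what regenerating the single bootstrap migration (rather than hand-editing it) produces; "empty message" is the docstring Alembic writes when no message is supplied. A hedged sketch of that regeneration flow, assuming Flask-Migrate (listed in poetry.lock below) drives Alembic in this project:

    # Assumed regeneration flow; the migrations/ path and use of the flask CLI are assumptions.
    rm -rf migrations/   # discard the old bootstrap revision
    flask db init        # recreate the migrations/ scaffold
    flask db migrate     # autogenerate a fresh revision id and create date
    flask db upgrade     # apply it to the configured database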
@@ -53,6 +53,7 @@ python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, <4"

[package.dependencies]
pytz = "*"
setuptools = ">=0.7"
six = ">=1.4.0"
tzlocal = ">=2.0,<3.0.0 || >=4.0.0"

@@ -64,7 +65,7 @@ mongodb = ["pymongo (>=3.0)"]
redis = ["redis (>=3.0)"]
rethinkdb = ["rethinkdb (>=2.4.0)"]
sqlalchemy = ["sqlalchemy (>=0.8)"]
testing = ["pytest", "pytest-cov", "pytest-tornado5", "mock", "pytest-asyncio (<0.6)", "pytest-asyncio"]
testing = ["mock", "pytest", "pytest-asyncio", "pytest-asyncio (<0.6)", "pytest-cov", "pytest-tornado5"]
tornado = ["tornado (>=4.3)"]
twisted = ["twisted"]
zookeeper = ["kazoo"]

@@ -79,6 +80,7 @@ python-versions = ">=3.6.2"

[package.dependencies]
lazy-object-proxy = ">=1.4.0"
setuptools = ">=20.0"
typing-extensions = {version = ">=3.10", markers = "python_version < \"3.10\""}
wrapt = ">=1.11,<2"

@@ -99,10 +101,10 @@ optional = false
python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*"

[package.extras]
dev = ["coverage[toml] (>=5.0.2)", "hypothesis", "pympler", "pytest (>=4.3.0)", "six", "mypy", "pytest-mypy-plugins", "zope.interface", "furo", "sphinx", "sphinx-notfound-page", "pre-commit", "cloudpickle"]
docs = ["furo", "sphinx", "zope.interface", "sphinx-notfound-page"]
tests = ["coverage[toml] (>=5.0.2)", "hypothesis", "pympler", "pytest (>=4.3.0)", "six", "mypy", "pytest-mypy-plugins", "zope.interface", "cloudpickle"]
tests_no_zope = ["coverage[toml] (>=5.0.2)", "hypothesis", "pympler", "pytest (>=4.3.0)", "six", "mypy", "pytest-mypy-plugins", "cloudpickle"]
dev = ["cloudpickle", "coverage[toml] (>=5.0.2)", "furo", "hypothesis", "mypy", "pre-commit", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins", "six", "sphinx", "sphinx-notfound-page", "zope.interface"]
docs = ["furo", "sphinx", "sphinx-notfound-page", "zope.interface"]
tests = ["cloudpickle", "coverage[toml] (>=5.0.2)", "hypothesis", "mypy", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins", "six", "zope.interface"]
tests_no_zope = ["cloudpickle", "coverage[toml] (>=5.0.2)", "hypothesis", "mypy", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins", "six"]

[[package]]
name = "babel"

@@ -132,7 +134,7 @@ stevedore = ">=1.20.0"
[package.extras]
test = ["beautifulsoup4 (>=4.8.0)", "coverage (>=4.5.4)", "fixtures (>=3.0.0)", "flake8 (>=4.0.0)", "pylint (==1.9.4)", "stestr (>=2.5.0)", "testscenarios (>=0.5.0)", "testtools (>=2.3.0)", "toml"]
toml = ["toml"]
yaml = ["pyyaml"]
yaml = ["PyYAML"]

[[package]]
name = "bcrypt"

@@ -247,7 +249,7 @@ s3 = ["boto3 (>=1.9.125)"]
slmq = ["softlayer-messaging (>=1.0.3)"]
solar = ["ephem"]
sqlalchemy = ["sqlalchemy"]
sqs = ["kombu"]
sqs = ["kombu[sqs]"]
tblib = ["tblib (>=1.3.0)", "tblib (>=1.5.0)"]
yaml = ["PyYAML (>=3.10)"]
zookeeper = ["kazoo (>=1.3.1)"]

@@ -333,7 +335,7 @@ python-versions = "*"
click = ">=4.0"

[package.extras]
dev = ["pytest (>=3.6)", "pytest-cov", "wheel", "coveralls"]
dev = ["coveralls", "pytest (>=3.6)", "pytest-cov", "wheel"]

[[package]]
name = "click-repl"

@@ -377,8 +379,8 @@ optional = false
python-versions = ">=3.6"

[package.extras]
docs = ["sphinx", "jaraco.packaging (>=8.2)", "rst.linker (>=1.9)", "jaraco.tidelift (>=1.4)"]
testing = ["pytest (>=6)", "pytest-checkdocs (>=2.4)", "pytest-flake8", "pytest-cov", "pytest-enabler (>=1.0.1)", "types-backports", "pytest-black (>=0.3.7)", "pytest-mypy"]
docs = ["jaraco.packaging (>=8.2)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx"]
testing = ["pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=1.0.1)", "pytest-flake8", "pytest-mypy", "types-backports"]

[[package]]
name = "connexion"

@@ -401,11 +403,11 @@ swagger-ui-bundle = {version = ">=0.0.2,<0.1", optional = true, markers = "extra
werkzeug = ">=1.0,<3"

[package.extras]
aiohttp = ["aiohttp (>=2.3.10,<4)", "aiohttp-jinja2 (>=0.14.0,<2)", "MarkupSafe (>=0.23)"]
aiohttp = ["MarkupSafe (>=0.23)", "aiohttp (>=2.3.10,<4)", "aiohttp-jinja2 (>=0.14.0,<2)"]
docs = ["sphinx-autoapi (==1.8.1)"]
flask = ["flask (>=1.0.4,<3)", "itsdangerous (>=0.24)"]
swagger-ui = ["swagger-ui-bundle (>=0.0.2,<0.1)"]
tests = ["decorator (>=5,<6)", "pytest (>=6,<7)", "pytest-cov (>=2,<3)", "testfixtures (>=6,<7)", "flask (>=1.0.4,<3)", "itsdangerous (>=0.24)", "swagger-ui-bundle (>=0.0.2,<0.1)", "aiohttp (>=2.3.10,<4)", "aiohttp-jinja2 (>=0.14.0,<2)", "MarkupSafe (>=0.23)", "pytest-aiohttp", "aiohttp-remotes"]
tests = ["MarkupSafe (>=0.23)", "aiohttp (>=2.3.10,<4)", "aiohttp-jinja2 (>=0.14.0,<2)", "aiohttp-remotes", "decorator (>=5,<6)", "flask (>=1.0.4,<3)", "itsdangerous (>=0.24)", "pytest (>=6,<7)", "pytest-aiohttp", "pytest-cov (>=2,<3)", "swagger-ui-bundle (>=0.0.2,<0.1)", "testfixtures (>=6,<7)"]

[[package]]
name = "coverage"

@@ -444,7 +446,7 @@ regex = "<2019.02.19 || >2019.02.19,<2021.8.27 || >2021.8.27,<2022.3.15"
tzlocal = "*"

[package.extras]
calendars = ["convertdate", "hijri-converter", "convertdate"]
calendars = ["convertdate", "convertdate", "hijri-converter"]
fasttext = ["fasttext"]
langdetect = ["langdetect"]

@@ -700,11 +702,11 @@ marshmallow = ">=2.0.0"
six = ">=1.9.0"

[package.extras]
dev = ["flask-sqlalchemy", "pytest", "mock", "flake8 (==3.8.3)", "pre-commit (>=2.4,<3.0)", "tox", "marshmallow-sqlalchemy (>=0.13.0,<0.19.0)", "flake8-bugbear (==20.1.4)", "marshmallow-sqlalchemy (>=0.13.0)"]
docs = ["marshmallow-sqlalchemy (>=0.13.0)", "Sphinx (==3.2.1)", "sphinx-issues (==1.2.0)"]
lint = ["flake8 (==3.8.3)", "pre-commit (>=2.4,<3.0)", "flake8-bugbear (==20.1.4)"]
sqlalchemy = ["flask-sqlalchemy", "marshmallow-sqlalchemy (>=0.13.0,<0.19.0)", "marshmallow-sqlalchemy (>=0.13.0)"]
tests = ["flask-sqlalchemy", "pytest", "mock", "marshmallow-sqlalchemy (>=0.13.0,<0.19.0)", "marshmallow-sqlalchemy (>=0.13.0)"]
dev = ["flake8 (==3.8.3)", "flake8-bugbear (==20.1.4)", "flask-sqlalchemy", "marshmallow-sqlalchemy (>=0.13.0)", "marshmallow-sqlalchemy (>=0.13.0,<0.19.0)", "mock", "pre-commit (>=2.4,<3.0)", "pytest", "tox"]
docs = ["Sphinx (==3.2.1)", "marshmallow-sqlalchemy (>=0.13.0)", "sphinx-issues (==1.2.0)"]
lint = ["flake8 (==3.8.3)", "flake8-bugbear (==20.1.4)", "pre-commit (>=2.4,<3.0)"]
sqlalchemy = ["flask-sqlalchemy", "marshmallow-sqlalchemy (>=0.13.0)", "marshmallow-sqlalchemy (>=0.13.0,<0.19.0)"]
tests = ["flask-sqlalchemy", "marshmallow-sqlalchemy (>=0.13.0)", "marshmallow-sqlalchemy (>=0.13.0,<0.19.0)", "mock", "pytest"]

[[package]]
name = "flask-migrate"

@@ -793,7 +795,7 @@ optional = false
python-versions = ">=2.7,!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*"

[package.extras]
docs = ["sphinx"]
docs = ["Sphinx"]

[[package]]
name = "gunicorn"

@@ -803,6 +805,9 @@ category = "main"
optional = false
python-versions = ">=3.5"

[package.dependencies]
setuptools = ">=3.0"

[package.extras]
eventlet = ["eventlet (>=0.24.1)"]
gevent = ["gevent (>=1.4.0)"]

@@ -848,9 +853,9 @@ python-versions = ">=3.7"
zipp = ">=0.5"

[package.extras]
docs = ["sphinx", "jaraco.packaging (>=9)", "rst.linker (>=1.9)"]
docs = ["jaraco.packaging (>=9)", "rst.linker (>=1.9)", "sphinx"]
perf = ["ipython"]
testing = ["pytest (>=6)", "pytest-checkdocs (>=2.4)", "pytest-flake8", "pytest-cov", "pytest-enabler (>=1.0.1)", "packaging", "pyfakefs", "flufl.flake8", "pytest-perf (>=0.9.2)", "pytest-black (>=0.3.7)", "pytest-mypy (>=0.9.1)", "importlib-resources (>=1.3)"]
testing = ["flufl.flake8", "importlib-resources (>=1.3)", "packaging", "pyfakefs", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=1.0.1)", "pytest-flake8", "pytest-mypy (>=0.9.1)", "pytest-perf (>=0.9.2)"]

[[package]]
name = "inflection"

@@ -956,7 +961,7 @@ typing-extensions = ">=3.7.4.2"
typing-inspect = ">=0.4.0"

[package.extras]
dev = ["black (==22.3.0)", "coverage (>=4.5.4)", "fixit (==0.1.1)", "flake8 (>=3.7.8)", "hypothesis (>=4.36.0)", "hypothesmith (>=0.0.4)", "jupyter (>=1.0.0)", "maturin (>=0.8.3,<0.9)", "nbsphinx (>=0.4.2)", "prompt-toolkit (>=2.0.9)", "setuptools-scm (>=6.0.1)", "sphinx-rtd-theme (>=0.4.3)", "ufmt (==1.3)", "usort (==1.0.0rc1)", "setuptools-rust (>=0.12.1)", "slotscheck (>=0.7.1)", "jinja2 (==3.0.3)", "pyre-check (==0.9.9)"]
dev = ["black (==22.3.0)", "coverage (>=4.5.4)", "fixit (==0.1.1)", "flake8 (>=3.7.8)", "hypothesis (>=4.36.0)", "hypothesmith (>=0.0.4)", "jinja2 (==3.0.3)", "jupyter (>=1.0.0)", "maturin (>=0.8.3,<0.9)", "nbsphinx (>=0.4.2)", "prompt-toolkit (>=2.0.9)", "pyre-check (==0.9.9)", "setuptools-rust (>=0.12.1)", "setuptools-scm (>=6.0.1)", "slotscheck (>=0.7.1)", "sphinx-rtd-theme (>=0.4.3)", "ufmt (==1.3)", "usort (==1.0.0rc1)"]

[[package]]
name = "livereload"

@@ -981,7 +986,7 @@ python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, != 3.4.*"
[package.extras]
cssselect = ["cssselect (>=0.7)"]
html5 = ["html5lib"]
htmlsoup = ["beautifulsoup4"]
htmlsoup = ["BeautifulSoup4"]
source = ["Cython (>=0.29.7)"]

[[package]]

@@ -996,7 +1001,7 @@ python-versions = ">=3.7"
MarkupSafe = ">=0.9.2"

[package.extras]
babel = ["babel"]
babel = ["Babel"]
lingua = ["lingua"]
testing = ["pytest"]

@@ -1020,9 +1025,9 @@ python-versions = ">=3.7"
packaging = ">=17.0"

[package.extras]
dev = ["pytest", "pytz", "simplejson", "mypy (==0.960)", "flake8 (==4.0.1)", "flake8-bugbear (==22.4.25)", "pre-commit (>=2.4,<3.0)", "tox"]
docs = ["sphinx (==4.5.0)", "sphinx-issues (==3.0.1)", "alabaster (==0.7.12)", "sphinx-version-warning (==1.1.2)", "autodocsumm (==0.2.8)"]
lint = ["mypy (==0.960)", "flake8 (==4.0.1)", "flake8-bugbear (==22.4.25)", "pre-commit (>=2.4,<3.0)"]
dev = ["flake8 (==4.0.1)", "flake8-bugbear (==22.4.25)", "mypy (==0.960)", "pre-commit (>=2.4,<3.0)", "pytest", "pytz", "simplejson", "tox"]
docs = ["alabaster (==0.7.12)", "autodocsumm (==0.2.8)", "sphinx (==4.5.0)", "sphinx-issues (==3.0.1)", "sphinx-version-warning (==1.1.2)"]
lint = ["flake8 (==4.0.1)", "flake8-bugbear (==22.4.25)", "mypy (==0.960)", "pre-commit (>=2.4,<3.0)"]
tests = ["pytest", "pytz", "simplejson"]

[[package]]

@@ -1049,8 +1054,8 @@ marshmallow = ">=3.0.0"
SQLAlchemy = ">=1.3.0"

[package.extras]
dev = ["pytest", "pytest-lazy-fixture (>=0.6.2)", "flake8 (==4.0.1)", "flake8-bugbear (==22.1.11)", "pre-commit (>=2.0,<3.0)", "tox"]
docs = ["sphinx (==4.4.0)", "alabaster (==0.7.12)", "sphinx-issues (==3.0.1)"]
dev = ["flake8 (==4.0.1)", "flake8-bugbear (==22.1.11)", "pre-commit (>=2.0,<3.0)", "pytest", "pytest-lazy-fixture (>=0.6.2)", "tox"]
docs = ["alabaster (==0.7.12)", "sphinx (==4.4.0)", "sphinx-issues (==3.0.1)"]
lint = ["flake8 (==4.0.1)", "flake8-bugbear (==22.1.11)", "pre-commit (>=2.0,<3.0)"]
tests = ["pytest", "pytest-lazy-fixture (>=0.6.2)"]

@@ -1171,8 +1176,8 @@ optional = false
python-versions = ">=3.7"

[package.extras]
docs = ["furo (>=2021.7.5b38)", "proselint (>=0.10.2)", "sphinx-autodoc-typehints (>=1.12)", "sphinx (>=4)"]
test = ["appdirs (==1.4.4)", "pytest-cov (>=2.7)", "pytest-mock (>=3.6)", "pytest (>=6)"]
docs = ["furo (>=2021.7.5b38)", "proselint (>=0.10.2)", "sphinx (>=4)", "sphinx-autodoc-typehints (>=1.12)"]
test = ["appdirs (==1.4.4)", "pytest (>=6)", "pytest-cov (>=2.7)", "pytest-mock (>=3.6)"]

[[package]]
name = "pluggy"

@@ -1313,9 +1318,9 @@ python-versions = ">=3.6"

[package.extras]
crypto = ["cryptography (>=3.3.1)"]
dev = ["sphinx", "sphinx-rtd-theme", "zope.interface", "cryptography (>=3.3.1)", "pytest (>=6.0.0,<7.0.0)", "coverage[toml] (==5.0.4)", "mypy", "pre-commit"]
dev = ["coverage[toml] (==5.0.4)", "cryptography (>=3.3.1)", "mypy", "pre-commit", "pytest (>=6.0.0,<7.0.0)", "sphinx", "sphinx-rtd-theme", "zope.interface"]
docs = ["sphinx", "sphinx-rtd-theme", "zope.interface"]
tests = ["pytest (>=6.0.0,<7.0.0)", "coverage[toml] (==5.0.4)"]
tests = ["coverage[toml] (==5.0.4)", "pytest (>=6.0.0,<7.0.0)"]

[[package]]
name = "pyparsing"

@@ -1326,7 +1331,7 @@ optional = false
python-versions = ">=3.6.8"

[package.extras]
diagrams = ["railroad-diagrams", "jinja2"]
diagrams = ["jinja2", "railroad-diagrams"]

[[package]]
name = "pyrsistent"

@@ -1371,7 +1376,7 @@ pytest = ">=5.2"
Werkzeug = ">=0.7"

[package.extras]
docs = ["sphinx", "sphinx-rtd-theme"]
docs = ["Sphinx", "sphinx-rtd-theme"]

[[package]]
name = "pytest-flask-sqlalchemy"

@@ -1389,7 +1394,7 @@ pytest-mock = ">=1.6.2"
SQLAlchemy = ">=1.2.2"

[package.extras]
tests = ["pytest-postgresql (>=2.4.0,<4.0.0)", "psycopg2-binary", "pytest (>=6.0.1)"]
tests = ["psycopg2-binary", "pytest (>=6.0.1)", "pytest-postgresql (>=2.4.0,<4.0.0)"]

[[package]]
name = "pytest-mock"

@@ -1403,7 +1408,7 @@ python-versions = ">=3.7"
pytest = ">=5.0"

[package.extras]
dev = ["pre-commit", "tox", "pytest-asyncio"]
dev = ["pre-commit", "pytest-asyncio", "tox"]

[[package]]
name = "python-dateutil"

@@ -1431,8 +1436,8 @@ rsa = "*"

[package.extras]
cryptography = ["cryptography (>=3.4.0)"]
pycrypto = ["pycrypto (>=2.6.0,<2.7.0)", "pyasn1"]
pycryptodome = ["pycryptodome (>=3.3.1,<4.0.0)", "pyasn1"]
pycrypto = ["pyasn1", "pycrypto (>=2.6.0,<2.7.0)"]
pycryptodome = ["pyasn1", "pycryptodome (>=3.3.1,<4.0.0)"]

[[package]]
name = "python-keycloak"

@@ -1596,6 +1601,7 @@ dparse = ">=0.5.1"
packaging = ">=21.0"
requests = "*"
"ruamel.yaml" = ">=0.17.21"
setuptools = ">=19.3"

[[package]]
name = "sentry-sdk"

@@ -1618,17 +1624,30 @@ chalice = ["chalice (>=1.16.0)"]
django = ["django (>=1.8)"]
falcon = ["falcon (>=1.4)"]
fastapi = ["fastapi (>=0.79.0)"]
flask = ["flask (>=0.11)", "blinker (>=1.1)"]
flask = ["blinker (>=1.1)", "flask (>=0.11)"]
httpx = ["httpx (>=0.16.0)"]
pure_eval = ["pure-eval", "executing", "asttokens"]
pure_eval = ["asttokens", "executing", "pure-eval"]
pyspark = ["pyspark (>=2.4.4)"]
quart = ["quart (>=0.16.1)", "blinker (>=1.1)"]
quart = ["blinker (>=1.1)", "quart (>=0.16.1)"]
rq = ["rq (>=0.6)"]
sanic = ["sanic (>=0.8)"]
sqlalchemy = ["sqlalchemy (>=1.2)"]
starlette = ["starlette (>=0.19.1)"]
tornado = ["tornado (>=5)"]

[[package]]
name = "setuptools"
version = "65.3.0"
description = "Easily download, build, install, upgrade, and uninstall Python packages"
category = "main"
optional = false
python-versions = ">=3.7"

[package.extras]
docs = ["furo", "jaraco.packaging (>=9)", "jaraco.tidelift (>=1.4)", "pygments-github-lexers (==0.0.5)", "rst.linker (>=1.9)", "sphinx", "sphinx-favicon", "sphinx-hoverxref (<2)", "sphinx-inline-tabs", "sphinx-notfound-page (==0.8.3)", "sphinx-reredirects", "sphinxcontrib-towncrier"]
testing = ["build[virtualenv]", "filelock (>=3.4.0)", "flake8 (<5)", "flake8-2020", "ini2toml[lite] (>=0.9)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "mock", "pip (>=19.1)", "pip-run (>=8.8)", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=1.3)", "pytest-flake8", "pytest-mypy (>=0.9.1)", "pytest-perf", "pytest-xdist", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel"]
testing-integration = ["build[virtualenv]", "filelock (>=3.4.0)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "pytest", "pytest-enabler", "pytest-xdist", "tomli", "virtualenv (>=13.0.0)", "wheel"]

[[package]]
name = "six"
version = "1.16.0"

@@ -1690,8 +1709,8 @@ sphinxcontrib-serializinghtml = ">=1.1.5"

[package.extras]
docs = ["sphinxcontrib-websupport"]
lint = ["flake8 (>=3.5.0)", "isort", "mypy (>=0.950)", "docutils-stubs", "types-typed-ast", "types-requests"]
test = ["pytest (>=4.6)", "html5lib", "cython", "typed-ast"]
lint = ["docutils-stubs", "flake8 (>=3.5.0)", "isort", "mypy (>=0.950)", "types-requests", "types-typed-ast"]
test = ["cython", "html5lib", "pytest (>=4.6)", "typed-ast"]

[[package]]
name = "sphinx-autoapi"

@@ -1741,7 +1760,7 @@ python-versions = ">=3.7"
sphinx = ">=4.0,<6.0"

[package.extras]
docs = ["furo", "myst-parser", "sphinx-copybutton", "sphinx-inline-tabs", "ipython"]
docs = ["furo", "ipython", "myst-parser", "sphinx-copybutton", "sphinx-inline-tabs"]

[[package]]
name = "sphinx-click"

@@ -1765,7 +1784,7 @@ optional = false
python-versions = ">=3.5"

[package.extras]
lint = ["flake8", "mypy", "docutils-stubs"]
lint = ["docutils-stubs", "flake8", "mypy"]
test = ["pytest"]

[[package]]

@@ -1777,7 +1796,7 @@ optional = false
python-versions = ">=3.5"

[package.extras]
lint = ["flake8", "mypy", "docutils-stubs"]
lint = ["docutils-stubs", "flake8", "mypy"]
test = ["pytest"]

[[package]]

@@ -1789,8 +1808,8 @@ optional = false
python-versions = ">=3.6"

[package.extras]
lint = ["flake8", "mypy", "docutils-stubs"]
test = ["pytest", "html5lib"]
lint = ["docutils-stubs", "flake8", "mypy"]
test = ["html5lib", "pytest"]

[[package]]
name = "sphinxcontrib-jsmath"

@@ -1801,7 +1820,7 @@ optional = false
python-versions = ">=3.5"

[package.extras]
test = ["pytest", "flake8", "mypy"]
test = ["flake8", "mypy", "pytest"]

[[package]]
name = "sphinxcontrib-qthelp"

@@ -1812,7 +1831,7 @@ optional = false
python-versions = ">=3.5"

[package.extras]
lint = ["flake8", "mypy", "docutils-stubs"]
lint = ["docutils-stubs", "flake8", "mypy"]
test = ["pytest"]

[[package]]

@@ -1824,13 +1843,13 @@ optional = false
python-versions = ">=3.5"

[package.extras]
lint = ["flake8", "mypy", "docutils-stubs"]
lint = ["docutils-stubs", "flake8", "mypy"]
test = ["pytest"]

[[package]]
name = "SpiffWorkflow"
version = "1.1.7"
description = ""
description = "A workflow framework and BPMN/DMN Processor"
category = "main"
optional = false
python-versions = "*"

@@ -1847,7 +1866,7 @@ pytz = "*"
type = "git"
url = "https://github.com/sartography/SpiffWorkflow"
reference = "main"
resolved_reference = "4d0976c98a9dc07d604c06fe0b7e3b21fb97c5b3"
resolved_reference = "dec9b4b942378d030ae73f1365dfbf108e6f7f8c"

[[package]]
name = "sqlalchemy"
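In the SpiffWorkflow entry above, the dependency keeps tracking the main branch of the git repository; only the pinned commit (resolved_reference) and the upstream description move. A hedged sketch of how such a pin is typically advanced (assumed command, not recorded in this commit):

    # Re-resolve the git dependency against its branch and rewrite the lock pin.
    poetry update SpiffWorkflow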
@@ -1861,25 +1880,25 @@ python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,>=2.7"
greenlet = {version = "!=0.4.17", markers = "python_version >= \"3\" and (platform_machine == \"aarch64\" or platform_machine == \"ppc64le\" or platform_machine == \"x86_64\" or platform_machine == \"amd64\" or platform_machine == \"AMD64\" or platform_machine == \"win32\" or platform_machine == \"WIN32\")"}

[package.extras]
aiomysql = ["greenlet (!=0.4.17)", "aiomysql"]
aiosqlite = ["typing_extensions (!=3.10.0.1)", "greenlet (!=0.4.17)", "aiosqlite"]
aiomysql = ["aiomysql", "greenlet (!=0.4.17)"]
aiosqlite = ["aiosqlite", "greenlet (!=0.4.17)", "typing_extensions (!=3.10.0.1)"]
asyncio = ["greenlet (!=0.4.17)"]
asyncmy = ["greenlet (!=0.4.17)", "asyncmy (>=0.2.3,!=0.2.4)"]
asyncmy = ["asyncmy (>=0.2.3,!=0.2.4)", "greenlet (!=0.4.17)"]
mariadb_connector = ["mariadb (>=1.0.1)"]
mssql = ["pyodbc"]
mssql_pymssql = ["pymssql"]
mssql_pyodbc = ["pyodbc"]
mypy = ["sqlalchemy2-stubs", "mypy (>=0.910)"]
mysql = ["mysqlclient (>=1.4.0,<2)", "mysqlclient (>=1.4.0)"]
mypy = ["mypy (>=0.910)", "sqlalchemy2-stubs"]
mysql = ["mysqlclient (>=1.4.0)", "mysqlclient (>=1.4.0,<2)"]
mysql_connector = ["mysql-connector-python"]
oracle = ["cx_oracle (>=7,<8)", "cx_oracle (>=7)"]
oracle = ["cx_oracle (>=7)", "cx_oracle (>=7,<8)"]
postgresql = ["psycopg2 (>=2.7)"]
postgresql_asyncpg = ["greenlet (!=0.4.17)", "asyncpg"]
postgresql_asyncpg = ["asyncpg", "greenlet (!=0.4.17)"]
postgresql_pg8000 = ["pg8000 (>=1.16.6,!=1.29.0)"]
postgresql_psycopg2binary = ["psycopg2-binary"]
postgresql_psycopg2cffi = ["psycopg2cffi"]
pymysql = ["pymysql (<1)", "pymysql"]
sqlcipher = ["sqlcipher3-binary"]
pymysql = ["pymysql", "pymysql (<1)"]
sqlcipher = ["sqlcipher3_binary"]

[[package]]
name = "sqlalchemy-stubs"

@@ -1956,8 +1975,8 @@ optional = false
python-versions = ">=3.5.3"

[package.extras]
doc = ["sphinx-rtd-theme", "sphinx-autodoc-typehints (>=1.2.0)"]
test = ["pytest", "typing-extensions", "mypy"]
doc = ["sphinx-autodoc-typehints (>=1.2.0)", "sphinx-rtd-theme"]
test = ["mypy", "pytest", "typing-extensions"]

[[package]]
name = "types-pytz"

@@ -2028,7 +2047,7 @@ tzdata = {version = "*", markers = "platform_system == \"Windows\""}

[package.extras]
devenv = ["black", "pyroma", "pytest-cov", "zest.releaser"]
test = ["pytest-mock (>=3.3)", "pytest (>=4.3)"]
test = ["pytest (>=4.3)", "pytest-mock (>=3.3)"]

[[package]]
name = "unidecode"

@@ -2047,8 +2066,8 @@ optional = false
python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, <4"

[package.extras]
brotli = ["brotlicffi (>=0.8.0)", "brotli (>=1.0.9)", "brotlipy (>=0.6.0)"]
secure = ["pyOpenSSL (>=0.14)", "cryptography (>=1.3.4)", "idna (>=2.0.0)", "certifi", "ipaddress"]
brotli = ["brotli (>=1.0.9)", "brotlicffi (>=0.8.0)", "brotlipy (>=0.6.0)"]
secure = ["certifi", "cryptography (>=1.3.4)", "idna (>=2.0.0)", "ipaddress", "pyOpenSSL (>=0.14)"]
socks = ["PySocks (>=1.5.6,!=1.5.7,<2.0)"]

[[package]]

@@ -2075,7 +2094,7 @@ six = ">=1.9.0,<2"

[package.extras]
docs = ["proselint (>=0.10.2)", "sphinx (>=3)", "sphinx-argparse (>=0.2.5)", "sphinx-rtd-theme (>=0.4.3)", "towncrier (>=21.3)"]
testing = ["coverage (>=4)", "coverage-enable-subprocess (>=1)", "flaky (>=3)", "pytest (>=4)", "pytest-env (>=0.6.2)", "pytest-freezegun (>=0.4.1)", "pytest-mock (>=2)", "pytest-randomly (>=1)", "pytest-timeout (>=1)", "packaging (>=20.0)"]
testing = ["coverage (>=4)", "coverage-enable-subprocess (>=1)", "flaky (>=3)", "packaging (>=20.0)", "pytest (>=4)", "pytest-env (>=0.6.2)", "pytest-freezegun (>=0.4.1)", "pytest-mock (>=2)", "pytest-randomly (>=1)", "pytest-timeout (>=1)"]

[[package]]
name = "wcwidth"

@@ -2132,15 +2151,15 @@ Pygments = {version = "*", optional = true, markers = "python_version >= \"3.5.0
six = "*"

[package.extras]
tests = ["pytest-cov", "pytest", "typing", "pytest", "pytest-cov", "pytest", "pytest-cov", "pytest", "pytest", "pytest-cov", "pytest", "scikit-build", "pybind11", "ninja", "codecov", "cmake"]
tests-strict = ["pytest-cov (==3.0.0)", "pytest (==6.2.5)", "typing (==3.7.4)", "pytest (==4.6.0)", "pytest (==4.6.0)", "pytest-cov (==2.9.0)", "pytest (==4.6.0)", "pytest-cov (==2.8.1)", "pytest (==4.6.0)", "pytest (==4.6.0)", "pytest-cov (==2.8.1)", "scikit-build (==0.11.1)", "pybind11 (==2.7.1)", "ninja (==1.10.2)", "codecov (==2.0.15)", "cmake (==3.21.2)"]
all = ["IPython", "IPython", "Pygments", "Pygments", "attrs", "cmake", "codecov", "colorama", "debugpy", "debugpy", "debugpy", "debugpy", "debugpy", "ipykernel", "ipykernel", "ipython-genutils", "jedi", "jinja2", "jupyter-client", "jupyter-client", "jupyter-core", "nbconvert", "ninja", "pybind11", "pytest", "pytest", "pytest", "pytest", "pytest", "pytest", "pytest-cov", "pytest-cov", "pytest-cov", "pytest-cov", "scikit-build", "six", "typing"]
all-strict = ["IPython (==7.10.0)", "IPython (==7.23.1)", "Pygments (==2.0.0)", "Pygments (==2.4.1)", "attrs (==19.2.0)", "cmake (==3.21.2)", "codecov (==2.0.15)", "colorama (==0.4.1)", "debugpy (==1.0.0)", "debugpy (==1.0.0)", "debugpy (==1.0.0)", "debugpy (==1.3.0)", "debugpy (==1.6.0)", "ipykernel (==5.2.0)", "ipykernel (==6.0.0)", "ipython-genutils (==0.2.0)", "jedi (==0.16)", "jinja2 (==3.0.0)", "jupyter-client (==6.1.5)", "jupyter-client (==7.0.0)", "jupyter-core (==4.7.0)", "nbconvert (==6.0.0)", "ninja (==1.10.2)", "pybind11 (==2.7.1)", "pytest (==4.6.0)", "pytest (==4.6.0)", "pytest (==4.6.0)", "pytest (==4.6.0)", "pytest (==4.6.0)", "pytest (==6.2.5)", "pytest-cov (==2.8.1)", "pytest-cov (==2.8.1)", "pytest-cov (==2.9.0)", "pytest-cov (==3.0.0)", "scikit-build (==0.11.1)", "six (==1.11.0)", "typing (==3.7.4)"]
colors = ["Pygments", "Pygments", "colorama"]
jupyter = ["IPython", "IPython", "attrs", "debugpy", "debugpy", "debugpy", "debugpy", "debugpy", "ipykernel", "ipykernel", "ipython-genutils", "jedi", "jinja2", "jupyter-client", "jupyter-client", "jupyter-core", "nbconvert"]
optional = ["IPython", "IPython", "Pygments", "Pygments", "attrs", "colorama", "debugpy", "debugpy", "debugpy", "debugpy", "debugpy", "ipykernel", "ipykernel", "ipython-genutils", "jedi", "jinja2", "jupyter-client", "jupyter-client", "jupyter-core", "nbconvert", "pyflakes", "tomli"]
optional-strict = ["IPython (==7.10.0)", "IPython (==7.23.1)", "Pygments (==2.0.0)", "Pygments (==2.4.1)", "attrs (==19.2.0)", "colorama (==0.4.1)", "debugpy (==1.0.0)", "debugpy (==1.0.0)", "debugpy (==1.0.0)", "debugpy (==1.3.0)", "debugpy (==1.6.0)", "ipykernel (==5.2.0)", "ipykernel (==6.0.0)", "ipython-genutils (==0.2.0)", "jedi (==0.16)", "jinja2 (==3.0.0)", "jupyter-client (==6.1.5)", "jupyter-client (==7.0.0)", "jupyter-core (==4.7.0)", "nbconvert (==6.0.0)", "pyflakes (==2.2.0)", "tomli (==0.2.0)"]
runtime-strict = ["six (==1.11.0)"]
optional = ["ipykernel", "ipython", "jupyter-client", "nbconvert", "jinja2", "jupyter-core", "jedi", "attrs", "pygments", "ipython-genutils", "debugpy", "debugpy", "debugpy", "ipykernel", "debugpy", "ipython", "jupyter-client", "pygments", "tomli", "debugpy", "colorama", "pyflakes"]
optional-strict = ["ipykernel (==6.0.0)", "IPython (==7.23.1)", "jupyter-client (==7.0.0)", "nbconvert (==6.0.0)", "jinja2 (==3.0.0)", "jupyter-core (==4.7.0)", "jedi (==0.16)", "attrs (==19.2.0)", "Pygments (==2.4.1)", "ipython-genutils (==0.2.0)", "debugpy (==1.6.0)", "debugpy (==1.0.0)", "debugpy (==1.0.0)", "ipykernel (==5.2.0)", "debugpy (==1.0.0)", "IPython (==7.10.0)", "jupyter-client (==6.1.5)", "Pygments (==2.0.0)", "tomli (==0.2.0)", "debugpy (==1.3.0)", "colorama (==0.4.1)", "pyflakes (==2.2.0)"]
jupyter = ["ipykernel", "ipython", "jupyter-client", "nbconvert", "jinja2", "jupyter-core", "jedi", "attrs", "ipython-genutils", "debugpy", "debugpy", "debugpy", "ipykernel", "debugpy", "ipython", "jupyter-client", "debugpy"]
colors = ["pygments", "pygments", "colorama"]
all = ["ipykernel", "ipython", "jupyter-client", "nbconvert", "pytest-cov", "jinja2", "jupyter-core", "jedi", "attrs", "pygments", "pytest", "ipython-genutils", "debugpy", "typing", "debugpy", "debugpy", "pytest", "ipykernel", "debugpy", "ipython", "jupyter-client", "pytest-cov", "pytest", "pytest-cov", "pytest", "pygments", "pytest", "debugpy", "pytest-cov", "pytest", "colorama", "six", "scikit-build", "pybind11", "ninja", "codecov", "cmake"]
all-strict = ["ipykernel (==6.0.0)", "IPython (==7.23.1)", "jupyter-client (==7.0.0)", "nbconvert (==6.0.0)", "pytest-cov (==3.0.0)", "jinja2 (==3.0.0)", "jupyter-core (==4.7.0)", "jedi (==0.16)", "attrs (==19.2.0)", "Pygments (==2.4.1)", "pytest (==6.2.5)", "ipython-genutils (==0.2.0)", "debugpy (==1.6.0)", "typing (==3.7.4)", "debugpy (==1.0.0)", "debugpy (==1.0.0)", "pytest (==4.6.0)", "ipykernel (==5.2.0)", "debugpy (==1.0.0)", "IPython (==7.10.0)", "jupyter-client (==6.1.5)", "pytest (==4.6.0)", "pytest-cov (==2.9.0)", "pytest (==4.6.0)", "pytest-cov (==2.8.1)", "Pygments (==2.0.0)", "pytest (==4.6.0)", "debugpy (==1.3.0)", "pytest (==4.6.0)", "pytest-cov (==2.8.1)", "colorama (==0.4.1)", "six (==1.11.0)", "scikit-build (==0.11.1)", "pybind11 (==2.7.1)", "ninja (==1.10.2)", "codecov (==2.0.15)", "cmake (==3.21.2)"]
tests = ["cmake", "codecov", "ninja", "pybind11", "pytest", "pytest", "pytest", "pytest", "pytest", "pytest", "pytest-cov", "pytest-cov", "pytest-cov", "pytest-cov", "scikit-build", "typing"]
tests-strict = ["cmake (==3.21.2)", "codecov (==2.0.15)", "ninja (==1.10.2)", "pybind11 (==2.7.1)", "pytest (==4.6.0)", "pytest (==4.6.0)", "pytest (==4.6.0)", "pytest (==4.6.0)", "pytest (==4.6.0)", "pytest (==6.2.5)", "pytest-cov (==2.8.1)", "pytest-cov (==2.8.1)", "pytest-cov (==2.9.0)", "pytest-cov (==3.0.0)", "scikit-build (==0.11.1)", "typing (==3.7.4)"]

[[package]]
name = "zipp"

@@ -2151,8 +2170,8 @@ optional = false
python-versions = ">=3.7"

[package.extras]
docs = ["sphinx", "jaraco.packaging (>=9)", "rst.linker (>=1.9)"]
testing = ["pytest (>=6)", "pytest-checkdocs (>=2.4)", "pytest-flake8", "pytest-cov", "pytest-enabler (>=1.0.1)", "jaraco.itertools", "func-timeout", "pytest-black (>=0.3.7)", "pytest-mypy (>=0.9.1)"]
docs = ["jaraco.packaging (>=9)", "rst.linker (>=1.9)", "sphinx"]
testing = ["func-timeout", "jaraco.itertools", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=1.0.1)", "pytest-flake8", "pytest-mypy (>=0.9.1)"]

[metadata]
lock-version = "1.1"
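Taken together, most of the poetry.lock churn above is the same extras lists re-emitted in alphabetical order, which is the canonical form newer Poetry releases write, plus a few real changes (setuptools now locked explicitly, the SpiffWorkflow pin advanced, the gunicorn and safety dependency lists extended). A hedged sketch of the command that produces this kind of rewrite (an assumption about how the lock was refreshed):

    # Rewrite poetry.lock in the current Poetry's canonical format
    # without changing any resolved versions.
    poetry lock --no-update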
@@ -2176,7 +2195,10 @@ aniso8601 = [
    {file = "aniso8601-9.0.1-py2.py3-none-any.whl", hash = "sha256:1d2b7ef82963909e93c4f24ce48d4de9e66009a21bf1c1e1c85bdd0812fe412f"},
    {file = "aniso8601-9.0.1.tar.gz", hash = "sha256:72e3117667eedf66951bb2d93f4296a56b94b078a8a95905a052611fb3f1b973"},
]
apscheduler = []
apscheduler = [
    {file = "APScheduler-3.9.1-py2.py3-none-any.whl", hash = "sha256:ddc25a0ddd899de44d7f451f4375fb971887e65af51e41e5dcf681f59b8b2c9a"},
    {file = "APScheduler-3.9.1.tar.gz", hash = "sha256:65e6574b6395498d371d045f2a8a7e4f7d50c6ad21ef7313d15b1c7cf20df1e3"},
]
astroid = [
    {file = "astroid-2.11.6-py3-none-any.whl", hash = "sha256:ba33a82a9a9c06a5ceed98180c5aab16e29c285b828d94696bf32d6015ea82a9"},
    {file = "astroid-2.11.6.tar.gz", hash = "sha256:4f933d0bf5e408b03a6feb5d23793740c27e07340605f236496cd6ce552043d6"},

@@ -2218,7 +2240,31 @@ billiard = [
    {file = "billiard-3.6.4.0-py3-none-any.whl", hash = "sha256:87103ea78fa6ab4d5c751c4909bcff74617d985de7fa8b672cf8618afd5a875b"},
    {file = "billiard-3.6.4.0.tar.gz", hash = "sha256:299de5a8da28a783d51b197d496bef4f1595dd023a93a4f59dde1886ae905547"},
]
black = []
black = [
    {file = "black-22.6.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:f586c26118bc6e714ec58c09df0157fe2d9ee195c764f630eb0d8e7ccce72e69"},
    {file = "black-22.6.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:b270a168d69edb8b7ed32c193ef10fd27844e5c60852039599f9184460ce0807"},
    {file = "black-22.6.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:6797f58943fceb1c461fb572edbe828d811e719c24e03375fd25170ada53825e"},
    {file = "black-22.6.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c85928b9d5f83b23cee7d0efcb310172412fbf7cb9d9ce963bd67fd141781def"},
    {file = "black-22.6.0-cp310-cp310-win_amd64.whl", hash = "sha256:f6fe02afde060bbeef044af7996f335fbe90b039ccf3f5eb8f16df8b20f77666"},
    {file = "black-22.6.0-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:cfaf3895a9634e882bf9d2363fed5af8888802d670f58b279b0bece00e9a872d"},
    {file = "black-22.6.0-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:94783f636bca89f11eb5d50437e8e17fbc6a929a628d82304c80fa9cd945f256"},
    {file = "black-22.6.0-cp36-cp36m-win_amd64.whl", hash = "sha256:2ea29072e954a4d55a2ff58971b83365eba5d3d357352a07a7a4df0d95f51c78"},
    {file = "black-22.6.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:e439798f819d49ba1c0bd9664427a05aab79bfba777a6db94fd4e56fae0cb849"},
    {file = "black-22.6.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:187d96c5e713f441a5829e77120c269b6514418f4513a390b0499b0987f2ff1c"},
    {file = "black-22.6.0-cp37-cp37m-win_amd64.whl", hash = "sha256:074458dc2f6e0d3dab7928d4417bb6957bb834434516f21514138437accdbe90"},
    {file = "black-22.6.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:a218d7e5856f91d20f04e931b6f16d15356db1c846ee55f01bac297a705ca24f"},
    {file = "black-22.6.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:568ac3c465b1c8b34b61cd7a4e349e93f91abf0f9371eda1cf87194663ab684e"},
    {file = "black-22.6.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:6c1734ab264b8f7929cef8ae5f900b85d579e6cbfde09d7387da8f04771b51c6"},
    {file = "black-22.6.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c9a3ac16efe9ec7d7381ddebcc022119794872abce99475345c5a61aa18c45ad"},
    {file = "black-22.6.0-cp38-cp38-win_amd64.whl", hash = "sha256:b9fd45787ba8aa3f5e0a0a98920c1012c884622c6c920dbe98dbd05bc7c70fbf"},
    {file = "black-22.6.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:7ba9be198ecca5031cd78745780d65a3f75a34b2ff9be5837045dce55db83d1c"},
    {file = "black-22.6.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:a3db5b6409b96d9bd543323b23ef32a1a2b06416d525d27e0f67e74f1446c8f2"},
    {file = "black-22.6.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:560558527e52ce8afba936fcce93a7411ab40c7d5fe8c2463e279e843c0328ee"},
    {file = "black-22.6.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b154e6bbde1e79ea3260c4b40c0b7b3109ffcdf7bc4ebf8859169a6af72cd70b"},
    {file = "black-22.6.0-cp39-cp39-win_amd64.whl", hash = "sha256:4af5bc0e1f96be5ae9bd7aaec219c901a94d6caa2484c21983d043371c733fc4"},
    {file = "black-22.6.0-py3-none-any.whl", hash = "sha256:ac609cf8ef5e7115ddd07d85d988d074ed00e10fbc3445aee393e70164a2219c"},
    {file = "black-22.6.0.tar.gz", hash = "sha256:6c6d39e28aed379aec40da1c65434c77d75e65bb59a1e1c283de545fb4e7c6c9"},
]
blinker = [
    {file = "blinker-1.4.tar.gz", hash = "sha256:471aee25f3992bd325afa3772f1063dbdbbca947a041b8b89466dc00d606f8b6"},
]

@@ -2290,7 +2336,10 @@ charset-normalizer = [
    {file = "charset-normalizer-2.0.12.tar.gz", hash = "sha256:2857e29ff0d34db842cd7ca3230549d1a697f96ee6d3fb071cfa6c7393832597"},
    {file = "charset_normalizer-2.0.12-py3-none-any.whl", hash = "sha256:6881edbebdb17b39b4eaaa821b438bf6eddffb4468cf344f09f89def34a8b1df"},
]
classify-imports = []
classify-imports = [
    {file = "classify_imports-4.1.0-py2.py3-none-any.whl", hash = "sha256:45436d3c4c886ca9092a2c90551b392ba120360e7a782574169ddeb866bbc08a"},
    {file = "classify_imports-4.1.0.tar.gz", hash = "sha256:69ddc4320690c26aa8baa66bf7e0fa0eecfda49d99cf71a59dee0b57dac82616"},
]
click = [
    {file = "click-8.1.3-py3-none-any.whl", hash = "sha256:bb4d8133cb15a609f44e8213d9b391b0809795062913b383c62be0ee95b1db48"},
    {file = "click-8.1.3.tar.gz", hash = "sha256:7682dc8afb30297001674575ea00d1814d808d6a36af415a82bd481d37ba7b8e"},

@@ -2319,8 +2368,62 @@ configparser = [
    {file = "configparser-5.2.0-py3-none-any.whl", hash = "sha256:e8b39238fb6f0153a069aa253d349467c3c4737934f253ef6abac5fe0eca1e5d"},
    {file = "configparser-5.2.0.tar.gz", hash = "sha256:1b35798fdf1713f1c3139016cfcbc461f09edbf099d1fb658d4b7479fcaa3daa"},
]
connexion = []
coverage = []
connexion = [
    {file = "connexion-2.14.1-py2.py3-none-any.whl", hash = "sha256:f343717241b4c4802a694c38fee66fb1693c897fe4ea5a957fa9b3b07caf6394"},
    {file = "connexion-2.14.1.tar.gz", hash = "sha256:99aa5781e70a7b94f8ffae8cf89f309d49cdb811bbd65a8e2f2546f3b19a01e6"},
]
coverage = [
    {file = "coverage-6.4.4-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:e7b4da9bafad21ea45a714d3ea6f3e1679099e420c8741c74905b92ee9bfa7cc"},
    {file = "coverage-6.4.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:fde17bc42e0716c94bf19d92e4c9f5a00c5feb401f5bc01101fdf2a8b7cacf60"},
    {file = "coverage-6.4.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cdbb0d89923c80dbd435b9cf8bba0ff55585a3cdb28cbec65f376c041472c60d"},
    {file = "coverage-6.4.4-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:67f9346aeebea54e845d29b487eb38ec95f2ecf3558a3cffb26ee3f0dcc3e760"},
    {file = "coverage-6.4.4-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:42c499c14efd858b98c4e03595bf914089b98400d30789511577aa44607a1b74"},
    {file = "coverage-6.4.4-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:c35cca192ba700979d20ac43024a82b9b32a60da2f983bec6c0f5b84aead635c"},
    {file = "coverage-6.4.4-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:9cc4f107009bca5a81caef2fca843dbec4215c05e917a59dec0c8db5cff1d2aa"},
    {file = "coverage-6.4.4-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:5f444627b3664b80d078c05fe6a850dd711beeb90d26731f11d492dcbadb6973"},
    {file = "coverage-6.4.4-cp310-cp310-win32.whl", hash = "sha256:66e6df3ac4659a435677d8cd40e8eb1ac7219345d27c41145991ee9bf4b806a0"},
    {file = "coverage-6.4.4-cp310-cp310-win_amd64.whl", hash = "sha256:35ef1f8d8a7a275aa7410d2f2c60fa6443f4a64fae9be671ec0696a68525b875"},
    {file = "coverage-6.4.4-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:c1328d0c2f194ffda30a45f11058c02410e679456276bfa0bbe0b0ee87225fac"},
    {file = "coverage-6.4.4-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:61b993f3998ee384935ee423c3d40894e93277f12482f6e777642a0141f55782"},
    {file = "coverage-6.4.4-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d5dd4b8e9cd0deb60e6fcc7b0647cbc1da6c33b9e786f9c79721fd303994832f"},
    {file = "coverage-6.4.4-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7026f5afe0d1a933685d8f2169d7c2d2e624f6255fb584ca99ccca8c0e966fd7"},
    {file = "coverage-6.4.4-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:9c7b9b498eb0c0d48b4c2abc0e10c2d78912203f972e0e63e3c9dc21f15abdaa"},
    {file = "coverage-6.4.4-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:ee2b2fb6eb4ace35805f434e0f6409444e1466a47f620d1d5763a22600f0f892"},
    {file = "coverage-6.4.4-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:ab066f5ab67059d1f1000b5e1aa8bbd75b6ed1fc0014559aea41a9eb66fc2ce0"},
    {file = "coverage-6.4.4-cp311-cp311-win32.whl", hash = "sha256:9d6e1f3185cbfd3d91ac77ea065d85d5215d3dfa45b191d14ddfcd952fa53796"},
    {file = "coverage-6.4.4-cp311-cp311-win_amd64.whl", hash = "sha256:e3d3c4cc38b2882f9a15bafd30aec079582b819bec1b8afdbde8f7797008108a"},
    {file = "coverage-6.4.4-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:a095aa0a996ea08b10580908e88fbaf81ecf798e923bbe64fb98d1807db3d68a"},
    {file = "coverage-6.4.4-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ef6f44409ab02e202b31a05dd6666797f9de2aa2b4b3534e9d450e42dea5e817"},
    {file = "coverage-6.4.4-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4b7101938584d67e6f45f0015b60e24a95bf8dea19836b1709a80342e01b472f"},
    {file = "coverage-6.4.4-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:14a32ec68d721c3d714d9b105c7acf8e0f8a4f4734c811eda75ff3718570b5e3"},
    {file = "coverage-6.4.4-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:6a864733b22d3081749450466ac80698fe39c91cb6849b2ef8752fd7482011f3"},
    {file = "coverage-6.4.4-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:08002f9251f51afdcc5e3adf5d5d66bb490ae893d9e21359b085f0e03390a820"},
    {file = "coverage-6.4.4-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:a3b2752de32c455f2521a51bd3ffb53c5b3ae92736afde67ce83477f5c1dd928"},
    {file = "coverage-6.4.4-cp37-cp37m-win32.whl", hash = "sha256:f855b39e4f75abd0dfbcf74a82e84ae3fc260d523fcb3532786bcbbcb158322c"},
    {file = "coverage-6.4.4-cp37-cp37m-win_amd64.whl", hash = "sha256:ee6ae6bbcac0786807295e9687169fba80cb0617852b2fa118a99667e8e6815d"},
    {file = "coverage-6.4.4-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:564cd0f5b5470094df06fab676c6d77547abfdcb09b6c29c8a97c41ad03b103c"},
    {file = "coverage-6.4.4-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:cbbb0e4cd8ddcd5ef47641cfac97d8473ab6b132dd9a46bacb18872828031685"},
    {file = "coverage-6.4.4-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6113e4df2fa73b80f77663445be6d567913fb3b82a86ceb64e44ae0e4b695de1"},
    {file = "coverage-6.4.4-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8d032bfc562a52318ae05047a6eb801ff31ccee172dc0d2504614e911d8fa83e"},
    {file = "coverage-6.4.4-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e431e305a1f3126477abe9a184624a85308da8edf8486a863601d58419d26ffa"},
    {file = "coverage-6.4.4-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:cf2afe83a53f77aec067033199797832617890e15bed42f4a1a93ea24794ae3e"},
    {file = "coverage-6.4.4-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:783bc7c4ee524039ca13b6d9b4186a67f8e63d91342c713e88c1865a38d0892a"},
    {file = "coverage-6.4.4-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:ff934ced84054b9018665ca3967fc48e1ac99e811f6cc99ea65978e1d384454b"},
    {file = "coverage-6.4.4-cp38-cp38-win32.whl", hash = "sha256:e1fabd473566fce2cf18ea41171d92814e4ef1495e04471786cbc943b89a3781"},
    {file = "coverage-6.4.4-cp38-cp38-win_amd64.whl", hash = "sha256:4179502f210ebed3ccfe2f78bf8e2d59e50b297b598b100d6c6e3341053066a2"},
    {file = "coverage-6.4.4-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:98c0b9e9b572893cdb0a00e66cf961a238f8d870d4e1dc8e679eb8bdc2eb1b86"},
    {file = "coverage-6.4.4-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:fc600f6ec19b273da1d85817eda339fb46ce9eef3e89f220055d8696e0a06908"},
    {file = "coverage-6.4.4-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7a98d6bf6d4ca5c07a600c7b4e0c5350cd483c85c736c522b786be90ea5bac4f"},
    {file = "coverage-6.4.4-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:01778769097dbd705a24e221f42be885c544bb91251747a8a3efdec6eb4788f2"},
    {file = "coverage-6.4.4-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dfa0b97eb904255e2ab24166071b27408f1f69c8fbda58e9c0972804851e0558"},
    {file = "coverage-6.4.4-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:fcbe3d9a53e013f8ab88734d7e517eb2cd06b7e689bedf22c0eb68db5e4a0a19"},
    {file = "coverage-6.4.4-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:15e38d853ee224e92ccc9a851457fb1e1f12d7a5df5ae44544ce7863691c7a0d"},
    {file = "coverage-6.4.4-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:6913dddee2deff8ab2512639c5168c3e80b3ebb0f818fed22048ee46f735351a"},
    {file = "coverage-6.4.4-cp39-cp39-win32.whl", hash = "sha256:354df19fefd03b9a13132fa6643527ef7905712109d9c1c1903f2133d3a4e145"},
    {file = "coverage-6.4.4-cp39-cp39-win_amd64.whl", hash = "sha256:1238b08f3576201ebf41f7c20bf59baa0d05da941b123c6656e42cdb668e9827"},
    {file = "coverage-6.4.4-pp36.pp37.pp38-none-any.whl", hash = "sha256:f67cf9f406cf0d2f08a3515ce2db5b82625a7257f88aad87904674def6ddaec1"},
    {file = "coverage-6.4.4.tar.gz", hash = "sha256:e16c45b726acb780e1e6f88b286d3c10b3914ab03438f32117c4aa52d7f30d58"},
]
darglint = [
    {file = "darglint-1.8.1-py3-none-any.whl", hash = "sha256:5ae11c259c17b0701618a20c3da343a3eb98b3bc4b5a83d31cdd94f5ebdced8d"},
    {file = "darglint-1.8.1.tar.gz", hash = "sha256:080d5106df149b199822e7ee7deb9c012b49891538f14a11be681044f0bb20da"},
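In the hunks above, entries that were empty lists (apscheduler = [], black = [], connexion = [], coverage = []) gain full file-and-hash tables, which suggests the artifact hashes were backfilled when the lockfile was regenerated. A hedged spot-check sketch (package name and version come from the black hunk above; the commands themselves are assumptions, not part of the commit):

    # Download one locked artifact and compare its sha256 against the lockfile entry.
    pip download black==22.6.0 --no-deps --dest /tmp/wheels
    pip hash /tmp/wheels/black-22.6.0-py3-none-any.whl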
@@ -2368,8 +2471,14 @@ flake8-polyfill = [
    {file = "flake8-polyfill-1.0.2.tar.gz", hash = "sha256:e44b087597f6da52ec6393a709e7108b2905317d0c0b744cdca6208e670d8eda"},
    {file = "flake8_polyfill-1.0.2-py2.py3-none-any.whl", hash = "sha256:12be6a34ee3ab795b19ca73505e7b55826d5f6ad7230d31b18e106400169b9e9"},
]
flake8-rst-docstrings = []
flask = []
flake8-rst-docstrings = [
    {file = "flake8-rst-docstrings-0.2.7.tar.gz", hash = "sha256:2740067ab9237559dd45a3434d8c987792c7b259ca563621a3b95efe201f5382"},
    {file = "flake8_rst_docstrings-0.2.7-py3-none-any.whl", hash = "sha256:5d56075dce360bcc9c6775bfe7cb431aa395de600ca7e8d40580a28d50b2a803"},
]
flask = [
    {file = "Flask-2.1.3-py3-none-any.whl", hash = "sha256:9013281a7402ad527f8fd56375164f3aa021ecfaff89bfe3825346c24f87e04c"},
    {file = "Flask-2.1.3.tar.gz", hash = "sha256:15972e5017df0575c3d6c090ba168b6db90259e620ac8d7ea813a396bad5b6cb"},
]
flask-admin = [
    {file = "Flask-Admin-1.6.0.tar.gz", hash = "sha256:424ffc79b7b0dfff051555686ea12e86e48dffacac14beaa319fb4502ac40988"},
]

@@ -2590,7 +2699,6 @@ lxml = [
    {file = "lxml-4.9.1-cp27-cp27m-win_amd64.whl", hash = "sha256:4cfbe42c686f33944e12f45a27d25a492cc0e43e1dc1da5d6a87cbcaf2e95627"},
    {file = "lxml-4.9.1-cp27-cp27mu-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:dad7b164905d3e534883281c050180afcf1e230c3d4a54e8038aa5cfcf312b84"},
    {file = "lxml-4.9.1-cp27-cp27mu-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:a614e4afed58c14254e67862456d212c4dcceebab2eaa44d627c2ca04bf86837"},
    {file = "lxml-4.9.1-cp310-cp310-macosx_10_15_universal2.whl", hash = "sha256:49a866923e69bc7da45a0565636243707c22752fc38f6b9d5c8428a86121022c"},
    {file = "lxml-4.9.1-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = "sha256:f9ced82717c7ec65a67667bb05865ffe38af0e835cdd78728f1209c8fffe0cad"},
    {file = "lxml-4.9.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_24_aarch64.whl", hash = "sha256:d9fc0bf3ff86c17348dfc5d322f627d78273eba545db865c3cd14b3f19e57fa5"},
    {file = "lxml-4.9.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:e5f66bdf0976ec667fc4594d2812a00b07ed14d1b44259d19a41ae3fff99f2b8"},

@@ -2786,7 +2894,10 @@ pbr = [
    {file = "pbr-5.9.0-py2.py3-none-any.whl", hash = "sha256:e547125940bcc052856ded43be8e101f63828c2d94239ffbe2b327ba3d5ccf0a"},
    {file = "pbr-5.9.0.tar.gz", hash = "sha256:e8dca2f4b43560edef58813969f52a56cef023146cbb8931626db80e6c1c4308"},
]
pep8-naming = []
pep8-naming = [
    {file = "pep8-naming-0.13.2.tar.gz", hash = "sha256:93eef62f525fd12a6f8c98f4dcc17fa70baae2f37fa1f73bec00e3e44392fa48"},
    {file = "pep8_naming-0.13.2-py3-none-any.whl", hash = "sha256:59e29e55c478db69cffbe14ab24b5bd2cd615c0413edf790d47d3fb7ba9a4e23"},
]
platformdirs = [
    {file = "platformdirs-2.5.2-py3-none-any.whl", hash = "sha256:027d8e83a2d7de06bbac4e5ef7e023c02b863d7ea5d079477e722bb41ab25788"},
    {file = "platformdirs-2.5.2.tar.gz", hash = "sha256:58c8abb07dcb441e6ee4b11d8df0ac856038f944ab98b7be6b27b2a3c7feef19"},

@@ -2795,7 +2906,10 @@ pluggy = [
    {file = "pluggy-1.0.0-py2.py3-none-any.whl", hash = "sha256:74134bbf457f031a36d68416e1509f34bd5ccc019f0bcc952c7b909d06b37bd3"},
    {file = "pluggy-1.0.0.tar.gz", hash = "sha256:4224373bacce55f955a878bf9cfa763c1e360858e330072059e10bad68531159"},
]
pre-commit = []
pre-commit = [
    {file = "pre_commit-2.20.0-py2.py3-none-any.whl", hash = "sha256:51a5ba7c480ae8072ecdb6933df22d2f812dc897d5fe848778116129a681aac7"},
    {file = "pre_commit-2.20.0.tar.gz", hash = "sha256:a978dac7bc9ec0bcee55c18a277d553b0f419d259dadb4b9418ff2d00eb43959"},
]
pre-commit-hooks = [
    {file = "pre_commit_hooks-4.3.0-py2.py3-none-any.whl", hash = "sha256:9ccaf7c98794659d345080ee1ea0256a55ae059675045eebdbbc17c0be8c7e4b"},
    {file = "pre_commit_hooks-4.3.0.tar.gz", hash = "sha256:fda598a4c834d030727e6a615722718b47510f4bed72df4c949f95ba9f5aaf88"},

@@ -2837,7 +2951,21 @@ py = [
    {file = "py-1.11.0-py2.py3-none-any.whl", hash = "sha256:607c53218732647dff4acdfcd50cb62615cedf612e72d1724fb1a0cc6405b378"},
    {file = "py-1.11.0.tar.gz", hash = "sha256:51c75c4126074b472f746a24399ad32f6053d1b34b68d2fa41e558e6f4a98719"},
]
pyasn1 = []
pyasn1 = [
    {file = "pyasn1-0.4.8-py2.4.egg", hash = "sha256:fec3e9d8e36808a28efb59b489e4528c10ad0f480e57dcc32b4de5c9d8c9fdf3"},
    {file = "pyasn1-0.4.8-py2.5.egg", hash = "sha256:0458773cfe65b153891ac249bcf1b5f8f320b7c2ce462151f8fa74de8934becf"},
    {file = "pyasn1-0.4.8-py2.6.egg", hash = "sha256:5c9414dcfede6e441f7e8f81b43b34e834731003427e5b09e4e00e3172a10f00"},
    {file = "pyasn1-0.4.8-py2.7.egg", hash = "sha256:6e7545f1a61025a4e58bb336952c5061697da694db1cae97b116e9c46abcf7c8"},
    {file = "pyasn1-0.4.8-py2.py3-none-any.whl", hash = "sha256:39c7e2ec30515947ff4e87fb6f456dfc6e84857d34be479c9d4a4ba4bf46aa5d"},
    {file = "pyasn1-0.4.8-py3.1.egg", hash = "sha256:78fa6da68ed2727915c4767bb386ab32cdba863caa7dbe473eaae45f9959da86"},
    {file = "pyasn1-0.4.8-py3.2.egg", hash = "sha256:08c3c53b75eaa48d71cf8c710312316392ed40899cb34710d092e96745a358b7"},
    {file = "pyasn1-0.4.8-py3.3.egg", hash = "sha256:03840c999ba71680a131cfaee6fab142e1ed9bbd9c693e285cc6aca0d555e576"},
    {file = "pyasn1-0.4.8-py3.4.egg", hash = "sha256:7ab8a544af125fb704feadb008c99a88805126fb525280b2270bb25cc1d78a12"},
    {file = "pyasn1-0.4.8-py3.5.egg", hash = "sha256:e89bf84b5437b532b0803ba5c9a5e054d21fec423a89952a74f87fa2c9b7bce2"},
    {file = "pyasn1-0.4.8-py3.6.egg", hash = "sha256:014c0e9976956a08139dc0712ae195324a75e142284d5f87f1a87ee1b068a359"},
    {file = "pyasn1-0.4.8-py3.7.egg", hash = "sha256:99fcc3c8d804d1bc6d9a099921e39d827026409a58f2a720dcdb89374ea0c776"},
    {file = "pyasn1-0.4.8.tar.gz", hash = "sha256:aef77c9fb94a3ac588e87841208bdec464471d9871bd5050a287cc9a475cd0ba"},
]
pycodestyle = [
    {file = "pycodestyle-2.8.0-py2.py3-none-any.whl", hash = "sha256:720f8b39dde8b293825e7ff02c475f3077124006db4f440dcbc9a20b76548a20"},
    {file = "pycodestyle-2.8.0.tar.gz", hash = "sha256:eddd5847ef438ea1c7870ca7eb78a9d47ce0cdb4851a5523949f2601d0cbbe7f"},

@@ -2913,7 +3041,10 @@ python-jose = [
    {file = "python-jose-3.3.0.tar.gz", hash = "sha256:55779b5e6ad599c6336191246e95eb2293a9ddebd555f796a65f838f07e5d78a"},
    {file = "python_jose-3.3.0-py2.py3-none-any.whl", hash = "sha256:9b1376b023f8b298536eedd47ae1089bcdb848f1535ab30555cd92002d78923a"},
]
python-keycloak = []
python-keycloak = [
    {file = "python-keycloak-2.5.0.tar.gz", hash = "sha256:b401d2c67dc1b9e2dbb3309ef2012c2d178584925dc14bd07f6bd2416e5e3ff8"},
    {file = "python_keycloak-2.5.0-py3-none-any.whl", hash = "sha256:ed1c1935ceaf5d7f928b1b3ab945130f7d54685e4b17da053dbc7bfee0c0271e"},
]
pytz = [
    {file = "pytz-2022.1-py2.py3-none-any.whl", hash = "sha256:e68985985296d9a66a881eb3193b0906246245294a881e7c8afe623866ac6a5c"},
    {file = "pytz-2022.1.tar.gz", hash = "sha256:1e760e2fe6a8163bc0b3d9a19c4f84342afa0a2affebfaa84b01b978a02ecaa7"},

@@ -2922,7 +3053,10 @@ pytz-deprecation-shim = [
    {file = "pytz_deprecation_shim-0.1.0.post0-py2.py3-none-any.whl", hash = "sha256:8314c9692a636c8eb3bda879b9f119e350e93223ae83e70e80c31675a0fdc1a6"},
    {file = "pytz_deprecation_shim-0.1.0.post0.tar.gz", hash = "sha256:af097bae1b616dde5c5744441e2ddc69e74dfdcb0c263129610d85b87445a59d"},
]
pyupgrade = []
pyupgrade = [
    {file = "pyupgrade-2.37.1-py2.py3-none-any.whl", hash = "sha256:dd2a32628d6d2a7dd6c086d98420e234b9e60c1e1d4c55431578491703e762a5"},
    {file = "pyupgrade-2.37.1.tar.gz", hash = "sha256:3d9cbd88507a0f3d7397c46870617f0d073d61401c451c08a06763d6235d9e7d"},
]
pyyaml = [
    {file = "PyYAML-6.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:d4db7c7aef085872ef65a8fd7d6d09a14ae91f691dec3e87ee5ee0539d516f53"},
    {file = "PyYAML-6.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:9df7ed3b3d2e0ecfe09e14741b857df43adb5a3ddadc919a2d94fbdf78fea53c"},

@@ -2931,6 +3065,13 @@ pyyaml = [
    {file = "PyYAML-6.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:f84fbc98b019fef2ee9a1cb3ce93e3187a6df0b2538a651bfb890254ba9f90b5"},
    {file = "PyYAML-6.0-cp310-cp310-win32.whl", hash = "sha256:2cd5df3de48857ed0544b34e2d40e9fac445930039f3cfe4bcc592a1f836d513"},
|
||||
{file = "PyYAML-6.0-cp310-cp310-win_amd64.whl", hash = "sha256:daf496c58a8c52083df09b80c860005194014c3698698d1a57cbcfa182142a3a"},
|
||||
{file = "PyYAML-6.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:d4b0ba9512519522b118090257be113b9468d804b19d63c71dbcf4a48fa32358"},
|
||||
{file = "PyYAML-6.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:81957921f441d50af23654aa6c5e5eaf9b06aba7f0a19c18a538dc7ef291c5a1"},
|
||||
{file = "PyYAML-6.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:afa17f5bc4d1b10afd4466fd3a44dc0e245382deca5b3c353d8b757f9e3ecb8d"},
|
||||
{file = "PyYAML-6.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:dbad0e9d368bb989f4515da330b88a057617d16b6a8245084f1b05400f24609f"},
|
||||
{file = "PyYAML-6.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:432557aa2c09802be39460360ddffd48156e30721f5e8d917f01d31694216782"},
|
||||
{file = "PyYAML-6.0-cp311-cp311-win32.whl", hash = "sha256:bfaef573a63ba8923503d27530362590ff4f576c626d86a9fed95822a8255fd7"},
|
||||
{file = "PyYAML-6.0-cp311-cp311-win_amd64.whl", hash = "sha256:01b45c0191e6d66c470b6cf1b9531a771a83c1c4208272ead47a3ae4f2f603bf"},
|
||||
{file = "PyYAML-6.0-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:897b80890765f037df3403d22bab41627ca8811ae55e9a722fd0392850ec4d86"},
|
||||
{file = "PyYAML-6.0-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:50602afada6d6cbfad699b0c7bb50d5ccffa7e46a3d738092afddc1f9758427f"},
|
||||
{file = "PyYAML-6.0-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:48c346915c114f5fdb3ead70312bd042a953a8ce5c7106d5bfb1a5254e47da92"},
|
||||
|
@ -3034,22 +3175,32 @@ regex = [
|
|||
{file = "regex-2022.3.2-cp39-cp39-win_amd64.whl", hash = "sha256:9efa41d1527b366c88f265a227b20bcec65bda879962e3fc8a2aee11e81266d7"},
|
||||
{file = "regex-2022.3.2.tar.gz", hash = "sha256:79e5af1ff258bc0fe0bdd6f69bc4ae33935a898e3cbefbbccf22e88a27fa053b"},
|
||||
]
|
||||
reorder-python-imports = []
|
||||
reorder-python-imports = [
|
||||
{file = "reorder_python_imports-3.8.1-py2.py3-none-any.whl", hash = "sha256:bfa44fa719595766c18c59ff552e6e02c071f896425746f94b2a34f4b93e217c"},
|
||||
{file = "reorder_python_imports-3.8.1.tar.gz", hash = "sha256:33a981a477875ac79588dbc6f80d7db85eccd2f5529ff289fe51283afd5affb5"},
|
||||
]
|
||||
requests = [
|
||||
{file = "requests-2.28.0-py3-none-any.whl", hash = "sha256:bc7861137fbce630f17b03d3ad02ad0bf978c844f3536d0edda6499dafce2b6f"},
|
||||
{file = "requests-2.28.0.tar.gz", hash = "sha256:d568723a7ebd25875d8d1eaf5dfa068cd2fc8194b2e483d7b1f7c81918dbec6b"},
|
||||
]
|
||||
requests-toolbelt = []
|
||||
requests-toolbelt = [
|
||||
{file = "requests-toolbelt-0.9.1.tar.gz", hash = "sha256:968089d4584ad4ad7c171454f0a5c6dac23971e9472521ea3b6d49d610aa6fc0"},
|
||||
{file = "requests_toolbelt-0.9.1-py2.py3-none-any.whl", hash = "sha256:380606e1d10dc85c3bd47bf5a6095f815ec007be7a8b69c878507068df059e6f"},
|
||||
]
|
||||
restructuredtext-lint = [
|
||||
{file = "restructuredtext_lint-1.4.0.tar.gz", hash = "sha256:1b235c0c922341ab6c530390892eb9e92f90b9b75046063e047cacfb0f050c45"},
|
||||
]
|
||||
rsa = []
|
||||
rsa = [
|
||||
{file = "rsa-4.8-py3-none-any.whl", hash = "sha256:95c5d300c4e879ee69708c428ba566c59478fd653cc3a22243eeb8ed846950bb"},
|
||||
{file = "rsa-4.8.tar.gz", hash = "sha256:5c6bd9dc7a543b7fe4304a631f8a8a3b674e2bbfc49c2ae96200cdbe55df6b17"},
|
||||
]
|
||||
"ruamel.yaml" = [
|
||||
{file = "ruamel.yaml-0.17.21-py3-none-any.whl", hash = "sha256:742b35d3d665023981bd6d16b3d24248ce5df75fdb4e2924e93a05c1f8b61ca7"},
|
||||
{file = "ruamel.yaml-0.17.21.tar.gz", hash = "sha256:8b7ce697a2f212752a35c1ac414471dc16c424c9573be4926b56ff3f5d23b7af"},
|
||||
]
|
||||
"ruamel.yaml.clib" = [
|
||||
{file = "ruamel.yaml.clib-0.2.6-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:6e7be2c5bcb297f5b82fee9c665eb2eb7001d1050deaba8471842979293a80b0"},
|
||||
{file = "ruamel.yaml.clib-0.2.6-cp310-cp310-manylinux2014_aarch64.whl", hash = "sha256:066f886bc90cc2ce44df8b5f7acfc6a7e2b2e672713f027136464492b0c34d7c"},
|
||||
{file = "ruamel.yaml.clib-0.2.6-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:221eca6f35076c6ae472a531afa1c223b9c29377e62936f61bc8e6e8bdc5f9e7"},
|
||||
{file = "ruamel.yaml.clib-0.2.6-cp310-cp310-win32.whl", hash = "sha256:1070ba9dd7f9370d0513d649420c3b362ac2d687fe78c6e888f5b12bf8bc7bee"},
|
||||
{file = "ruamel.yaml.clib-0.2.6-cp310-cp310-win_amd64.whl", hash = "sha256:77df077d32921ad46f34816a9a16e6356d8100374579bc35e15bab5d4e9377de"},
|
||||
|
@ -3059,18 +3210,22 @@ rsa = []
|
|||
{file = "ruamel.yaml.clib-0.2.6-cp35-cp35m-win_amd64.whl", hash = "sha256:de9c6b8a1ba52919ae919f3ae96abb72b994dd0350226e28f3686cb4f142165c"},
|
||||
{file = "ruamel.yaml.clib-0.2.6-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:d67f273097c368265a7b81e152e07fb90ed395df6e552b9fa858c6d2c9f42502"},
|
||||
{file = "ruamel.yaml.clib-0.2.6-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:72a2b8b2ff0a627496aad76f37a652bcef400fd861721744201ef1b45199ab78"},
|
||||
{file = "ruamel.yaml.clib-0.2.6-cp36-cp36m-manylinux2014_aarch64.whl", hash = "sha256:d3c620a54748a3d4cf0bcfe623e388407c8e85a4b06b8188e126302bcab93ea8"},
|
||||
{file = "ruamel.yaml.clib-0.2.6-cp36-cp36m-win32.whl", hash = "sha256:9efef4aab5353387b07f6b22ace0867032b900d8e91674b5d8ea9150db5cae94"},
|
||||
{file = "ruamel.yaml.clib-0.2.6-cp36-cp36m-win_amd64.whl", hash = "sha256:846fc8336443106fe23f9b6d6b8c14a53d38cef9a375149d61f99d78782ea468"},
|
||||
{file = "ruamel.yaml.clib-0.2.6-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:0847201b767447fc33b9c235780d3aa90357d20dd6108b92be544427bea197dd"},
|
||||
{file = "ruamel.yaml.clib-0.2.6-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:78988ed190206672da0f5d50c61afef8f67daa718d614377dcd5e3ed85ab4a99"},
|
||||
{file = "ruamel.yaml.clib-0.2.6-cp37-cp37m-manylinux2014_aarch64.whl", hash = "sha256:210c8fcfeff90514b7133010bf14e3bad652c8efde6b20e00c43854bf94fa5a6"},
|
||||
{file = "ruamel.yaml.clib-0.2.6-cp37-cp37m-win32.whl", hash = "sha256:a49e0161897901d1ac9c4a79984b8410f450565bbad64dbfcbf76152743a0cdb"},
|
||||
{file = "ruamel.yaml.clib-0.2.6-cp37-cp37m-win_amd64.whl", hash = "sha256:bf75d28fa071645c529b5474a550a44686821decebdd00e21127ef1fd566eabe"},
|
||||
{file = "ruamel.yaml.clib-0.2.6-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:a32f8d81ea0c6173ab1b3da956869114cae53ba1e9f72374032e33ba3118c233"},
|
||||
{file = "ruamel.yaml.clib-0.2.6-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:7f7ecb53ae6848f959db6ae93bdff1740e651809780822270eab111500842a84"},
|
||||
{file = "ruamel.yaml.clib-0.2.6-cp38-cp38-manylinux2014_aarch64.whl", hash = "sha256:61bc5e5ca632d95925907c569daa559ea194a4d16084ba86084be98ab1cec1c6"},
|
||||
{file = "ruamel.yaml.clib-0.2.6-cp38-cp38-win32.whl", hash = "sha256:89221ec6d6026f8ae859c09b9718799fea22c0e8da8b766b0b2c9a9ba2db326b"},
|
||||
{file = "ruamel.yaml.clib-0.2.6-cp38-cp38-win_amd64.whl", hash = "sha256:31ea73e564a7b5fbbe8188ab8b334393e06d997914a4e184975348f204790277"},
|
||||
{file = "ruamel.yaml.clib-0.2.6-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:dc6a613d6c74eef5a14a214d433d06291526145431c3b964f5e16529b1842bed"},
|
||||
{file = "ruamel.yaml.clib-0.2.6-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:1866cf2c284a03b9524a5cc00daca56d80057c5ce3cdc86a52020f4c720856f0"},
|
||||
{file = "ruamel.yaml.clib-0.2.6-cp39-cp39-manylinux2014_aarch64.whl", hash = "sha256:1b4139a6ffbca8ef60fdaf9b33dec05143ba746a6f0ae0f9d11d38239211d335"},
|
||||
{file = "ruamel.yaml.clib-0.2.6-cp39-cp39-win32.whl", hash = "sha256:3fb9575a5acd13031c57a62cc7823e5d2ff8bc3835ba4d94b921b4e6ee664104"},
|
||||
{file = "ruamel.yaml.clib-0.2.6-cp39-cp39-win_amd64.whl", hash = "sha256:825d5fccef6da42f3c8eccd4281af399f21c02b32d98e113dbc631ea6a6ecbc7"},
|
||||
{file = "ruamel.yaml.clib-0.2.6.tar.gz", hash = "sha256:4ff604ce439abb20794f05613c374759ce10e3595d1867764dd1ae675b85acbd"},
|
||||
|
@ -3079,7 +3234,14 @@ safety = [
|
|||
{file = "safety-2.0.0-py3-none-any.whl", hash = "sha256:77cebdd128ce47b941e68a1b3bbc29fbbd2b9e98d11f179c5def64c1d05da295"},
|
||||
{file = "safety-2.0.0.tar.gz", hash = "sha256:d739d00a9e4203cfaba34540c822a73ca1d327159ed7776b3dce09391f81c35d"},
|
||||
]
|
||||
sentry-sdk = []
|
||||
sentry-sdk = [
|
||||
{file = "sentry-sdk-1.9.0.tar.gz", hash = "sha256:f185c53496d79b280fe5d9d21e6572aee1ab802d3354eb12314d216cfbaa8d30"},
|
||||
{file = "sentry_sdk-1.9.0-py2.py3-none-any.whl", hash = "sha256:60b13757d6344a94bf0ccb3c0a006c4de77daab09871b30fbbd05d5ec24e54fb"},
|
||||
]
|
||||
setuptools = [
|
||||
{file = "setuptools-65.3.0-py3-none-any.whl", hash = "sha256:2e24e0bec025f035a2e72cdd1961119f557d78ad331bb00ff82efb2ab8da8e82"},
|
||||
{file = "setuptools-65.3.0.tar.gz", hash = "sha256:7732871f4f7fa58fb6bdcaeadb0161b2bd046c85905dbaa066bdcbcc81953b57"},
|
||||
]
|
||||
six = [
|
||||
{file = "six-1.16.0-py2.py3-none-any.whl", hash = "sha256:8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254"},
|
||||
{file = "six-1.16.0.tar.gz", hash = "sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926"},
|
||||
|
@ -3100,7 +3262,10 @@ sphinx = [
|
|||
{file = "Sphinx-5.0.2-py3-none-any.whl", hash = "sha256:d3e57663eed1d7c5c50895d191fdeda0b54ded6f44d5621b50709466c338d1e8"},
|
||||
{file = "Sphinx-5.0.2.tar.gz", hash = "sha256:b18e978ea7565720f26019c702cd85c84376e948370f1cd43d60265010e1c7b0"},
|
||||
]
|
||||
sphinx-autoapi = []
|
||||
sphinx-autoapi = [
|
||||
{file = "sphinx-autoapi-1.9.0.tar.gz", hash = "sha256:c897ea337df16ad0cde307cbdfe2bece207788dde1587fa4fc8b857d1fc5dcba"},
|
||||
{file = "sphinx_autoapi-1.9.0-py2.py3-none-any.whl", hash = "sha256:d217953273b359b699d8cb81a5a72985a3e6e15cfe3f703d9a3c201ffc30849b"},
|
||||
]
|
||||
sphinx-autobuild = [
|
||||
{file = "sphinx-autobuild-2021.3.14.tar.gz", hash = "sha256:de1ca3b66e271d2b5b5140c35034c89e47f263f2cd5db302c9217065f7443f05"},
|
||||
{file = "sphinx_autobuild-2021.3.14-py3-none-any.whl", hash = "sha256:8fe8cbfdb75db04475232f05187c776f46f6e9e04cacf1e49ce81bdac649ccac"},
|
||||
|
@ -3251,8 +3416,14 @@ types-pytz = [
|
|||
{file = "types-pytz-2022.1.1.tar.gz", hash = "sha256:4e7add70886dc2ee6ee7535c8184a26eeb0ac9dbafae9962cb882d74b9f67330"},
|
||||
{file = "types_pytz-2022.1.1-py3-none-any.whl", hash = "sha256:581467742f32f15fff1098698b11fd511057a2a8a7568d33b604083f2b03c24f"},
|
||||
]
|
||||
types-requests = []
|
||||
types-urllib3 = []
|
||||
types-requests = [
|
||||
{file = "types-requests-2.28.6.tar.gz", hash = "sha256:cf3383bbd79394bf051a0a9202d6831fa962f186f923c178f7c059e3424bd00e"},
|
||||
{file = "types_requests-2.28.6-py3-none-any.whl", hash = "sha256:d8d7607419cd4b41a7b9497e15e8c0bad78d50df43c48ad25bc526a11518c3a9"},
|
||||
]
|
||||
types-urllib3 = [
|
||||
{file = "types-urllib3-1.26.20.tar.gz", hash = "sha256:1fb6e2af519a7216a19dd6be8cd2ee787b402a754ccb4a13ca1c0e5b202aea5a"},
|
||||
{file = "types_urllib3-1.26.20-py3-none-any.whl", hash = "sha256:6249b6223226cb2012db3b4ff6945c9cb0e12ece9b24f5e29787c4f05028a979"},
|
||||
]
|
||||
typing-extensions = [
|
||||
{file = "typing_extensions-4.3.0-py3-none-any.whl", hash = "sha256:25642c956049920a5aa49edcdd6ab1e06d7e5d467fc00e0506c44ac86fbfca02"},
|
||||
{file = "typing_extensions-4.3.0.tar.gz", hash = "sha256:e6d2677a32f47fc7eb2795db1dd15c1f34eff616bcaf2cfb5e997f854fa1c4a6"},
|
||||
|
@ -3364,7 +3535,10 @@ wtforms = [
|
|||
{file = "WTForms-3.0.1-py3-none-any.whl", hash = "sha256:837f2f0e0ca79481b92884962b914eba4e72b7a2daaf1f939c890ed0124b834b"},
|
||||
{file = "WTForms-3.0.1.tar.gz", hash = "sha256:6b351bbb12dd58af57ffef05bc78425d08d1914e0fd68ee14143b7ade023c5bc"},
|
||||
]
|
||||
xdoctest = []
|
||||
xdoctest = [
|
||||
{file = "xdoctest-1.1.0-py3-none-any.whl", hash = "sha256:da330c4dacee51f3c785820bc743188fb6f7c64c5fa1c54bff8836b3cf23d69b"},
|
||||
{file = "xdoctest-1.1.0.tar.gz", hash = "sha256:0fd4fad7932f0a2f082dfdfb857dd6ca41603757586c39b1e5b4d333fc389f8a"},
|
||||
]
|
||||
zipp = [
|
||||
{file = "zipp-3.8.0-py3-none-any.whl", hash = "sha256:c4f6e5bbf48e74f7a38e7cc5b0480ff42b0ae5178957d564d18932525d5cf099"},
|
||||
{file = "zipp-3.8.0.tar.gz", hash = "sha256:56bf8aadb83c24db6c4b577e13de374ccfb67da2078beba1d037c17980bf43ad"},
|
||||
|
|
|

@@ -4,7 +4,10 @@ sonar.host.url=https://sonarcloud.io
sonar.python.version=3.9,3.10
sonar.python.coverage.reportPaths=coverage.xml
sonar.test.inclusions=tests
sonar.exclusions=migrations/**,bin/keycloak_test_server.py

# it's finding "bugs" we don't care about in the deprecated UI
sonar.exclusions=migrations/**,bin/keycloak_test_server.py,src/spiffworkflow_backend/routes/admin_blueprint/templates/*.html

sonar.coverage.exclusions=noxfile.py,conftest.py
# sonar.exclusions=crc/templates/*.html,docs/**,config/**,instance/**,migrations/**,postgres/**,readme_images/**,schema/**,templates/**
# sonar.sources=crc

@@ -5,6 +5,7 @@ from typing import Any
import connexion  # type: ignore
import flask.app
import flask.json
import sqlalchemy
from apscheduler.schedulers.background import BackgroundScheduler  # type: ignore
from flask_bpmn.api.api_error import api_error_blueprint
from flask_bpmn.models.db import db

@@ -17,7 +18,9 @@ from spiffworkflow_backend.config import setup_config
from spiffworkflow_backend.routes.admin_blueprint.admin_blueprint import admin_blueprint
from spiffworkflow_backend.routes.process_api_blueprint import process_api_blueprint
from spiffworkflow_backend.routes.user_blueprint import user_blueprint
from spiffworkflow_backend.services.message_service import MessageServiceWithAppContext
from spiffworkflow_backend.services.background_processing_service import (
    BackgroundProcessingService,
)


class MyJSONEncoder(flask.json.JSONEncoder):

@@ -27,6 +30,16 @@ class MyJSONEncoder(flask.json.JSONEncoder):
        """Default."""
        if hasattr(obj, "serialized"):
            return obj.serialized
        elif isinstance(obj, sqlalchemy.engine.row.Row):  # type: ignore
            return_dict = {}
            for row_key in obj.keys():
                row_value = obj[row_key]
                if hasattr(row_value, "__dict__"):
                    return_dict.update(row_value.__dict__)
                else:
                    return_dict.update({row_key: row_value})
            return_dict.pop("_sa_instance_state")
            return return_dict
        return super().default(obj)
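
The new default branch above flattens a SQLAlchemy result Row into a plain dict before JSON serialization: ORM objects riding along in the row are merged in via their __dict__, scalar columns are copied through, and SQLAlchemy's internal _sa_instance_state bookkeeping is dropped. A minimal standalone sketch of the same idea (the Row stand-in below is illustrative, not the real class):

import json


class Row:
    """Stand-in for sqlalchemy.engine.row.Row: mapping-style access by key."""

    def __init__(self, mapping):
        self._mapping = mapping

    def keys(self):
        return self._mapping.keys()

    def __getitem__(self, key):
        return self._mapping[key]


def row_to_dict(row):
    """Mirror what MyJSONEncoder.default does for Row objects."""
    return_dict = {}
    for row_key in row.keys():
        row_value = row[row_key]
        if hasattr(row_value, "__dict__"):
            # ORM entities in the row are merged in wholesale.
            return_dict.update(row_value.__dict__)
        else:
            # Scalar columns (e.g. an add_columns(username) value) pass through.
            return_dict[row_key] = row_value
    # ORM instances carry SQLAlchemy bookkeeping that is not JSON-safe.
    return_dict.pop("_sa_instance_state", None)
    return return_dict


print(json.dumps(row_to_dict(Row({"id": 1, "username": "ciara"}))))
# {"id": 1, "username": "ciara"}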

@@ -34,10 +47,15 @@ def start_scheduler(app: flask.app.Flask) -> None:
    """Start_scheduler."""
    scheduler = BackgroundScheduler()
    scheduler.add_job(
        MessageServiceWithAppContext(app).process_message_instances_with_app_context,
        BackgroundProcessingService(app).process_message_instances_with_app_context,
        "interval",
        seconds=10,
    )
    scheduler.add_job(
        BackgroundProcessingService(app).run,
        "interval",
        seconds=30,
    )
    scheduler.start()
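
Jobs registered with BackgroundScheduler run on scheduler threads outside any request, which is why both job callables wrap their work in app.app_context(). A small sketch of that pattern in isolation, assuming Flask and APScheduler are installed (the tick job is hypothetical, not part of this commit):

import time

import flask
from apscheduler.schedulers.background import BackgroundScheduler


def make_tick_job(app: flask.app.Flask):
    def tick() -> None:
        # Without an app context, extensions such as SQLAlchemy raise
        # "Working outside of application context" on scheduler threads.
        with app.app_context():
            app.logger.info("background tick")

    return tick


app = flask.Flask(__name__)
scheduler = BackgroundScheduler()
scheduler.add_job(make_tick_job(app), "interval", seconds=5)
scheduler.start()
time.sleep(11)  # let the job fire a couple of times
scheduler.shutdown()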

@@ -8,6 +8,8 @@ servers:
  - url: http://localhost:5000/v1.0
security:
  - jwt: ["secret"]
  # - oAuth2AuthCode:
  #   - read_email

paths:
  /login:

@@ -83,35 +85,47 @@ paths:
        "200":
          description: Logout Authenticated User

  /login_swagger:
    parameters:
      - name: uid
        in: query
        required: true
        description: The user we are authenticating
        schema:
          type: string
      - name: password
        in: query
        required: true
        description: The password for the user
        schema:
          type: string
          format: password
      - name: redirect_url
        in: query
        required: false
        schema:
          type: string
    get:
      security: []
      operationId: spiffworkflow_backend.routes.user.api_login
      summary: Authenticate user for API access
      tags:
        - Authentication
      responses:
        "304":
          description: Redirection to the hosted frontend with an auth_token header.
  # /login_api:
  #   parameters:
  #     - name: redirect_url
  #       in: query
  #       required: false
  #       schema:
  #         type: string
  #   get:
  #     security: []
  #     operationId: spiffworkflow_backend.routes.user.login_api
  #     summary: Authenticate user for API access
  #     tags:
  #       - Authentication
  #     responses:
  #       "304":
  #         description: Redirection to the hosted frontend with an auth_token header.
  # /login_api_return:
  #   parameters:
  #     - name: code
  #       in: query
  #       required: true
  #       schema:
  #         type: string
  #     - name: state
  #       in: query
  #       required: true
  #       schema:
  #         type: string
  #     - name: session_state
  #       in: query
  #       required: false
  #       schema:
  #         type: string
  #   get:
  #     security: []
  #     operationId: spiffworkflow_backend.routes.user.login_api_return
  #     tags:
  #       - Authentication
  #     responses:
  #       "200":
  #         description: Test Return Response

  /status:
    get:

@@ -127,7 +141,7 @@ paths:
          content:
            application/json:
              schema:
                $ref: "#components/schemas/OkTrue"
                $ref: "#/components/schemas/OkTrue"

  /process-groups:
    parameters:

@@ -208,7 +222,7 @@ paths:
          description: The process group was deleted.
    put:
      operationId: spiffworkflow_backend.routes.process_api_blueprint.process_group_update
      summary: Upates a single process group
      summary: Updates a single process group
      tags:
        - Process Groups
      requestBody:

@@ -224,11 +238,11 @@ paths:
              schema:
                $ref: "#/components/schemas/WorkflowSpecCategory"

  /process-groups/{process_group_id}/process-models:
  /process-models:
    parameters:
      - name: process_group_id
        in: path
        required: true
      - name: process_group_identifier
        in: query
        required: false
        description: The group containing the models we want to return
        schema:
          type: string

@@ -259,8 +273,6 @@ paths:
                type: array
                items:
                  $ref: "#/components/schemas/WorkflowSpec"

  /process-models:
    # process_model_add
    post:
      operationId: spiffworkflow_backend.routes.process_api_blueprint.process_model_add

@@ -315,7 +327,7 @@ paths:
          content:
            application/json:
              schema:
                $ref: "#components/schemas/File"
                $ref: "#/components/schemas/File"
    # get:
    #   operationId: spiffworkflow_backend.api.process_api_blueprint.get_files
    #   summary: Provide a list of workflow spec files for the given workflow_spec_id. IMPORTANT, only includes metadata, not the file content.

@@ -370,7 +382,7 @@ paths:
          content:
            application/json:
              schema:
                $ref: "#components/schemas/OkTrue"
                $ref: "#/components/schemas/OkTrue"
    # process model update
    put:
      operationId: spiffworkflow_backend.routes.process_api_blueprint.process_model_update

@@ -390,17 +402,17 @@ paths:
              schema:
                $ref: "#/components/schemas/WorkflowSpec"

  /process-models/{process_group_id}/{process_model_id}/process-instances:
  /process-instances:
    parameters:
      - name: process_group_id
        in: path
        required: true
      - name: process_group_identifier
        in: query
        required: false
        description: The unique id of an existing process group
        schema:
          type: string
      - name: process_model_id
        in: path
        required: true
      - name: process_model_identifier
        in: query
        required: false
        description: The unique id of an existing workflow specification.
        schema:
          type: string

@@ -461,6 +473,20 @@ paths:
                type: array
                items:
                  $ref: "#/components/schemas/Workflow"
  /process-models/{process_group_id}/{process_model_id}/process-instances:
    parameters:
      - name: process_group_id
        in: path
        required: true
        description: The unique id of an existing process group
        schema:
          type: string
      - name: process_model_id
        in: path
        required: true
        description: The unique id of an existing workflow specification.
        schema:
          type: string
    # process_instance_create
    post:
      operationId: spiffworkflow_backend.routes.process_api_blueprint.process_instance_create

@@ -519,7 +545,7 @@ paths:
          content:
            application/json:
              schema:
                $ref: "#components/schemas/OkTrue"
                $ref: "#/components/schemas/OkTrue"

  /process-models/{process_group_id}/{process_model_id}/process-instances/{process_instance_id}/run:
    parameters:

@@ -592,7 +618,7 @@ paths:
          content:
            application/json:
              schema:
                $ref: "#components/schemas/OkTrue"
                $ref: "#/components/schemas/OkTrue"

  /process-models/{process_group_id}/{process_model_id}/process-instances/reports:
    parameters:

@@ -645,7 +671,7 @@ paths:
          content:
            application/json:
              schema:
                $ref: "#components/schemas/OkTrue"
                $ref: "#/components/schemas/OkTrue"

  /process-models/{process_group_id}/{process_model_id}/process-instances/reports/{report_identifier}:
    parameters:

@@ -704,7 +730,7 @@ paths:
          content:
            application/json:
              schema:
                $ref: "#components/schemas/OkTrue"
                $ref: "#/components/schemas/OkTrue"
    delete:
      operationId: spiffworkflow_backend.routes.process_api_blueprint.process_instance_report_delete
      summary: Delete a process instance report

@@ -716,7 +742,7 @@ paths:
          content:
            application/json:
              schema:
                $ref: "#components/schemas/OkTrue"
                $ref: "#/components/schemas/OkTrue"

  /process-models/{process_group_id}/{process_model_id}/files/{file_name}:
    parameters:

@@ -750,7 +776,7 @@ paths:
          content:
            application/json:
              schema:
                $ref: "#components/schemas/File"
                $ref: "#/components/schemas/File"
    # process_model_file_update
    put:
      operationId: spiffworkflow_backend.routes.process_api_blueprint.process_model_file_update

@@ -774,7 +800,7 @@ paths:
          content:
            application/json:
              schema:
                $ref: "#components/schemas/OkTrue"
                $ref: "#/components/schemas/OkTrue"
    delete:
      operationId: spiffworkflow_backend.routes.process_api_blueprint.process_model_file_delete
      summary: Removes an existing process model file

@@ -786,7 +812,7 @@ paths:
          content:
            application/json:
              schema:
                $ref: "#components/schemas/OkTrue"
                $ref: "#/components/schemas/OkTrue"

  /tasks:
    parameters:

@@ -917,6 +943,39 @@ paths:
              schema:
                $ref: "#/components/schemas/OkTrue"

  /messages:
    parameters:
      - name: process_instance_id
        in: query
        required: false
        description: the id of the process instance
        schema:
          type: integer
      - name: page
        in: query
        required: false
        description: The page number to return. Defaults to page 1.
        schema:
          type: integer
      - name: per_page
        in: query
        required: false
        description: The number of models to show per page. Defaults to page 10.
        schema:
          type: integer
    get:
      tags:
        - Messages
      operationId: spiffworkflow_backend.routes.process_api_blueprint.message_instance_list
      summary: Get a list of message instances
      responses:
        "200":
          description: One task
          content:
            application/json:
              schema:
                $ref: "#/components/schemas/Workflow"

  /messages/{message_identifier}:
    parameters:
      - name: message_identifier

@@ -986,7 +1045,115 @@ paths:
          content:
            application/json:
              schema:
                type: array
                $ref: "#/components/schemas/ProcessInstanceLog"

  /secrets:
    post:
      operationId: spiffworkflow_backend.routes.process_api_blueprint.add_secret
      summary: Create a secret for a key and value
      tags:
        - Secrets
      requestBody:
        content:
          application/json:
            schema:
              $ref: "#/components/schemas/Secret"
      responses:
        "201":
          description: Secret created successfully
          content:
            application/json:
              schema:
                type: number

  /secrets/{key}:
    parameters:
      - name: key
        in: path
        required: true
        description: The key we are using
        schema:
          type: string
    get:
      operationId: spiffworkflow_backend.routes.process_api_blueprint.get_secret
      summary: Return a secret value for a key
      tags:
        - Secrets
      responses:
        "200":
          description: We return a secret
          content:
            application/json:
              schema:
                type: string
    delete:
      operationId: spiffworkflow_backend.routes.process_api_blueprint.delete_secret
      summary: Delete an existing secret
      tags:
        - Secrets
      responses:
        "204":
          description: The secret is deleted
        "401":
          description: Unauthorized to delete secret
        "404":
          description: Secret does not exist
    put:
      operationId: spiffworkflow_backend.routes.process_api_blueprint.update_secret
      summary: Modify an existing secret
      tags:
        - Secrets
      requestBody:
        content:
          application/json:
            schema:
              $ref: "#/components/schemas/Secret"
      responses:
        "200":
          description: Secret updated successfully
          content:
            application/json:
              schema:
                $ref: "#/components/schemas/Secret"
        "401":
          description: Unauthorized to update secret
        "404":
          description: Secret does not exist

  /secrets/allowed_process_paths:
    post:
      operationId: spiffworkflow_backend.routes.process_api_blueprint.add_allowed_process_path
      summary: Create an allowed process to a secret
      tags:
        - Secrets
      requestBody:
        content:
          application/json:
            schema:
              $ref: "#/components/schemas/SecretAllowedProcessPath"
      responses:
        "201":
          description: Allowed process created successfully
          content:
            application/json:
              schema:
                $ref: "#/components/schemas/SecretAllowedProcessPath"
  /secrets/allowed_process_paths/{allowed_process_path_id}:
    parameters:
      - name: allowed_process_path_id
        in: path
        required: true
        description: The id of the allowed process path to delete
        schema:
          type: integer
    delete:
      operationId: spiffworkflow_backend.routes.process_api_blueprint.delete_allowed_process_path
      summary: Delete an existing allowed process for a secret
      tags:
        - Secrets
      responses:
        "204":
          description: The allowed process is deleted.

components:
  securitySchemes:

@@ -996,6 +1163,18 @@ components:
      bearerFormat: JWT
      x-bearerInfoFunc: spiffworkflow_backend.routes.user.verify_token
      x-scopeValidateFunc: spiffworkflow_backend.routes.user.validate_scope

    oAuth2AuthCode:
      type: oauth2
      description: authenticate with openid server
      flows:
        authorizationCode:
          authorizationUrl: /v1.0/login_api
          tokenUrl: /v1.0/login_return
          scopes:
            read_email: read email
      x-tokenInfoFunc: spiffworkflow_backend.routes.user.get_scope

  schemas:
    OkTrue:
      properties:

@@ -1714,3 +1893,72 @@ components:
        untracked:
          type: array
          example: ["a_file.txt", "b_file.txt"]
    Secret:
      properties:
        key:
          description: The key of the secret we want to use
          type: string
          example: my_secret_key
          nullable: false
        value:
          description: The value associated with the key
          type: string
          example: my_super_secret_value
          nullable: false
        creator_user_id:
          description: The id of the logged in user that created this secret
          type: number
          example: 1
          nullable: false
        allowed_processes:
          description: The processes allowed to access this secret
          type: array
          items:
            $ref: "#/components/schemas/SecretAllowedProcessPath"
          nullable: true
    ProcessInstanceLog:
      properties:
        id:
          description: The id of the log
          type: number
          example: 1
          nullable: false
        process_instance_id:
          description: The id of the associated process instance
          type: number
          example: 2
          nullable: false
        bpmn_process_identifier:
          description: The id of the bpmn process element
          type: string
          example: Process_SimpleProcess
          nullable: false
        task:
          description: The task identifier
          type: number
          example: 1234567890
          nullable: false
        message:
          description: The msg returned in the log
          type: string
          example: Some message returned in the log
          nullable: true
        timestamp:
          description: The timestamp returned in the log
          type: number
          example: 123456789.12345
    SecretAllowedProcessPath:
      properties:
        id:
          description: The id of the allowed process path
          type: number
          example: 1
          nullable: true
        secret_id:
          description: The id of the secret associated with this allowed process path
          type: number
          example: 2
        allowed_relative_path:
          description: The allowed process path
          type: string
          example: /group_one/group_two/model_a
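
Taken together, the new /secrets paths form a small CRUD API. A sketch of how a client might exercise it with the requests library, assuming a local server on port 5000 and a JWT already obtained from the login flow (both assumptions, not part of this commit):

import requests

BASE = "http://localhost:5000/v1.0"  # assumed local dev server
token = "..."  # assumed: a JWT from the login flow
headers = {"Authorization": f"Bearer {token}"}

# POST /secrets creates a key/value secret (201 on success).
requests.post(
    f"{BASE}/secrets",
    json={"key": "my_secret_key", "value": "my_super_secret_value", "creator_user_id": 1},
    headers=headers,
)

# GET /secrets/{key} returns the stored value.
value = requests.get(f"{BASE}/secrets/my_secret_key", headers=headers).json()

# PUT modifies an existing secret; DELETE removes it (204 on success).
requests.put(
    f"{BASE}/secrets/my_secret_key",
    json={"key": "my_secret_key", "value": "rotated_value", "creator_user_id": 1},
    headers=headers,
)
requests.delete(f"{BASE}/secrets/my_secret_key", headers=headers)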

@@ -27,9 +27,11 @@ from spiffworkflow_backend.models.process_instance import (
from spiffworkflow_backend.models.process_instance_report import (
    ProcessInstanceReportModel,
)  # noqa: F401
from spiffworkflow_backend.models.spiff_logging import (
    SpiffLoggingModel,
from spiffworkflow_backend.models.secret_model import (
    SecretAllowedProcessPathModel,
)  # noqa: F401
from spiffworkflow_backend.models.secret_model import SecretModel  # noqa: F401
from spiffworkflow_backend.models.spiff_logging import SpiffLoggingModel  # noqa: F401
from spiffworkflow_backend.models.task_event import TaskEventModel  # noqa: F401
from spiffworkflow_backend.models.user import UserModel  # noqa: F401
from spiffworkflow_backend.models.user_group_assignment import (

@@ -1,15 +1,22 @@
"""Message_correlation."""
from dataclasses import dataclass
from typing import TYPE_CHECKING

from flask_bpmn.models.db import db
from flask_bpmn.models.db import SpiffworkflowBaseDBModel
from sqlalchemy import ForeignKey
from sqlalchemy.orm import relationship

from spiffworkflow_backend.models.message_correlation_property import (
    MessageCorrelationPropertyModel,
)
from spiffworkflow_backend.models.process_instance import ProcessInstanceModel

if TYPE_CHECKING:
    from spiffworkflow_backend.models.message_correlation_message_instance import (  # noqa: F401
        MessageCorrelationMessageInstanceModel,
    )


@dataclass
class MessageCorrelationModel(SpiffworkflowBaseDBModel):

@@ -36,3 +43,7 @@ class MessageCorrelationModel(SpiffworkflowBaseDBModel):
    value = db.Column(db.String(255), nullable=False, index=True)
    updated_at_in_seconds: int = db.Column(db.Integer)
    created_at_in_seconds: int = db.Column(db.Integer)

    message_correlations_message_instances = relationship(
        "MessageCorrelationMessageInstanceModel", cascade="delete"
    )

@@ -25,7 +25,7 @@ class MessageCorrelationMessageInstanceModel(SpiffworkflowBaseDBModel):

    id = db.Column(db.Integer, primary_key=True)
    message_instance_id = db.Column(
        ForeignKey(MessageInstanceModel.id), nullable=False, index=True
        ForeignKey(MessageInstanceModel.id), nullable=False, index=True  # type: ignore
    )
    message_correlation_id = db.Column(
        ForeignKey(MessageCorrelationModel.id), nullable=False, index=True

@@ -3,6 +3,7 @@ import enum
from dataclasses import dataclass
from typing import Any
from typing import Optional
from typing import TYPE_CHECKING

from flask_bpmn.models.db import db
from flask_bpmn.models.db import SpiffworkflowBaseDBModel

@@ -15,6 +16,11 @@ from sqlalchemy.orm.events import event
from spiffworkflow_backend.models.message_model import MessageModel
from spiffworkflow_backend.models.process_instance import ProcessInstanceModel

if TYPE_CHECKING:
    from spiffworkflow_backend.models.message_correlation_message_instance import (  # noqa: F401
        MessageCorrelationMessageInstanceModel,
    )


class MessageTypes(enum.Enum):
    """MessageTypes."""

@@ -38,15 +44,18 @@ class MessageInstanceModel(SpiffworkflowBaseDBModel):

    __tablename__ = "message_instance"

    id = db.Column(db.Integer, primary_key=True)
    id: int = db.Column(db.Integer, primary_key=True)
    process_instance_id: int = db.Column(ForeignKey(ProcessInstanceModel.id), nullable=False)  # type: ignore
    message_model_id: int = db.Column(ForeignKey(MessageModel.id), nullable=False)
    message_model = relationship("MessageModel")
    message_correlations_message_instances = relationship(
        "MessageCorrelationMessageInstanceModel", cascade="delete"
    )

    message_type = db.Column(db.String(20), nullable=False)
    payload = db.Column(db.JSON)
    status = db.Column(db.String(20), nullable=False, default="ready")
    failure_cause = db.Column(db.Text())
    message_type: str = db.Column(db.String(20), nullable=False)
    payload: str = db.Column(db.JSON)
    status: str = db.Column(db.String(20), nullable=False, default="ready")
    failure_cause: str = db.Column(db.Text())
    updated_at_in_seconds: int = db.Column(db.Integer)
    created_at_in_seconds: int = db.Column(db.Integer)

@@ -79,6 +79,7 @@ class ProcessInstanceStatus(SpiffEnum):
    faulted = "faulted"
    suspended = "suspended"
    terminated = "terminated"
    erroring = "erroring"


class ProcessInstanceModel(SpiffworkflowBaseDBModel):

@@ -94,6 +95,8 @@ class ProcessInstanceModel(SpiffworkflowBaseDBModel):
    active_tasks = relationship("ActiveTaskModel", cascade="delete")  # type: ignore
    task_events = relationship("TaskEventModel", cascade="delete")  # type: ignore
    spiff_logs = relationship("SpiffLoggingModel", cascade="delete")  # type: ignore
    message_instances = relationship("MessageInstanceModel", cascade="delete")  # type: ignore
    message_correlations = relationship("MessageCorrelationModel", cascade="delete")  # type: ignore

    bpmn_json: str | None = deferred(db.Column(db.JSON))  # type: ignore
    start_in_seconds: int | None = db.Column(db.Integer)
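
The new message_instances and message_correlations relationships use cascade="delete", so deleting a process instance removes its dependent rows in the same flush. A toy demonstration of that cascade option with stand-in models (not the real ones):

from sqlalchemy import Column, ForeignKey, Integer, create_engine
from sqlalchemy.orm import Session, declarative_base, relationship

Base = declarative_base()


class Parent(Base):
    __tablename__ = "parent"
    id = Column(Integer, primary_key=True)
    children = relationship("Child", cascade="delete")


class Child(Base):
    __tablename__ = "child"
    id = Column(Integer, primary_key=True)
    parent_id = Column(ForeignKey("parent.id"), nullable=False)


engine = create_engine("sqlite://")
Base.metadata.create_all(engine)
with Session(engine) as session:
    child_one, child_two = Child(), Child()
    parent = Parent(children=[child_one, child_two])
    session.add(parent)
    # cascade="delete" replaces the default cascade, which included
    # save-update, so the children must be added to the session explicitly.
    session.add_all([child_one, child_two])
    session.commit()

    session.delete(parent)  # cascade="delete" removes both Child rows too
    session.commit()
    print(session.query(Child).count())  # 0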

@@ -0,0 +1,58 @@
"""Secret_model."""
from flask_bpmn.models.db import db
from flask_bpmn.models.db import SpiffworkflowBaseDBModel
from marshmallow import Schema
from sqlalchemy import ForeignKey
from sqlalchemy.orm import relationship

from spiffworkflow_backend.models.user import UserModel


class SecretModel(SpiffworkflowBaseDBModel):
    """SecretModel."""

    __tablename__ = "secret"
    id: int = db.Column(db.Integer, primary_key=True)
    key: str = db.Column(db.String(50), unique=True, nullable=False)
    value: str = db.Column(db.String(255), nullable=False)
    creator_user_id: int = db.Column(ForeignKey(UserModel.id), nullable=False)

    allowed_processes = relationship("SecretAllowedProcessPathModel", cascade="delete")


class SecretModelSchema(Schema):
    """SecretModelSchema."""

    class Meta:
        """Meta."""

        model = SecretModel
        fields = ["key", "value", "creator_user_id"]


class SecretAllowedProcessPathModel(SpiffworkflowBaseDBModel):
    """Allowed processes can be Process Groups or Process Models.

    We store the path in either case.
    """

    __tablename__ = "secret_allowed_process"
    __table_args__ = (
        db.UniqueConstraint(
            "secret_id", "allowed_relative_path", name="unique_secret_path"
        ),
    )

    id: int = db.Column(db.Integer, primary_key=True)
    secret_id: int = db.Column(ForeignKey(SecretModel.id), nullable=False)  # type: ignore
    allowed_relative_path: str = db.Column(db.String(500), nullable=False)


class SecretAllowedProcessSchema(Schema):
    """SecretAllowedProcessSchema."""

    class Meta:
        """Meta."""

        model = SecretAllowedProcessPathModel
        fields = ["secret_id", "allowed_relative_path"]
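
SecretModelSchema whitelists its output columns through Meta.fields, so dumping a row yields exactly those keys and nothing else. Note that value is on the list, so callers decide where the secret payload may travel. A small sketch with a stand-in object instead of a real database row:

from marshmallow import Schema


class SecretModelSchema(Schema):
    class Meta:
        # marshmallow generates a field for every name listed here
        fields = ["key", "value", "creator_user_id"]


class FakeSecret:
    """Stand-in for a SecretModel row; only attribute access is needed."""

    key = "my_secret_key"
    value = "my_super_secret_value"
    creator_user_id = 1
    id = 42  # not listed in Meta.fields, so dropped on dump


print(SecretModelSchema().dump(FakeSecret()))
# {'key': 'my_secret_key', 'value': 'my_super_secret_value', 'creator_user_id': 1}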

@@ -19,6 +19,8 @@ class SpiffLoggingModel(SpiffworkflowBaseDBModel):
    process_instance_id: int = db.Column(ForeignKey(ProcessInstanceModel.id), nullable=False)  # type: ignore
    bpmn_process_identifier: str = db.Column(db.String(255), nullable=False)
    bpmn_task_identifier: str = db.Column(db.String(255), nullable=False)
    bpmn_task_name: str = db.Column(db.String(255), nullable=True)
    bpmn_task_type: str = db.Column(db.String(255), nullable=True)
    spiff_task_guid: str = db.Column(db.String(50), nullable=False)
    timestamp: float = db.Column(db.DECIMAL(17, 6), nullable=False)
    message: Optional[str] = db.Column(db.String(255), nullable=True)

@@ -43,7 +43,10 @@ from spiffworkflow_backend.models.process_instance_report import (
)
from spiffworkflow_backend.models.process_model import ProcessModelInfo
from spiffworkflow_backend.models.process_model import ProcessModelInfoSchema
from spiffworkflow_backend.models.secret_model import SecretAllowedProcessSchema
from spiffworkflow_backend.models.secret_model import SecretModelSchema
from spiffworkflow_backend.models.spiff_logging import SpiffLoggingModel
from spiffworkflow_backend.models.user import UserModel
from spiffworkflow_backend.services.error_handling_service import ErrorHandlingService
from spiffworkflow_backend.services.file_system_service import FileSystemService
from spiffworkflow_backend.services.message_service import MessageService

@@ -54,8 +57,10 @@ from spiffworkflow_backend.services.process_instance_service import (
    ProcessInstanceService,
)
from spiffworkflow_backend.services.process_model_service import ProcessModelService
from spiffworkflow_backend.services.secret_service import SecretService
from spiffworkflow_backend.services.service_task_service import ServiceTaskService
from spiffworkflow_backend.services.spec_file_service import SpecFileService
from spiffworkflow_backend.services.user_service import UserService

process_api_blueprint = Blueprint("process_api", __name__)

@@ -197,10 +202,12 @@ def process_model_show(process_group_id: str, process_model_id: str) -> Any:


def process_model_list(
    process_group_id: str, page: int = 1, per_page: int = 100
    process_group_identifier: Optional[str] = None, page: int = 1, per_page: int = 100
) -> flask.wrappers.Response:
    """Process model list!"""
    process_models = ProcessModelService().get_process_models(process_group_id)
    process_models = ProcessModelService().get_process_models(
        process_group_id=process_group_identifier
    )
    batch = ProcessModelService().get_batch(
        process_models, page=page, per_page=per_page
    )

@@ -280,7 +287,6 @@ def process_model_file_delete(


def add_file(process_group_id: str, process_model_id: str) -> flask.wrappers.Response:
    """Add_file."""
    ProcessModelService()
    process_model = get_process_model(process_model_id, process_group_id)
    request_file = get_file_from_request()
    if not request_file.filename:

@@ -391,6 +397,10 @@ def process_instance_log_list(
            SpiffLoggingModel.process_instance_id == process_instance.id
        )
        .order_by(SpiffLoggingModel.timestamp.desc())  # type: ignore
        .join(UserModel)
        .add_columns(
            UserModel.username,
        )
        .paginate(page, per_page, False)
    )

@@ -406,6 +416,37 @@ def process_instance_log_list(
    return make_response(jsonify(response_json), 200)


def message_instance_list(
    process_instance_id: Optional[int] = None,
    page: int = 1,
    per_page: int = 100,
) -> flask.wrappers.Response:
    """Message_instance_list."""
    # to make sure the process instance exists
    message_instances_query = MessageInstanceModel.query

    if process_instance_id:
        message_instances_query = message_instances_query.filter_by(
            process_instance_id=process_instance_id
        )

    message_instances = message_instances_query.order_by(
        MessageInstanceModel.created_at_in_seconds.desc(),  # type: ignore
        MessageInstanceModel.id.desc(),  # type: ignore
    ).paginate(page, per_page, False)

    response_json = {
        "results": message_instances.items,
        "pagination": {
            "count": len(message_instances.items),
            "total": message_instances.total,
            "pages": message_instances.pages,
        },
    }

    return make_response(jsonify(response_json), 200)

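
The pagination envelope built here, results plus count/total/pages, is the same shape the other list endpoints in this blueprint return. A minimal sketch of that envelope computed over a plain list, purely illustrative:

import math


def paginate(items: list, page: int = 1, per_page: int = 100) -> dict:
    # Same envelope as the blueprint: results plus count/total/pages metadata.
    start = (page - 1) * per_page
    page_items = items[start : start + per_page]
    return {
        "results": page_items,
        "pagination": {
            "count": len(page_items),
            "total": len(items),
            "pages": math.ceil(len(items) / per_page) if items else 0,
        },
    }


print(paginate(list(range(25)), page=2, per_page=10))
# 10 results, total 25, pages 3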

# body: {
#   payload: dict,
#   process_instance_id: Optional[int],

@@ -491,8 +532,8 @@ def message_start(


def process_instance_list(
    process_group_id: str,
    process_model_id: str,
    process_group_identifier: Optional[str] = None,
    process_model_identifier: Optional[str] = None,
    page: int = 1,
    per_page: int = 100,
    start_from: Optional[int] = None,

@@ -502,11 +543,15 @@ def process_instance_list(
    process_status: Optional[str] = None,
) -> flask.wrappers.Response:
    """Process_instance_list."""
    process_model = get_process_model(process_model_id, process_group_id)
    process_instance_query = ProcessInstanceModel.query
    if process_model_identifier is not None and process_group_identifier is not None:
        process_model = get_process_model(
            process_model_identifier, process_group_identifier
        )

    results = ProcessInstanceModel.query.filter_by(
        process_model_identifier=process_model.id
    )
        process_instance_query = process_instance_query.filter_by(
            process_model_identifier=process_model.id
        )

    # this can never happen. obviously the class has the columns it defines. this is just to appease mypy.
    if (

@@ -522,17 +567,28 @@ def process_instance_list(
    )

    if start_from is not None:
        results = results.filter(ProcessInstanceModel.start_in_seconds >= start_from)
        process_instance_query = process_instance_query.filter(
            ProcessInstanceModel.start_in_seconds >= start_from
        )
    if start_till is not None:
        results = results.filter(ProcessInstanceModel.start_in_seconds <= start_till)
        process_instance_query = process_instance_query.filter(
            ProcessInstanceModel.start_in_seconds <= start_till
        )
    if end_from is not None:
        results = results.filter(ProcessInstanceModel.end_in_seconds >= end_from)
        process_instance_query = process_instance_query.filter(
            ProcessInstanceModel.end_in_seconds >= end_from
        )
    if end_till is not None:
        results = results.filter(ProcessInstanceModel.end_in_seconds <= end_till)
        process_instance_query = process_instance_query.filter(
            ProcessInstanceModel.end_in_seconds <= end_till
        )
    if process_status is not None:
        results = results.filter(ProcessInstanceModel.status == process_status)
        process_status_array = process_status.split(",")
        process_instance_query = process_instance_query.filter(
            ProcessInstanceModel.status.in_(process_status_array)  # type: ignore
        )

    process_instances = results.order_by(
    process_instances = process_instance_query.order_by(
        ProcessInstanceModel.start_in_seconds.desc(), ProcessInstanceModel.id.desc()  # type: ignore
    ).paginate(page, per_page, False)

@@ -1044,3 +1100,52 @@ def get_spiff_task_from_process_instance(
        )
    )
    return spiff_task


#
# Methods for secrets CRUD - maybe move somewhere else:
#
def get_secret(key: str) -> Optional[str]:
    """Get_secret."""
    return SecretService.get_secret(key)


def add_secret(body: Dict) -> Response:
    """Add secret."""
    secret_model = SecretService().add_secret(
        body["key"], body["value"], body["creator_user_id"]
    )
    assert secret_model  # noqa: S101
    return Response(
        json.dumps(SecretModelSchema().dump(secret_model)),
        status=201,
        mimetype="application/json",
    )


def update_secret(key: str, body: dict) -> None:
    """Update secret."""
    SecretService().update_secret(key, body["value"], body["creator_user_id"])


def delete_secret(key: str) -> None:
    """Delete secret."""
    current_user = UserService.current_user()
    SecretService.delete_secret(key, current_user.id)


def add_allowed_process_path(body: dict) -> Any:
    """Get allowed process paths."""
    allowed_process_path = SecretService.add_allowed_process(
        body["secret_id"], g.user.id, body["allowed_relative_path"]
    )
    return Response(
        json.dumps(SecretAllowedProcessSchema().dump(allowed_process_path)),
        status=201,
        mimetype="application/json",
    )


def delete_allowed_process_path(allowed_process_path_id: int) -> Any:
    """Get allowed process paths."""
    SecretService().delete_allowed_process(allowed_process_path_id, g.user.id)
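
The reworked status filter accepts a comma-separated list (for example process_status=faulted,suspended) and turns it into a SQL IN clause via split(",") and status.in_(...). A self-contained SQLAlchemy sketch of the idea against an in-memory SQLite table (the Instance model is a hypothetical stand-in):

from sqlalchemy import Column, Integer, String, create_engine, select
from sqlalchemy.orm import Session, declarative_base

Base = declarative_base()


class Instance(Base):  # hypothetical stand-in for ProcessInstanceModel
    __tablename__ = "instance"
    id = Column(Integer, primary_key=True)
    status = Column(String(20))


engine = create_engine("sqlite://")
Base.metadata.create_all(engine)
with Session(engine) as session:
    session.add_all(
        [Instance(status=s) for s in ["complete", "faulted", "suspended", "faulted"]]
    )
    session.commit()

    process_status = "faulted,suspended"  # as it arrives in the query string
    statuses = process_status.split(",")
    rows = session.execute(
        select(Instance).where(Instance.status.in_(statuses))
    ).scalars().all()
    print([r.status for r in rows])  # ['faulted', 'suspended', 'faulted']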

@@ -151,22 +151,41 @@ def validate_scope(token: Any) -> bool:
    return True


def api_login(uid: str, password: str, redirect_url: Optional[str] = None) -> dict:
    """Api_login."""
    # TODO: Fix this! mac 20220801
    # token:dict = PublicAuthenticationService().get_public_access_token(uid, password)
    # g.token = token
    #
    # return token
    return {}
# def login_api(redirect_url: str = "/v1.0/ui") -> Response:
#     """Api_login."""
#     # TODO: Fix this! mac 20220801
#     # token:dict = PublicAuthenticationService().get_public_access_token(uid, password)
#     #
#     # return token
#     # if uid:
#     #     sub = f"service:internal::service_id:{uid}"
#     #     token = encode_auth_token(sub)
#     #     user_model = UserModel(username=uid,
#     #                            uid=uid,
#     #                            service='internal',
#     #                            name="API User")
#     #     g.user = user_model
#     #
#     #     g.token = token
#     #     scope = get_scope(token)
#     #     return token
#     #     return {"uid": uid, "sub": uid, "scope": scope}
#     return login(redirect_url)


def encode_auth_token(uid: str) -> str:
# def login_api_return(code: str, state: str, session_state: str) -> Optional[Response]:
#     print("login_api_return")


def encode_auth_token(sub: str, token_type: Optional[str] = None) -> str:
    """Generates the Auth Token.

    :return: string
    """
    payload = {"sub": uid}
    payload = {"sub": sub}
    if token_type is None:
        token_type = "internal"  # noqa: S105
    payload["token_type"] = token_type
    if "SECRET_KEY" in current_app.config:
        secret_key = current_app.config.get("SECRET_KEY")
    else:

@@ -215,6 +234,8 @@ def login_return(code: str, state: str, session_state: str) -> Optional[Response
    name = user_info["name"]
    if "username" in user_info:
        username = user_info["username"]
    elif "preferred_username" in user_info:
        username = user_info["preferred_username"]
    if "email" in user_info:
        email = user_info["email"]
    user_model = UserService().create_user(

@@ -302,4 +323,15 @@ def get_user_from_decoded_internal_token(decoded_token: dict) -> Optional[UserMo
        .filter(UserModel.service_id == service_id)
        .first()
    )
    # user: UserModel = UserModel.query.filter()
    if user:
        return user
    user = UserModel(
        username=service_id,
        uid=service_id,
        service=service,
        service_id=service_id,
        name="API User",
    )

    return user
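
encode_auth_token now stamps a token_type claim alongside sub before signing. The signing call itself sits outside the visible hunk; a sketch of the full round trip with PyJWT, assuming HS256 and a config-supplied secret key (both assumptions, not shown in the diff):

from typing import Optional

import jwt  # PyJWT

SECRET_KEY = "change-me"  # assumed: comes from app config in the real code


def encode_auth_token(sub: str, token_type: Optional[str] = None) -> str:
    payload = {"sub": sub}
    if token_type is None:
        token_type = "internal"
    payload["token_type"] = token_type
    return jwt.encode(payload, SECRET_KEY, algorithm="HS256")


token = encode_auth_token("service:internal::service_id:api_user")
print(jwt.decode(token, SECRET_KEY, algorithms=["HS256"]))
# {'sub': 'service:internal::service_id:api_user', 'token_type': 'internal'}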

@@ -0,0 +1,25 @@
"""Background_processing_service."""
import flask

from spiffworkflow_backend.services.message_service import MessageService
from spiffworkflow_backend.services.process_instance_service import (
    ProcessInstanceService,
)


class BackgroundProcessingService:
    """Used to facilitate doing work outside of an HTTP request/response."""

    def __init__(self, app: flask.app.Flask):
        """__init__."""
        self.app = app

    def run(self) -> None:
        """Since this runs in a scheduler, we need to specify the app context as well."""
        with self.app.app_context():
            ProcessInstanceService.do_waiting()

    def process_message_instances_with_app_context(self) -> None:
        """Since this runs in a scheduler, we need to specify the app context as well."""
        with self.app.app_context():
            MessageService.process_message_instances()
@@ -1,6 +1,7 @@
"""Logging_service."""
import json
import logging
import re
from typing import Any
from typing import Optional

@@ -113,6 +114,8 @@ def setup_logger(app: Flask) -> None:
        "%(asctime)s - %(name)s - %(levelname)s - %(message)s"
    )

    app.logger.debug("Printing log to create app logger")

    # the json formatter is nice for real environments but makes
    # debugging locally a little more difficult
    if app.env != "development":

@@ -132,13 +135,16 @@ def setup_logger(app: Flask) -> None:

    spiff_logger_filehandler = None
    if app.config["SPIFFWORKFLOW_BACKEND_LOG_TO_FILE"]:
        spiff_logger_filehandler = logging.FileHandler(f"log/{app.env}.log")
        spiff_logger_filehandler = logging.FileHandler(
            f"{app.instance_path}/../../log/{app.env}.log"
        )
        spiff_logger_filehandler.setLevel(spiff_log_level)
        spiff_logger_filehandler.setFormatter(log_formatter)

    # make all loggers act the same
    for name in logging.root.manager.loggerDict:
        if "spiff" not in name:
        # use a regex so spiffworkflow_backend isn't filtered out
        if not re.match(r"^spiff\b", name):
            the_logger = logging.getLogger(name)
            the_logger.setLevel(log_level)
            if spiff_logger_filehandler:

@@ -176,6 +182,8 @@ class DBHandler(logging.Handler):
            bpmn_process_identifier = record.workflow  # type: ignore
            spiff_task_guid = str(record.task_id)  # type: ignore
            bpmn_task_identifier = str(record.task_spec)  # type: ignore
            bpmn_task_name = record.task_name if hasattr(record, "task_name") else None  # type: ignore
            bpmn_task_type = record.task_type if hasattr(record, "task_type") else None  # type: ignore
            timestamp = record.created
            message = record.msg if hasattr(record, "msg") else None
            current_user_id = record.current_user_id if hasattr(record, "current_user_id") else None  # type: ignore

@@ -183,7 +191,9 @@ class DBHandler(logging.Handler):
                process_instance_id=record.process_instance_id,  # type: ignore
                bpmn_process_identifier=bpmn_process_identifier,
                spiff_task_guid=spiff_task_guid,
                bpmn_task_name=bpmn_task_name,
                bpmn_task_identifier=bpmn_task_identifier,
                bpmn_task_type=bpmn_task_type,
                message=message,
                timestamp=timestamp,
                current_user_id=current_user_id,

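The regex swap above is subtle: re.match(r"^spiff\b", name) requires a word boundary right after "spiff", so the SpiffWorkflow library loggers stay excluded while this backend's own loggers no longer are. A quick illustration of the difference from the old substring check:

    import re

    for name in ["spiff", "spiff.metrics", "spiffworkflow_backend.routes"]:
        print(name, "spiff" in name, bool(re.match(r"^spiff\b", name)))
    # spiff                         True True   -> excluded by both checks
    # spiff.metrics                 True True   -> excluded by both checks
    # spiffworkflow_backend.routes  True False  -> was excluded before, now gets the shared config
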
@@ -2,7 +2,6 @@
from typing import Any
from typing import Optional

import flask
from flask_bpmn.models.db import db
from sqlalchemy import and_
from sqlalchemy import or_

@@ -26,23 +25,6 @@ from spiffworkflow_backend.services.process_instance_service import (
)


class MessageServiceWithAppContext:
    """Wrapper for Message Service.

    This wrapper is to facilitate running the MessageService from the scheduler
    since we need to specify the app context then.
    """

    def __init__(self, app: flask.app.Flask):
        """__init__."""
        self.app = app

    def process_message_instances_with_app_context(self) -> None:
        """Since this runs in a scheduler, we need to specify the app context as well."""
        with self.app.app_context():
            MessageService.process_message_instances()


class MessageServiceError(Exception):
    """MessageServiceError."""

@@ -95,11 +95,13 @@ class CustomBpmnScriptEngine(PythonScriptEngine):  # type: ignore
        expression: str,
        context: Dict[str, Union[Box, str]],
        task: Optional[SpiffTask] = None,
        _external_methods: None = None,
        external_methods: Optional[Dict[str, Any]] = None,
    ) -> Any:
        """Evaluate the given expression, within the context of the given task and return the result."""
        try:
            return super()._evaluate(expression, context)
            return super()._evaluate(
                expression, context, external_methods=external_methods
            )
        except Exception as exception:
            if task is None:
                raise ProcessInstanceProcessorError(

@@ -355,7 +357,7 @@ class ProcessInstanceProcessor:
    @staticmethod
    def __get_bpmn_process_instance(
        process_instance_model: ProcessInstanceModel,
        spec: WorkflowSpec = None,
        spec: Optional[WorkflowSpec] = None,
        validate_only: bool = False,
        subprocesses: Optional[IdToBpmnProcessSpecMapping] = None,
    ) -> BpmnWorkflow:

@@ -366,12 +368,17 @@ class ProcessInstanceProcessor:
            original_spiff_logger_log_level = spiff_logger.level
            spiff_logger.setLevel(logging.WARNING)

            bpmn_process_instance = (
                ProcessInstanceProcessor._serializer.deserialize_json(
                    process_instance_model.bpmn_json
            try:
                bpmn_process_instance = (
                    ProcessInstanceProcessor._serializer.deserialize_json(
                        process_instance_model.bpmn_json
                    )
                )
            )
            spiff_logger.setLevel(original_spiff_logger_log_level)
            except Exception as err:
                raise (err)
            finally:
                spiff_logger.setLevel(original_spiff_logger_log_level)

            bpmn_process_instance.script_engine = (
                ProcessInstanceProcessor._script_engine
            )

@@ -561,10 +568,14 @@ class ProcessInstanceProcessor:
                    bpmn_process_identifier
                )
                new_bpmn_files.add(new_bpmn_file_full_path)
                dmn_file_glob = os.path.join(
                    os.path.dirname(new_bpmn_file_full_path), "*.dmn"
                )
                parser.add_dmn_files_by_glob(dmn_file_glob)
                processed_identifiers.add(bpmn_process_identifier)

        for new_bpmn_file_full_path in new_bpmn_files:
            parser.add_bpmn_file(new_bpmn_file_full_path)
        if new_bpmn_files:
            parser.add_bpmn_files(new_bpmn_files)
            ProcessInstanceProcessor.update_spiff_parser_with_all_process_dependency_files(
                parser, processed_identifiers
            )

@@ -717,52 +728,59 @@ class ProcessInstanceProcessor:
        """Queue_waiting_receive_messages."""
        waiting_tasks = self.get_all_waiting_tasks()
        for waiting_task in waiting_tasks:
            if waiting_task.task_spec.__class__.__name__ in [
            # if it's not something that can wait for a message, skip it
            if waiting_task.task_spec.__class__.__name__ not in [
                "IntermediateCatchEvent",
                "ReceiveTask",
            ]:
                message_model = MessageModel.query.filter_by(
                    name=waiting_task.task_spec.event_definition.name
                ).first()
                if message_model is None:
                    raise ApiError(
                        "invalid_message_name",
                        f"Invalid message name: {waiting_task.task_spec.event_definition.name}.",
                    )
                continue

                message_instance = MessageInstanceModel(
                    process_instance_id=self.process_instance_model.id,
                    message_type="receive",
                    message_model_id=message_model.id,
            # timer events are not related to messaging, so ignore them for these purposes
            if waiting_task.task_spec.event_definition.__class__.__name__ in [
                "TimerEventDefinition",
            ]:
                continue

            message_model = MessageModel.query.filter_by(
                name=waiting_task.task_spec.event_definition.name
            ).first()
            if message_model is None:
                raise ApiError(
                    "invalid_message_name",
                    f"Invalid message name: {waiting_task.task_spec.event_definition.name}.",
                )
                db.session.add(message_instance)

                for (
                    spiff_correlation_property
                ) in waiting_task.task_spec.event_definition.correlation_properties:
                    # NOTE: we may have to cycle through keys here
                    # not sure yet if it's valid for a property to be associated with multiple keys
                    correlation_key_name = spiff_correlation_property.correlation_keys[
                        0
                    ]
                    message_correlation = (
                        MessageCorrelationModel.query.filter_by(
                            process_instance_id=self.process_instance_model.id,
                            name=correlation_key_name,
                        )
                        .join(MessageCorrelationPropertyModel)
                        .filter_by(identifier=spiff_correlation_property.name)
                        .first()
                    )
                    message_correlation_message_instance = (
                        MessageCorrelationMessageInstanceModel(
                            message_instance_id=message_instance.id,
                            message_correlation_id=message_correlation.id,
                        )
                    )
                    db.session.add(message_correlation_message_instance)
            message_instance = MessageInstanceModel(
                process_instance_id=self.process_instance_model.id,
                message_type="receive",
                message_model_id=message_model.id,
            )
            db.session.add(message_instance)

        db.session.commit()
            for (
                spiff_correlation_property
            ) in waiting_task.task_spec.event_definition.correlation_properties:
                # NOTE: we may have to cycle through keys here
                # not sure yet if it's valid for a property to be associated with multiple keys
                correlation_key_name = spiff_correlation_property.correlation_keys[0]
                message_correlation = (
                    MessageCorrelationModel.query.filter_by(
                        process_instance_id=self.process_instance_model.id,
                        name=correlation_key_name,
                    )
                    .join(MessageCorrelationPropertyModel)
                    .filter_by(identifier=spiff_correlation_property.name)
                    .first()
                )
                message_correlation_message_instance = (
                    MessageCorrelationMessageInstanceModel(
                        message_instance_id=message_instance.id,
                        message_correlation_id=message_correlation.id,
                    )
                )
                db.session.add(message_correlation_message_instance)

        db.session.commit()

    def do_engine_steps(self, exit_at: None = None, save: bool = False) -> None:
        """Do_engine_steps."""

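The reworked queue_waiting_receive_messages inverts the membership test and adds a timer guard before any database work happens. Pulled out as a standalone predicate for illustration (the helper name is hypothetical; the two checks mirror the new code exactly):

    def can_wait_for_message(waiting_task) -> bool:
        # only catch events and receive tasks can consume a message
        if waiting_task.task_spec.__class__.__name__ not in [
            "IntermediateCatchEvent",
            "ReceiveTask",
        ]:
            return False
        # timer catch events land here too but have nothing to do with messaging
        if waiting_task.task_spec.event_definition.__class__.__name__ in [
            "TimerEventDefinition",
        ]:
            return False
        return True
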
@@ -58,6 +58,30 @@ class ProcessInstanceService:
        db.session.commit()
        return process_instance_model

    @staticmethod
    def do_waiting() -> None:
        """Do_waiting."""
        records = (
            db.session.query(ProcessInstanceModel)
            .filter(ProcessInstanceModel.status == ProcessInstanceStatus.waiting.value)
            .all()
        )
        for process_instance in records:
            try:
                current_app.logger.info(
                    f"Processing process_instance {process_instance.id}"
                )
                processor = ProcessInstanceProcessor(process_instance)
                processor.do_engine_steps(save=True)
            except Exception as e:
                db.session.rollback()  # in case the above left the database with a bad transaction
                process_instance.status = ProcessInstanceStatus.erroring.value
                db.session.add(process_instance)
                db.session.commit()
                error_message = (
                    f"Error running waiting task for process_instance {process_instance.id} "
                    f"({process_instance.process_model_identifier}). {str(e)}"
                )
                current_app.logger.error(error_message)

    @staticmethod
    def processor_to_process_instance_api(
        processor: ProcessInstanceProcessor, next_task: None = None

@@ -0,0 +1,204 @@
"""Secret_service."""
from typing import Optional

from flask_bpmn.api.api_error import ApiError
from flask_bpmn.models.db import db
from sqlalchemy.exc import IntegrityError

from spiffworkflow_backend.models.secret_model import SecretAllowedProcessPathModel
from spiffworkflow_backend.models.secret_model import SecretModel

# from cryptography.fernet import Fernet
#
#
# class EncryptionService:
#     key = Fernet.generate_key()  # this is your "password"
#     cipher_suite = Fernet(key)
#     encoded_text = cipher_suite.encrypt(b"Hello stackoverflow!")
#     decoded_text = cipher_suite.decrypt(encoded_text)


class SecretService:
    """SecretService."""

    def encrypt_key(self, plain_key: str) -> str:
        """Encrypt_key."""
        # flask_secret = current_app.secret_key
        # print("encrypt_key")
        ...

    def decrypt_key(self, encrypted_key: str) -> str:
        """Decrypt key."""
        ...

    @staticmethod
    def add_secret(
        key: str,
        value: str,
        creator_user_id: int,
    ) -> SecretModel:
        """Add_secret."""
        # encrypted_key = self.encrypt_key(key)
        secret_model = SecretModel(
            key=key, value=value, creator_user_id=creator_user_id
        )
        db.session.add(secret_model)
        try:
            db.session.commit()
        except Exception as e:
            raise ApiError(
                code="create_secret_error",
                message=f"There was an error creating a secret with key: {key} and value ending with: {value[-4:]}. "
                f"Original error is {e}",
            ) from e
        return secret_model

    @staticmethod
    def get_secret(key: str) -> Optional[str]:
        """Get_secret."""
        secret: SecretModel = (
            db.session.query(SecretModel).filter(SecretModel.key == key).first()
        )
        if secret is not None:
            return secret.value

    @staticmethod
    def update_secret(
        key: str,
        value: str,
        creator_user_id: Optional[int] = None,
    ) -> None:
        """Update_secret."""
        secret_model = SecretModel.query.filter(SecretModel.key == key).first()
        if secret_model:
            if secret_model.creator_user_id == creator_user_id:
                secret_model.value = value
                db.session.add(secret_model)
                try:
                    db.session.commit()
                except Exception as e:
                    raise ApiError(
                        code="update_secret_error",
                        message=f"There was an error updating the secret with key: {key}, and value: {value}",
                    ) from e
            else:
                raise ApiError(
                    code="update_secret_error",
                    message=f"User: {creator_user_id} cannot update the secret with key : {key}",
                    status_code=401,
                )
        else:
            raise ApiError(
                code="update_secret_error",
                message=f"Cannot update secret with key: {key}. Resource does not exist.",
                status_code=404,
            )

    @staticmethod
    def delete_secret(key: str, user_id: int) -> None:
        """Delete secret."""
        secret_model = SecretModel.query.filter(SecretModel.key == key).first()
        if secret_model:
            if secret_model.creator_user_id == user_id:
                db.session.delete(secret_model)
                try:
                    db.session.commit()
                except Exception as e:
                    raise ApiError(
                        code="delete_secret_error",
                        message=f"Could not delete secret with key: {key}. Original error is: {e}",
                    ) from e
            else:
                raise ApiError(
                    code="delete_secret_error",
                    message=f"User: {user_id} cannot delete the secret with key : {key}",
                    status_code=401,
                )
        else:
            raise ApiError(
                code="delete_secret_error",
                message=f"Cannot delete secret with key: {key}. Resource does not exist.",
                status_code=404,
            )

    @staticmethod
    def add_allowed_process(
        secret_id: int, user_id: str, allowed_relative_path: str
    ) -> SecretAllowedProcessPathModel:
        """Add_allowed_process."""
        secret_model = SecretModel.query.filter(SecretModel.id == secret_id).first()
        if secret_model:
            if secret_model.creator_user_id == user_id:
                secret_process_model = SecretAllowedProcessPathModel(
                    secret_id=secret_model.id,
                    allowed_relative_path=allowed_relative_path,
                )
                assert secret_process_model  # noqa: S101
                db.session.add(secret_process_model)
                try:
                    db.session.commit()
                except IntegrityError as ie:
                    db.session.rollback()
                    raise ApiError(
                        code="add_allowed_process_error",
                        message=f"Error adding allowed_process with secret {secret_model.id}, "
                        f"and path: {allowed_relative_path}. Resource already exists. "
                        f"Original error is {ie}",
                        status_code=409,
                    ) from ie
                except Exception as e:
                    # TODO: should we call db.session.rollback() here?
                    # db.session.rollback()
                    raise ApiError(
                        code="add_allowed_process_error",
                        message=f"Could not create an allowed process for secret with key: {secret_model.key} "
                        f"with path: {allowed_relative_path}. "
                        f"Original error is {e}",
                    ) from e
                return secret_process_model
            else:
                raise ApiError(
                    code="add_allowed_process_error",
                    message=f"User: {user_id} cannot modify the secret with key : {secret_model.key}",
                    status_code=401,
                )
        else:
            raise ApiError(
                code="add_allowed_process_error",
                message=f"Cannot add allowed process to secret with key: {secret_id}. Resource does not exist.",
                status_code=404,
            )

    @staticmethod
    def delete_allowed_process(allowed_process_id: int, user_id: int) -> None:
        """Delete_allowed_process."""
        allowed_process = SecretAllowedProcessPathModel.query.filter(
            SecretAllowedProcessPathModel.id == allowed_process_id
        ).first()
        if allowed_process:
            secret = SecretModel.query.filter(
                SecretModel.id == allowed_process.secret_id
            ).first()
            assert secret  # noqa: S101
            if secret.creator_user_id == user_id:
                db.session.delete(allowed_process)
                try:
                    db.session.commit()
                except Exception as e:
                    raise ApiError(
                        code="delete_allowed_process_error",
                        message=f"There was an exception deleting allowed_process: {allowed_process_id}. "
                        f"Original error is: {e}",
                    ) from e
            else:
                raise ApiError(
                    code="delete_allowed_process_error",
                    message=f"User: {user_id} cannot delete the allowed_process with id : {allowed_process_id}",
                    status_code=401,
                )
        else:
            raise ApiError(
                code="delete_allowed_process_error",
                message=f"Cannot delete allowed_process: {allowed_process_id}. Resource does not exist.",
                status_code=404,
            )

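The commented-out EncryptionService hints at where this is headed: secrets are currently stored in plain text, with encrypt_key/decrypt_key left as stubs. A sketch of the Fernet round trip the comment describes, assuming the cryptography package (not a dependency introduced by this diff):

    from cryptography.fernet import Fernet

    key = Fernet.generate_key()  # this is your "password"; it would need to be persisted
    cipher_suite = Fernet(key)
    encoded_text = cipher_suite.encrypt(b"Hello stackoverflow!")
    decoded_text = cipher_suite.decrypt(encoded_text)
    assert decoded_text == b"Hello stackoverflow!"
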
@@ -6,6 +6,8 @@ from typing import Dict
import requests
from flask import current_app

from spiffworkflow_backend.services.secret_service import SecretService


def connector_proxy_url() -> Any:
    """Returns the connector proxy url."""

@@ -16,27 +18,28 @@ class ServiceTaskDelegate:
    """ServiceTaskDelegate."""

    @staticmethod
    def call_connector(
        name: str, bpmn_params: Any
    ) -> None:  # TODO what is the return/type
    def normalize_value(value: Any) -> Any:
        """Normalize_value."""
        secret_prefix = "secret:"  # noqa: S105
        if value.startswith(secret_prefix):
            key = value.removeprefix(secret_prefix)
            value = SecretService().get_secret(key)
        return value

    @staticmethod
    def call_connector(name: str, bpmn_params: Any) -> str:
        """Calls a connector via the configured proxy."""

        def normalize_value(v: Any) -> Any:
            """Normalize_value."""
            value = v["value"]
            secret_prefix = "secret:"  # noqa: S105
            if value.startswith(secret_prefix):
                key = value.removeprefix(secret_prefix)
                # TODO replace with call to secret store
                value = key
            return value

        params = {k: normalize_value(v) for k, v in bpmn_params.items()}
        params = {
            k: ServiceTaskDelegate.normalize_value(v["value"])
            for k, v in bpmn_params.items()
        }
        proxied_response = requests.get(f"{connector_proxy_url()}/v1/do/{name}", params)

        if proxied_response.status_code != 200:
            print("got error from connector proxy")

        return proxied_response.text


class ServiceTaskService:
    """ServiceTaskService."""

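The normalize_value change also moves the unwrapping of BPMN extension values into the caller: values arrive as {"value": ...} dicts, and only the inner string reaches the helper, which now resolves "secret:"-prefixed values through the real secret store. Usage, assuming a secret named "my_token" was stored earlier via SecretService.add_secret:

    bpmn_params = {
        "url": {"value": "https://example.com/api"},
        "api_key": {"value": "secret:my_token"},
    }
    params = {
        k: ServiceTaskDelegate.normalize_value(v["value"])
        for k, v in bpmn_params.items()
    }
    # params["api_key"] now holds the stored secret; params["url"] is unchanged
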
@@ -0,0 +1,44 @@
<?xml version="1.0" encoding="UTF-8"?>
<bpmn:definitions xmlns:bpmn="http://www.omg.org/spec/BPMN/20100524/MODEL" xmlns:bpmndi="http://www.omg.org/spec/BPMN/20100524/DI" xmlns:dc="http://www.omg.org/spec/DD/20100524/DC" xmlns:di="http://www.omg.org/spec/DD/20100524/DI" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" id="Definitions_96f6665" targetNamespace="http://bpmn.io/schema/bpmn" exporter="Camunda Modeler" exporterVersion="3.0.0-dev">
  <bpmn:process id="Process_test_c11_C_1_1_bd2e724" isExecutable="true">
    <bpmn:startEvent id="StartEvent_1">
      <bpmn:outgoing>Flow_109wuuc</bpmn:outgoing>
    </bpmn:startEvent>
    <bpmn:sequenceFlow id="Flow_109wuuc" sourceRef="StartEvent_1" targetRef="Event_1brn88p" />
    <bpmn:endEvent id="Event_07oa1s6">
      <bpmn:incoming>Flow_0cy1fiy</bpmn:incoming>
    </bpmn:endEvent>
    <bpmn:sequenceFlow id="Flow_0cy1fiy" sourceRef="Event_1brn88p" targetRef="Event_07oa1s6" />
    <bpmn:intermediateCatchEvent id="Event_1brn88p" name="30 seconds">
      <bpmn:incoming>Flow_109wuuc</bpmn:incoming>
      <bpmn:outgoing>Flow_0cy1fiy</bpmn:outgoing>
      <bpmn:timerEventDefinition id="TimerEventDefinition_1al5gzu">
        <bpmn:timeDuration xsi:type="bpmn:tFormalExpression">timedelta(seconds=30)</bpmn:timeDuration>
      </bpmn:timerEventDefinition>
    </bpmn:intermediateCatchEvent>
  </bpmn:process>
  <bpmndi:BPMNDiagram id="BPMNDiagram_1">
    <bpmndi:BPMNPlane id="BPMNPlane_1" bpmnElement="Process_test_c11_C_1_1_bd2e724">
      <bpmndi:BPMNShape id="_BPMNShape_StartEvent_2" bpmnElement="StartEvent_1">
        <dc:Bounds x="179" y="159" width="36" height="36" />
      </bpmndi:BPMNShape>
      <bpmndi:BPMNShape id="Event_07oa1s6_di" bpmnElement="Event_07oa1s6">
        <dc:Bounds x="372" y="159" width="36" height="36" />
      </bpmndi:BPMNShape>
      <bpmndi:BPMNShape id="Event_1ou5tgd_di" bpmnElement="Event_1brn88p">
        <dc:Bounds x="272" y="159" width="36" height="36" />
        <bpmndi:BPMNLabel>
          <dc:Bounds x="262" y="202" width="57" height="14" />
        </bpmndi:BPMNLabel>
      </bpmndi:BPMNShape>
      <bpmndi:BPMNEdge id="Flow_109wuuc_di" bpmnElement="Flow_109wuuc">
        <di:waypoint x="215" y="177" />
        <di:waypoint x="272" y="177" />
      </bpmndi:BPMNEdge>
      <bpmndi:BPMNEdge id="Flow_0cy1fiy_di" bpmnElement="Flow_0cy1fiy">
        <di:waypoint x="308" y="177" />
        <di:waypoint x="372" y="177" />
      </bpmndi:BPMNEdge>
    </bpmndi:BPMNPlane>
  </bpmndi:BPMNDiagram>
</bpmn:definitions>

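Note that the timeDuration body, timedelta(seconds=30), is a Python expression rather than an ISO 8601 duration; it works because the script engine appears to evaluate timer expressions with timedelta in scope. A rough, illustrative equivalent of that evaluation (the eval wiring here is an assumption, not the engine's actual code):

    from datetime import timedelta

    duration = eval("timedelta(seconds=30)", {"timedelta": timedelta})  # noqa: S307
    assert duration.total_seconds() == 30.0
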
@@ -13,7 +13,6 @@ from flask.testing import FlaskClient
from flask_bpmn.api.api_error import ApiError
from flask_bpmn.models.db import db
from tests.spiffworkflow_backend.helpers.test_data import load_test_spec
from tests.spiffworkflow_backend.helpers.test_data import logged_in_headers
from werkzeug.test import TestResponse

from spiffworkflow_backend.models.process_group import ProcessGroup

@@ -26,12 +25,14 @@ from spiffworkflow_backend.models.user import UserModel
from spiffworkflow_backend.services.process_model_service import ProcessModelService
from spiffworkflow_backend.services.user_service import UserService

# from tests.spiffworkflow_backend.helpers.test_data import logged_in_headers


class BaseTest:
    """BaseTest."""

    @staticmethod
    def find_or_create_user(username: str = "test_user1") -> UserModel:
    def find_or_create_user(username: str = "test_user_1") -> UserModel:
        """Find_or_create_user."""
        user = UserModel.query.filter_by(username=username).first()
        if isinstance(user, UserModel):

@@ -39,7 +40,6 @@ class BaseTest:

        user = UserService().create_user("internal", username, username=username)
        if isinstance(user, UserModel):
            UserService().create_principal(user_id=user.id)
            return user

        raise ApiError(

@@ -128,7 +128,7 @@ class BaseTest:
            "/v1.0/process-models",
            content_type="application/json",
            data=json.dumps(ProcessModelInfoSchema().dump(model)),
            headers=logged_in_headers(user),
            headers=self.logged_in_headers(user),
        )
        assert response.status_code == 201
        return response

@@ -154,7 +154,7 @@ class BaseTest:
            data=data,
            follow_redirects=True,
            content_type="multipart/form-data",
            headers=logged_in_headers(user),
            headers=self.logged_in_headers(user),
        )
        assert response.status_code == 201
        assert response.get_data() is not None

@@ -164,7 +164,7 @@ class BaseTest:

        response = client.get(
            f"/v1.0/process-models/{process_model.process_group_id}/{process_model.id}/files/{file_name}",
            headers=logged_in_headers(user),
            headers=self.logged_in_headers(user),
        )
        assert response.status_code == 200
        file2 = json.loads(response.get_data(as_text=True))

@@ -184,7 +184,7 @@ class BaseTest:
        )
        response = client.post(
            "/v1.0/process-groups",
            headers=logged_in_headers(user),
            headers=self.logged_in_headers(user),
            content_type="application/json",
            data=json.dumps(ProcessGroupSchema().dump(process_group)),
        )

@@ -220,6 +220,32 @@ class BaseTest:
        db.session.commit()
        return process_instance

    @staticmethod
    def logged_in_headers(
        user: UserModel, _redirect_url: str = "http://some/frontend/url"
    ) -> Dict[str, str]:
        """Logged_in_headers."""
        # if user is None:
        #     uid = 'test_user'
        #     user_info = {'uid': 'test_user'}
        # else:
        #     uid = user.uid
        #     user_info = {'uid': user.uid}

        # query_string = user_info_to_query_string(user_info, redirect_url)
        # rv = self.app.get("/v1.0/login%s" % query_string, follow_redirects=False)
        # self.assertTrue(rv.status_code == 302)
        # self.assertTrue(str.startswith(rv.location, redirect_url))
        #
        # user_model = session.query(UserModel).filter_by(uid=uid).first()
        # self.assertIsNotNone(user_model.ldap_info.display_name)
        # self.assertEqual(user_model.uid, uid)
        # self.assertTrue('user' in g, 'User should be in Flask globals')
        # user = UserService.current_user(allow_admin_impersonate=True)
        # self.assertEqual(uid, user.uid, 'Logged in user should match given user uid')

        return dict(Authorization="Bearer " + user.encode_auth_token())

    def get_test_data_file_contents(
        self, file_name: str, process_model_test_data_dir: str
    ) -> bytes:

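With logged_in_headers now a staticmethod on BaseTest, every test builds its auth header the same way. A typical call pattern, mirroring the tests changed below:

    user = self.find_or_create_user()
    response = client.get(
        "/v1.0/process-groups", headers=self.logged_in_headers(user)
    )
    assert response.status_code == 200
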
@@ -1,5 +1,4 @@
"""User."""
from typing import Dict
from typing import Optional

from tests.spiffworkflow_backend.helpers.example_data import ExampleDataLoader

@@ -9,7 +8,6 @@ from spiffworkflow_backend.exceptions.process_entity_not_found_error import (
)
from spiffworkflow_backend.models.process_group import ProcessGroup
from spiffworkflow_backend.models.process_model import ProcessModelInfo
from spiffworkflow_backend.models.user import UserModel
from spiffworkflow_backend.services.process_model_service import ProcessModelService


@@ -81,29 +79,3 @@ def load_test_spec(
#     query_string_list.append('redirect_url=%s' % redirect_url)
#
#     return '?%s' % '&'.join(query_string_list)


def logged_in_headers(
    user: UserModel, _redirect_url: str = "http://some/frontend/url"
) -> Dict[str, str]:
    """Logged_in_headers."""
    # if user is None:
    #     uid = 'test_user'
    #     user_info = {'uid': 'test_user'}
    # else:
    #     uid = user.uid
    #     user_info = {'uid': user.uid}

    # query_string = user_info_to_query_string(user_info, redirect_url)
    # rv = self.app.get("/v1.0/login%s" % query_string, follow_redirects=False)
    # self.assertTrue(rv.status_code == 302)
    # self.assertTrue(str.startswith(rv.location, redirect_url))
    #
    # user_model = session.query(UserModel).filter_by(uid=uid).first()
    # self.assertIsNotNone(user_model.ldap_info.display_name)
    # self.assertEqual(user_model.uid, uid)
    # self.assertTrue('user' in g, 'User should be in Flask globals')
    # user = UserService.current_user(allow_admin_impersonate=True)
    # self.assertEqual(uid, user.uid, 'Logged in user should match given user uid')

    return dict(Authorization="Bearer " + user.encode_auth_token())

@@ -2,7 +2,6 @@
from flask.app import Flask
from flask.testing import FlaskClient
from tests.spiffworkflow_backend.helpers.base_test import BaseTest
from tests.spiffworkflow_backend.helpers.test_data import logged_in_headers


class TestLoggingService(BaseTest):

@@ -15,7 +14,7 @@ class TestLoggingService(BaseTest):
        process_group_id = "test_logging_spiff_logger"
        process_model_id = "simple_script"
        user = self.find_or_create_user()
        headers = logged_in_headers(user)
        headers = self.logged_in_headers(user)
        response = self.create_process_instance(
            client, process_group_id, process_model_id, headers
        )

@@ -23,13 +22,13 @@ class TestLoggingService(BaseTest):
        process_instance_id = response.json["id"]
        response = client.post(
            f"/v1.0/process-models/{process_group_id}/{process_model_id}/process-instances/{process_instance_id}/run",
            headers=logged_in_headers(user),
            headers=self.logged_in_headers(user),
        )
        assert response.status_code == 200

        log_response = client.get(
            f"/v1.0/process-models/{process_group_id}/{process_model_id}/process-instances/{process_instance_id}/logs",
            headers=logged_in_headers(user),
            headers=self.logged_in_headers(user),
        )
        assert log_response.status_code == 200
        assert log_response.json

@@ -10,7 +10,6 @@ from flask.testing import FlaskClient
from flask_bpmn.models.db import db
from tests.spiffworkflow_backend.helpers.base_test import BaseTest
from tests.spiffworkflow_backend.helpers.test_data import load_test_spec
from tests.spiffworkflow_backend.helpers.test_data import logged_in_headers

from spiffworkflow_backend.exceptions.process_entity_not_found_error import (
    ProcessEntityNotFoundError,

@@ -101,7 +100,7 @@ class TestProcessApi(BaseTest):
            data=data,
            follow_redirects=True,
            content_type="multipart/form-data",
            headers=logged_in_headers(user),
            headers=self.logged_in_headers(user),
        )
        assert response.status_code == 200
        process_model = ProcessModelService().get_process_model(

@@ -125,7 +124,7 @@ class TestProcessApi(BaseTest):
        user = self.find_or_create_user()
        response = client.delete(
            f"/v1.0/process-models/{process_model.process_group_id}/{process_model.id}",
            headers=logged_in_headers(user),
            headers=self.logged_in_headers(user),
        )
        assert response.status_code == 200
        assert response.json is not None

@@ -142,7 +141,7 @@ class TestProcessApi(BaseTest):
        test_process_group_id = "runs_without_input"
        test_process_model_id = "sample"
        user = self.find_or_create_user()
        headers = logged_in_headers(user)
        headers = self.logged_in_headers(user)
        # create an instance from a model
        response = self.create_process_instance(
            client, test_process_group_id, test_process_model_id, headers

@@ -155,7 +154,7 @@ class TestProcessApi(BaseTest):
        # try to delete the model
        response = client.delete(
            f"/v1.0/process-models/{test_process_group_id}/{test_process_model_id}",
            headers=logged_in_headers(user),
            headers=self.logged_in_headers(user),
        )

        # make sure we get an error in the response

@@ -183,7 +182,7 @@ class TestProcessApi(BaseTest):
        user = self.find_or_create_user()
        response = client.put(
            f"/v1.0/process-models/{process_model.process_group_id}/{process_model.id}",
            headers=logged_in_headers(user),
            headers=self.logged_in_headers(user),
            content_type="application/json",
            data=json.dumps(ProcessModelInfoSchema().dump(process_model)),
        )

@@ -216,8 +215,8 @@ class TestProcessApi(BaseTest):

        # get all models
        response = client.get(
            f"/v1.0/process-groups/{group_id}/process-models",
            headers=logged_in_headers(user),
            f"/v1.0/process-models?process_group_identifier={group_id}",
            headers=self.logged_in_headers(user),
        )
        assert response.json is not None
        assert len(response.json["results"]) == 5

@@ -227,8 +226,8 @@ class TestProcessApi(BaseTest):

        # get first page, 1 per page
        response = client.get(
            f"/v1.0/process-groups/{group_id}/process-models?page=1&per_page=1",
            headers=logged_in_headers(user),
            f"/v1.0/process-models?page=1&per_page=1&process_group_identifier={group_id}",
            headers=self.logged_in_headers(user),
        )
        assert response.json is not None
        assert len(response.json["results"]) == 1

@@ -239,8 +238,8 @@ class TestProcessApi(BaseTest):

        # get second page, 1 per page
        response = client.get(
            f"/v1.0/process-groups/{group_id}/process-models?page=2&per_page=1",
            headers=logged_in_headers(user),
            f"/v1.0/process-models?page=2&per_page=1&process_group_identifier={group_id}",
            headers=self.logged_in_headers(user),
        )
        assert response.json is not None
        assert len(response.json["results"]) == 1

@@ -251,8 +250,8 @@ class TestProcessApi(BaseTest):

        # get first page, 3 per page
        response = client.get(
            f"/v1.0/process-groups/{group_id}/process-models?page=1&per_page=3",
            headers=logged_in_headers(user),
            f"/v1.0/process-models?page=1&per_page=3&process_group_identifier={group_id}",
            headers=self.logged_in_headers(user),
        )
        assert response.json is not None
        assert len(response.json["results"]) == 3

@@ -263,8 +262,8 @@ class TestProcessApi(BaseTest):

        # get second page, 3 per page
        response = client.get(
            f"/v1.0/process-groups/{group_id}/process-models?page=2&per_page=3",
            headers=logged_in_headers(user),
            f"/v1.0/process-models?page=2&per_page=3&process_group_identifier={group_id}",
            headers=self.logged_in_headers(user),
        )
        # there should only be 2 left
        assert response.json is not None

@@ -287,7 +286,7 @@ class TestProcessApi(BaseTest):
        user = self.find_or_create_user()
        response = client.post(
            "/v1.0/process-groups",
            headers=logged_in_headers(user),
            headers=self.logged_in_headers(user),
            content_type="application/json",
            data=json.dumps(ProcessGroupSchema().dump(process_group)),
        )

@@ -320,7 +319,8 @@ class TestProcessApi(BaseTest):
        assert persisted.id == process_group_id

        client.delete(
            f"/v1.0/process-groups/{process_group_id}", headers=logged_in_headers(user)
            f"/v1.0/process-groups/{process_group_id}",
            headers=self.logged_in_headers(user),
        )

        with pytest.raises(ProcessEntityNotFoundError):

@@ -345,7 +345,7 @@ class TestProcessApi(BaseTest):

        response = client.put(
            f"/v1.0/process-groups/{group_id}",
            headers=logged_in_headers(user),
            headers=self.logged_in_headers(user),
            content_type="application/json",
            data=json.dumps(ProcessGroupSchema().dump(process_group)),
        )

@@ -370,7 +370,7 @@ class TestProcessApi(BaseTest):
        # get all groups
        response = client.get(
            "/v1.0/process-groups",
            headers=logged_in_headers(user),
            headers=self.logged_in_headers(user),
        )
        assert response.json is not None
        assert len(response.json["results"]) == 5

@@ -381,7 +381,7 @@ class TestProcessApi(BaseTest):
        # get first page, one per page
        response = client.get(
            "/v1.0/process-groups?page=1&per_page=1",
            headers=logged_in_headers(user),
            headers=self.logged_in_headers(user),
        )
        assert response.json is not None
        assert len(response.json["results"]) == 1

@@ -393,7 +393,7 @@ class TestProcessApi(BaseTest):
        # get second page, one per page
        response = client.get(
            "/v1.0/process-groups?page=2&per_page=1",
            headers=logged_in_headers(user),
            headers=self.logged_in_headers(user),
        )
        assert response.json is not None
        assert len(response.json["results"]) == 1

@@ -405,7 +405,7 @@ class TestProcessApi(BaseTest):
        # get first page, 3 per page
        response = client.get(
            "/v1.0/process-groups?page=1&per_page=3",
            headers=logged_in_headers(user),
            headers=self.logged_in_headers(user),
        )
        assert response.json is not None
        assert len(response.json["results"]) == 3

@@ -419,7 +419,7 @@ class TestProcessApi(BaseTest):
        # get second page, 3 per page
        response = client.get(
            "/v1.0/process-groups?page=2&per_page=3",
            headers=logged_in_headers(user),
            headers=self.logged_in_headers(user),
        )
        # there should only be 2 left
        assert response.json is not None

@@ -436,15 +436,15 @@ class TestProcessApi(BaseTest):
        """Test_process_model_file_update."""
        self.create_spec_file(client)

        spec = load_test_spec("random_fact")
        process_model = load_test_spec("random_fact")
        data = {"key1": "THIS DATA"}
        user = self.find_or_create_user()
        response = client.put(
            f"/v1.0/process-models/{spec.process_group_id}/{spec.id}/files/random_fact.svg",
            f"/v1.0/process-models/{process_model.process_group_id}/{process_model.id}/files/random_fact.svg",
            data=data,
            follow_redirects=True,
            content_type="multipart/form-data",
            headers=logged_in_headers(user),
            headers=self.logged_in_headers(user),
        )

        assert response.status_code == 400

@@ -457,15 +457,15 @@ class TestProcessApi(BaseTest):
        """Test_process_model_file_update."""
        self.create_spec_file(client)

        spec = load_test_spec("random_fact")
        process_model = load_test_spec("random_fact")
        data = {"file": (io.BytesIO(b""), "random_fact.svg")}
        user = self.find_or_create_user()
        response = client.put(
            f"/v1.0/process-models/{spec.process_group_id}/{spec.id}/files/random_fact.svg",
            f"/v1.0/process-models/{process_model.process_group_id}/{process_model.id}/files/random_fact.svg",
            data=data,
            follow_redirects=True,
            content_type="multipart/form-data",
            headers=logged_in_headers(user),
            headers=self.logged_in_headers(user),
        )

        assert response.status_code == 400

@@ -478,16 +478,16 @@ class TestProcessApi(BaseTest):
        """Test_process_model_file_update."""
        original_file = self.create_spec_file(client)

        spec = load_test_spec("random_fact")
        process_model = load_test_spec("random_fact")
        new_file_contents = b"THIS_IS_NEW_DATA"
        data = {"file": (io.BytesIO(new_file_contents), "random_fact.svg")}
        user = self.find_or_create_user()
        response = client.put(
            f"/v1.0/process-models/{spec.process_group_id}/{spec.id}/files/random_fact.svg",
            f"/v1.0/process-models/{process_model.process_group_id}/{process_model.id}/files/random_fact.svg",
            data=data,
            follow_redirects=True,
            content_type="multipart/form-data",
            headers=logged_in_headers(user),
            headers=self.logged_in_headers(user),
        )

        assert response.status_code == 200

@@ -495,8 +495,8 @@ class TestProcessApi(BaseTest):
        assert response.json["ok"]

        response = client.get(
            f"/v1.0/process-models/{spec.process_group_id}/{spec.id}/files/random_fact.svg",
            headers=logged_in_headers(user),
            f"/v1.0/process-models/{process_model.process_group_id}/{process_model.id}/files/random_fact.svg",
            headers=self.logged_in_headers(user),
        )
        assert response.status_code == 200
        updated_file = json.loads(response.get_data(as_text=True))

@@ -509,12 +509,12 @@ class TestProcessApi(BaseTest):
        """Test_process_model_file_update."""
        self.create_spec_file(client)

        spec = load_test_spec("random_fact")
        process_model = load_test_spec("random_fact")
        user = self.find_or_create_user()
        response = client.delete(
            f"/v1.0/process-models/INCORRECT-NON-EXISTENT-GROUP/{spec.id}/files/random_fact.svg",
            f"/v1.0/process-models/INCORRECT-NON-EXISTENT-GROUP/{process_model.id}/files/random_fact.svg",
            follow_redirects=True,
            headers=logged_in_headers(user),
            headers=self.logged_in_headers(user),
        )

        assert response.status_code == 400

@@ -527,12 +527,12 @@ class TestProcessApi(BaseTest):
        """Test_process_model_file_update."""
        self.create_spec_file(client)

        spec = load_test_spec("random_fact")
        process_model = load_test_spec("random_fact")
        user = self.find_or_create_user()
        response = client.delete(
            f"/v1.0/process-models/{spec.process_group_id}/{spec.id}/files/random_fact_DOES_NOT_EXIST.svg",
            f"/v1.0/process-models/{process_model.process_group_id}/{process_model.id}/files/random_fact_DOES_NOT_EXIST.svg",
            follow_redirects=True,
            headers=logged_in_headers(user),
            headers=self.logged_in_headers(user),
        )

        assert response.status_code == 400

@@ -545,12 +545,12 @@ class TestProcessApi(BaseTest):
        """Test_process_model_file_update."""
        self.create_spec_file(client)

        spec = load_test_spec("random_fact")
        process_model = load_test_spec("random_fact")
        user = self.find_or_create_user()
        response = client.delete(
            f"/v1.0/process-models/{spec.process_group_id}/{spec.id}/files/random_fact.svg",
            f"/v1.0/process-models/{process_model.process_group_id}/{process_model.id}/files/random_fact.svg",
            follow_redirects=True,
            headers=logged_in_headers(user),
            headers=self.logged_in_headers(user),
        )

        assert response.status_code == 200

@@ -558,8 +558,8 @@ class TestProcessApi(BaseTest):
        assert response.json["ok"]

        response = client.get(
            f"/v1.0/process-models/{spec.process_group_id}/{spec.id}/files/random_fact.svg",
            headers=logged_in_headers(user),
            f"/v1.0/process-models/{process_model.process_group_id}/{process_model.id}/files/random_fact.svg",
            headers=self.logged_in_headers(user),
        )
        assert response.status_code == 404

@@ -573,7 +573,7 @@ class TestProcessApi(BaseTest):
        load_test_spec(process_model_dir_name, process_group_id=test_process_group_id)
        response = client.get(
            f"/v1.0/process-models/{test_process_group_id}/{process_model_dir_name}/files/hello_world.bpmn",
            headers=logged_in_headers(user),
            headers=self.logged_in_headers(user),
        )
        assert response.status_code == 200
        assert response.json is not None

@@ -586,10 +586,10 @@ class TestProcessApi(BaseTest):
    ) -> None:
        """Test_get_workflow_from_workflow_spec."""
        user = self.find_or_create_user()
        spec = load_test_spec("hello_world")
        process_model = load_test_spec("hello_world")
        response = client.post(
            f"/v1.0/process-models/{spec.process_group_id}/{spec.id}/process-instances",
            headers=logged_in_headers(user),
            f"/v1.0/process-models/{process_model.process_group_id}/{process_model.id}/process-instances",
            headers=self.logged_in_headers(user),
        )
        assert response.status_code == 201
        assert response.json is not None

@@ -601,7 +601,9 @@ class TestProcessApi(BaseTest):
    ) -> None:
        """Test_get_process_groups_when_none."""
        user = self.find_or_create_user()
        response = client.get("/v1.0/process-groups", headers=logged_in_headers(user))
        response = client.get(
            "/v1.0/process-groups", headers=self.logged_in_headers(user)
        )
        assert response.status_code == 200
        assert response.json is not None
        assert response.json["results"] == []

@@ -612,7 +614,9 @@ class TestProcessApi(BaseTest):
        """Test_get_process_groups_when_there_are_some."""
        user = self.find_or_create_user()
        load_test_spec("hello_world")
        response = client.get("/v1.0/process-groups", headers=logged_in_headers(user))
        response = client.get(
            "/v1.0/process-groups", headers=self.logged_in_headers(user)
        )
        assert response.status_code == 200
        assert response.json is not None
        assert len(response.json["results"]) == 1

@@ -630,7 +634,7 @@ class TestProcessApi(BaseTest):
        load_test_spec(process_model_dir_name, process_group_id=test_process_group_id)
        response = client.get(
            f"/v1.0/process-groups/{test_process_group_id}",
            headers=logged_in_headers(user),
            headers=self.logged_in_headers(user),
        )
        assert response.status_code == 200
        assert response.json is not None

@@ -647,7 +651,7 @@ class TestProcessApi(BaseTest):
        load_test_spec(process_model_dir_name, process_group_id=test_process_group_id)
        response = client.get(
            f"/v1.0/process-models/{test_process_group_id}/{process_model_dir_name}",
            headers=logged_in_headers(user),
            headers=self.logged_in_headers(user),
        )
        assert response.status_code == 200
        assert response.json is not None

@@ -664,7 +668,7 @@ class TestProcessApi(BaseTest):
        group_id = self.create_process_group(client, user, "my_group")
        response = client.get(
            f"/v1.0/process-models/{group_id}/{process_model_dir_name}",
            headers=logged_in_headers(user),
            headers=self.logged_in_headers(user),
        )
        assert response.status_code == 400
        assert response.json is not None

@@ -677,7 +681,7 @@ class TestProcessApi(BaseTest):
        test_process_group_id = "runs_without_input"
        test_process_model_id = "sample"
        user = self.find_or_create_user()
        headers = logged_in_headers(user)
        headers = self.logged_in_headers(user)
        response = self.create_process_instance(
            client, test_process_group_id, test_process_model_id, headers
        )

@@ -693,7 +697,7 @@ class TestProcessApi(BaseTest):
        process_group_id = "runs_without_input"
        process_model_id = "sample"
        user = self.find_or_create_user()
        headers = logged_in_headers(user)
        headers = self.logged_in_headers(user)
        response = self.create_process_instance(
            client, process_group_id, process_model_id, headers
        )

@@ -701,7 +705,7 @@ class TestProcessApi(BaseTest):
        process_instance_id = response.json["id"]
        response = client.post(
            f"/v1.0/process-models/{process_group_id}/{process_model_id}/process-instances/{process_instance_id}/run",
            headers=logged_in_headers(user),
            headers=self.logged_in_headers(user),
        )

        assert response.json is not None

@@ -709,7 +713,7 @@ class TestProcessApi(BaseTest):
        assert response.json["updated_at_in_seconds"] > 0
        assert response.json["status"] == "complete"
        assert response.json["process_model_identifier"] == process_model_id
        assert response.json["data"]["current_user"]["username"] == "test_user1"
        assert response.json["data"]["current_user"]["username"] == user.username
        assert response.json["data"]["Mike"] == "Awesome"
        assert response.json["data"]["person"] == "Kevin"

@@ -733,7 +737,7 @@ class TestProcessApi(BaseTest):
        response = client.post(
            f"/v1.0/messages/{message_model_identifier}",
            content_type="application/json",
            headers=logged_in_headers(user),
            headers=self.logged_in_headers(user),
            data=json.dumps({"payload": payload}),
        )
        assert response.status_code == 200

@@ -773,7 +777,7 @@ class TestProcessApi(BaseTest):
            client,
            process_model.process_group_id,
            process_model.id,
            logged_in_headers(user),
            self.logged_in_headers(user),
        )
        assert response.json is not None
        process_instance_id = response.json["id"]

@@ -781,7 +785,7 @@ class TestProcessApi(BaseTest):
        response = client.post(
            f"/v1.0/process-models/{process_model.process_group_id}/"
            f"{process_model.id}/process-instances/{process_instance_id}/run",
            headers=logged_in_headers(user),
            headers=self.logged_in_headers(user),
        )

        assert response.json is not None

@@ -789,7 +793,7 @@ class TestProcessApi(BaseTest):
        response = client.post(
            f"/v1.0/messages/{message_model_identifier}",
            content_type="application/json",
            headers=logged_in_headers(user),
            headers=self.logged_in_headers(user),
            data=json.dumps(
                {"payload": payload, "process_instance_id": process_instance_id}
            ),

@@ -824,7 +828,7 @@ class TestProcessApi(BaseTest):
            client,
            process_model.process_group_id,
            process_model.id,
            logged_in_headers(user),
            self.logged_in_headers(user),
        )
        assert response.json is not None
        process_instance_id = response.json["id"]

@@ -832,7 +836,7 @@ class TestProcessApi(BaseTest):
        response = client.post(
            f"/v1.0/process-models/{process_model.process_group_id}/"
            f"{process_model.id}/process-instances/{process_instance_id}/run",
            headers=logged_in_headers(user),
            headers=self.logged_in_headers(user),
        )
        assert response.status_code == 200
        assert response.json is not None

@@ -840,7 +844,7 @@ class TestProcessApi(BaseTest):
        response = client.post(
            f"/v1.0/process-models/{process_model.process_group_id}/"
            f"{process_model.id}/process-instances/{process_instance_id}/terminate",
            headers=logged_in_headers(user),
            headers=self.logged_in_headers(user),
        )
        assert response.status_code == 200
        assert response.json is not None

@@ -859,7 +863,7 @@ class TestProcessApi(BaseTest):
        process_model_id = "user_task"

        user = self.find_or_create_user()
        headers = logged_in_headers(user)
        headers = self.logged_in_headers(user)
        response = self.create_process_instance(
            client, process_group_id, process_model_id, headers
        )

@@ -868,7 +872,7 @@ class TestProcessApi(BaseTest):

        response = client.post(
            f"/v1.0/process-models/{process_group_id}/{process_model_id}/process-instances/{process_instance_id}/run",
            headers=logged_in_headers(user),
            headers=self.logged_in_headers(user),
        )

        assert response.json is not None

@@ -883,7 +887,7 @@ class TestProcessApi(BaseTest):

        delete_response = client.delete(
            f"/v1.0/process-models/{process_group_id}/{process_model_id}/process-instances/{process_instance_id}",
            headers=logged_in_headers(user),
            headers=self.logged_in_headers(user),
        )
        assert delete_response.status_code == 200

@@ -895,7 +899,7 @@ class TestProcessApi(BaseTest):
        process_model_id = "user_task"

        user = self.find_or_create_user()
        headers = logged_in_headers(user)
        headers = self.logged_in_headers(user)
        response = self.create_process_instance(
            client, process_group_id, process_model_id, headers
        )

@@ -904,7 +908,7 @@ class TestProcessApi(BaseTest):

        response = client.post(
            f"/v1.0/process-models/{process_group_id}/{process_model_id}/process-instances/{process_instance_id}/run",
            headers=logged_in_headers(user),
            headers=self.logged_in_headers(user),
        )

        assert response.json is not None

@@ -925,14 +929,14 @@ class TestProcessApi(BaseTest):
        test_process_group_id = "runs_without_input"
        process_model_dir_name = "sample"
        user = self.find_or_create_user()
        headers = logged_in_headers(user)
        headers = self.logged_in_headers(user)
        self.create_process_instance(
            client, test_process_group_id, process_model_dir_name, headers
        )

        response = client.get(
            f"/v1.0/process-models/{test_process_group_id}/{process_model_dir_name}/process-instances",
            headers=logged_in_headers(user),
            "/v1.0/process-instances",
            headers=self.logged_in_headers(user),
        )
        assert response.status_code == 200
        assert response.json is not None

@@ -961,7 +965,7 @@ class TestProcessApi(BaseTest):
        test_process_group_id = "runs_without_input"
        process_model_dir_name = "sample"
        user = self.find_or_create_user()
        headers = logged_in_headers(user)
        headers = self.logged_in_headers(user)
        self.create_process_instance(
            client, test_process_group_id, process_model_dir_name, headers
        )

@@ -979,8 +983,8 @@ class TestProcessApi(BaseTest):
        )

        response = client.get(
            f"/v1.0/process-models/{test_process_group_id}/{process_model_dir_name}/process-instances?per_page=2&page=3",
            headers=logged_in_headers(user),
            "/v1.0/process-instances?per_page=2&page=3",
            headers=self.logged_in_headers(user),
        )
        assert response.status_code == 200
        assert response.json is not None

@@ -990,8 +994,8 @@ class TestProcessApi(BaseTest):
        assert response.json["pagination"]["total"] == 5

        response = client.get(
            f"/v1.0/process-models/{test_process_group_id}/{process_model_dir_name}/process-instances?per_page=2&page=1",
            headers=logged_in_headers(user),
            "/v1.0/process-instances?per_page=2&page=1",
            headers=self.logged_in_headers(user),
        )
        assert response.status_code == 200
        assert response.json is not None

@@ -1027,8 +1031,8 @@ class TestProcessApi(BaseTest):

        # Without filtering we should get all 5 instances
        response = client.get(
            f"/v1.0/process-models/{test_process_group_id}/{test_process_model_id}/process-instances",
            headers=logged_in_headers(user),
            f"/v1.0/process-instances?process_group_identifier={test_process_group_id}&process_model_identifier={test_process_model_id}",
            headers=self.logged_in_headers(user),
        )
        assert response.json is not None
        results = response.json["results"]

@ -1038,19 +1042,29 @@ class TestProcessApi(BaseTest):
|
|||
# we should get 1 instance each time
|
||||
for i in range(5):
|
||||
response = client.get(
|
||||
f"/v1.0/process-models/{test_process_group_id}/{test_process_model_id}/process-instances?process_status={ProcessInstanceStatus[statuses[i]].value}",
|
||||
headers=logged_in_headers(user),
|
||||
f"/v1.0/process-instances?process_status={ProcessInstanceStatus[statuses[i]].value}&process_group_identifier={test_process_group_id}&process_model_identifier={test_process_model_id}",
|
||||
headers=self.logged_in_headers(user),
|
||||
)
|
||||
assert response.json is not None
|
||||
results = response.json["results"]
|
||||
assert len(results) == 1
|
||||
assert results[0]["status"] == ProcessInstanceStatus[statuses[i]].value
|
||||
|
||||
response = client.get(
|
||||
f"/v1.0/process-instances?process_status=not_started,complete&process_group_identifier={test_process_group_id}&process_model_identifier={test_process_model_id}",
|
||||
headers=self.logged_in_headers(user),
|
||||
)
|
||||
assert response.json is not None
|
||||
results = response.json["results"]
|
||||
assert len(results) == 2
|
||||
assert results[0]["status"] in ["complete", "not_started"]
|
||||
assert results[1]["status"] in ["complete", "not_started"]
|
||||
|
||||
# filter by start/end seconds
|
||||
# start > 1000 - this should eliminate the first
|
||||
response = client.get(
|
||||
f"/v1.0/process-models/{test_process_group_id}/{test_process_model_id}/process-instances?start_from=1001",
|
||||
headers=logged_in_headers(user),
|
||||
"/v1.0/process-instances?start_from=1001",
|
||||
headers=self.logged_in_headers(user),
|
||||
)
|
||||
assert response.json is not None
|
||||
results = response.json["results"]
|
||||
|
@ -1060,8 +1074,8 @@ class TestProcessApi(BaseTest):
|
|||
|
||||
# start > 2000, end < 5000 - this should eliminate the first 2 and the last
|
||||
response = client.get(
|
||||
f"/v1.0/process-models/{test_process_group_id}/{test_process_model_id}/process-instances?start_from=2001&end_till=5999",
|
||||
headers=logged_in_headers(user),
|
||||
"/v1.0/process-instances?start_from=2001&end_till=5999",
|
||||
headers=self.logged_in_headers(user),
|
||||
)
|
||||
assert response.json is not None
|
||||
results = response.json["results"]
|
||||
|
@ -1071,8 +1085,8 @@ class TestProcessApi(BaseTest):
|
|||
|
||||
# start > 1000, start < 4000 - this should eliminate the first and the last 2
|
||||
response = client.get(
|
||||
f"/v1.0/process-models/{test_process_group_id}/{test_process_model_id}/process-instances?start_from=1001&start_till=3999",
|
||||
headers=logged_in_headers(user),
|
||||
"/v1.0/process-instances?start_from=1001&start_till=3999",
|
||||
headers=self.logged_in_headers(user),
|
||||
)
|
||||
assert response.json is not None
|
||||
results = response.json["results"]
|
||||
|
@ -1082,8 +1096,8 @@ class TestProcessApi(BaseTest):
|
|||
|
||||
# end > 2000, end < 6000 - this should eliminate the first and the last
|
||||
response = client.get(
|
||||
f"/v1.0/process-models/{test_process_group_id}/{test_process_model_id}/process-instances?end_from=2001&end_till=5999",
|
||||
headers=logged_in_headers(user),
|
||||
"/v1.0/process-instances?end_from=2001&end_till=5999",
|
||||
headers=self.logged_in_headers(user),
|
||||
)
|
||||
assert response.json is not None
|
||||
results = response.json["results"]
|
||||
|
@ -1098,7 +1112,7 @@ class TestProcessApi(BaseTest):
|
|||
process_group_identifier = "runs_without_input"
|
||||
process_model_identifier = "sample"
|
||||
user = self.find_or_create_user()
|
||||
logged_in_headers(user)
|
||||
self.logged_in_headers(user)
|
||||
load_test_spec(
|
||||
process_model_identifier, process_group_id=process_group_identifier
|
||||
)
|
||||
|
@ -1113,7 +1127,7 @@ class TestProcessApi(BaseTest):
        )
        response = client.get(
            f"/v1.0/process-models/{process_group_identifier}/{process_model_identifier}/process-instances/reports",
            headers=logged_in_headers(user),
            headers=self.logged_in_headers(user),
        )
        assert response.status_code == 200
        assert response.json is not None
@ -1162,7 +1176,7 @@ class TestProcessApi(BaseTest):

        response = client.get(
            f"/v1.0/process-models/{test_process_group_id}/{process_model_dir_name}/process-instances/reports/sure",
            headers=logged_in_headers(user),
            headers=self.logged_in_headers(user),
        )
        assert response.status_code == 200
        assert response.json is not None
@ -1215,7 +1229,7 @@ class TestProcessApi(BaseTest):

        response = client.get(
            f"/v1.0/process-models/{test_process_group_id}/{process_model_dir_name}/process-instances/reports/sure?grade_level=1",
            headers=logged_in_headers(user),
            headers=self.logged_in_headers(user),
        )
        assert response.status_code == 200
        assert response.json is not None
@ -1235,7 +1249,7 @@ class TestProcessApi(BaseTest):

        response = client.get(
            f"/v1.0/process-models/{test_process_group_id}/{process_model_dir_name}/process-instances/reports/sure?grade_level=1",
            headers=logged_in_headers(user),
            headers=self.logged_in_headers(user),
        )
        assert response.status_code == 404
        data = json.loads(response.get_data(as_text=True))
@ -1249,7 +1263,7 @@ class TestProcessApi(BaseTest):
        user: UserModel,
    ) -> Any:
        """Setup_testing_instance."""
        headers = logged_in_headers(user)
        headers = self.logged_in_headers(user)
        response = self.create_process_instance(
            client, process_group_id, process_model_id, headers
        )
@ -1279,7 +1293,7 @@ class TestProcessApi(BaseTest):

        response = client.post(
            f"/v1.0/process-models/{process_group_id}/{process_model_id}/process-instances/{process_instance_id}/run",
            headers=logged_in_headers(user),
            headers=self.logged_in_headers(user),
        )
        assert response.status_code == 400

@ -1325,7 +1339,7 @@ class TestProcessApi(BaseTest):

        response = client.post(
            f"/v1.0/process-models/{process_group_id}/{process_model_id}/process-instances/{process_instance_id}/run",
            headers=logged_in_headers(user),
            headers=self.logged_in_headers(user),
        )
        assert response.status_code == 400

@ -1360,7 +1374,7 @@ class TestProcessApi(BaseTest):

        response = client.post(
            f"/v1.0/process-models/{process_group_id}/{process_model_id}/process-instances/{process_instance_id}/run",
            headers=logged_in_headers(user),
            headers=self.logged_in_headers(user),
        )
        assert response.status_code == 400
        assert len(outbox) == 1
@ -1400,10 +1414,78 @@ class TestProcessApi(BaseTest):
        assert result["name"] == file_name
        assert bytes(str(result["file_contents"]), "utf-8") == file_data

    def test_can_get_message_instances_by_process_instance_id_and_without(
        self, app: Flask, client: FlaskClient, with_db_and_bpmn_file_cleanup: None
    ) -> None:
        """Test_can_get_message_instances_by_process_instance_id."""
        load_test_spec(
            "message_receiver",
            process_model_source_directory="message_send_one_conversation",
            bpmn_file_name="message_receiver",
        )
        user = self.find_or_create_user()
        message_model_identifier = "message_send"
        payload = {
            "topica": "the_topica_string",
            "topicb": "the_topicb_string",
            "andThis": "another_item_non_key",
        }
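        # Judging by the fixture's own naming, "topica" and "topicb" presumably
        # serve as the correlation keys that match these messages to process
        # instances, while "andThis" is plain payload data ("non_key").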
        response = client.post(
            f"/v1.0/messages/{message_model_identifier}",
            content_type="application/json",
            headers=self.logged_in_headers(user),
            data=json.dumps({"payload": payload}),
        )
        assert response.status_code == 200
        assert response.json is not None
        process_instance_id_one = response.json["id"]

        response = client.post(
            f"/v1.0/messages/{message_model_identifier}",
            content_type="application/json",
            headers=self.logged_in_headers(user),
            data=json.dumps({"payload": payload}),
        )
        assert response.status_code == 200
        assert response.json is not None
        process_instance_id_two = response.json["id"]

        response = client.get(
            f"/v1.0/messages?process_instance_id={process_instance_id_one}",
            headers=self.logged_in_headers(user),
        )
        assert response.status_code == 200
        assert response.json is not None
        assert len(response.json["results"]) == 1
        assert (
            response.json["results"][0]["process_instance_id"]
            == process_instance_id_one
        )

        response = client.get(
            f"/v1.0/messages?process_instance_id={process_instance_id_two}",
            headers=self.logged_in_headers(user),
        )
        assert response.status_code == 200
        assert response.json is not None
        assert len(response.json["results"]) == 1
        assert (
            response.json["results"][0]["process_instance_id"]
            == process_instance_id_two
        )

        response = client.get(
            "/v1.0/messages",
            headers=self.logged_in_headers(user),
        )
        assert response.status_code == 200
        assert response.json is not None
        assert len(response.json["results"]) == 2

    # def test_get_process_model(self):
    #
    #     load_test_spec('random_fact')
    #     response = client.get('/v1.0/workflow-specification/random_fact', headers=logged_in_headers())
    #     response = client.get('/v1.0/workflow-specification/random_fact', headers=self.logged_in_headers())
    #     assert_success(response)
    #     json_data = json.loads(response.get_data(as_text=True))
    #     api_spec = WorkflowSpecInfoSchema().load(json_data)
@ -0,0 +1,489 @@
"""Test_secret_service."""
import json

import pytest
from flask.app import Flask
from flask.testing import FlaskClient
from flask_bpmn.api.api_error import ApiError
from tests.spiffworkflow_backend.helpers.base_test import BaseTest
from werkzeug.test import TestResponse

from spiffworkflow_backend.models.process_model import ProcessModelInfo
from spiffworkflow_backend.models.secret_model import SecretAllowedProcessPathModel
from spiffworkflow_backend.models.secret_model import SecretModel
from spiffworkflow_backend.models.secret_model import SecretModelSchema
from spiffworkflow_backend.models.user import UserModel
from spiffworkflow_backend.services.file_system_service import FileSystemService
from spiffworkflow_backend.services.process_model_service import ProcessModelService
from spiffworkflow_backend.services.secret_service import SecretService


class SecretServiceTestHelpers(BaseTest):
    """SecretServiceTestHelpers."""

    test_key = "test_key"
    test_value = "test_value"
    test_process_group_id = "test"
    test_process_group_display_name = "My Test Process Group"
    test_process_model_id = "make_cookies"
    test_process_model_display_name = "Cooooookies"
    test_process_model_description = "Om nom nom delicious cookies"

    def add_test_secret(self, user: UserModel) -> SecretModel:
        """Add_test_secret."""
        return SecretService().add_secret(self.test_key, self.test_value, user.id)

    def add_test_process(
        self, client: FlaskClient, user: UserModel
    ) -> ProcessModelInfo:
        """Add_test_process."""
        self.create_process_group(
            client,
            user,
            self.test_process_group_id,
            display_name=self.test_process_group_display_name,
        )
        self.create_process_model_with_api(
            client,
            process_group_id=self.test_process_group_id,
            process_model_id=self.test_process_model_id,
            process_model_display_name=self.test_process_model_display_name,
            process_model_description=self.test_process_model_description,
        )
        process_model_info = ProcessModelService().get_process_model(
            self.test_process_model_id, self.test_process_group_id
        )
        return process_model_info

    def add_test_secret_allowed_process(
        self, client: FlaskClient, user: UserModel
    ) -> SecretAllowedProcessPathModel:
        """Add_test_secret_allowed_process."""
        process_model_info = self.add_test_process(client, user)
        process_model_relative_path = FileSystemService.process_model_relative_path(
            process_model_info
        )

        test_secret = self.add_test_secret(user)
        allowed_process_model = SecretService().add_allowed_process(
            secret_id=test_secret.id,
            user_id=user.id,
            allowed_relative_path=process_model_relative_path,
        )
        return allowed_process_model


class TestSecretService(SecretServiceTestHelpers):
    """TestSecretService."""

    def test_add_secret(self, app: Flask, with_db_and_bpmn_file_cleanup: None) -> None:
        """Test_add_secret."""
        user = self.find_or_create_user()
        test_secret = self.add_test_secret(user)

        assert test_secret is not None
        assert test_secret.key == self.test_key
        assert test_secret.value == self.test_value
        assert test_secret.creator_user_id == user.id

    def test_add_secret_duplicate_key_fails(
        self, app: Flask, with_db_and_bpmn_file_cleanup: None
    ) -> None:
        """Test_add_secret_duplicate_key_fails."""
        user = self.find_or_create_user()
        self.add_test_secret(user)
        with pytest.raises(ApiError) as ae:
            self.add_test_secret(user)
        assert ae.value.code == "create_secret_error"

    def test_get_secret(self, app: Flask, with_db_and_bpmn_file_cleanup: None) -> None:
        """Test_get_secret."""
        user = self.find_or_create_user()
        self.add_test_secret(user)

        secret = SecretService().get_secret(self.test_key)
        assert secret is not None
        assert secret == self.test_value

    def test_get_secret_bad_key_fails(
        self, app: Flask, with_db_and_bpmn_file_cleanup: None
    ) -> None:
        """Test_get_secret_bad_service."""
        user = self.find_or_create_user()
        self.add_test_secret(user)

        bad_secret = SecretService().get_secret("bad_key")
        assert bad_secret is None

    def test_update_secret(
        self, app: Flask, client: FlaskClient, with_db_and_bpmn_file_cleanup: None
    ) -> None:
        """Test update secret."""
        user = self.find_or_create_user()
        self.add_test_secret(user)
        secret = SecretService.get_secret(self.test_key)
        assert secret == self.test_value
        SecretService.update_secret(self.test_key, "new_secret_value", user.id)
        new_secret = SecretService.get_secret(self.test_key)
        assert new_secret == "new_secret_value"  # noqa: S105

    def test_update_secret_bad_user_fails(
        self, app: Flask, client: FlaskClient, with_db_and_bpmn_file_cleanup: None
    ) -> None:
        """Test_update_secret_bad_user."""
        user = self.find_or_create_user()
        self.add_test_secret(user)
        with pytest.raises(ApiError) as ae:
            SecretService.update_secret(
                self.test_key, "new_secret_value", user.id + 1
            )  # noqa: S105
        assert (
            ae.value.message
            == f"User: {user.id+1} cannot update the secret with key : test_key"
        )

    def test_update_secret_bad_secret_fails(
        self, app: Flask, client: FlaskClient, with_db_and_bpmn_file_cleanup: None
    ) -> None:
        """Test_update_secret_bad_secret_fails."""
        user = self.find_or_create_user()
        secret = self.add_test_secret(user)
        with pytest.raises(ApiError) as ae:
            SecretService.update_secret(secret.key + "x", "some_new_value", user.id)
        assert "Resource does not exist" in ae.value.message
        assert ae.value.code == "update_secret_error"

    def test_delete_secret(
        self, app: Flask, client: FlaskClient, with_db_and_bpmn_file_cleanup: None
    ) -> None:
        """Test delete secret."""
        user = self.find_or_create_user()
        self.add_test_secret(user)
        secrets = SecretModel.query.all()
        assert len(secrets) == 1
        assert secrets[0].creator_user_id == user.id
        SecretService.delete_secret(self.test_key, user.id)
        secrets = SecretModel.query.all()
        assert len(secrets) == 0

    def test_delete_secret_bad_user_fails(
        self, app: Flask, client: FlaskClient, with_db_and_bpmn_file_cleanup: None
    ) -> None:
        """Test_delete_secret_bad_user."""
        user = self.find_or_create_user()
        self.add_test_secret(user)
        with pytest.raises(ApiError) as ae:
            SecretService.delete_secret(self.test_key, user.id + 1)
        assert (
            f"User: {user.id+1} cannot delete the secret with key" in ae.value.message
        )

    def test_delete_secret_bad_secret_fails(
        self, app: Flask, client: FlaskClient, with_db_and_bpmn_file_cleanup: None
    ) -> None:
        """Test_delete_secret_bad_secret_fails."""
        user = self.find_or_create_user()
        self.add_test_secret(user)
        with pytest.raises(ApiError) as ae:
            SecretService.delete_secret(self.test_key + "x", user.id)
        assert "Resource does not exist" in ae.value.message

    def test_secret_add_allowed_process(
        self, app: Flask, client: FlaskClient, with_db_and_bpmn_file_cleanup: None
    ) -> None:
        """Test_secret_add_allowed_process."""
        user = self.find_or_create_user()
        test_secret = self.add_test_secret(user)
        process_model_info = self.add_test_process(client, user)

        process_model_relative_path = FileSystemService.process_model_relative_path(
            process_model_info
        )
        allowed_process_model = SecretService().add_allowed_process(
            secret_id=test_secret.id,
            user_id=user.id,
            allowed_relative_path=process_model_relative_path,
        )

        assert allowed_process_model is not None
        assert isinstance(allowed_process_model, SecretAllowedProcessPathModel)
        assert allowed_process_model.secret_id == test_secret.id
        assert (
            allowed_process_model.allowed_relative_path == process_model_relative_path
        )

        assert len(test_secret.allowed_processes) == 1
        assert test_secret.allowed_processes[0] == allowed_process_model

    def test_secret_add_allowed_process_same_process_fails(
        self, app: Flask, client: FlaskClient, with_db_and_bpmn_file_cleanup: None
    ) -> None:
        """Do not allow duplicate entries for secret_id/allowed_relative_path pairs.

        We actually take care of this in the db model with a unique constraint
        on the 2 columns.
        """
        user = self.find_or_create_user()
        test_secret = self.add_test_secret(user)
        process_model_info = self.add_test_process(client, user)

        process_model_relative_path = FileSystemService.process_model_relative_path(
            process_model_info
        )
        SecretService().add_allowed_process(
            secret_id=test_secret.id,
            user_id=user.id,
            allowed_relative_path=process_model_relative_path,
        )
        allowed_processes = SecretAllowedProcessPathModel.query.all()
        assert len(allowed_processes) == 1

        with pytest.raises(ApiError) as ae:
            SecretService().add_allowed_process(
                secret_id=test_secret.id,
                user_id=user.id,
                allowed_relative_path=process_model_relative_path,
            )
        assert "Resource already exists" in ae.value.message

    def test_secret_add_allowed_process_bad_user_fails(
        self, app: Flask, client: FlaskClient, with_db_and_bpmn_file_cleanup: None
    ) -> None:
        """Test_secret_add_allowed_process_bad_user."""
        user = self.find_or_create_user()
        process_model_info = self.add_test_process(client, user)
        process_model_relative_path = FileSystemService.process_model_relative_path(
            process_model_info
        )
        test_secret = self.add_test_secret(user)
        with pytest.raises(ApiError) as ae:
            SecretService().add_allowed_process(
                secret_id=test_secret.id,
                user_id=user.id + 1,
                allowed_relative_path=process_model_relative_path,
            )
        assert (
            ae.value.message
            == f"User: {user.id+1} cannot modify the secret with key : {self.test_key}"
        )

    def test_secret_add_allowed_process_bad_secret_fails(
        self, app: Flask, client: FlaskClient, with_db_and_bpmn_file_cleanup: None
    ) -> None:
        """Test_secret_add_allowed_process_bad_secret_fails."""
        user = self.find_or_create_user()
        process_model_info = self.add_test_process(client, user)
        process_model_relative_path = FileSystemService.process_model_relative_path(
            process_model_info
        )
        test_secret = self.add_test_secret(user)

        with pytest.raises(ApiError) as ae:
            SecretService().add_allowed_process(
                secret_id=test_secret.id + 1,
                user_id=user.id,
                allowed_relative_path=process_model_relative_path,
            )
        assert "Resource does not exist" in ae.value.message

    def test_secret_delete_allowed_process(
        self, app: Flask, client: FlaskClient, with_db_and_bpmn_file_cleanup: None
    ) -> None:
        """Test_secret_delete_allowed_process."""
        user = self.find_or_create_user()
        allowed_process_model = self.add_test_secret_allowed_process(client, user)

        allowed_processes = SecretAllowedProcessPathModel.query.all()
        assert len(allowed_processes) == 1

        SecretService().delete_allowed_process(allowed_process_model.id, user.id)

        allowed_processes = SecretAllowedProcessPathModel.query.all()
        assert len(allowed_processes) == 0

    def test_secret_delete_allowed_process_bad_user_fails(
        self, app: Flask, client: FlaskClient, with_db_and_bpmn_file_cleanup: None
    ) -> None:
        """Test_secret_delete_allowed_process_bad_user_fails."""
        user = self.find_or_create_user()
        allowed_process_model = self.add_test_secret_allowed_process(client, user)
        with pytest.raises(ApiError) as ae:
            SecretService().delete_allowed_process(
                allowed_process_model.id, user.id + 1
            )
        message = ae.value.message
        assert (
            f"User: {user.id+1} cannot delete the allowed_process with id : {allowed_process_model.id}"
            in message
        )

    def test_secret_delete_allowed_process_bad_allowed_process_fails(
        self, app: Flask, client: FlaskClient, with_db_and_bpmn_file_cleanup: None
    ) -> None:
        """Test_secret_delete_allowed_process_bad_allowed_process_fails."""
        user = self.find_or_create_user()
        allowed_process_model = self.add_test_secret_allowed_process(client, user)
        with pytest.raises(ApiError) as ae:
            SecretService().delete_allowed_process(
                allowed_process_model.id + 1, user.id
            )
        assert "Resource does not exist" in ae.value.message


class TestSecretServiceApi(SecretServiceTestHelpers):
    """TestSecretServiceApi."""

    def test_add_secret(
        self, app: Flask, client: FlaskClient, with_db_and_bpmn_file_cleanup: None
    ) -> None:
        """Test_add_secret."""
        user = self.find_or_create_user()
        secret_model = SecretModel(
            key=self.test_key,
            value=self.test_value,
            creator_user_id=user.id,
        )
        data = json.dumps(SecretModelSchema().dump(secret_model))
        response: TestResponse = client.post(
            "/v1.0/secrets",
            headers=self.logged_in_headers(user),
            content_type="application/json",
            data=data,
        )
        assert response.json
        secret: dict = response.json
        for key in ["key", "value", "creator_user_id"]:
            assert key in secret.keys()
        assert secret["key"] == self.test_key
        assert secret["value"] == self.test_value
        assert secret["creator_user_id"] == user.id

    def test_get_secret(
        self, app: Flask, client: FlaskClient, with_db_and_bpmn_file_cleanup: None
    ) -> None:
        """Test get secret."""
        user = self.find_or_create_user()
        self.add_test_secret(user)
        secret_response = client.get(
            f"/v1.0/secrets/{self.test_key}",
            headers=self.logged_in_headers(user),
        )
        assert secret_response
        assert secret_response.status_code == 200
        assert secret_response.json == self.test_value

    def test_update_secret(
        self, app: Flask, client: FlaskClient, with_db_and_bpmn_file_cleanup: None
    ) -> None:
        """Test_update_secret."""
        user = self.find_or_create_user()
        self.add_test_secret(user)
        secret = SecretService.get_secret(self.test_key)
        assert secret == self.test_value
        secret_model = SecretModel(
            key=self.test_key, value="new_secret_value", creator_user_id=user.id
        )
        response = client.put(
            f"/v1.0/secrets/{self.test_key}",
            headers=self.logged_in_headers(user),
            content_type="application/json",
            data=json.dumps(SecretModelSchema().dump(secret_model)),
        )
        assert response.status_code == 204

        secret_model = SecretModel.query.filter(
            SecretModel.key == self.test_key
        ).first()
        assert secret_model.value == "new_secret_value"

    def test_delete_secret(
        self, app: Flask, client: FlaskClient, with_db_and_bpmn_file_cleanup: None
    ) -> None:
        """Test delete secret."""
        user = self.find_or_create_user()
        self.add_test_secret(user)
        secret = SecretService.get_secret(self.test_key)
        assert secret
        assert secret == self.test_value
        secret_response = client.delete(
            f"/v1.0/secrets/{self.test_key}",
            headers=self.logged_in_headers(user),
        )
        assert secret_response.status_code == 204
        secret = SecretService.get_secret(self.test_key)
        assert secret is None

    def test_delete_secret_bad_user(
        self, app: Flask, client: FlaskClient, with_db_and_bpmn_file_cleanup: None
    ) -> None:
        """Test_delete_secret_bad_user."""
        user_1 = self.find_or_create_user()
        user_2 = self.find_or_create_user("test_user_2")
        self.add_test_secret(user_1)
        secret_response = client.delete(
            f"/v1.0/secrets/{self.test_key}",
            headers=self.logged_in_headers(user_2),
        )
        assert secret_response.status_code == 401

    def test_delete_secret_bad_key(
        self, app: Flask, client: FlaskClient, with_db_and_bpmn_file_cleanup: None
    ) -> None:
        """Test delete secret."""
        user = self.find_or_create_user()
        secret_response = client.delete(
            "/v1.0/secrets/bad_secret_key",
            headers=self.logged_in_headers(user),
        )
        assert secret_response.status_code == 404

    def test_add_secret_allowed_process(
        self, app: Flask, client: FlaskClient, with_db_and_bpmn_file_cleanup: None
    ) -> None:
        """Test add secret allowed process."""
        user = self.find_or_create_user()
        test_secret = self.add_test_secret(user)
        process_model_info = self.add_test_process(client, user)
        process_model_relative_path = FileSystemService.process_model_relative_path(
            process_model_info
        )
        data = {
            "secret_id": test_secret.id,
            "allowed_relative_path": process_model_relative_path,
        }
        response: TestResponse = client.post(
            "/v1.0/secrets/allowed_process_paths",
            headers=self.logged_in_headers(user),
            content_type="application/json",
            data=json.dumps(data),
        )
        assert response.status_code == 201
        allowed_processes = SecretAllowedProcessPathModel.query.all()
        assert len(allowed_processes) == 1
        assert allowed_processes[0].allowed_relative_path == process_model_relative_path
        assert allowed_processes[0].secret_id == test_secret.id

    def test_delete_secret_allowed_process(
        self, app: Flask, client: FlaskClient, with_db_and_bpmn_file_cleanup: None
    ) -> None:
        """Test delete secret allowed process."""
        user = self.find_or_create_user()
        test_secret = self.add_test_secret(user)
        process_model_info = self.add_test_process(client, user)
        process_model_relative_path = FileSystemService.process_model_relative_path(
            process_model_info
        )
        allowed_process = SecretService.add_allowed_process(
            test_secret.id, user.id, process_model_relative_path
        )
        allowed_processes = SecretAllowedProcessPathModel.query.all()
        assert len(allowed_processes) == 1
        assert allowed_processes[0].secret_id == test_secret.id
        assert allowed_processes[0].allowed_relative_path == process_model_relative_path
        response = client.delete(
            f"/v1.0/secrets/allowed_process_paths/{allowed_process.id}",
            headers=self.logged_in_headers(user),
        )
        assert response.status_code == 204
        allowed_processes = SecretAllowedProcessPathModel.query.all()
        assert len(allowed_processes) == 0
@ -34,7 +34,7 @@ class TestMessageInstance(BaseTest):
        assert queued_message.status == "ready"
        assert queued_message.failure_cause is None

        queued_message_from_query = MessageInstanceModel.query.filter_by(
        queued_message_from_query = MessageInstanceModel.query.filter_by(  # type: ignore
            id=queued_message.id
        ).first()
        assert queued_message_from_query is not None
@ -137,7 +137,7 @@ class TestMessageInstance(BaseTest):
            == "MessageInstanceModel: failure_cause must be set if status is failed"
        )
        assert queued_message.id is None
        db.session.remove()
        db.session.remove()  # type: ignore

        queued_message = MessageInstanceModel(
            process_instance_id=process_instance.id,
@ -188,8 +188,14 @@ class TestMessageService(BaseTest):
        process_instance_result = ProcessInstanceModel.query.all()

        assert len(process_instance_result) == 3
        process_instance_receiver_one = process_instance_result[1]
        process_instance_receiver_two = process_instance_result[2]
        process_instance_receiver_one = ProcessInstanceModel.query.filter_by(
            process_model_identifier="message_receiver_one"
        ).first()
        assert process_instance_receiver_one is not None
        process_instance_receiver_two = ProcessInstanceModel.query.filter_by(
            process_model_identifier="message_receiver_two"
        ).first()
        assert process_instance_receiver_two is not None
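        # Looking up each receiver by process_model_identifier, rather than by
        # its position in query.all(), keeps the test independent of result
        # ordering.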

        # just make sure it's a different process instance
        assert (
@ -0,0 +1,27 @@
"""Test_various_bpmn_constructs."""
from flask.app import Flask
from tests.spiffworkflow_backend.helpers.base_test import BaseTest
from tests.spiffworkflow_backend.helpers.test_data import load_test_spec

from spiffworkflow_backend.services.process_instance_processor import (
    ProcessInstanceProcessor,
)


class TestVariousBpmnConstructs(BaseTest):
    """TestVariousBpmnConstructs."""

    def test_running_process_with_timer_intermediate_catch_event(
        self, app: Flask, with_db_and_bpmn_file_cleanup: None
    ) -> None:
        """Test_running_process_with_timer_intermediate_catch_event."""
        process_model = load_test_spec(
            "timers_intermediate_catch_event",
            process_model_source_directory="timer_intermediate_catch_event",
        )

        process_instance = self.create_process_instance_from_process_model(
            process_model
        )
        processor = ProcessInstanceProcessor(process_instance)
        processor.do_engine_steps(save=True)