Squashed 'spiffworkflow-backend/' changes from dba09086ba..5c6601237e

5c6601237e upgrade spiff and add commented out assertion
150cb68824 Change steps when viewing a process instance model (#18)
beac7f40c4 prevent backfill from exploding when a primary file name on another model does not exist
3e6b61911e work around parser.get_process_dependencies returning a set containing the element None
65c343337d Point back to spiff main (#19)
7eafd5c994 Track spiff step details more granularly (#17)
85829dd56d Clear the remaining __init__.py imports in SpiffWorkflow (#14)
c3468ca548 From the logs, allow viewing a diagram in a previous state (#15)

git-subtree-dir: spiffworkflow-backend
git-subtree-split: 5c6601237ebdccf864b23dac74bf3e1ca77ead1e
Author: burnettk
Date: 2022-11-04 09:33:44 -04:00
Parent: 95d9dbf036
Commit: a0b923c9ad
15 changed files with 219 additions and 58 deletions


@@ -1,8 +1,8 @@
 """empty message
 
-Revision ID: bdd1d64689db
+Revision ID: b1647eff45c9
 Revises: 
-Create Date: 2022-11-02 11:31:50.606843
+Create Date: 2022-11-02 14:25:09.992800
 
 """
 from alembic import op
@@ -10,7 +10,7 @@ import sqlalchemy as sa
 
 # revision identifiers, used by Alembic.
-revision = 'bdd1d64689db'
+revision = 'b1647eff45c9'
 down_revision = None
 branch_labels = None
 depends_on = None
@@ -106,6 +106,7 @@ def upgrade():
     sa.Column('status', sa.String(length=50), nullable=True),
     sa.Column('bpmn_version_control_type', sa.String(length=50), nullable=True),
     sa.Column('bpmn_version_control_identifier', sa.String(length=255), nullable=True),
+    sa.Column('spiff_step', sa.Integer(), nullable=True),
     sa.ForeignKeyConstraint(['process_initiator_id'], ['user.id'], ),
     sa.PrimaryKeyConstraint('id')
     )
@@ -229,10 +230,22 @@ def upgrade():
     sa.Column('timestamp', sa.DECIMAL(precision=17, scale=6), nullable=False),
     sa.Column('message', sa.String(length=255), nullable=True),
     sa.Column('current_user_id', sa.Integer(), nullable=True),
+    sa.Column('spiff_step', sa.Integer(), nullable=False),
     sa.ForeignKeyConstraint(['current_user_id'], ['user.id'], ),
     sa.ForeignKeyConstraint(['process_instance_id'], ['process_instance.id'], ),
     sa.PrimaryKeyConstraint('id')
     )
+    op.create_table('spiff_step_details',
+    sa.Column('id', sa.Integer(), nullable=False),
+    sa.Column('process_instance_id', sa.Integer(), nullable=False),
+    sa.Column('spiff_step', sa.Integer(), nullable=False),
+    sa.Column('task_json', sa.JSON(), nullable=False),
+    sa.Column('timestamp', sa.DECIMAL(precision=17, scale=6), nullable=False),
+    sa.Column('completed_by_user_id', sa.Integer(), nullable=True),
+    sa.ForeignKeyConstraint(['completed_by_user_id'], ['user.id'], ),
+    sa.ForeignKeyConstraint(['process_instance_id'], ['process_instance.id'], ),
+    sa.PrimaryKeyConstraint('id')
+    )
     op.create_table('active_task_user',
     sa.Column('id', sa.Integer(), nullable=False),
     sa.Column('active_task_id', sa.Integer(), nullable=False),
@@ -266,6 +279,7 @@ def downgrade():
     op.drop_index(op.f('ix_active_task_user_user_id'), table_name='active_task_user')
    op.drop_index(op.f('ix_active_task_user_active_task_id'), table_name='active_task_user')
     op.drop_table('active_task_user')
+    op.drop_table('spiff_step_details')
     op.drop_table('spiff_logging')
     op.drop_table('permission_assignment')
     op.drop_table('message_instance')
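
Note: the new spiff_step_details table above stores one snapshot of the serialized task state per engine step, keyed by process_instance_id plus spiff_step. As a rough sketch (not itself part of this commit), reading a step's snapshot back with the backend's models would look like this, mirroring the query the route code later in the commit performs:

    from flask_bpmn.models.db import db
    from spiffworkflow_backend.models.spiff_step_details import SpiffStepDetailsModel

    # fetch the task snapshot recorded for step 3 of process instance 42
    step_detail = (
        db.session.query(SpiffStepDetailsModel)
        .filter_by(process_instance_id=42, spiff_step=3)
        .first()
    )
    if step_detail is not None:
        task_json = step_detail.task_json  # serialized workflow state at that step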

poetry.lock (generated)

@@ -95,7 +95,7 @@ python-versions = ">=3.5"
 dev = ["cloudpickle", "coverage[toml] (>=5.0.2)", "furo", "hypothesis", "mypy (>=0.900,!=0.940)", "pre-commit", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins", "sphinx", "sphinx-notfound-page", "zope.interface"]
 docs = ["furo", "sphinx", "sphinx-notfound-page", "zope.interface"]
 tests = ["cloudpickle", "coverage[toml] (>=5.0.2)", "hypothesis", "mypy (>=0.900,!=0.940)", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins", "zope.interface"]
-tests-no-zope = ["cloudpickle", "coverage[toml] (>=5.0.2)", "hypothesis", "mypy (>=0.900,!=0.940)", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins"]
+tests_no_zope = ["cloudpickle", "coverage[toml] (>=5.0.2)", "hypothesis", "mypy (>=0.900,!=0.940)", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins"]
 
 [[package]]
 name = "Babel"
@@ -268,7 +268,7 @@ optional = false
 python-versions = ">=3.6.0"
 
 [package.extras]
-unicode-backport = ["unicodedata2"]
+unicode_backport = ["unicodedata2"]
 
 [[package]]
 name = "classify-imports"
@@ -1512,7 +1512,7 @@ urllib3 = ">=1.21.1,<1.27"
 
 [package.extras]
 socks = ["PySocks (>=1.5.6,!=1.5.7)"]
-use-chardet-on-py3 = ["chardet (>=3.0.2,<6)"]
+use_chardet_on_py3 = ["chardet (>=3.0.2,<6)"]
 
 [[package]]
 name = "requests-toolbelt"
@@ -1625,7 +1625,7 @@ falcon = ["falcon (>=1.4)"]
 fastapi = ["fastapi (>=0.79.0)"]
 flask = ["blinker (>=1.1)", "flask (>=0.11)"]
 httpx = ["httpx (>=0.16.0)"]
-pure-eval = ["asttokens", "executing", "pure-eval"]
+pure_eval = ["asttokens", "executing", "pure-eval"]
 pyspark = ["pyspark (>=2.4.4)"]
 quart = ["blinker (>=1.1)", "quart (>=0.16.1)"]
 rq = ["rq (>=0.6)"]
@@ -1873,7 +1873,7 @@ pytz = "*"
 type = "git"
 url = "https://github.com/sartography/SpiffWorkflow"
 reference = "main"
-resolved_reference = "a6392d19061f623394f5705fb78af23673d3940d"
+resolved_reference = "8d820dce1f439bb76bc07e39629832d998d6f634"
 
 [[package]]
 name = "SQLAlchemy"
@@ -1891,19 +1891,19 @@ aiomysql = ["aiomysql", "greenlet (!=0.4.17)"]
 aiosqlite = ["aiosqlite", "greenlet (!=0.4.17)", "typing_extensions (!=3.10.0.1)"]
 asyncio = ["greenlet (!=0.4.17)"]
 asyncmy = ["asyncmy (>=0.2.3,!=0.2.4)", "greenlet (!=0.4.17)"]
-mariadb-connector = ["mariadb (>=1.0.1,!=1.1.2)"]
+mariadb_connector = ["mariadb (>=1.0.1,!=1.1.2)"]
 mssql = ["pyodbc"]
-mssql-pymssql = ["pymssql"]
-mssql-pyodbc = ["pyodbc"]
+mssql_pymssql = ["pymssql"]
+mssql_pyodbc = ["pyodbc"]
 mypy = ["mypy (>=0.910)", "sqlalchemy2-stubs"]
 mysql = ["mysqlclient (>=1.4.0)", "mysqlclient (>=1.4.0,<2)"]
-mysql-connector = ["mysql-connector-python"]
+mysql_connector = ["mysql-connector-python"]
 oracle = ["cx_oracle (>=7)", "cx_oracle (>=7,<8)"]
 postgresql = ["psycopg2 (>=2.7)"]
-postgresql-asyncpg = ["asyncpg", "greenlet (!=0.4.17)"]
-postgresql-pg8000 = ["pg8000 (>=1.16.6,!=1.29.0)"]
-postgresql-psycopg2binary = ["psycopg2-binary"]
-postgresql-psycopg2cffi = ["psycopg2cffi"]
+postgresql_asyncpg = ["asyncpg", "greenlet (!=0.4.17)"]
+postgresql_pg8000 = ["pg8000 (>=1.16.6,!=1.29.0)"]
+postgresql_psycopg2binary = ["psycopg2-binary"]
+postgresql_psycopg2cffi = ["psycopg2cffi"]
 pymysql = ["pymysql", "pymysql (<1)"]
 sqlcipher = ["sqlcipher3_binary"]
@@ -2946,10 +2946,7 @@ orjson = [
     {file = "orjson-3.8.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:b68a42a31f8429728183c21fb440c21de1b62e5378d0d73f280e2d894ef8942e"},
     {file = "orjson-3.8.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:ff13410ddbdda5d4197a4a4c09969cb78c722a67550f0a63c02c07aadc624833"},
     {file = "orjson-3.8.0-cp310-none-win_amd64.whl", hash = "sha256:2d81e6e56bbea44be0222fb53f7b255b4e7426290516771592738ca01dbd053b"},
-    {file = "orjson-3.8.0-cp311-cp311-macosx_10_7_x86_64.whl", hash = "sha256:200eae21c33f1f8b02a11f5d88d76950cd6fd986d88f1afe497a8ae2627c49aa"},
-    {file = "orjson-3.8.0-cp311-cp311-macosx_10_9_x86_64.macosx_11_0_arm64.macosx_10_9_universal2.whl", hash = "sha256:9529990f3eab54b976d327360aa1ff244a4b12cb5e4c5b3712fcdd96e8fe56d4"},
     {file = "orjson-3.8.0-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:e2defd9527651ad39ec20ae03c812adf47ef7662bdd6bc07dabb10888d70dc62"},
-    {file = "orjson-3.8.0-cp311-none-win_amd64.whl", hash = "sha256:b21c7af0ff6228ca7105f54f0800636eb49201133e15ddb80ac20c1ce973ef07"},
     {file = "orjson-3.8.0-cp37-cp37m-macosx_10_7_x86_64.whl", hash = "sha256:9e6ac22cec72d5b39035b566e4b86c74b84866f12b5b0b6541506a080fb67d6d"},
     {file = "orjson-3.8.0-cp37-cp37m-macosx_10_9_x86_64.macosx_11_0_arm64.macosx_10_9_universal2.whl", hash = "sha256:e2f4a5542f50e3d336a18cb224fc757245ca66b1fd0b70b5dd4471b8ff5f2b0e"},
     {file = "orjson-3.8.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e1418feeb8b698b9224b1f024555895169d481604d5d884498c1838d7412794c"},
@@ -3062,7 +3059,18 @@ py = [
     {file = "py-1.11.0.tar.gz", hash = "sha256:51c75c4126074b472f746a24399ad32f6053d1b34b68d2fa41e558e6f4a98719"},
 ]
 pyasn1 = [
+    {file = "pyasn1-0.4.8-py2.4.egg", hash = "sha256:fec3e9d8e36808a28efb59b489e4528c10ad0f480e57dcc32b4de5c9d8c9fdf3"},
+    {file = "pyasn1-0.4.8-py2.5.egg", hash = "sha256:0458773cfe65b153891ac249bcf1b5f8f320b7c2ce462151f8fa74de8934becf"},
+    {file = "pyasn1-0.4.8-py2.6.egg", hash = "sha256:5c9414dcfede6e441f7e8f81b43b34e834731003427e5b09e4e00e3172a10f00"},
+    {file = "pyasn1-0.4.8-py2.7.egg", hash = "sha256:6e7545f1a61025a4e58bb336952c5061697da694db1cae97b116e9c46abcf7c8"},
     {file = "pyasn1-0.4.8-py2.py3-none-any.whl", hash = "sha256:39c7e2ec30515947ff4e87fb6f456dfc6e84857d34be479c9d4a4ba4bf46aa5d"},
+    {file = "pyasn1-0.4.8-py3.1.egg", hash = "sha256:78fa6da68ed2727915c4767bb386ab32cdba863caa7dbe473eaae45f9959da86"},
+    {file = "pyasn1-0.4.8-py3.2.egg", hash = "sha256:08c3c53b75eaa48d71cf8c710312316392ed40899cb34710d092e96745a358b7"},
+    {file = "pyasn1-0.4.8-py3.3.egg", hash = "sha256:03840c999ba71680a131cfaee6fab142e1ed9bbd9c693e285cc6aca0d555e576"},
+    {file = "pyasn1-0.4.8-py3.4.egg", hash = "sha256:7ab8a544af125fb704feadb008c99a88805126fb525280b2270bb25cc1d78a12"},
+    {file = "pyasn1-0.4.8-py3.5.egg", hash = "sha256:e89bf84b5437b532b0803ba5c9a5e054d21fec423a89952a74f87fa2c9b7bce2"},
+    {file = "pyasn1-0.4.8-py3.6.egg", hash = "sha256:014c0e9976956a08139dc0712ae195324a75e142284d5f87f1a87ee1b068a359"},
+    {file = "pyasn1-0.4.8-py3.7.egg", hash = "sha256:99fcc3c8d804d1bc6d9a099921e39d827026409a58f2a720dcdb89374ea0c776"},
     {file = "pyasn1-0.4.8.tar.gz", hash = "sha256:aef77c9fb94a3ac588e87841208bdec464471d9871bd5050a287cc9a475cd0ba"},
 ]
 pycodestyle = [


@@ -1000,6 +1000,12 @@ paths:
           description: If true, this wil return all tasks associated with the process instance and not just user tasks.
           schema:
             type: boolean
+        - name: spiff_step
+          in: query
+          required: false
+          description: If set will return the tasks as they were during a specific step of execution.
+          schema:
+            type: integer
     get:
       tags:
         - Process Instances
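
Note: a hedged usage sketch for the new parameter. The hunk shows only the parameter definition, so the full endpoint path, port, and auth header below are assumptions; the shape of the call is the point:

    import requests

    # hypothetical URL; only the spiff_step query parameter is defined in this commit
    response = requests.get(
        "http://localhost:7000/v1.0/process-instances/42/tasks",
        params={"all_tasks": "true", "spiff_step": 5},
        headers={"Authorization": "Bearer <access_token>"},
    )
    tasks_as_of_step_5 = response.json()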


@@ -46,6 +46,9 @@ from spiffworkflow_backend.models.process_instance_report import (
 from spiffworkflow_backend.models.refresh_token import RefreshTokenModel  # noqa: F401
 from spiffworkflow_backend.models.secret_model import SecretModel  # noqa: F401
 from spiffworkflow_backend.models.spiff_logging import SpiffLoggingModel  # noqa: F401
+from spiffworkflow_backend.models.spiff_step_details import (
+    SpiffStepDetailsModel,
+)  # noqa: F401
 from spiffworkflow_backend.models.user import UserModel  # noqa: F401
 from spiffworkflow_backend.models.group import GroupModel  # noqa: F401


@@ -81,6 +81,7 @@ class ProcessInstanceModel(SpiffworkflowBaseDBModel):
     spiff_logs = relationship("SpiffLoggingModel", cascade="delete")  # type: ignore
     message_instances = relationship("MessageInstanceModel", cascade="delete")  # type: ignore
     message_correlations = relationship("MessageCorrelationModel", cascade="delete")  # type: ignore
+    spiff_step_details = relationship("SpiffStepDetailsModel", cascade="delete")  # type: ignore
 
     bpmn_json: str | None = deferred(db.Column(db.JSON))  # type: ignore
     start_in_seconds: int | None = db.Column(db.Integer)
@@ -92,6 +93,7 @@ class ProcessInstanceModel(SpiffworkflowBaseDBModel):
     bpmn_xml_file_contents: bytes | None = None
     bpmn_version_control_type: str = db.Column(db.String(50))
     bpmn_version_control_identifier: str = db.Column(db.String(255))
+    spiff_step: int = db.Column(db.Integer)
 
     @property
     def serialized(self) -> dict[str, Any]:
@@ -110,6 +112,7 @@ class ProcessInstanceModel(SpiffworkflowBaseDBModel):
             "end_in_seconds": self.end_in_seconds,
             "process_initiator_id": self.process_initiator_id,
             "bpmn_xml_file_contents": local_bpmn_xml_file_contents,
+            "spiff_step": self.spiff_step,
         }
 
     @property


@@ -25,3 +25,4 @@ class SpiffLoggingModel(SpiffworkflowBaseDBModel):
     timestamp: float = db.Column(db.DECIMAL(17, 6), nullable=False)
     message: Optional[str] = db.Column(db.String(255), nullable=True)
     current_user_id: int = db.Column(ForeignKey(UserModel.id), nullable=True)
+    spiff_step: int = db.Column(db.Integer, nullable=False)


@@ -0,0 +1,23 @@
+"""Spiff_step_details."""
+from dataclasses import dataclass
+
+from flask_bpmn.models.db import db
+from flask_bpmn.models.db import SpiffworkflowBaseDBModel
+from sqlalchemy import ForeignKey
+from sqlalchemy.orm import deferred
+
+from spiffworkflow_backend.models.process_instance import ProcessInstanceModel
+from spiffworkflow_backend.models.user import UserModel
+
+
+@dataclass
+class SpiffStepDetailsModel(SpiffworkflowBaseDBModel):
+    """SpiffStepDetailsModel."""
+
+    __tablename__ = "spiff_step_details"
+    id: int = db.Column(db.Integer, primary_key=True)
+    process_instance_id: int = db.Column(ForeignKey(ProcessInstanceModel.id), nullable=False)  # type: ignore
+    spiff_step: int = db.Column(db.Integer, nullable=False)
+    task_json: str | None = deferred(db.Column(db.JSON, nullable=False))  # type: ignore
+    timestamp: float = db.Column(db.DECIMAL(17, 6), nullable=False)
+    completed_by_user_id: int = db.Column(ForeignKey(UserModel.id), nullable=True)
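
Note: a minimal sketch of writing a row with this model, using illustrative values; in the real flow the processor's save_spiff_step_details (later in this commit) supplies them from the running workflow:

    import time

    from flask_bpmn.models.db import db
    from spiffworkflow_backend.models.spiff_step_details import SpiffStepDetailsModel

    details = SpiffStepDetailsModel(
        process_instance_id=42,  # illustrative id
        spiff_step=1,
        task_json={},            # the full serialized workflow for now (see the processor TODO)
        timestamp=round(time.time()),
        completed_by_user_id=None,
    )
    db.session.add(details)
    db.session.commit()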


@@ -56,6 +56,7 @@ from spiffworkflow_backend.models.process_model import ProcessModelInfoSchema
 from spiffworkflow_backend.models.secret_model import SecretModel
 from spiffworkflow_backend.models.secret_model import SecretModelSchema
 from spiffworkflow_backend.models.spiff_logging import SpiffLoggingModel
+from spiffworkflow_backend.models.spiff_step_details import SpiffStepDetailsModel
 from spiffworkflow_backend.models.user import UserModel
 from spiffworkflow_backend.routes.user import verify_token
 from spiffworkflow_backend.services.authorization_service import AuthorizationService
@@ -954,10 +955,23 @@ def task_list_my_tasks(page: int = 1, per_page: int = 100) -> flask.wrappers.Response:
 def process_instance_task_list(
-    process_instance_id: int, all_tasks: bool = False
+    process_instance_id: int, all_tasks: bool = False, spiff_step: int = 0
 ) -> flask.wrappers.Response:
     """Process_instance_task_list."""
     process_instance = find_process_instance_by_id_or_raise(process_instance_id)
 
+    if spiff_step > 0:
+        step_detail = (
+            db.session.query(SpiffStepDetailsModel)
+            .filter(
+                SpiffStepDetailsModel.process_instance_id == process_instance.id,
+                SpiffStepDetailsModel.spiff_step == spiff_step,
+            )
+            .first()
+        )
+        if step_detail is not None:
+            process_instance.bpmn_json = json.dumps(step_detail.task_json)
+
     processor = ProcessInstanceProcessor(process_instance)
 
     spiff_tasks = None
@@ -1233,6 +1247,7 @@ def script_unit_test_run(
     """Script_unit_test_run."""
     # FIXME: We should probably clear this somewhere else but this works
     current_app.config["THREAD_LOCAL_DATA"].process_instance_id = None
+    current_app.config["THREAD_LOCAL_DATA"].spiff_step = None
 
     python_script = _get_required_parameter_or_raise("python_script", body)
     input_json = _get_required_parameter_or_raise("input_json", body)


@@ -9,8 +9,8 @@ from spiffworkflow_backend.models.script_attributes_context import (
 from spiffworkflow_backend.scripts.script import Script
 
 
-class GetUser(Script):
-    """GetUser."""
+class GetCurrentUser(Script):
+    """GetCurrentUser."""
 
     def get_description(self) -> str:
         """Get_description."""


@@ -8,7 +8,7 @@ from spiffworkflow_backend.scripts.script import Script
 
 
 class GetProcessInfo(Script):
-    """GetUser."""
+    """GetProcessInfo."""
 
     def get_description(self) -> str:
         """Get_description."""


@@ -108,6 +108,8 @@ class SpiffFilter(logging.Filter):
         if hasattr(tld, "process_instance_id"):
             process_instance_id = tld.process_instance_id
         setattr(record, "process_instance_id", process_instance_id)  # noqa: B010
+        if hasattr(tld, "spiff_step"):
+            setattr(record, "spiff_step", tld.spiff_step)  # noqa: B010
         if hasattr(g, "user") and g.user:
             setattr(record, "current_user_id", g.user.id)  # noqa: B010
         return True
@@ -204,6 +206,11 @@ class DBHandler(logging.Handler):
             timestamp = record.created
             message = record.msg if hasattr(record, "msg") else None
             current_user_id = record.current_user_id if hasattr(record, "current_user_id") else None  # type: ignore
+            spiff_step = (
+                record.spiff_step  # type: ignore
+                if hasattr(record, "spiff_step") and record.spiff_step is not None  # type: ignore
+                else 1
+            )
             spiff_log = SpiffLoggingModel(
                 process_instance_id=record.process_instance_id,  # type: ignore
                 bpmn_process_identifier=bpmn_process_identifier,
@@ -214,6 +221,7 @@ class DBHandler(logging.Handler):
                 message=message,
                 timestamp=timestamp,
                 current_user_id=current_user_id,
+                spiff_step=spiff_step,
             )
             db.session.add(spiff_log)
             db.session.commit()
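
Note: the mechanism above is a logging.Filter that copies thread-local request state onto each record so the DB handler can persist it. A self-contained sketch of the same pattern, with illustrative names rather than the backend's actual classes:

    import logging
    import threading

    tld = threading.local()  # stand-in for current_app.config["THREAD_LOCAL_DATA"]

    class StepFilter(logging.Filter):
        """Copy the current spiff_step onto every record, defaulting to 1."""

        def filter(self, record: logging.LogRecord) -> bool:
            record.spiff_step = getattr(tld, "spiff_step", None) or 1
            return True

    logger = logging.getLogger("spiff")
    logger.addFilter(StepFilter())
    tld.spiff_step = 7
    logger.warning("task completed")  # handlers now see record.spiff_step == 7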


@@ -29,31 +29,39 @@ from SpiffWorkflow.bpmn.exceptions import WorkflowTaskExecException  # type: ignore
 from SpiffWorkflow.bpmn.parser.ValidationException import ValidationException  # type: ignore
 from SpiffWorkflow.bpmn.PythonScriptEngine import Box  # type: ignore
 from SpiffWorkflow.bpmn.PythonScriptEngine import PythonScriptEngine
-from SpiffWorkflow.bpmn.serializer import BpmnWorkflowSerializer  # type: ignore
+from SpiffWorkflow.bpmn.serializer.workflow import BpmnWorkflowSerializer  # type: ignore
 from SpiffWorkflow.bpmn.specs.BpmnProcessSpec import BpmnProcessSpec  # type: ignore
-from SpiffWorkflow.bpmn.specs.events import CancelEventDefinition  # type: ignore
-from SpiffWorkflow.bpmn.specs.events import EndEvent
+from SpiffWorkflow.bpmn.specs.events.EndEvent import EndEvent  # type: ignore
+from SpiffWorkflow.bpmn.specs.events.event_definitions import CancelEventDefinition  # type: ignore
 from SpiffWorkflow.bpmn.workflow import BpmnWorkflow  # type: ignore
 from SpiffWorkflow.dmn.parser.BpmnDmnParser import BpmnDmnParser  # type: ignore
-from SpiffWorkflow.dmn.serializer import BusinessRuleTaskConverter  # type: ignore
+from SpiffWorkflow.dmn.serializer.task_spec_converters import BusinessRuleTaskConverter  # type: ignore
 from SpiffWorkflow.exceptions import WorkflowException  # type: ignore
 from SpiffWorkflow.serializer.exceptions import MissingSpecError  # type: ignore
 from SpiffWorkflow.spiff.parser.process import SpiffBpmnParser  # type: ignore
-from SpiffWorkflow.spiff.serializer import BoundaryEventConverter  # type: ignore
-from SpiffWorkflow.spiff.serializer import CallActivityTaskConverter
-from SpiffWorkflow.spiff.serializer import EndEventConverter
-from SpiffWorkflow.spiff.serializer import IntermediateCatchEventConverter
-from SpiffWorkflow.spiff.serializer import IntermediateThrowEventConverter
-from SpiffWorkflow.spiff.serializer import ManualTaskConverter
-from SpiffWorkflow.spiff.serializer import NoneTaskConverter
-from SpiffWorkflow.spiff.serializer import ReceiveTaskConverter
-from SpiffWorkflow.spiff.serializer import ScriptTaskConverter
-from SpiffWorkflow.spiff.serializer import SendTaskConverter
-from SpiffWorkflow.spiff.serializer import ServiceTaskConverter
-from SpiffWorkflow.spiff.serializer import StartEventConverter
-from SpiffWorkflow.spiff.serializer import SubWorkflowTaskConverter
-from SpiffWorkflow.spiff.serializer import TransactionSubprocessConverter
-from SpiffWorkflow.spiff.serializer import UserTaskConverter
+from SpiffWorkflow.spiff.serializer.task_spec_converters import BoundaryEventConverter  # type: ignore
+from SpiffWorkflow.spiff.serializer.task_spec_converters import (
+    CallActivityTaskConverter,
+)
+from SpiffWorkflow.spiff.serializer.task_spec_converters import EndEventConverter
+from SpiffWorkflow.spiff.serializer.task_spec_converters import (
+    IntermediateCatchEventConverter,
+)
+from SpiffWorkflow.spiff.serializer.task_spec_converters import (
+    IntermediateThrowEventConverter,
+)
+from SpiffWorkflow.spiff.serializer.task_spec_converters import ManualTaskConverter
+from SpiffWorkflow.spiff.serializer.task_spec_converters import NoneTaskConverter
+from SpiffWorkflow.spiff.serializer.task_spec_converters import ReceiveTaskConverter
+from SpiffWorkflow.spiff.serializer.task_spec_converters import ScriptTaskConverter
+from SpiffWorkflow.spiff.serializer.task_spec_converters import SendTaskConverter
+from SpiffWorkflow.spiff.serializer.task_spec_converters import ServiceTaskConverter
+from SpiffWorkflow.spiff.serializer.task_spec_converters import StartEventConverter
+from SpiffWorkflow.spiff.serializer.task_spec_converters import SubWorkflowTaskConverter
+from SpiffWorkflow.spiff.serializer.task_spec_converters import (
+    TransactionSubprocessConverter,
+)
+from SpiffWorkflow.spiff.serializer.task_spec_converters import UserTaskConverter
 from SpiffWorkflow.task import Task as SpiffTask  # type: ignore
 from SpiffWorkflow.task import TaskState
 from SpiffWorkflow.util.deep_merge import DeepMerge  # type: ignore
@@ -79,12 +87,16 @@ from spiffworkflow_backend.models.process_model import ProcessModelInfo
 from spiffworkflow_backend.models.script_attributes_context import (
     ScriptAttributesContext,
 )
+from spiffworkflow_backend.models.spiff_step_details import SpiffStepDetailsModel
 from spiffworkflow_backend.models.user import UserModel
 from spiffworkflow_backend.models.user import UserModelSchema
 from spiffworkflow_backend.scripts.script import Script
 from spiffworkflow_backend.services.file_system_service import FileSystemService
 from spiffworkflow_backend.services.process_model_service import ProcessModelService
 from spiffworkflow_backend.services.service_task_service import ServiceTaskDelegate
+from spiffworkflow_backend.services.spec_file_service import (
+    ProcessModelFileNotFoundError,
+)
 from spiffworkflow_backend.services.spec_file_service import SpecFileService
 from spiffworkflow_backend.services.user_service import UserService
@@ -276,9 +288,9 @@ class ProcessInstanceProcessor:
         self, process_instance_model: ProcessInstanceModel, validate_only: bool = False
     ) -> None:
         """Create a Workflow Processor based on the serialized information available in the process_instance model."""
-        current_app.config[
-            "THREAD_LOCAL_DATA"
-        ].process_instance_id = process_instance_model.id
+        tld = current_app.config["THREAD_LOCAL_DATA"]
+        tld.process_instance_id = process_instance_model.id
+        tld.spiff_step = process_instance_model.spiff_step
 
         # we want this to be the fully qualified path to the process model including all group subcomponents
         current_app.config["THREAD_LOCAL_DATA"].process_model_identifier = (
@@ -411,10 +423,8 @@ class ProcessInstanceProcessor:
             bpmn_process_spec, subprocesses
         )
 
-    def add_user_info_to_process_instance(
-        self, bpmn_process_instance: BpmnWorkflow
-    ) -> None:
-        """Add_user_info_to_process_instance."""
+    def current_user(self) -> Any:
+        """Current_user."""
         current_user = None
         if UserService.has_user():
             current_user = UserService.current_user()
@@ -425,6 +435,14 @@ class ProcessInstanceProcessor:
         elif self.process_instance_model.process_initiator_id:
             current_user = self.process_instance_model.process_initiator
 
+        return current_user
+
+    def add_user_info_to_process_instance(
+        self, bpmn_process_instance: BpmnWorkflow
+    ) -> None:
+        """Add_user_info_to_process_instance."""
+        current_user = self.current_user()
+
         if current_user:
             current_user_data = UserModelSchema().dump(current_user)
             tasks = bpmn_process_instance.get_tasks(TaskState.READY)
@@ -542,9 +560,31 @@ class ProcessInstanceProcessor:
             "lane_assignment_id": lane_assignment_id,
         }
 
+    def save_spiff_step_details(self) -> None:
+        """SaveSpiffStepDetails."""
+        bpmn_json = self.serialize()
+        wf_json = json.loads(bpmn_json)
+        task_json = "{}"
+        if "tasks" in wf_json:
+            task_json = json.dumps(wf_json["tasks"])
+
+        # TODO want to just save the tasks, something wasn't immediately working
+        # so after the flow works with the full wf_json revisit this
+        task_json = wf_json
+        details_model = SpiffStepDetailsModel(
+            process_instance_id=self.process_instance_model.id,
+            spiff_step=self.process_instance_model.spiff_step or 1,
+            task_json=task_json,
+            timestamp=round(time.time()),
+            completed_by_user_id=self.current_user().id,
+        )
+        db.session.add(details_model)
+        db.session.commit()
+
     def save(self) -> None:
         """Saves the current state of this processor to the database."""
         self.process_instance_model.bpmn_json = self.serialize()
+
         complete_states = [TaskState.CANCELLED, TaskState.COMPLETED]
         user_tasks = list(self.get_all_user_tasks())
         self.process_instance_model.status = self.get_status().value
@@ -632,10 +672,14 @@ class ProcessInstanceProcessor:
         process_models = ProcessModelService().get_process_models()
         for process_model in process_models:
             if process_model.primary_file_name:
-                etree_element = SpecFileService.get_etree_element_from_file_name(
-                    process_model, process_model.primary_file_name
-                )
-                bpmn_process_identifiers = []
+                try:
+                    etree_element = SpecFileService.get_etree_element_from_file_name(
+                        process_model, process_model.primary_file_name
+                    )
+                    bpmn_process_identifiers = []
+                except ProcessModelFileNotFoundError:
+                    # if primary_file_name doesn't actually exist on disk, then just go on to the next process_model
+                    continue
 
                 try:
                     bpmn_process_identifiers = (
@@ -663,6 +707,11 @@ class ProcessInstanceProcessor:
         bpmn_process_identifier: str,
     ) -> str:
         """Bpmn_file_full_path_from_bpmn_process_identifier."""
+        if bpmn_process_identifier is None:
+            raise ValueError(
+                "bpmn_file_full_path_from_bpmn_process_identifier: bpmn_process_identifier is unexpectedly None"
+            )
+
         bpmn_process_id_lookup = BpmnProcessIdLookup.query.filter_by(
             bpmn_process_identifier=bpmn_process_identifier
         ).first()
@@ -695,6 +744,10 @@ class ProcessInstanceProcessor:
         if processed_identifiers is None:
             processed_identifiers = set()
         processor_dependencies = parser.get_process_dependencies()
+
+        # since get_process_dependencies() returns a set with None sometimes, we need to remove it
+        processor_dependencies = processor_dependencies - {None}
+
         processor_dependencies_new = processor_dependencies - processed_identifiers
         bpmn_process_identifiers_in_parser = parser.get_process_ids()
@@ -930,11 +983,29 @@ class ProcessInstanceProcessor:
         db.session.commit()
 
+    def increment_spiff_step(self) -> None:
+        """Spiff_step++."""
+        spiff_step = self.process_instance_model.spiff_step or 0
+        spiff_step += 1
+        self.process_instance_model.spiff_step = spiff_step
+        current_app.config["THREAD_LOCAL_DATA"].spiff_step = spiff_step
+        db.session.add(self.process_instance_model)
+        db.session.commit()
+
     def do_engine_steps(self, exit_at: None = None, save: bool = False) -> None:
         """Do_engine_steps."""
         try:
-            self.bpmn_process_instance.refresh_waiting_tasks()
-            self.bpmn_process_instance.do_engine_steps(exit_at=exit_at)
+            self.bpmn_process_instance.refresh_waiting_tasks(
+                will_refresh_task=lambda t: self.increment_spiff_step(),
+                did_refresh_task=lambda t: self.save_spiff_step_details(),
+            )
+
+            self.bpmn_process_instance.do_engine_steps(
+                exit_at=exit_at,
+                will_complete_task=lambda t: self.increment_spiff_step(),
+                did_complete_task=lambda t: self.save_spiff_step_details(),
+            )
+
             self.process_bpmn_messages()
             self.queue_waiting_receive_messages()
@@ -956,6 +1027,7 @@ class ProcessInstanceProcessor:
             # A little hackly, but make the bpmn_process_instance catch a cancel event.
             bpmn_process_instance.signal("cancel")  # generate a cancel signal.
             bpmn_process_instance.catch(CancelEventDefinition())
+            # Due to this being static, can't save granular step details in this case
             bpmn_process_instance.do_engine_steps()
         except WorkflowTaskExecException as we:
             raise ApiError.from_workflow_exception("task_error", str(we), we) from we
@@ -1054,7 +1126,9 @@ class ProcessInstanceProcessor:
 
     def complete_task(self, task: SpiffTask) -> None:
         """Complete_task."""
+        self.increment_spiff_step()
         self.bpmn_process_instance.complete_task_from_id(task.id)
+        self.save_spiff_step_details()
 
     def get_data(self) -> dict[str, Any]:
         """Get_data."""


@@ -27,6 +27,10 @@ from spiffworkflow_backend.services.file_system_service import FileSystemService
 from spiffworkflow_backend.services.process_model_service import ProcessModelService
 
 
+class ProcessModelFileNotFoundError(Exception):
+    """ProcessModelFileNotFoundError."""
+
+
 class SpecFileService(FileSystemService):
     """SpecFileService."""
 
@@ -90,9 +94,8 @@ class SpecFileService(FileSystemService):
         """Get_data."""
         file_path = SpecFileService.file_path(process_model_info, file_name)
         if not os.path.exists(file_path):
-            raise ApiError(
-                "unknown_file",
-                f"No file found with name {file_name} in {process_model_info.display_name}",
+            raise ProcessModelFileNotFoundError(
+                f"No file found with name {file_name} in {process_model_info.display_name}"
             )
         with open(file_path, "rb") as f_handle:
             spec_file_data = f_handle.read()
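
Note: swapping the generic ApiError for a dedicated exception lets callers handle a missing file as control flow instead of matching on error strings; the backfill loop in the processor above relies on exactly this. A minimal sketch of the calling pattern, assuming a process_model with primary_file_name set:

    from spiffworkflow_backend.services.spec_file_service import (
        ProcessModelFileNotFoundError,
        SpecFileService,
    )

    try:
        data = SpecFileService.get_data(process_model, process_model.primary_file_name)
    except ProcessModelFileNotFoundError:
        data = None  # the file referenced by the model is missing on disk; move on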


@@ -1119,6 +1119,8 @@ class TestProcessApi(BaseTest):
         )
         assert response.json is not None
 
+        # assert response.json['next_task'] is not None
+
         active_tasks = (
             db.session.query(ActiveTaskModel)
             .filter(ActiveTaskModel.process_instance_id == process_instance_id)


@@ -36,6 +36,7 @@ class TestSpiffLogging(BaseTest):
             bpmn_task_identifier=bpmn_task_identifier,
             message=message,
             timestamp=timestamp,
+            spiff_step=1,
         )
         assert spiff_log.timestamp == timestamp