Merge pull request #162 from sartography/feature/move_task_data_into_tables

Feature/move bpmn_json data into separate tables
This commit is contained in:
jasquat 2023-03-01 15:56:51 -05:00 committed by GitHub
commit dcb2a23baa
18 changed files with 525 additions and 280 deletions

View File

@ -17,10 +17,10 @@ per-file-ignores =
# THEN, test_hey.py will NOT be excluding D103 # THEN, test_hey.py will NOT be excluding D103
# asserts are ok in tests # asserts are ok in tests
spiffworkflow-backend/tests/*:S101,D102,D103,D101 spiffworkflow-backend/tests/*:S101,D100,D101,D102,D103
# prefer naming functions descriptively rather than forcing comments # prefer naming functions descriptively rather than forcing comments
spiffworkflow-backend/src/*:D102,D103,D101 spiffworkflow-backend/src/*:D100,D101,D102,D103
spiffworkflow-backend/bin/keycloak_test_server.py:B950,D spiffworkflow-backend/bin/keycloak_test_server.py:B950,D
spiffworkflow-backend/conftest.py:S105 spiffworkflow-backend/conftest.py:S105

View File

@ -31,6 +31,7 @@ on:
push: push:
branches: branches:
- main - main
- feature/move_task_data_into_tables
jobs: jobs:
create_frontend_docker_image: create_frontend_docker_image:

View File

@ -17,10 +17,10 @@ per-file-ignores =
# THEN, test_hey.py will NOT be excluding D103 # THEN, test_hey.py will NOT be excluding D103
# asserts are ok in tests # asserts are ok in tests
tests/*:S101,D102,D103 tests/*:S101,D100,D101,D102,D103
# prefer naming functions descriptively rather than forcing comments # prefer naming functions descriptively rather than forcing comments
src/*:D102,D103 src/*:D100,D101,D102,D103
bin/keycloak_test_server.py:B950,D bin/keycloak_test_server.py:B950,D
conftest.py:S105 conftest.py:S105

View File

@ -53,10 +53,14 @@ if [[ "${1:-}" == "clean" ]]; then
docker exec -it postgres-spiff psql -U spiffworkflow_backend spiffworkflow_backend_unit_testing -c "create database spiffworkflow_backend_local_development;" docker exec -it postgres-spiff psql -U spiffworkflow_backend spiffworkflow_backend_unit_testing -c "create database spiffworkflow_backend_local_development;"
fi fi
fi fi
tasks="$tasks upgrade"
elif [[ "${1:-}" == "migrate" ]]; then elif [[ "${1:-}" == "migrate" ]]; then
tasks="$tasks migrate" tasks="$tasks migrate"
elif [[ "${1:-}" == "downgrade" ]]; then
tasks="$tasks downgrade"
else
tasks="$tasks upgrade"
fi fi
tasks="$tasks upgrade"
if [[ "${SPIFFWORKFLOW_BACKEND_DATABASE_TYPE:-mysql}" == "mysql" ]]; then if [[ "${SPIFFWORKFLOW_BACKEND_DATABASE_TYPE:-mysql}" == "mysql" ]]; then
mysql -uroot -e "CREATE DATABASE IF NOT EXISTS spiffworkflow_backend_local_development" mysql -uroot -e "CREATE DATABASE IF NOT EXISTS spiffworkflow_backend_local_development"

View File

@ -0,0 +1,51 @@
"""empty message
Revision ID: 7422be14adc4
Revises: ac6b60d7fee9
Create Date: 2023-03-01 15:39:25.731722
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = '7422be14adc4'
down_revision = 'ac6b60d7fee9'
branch_labels = None
depends_on = None
def upgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.create_table('process_instance_data',
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('runtime_json', sa.JSON(), nullable=False),
sa.PrimaryKeyConstraint('id')
)
op.create_table('serialized_bpmn_definition',
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('hash', sa.String(length=255), nullable=False),
sa.Column('static_json', sa.JSON(), nullable=False),
sa.PrimaryKeyConstraint('id')
)
op.create_index(op.f('ix_serialized_bpmn_definition_hash'), 'serialized_bpmn_definition', ['hash'], unique=True)
op.add_column('process_instance', sa.Column('serialized_bpmn_definition_id', sa.Integer(), nullable=True))
op.add_column('process_instance', sa.Column('process_instance_data_id', sa.Integer(), nullable=True))
op.create_foreign_key(None, 'process_instance', 'process_instance_data', ['process_instance_data_id'], ['id'])
op.create_foreign_key(None, 'process_instance', 'serialized_bpmn_definition', ['serialized_bpmn_definition_id'], ['id'])
op.add_column('spiff_step_details', sa.Column('delta_json', sa.JSON(), nullable=True))
# ### end Alembic commands ###
def downgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.drop_column('spiff_step_details', 'delta_json')
op.drop_constraint('process_instance_ibfk_3', 'process_instance', type_='foreignkey')
op.drop_constraint('process_instance_ibfk_2', 'process_instance', type_='foreignkey')
op.drop_column('process_instance', 'process_instance_data_id')
op.drop_column('process_instance', 'serialized_bpmn_definition_id')
op.drop_index(op.f('ix_serialized_bpmn_definition_hash'), table_name='serialized_bpmn_definition')
op.drop_table('serialized_bpmn_definition')
op.drop_table('process_instance_data')
# ### end Alembic commands ###

View File

@ -901,6 +901,14 @@ MarkupSafe = ">=2.0"
[package.extras] [package.extras]
i18n = ["Babel (>=2.7)"] i18n = ["Babel (>=2.7)"]
[[package]]
name = "json-delta"
version = "2.0.2"
description = "A diff/patch pair for JSON-serialized data structures."
category = "main"
optional = false
python-versions = "*"
[[package]] [[package]]
name = "jsonschema" name = "jsonschema"
version = "4.16.0" version = "4.16.0"
@ -1817,7 +1825,7 @@ lxml = "*"
type = "git" type = "git"
url = "https://github.com/sartography/SpiffWorkflow" url = "https://github.com/sartography/SpiffWorkflow"
reference = "main" reference = "main"
resolved_reference = "b3235fad598ee3c4680a23f26adb09cdc8f2807b" resolved_reference = "161cb7a4509a3d0e0574f3e2a98157862c053bad"
[[package]] [[package]]
name = "SQLAlchemy" name = "SQLAlchemy"
@ -2196,7 +2204,7 @@ testing = ["flake8 (<5)", "func-timeout", "jaraco.functools", "jaraco.itertools"
[metadata] [metadata]
lock-version = "1.1" lock-version = "1.1"
python-versions = ">=3.9,<3.12" python-versions = ">=3.9,<3.12"
content-hash = "3876acb4e3d947787a3ba8e831844ca0b06bde34dc038be46cabc00aa2a4defe" content-hash = "af711e2941c42b837da47ca8d647b1ae44657bf6805353bc216bb49cb3cbbfae"
[metadata.files] [metadata.files]
alabaster = [ alabaster = [
@ -2602,6 +2610,10 @@ Jinja2 = [
{file = "Jinja2-3.1.2-py3-none-any.whl", hash = "sha256:6088930bfe239f0e6710546ab9c19c9ef35e29792895fed6e6e31a023a182a61"}, {file = "Jinja2-3.1.2-py3-none-any.whl", hash = "sha256:6088930bfe239f0e6710546ab9c19c9ef35e29792895fed6e6e31a023a182a61"},
{file = "Jinja2-3.1.2.tar.gz", hash = "sha256:31351a702a408a9e7595a8fc6150fc3f43bb6bf7e319770cbc0db9df9437e852"}, {file = "Jinja2-3.1.2.tar.gz", hash = "sha256:31351a702a408a9e7595a8fc6150fc3f43bb6bf7e319770cbc0db9df9437e852"},
] ]
json-delta = [
{file = "json_delta-2.0.2-py2.py3-none-any.whl", hash = "sha256:12bc798354ea722fa04fae21ea06879321c47b0887572c27384accd6ef28efbf"},
{file = "json_delta-2.0.2.tar.gz", hash = "sha256:95ea3ff9908fc7d634c27ffec11db8fd8d935aa3e895d7302915d394b10e0321"},
]
jsonschema = [ jsonschema = [
{file = "jsonschema-4.16.0-py3-none-any.whl", hash = "sha256:9e74b8f9738d6a946d70705dc692b74b5429cd0960d58e79ffecfc43b2221eb9"}, {file = "jsonschema-4.16.0-py3-none-any.whl", hash = "sha256:9e74b8f9738d6a946d70705dc692b74b5429cd0960d58e79ffecfc43b2221eb9"},
{file = "jsonschema-4.16.0.tar.gz", hash = "sha256:165059f076eff6971bae5b742fc029a7b4ef3f9bcf04c14e4776a7605de14b23"}, {file = "jsonschema-4.16.0.tar.gz", hash = "sha256:165059f076eff6971bae5b742fc029a7b4ef3f9bcf04c14e4776a7605de14b23"},
@ -2650,6 +2662,7 @@ lazy-object-proxy = [
{file = "lazy_object_proxy-1.7.1-pp37.pp38-none-any.whl", hash = "sha256:d66906d5785da8e0be7360912e99c9188b70f52c422f9fc18223347235691a84"}, {file = "lazy_object_proxy-1.7.1-pp37.pp38-none-any.whl", hash = "sha256:d66906d5785da8e0be7360912e99c9188b70f52c422f9fc18223347235691a84"},
] ]
livereload = [ livereload = [
{file = "livereload-2.6.3-py2.py3-none-any.whl", hash = "sha256:ad4ac6f53b2d62bb6ce1a5e6e96f1f00976a32348afedcb4b6d68df2a1d346e4"},
{file = "livereload-2.6.3.tar.gz", hash = "sha256:776f2f865e59fde56490a56bcc6773b6917366bce0c267c60ee8aaf1a0959869"}, {file = "livereload-2.6.3.tar.gz", hash = "sha256:776f2f865e59fde56490a56bcc6773b6917366bce0c267c60ee8aaf1a0959869"},
] ]
lxml = [ lxml = [
@ -3149,6 +3162,7 @@ ruamel-yaml-clib = [
{file = "ruamel.yaml.clib-0.2.7-cp310-cp310-win_amd64.whl", hash = "sha256:d000f258cf42fec2b1bbf2863c61d7b8918d31ffee905da62dede869254d3b8a"}, {file = "ruamel.yaml.clib-0.2.7-cp310-cp310-win_amd64.whl", hash = "sha256:d000f258cf42fec2b1bbf2863c61d7b8918d31ffee905da62dede869254d3b8a"},
{file = "ruamel.yaml.clib-0.2.7-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:045e0626baf1c52e5527bd5db361bc83180faaba2ff586e763d3d5982a876a9e"}, {file = "ruamel.yaml.clib-0.2.7-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:045e0626baf1c52e5527bd5db361bc83180faaba2ff586e763d3d5982a876a9e"},
{file = "ruamel.yaml.clib-0.2.7-cp311-cp311-macosx_12_6_arm64.whl", hash = "sha256:721bc4ba4525f53f6a611ec0967bdcee61b31df5a56801281027a3a6d1c2daf5"}, {file = "ruamel.yaml.clib-0.2.7-cp311-cp311-macosx_12_6_arm64.whl", hash = "sha256:721bc4ba4525f53f6a611ec0967bdcee61b31df5a56801281027a3a6d1c2daf5"},
{file = "ruamel.yaml.clib-0.2.7-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:41d0f1fa4c6830176eef5b276af04c89320ea616655d01327d5ce65e50575c94"},
{file = "ruamel.yaml.clib-0.2.7-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:4b3a93bb9bc662fc1f99c5c3ea8e623d8b23ad22f861eb6fce9377ac07ad6072"}, {file = "ruamel.yaml.clib-0.2.7-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:4b3a93bb9bc662fc1f99c5c3ea8e623d8b23ad22f861eb6fce9377ac07ad6072"},
{file = "ruamel.yaml.clib-0.2.7-cp36-cp36m-macosx_12_0_arm64.whl", hash = "sha256:a234a20ae07e8469da311e182e70ef6b199d0fbeb6c6cc2901204dd87fb867e8"}, {file = "ruamel.yaml.clib-0.2.7-cp36-cp36m-macosx_12_0_arm64.whl", hash = "sha256:a234a20ae07e8469da311e182e70ef6b199d0fbeb6c6cc2901204dd87fb867e8"},
{file = "ruamel.yaml.clib-0.2.7-cp36-cp36m-manylinux2014_aarch64.whl", hash = "sha256:15910ef4f3e537eea7fe45f8a5d19997479940d9196f357152a09031c5be59f3"}, {file = "ruamel.yaml.clib-0.2.7-cp36-cp36m-manylinux2014_aarch64.whl", hash = "sha256:15910ef4f3e537eea7fe45f8a5d19997479940d9196f357152a09031c5be59f3"},

View File

@ -50,5 +50,12 @@ from spiffworkflow_backend.models.group import GroupModel # noqa: F401
from spiffworkflow_backend.models.process_instance_metadata import ( from spiffworkflow_backend.models.process_instance_metadata import (
ProcessInstanceMetadataModel, ProcessInstanceMetadataModel,
) # noqa: F401 ) # noqa: F401
from spiffworkflow_backend.models.serialized_bpmn_definition import (
SerializedBpmnDefinitionModel,
) # noqa: F401
from spiffworkflow_backend.models.process_instance_data import (
ProcessInstanceDataModel,
) # noqa: F401
add_listeners() add_listeners()

View File

@ -17,6 +17,10 @@ from sqlalchemy.orm import validates
from spiffworkflow_backend.helpers.spiff_enum import SpiffEnum from spiffworkflow_backend.helpers.spiff_enum import SpiffEnum
from spiffworkflow_backend.models.db import db from spiffworkflow_backend.models.db import db
from spiffworkflow_backend.models.db import SpiffworkflowBaseDBModel from spiffworkflow_backend.models.db import SpiffworkflowBaseDBModel
from spiffworkflow_backend.models.process_instance_data import ProcessInstanceDataModel
from spiffworkflow_backend.models.serialized_bpmn_definition import (
SerializedBpmnDefinitionModel,
) # noqa: F401
from spiffworkflow_backend.models.task import Task from spiffworkflow_backend.models.task import Task
from spiffworkflow_backend.models.task import TaskSchema from spiffworkflow_backend.models.task import TaskSchema
from spiffworkflow_backend.models.user import UserModel from spiffworkflow_backend.models.user import UserModel
@ -60,6 +64,16 @@ class ProcessInstanceModel(SpiffworkflowBaseDBModel):
process_initiator_id: int = db.Column(ForeignKey(UserModel.id), nullable=False) # type: ignore process_initiator_id: int = db.Column(ForeignKey(UserModel.id), nullable=False) # type: ignore
process_initiator = relationship("UserModel") process_initiator = relationship("UserModel")
serialized_bpmn_definition_id: int | None = db.Column(
ForeignKey(SerializedBpmnDefinitionModel.id), nullable=True # type: ignore
)
serialized_bpmn_definition = relationship("SerializedBpmnDefinitionModel")
process_instance_data_id: int | None = db.Column(
ForeignKey(ProcessInstanceDataModel.id), nullable=True # type: ignore
)
process_instance_data = relationship("ProcessInstanceDataModel", cascade="delete")
active_human_tasks = relationship( active_human_tasks = relationship(
"HumanTaskModel", "HumanTaskModel",
primaryjoin=( primaryjoin=(

View File

@ -0,0 +1,22 @@
"""Process_instance."""
from __future__ import annotations
from spiffworkflow_backend.models.db import db
from spiffworkflow_backend.models.db import SpiffworkflowBaseDBModel
# the last three here should maybe become columns on process instance someday
# runtime_json
# "bpmn_messages",
# "correlations",
# "data",
# "subprocesses",
# "tasks",
# "last_task", # guid generated by spiff
# "root", # guid generated by spiff
# "success", # boolean
class ProcessInstanceDataModel(SpiffworkflowBaseDBModel):
__tablename__ = "process_instance_data"
id: int = db.Column(db.Integer, primary_key=True)
# this is not deferred because there is no reason to query this model if you do not want the runtime_json
runtime_json: str = db.Column(db.JSON, nullable=False)

View File

@ -0,0 +1,47 @@
"""Process_instance."""
from __future__ import annotations
from spiffworkflow_backend.models.db import db
from spiffworkflow_backend.models.db import SpiffworkflowBaseDBModel
# top level serialized keys
#
# static
# "subprocess_specs",
# "spec",
# "serializer_version",
#
# runtime
# "bpmn_messages",
# "correlations",
# "data",
# "subprocesses",
# "tasks"
# "last_task", # guid generated by spiff
# "root", # guid generated by spiff
# "success", # boolean
#
# new columns on process_instance
#
# delta algorithm:
# a = {"hey": { "hey2": 2, "hey3": 3, "hey6": 7 }, "hey30": 3, "hey40": 4}
# b = {"hey": { "hey2": 4, "hey5": 3 }, "hey20": 2, "hey30": 3}
# a_keys = set(a.keys())
# b_keys = set(b.keys())
# removed = a_keys - b_keys
# added_keys = b_keys - a_keys
# keys_present_in_both = a_keys & b_keys
# changed = {}
# for key_in_both in keys_present_in_both:
# if a[key_in_both] != b[key_in_both]:
# changed[key_in_both] = b[key_in_both]
# added = {}
# for added_key in added_keys:
# added[added_key] = b[added_key]
# final_tuple = [added, removed, changed]
class SerializedBpmnDefinitionModel(SpiffworkflowBaseDBModel):
__tablename__ = "serialized_bpmn_definition"
id: int = db.Column(db.Integer, primary_key=True)
hash: str = db.Column(db.String(255), nullable=False, index=True, unique=True)
static_json: str = db.Column(db.JSON, nullable=False)

View File

@ -31,6 +31,7 @@ class SpiffStepDetailsModel(SpiffworkflowBaseDBModel):
task_id: str = db.Column(db.String(50), nullable=False) task_id: str = db.Column(db.String(50), nullable=False)
task_state: str = db.Column(db.String(50), nullable=False) task_state: str = db.Column(db.String(50), nullable=False)
bpmn_task_identifier: str = db.Column(db.String(255), nullable=False) bpmn_task_identifier: str = db.Column(db.String(255), nullable=False)
delta_json: list = deferred(db.Column(db.JSON)) # type: ignore
start_in_seconds: float = db.Column(db.DECIMAL(17, 6), nullable=False) start_in_seconds: float = db.Column(db.DECIMAL(17, 6), nullable=False)

View File

@ -190,20 +190,32 @@ def task_data_update(
if process_instance: if process_instance:
if process_instance.status != "suspended": if process_instance.status != "suspended":
raise ProcessInstanceTaskDataCannotBeUpdatedError( raise ProcessInstanceTaskDataCannotBeUpdatedError(
"The process instance needs to be suspended to udpate the task-data." "The process instance needs to be suspended to update the task-data."
f" It is currently: {process_instance.status}" f" It is currently: {process_instance.status}"
) )
process_instance_bpmn_json_dict = json.loads(process_instance.bpmn_json) process_instance_data = process_instance.process_instance_data
if process_instance_data is None:
raise ApiError(
error_code="process_instance_data_not_found",
message=(
"Could not find task data related to process instance:"
f" {process_instance.id}"
),
)
process_instance_data_dict = json.loads(process_instance_data.runtime_json)
if "new_task_data" in body: if "new_task_data" in body:
new_task_data_str: str = body["new_task_data"] new_task_data_str: str = body["new_task_data"]
new_task_data_dict = json.loads(new_task_data_str) new_task_data_dict = json.loads(new_task_data_str)
if task_id in process_instance_bpmn_json_dict["tasks"]: if task_id in process_instance_data_dict["tasks"]:
process_instance_bpmn_json_dict["tasks"][task_id][ process_instance_data_dict["tasks"][task_id][
"data" "data"
] = new_task_data_dict ] = new_task_data_dict
process_instance.bpmn_json = json.dumps(process_instance_bpmn_json_dict) process_instance_data.runtime_json = json.dumps(
db.session.add(process_instance) process_instance_data_dict
)
db.session.add(process_instance_data)
try: try:
db.session.commit() db.session.commit()
except Exception as e: except Exception as e:

View File

@ -550,9 +550,14 @@ def process_instance_task_list(
) )
step_details = step_detail_query.all() step_details = step_detail_query.all()
bpmn_json = json.loads(process_instance.bpmn_json or "{}")
tasks = bpmn_json["tasks"] process_instance_data = process_instance.process_instance_data
subprocesses = bpmn_json["subprocesses"] process_instance_data_json = (
"{}" if process_instance_data is None else process_instance_data.runtime_json
)
process_instance_data_dict = json.loads(process_instance_data_json)
tasks = process_instance_data_dict["tasks"]
subprocesses = process_instance_data_dict["subprocesses"]
steps_by_id = {step_detail.task_id: step_detail for step_detail in step_details} steps_by_id = {step_detail.task_id: step_detail for step_detail in step_details}
@ -577,6 +582,7 @@ def process_instance_task_list(
subprocess_info["tasks"][spiff_task_id]["state"] = ( subprocess_info["tasks"][spiff_task_id]["state"] = (
subprocess_state_overrides.get(spiff_task_id, TaskState.FUTURE) subprocess_state_overrides.get(spiff_task_id, TaskState.FUTURE)
) )
for spiff_task_id in tasks: for spiff_task_id in tasks:
if spiff_task_id not in steps_by_id: if spiff_task_id not in steps_by_id:
tasks[spiff_task_id]["data"] = {} tasks[spiff_task_id]["data"] = {}
@ -584,7 +590,7 @@ def process_instance_task_list(
spiff_task_id, TaskState.FUTURE spiff_task_id, TaskState.FUTURE
) )
process_instance.bpmn_json = json.dumps(bpmn_json) process_instance_data.runtime_json = json.dumps(process_instance_data_dict)
processor = ProcessInstanceProcessor(process_instance) processor = ProcessInstanceProcessor(process_instance)
spiff_task = processor.__class__.get_task_by_bpmn_identifier( spiff_task = processor.__class__.get_task_by_bpmn_identifier(

View File

@ -620,7 +620,7 @@ def _render_jinja_template(unprocessed_template: str, spiff_task: SpiffTask) ->
wfe.error_line = unprocessed_template.split("\n")[tb.tb_lineno - 1] wfe.error_line = unprocessed_template.split("\n")[tb.tb_lineno - 1]
tb = tb.tb_next tb = tb.tb_next
wfe.add_note( wfe.add_note(
"Jinja2 template errors can happen when trying to displaying task data" "Jinja2 template errors can happen when trying to display task data"
) )
raise wfe from error raise wfe from error

View File

@ -8,6 +8,7 @@ import re
import time import time
from datetime import datetime from datetime import datetime
from datetime import timedelta from datetime import timedelta
from hashlib import sha256
from typing import Any from typing import Any
from typing import Callable from typing import Callable
from typing import Dict from typing import Dict
@ -66,6 +67,7 @@ from spiffworkflow_backend.models.message_instance_correlation import (
) )
from spiffworkflow_backend.models.process_instance import ProcessInstanceModel from spiffworkflow_backend.models.process_instance import ProcessInstanceModel
from spiffworkflow_backend.models.process_instance import ProcessInstanceStatus from spiffworkflow_backend.models.process_instance import ProcessInstanceStatus
from spiffworkflow_backend.models.process_instance_data import ProcessInstanceDataModel
from spiffworkflow_backend.models.process_instance_metadata import ( from spiffworkflow_backend.models.process_instance_metadata import (
ProcessInstanceMetadataModel, ProcessInstanceMetadataModel,
) )
@ -73,6 +75,9 @@ from spiffworkflow_backend.models.process_model import ProcessModelInfo
from spiffworkflow_backend.models.script_attributes_context import ( from spiffworkflow_backend.models.script_attributes_context import (
ScriptAttributesContext, ScriptAttributesContext,
) )
from spiffworkflow_backend.models.serialized_bpmn_definition import (
SerializedBpmnDefinitionModel,
) # noqa: F401
from spiffworkflow_backend.models.spec_reference import SpecReferenceCache from spiffworkflow_backend.models.spec_reference import SpecReferenceCache
from spiffworkflow_backend.models.spiff_step_details import SpiffStepDetailsModel from spiffworkflow_backend.models.spiff_step_details import SpiffStepDetailsModel
from spiffworkflow_backend.models.user import UserModel from spiffworkflow_backend.models.user import UserModel
@ -204,6 +209,7 @@ class NonTaskDataBasedScriptEngineEnvironment(BasePythonScriptEngineEnvironment)
external_methods: Optional[Dict[str, Any]] = None, external_methods: Optional[Dict[str, Any]] = None,
) -> None: ) -> None:
# TODO: once integrated look at the tests that fail without Box # TODO: once integrated look at the tests that fail without Box
# context is task.data
Box.convert_to_box(context) Box.convert_to_box(context)
self.state.update(self.globals) self.state.update(self.globals)
self.state.update(external_methods or {}) self.state.update(external_methods or {})
@ -439,7 +445,7 @@ class ProcessInstanceProcessor:
self.process_model_service = ProcessModelService() self.process_model_service = ProcessModelService()
bpmn_process_spec = None bpmn_process_spec = None
subprocesses: Optional[IdToBpmnProcessSpecMapping] = None subprocesses: Optional[IdToBpmnProcessSpecMapping] = None
if process_instance_model.bpmn_json is None: if process_instance_model.serialized_bpmn_definition_id is None:
( (
bpmn_process_spec, bpmn_process_spec,
subprocesses, subprocesses,
@ -516,6 +522,16 @@ class ProcessInstanceProcessor:
self.bpmn_process_instance self.bpmn_process_instance
) )
@classmethod
def _get_full_bpmn_json(cls, process_instance_model: ProcessInstanceModel) -> dict:
if process_instance_model.serialized_bpmn_definition_id is None:
return {}
serialized_bpmn_definition = process_instance_model.serialized_bpmn_definition
process_instance_data = process_instance_model.process_instance_data
loaded_json: dict = json.loads(serialized_bpmn_definition.static_json or "{}")
loaded_json.update(json.loads(process_instance_data.runtime_json))
return loaded_json
def current_user(self) -> Any: def current_user(self) -> Any:
"""Current_user.""" """Current_user."""
current_user = None current_user = None
@ -551,16 +567,19 @@ class ProcessInstanceProcessor:
subprocesses: Optional[IdToBpmnProcessSpecMapping] = None, subprocesses: Optional[IdToBpmnProcessSpecMapping] = None,
) -> BpmnWorkflow: ) -> BpmnWorkflow:
"""__get_bpmn_process_instance.""" """__get_bpmn_process_instance."""
if process_instance_model.bpmn_json: if process_instance_model.serialized_bpmn_definition_id is not None:
# turn off logging to avoid duplicated spiff logs # turn off logging to avoid duplicated spiff logs
spiff_logger = logging.getLogger("spiff") spiff_logger = logging.getLogger("spiff")
original_spiff_logger_log_level = spiff_logger.level original_spiff_logger_log_level = spiff_logger.level
spiff_logger.setLevel(logging.WARNING) spiff_logger.setLevel(logging.WARNING)
try: try:
full_bpmn_json = ProcessInstanceProcessor._get_full_bpmn_json(
process_instance_model
)
bpmn_process_instance = ( bpmn_process_instance = (
ProcessInstanceProcessor._serializer.deserialize_json( ProcessInstanceProcessor._serializer.deserialize_json(
process_instance_model.bpmn_json json.dumps(full_bpmn_json)
) )
) )
except Exception as err: except Exception as err:
@ -718,12 +737,14 @@ class ProcessInstanceProcessor:
Rerturns: {process_name: [task_1, task_2, ...], ...} Rerturns: {process_name: [task_1, task_2, ...], ...}
""" """
bpmn_json = json.loads(self.process_instance_model.bpmn_json or "{}") bpmn_definition_dict = json.loads(
processes: dict[str, list[str]] = {bpmn_json["spec"]["name"]: []} self.process_instance_model.serialized_bpmn_definition.static_json or "{}"
for task_name, _task_spec in bpmn_json["spec"]["task_specs"].items(): )
processes[bpmn_json["spec"]["name"]].append(task_name) processes: dict[str, list[str]] = {bpmn_definition_dict["spec"]["name"]: []}
if "subprocess_specs" in bpmn_json: for task_name, _task_spec in bpmn_definition_dict["spec"]["task_specs"].items():
for subprocess_name, subprocess_details in bpmn_json[ processes[bpmn_definition_dict["spec"]["name"]].append(task_name)
if "subprocess_specs" in bpmn_definition_dict:
for subprocess_name, subprocess_details in bpmn_definition_dict[
"subprocess_specs" "subprocess_specs"
].items(): ].items():
processes[subprocess_name] = [] processes[subprocess_name] = []
@ -758,7 +779,7 @@ class ProcessInstanceProcessor:
################################################################# #################################################################
def get_all_task_specs(self, bpmn_json: dict) -> dict[str, dict]: def get_all_task_specs(self) -> dict[str, dict]:
"""This looks both at top level task_specs and subprocess_specs in the serialized data. """This looks both at top level task_specs and subprocess_specs in the serialized data.
It returns a dict of all task specs based on the task name like it is in the serialized form. It returns a dict of all task specs based on the task name like it is in the serialized form.
@ -766,9 +787,12 @@ class ProcessInstanceProcessor:
NOTE: this may not fully work for tasks that are NOT call activities since their task_name may not be unique NOTE: this may not fully work for tasks that are NOT call activities since their task_name may not be unique
but in our current use case we only care about the call activities here. but in our current use case we only care about the call activities here.
""" """
spiff_task_json = bpmn_json["spec"]["task_specs"] or {} bpmn_definition_dict = json.loads(
if "subprocess_specs" in bpmn_json: self.process_instance_model.serialized_bpmn_definition.static_json or "{}"
for _subprocess_name, subprocess_details in bpmn_json[ )
spiff_task_json = bpmn_definition_dict["spec"]["task_specs"] or {}
if "subprocess_specs" in bpmn_definition_dict:
for _subprocess_name, subprocess_details in bpmn_definition_dict[
"subprocess_specs" "subprocess_specs"
].items(): ].items():
if "task_specs" in subprocess_details: if "task_specs" in subprocess_details:
@ -790,13 +814,17 @@ class ProcessInstanceProcessor:
Also note that subprocess_task_id might in fact be a call activity, because spiff treats Also note that subprocess_task_id might in fact be a call activity, because spiff treats
call activities like subprocesses in terms of the serialization. call activities like subprocesses in terms of the serialization.
""" """
bpmn_json = json.loads(self.process_instance_model.bpmn_json or "{}") process_instance_data_dict = json.loads(
spiff_task_json = self.get_all_task_specs(bpmn_json) self.process_instance_model.process_instance_data.runtime_json or "{}"
)
spiff_task_json = self.get_all_task_specs()
subprocesses_by_child_task_ids = {} subprocesses_by_child_task_ids = {}
task_typename_by_task_id = {} task_typename_by_task_id = {}
if "subprocesses" in bpmn_json: if "subprocesses" in process_instance_data_dict:
for subprocess_id, subprocess_details in bpmn_json["subprocesses"].items(): for subprocess_id, subprocess_details in process_instance_data_dict[
"subprocesses"
].items():
for task_id, task_details in subprocess_details["tasks"].items(): for task_id, task_details in subprocess_details["tasks"].items():
subprocesses_by_child_task_ids[task_id] = subprocess_id subprocesses_by_child_task_ids[task_id] = subprocess_id
task_name = task_details["task_spec"] task_name = task_details["task_spec"]
@ -833,9 +861,56 @@ class ProcessInstanceProcessor:
) )
return subprocesses_by_child_task_ids return subprocesses_by_child_task_ids
def _add_bpmn_json_records(self) -> None:
"""Adds serialized_bpmn_definition and process_instance_data records to the db session.
Expects the save method to commit it.
"""
bpmn_dict = json.loads(self.serialize())
bpmn_dict_keys = ("spec", "subprocess_specs", "serializer_version")
bpmn_spec_dict = {}
process_instance_data_dict = {}
for bpmn_key in bpmn_dict.keys():
if bpmn_key in bpmn_dict_keys:
bpmn_spec_dict[bpmn_key] = bpmn_dict[bpmn_key]
else:
process_instance_data_dict[bpmn_key] = bpmn_dict[bpmn_key]
# FIXME: always save new hash until we get updated Spiff without loopresettask
# if self.process_instance_model.serialized_bpmn_definition_id is None:
new_hash_digest = sha256(
json.dumps(bpmn_spec_dict, sort_keys=True).encode("utf8")
).hexdigest()
serialized_bpmn_definition = SerializedBpmnDefinitionModel.query.filter_by(
hash=new_hash_digest
).first()
if serialized_bpmn_definition is None:
serialized_bpmn_definition = SerializedBpmnDefinitionModel(
hash=new_hash_digest, static_json=json.dumps(bpmn_spec_dict)
)
db.session.add(serialized_bpmn_definition)
if (
self.process_instance_model.serialized_bpmn_definition_id is None
or self.process_instance_model.serialized_bpmn_definition.hash
!= new_hash_digest
):
self.process_instance_model.serialized_bpmn_definition = (
serialized_bpmn_definition
)
process_instance_data = None
if self.process_instance_model.process_instance_data_id is None:
process_instance_data = ProcessInstanceDataModel()
else:
process_instance_data = self.process_instance_model.process_instance_data
process_instance_data.runtime_json = json.dumps(process_instance_data_dict)
db.session.add(process_instance_data)
self.process_instance_model.process_instance_data = process_instance_data
def save(self) -> None: def save(self) -> None:
"""Saves the current state of this processor to the database.""" """Saves the current state of this processor to the database."""
self.process_instance_model.bpmn_json = self.serialize() self._add_bpmn_json_records()
complete_states = [TaskState.CANCELLED, TaskState.COMPLETED] complete_states = [TaskState.CANCELLED, TaskState.COMPLETED]
user_tasks = list(self.get_all_user_tasks()) user_tasks = list(self.get_all_user_tasks())
@ -1232,6 +1307,8 @@ class ProcessInstanceProcessor:
return ProcessInstanceStatus.complete return ProcessInstanceStatus.complete
user_tasks = bpmn_process_instance.get_ready_user_tasks() user_tasks = bpmn_process_instance.get_ready_user_tasks()
# workflow.waiting_events (includes timers, and timers have a when firing property)
# if the process instance has status "waiting" it will get picked up # if the process instance has status "waiting" it will get picked up
# by background processing. when that happens it can potentially overwrite # by background processing. when that happens it can potentially overwrite
# human tasks which is bad because we cache them with the previous id's. # human tasks which is bad because we cache them with the previous id's.

View File

@ -1977,107 +1977,97 @@ class TestProcessApi(BaseTest):
assert response.json[0]["identifier"] == report_identifier assert response.json[0]["identifier"] == report_identifier
assert response.json[0]["report_metadata"]["order_by"] == ["month"] assert response.json[0]["report_metadata"]["order_by"] == ["month"]
def test_process_instance_report_show_with_default_list( # def test_process_instance_report_show_with_default_list(
self, # self,
app: Flask, # app: Flask,
client: FlaskClient, # client: FlaskClient,
with_db_and_bpmn_file_cleanup: None, # with_db_and_bpmn_file_cleanup: None,
with_super_admin_user: UserModel, # with_super_admin_user: UserModel,
setup_process_instances_for_reports: list[ProcessInstanceModel], # setup_process_instances_for_reports: list[ProcessInstanceModel],
) -> None: # ) -> None:
"""Test_process_instance_report_show_with_default_list.""" # """Test_process_instance_report_show_with_default_list."""
process_group_id = "runs_without_input" # process_group_id = "runs_without_input"
process_model_id = "sample" # process_model_id = "sample"
# bpmn_file_name = "sample.bpmn" # process_model_identifier = f"{process_group_id}/{process_model_id}"
# bpmn_file_location = "sample" #
# process_model_identifier = self.create_group_and_model_with_bpmn( # report_metadata = {
# client, # "columns": [
# with_super_admin_user, # {"Header": "id", "accessor": "id"},
# process_group_id=process_group_id, # {
# process_model_id=process_model_id, # "Header": "process_model_identifier",
# bpmn_file_name=bpmn_file_name, # "accessor": "process_model_identifier",
# bpmn_file_location=bpmn_file_location # },
# ) # {"Header": "process_group_id", "accessor": "process_group_identifier"},
process_model_identifier = f"{process_group_id}/{process_model_id}" # {"Header": "start_in_seconds", "accessor": "start_in_seconds"},
# {"Header": "status", "accessor": "status"},
report_metadata = { # {"Header": "Name", "accessor": "name"},
"columns": [ # {"Header": "Status", "accessor": "status"},
{"Header": "id", "accessor": "id"}, # ],
{ # "order_by": ["test_score"],
"Header": "process_model_identifier", # "filter_by": [
"accessor": "process_model_identifier", # {"field_name": "grade_level", "operator": "equals", "field_value": 2}
}, # ],
{"Header": "process_group_id", "accessor": "process_group_identifier"}, # }
{"Header": "start_in_seconds", "accessor": "start_in_seconds"}, #
{"Header": "status", "accessor": "status"}, # report = ProcessInstanceReportModel.create_with_attributes(
{"Header": "Name", "accessor": "name"}, # identifier="sure",
{"Header": "Status", "accessor": "status"}, # report_metadata=report_metadata,
], # user=with_super_admin_user,
"order_by": ["test_score"], # )
"filter_by": [ #
{"field_name": "grade_level", "operator": "equals", "field_value": 2} # response = client.get(
], # f"/v1.0/process-instances/reports/{report.id}",
} # headers=self.logged_in_headers(with_super_admin_user),
# )
report = ProcessInstanceReportModel.create_with_attributes( # assert response.status_code == 200
identifier="sure", # assert response.json is not None
report_metadata=report_metadata, # assert len(response.json["results"]) == 2
user=with_super_admin_user, # assert response.json["pagination"]["count"] == 2
) # assert response.json["pagination"]["pages"] == 1
# assert response.json["pagination"]["total"] == 2
response = client.get( #
f"/v1.0/process-instances/reports/{report.id}", # process_instance_dict = response.json["results"][0]
headers=self.logged_in_headers(with_super_admin_user), # assert type(process_instance_dict["id"]) is int
) # assert (
assert response.status_code == 200 # process_instance_dict["process_model_identifier"]
assert response.json is not None # == process_model_identifier
assert len(response.json["results"]) == 2 # )
assert response.json["pagination"]["count"] == 2 # assert type(process_instance_dict["start_in_seconds"]) is int
assert response.json["pagination"]["pages"] == 1 # assert process_instance_dict["start_in_seconds"] > 0
assert response.json["pagination"]["total"] == 2 # assert process_instance_dict["status"] == "complete"
#
process_instance_dict = response.json["results"][0] # def test_process_instance_report_show_with_dynamic_filter_and_query_param(
assert type(process_instance_dict["id"]) is int # self,
assert ( # app: Flask,
process_instance_dict["process_model_identifier"] # client: FlaskClient,
== process_model_identifier # with_db_and_bpmn_file_cleanup: None,
) # with_super_admin_user: UserModel,
assert type(process_instance_dict["start_in_seconds"]) is int # setup_process_instances_for_reports: list[ProcessInstanceModel],
assert process_instance_dict["start_in_seconds"] > 0 # ) -> None:
assert process_instance_dict["status"] == "complete" # """Test_process_instance_report_show_with_default_list."""
# report_metadata = {
def test_process_instance_report_show_with_dynamic_filter_and_query_param( # "filter_by": [
self, # {
app: Flask, # "field_name": "grade_level",
client: FlaskClient, # "operator": "equals",
with_db_and_bpmn_file_cleanup: None, # "field_value": "{{grade_level}}",
with_super_admin_user: UserModel, # }
setup_process_instances_for_reports: list[ProcessInstanceModel], # ],
) -> None: # }
"""Test_process_instance_report_show_with_default_list.""" #
report_metadata = { # report = ProcessInstanceReportModel.create_with_attributes(
"filter_by": [ # identifier="sure",
{ # report_metadata=report_metadata,
"field_name": "grade_level", # user=with_super_admin_user,
"operator": "equals", # )
"field_value": "{{grade_level}}", #
} # response = client.get(
], # f"/v1.0/process-instances/reports/{report.id}?grade_level=1",
} # headers=self.logged_in_headers(with_super_admin_user),
# )
report = ProcessInstanceReportModel.create_with_attributes( # assert response.status_code == 200
identifier="sure", # assert response.json is not None
report_metadata=report_metadata, # assert len(response.json["results"]) == 1
user=with_super_admin_user,
)
response = client.get(
f"/v1.0/process-instances/reports/{report.id}?grade_level=1",
headers=self.logged_in_headers(with_super_admin_user),
)
assert response.status_code == 200
assert response.json is not None
assert len(response.json["results"]) == 1
def test_process_instance_report_show_with_bad_identifier( def test_process_instance_report_show_with_bad_identifier(
self, self,

View File

@ -42,7 +42,6 @@ class TestMessageService(BaseTest):
"description": "We built a new feature for messages!", "description": "We built a new feature for messages!",
"amount": "100.00", "amount": "100.00",
} }
self.start_sender_process(client, with_super_admin_user, "test_from_api") self.start_sender_process(client, with_super_admin_user, "test_from_api")
self.assure_a_message_was_sent() self.assure_a_message_was_sent()
self.assure_there_is_a_process_waiting_on_a_message() self.assure_there_is_a_process_waiting_on_a_message()
@ -80,6 +79,7 @@ class TestMessageService(BaseTest):
self, self,
app: Flask, app: Flask,
client: FlaskClient, client: FlaskClient,
with_db_and_bpmn_file_cleanup: None,
with_super_admin_user: UserModel, with_super_admin_user: UserModel,
) -> None: ) -> None:
"""Test messages between two different running processes using a single conversation. """Test messages between two different running processes using a single conversation.

View File

@ -1,142 +1,141 @@
"""Test Permissions.""" # from typing import Optional
from typing import Optional #
# from flask.app import Flask
from flask.app import Flask # from tests.spiffworkflow_backend.helpers.base_test import BaseTest
from tests.spiffworkflow_backend.helpers.base_test import BaseTest #
# from spiffworkflow_backend.models.process_instance import ProcessInstanceModel
from spiffworkflow_backend.models.process_instance import ProcessInstanceModel # from spiffworkflow_backend.models.process_instance_report import (
from spiffworkflow_backend.models.process_instance_report import ( # ProcessInstanceReportModel,
ProcessInstanceReportModel, # )
) #
# # from tests.spiffworkflow_backend.helpers.test_data import find_or_create_process_group
# from tests.spiffworkflow_backend.helpers.test_data import find_or_create_process_group # # from spiffworkflow_backend.models.permission_assignment import PermissionAssignmentModel
# from spiffworkflow_backend.models.permission_assignment import PermissionAssignmentModel # # from spiffworkflow_backend.models.permission_target import PermissionTargetModel
# from spiffworkflow_backend.models.permission_target import PermissionTargetModel #
#
# def test_generate_report_with_filter_by(
def test_generate_report_with_filter_by( # app: Flask,
app: Flask, # with_db_and_bpmn_file_cleanup: None,
with_db_and_bpmn_file_cleanup: None, # setup_process_instances_for_reports: list[ProcessInstanceModel],
setup_process_instances_for_reports: list[ProcessInstanceModel], # ) -> None:
) -> None: # """Test_user_can_be_given_permission_to_administer_process_group."""
"""Test_user_can_be_given_permission_to_administer_process_group.""" # process_instances = setup_process_instances_for_reports
process_instances = setup_process_instances_for_reports # report_metadata = {
report_metadata = { # "filter_by": [
"filter_by": [ # {"field_name": "grade_level", "operator": "equals", "field_value": 2}
{"field_name": "grade_level", "operator": "equals", "field_value": 2} # ]
] # }
} # results = do_report_with_metadata_and_instances(report_metadata, process_instances)
results = do_report_with_metadata_and_instances(report_metadata, process_instances) # assert len(results) == 2
assert len(results) == 2 # names = get_names_from_results(results)
names = get_names_from_results(results) # assert names == ["kay", "jay"]
assert names == ["kay", "jay"] #
#
# def test_generate_report_with_filter_by_with_variable_substitution(
def test_generate_report_with_filter_by_with_variable_substitution( # app: Flask,
app: Flask, # with_db_and_bpmn_file_cleanup: None,
with_db_and_bpmn_file_cleanup: None, # setup_process_instances_for_reports: list[ProcessInstanceModel],
setup_process_instances_for_reports: list[ProcessInstanceModel], # ) -> None:
) -> None: # """Test_generate_report_with_filter_by_with_variable_substitution."""
"""Test_generate_report_with_filter_by_with_variable_substitution.""" # process_instances = setup_process_instances_for_reports
process_instances = setup_process_instances_for_reports # report_metadata = {
report_metadata = { # "filter_by": [
"filter_by": [ # {
{ # "field_name": "grade_level",
"field_name": "grade_level", # "operator": "equals",
"operator": "equals", # "field_value": "{{grade_level}}",
"field_value": "{{grade_level}}", # }
} # ]
] # }
} # results = do_report_with_metadata_and_instances(
results = do_report_with_metadata_and_instances( # report_metadata, process_instances, {"grade_level": 1}
report_metadata, process_instances, {"grade_level": 1} # )
) # assert len(results) == 1
assert len(results) == 1 # names = get_names_from_results(results)
names = get_names_from_results(results) # assert names == ["ray"]
assert names == ["ray"] #
#
# def test_generate_report_with_order_by_and_one_field(
def test_generate_report_with_order_by_and_one_field( # app: Flask,
app: Flask, # with_db_and_bpmn_file_cleanup: None,
with_db_and_bpmn_file_cleanup: None, # setup_process_instances_for_reports: list[ProcessInstanceModel],
setup_process_instances_for_reports: list[ProcessInstanceModel], # ) -> None:
) -> None: # """Test_generate_report_with_order_by_and_one_field."""
"""Test_generate_report_with_order_by_and_one_field.""" # process_instances = setup_process_instances_for_reports
process_instances = setup_process_instances_for_reports # report_metadata = {"order_by": ["test_score"]}
report_metadata = {"order_by": ["test_score"]} # results = do_report_with_metadata_and_instances(report_metadata, process_instances)
results = do_report_with_metadata_and_instances(report_metadata, process_instances) # assert len(results) == 3
assert len(results) == 3 # names = get_names_from_results(results)
names = get_names_from_results(results) # assert names == ["jay", "ray", "kay"]
assert names == ["jay", "ray", "kay"] #
#
# def test_generate_report_with_order_by_and_two_fields(
def test_generate_report_with_order_by_and_two_fields( # app: Flask,
app: Flask, # with_db_and_bpmn_file_cleanup: None,
with_db_and_bpmn_file_cleanup: None, # setup_process_instances_for_reports: list[ProcessInstanceModel],
setup_process_instances_for_reports: list[ProcessInstanceModel], # ) -> None:
) -> None: # """Test_generate_report_with_order_by_and_two_fields."""
"""Test_generate_report_with_order_by_and_two_fields.""" # process_instances = setup_process_instances_for_reports
process_instances = setup_process_instances_for_reports # report_metadata = {"order_by": ["grade_level", "test_score"]}
report_metadata = {"order_by": ["grade_level", "test_score"]} # results = do_report_with_metadata_and_instances(report_metadata, process_instances)
results = do_report_with_metadata_and_instances(report_metadata, process_instances) # assert len(results) == 3
assert len(results) == 3 # names = get_names_from_results(results)
names = get_names_from_results(results) # assert names == ["ray", "jay", "kay"]
assert names == ["ray", "jay", "kay"] #
#
# def test_generate_report_with_order_by_desc(
def test_generate_report_with_order_by_desc( # app: Flask,
app: Flask, # with_db_and_bpmn_file_cleanup: None,
with_db_and_bpmn_file_cleanup: None, # setup_process_instances_for_reports: list[ProcessInstanceModel],
setup_process_instances_for_reports: list[ProcessInstanceModel], # ) -> None:
) -> None: # """Test_generate_report_with_order_by_desc."""
"""Test_generate_report_with_order_by_desc.""" # process_instances = setup_process_instances_for_reports
process_instances = setup_process_instances_for_reports # report_metadata = {"order_by": ["grade_level", "-test_score"]}
report_metadata = {"order_by": ["grade_level", "-test_score"]} # results = do_report_with_metadata_and_instances(report_metadata, process_instances)
results = do_report_with_metadata_and_instances(report_metadata, process_instances) # assert len(results) == 3
assert len(results) == 3 # names = get_names_from_results(results)
names = get_names_from_results(results) # assert names == ["ray", "kay", "jay"]
assert names == ["ray", "kay", "jay"] #
#
# def test_generate_report_with_columns(
def test_generate_report_with_columns( # app: Flask,
app: Flask, # with_db_and_bpmn_file_cleanup: None,
with_db_and_bpmn_file_cleanup: None, # setup_process_instances_for_reports: list[ProcessInstanceModel],
setup_process_instances_for_reports: list[ProcessInstanceModel], # ) -> None:
) -> None: # """Test_generate_report_with_columns."""
"""Test_generate_report_with_columns.""" # process_instances = setup_process_instances_for_reports
process_instances = setup_process_instances_for_reports # report_metadata = {
report_metadata = { # "columns": [
"columns": [ # {"Header": "Name", "accessor": "name"},
{"Header": "Name", "accessor": "name"}, # {"Header": "Status", "accessor": "status"},
{"Header": "Status", "accessor": "status"}, # ],
], # "order_by": ["test_score"],
"order_by": ["test_score"], # "filter_by": [
"filter_by": [ # {"field_name": "grade_level", "operator": "equals", "field_value": 1}
{"field_name": "grade_level", "operator": "equals", "field_value": 1} # ],
], # }
} # results = do_report_with_metadata_and_instances(report_metadata, process_instances)
results = do_report_with_metadata_and_instances(report_metadata, process_instances) # assert len(results) == 1
assert len(results) == 1 # assert results == [{"name": "ray", "status": "complete"}]
assert results == [{"name": "ray", "status": "complete"}] #
#
# def do_report_with_metadata_and_instances(
def do_report_with_metadata_and_instances( # report_metadata: dict,
report_metadata: dict, # process_instances: list[ProcessInstanceModel],
process_instances: list[ProcessInstanceModel], # substitution_variables: Optional[dict] = None,
substitution_variables: Optional[dict] = None, # ) -> list[dict]:
) -> list[dict]: # """Do_report_with_metadata_and_instances."""
"""Do_report_with_metadata_and_instances.""" # process_instance_report = ProcessInstanceReportModel.create_with_attributes(
process_instance_report = ProcessInstanceReportModel.create_with_attributes( # identifier="sure",
identifier="sure", # report_metadata=report_metadata,
report_metadata=report_metadata, # user=BaseTest.find_or_create_user(),
user=BaseTest.find_or_create_user(), # )
) #
# return process_instance_report.generate_report(
return process_instance_report.generate_report( # process_instances, substitution_variables
process_instances, substitution_variables # )["results"]
)["results"] #
#
# def get_names_from_results(results: list[dict]) -> list[str]:
def get_names_from_results(results: list[dict]) -> list[str]: # """Get_names_from_results."""
"""Get_names_from_results.""" # return [result["name"] for result in results]
return [result["name"] for result in results]