Improvement/better serialization (#540)
* using new spiffworkflow locally and the db can be recreated w/ burnettk
* tests are passing w/ burnettk
* added version 3 data migration for typenames on tasks and bpmn processes w/ burnettk
* pyl w/ burnettk
* switch SpiffWorkflow back to main

Co-authored-by: jasquat <jasquat@users.noreply.github.com>
parent 493b2696ff
commit 4d842e8dbf
@@ -41,7 +41,7 @@ def put_serializer_version_onto_numeric_track() -> None:


 def all_potentially_relevant_process_instances() -> list[ProcessInstanceModel]:
     return ProcessInstanceModel.query.filter(
-        ProcessInstanceModel.spiff_serializer_version < Version2.VERSION,
+        ProcessInstanceModel.spiff_serializer_version < Version2.version(),
         ProcessInstanceModel.status.in_(ProcessInstanceModel.non_terminal_statuses()),
     ).all()
@@ -1,4 +1,4 @@
-# This file is automatically @generated by Poetry 1.5.1 and should not be changed by hand.
+# This file is automatically @generated by Poetry 1.6.1 and should not be changed by hand.

 [[package]]
 name = "alembic"
@@ -930,6 +930,7 @@ files = [
     {file = "greenlet-2.0.2-cp27-cp27m-win32.whl", hash = "sha256:6c3acb79b0bfd4fe733dff8bc62695283b57949ebcca05ae5c129eb606ff2d74"},
     {file = "greenlet-2.0.2-cp27-cp27m-win_amd64.whl", hash = "sha256:283737e0da3f08bd637b5ad058507e578dd462db259f7f6e4c5c365ba4ee9343"},
     {file = "greenlet-2.0.2-cp27-cp27mu-manylinux2010_x86_64.whl", hash = "sha256:d27ec7509b9c18b6d73f2f5ede2622441de812e7b1a80bbd446cb0633bd3d5ae"},
+    {file = "greenlet-2.0.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:d967650d3f56af314b72df7089d96cda1083a7fc2da05b375d2bc48c82ab3f3c"},
     {file = "greenlet-2.0.2-cp310-cp310-macosx_11_0_x86_64.whl", hash = "sha256:30bcf80dda7f15ac77ba5af2b961bdd9dbc77fd4ac6105cee85b0d0a5fcf74df"},
     {file = "greenlet-2.0.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:26fbfce90728d82bc9e6c38ea4d038cba20b7faf8a0ca53a9c07b67318d46088"},
     {file = "greenlet-2.0.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9190f09060ea4debddd24665d6804b995a9c122ef5917ab26e1566dcc712ceeb"},
@@ -938,6 +939,7 @@ files = [
     {file = "greenlet-2.0.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:76ae285c8104046b3a7f06b42f29c7b73f77683df18c49ab5af7983994c2dd91"},
     {file = "greenlet-2.0.2-cp310-cp310-win_amd64.whl", hash = "sha256:2d4686f195e32d36b4d7cf2d166857dbd0ee9f3d20ae349b6bf8afc8485b3645"},
     {file = "greenlet-2.0.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:c4302695ad8027363e96311df24ee28978162cdcdd2006476c43970b384a244c"},
+    {file = "greenlet-2.0.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:d4606a527e30548153be1a9f155f4e283d109ffba663a15856089fb55f933e47"},
     {file = "greenlet-2.0.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c48f54ef8e05f04d6eff74b8233f6063cb1ed960243eacc474ee73a2ea8573ca"},
     {file = "greenlet-2.0.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a1846f1b999e78e13837c93c778dcfc3365902cfb8d1bdb7dd73ead37059f0d0"},
     {file = "greenlet-2.0.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3a06ad5312349fec0ab944664b01d26f8d1f05009566339ac6f63f56589bc1a2"},
@@ -967,6 +969,7 @@ files = [
     {file = "greenlet-2.0.2-cp37-cp37m-win32.whl", hash = "sha256:3f6ea9bd35eb450837a3d80e77b517ea5bc56b4647f5502cd28de13675ee12f7"},
     {file = "greenlet-2.0.2-cp37-cp37m-win_amd64.whl", hash = "sha256:7492e2b7bd7c9b9916388d9df23fa49d9b88ac0640db0a5b4ecc2b653bf451e3"},
     {file = "greenlet-2.0.2-cp38-cp38-macosx_10_15_x86_64.whl", hash = "sha256:b864ba53912b6c3ab6bcb2beb19f19edd01a6bfcbdfe1f37ddd1778abfe75a30"},
+    {file = "greenlet-2.0.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:1087300cf9700bbf455b1b97e24db18f2f77b55302a68272c56209d5587c12d1"},
     {file = "greenlet-2.0.2-cp38-cp38-manylinux2010_x86_64.whl", hash = "sha256:ba2956617f1c42598a308a84c6cf021a90ff3862eddafd20c3333d50f0edb45b"},
     {file = "greenlet-2.0.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fc3a569657468b6f3fb60587e48356fe512c1754ca05a564f11366ac9e306526"},
     {file = "greenlet-2.0.2-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8eab883b3b2a38cc1e050819ef06a7e6344d4a990d24d45bc6f2cf959045a45b"},
@@ -975,6 +978,7 @@ files = [
     {file = "greenlet-2.0.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:b0ef99cdbe2b682b9ccbb964743a6aca37905fda5e0452e5ee239b1654d37f2a"},
     {file = "greenlet-2.0.2-cp38-cp38-win32.whl", hash = "sha256:b80f600eddddce72320dbbc8e3784d16bd3fb7b517e82476d8da921f27d4b249"},
     {file = "greenlet-2.0.2-cp38-cp38-win_amd64.whl", hash = "sha256:4d2e11331fc0c02b6e84b0d28ece3a36e0548ee1a1ce9ddde03752d9b79bba40"},
+    {file = "greenlet-2.0.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:8512a0c38cfd4e66a858ddd1b17705587900dd760c6003998e9472b77b56d417"},
     {file = "greenlet-2.0.2-cp39-cp39-macosx_11_0_x86_64.whl", hash = "sha256:88d9ab96491d38a5ab7c56dd7a3cc37d83336ecc564e4e8816dbed12e5aaefc8"},
     {file = "greenlet-2.0.2-cp39-cp39-manylinux2010_x86_64.whl", hash = "sha256:561091a7be172ab497a3527602d467e2b3fbe75f9e783d8b8ce403fa414f71a6"},
     {file = "greenlet-2.0.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:971ce5e14dc5e73715755d0ca2975ac88cfdaefcaab078a284fea6cfabf866df"},
@@ -2366,7 +2370,7 @@ lxml = "*"
 type = "git"
 url = "https://github.com/sartography/SpiffWorkflow"
 reference = "main"
-resolved_reference = "92a7fdc7c9f4afb232f95c0e2d9008049949b92d"
+resolved_reference = "c3a49431ecbe50fd44c89ad07aa72a01fff79b42"

 [[package]]
 name = "sqlalchemy"
@@ -2419,7 +2423,7 @@ files = [
 ]

 [package.dependencies]
-greenlet = {version = "!=0.4.17", markers = "platform_machine == \"win32\" or platform_machine == \"WIN32\" or platform_machine == \"AMD64\" or platform_machine == \"amd64\" or platform_machine == \"x86_64\" or platform_machine == \"ppc64le\" or platform_machine == \"aarch64\""}
+greenlet = {version = "!=0.4.17", markers = "platform_machine == \"aarch64\" or platform_machine == \"ppc64le\" or platform_machine == \"x86_64\" or platform_machine == \"amd64\" or platform_machine == \"AMD64\" or platform_machine == \"win32\" or platform_machine == \"WIN32\""}
 typing-extensions = ">=4.2.0"

 [package.extras]
@@ -0,0 +1,35 @@
+from __future__ import annotations
+
+import abc
+from typing import Any
+
+from spiffworkflow_backend.models.process_instance import ProcessInstanceModel
+
+
+class DataMigrationBase(metaclass=abc.ABCMeta):
+    """Abstract class to describe what is required for data migration."""
+
+    @classmethod
+    def __subclasshook__(cls, subclass: Any) -> bool:
+        return (
+            hasattr(subclass, "run")
+            and callable(subclass.run)
+            and hasattr(subclass, "version")
+            and callable(subclass.version)
+            and NotImplemented
+        )
+
+    @classmethod
+    @abc.abstractmethod
+    def version(cls) -> str:
+        """Returns the version number for the migration.
+
+        NOTE: These versions should be string forms of integers.
+        This is because eventually we will store them as integers on the process instance serializer version column.
+        """
+        raise NotImplementedError("method must be implemented on subclass: version")
+
+    @classmethod
+    @abc.abstractmethod
+    def run(cls, process_instance: ProcessInstanceModel) -> None:
+        raise NotImplementedError("method must be implemented on subclass: run")
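Note: a minimal sketch of how a future migration could plug into this contract. Version4 and its body are hypothetical and not part of this commit; only the classmethod shapes come from DataMigrationBase above.

    from spiffworkflow_backend.data_migrations.data_migration_base import DataMigrationBase
    from spiffworkflow_backend.models.process_instance import ProcessInstanceModel


    class Version4(DataMigrationBase):  # hypothetical example, not in this commit
        @classmethod
        def version(cls) -> str:
            return "4"  # string form of an integer, per the docstring above

        @classmethod
        def run(cls, process_instance: ProcessInstanceModel) -> None:
            # rewrite serialized state here; ProcessInstanceMigrator.run_version
            # bumps spiff_serializer_version and commits afterwards
            pass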
@@ -2,7 +2,9 @@ import time
 from typing import Any

 from flask import current_app
+from spiffworkflow_backend.data_migrations.data_migration_base import DataMigrationBase
 from spiffworkflow_backend.data_migrations.version_2 import Version2
+from spiffworkflow_backend.data_migrations.version_3 import Version3
 from spiffworkflow_backend.models.db import db
 from spiffworkflow_backend.models.process_instance import ProcessInstanceModel
@@ -18,23 +20,15 @@ def benchmark_log_func(func: Any) -> Any:
         t1 = time.time()
         r = func(*args, **kwargs)
         t2 = time.time()
+        class_name = args[1].__name__  # type: ignore
         # __qualname__, i know you use it every day. but if not, it's the function name prefixed with any qualifying class names
-        current_app.logger.debug(f"Function={func.__qualname__}, Time={t2 - t1}")
+        current_app.logger.debug(f"Function={func.__qualname__}({class_name}), Time={t2 - t1}")
         return r

     return st_func


 class ProcessInstanceMigrator:
-    @classmethod
-    @benchmark_log_func
-    def run_version_2(cls, process_instance: ProcessInstanceModel) -> None:
-        if process_instance.spiff_serializer_version < Version2.VERSION:
-            Version2.run(process_instance)
-            process_instance.spiff_serializer_version = Version2.VERSION
-            db.session.add(process_instance)
-            db.session.commit()
-
     @classmethod
     def run(cls, process_instance: ProcessInstanceModel) -> None:
         """This updates the serialization of an instance to the current expected state.
@@ -50,4 +44,21 @@ class ProcessInstanceMigrator:
         if process_instance.spiff_serializer_version is None:
             return

-        cls.run_version_2(process_instance)
+        # we need to run version3 first to get the typenames in place otherwise version2 fails
+        # to properly create a bpmn_process_instance when calling from_dict on the assembled dictionary
+        if process_instance.spiff_serializer_version < Version2.version():
+            cls.run_version(Version3, process_instance)
+            cls.run_version(Version2, process_instance)
+        else:
+            cls.run_version(Version3, process_instance)
+
+    @classmethod
+    @benchmark_log_func
+    def run_version(
+        cls, data_migration_version_class: DataMigrationBase, process_instance: ProcessInstanceModel
+    ) -> None:
+        if process_instance.spiff_serializer_version < data_migration_version_class.version():
+            data_migration_version_class.run(process_instance)
+            process_instance.spiff_serializer_version = data_migration_version_class.version()
+            db.session.add(process_instance)
+            db.session.commit()
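Note: the guards above compare version strings, which is why DataMigrationBase insists on string forms of integers. A quick illustration of the lexicographic caveat that motivates eventually storing integers on the serializer version column:

    assert "2" < "3"          # fine while versions are single digits
    assert not ("2" < "10")   # lexicographic order breaks once versions reach two digits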
@@ -2,13 +2,16 @@ import time

 from flask import current_app
 from SpiffWorkflow.task import Task as SpiffTask  # type: ignore
+from spiffworkflow_backend.data_migrations.data_migration_base import DataMigrationBase
 from spiffworkflow_backend.models.process_instance import ProcessInstanceModel
 from spiffworkflow_backend.services.process_instance_processor import ProcessInstanceProcessor
 from spiffworkflow_backend.services.task_service import TaskService


-class Version2:
-    VERSION = "2"
+class Version2(DataMigrationBase):
+    @classmethod
+    def version(cls) -> str:
+        return "2"

     @classmethod
     def run(cls, process_instance: ProcessInstanceModel) -> None:
@@ -0,0 +1,43 @@
+import copy
+
+from flask import current_app
+from spiffworkflow_backend.data_migrations.data_migration_base import DataMigrationBase
+from spiffworkflow_backend.models.bpmn_process import BpmnProcessModel
+from spiffworkflow_backend.models.db import db
+from spiffworkflow_backend.models.process_instance import ProcessInstanceModel
+from spiffworkflow_backend.models.task import TaskModel
+
+
+class Version3(DataMigrationBase):
+    @classmethod
+    def version(cls) -> str:
+        return "3"
+
+    @classmethod
+    def run(cls, process_instance: ProcessInstanceModel) -> None:
+        try:
+            tasks = TaskModel.query.filter_by(process_instance_id=process_instance.id)
+            bpmn_process_ids = []
+            for task_model in tasks:
+                new_properties_json = copy.copy(task_model.properties_json)
+                if "typename" not in new_properties_json or new_properties_json["typename"] != "task":
+                    new_properties_json["typename"] = "task"
+                    task_model.properties_json = new_properties_json
+                    db.session.add(task_model)
+                bpmn_process_ids.append(task_model.bpmn_process_id)
+
+            bpmn_processes = BpmnProcessModel.query.filter(BpmnProcessModel.id.in_(bpmn_process_ids))  # type: ignore
+            for bpmn_process in bpmn_processes:
+                new_properties_json = copy.copy(bpmn_process.properties_json)
+                typename = "BpmnWorkflow"
+                if bpmn_process.direct_parent_process_id is not None:
+                    typename = "BpmnSubWorkflow"
+                if "typename" not in new_properties_json or new_properties_json["typename"] != typename:
+                    new_properties_json["typename"] = typename
+                    bpmn_process.properties_json = new_properties_json
+                    db.session.add(bpmn_process)
+
+        except Exception as ex:
+            current_app.logger.warning(
+                f"Failed to migrate process_instance '{process_instance.id}'. The error was {str(ex)}"
+            )
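Note: for intuition, a hedged sketch of what Version3 stamps onto existing rows. The dict shapes are assumed from the migration above, not taken from SpiffWorkflow's serializer.

    task_properties = {"id": "abc-123"}           # hypothetical pre-migration row, no typename
    task_properties["typename"] = "task"          # what Version3.run adds to every task row
    top_level = {"typename": "BpmnWorkflow"}      # bpmn_process with no direct parent
    subprocess = {"typename": "BpmnSubWorkflow"}  # bpmn_process with direct_parent_process_id set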
@@ -1,7 +1,7 @@
 from typing import Any

 from flask import current_app
-from SpiffWorkflow.bpmn.serializer.helpers.spec import BpmnSpecConverter  # type: ignore
+from SpiffWorkflow.bpmn.serializer.helpers.registry import BpmnConverter  # type: ignore
 from SpiffWorkflow.bpmn.specs.data_spec import BpmnDataStoreSpecification  # type: ignore
 from SpiffWorkflow.task import Task as SpiffTask  # type: ignore
 from spiffworkflow_backend.models.db import db
@@ -67,22 +67,14 @@ class JSONDataStore(BpmnDataStoreSpecification):  # type: ignore
         db.session.commit()
         del my_task.data[self.bpmn_id]

-    @staticmethod
-    def register_converter(spec_config: dict[str, Any]) -> None:
-        spec_config["task_specs"].append(JSONDataStoreConverter)
-
     @staticmethod
     def register_data_store_class(data_store_classes: dict[str, Any]) -> None:
         data_store_classes["JSONDataStore"] = JSONDataStore


-class JSONDataStoreConverter(BpmnSpecConverter):  # type: ignore
+class JSONDataStoreConverter(BpmnConverter):  # type: ignore
     """JSONDataStoreConverter."""

-    def __init__(self, registry):  # type: ignore
-        """__init__."""
-        super().__init__(JSONDataStore, registry)
-
     def to_dict(self, spec: Any) -> dict[str, Any]:
         """to_dict."""
         return {
@@ -119,22 +111,14 @@ class JSONFileDataStore(BpmnDataStoreSpecification):  # type: ignore
         FileSystemService.write_to_json_file_at_relative_path(location, _data_store_filename(self.bpmn_id), data)
         del my_task.data[self.bpmn_id]

-    @staticmethod
-    def register_converter(spec_config: dict[str, Any]) -> None:
-        spec_config["task_specs"].append(JSONFileDataStoreConverter)
-
     @staticmethod
     def register_data_store_class(data_store_classes: dict[str, Any]) -> None:
         data_store_classes["JSONFileDataStore"] = JSONFileDataStore


-class JSONFileDataStoreConverter(BpmnSpecConverter):  # type: ignore
+class JSONFileDataStoreConverter(BpmnConverter):  # type: ignore
     """JSONFileDataStoreConverter."""

-    def __init__(self, registry):  # type: ignore
-        """__init__."""
-        super().__init__(JSONFileDataStore, registry)
-
     def to_dict(self, spec: Any) -> dict[str, Any]:
         """to_dict."""
         return {
@@ -1,7 +1,7 @@
 from time import time
 from typing import Any

-from SpiffWorkflow.bpmn.serializer.helpers.spec import BpmnSpecConverter  # type: ignore
+from SpiffWorkflow.bpmn.serializer.helpers.registry import BpmnConverter  # type: ignore
 from SpiffWorkflow.bpmn.specs.data_spec import BpmnDataStoreSpecification  # type: ignore
 from SpiffWorkflow.task import Task as SpiffTask  # type: ignore
 from spiffworkflow_backend.models.db import db
@@ -35,22 +35,14 @@ class TypeaheadDataStore(BpmnDataStoreSpecification):  # type: ignore
             updated_at_in_seconds=now,
         )

-    @staticmethod
-    def register_converter(spec_config: dict[str, Any]) -> None:
-        spec_config["task_specs"].append(TypeaheadDataStoreConverter)
-
     @staticmethod
     def register_data_store_class(data_store_classes: dict[str, Any]) -> None:
         data_store_classes["TypeaheadDataStore"] = TypeaheadDataStore


-class TypeaheadDataStoreConverter(BpmnSpecConverter):  # type: ignore
+class TypeaheadDataStoreConverter(BpmnConverter):  # type: ignore
     """TypeaheadDataStoreConverter."""

-    def __init__(self, registry):  # type: ignore
-        """__init__."""
-        super().__init__(TypeaheadDataStore, registry)
-
     def to_dict(self, spec: Any) -> dict[str, Any]:
         """to_dict."""
         return {
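Note: the three data-store hunks above follow one pattern: drop the self-registering register_converter staticmethod and the converter __init__, and leave each converter as a plain BpmnConverter subclass. The wiring moves to a central mapping, shown in the process_instance_processor hunks below; roughly:

    # old (removed above): each spec appended its converter to the spec config
    # spec_config["task_specs"].append(JSONDataStoreConverter)

    # new (per the hunks below): one mapping handed to the serializer
    # SPIFF_CONFIG[JSONDataStore] = JSONDataStoreConverter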
@@ -15,6 +15,6 @@ class GetCurrentTaskInfo(Script):
         return """Returns the information about the current task."""

     def run(self, script_attributes_context: ScriptAttributesContext, *_args: Any, **kwargs: Any) -> Any:
-        task_dict = ProcessInstanceProcessor._serializer.task_to_dict(script_attributes_context.task)
+        task_dict = ProcessInstanceProcessor._serializer.to_dict(script_attributes_context.task)
         task_dict.pop("data")
         return task_dict
@@ -1,5 +1,6 @@
 # TODO: clean up this service for a clear distinction between it and the process_instance_service
 # where this points to the pi service
+import copy
 import decimal
 import json
 import logging
@@ -31,22 +32,25 @@ from SpiffWorkflow.bpmn.PythonScriptEngine import PythonScriptEngine  # type: ig
 from SpiffWorkflow.bpmn.PythonScriptEngineEnvironment import BasePythonScriptEngineEnvironment  # type: ignore
 from SpiffWorkflow.bpmn.PythonScriptEngineEnvironment import Box
 from SpiffWorkflow.bpmn.PythonScriptEngineEnvironment import BoxedTaskDataEnvironment
+from SpiffWorkflow.bpmn.serializer.default.task_spec import EventConverter  # type: ignore
 from SpiffWorkflow.bpmn.serializer.helpers.registry import DefaultRegistry  # type: ignore
-from SpiffWorkflow.bpmn.serializer.task_spec import EventBasedGatewayConverter  # type: ignore
 from SpiffWorkflow.bpmn.serializer.workflow import BpmnWorkflowSerializer  # type: ignore
 from SpiffWorkflow.bpmn.specs.bpmn_process_spec import BpmnProcessSpec  # type: ignore
 from SpiffWorkflow.bpmn.workflow import BpmnWorkflow  # type: ignore
 from SpiffWorkflow.exceptions import WorkflowException  # type: ignore
 from SpiffWorkflow.serializer.exceptions import MissingSpecError  # type: ignore
 from SpiffWorkflow.spiff.parser.process import SpiffBpmnParser  # type: ignore
-from SpiffWorkflow.spiff.serializer.config import SPIFF_SPEC_CONFIG  # type: ignore
+from SpiffWorkflow.spiff.serializer.config import SPIFF_CONFIG  # type: ignore
 from SpiffWorkflow.task import Task as SpiffTask  # type: ignore
 from SpiffWorkflow.util.deep_merge import DeepMerge  # type: ignore
 from SpiffWorkflow.util.task import TaskIterator  # type: ignore
 from SpiffWorkflow.util.task import TaskState
 from spiffworkflow_backend.data_stores.json import JSONDataStore
+from spiffworkflow_backend.data_stores.json import JSONDataStoreConverter
 from spiffworkflow_backend.data_stores.json import JSONFileDataStore
+from spiffworkflow_backend.data_stores.json import JSONFileDataStoreConverter
 from spiffworkflow_backend.data_stores.typeahead import TypeaheadDataStore
+from spiffworkflow_backend.data_stores.typeahead import TypeaheadDataStoreConverter
 from spiffworkflow_backend.exceptions.api_error import ApiError
 from spiffworkflow_backend.models.bpmn_process import BpmnProcessModel
 from spiffworkflow_backend.models.bpmn_process_definition import BpmnProcessDefinitionModel
@@ -92,10 +96,10 @@ from spiffworkflow_backend.services.workflow_execution_service import execution_
 from spiffworkflow_backend.specs.start_event import StartEvent
 from sqlalchemy import and_

-StartEvent.register_converter(SPIFF_SPEC_CONFIG)
-JSONDataStore.register_converter(SPIFF_SPEC_CONFIG)
-JSONFileDataStore.register_converter(SPIFF_SPEC_CONFIG)
-TypeaheadDataStore.register_converter(SPIFF_SPEC_CONFIG)
+SPIFF_CONFIG[StartEvent] = EventConverter
+SPIFF_CONFIG[JSONDataStore] = JSONDataStoreConverter
+SPIFF_CONFIG[JSONFileDataStore] = JSONFileDataStoreConverter
+SPIFF_CONFIG[TypeaheadDataStore] = TypeaheadDataStoreConverter

 # Sorry about all this crap. I wanted to move this thing to another file, but
 # importing a bunch of types causes circular imports.
@@ -387,11 +391,10 @@ IdToBpmnProcessSpecMapping = NewType("IdToBpmnProcessSpecMapping", dict[str, Bpm

 class ProcessInstanceProcessor:
     _default_script_engine = CustomBpmnScriptEngine()
-    SERIALIZER_VERSION = "2"
+    SERIALIZER_VERSION = "3"

-    wf_spec_converter = BpmnWorkflowSerializer.configure_workflow_spec_converter(SPIFF_SPEC_CONFIG)
+    wf_spec_converter = BpmnWorkflowSerializer.configure(SPIFF_CONFIG)
     _serializer = BpmnWorkflowSerializer(wf_spec_converter, version=SERIALIZER_VERSION)
-    _event_serializer = EventBasedGatewayConverter(wf_spec_converter)

     PROCESS_INSTANCE_ID_KEY = "process_instance_id"
     VALIDATION_PROCESS_KEY = "validate_only"
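Note: a hedged sketch of the serializer round trip this commit standardizes on. The method names come from the hunks in this diff; the variable names are illustrative.

    registry = BpmnWorkflowSerializer.configure(SPIFF_CONFIG)
    serializer = BpmnWorkflowSerializer(registry, version="3")

    wf_dict = serializer.to_dict(bpmn_process_instance)       # one to_dict for workflows, tasks, and specs
    restored = serializer.from_dict(copy.deepcopy(wf_dict))   # deepcopy per the FIXME in the next hunk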
@@ -779,7 +782,9 @@ class ProcessInstanceProcessor:
                 process_instance_model,
                 bpmn_definition_to_task_definitions_mappings,
             )
-            bpmn_process_instance = ProcessInstanceProcessor._serializer.workflow_from_dict(full_bpmn_process_dict)
+            # FIXME: the from_dict entrypoint in spiff will one day do this copy instead
+            process_copy = copy.deepcopy(full_bpmn_process_dict)
+            bpmn_process_instance = ProcessInstanceProcessor._serializer.from_dict(process_copy)
         except Exception as err:
             raise err
         finally:
@@ -1116,16 +1121,17 @@ class ProcessInstanceProcessor:
         db.session.add(at)
         db.session.commit()

-    def serialize_task_spec(self, task_spec: SpiffTask) -> Any:
+    def serialize_task_spec(self, task_spec: SpiffTask) -> dict:
         """Get a serialized version of a task spec."""
         # The task spec is NOT actually a SpiffTask, it is the task spec attached to a SpiffTask
         # Not sure why mypy accepts this but whatever.
-        return self._serializer.spec_converter.convert(task_spec)
+        result: dict = self._serializer.to_dict(task_spec)
+        return result

     def send_bpmn_event(self, event_data: dict[str, Any]) -> None:
         """Send an event to the workflow."""
         payload = event_data.pop("payload", None)
-        event_definition = self._event_serializer.registry.restore(event_data)
+        event_definition = self._serializer.from_dict(event_data)
         bpmn_event = BpmnEvent(
             event_definition=event_definition,
             payload=payload,
@@ -1519,7 +1525,7 @@ class ProcessInstanceProcessor:
     def serialize(self) -> dict:
         self.check_task_data_size()
         self.preserve_script_engine_state()
-        return self._serializer.workflow_to_dict(self.bpmn_process_instance)  # type: ignore
+        return self._serializer.to_dict(self.bpmn_process_instance)  # type: ignore

     def next_user_tasks(self) -> list[SpiffTask]:
         return self.bpmn_process_instance.get_tasks(state=TaskState.READY, manual=True)  # type: ignore
@@ -5,8 +5,8 @@ from hashlib import sha256
 from typing import TypedDict
 from uuid import UUID

-from SpiffWorkflow.bpmn.serializer.workflow import BpmnWorkflow  # type: ignore
-from SpiffWorkflow.bpmn.serializer.workflow import BpmnWorkflowSerializer
+from SpiffWorkflow.bpmn.serializer.workflow import BpmnWorkflowSerializer  # type: ignore
+from SpiffWorkflow.bpmn.workflow import BpmnWorkflow  # type: ignore
 from SpiffWorkflow.exceptions import WorkflowException  # type: ignore
 from SpiffWorkflow.task import Task as SpiffTask  # type: ignore
 from SpiffWorkflow.util.task import TaskState  # type: ignore
@@ -261,7 +261,7 @@ class TaskService:
         It also returns the relating json_data object so they can be imported later.
         """

-        new_properties_json = self.serializer.task_to_dict(spiff_task)
+        new_properties_json = self.serializer.to_dict(spiff_task)

         if new_properties_json["task_spec"] == "Start":
             new_properties_json["parent"] = None
@@ -318,7 +318,7 @@ class TaskService:
         if self.process_instance.bpmn_process_id is None:
             spiff_workflow = spiff_task.workflow.top_workflow
             bpmn_process = self.add_bpmn_process(
-                bpmn_process_dict=self.serializer.workflow_to_dict(spiff_workflow),
+                bpmn_process_dict=self.serializer.to_dict(spiff_workflow),
                 spiff_workflow=spiff_workflow,
             )
         else:
@@ -326,7 +326,7 @@ class TaskService:
             if bpmn_process is None:
                 spiff_workflow = spiff_task.workflow
                 bpmn_process = self.add_bpmn_process(
-                    bpmn_process_dict=self.serializer.subworkflow_to_dict(subprocess),
+                    bpmn_process_dict=self.serializer.to_dict(subprocess),
                     top_level_process=self.process_instance.bpmn_process,
                     bpmn_process_guid=subprocess_guid,
                     spiff_workflow=spiff_workflow,
@@ -745,5 +745,5 @@ class TaskService:
     ) -> dict:
         user_defined_state = spiff_task.workflow.script_engine.environment.user_defined_state()
         # this helps to convert items like datetime objects to be json serializable
-        converted_data: dict = serializer.data_converter.convert(user_defined_state)
+        converted_data: dict = serializer.registry.convert(user_defined_state)
         return converted_data
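Note: collected from the hunks above, the serializer API renames this commit applies everywhere:

    # serializer.task_to_dict(task)              -> serializer.to_dict(task)
    # serializer.workflow_to_dict(workflow)      -> serializer.to_dict(workflow)
    # serializer.subworkflow_to_dict(subprocess) -> serializer.to_dict(subprocess)
    # serializer.workflow_from_dict(d)           -> serializer.from_dict(d)
    # serializer.data_converter.convert(x)       -> serializer.registry.convert(x)
    # BpmnWorkflowSerializer.configure_workflow_spec_converter(SPIFF_SPEC_CONFIG)
    #                                             -> BpmnWorkflowSerializer.configure(SPIFF_CONFIG)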
@@ -3,8 +3,6 @@ from datetime import timedelta
 from typing import Any

 from SpiffWorkflow.bpmn.parser.util import full_tag  # type: ignore
-from SpiffWorkflow.bpmn.serializer.task_spec import EventConverter  # type: ignore
-from SpiffWorkflow.bpmn.serializer.task_spec import StartEventConverter as DefaultStartEventConverter
 from SpiffWorkflow.bpmn.specs.defaults import StartEvent as DefaultStartEvent  # type: ignore
 from SpiffWorkflow.bpmn.specs.event_definitions.simple import NoneEventDefinition  # type: ignore
 from SpiffWorkflow.bpmn.specs.event_definitions.timer import CycleTimerEventDefinition  # type: ignore
@@ -28,11 +26,6 @@ class StartEvent(DefaultStartEvent):  # type: ignore
         super().__init__(wf_spec, bpmn_id, event_definition, **kwargs)
         self.timer_definition = None

-    @staticmethod
-    def register_converter(spec_config: dict[str, Any]) -> None:
-        spec_config["task_specs"].remove(DefaultStartEventConverter)
-        spec_config["task_specs"].append(StartEventConverter)
-
     @staticmethod
     def register_parser_class(parser_config: dict[str, Any]) -> None:
         parser_config[full_tag("startEvent")] = (SpiffStartEventParser, StartEvent)
@@ -66,8 +59,3 @@ class StartEvent(DefaultStartEvent):  # type: ignore
         if isinstance(self.timer_definition, TimerEventDefinition) and script_engine is not None:
             evaluated_expression = script_engine.evaluate(my_task, self.timer_definition.expression)
         return evaluated_expression
-
-
-class StartEventConverter(EventConverter):  # type: ignore
-    def __init__(self, registry):  # type: ignore
-        super().__init__(StartEvent, registry)
@@ -1308,6 +1308,7 @@ class TestProcessApi(BaseTest):
             headers=self.logged_in_headers(with_super_admin_user),
         )

+        assert response.status_code == 200
         assert response.json is not None
         assert isinstance(response.json["updated_at_in_seconds"], int)
         assert response.json["updated_at_in_seconds"] > 0
@@ -360,6 +360,7 @@ class TestProcessInstanceProcessor(BaseTest):
         assert task_model_to_reset_to is not None
         assert len(process_instance.human_tasks) == 3, "expected 3 human tasks before reset"
         processor = ProcessInstanceProcessor(process_instance)
+        processor = ProcessInstanceProcessor(process_instance)
         ProcessInstanceProcessor.reset_process(process_instance, task_model_to_reset_to.guid)
         assert len(process_instance.human_tasks) == 2, "still expected 2 human tasks after reset"