Merge remote-tracking branch 'origin/main' into feature/carbon_ui

jasquat 2022-11-02 14:17:39 -04:00
commit 2ee06fc53a
36 changed files with 158 additions and 751 deletions

View File

@ -3,23 +3,10 @@ import ast
import copy
import sys
import traceback
import datetime
import dateparser
import pytz
from SpiffWorkflow.bpmn.exceptions import WorkflowTaskExecException
from ..operators import Operator
# Would love to get rid of this altogether, as it rightly belongs in the
# backend, but leaving it here because that's the path of least resistance.
DEFAULT_GLOBALS = {
'timedelta': datetime.timedelta,
'datetime': datetime,
'dateparser': dateparser,
'pytz': pytz,
}
# Copyright (C) 2020 Kelly McDonald
#
@ -112,7 +99,7 @@ class PythonScriptEngine(object):
def __init__(self, default_globals=None, scripting_additions=None):
self.globals = default_globals or DEFAULT_GLOBALS
self.globals = default_globals or {}
self.globals.update(scripting_additions or {})
self.error_tasks = {}
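
With DEFAULT_GLOBALS removed, callers now supply any names their scripts need. A minimal sketch, assuming only the constructor shown above:

    from datetime import timedelta

    from SpiffWorkflow.bpmn.PythonScriptEngine import PythonScriptEngine

    # Nothing is pre-imported for BPMN scripts anymore; pass in every name
    # that script tasks will reference.
    engine = PythonScriptEngine(default_globals={"timedelta": timedelta})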

View File

@ -25,13 +25,6 @@ from .base import TaskSpec
from ..operators import valueof, Attrib, PathAttrib
from ..util.deep_merge import DeepMerge
try:
from celery.app import default_app
except ImportError:
have_celery = False
else:
have_celery = True
logger = logging.getLogger('spiff')
@ -111,7 +104,10 @@ class Celery(TaskSpec):
:type kwargs: dict
:param kwargs: kwargs to pass to celery task.
"""
if not have_celery:
try:
from celery.app import default_app
except ImportError:
raise Exception("Unable to import python-celery imports.")
assert wf_spec is not None
assert name is not None
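
The hunk above moves the celery import from module scope into the constructor, so the package imports cleanly when celery is absent. A hypothetical standalone sketch of the same lazy-import pattern:

    class CeleryTask:
        """Hypothetical spec that touches celery only when instantiated."""

        def __init__(self, name):
            try:
                # Deferred import: a missing celery now fails only this spec,
                # not the whole package import.
                from celery.app import default_app
            except ImportError as exc:
                raise Exception("Unable to import python-celery imports.") from exc
            self.name = name
            self.app = default_app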

View File

@ -74,7 +74,7 @@ Serialization
.. warning::
Serialization Changed in Version 1.1.7.
Support for pre-1.1.7 serialization will be dropped in a future release.
The old serialization method still works but it is deprecated.
To migrate your system to the new version, see "Migrating between
@ -85,8 +85,8 @@ setting. This may not always be the case, we may be executing the workflow in th
may have a user request a web page where we open a specific workflow that we may be in the middle of, do one step of
that workflow and then the user may be back in a few minutes, or maybe a few hours depending on the application.
The :code:`BpmnWorkflowSerializer` class contains a serializer for a workflow containing only standard BPMN Tasks.
Since we are using custom task classes (the Camunda :code:`UserTask` and the DMN :code:`BusinessRuleTask`),
we'll need to supply serializers for those task specs as well.
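
A sketch of wiring those in, assuming the converter classes exported by the :code:`camunda` and :code:`dmn` subpackages:

.. code:: python

    from SpiffWorkflow.bpmn.serializer import BpmnWorkflowSerializer
    from SpiffWorkflow.camunda.serializer import UserTaskConverter
    from SpiffWorkflow.dmn.serializer import BusinessRuleTaskConverter

    # Build a spec converter that understands the custom task classes,
    # then hand it to the serializer.
    wf_spec_converter = BpmnWorkflowSerializer.configure_workflow_spec_converter(
        [UserTaskConverter, BusinessRuleTaskConverter])
    serializer = BpmnWorkflowSerializer(wf_spec_converter)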
Strictly speaking, these are not serializers per se: they actually convert the tasks into dictionaries of
@ -138,7 +138,7 @@ two components:
- a data converter (which handles workflow and task data).
The default workflow spec converter is likely to meet your needs, either on its own, or with the inclusion of
:code:`UserTask` and :code:`BusinessRuleTask` in the :code:`camunda` or :code:`spiff` and :code:`dmn` subpackages
of this library, and all you'll need to do is add them to the list of task converters, as we did above.
However, the default data converter is very simple, adding only JSON-serializable conversions of :code:`datetime`
@ -180,7 +180,7 @@ If you have written any custom task specs, you'll need to implement task spec co
Task Spec converters are also based on the :code:`DictionaryConverter`. You should be able to use the
`BpmnTaskSpecConverter <https://github.com/sartography/SpiffWorkflow/blob/main/SpiffWorkflow/bpmn/serializer/bpmn_converters.py>`_
as a basis for your custom specs. It provides some methods for extracting attributes from Spiff base classes as well as
standard BPMN attributes from tasks that inherit from :code:`BpmnSpecMixin`.
The `Camunda User Task Converter <https://github.com/sartography/SpiffWorkflow/blob/main/SpiffWorkflow/camunda/serializer/task_spec_converters.py>`_
@ -221,7 +221,7 @@ serialize the workflow in the new format:
new_json = serializer.serialize_json(workflow)
However, if you use custom tasks or data serialization, you'll also need to specify workflow spec or data
serializers, as in the examples in the previous section, before you'll be able to serialize with the new serializer.
The code would then look more like this:
.. code:: python
@ -244,7 +244,7 @@ The code would then look more like this:
new_json = serializer.serialize_json(workflow)
Because the serializer is highly customizable, we've made it possible for you to manage your own versions of the
serialization. You can do this by passing a version number into the serializer, which will be embedded in the
json of all workflows. This allows you to modify the serialization and customize it over time, and still manage
the different forms as you make adjustments without leaving people behind.
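
For example (the version string here is illustrative):

.. code:: python

    # Embed an application-specific version marker in all serialized workflows.
    serializer = BpmnWorkflowSerializer(wf_spec_converter, version="my_app_v1")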
@ -253,11 +253,11 @@ Versioned Serializer
As we make changes to Spiff, we may change the serialization format. For example, in 1.1.8, we changed
how subprocesses were handled internally in BPMN workflows and updated how they are serialized. If you have
not overridden our version number with one of your own, the serializer will transform the 1.0 format to the
new 1.1 format.
If you've overridden the serializer version, you may need to incorporate our serialization changes with
your own. You can find our conversions in
`version_migrations.py <https://github.com/sartography/SpiffWorkflow/blob/main/SpiffWorkflow/bpmn/serializer/version_migration.py>`_
Custom Script Engines
@ -277,14 +277,9 @@ We'll cover a simple extension of custom script engine here. There is also an e
a similar engine based on `RestrictedPython <https://restrictedpython.readthedocs.io/en/latest/>`_
included alongside this example.
The default script engine imports the following objects:
The default script engine does not import any objects.
- :code:`timedelta`
- :code:`datetime`
- :code:`dateparser`
- :code:`pytz`
You could add other functions or classes from the standard python modules or any code you've
You could add functions or classes from the standard python modules or any code you've
implemented yourself. Your global environment can be passed in using the `default_globals`
argument when initializing the script engine. In our RestrictedPython example, we use their
`safe_globals` which prevents users from executing some potentially unsafe operations.
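
A minimal sketch of that idea, assuming RestrictedPython is installed:

.. code:: python

    from RestrictedPython import safe_globals

    from SpiffWorkflow.bpmn.PythonScriptEngine import PythonScriptEngine

    # safe_globals supplies restricted builtins; add anything else your
    # scripts should be allowed to use.
    restricted_engine = PythonScriptEngine(default_globals=safe_globals)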

View File

@ -3,6 +3,4 @@
celery==5.2.3
coverage
lxml
dateparser
pytz
.

View File

@ -22,7 +22,7 @@ setup(name='SpiffWorkflow',
license='lGPLv2',
packages=find_packages(exclude=['tests', 'tests.*']),
package_data={'SpiffWorkflow.bpmn.parser.schema': ['*.xsd']},
install_requires=['configparser', 'lxml', 'celery', 'dateparser', 'pytz',
install_requires=['configparser', 'lxml', 'celery',
# required for python 3.7 - https://stackoverflow.com/a/73932581
'importlib-metadata<5.0; python_version <= "3.7"'],
keywords='spiff workflow bpmn engine',

View File

@ -5,6 +5,7 @@ import datetime
import time
from SpiffWorkflow.task import TaskState
from SpiffWorkflow.bpmn.workflow import BpmnWorkflow
from SpiffWorkflow.bpmn.PythonScriptEngine import PythonScriptEngine
from tests.SpiffWorkflow.bpmn.BpmnWorkflowTestCase import BpmnWorkflowTestCase
__author__ = 'kellym'
@ -15,11 +16,12 @@ class NITimerDurationTest(BpmnWorkflowTestCase):
Non-Interrupting Timer boundary test
"""
def setUp(self):
self.script_engine = PythonScriptEngine(default_globals={"timedelta": datetime.timedelta})
spec, subprocesses = self.load_workflow_spec('timer-non-interrupt-boundary.bpmn', 'NonInterruptTimer')
self.workflow = BpmnWorkflow(spec, subprocesses)
self.workflow = BpmnWorkflow(spec, subprocesses, script_engine=self.script_engine)
def load_spec(self):
return
def testRunThroughHappy(self):
self.actual_test(save_restore=False)
@ -28,7 +30,7 @@ class NITimerDurationTest(BpmnWorkflowTestCase):
self.actual_test(save_restore=True)
def actual_test(self,save_restore = False):
ready_tasks = self.workflow.get_tasks(TaskState.READY)
self.assertEqual(1, len(ready_tasks))
self.workflow.complete_task_from_id(ready_tasks[0].id)
@ -47,7 +49,9 @@ class NITimerDurationTest(BpmnWorkflowTestCase):
ready_tasks = self.workflow.get_tasks(TaskState.READY)
if len(ready_tasks) > 1:
break
if save_restore: self.save_restore()
if save_restore:
self.save_restore()
self.workflow.script_engine = self.script_engine
#self.assertEqual(1, len(self.workflow.get_tasks(Task.WAITING)))
time.sleep(0.1)
self.workflow.complete_task_from_id(ready_tasks[0].id)

View File

@ -24,32 +24,11 @@ class PythonScriptEngineTest(BpmnWorkflowTestCase):
workflow.do_engine_steps()
self.task = workflow.last_task
def testDateTimeExpressions(self):
"""Basically, assure that we can use datime, dateutils, and pytz"""
script = """
# Create Current Date as UTC
now_utc = datetime.datetime.now(datetime.timezone.utc)
# Create Current Date at EST
now_est = now_utc.astimezone(pytz.timezone('US/Eastern'))
# Format a date from a date String in UTC
datestr = "2021-09-23 16:11:00 -0000" # 12 pm EST, 4pm UTC
dt = dateparser.parse(datestr)
localtime = dt.astimezone(pytz.timezone('US/Eastern'))
localtime_str = localtime.strftime("%Y-%m-%d %H:%M:%S")
"""
self.expressionEngine.execute(self.task, script)
self.assertEqual(self.task.data['now_utc'].utcoffset().days, 0)
self.assertEqual(self.task.data['now_est'].tzinfo.zone, "US/Eastern")
self.assertEqual(self.task.data['localtime_str'], "2021-09-23 12:11:00")
self.assertTrue(True)
def testFunctionsAndGlobalsAreRemoved(self):
self.assertIn('testvar', self.task.data)
self.assertIn('testvar2', self.task.data)
self.assertIn('sample', self.task.data)
self.assertNotIn('my_function', self.task.data)
self.assertNotIn('datetime', self.task.data)
def suite():
return unittest.TestLoader().loadTestsFromTestCase(PythonScriptEngineTest)

View File

@ -1,5 +1,6 @@
# -*- coding: utf-8 -*-
import datetime
import unittest
from SpiffWorkflow.bpmn.PythonScriptEngine import PythonScriptEngine
@ -8,6 +9,15 @@ from tests.SpiffWorkflow.bpmn.BpmnWorkflowTestCase import BpmnWorkflowTestCase
__author__ = 'sartography'
class CustomScriptEngine(PythonScriptEngine):
"""This is a custom script processor that can be easily injected into Spiff Workflow.
It will execute python code read in from the bpmn. It will also make any scripts in the
scripts directory available for execution. """
def __init__(self):
augment_methods = {
'timedelta': datetime.timedelta,
}
super().__init__(scripting_additions=augment_methods)
class TooManyLoopsTest(BpmnWorkflowTestCase):
@ -23,7 +33,7 @@ class TooManyLoopsTest(BpmnWorkflowTestCase):
def actual_test(self,save_restore = False):
spec, subprocesses = self.load_workflow_spec('too_many_loops*.bpmn', 'loops')
self.workflow = BpmnWorkflow(spec, subprocesses, script_engine=PythonScriptEngine())
self.workflow = BpmnWorkflow(spec, subprocesses, script_engine=CustomScriptEngine())
counter = 0
data = {}
while not self.workflow.is_completed():
@ -34,6 +44,7 @@ class TooManyLoopsTest(BpmnWorkflowTestCase):
counter += 1 # There is a 10 millisecond wait task.
if save_restore:
self.save_restore()
self.workflow.script_engine = CustomScriptEngine()
self.assertEqual(20, self.workflow.last_task.data['counter'])
def test_with_sub_process(self):
@ -41,7 +52,7 @@ class TooManyLoopsTest(BpmnWorkflowTestCase):
# right after a sub-process. So assuring this is fixed.
counter = 0
spec, subprocesses = self.load_workflow_spec('too_many_loops_sub_process.bpmn', 'loops_sub')
self.workflow = BpmnWorkflow(spec, subprocesses, script_engine=PythonScriptEngine())
self.workflow = BpmnWorkflow(spec, subprocesses, script_engine=CustomScriptEngine())
data = {}
while not self.workflow.is_completed():
self.workflow.do_engine_steps()
@ -57,7 +68,7 @@ class TooManyLoopsTest(BpmnWorkflowTestCase):
def test_with_two_call_activities(self):
spec, subprocess = self.load_workflow_spec('sub_in_loop*.bpmn', 'main')
self.workflow = BpmnWorkflow(spec, subprocess)
self.workflow = BpmnWorkflow(spec, subprocess, script_engine=CustomScriptEngine())
self.workflow.do_engine_steps()
for loop in range(3):
ready = self.workflow.get_ready_user_tasks()
@ -66,6 +77,7 @@ class TooManyLoopsTest(BpmnWorkflowTestCase):
self.workflow.refresh_waiting_tasks()
self.workflow.do_engine_steps()
self.save_restore()
self.workflow.script_engine = CustomScriptEngine()
def suite():
return unittest.TestLoader().loadTestsFromTestCase(TooManyLoopsTest)

View File

@ -7,8 +7,8 @@
<bpmn:scriptTask id="Activity_1q1wged" name="Set Future Date">
<bpmn:incoming>Flow_1i73q45</bpmn:incoming>
<bpmn:outgoing>Flow_00e79cz</bpmn:outgoing>
<bpmn:script>futuredate = dateparser.parse('in 1 second') - timedelta(seconds=.95)
futuredate2 = dateparser.parse('September 1 2021 at 10am EDT')</bpmn:script>
<bpmn:script>futuredate = datetime.now() + timedelta(0, 1) - timedelta(seconds=.95)
futuredate2 = datetime.strptime('2021-09-01 10:00','%Y-%m-%d %H:%M')</bpmn:script>
</bpmn:scriptTask>
<bpmn:sequenceFlow id="Flow_1i73q45" sourceRef="Event_0u1rmur" targetRef="Activity_1q1wged" />
<bpmn:sequenceFlow id="Flow_00e79cz" sourceRef="Activity_1q1wged" targetRef="Event_0eb0w95" />

View File

@ -1,5 +1,6 @@
# -*- coding: utf-8 -*-
import datetime
import unittest
import time
@ -21,7 +22,10 @@ class CustomScriptEngine(PythonScriptEngine):
It will execute python code read in from the bpmn. It will also make any scripts in the
scripts directory available for execution. """
def __init__(self):
augment_methods = {'custom_function': my_custom_function}
augment_methods = {
'custom_function': my_custom_function,
'timedelta': datetime.timedelta,
}
super().__init__(scripting_additions=augment_methods)

View File

@ -1,5 +1,6 @@
# -*- coding: utf-8 -*-
import datetime
import unittest
import time
@ -21,7 +22,10 @@ class CustomScriptEngine(PythonScriptEngine):
It will execute python code read in from the bpmn. It will also make any scripts in the
scripts directory available for execution. """
def __init__(self):
augment_methods = {'custom_function': my_custom_function}
augment_methods = {
'custom_function': my_custom_function,
'timedelta': datetime.timedelta,
}
super().__init__(scripting_additions=augment_methods)

View File

@ -3,10 +3,10 @@
import unittest
import datetime
import time
import pytz
from SpiffWorkflow.task import TaskState
from SpiffWorkflow.bpmn.workflow import BpmnWorkflow
from SpiffWorkflow.bpmn.PythonScriptEngine import PythonScriptEngine
from tests.SpiffWorkflow.bpmn.BpmnWorkflowTestCase import BpmnWorkflowTestCase
__author__ = 'kellym'
@ -15,8 +15,12 @@ __author__ = 'kellym'
class TimerDateTest(BpmnWorkflowTestCase):
def setUp(self):
self.script_engine = PythonScriptEngine(default_globals={
"datetime": datetime.datetime,
"timedelta": datetime.timedelta,
})
self.spec, self.subprocesses = self.load_workflow_spec('timer-date-start.bpmn', 'date_timer')
self.workflow = BpmnWorkflow(self.spec, self.subprocesses)
self.workflow = BpmnWorkflow(self.spec, self.subprocesses, script_engine=self.script_engine)
def testRunThroughHappy(self):
self.actual_test(save_restore=False)
@ -42,6 +46,7 @@ class TimerDateTest(BpmnWorkflowTestCase):
break
if save_restore:
self.save_restore()
self.workflow.script_engine = self.script_engine
waiting_tasks = self.workflow.get_tasks(TaskState.WAITING)
@ -50,8 +55,7 @@ class TimerDateTest(BpmnWorkflowTestCase):
loopcount = loopcount +1
endtime = datetime.datetime.now()
self.workflow.do_engine_steps()
tz = pytz.timezone('US/Eastern')
testdate = tz.localize(datetime.datetime.strptime('2021-09-01 10:00','%Y-%m-%d %H:%M'))
testdate = datetime.datetime.strptime('2021-09-01 10:00','%Y-%m-%d %H:%M')
self.assertEqual(self.workflow.last_task.data['futuredate2'],testdate)
self.assertTrue('completed' in self.workflow.last_task.data)
self.assertTrue(self.workflow.last_task.data['completed'])

View File

@ -3,10 +3,12 @@
import unittest
import datetime
import time
from datetime import timedelta
from SpiffWorkflow.bpmn.specs.events import EndEvent
from SpiffWorkflow.task import TaskState
from SpiffWorkflow.bpmn.workflow import BpmnWorkflow
from SpiffWorkflow.bpmn.PythonScriptEngine import PythonScriptEngine
from tests.SpiffWorkflow.bpmn.BpmnWorkflowTestCase import BpmnWorkflowTestCase
__author__ = 'kellym'
@ -14,8 +16,9 @@ __author__ = 'kellym'
class TimerDurationTest(BpmnWorkflowTestCase):
def setUp(self):
self.script_engine = PythonScriptEngine(default_globals={"timedelta": timedelta})
self.spec, self.subprocesses = self.load_workflow_spec('boundary_timer_on_task.bpmn', 'test_timer')
self.workflow = BpmnWorkflow(self.spec, self.subprocesses)
self.workflow = BpmnWorkflow(self.spec, self.subprocesses, script_engine=self.script_engine)
def testRunThroughHappy(self):
self.actual_test(save_restore=False)
@ -43,9 +46,11 @@ class TimerDurationTest(BpmnWorkflowTestCase):
starttime = datetime.datetime.now()
self.workflow = BpmnWorkflow(self.spec)
self.workflow.script_engine = self.script_engine
self.workflow.do_engine_steps()
if save_restore:
self.save_restore()
self.workflow.script_engine = self.script_engine
time.sleep(0.1)
self.workflow.refresh_waiting_tasks()
self.workflow.do_engine_steps()

View File

@ -3,8 +3,10 @@
import unittest
import datetime
import time
from datetime import timedelta
from SpiffWorkflow.task import TaskState
from SpiffWorkflow.bpmn.workflow import BpmnWorkflow
from SpiffWorkflow.bpmn.PythonScriptEngine import PythonScriptEngine
from tests.SpiffWorkflow.bpmn.BpmnWorkflowTestCase import BpmnWorkflowTestCase
__author__ = 'kellym'
@ -13,8 +15,9 @@ __author__ = 'kellym'
class TimerDurationTest(BpmnWorkflowTestCase):
def setUp(self):
self.script_engine = PythonScriptEngine(default_globals={"timedelta": timedelta})
self.spec, self.subprocesses = self.load_workflow_spec('timer.bpmn', 'timer')
self.workflow = BpmnWorkflow(self.spec, self.subprocesses)
self.workflow = BpmnWorkflow(self.spec, self.subprocesses, script_engine=self.script_engine)
def testRunThroughHappy(self):
self.actual_test(save_restore=False)
@ -40,7 +43,9 @@ class TimerDurationTest(BpmnWorkflowTestCase):
while loopcount < 10:
if len(self.workflow.get_tasks(TaskState.READY)) >= 1:
break
if save_restore: self.save_restore()
if save_restore:
self.save_restore()
self.workflow.script_engine = self.script_engine
self.assertEqual(1, len(self.workflow.get_tasks(TaskState.WAITING)))
time.sleep(0.1)
self.workflow.refresh_waiting_tasks()

View File

@ -24,7 +24,7 @@ class ExternalMessageBoundaryTest(BaseTestCase):
def actual_test(self,save_restore = False):
self.workflow.do_engine_steps()
ready_tasks = self.workflow.get_tasks(TaskState.READY)
self.assertEqual(1, len(ready_tasks),'Expected to have only one ready task')

View File

@ -3,8 +3,10 @@
import unittest
import time
from datetime import timedelta
from SpiffWorkflow.task import TaskState
from SpiffWorkflow.bpmn.workflow import BpmnWorkflow
from SpiffWorkflow.bpmn.PythonScriptEngine import PythonScriptEngine
from .BaseTestCase import BaseTestCase
__author__ = 'kellym'
@ -13,8 +15,9 @@ __author__ = 'kellym'
class MessageBoundaryTest(BaseTestCase):
def setUp(self):
self.script_engine = PythonScriptEngine(default_globals={"timedelta": timedelta})
self.spec, self.subprocesses = self.load_workflow_spec('MessageBoundary.bpmn', 'Process_1kjyavs')
self.workflow = BpmnWorkflow(self.spec, self.subprocesses)
self.workflow = BpmnWorkflow(self.spec, self.subprocesses, script_engine=self.script_engine)
def testRunThroughHappy(self):
self.actual_test(save_restore=False)
@ -41,7 +44,9 @@ class MessageBoundaryTest(BaseTestCase):
self.workflow.do_engine_steps()
time.sleep(.01)
self.workflow.refresh_waiting_tasks()
if save_restore: self.save_restore()
if save_restore:
self.save_restore()
self.workflow.script_engine = self.script_engine
ready_tasks = self.workflow.get_tasks(TaskState.READY)
time.sleep(.01)
self.workflow.refresh_waiting_tasks()

View File

@ -1,3 +1,4 @@
import datetime
from decimal import Decimal
from SpiffWorkflow.bpmn.PythonScriptEngine import PythonScriptEngine
@ -7,4 +8,5 @@ from ..DecisionRunner import DecisionRunner
class PythonDecisionRunner(DecisionRunner):
def __init__(self, filename):
super().__init__(PythonScriptEngine(scripting_additions={'Decimal': Decimal}), filename, 'python_engine')
scripting_additions={'Decimal': Decimal, 'datetime': datetime}
super().__init__(PythonScriptEngine(scripting_additions=scripting_additions), filename, 'python_engine')

flask-bpmn/poetry.lock (generated)
View File

@ -667,7 +667,7 @@ SQLAlchemy = ">=0.8.0"
[[package]]
name = "furo"
version = "2022.6.21"
version = "2022.9.29"
description = "A clean customisable Sphinx documentation theme."
category = "dev"
optional = false
@ -675,7 +675,7 @@ python-versions = ">=3.7"
[package.dependencies]
beautifulsoup4 = "*"
pygments = "*"
pygments = ">=2.7"
sphinx = ">=4.0,<6.0"
sphinx-basic-ng = "*"
@ -1206,7 +1206,7 @@ tzdata = {version = "*", markers = "python_version >= \"3.6\""}
[[package]]
name = "pyupgrade"
version = "3.1.0"
version = "3.2.0"
description = "A tool to automatically upgrade syntax for newer versions."
category = "dev"
optional = false
@ -1578,7 +1578,7 @@ pytz = "*"
type = "git"
url = "https://github.com/sartography/SpiffWorkflow"
reference = "main"
resolved_reference = "2d3bd00854ab483e823c4b386430abc9267f536b"
resolved_reference = "5cdb881edc4621502bfd61ce67565cf1148199f0"
[[package]]
name = "sqlalchemy"
@ -1853,7 +1853,7 @@ testing = ["func-timeout", "jaraco.itertools", "pytest (>=6)", "pytest-black (>=
[metadata]
lock-version = "1.1"
python-versions = "^3.7"
content-hash = "f5c0fcc30ff491c23da05e4d24c2dc9c66f43a2dfde028345f9dffd5e91f3f0a"
content-hash = "7d1d5e13f2546566277c6f0b5935753c89804db2abb7a1e76498b582f40f9a01"
[metadata.files]
alabaster = [
@ -2191,8 +2191,8 @@ flask-sqlalchemy = [
{file = "Flask_SQLAlchemy-2.5.1-py2.py3-none-any.whl", hash = "sha256:f12c3d4cc5cc7fdcc148b9527ea05671718c3ea45d50c7e732cceb33f574b390"},
]
furo = [
{file = "furo-2022.6.21-py3-none-any.whl", hash = "sha256:061b68e323345e27fcba024cf33a1e77f3dfd8d9987410be822749a706e2add6"},
{file = "furo-2022.6.21.tar.gz", hash = "sha256:9aa983b7488a4601d13113884bfb7254502c8729942e073a0acb87a5512af223"},
{file = "furo-2022.9.29-py3-none-any.whl", hash = "sha256:559ee17999c0f52728481dcf6b1b0cf8c9743e68c5e3a18cb45a7992747869a9"},
{file = "furo-2022.9.29.tar.gz", hash = "sha256:d4238145629c623609c2deb5384f8d036e2a1ee2a101d64b67b4348112470dbd"},
]
gitdb = [
{file = "gitdb-4.0.9-py3-none-any.whl", hash = "sha256:8033ad4e853066ba6ca92050b9df2f89301b8fc8bf7e9324d412a63f8bf1a8fd"},
@ -2608,8 +2608,8 @@ pytz-deprecation-shim = [
{file = "pytz_deprecation_shim-0.1.0.post0.tar.gz", hash = "sha256:af097bae1b616dde5c5744441e2ddc69e74dfdcb0c263129610d85b87445a59d"},
]
pyupgrade = [
{file = "pyupgrade-3.1.0-py2.py3-none-any.whl", hash = "sha256:77c6101a710be3e24804891e43388cedbee617258e93b09c8c5e58de08617758"},
{file = "pyupgrade-3.1.0.tar.gz", hash = "sha256:7a8d393d85e15e0e2753e90b7b2e173b9d29dfd71e61f93d93e985b242627ed3"},
{file = "pyupgrade-3.2.0-py2.py3-none-any.whl", hash = "sha256:2aa6c40e49ea5a350e6e45b8c7847b1741aef274a35d4f0b2bf91731ec8ab796"},
{file = "pyupgrade-3.2.0.tar.gz", hash = "sha256:70e1ac1e6b34a90fb21f5cada1907ef035b12dfc1d9f13cefd367acf3b530310"},
]
pyyaml = [
{file = "PyYAML-6.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:d4db7c7aef085872ef65a8fd7d6d09a14ae91f691dec3e87ee5ee0539d516f53"},

View File

@ -61,7 +61,7 @@ reorder-python-imports = "^3.9.0"
pre-commit-hooks = "^4.3.0"
sphinx-click = "^4.3.0"
Pygments = "^2.13.0"
pyupgrade = "^3.1.0"
pyupgrade = "^3.2.0"
furo = ">=2021.11.12"
MonkeyType = "^22.2.0"

View File

@ -1,8 +1,8 @@
"""empty message
Revision ID: 3bd6b0b1b8ae
Revision ID: bdd1d64689db
Revises:
Create Date: 2022-10-25 12:31:50.177599
Create Date: 2022-11-02 11:31:50.606843
"""
from alembic import op
@ -10,7 +10,7 @@ import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = '3bd6b0b1b8ae'
revision = 'bdd1d64689db'
down_revision = None
branch_labels = None
depends_on = None
@ -18,13 +18,6 @@ depends_on = None
def upgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.create_table('admin_session',
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('token', sa.String(length=50), nullable=True),
sa.Column('admin_impersonate_uid', sa.String(length=50), nullable=True),
sa.PrimaryKeyConstraint('id'),
sa.UniqueConstraint('token')
)
op.create_table('bpmn_process_id_lookup',
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('bpmn_process_identifier', sa.String(length=255), nullable=True),
@ -183,25 +176,6 @@ def upgrade():
sa.PrimaryKeyConstraint('id'),
sa.UniqueConstraint('task_id', 'process_instance_id', name='active_task_unique')
)
op.create_table('file',
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('name', sa.String(length=50), nullable=False),
sa.Column('type', sa.String(length=50), nullable=False),
sa.Column('content_type', sa.String(length=50), nullable=False),
sa.Column('process_instance_id', sa.Integer(), nullable=True),
sa.Column('task_spec', sa.String(length=50), nullable=True),
sa.Column('irb_doc_code', sa.String(length=50), nullable=False),
sa.Column('md5_hash', sa.String(length=50), nullable=False),
sa.Column('data', sa.LargeBinary(), nullable=True),
sa.Column('size', sa.Integer(), nullable=True),
sa.Column('updated_at_in_seconds', sa.Integer(), nullable=True),
sa.Column('created_at_in_seconds', sa.Integer(), nullable=True),
sa.Column('user_uid', sa.String(length=50), nullable=True),
sa.Column('archived', sa.Boolean(), nullable=True),
sa.ForeignKeyConstraint(['process_instance_id'], ['process_instance.id'], ),
sa.ForeignKeyConstraint(['user_uid'], ['user.uid'], ),
sa.PrimaryKeyConstraint('id')
)
op.create_table('message_correlation',
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('process_instance_id', sa.Integer(), nullable=False),
@ -259,28 +233,6 @@ def upgrade():
sa.ForeignKeyConstraint(['process_instance_id'], ['process_instance.id'], ),
sa.PrimaryKeyConstraint('id')
)
op.create_table('task_event',
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('user_id', sa.Integer(), nullable=False),
sa.Column('process_instance_id', sa.Integer(), nullable=False),
sa.Column('spec_version', sa.String(length=50), nullable=True),
sa.Column('action', sa.String(length=50), nullable=True),
sa.Column('task_id', sa.String(length=50), nullable=True),
sa.Column('task_name', sa.String(length=50), nullable=True),
sa.Column('task_title', sa.String(length=50), nullable=True),
sa.Column('task_type', sa.String(length=50), nullable=True),
sa.Column('task_state', sa.String(length=50), nullable=True),
sa.Column('task_lane', sa.String(length=50), nullable=True),
sa.Column('form_data', sa.JSON(), nullable=True),
sa.Column('mi_type', sa.String(length=50), nullable=True),
sa.Column('mi_count', sa.Integer(), nullable=True),
sa.Column('mi_index', sa.Integer(), nullable=True),
sa.Column('process_name', sa.String(length=50), nullable=True),
sa.Column('date', sa.DateTime(timezone=True), nullable=True),
sa.ForeignKeyConstraint(['process_instance_id'], ['process_instance.id'], ),
sa.ForeignKeyConstraint(['user_id'], ['user.id'], ),
sa.PrimaryKeyConstraint('id')
)
op.create_table('active_task_user',
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('active_task_id', sa.Integer(), nullable=False),
@ -292,19 +244,6 @@ def upgrade():
)
op.create_index(op.f('ix_active_task_user_active_task_id'), 'active_task_user', ['active_task_id'], unique=False)
op.create_index(op.f('ix_active_task_user_user_id'), 'active_task_user', ['user_id'], unique=False)
op.create_table('data_store',
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('updated_at_in_seconds', sa.Integer(), nullable=True),
sa.Column('key', sa.String(length=50), nullable=False),
sa.Column('process_instance_id', sa.Integer(), nullable=True),
sa.Column('task_spec', sa.String(length=50), nullable=True),
sa.Column('spec_id', sa.String(length=50), nullable=True),
sa.Column('user_id', sa.String(length=50), nullable=True),
sa.Column('file_id', sa.Integer(), nullable=True),
sa.Column('value', sa.String(length=50), nullable=True),
sa.ForeignKeyConstraint(['file_id'], ['file.id'], ),
sa.PrimaryKeyConstraint('id')
)
op.create_table('message_correlation_message_instance',
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('message_instance_id', sa.Integer(), nullable=False),
@ -324,11 +263,9 @@ def downgrade():
op.drop_index(op.f('ix_message_correlation_message_instance_message_instance_id'), table_name='message_correlation_message_instance')
op.drop_index(op.f('ix_message_correlation_message_instance_message_correlation_id'), table_name='message_correlation_message_instance')
op.drop_table('message_correlation_message_instance')
op.drop_table('data_store')
op.drop_index(op.f('ix_active_task_user_user_id'), table_name='active_task_user')
op.drop_index(op.f('ix_active_task_user_active_task_id'), table_name='active_task_user')
op.drop_table('active_task_user')
op.drop_table('task_event')
op.drop_table('spiff_logging')
op.drop_table('permission_assignment')
op.drop_table('message_instance')
@ -337,7 +274,6 @@ def downgrade():
op.drop_index(op.f('ix_message_correlation_name'), table_name='message_correlation')
op.drop_index(op.f('ix_message_correlation_message_correlation_property_id'), table_name='message_correlation')
op.drop_table('message_correlation')
op.drop_table('file')
op.drop_table('active_task')
op.drop_table('user_group_assignment')
op.drop_table('secret')
@ -363,5 +299,4 @@ def downgrade():
op.drop_table('group')
op.drop_index(op.f('ix_bpmn_process_id_lookup_bpmn_process_identifier'), table_name='bpmn_process_id_lookup')
op.drop_table('bpmn_process_id_lookup')
op.drop_table('admin_session')
# ### end Alembic commands ###

View File

@ -410,7 +410,7 @@ python-versions = ">=3.6,<4.0"
[[package]]
name = "dateparser"
version = "1.1.1"
version = "1.1.2"
description = "Date parsing library designed to parse dates from HTML pages"
category = "main"
optional = false
@ -639,7 +639,7 @@ werkzeug = "*"
type = "git"
url = "https://github.com/sartography/flask-bpmn"
reference = "main"
resolved_reference = "cedc5253add81a18a274f2cd3289fe36bb138f8b"
resolved_reference = "191f0f32798720c9ce1e5307732c90ac26433298"
[[package]]
name = "Flask-Cors"
@ -820,7 +820,7 @@ python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*"
[[package]]
name = "importlib-metadata"
version = "5.0.0"
version = "4.13.0"
description = "Read metadata from Python packages"
category = "main"
optional = false
@ -1441,7 +1441,7 @@ docs = ["Sphinx (>=5.0.2,<6.0.0)", "alabaster (>=0.7.12,<0.8.0)", "commonmark (>
[[package]]
name = "pytz"
version = "2022.5"
version = "2022.6"
description = "World timezone definitions, modern and historical"
category = "main"
optional = false
@ -1873,7 +1873,7 @@ pytz = "*"
type = "git"
url = "https://github.com/sartography/SpiffWorkflow"
reference = "main"
resolved_reference = "2d3bd00854ab483e823c4b386430abc9267f536b"
resolved_reference = "a6392d19061f623394f5705fb78af23673d3940d"
[[package]]
name = "SQLAlchemy"
@ -2000,6 +2000,14 @@ category = "main"
optional = false
python-versions = "*"
[[package]]
name = "types-dateparser"
version = "1.1.4.1"
description = "Typing stubs for dateparser"
category = "main"
optional = false
python-versions = "*"
[[package]]
name = "types-Flask"
version = "1.1.6"
@ -2248,7 +2256,7 @@ testing = ["flake8 (<5)", "func-timeout", "jaraco.functools", "jaraco.itertools"
[metadata]
lock-version = "1.1"
python-versions = ">=3.9,<3.11"
content-hash = "bfb51ebc4ef76d4a74f670f44dc4d7ca7e91874b096f56521c2776f1837f6a63"
content-hash = "995be3a9a60b515b281f017ff32ff27a52ca178b1980611b348dccac6afb6b89"
[metadata.files]
alabaster = [
@ -2454,8 +2462,8 @@ darglint = [
{file = "darglint-1.8.1.tar.gz", hash = "sha256:080d5106df149b199822e7ee7deb9c012b49891538f14a11be681044f0bb20da"},
]
dateparser = [
{file = "dateparser-1.1.1-py2.py3-none-any.whl", hash = "sha256:9600874312ff28a41f96ec7ccdc73be1d1c44435719da47fea3339d55ff5a628"},
{file = "dateparser-1.1.1.tar.gz", hash = "sha256:038196b1f12c7397e38aad3d61588833257f6f552baa63a1499e6987fa8d42d9"},
{file = "dateparser-1.1.2-py2.py3-none-any.whl", hash = "sha256:d31659dc806a7d88e2b510b2c74f68b525ae531f145c62a57a99bd616b7f90cf"},
{file = "dateparser-1.1.2.tar.gz", hash = "sha256:3821bf191f95b2658c4abd91571c09821ce7a2bc179bf6cefd8b4515c3ccf9ef"},
]
distlib = [
{file = "distlib-0.3.6-py2.py3-none-any.whl", hash = "sha256:f35c4b692542ca110de7ef0bea44d73981caeb34ca0b9b6b2e6d7790dda8f80e"},
@ -2632,8 +2640,8 @@ imagesize = [
{file = "imagesize-1.4.1.tar.gz", hash = "sha256:69150444affb9cb0d5cc5a92b3676f0b2fb7cd9ae39e947a5e11a36b4497cd4a"},
]
importlib-metadata = [
{file = "importlib_metadata-5.0.0-py3-none-any.whl", hash = "sha256:ddb0e35065e8938f867ed4928d0ae5bf2a53b7773871bfe6bcc7e4fcdc7dea43"},
{file = "importlib_metadata-5.0.0.tar.gz", hash = "sha256:da31db32b304314d044d3c12c79bd59e307889b287ad12ff387b3500835fc2ab"},
{file = "importlib_metadata-4.13.0-py3-none-any.whl", hash = "sha256:8a8a81bcf996e74fee46f0d16bd3eaa382a7eb20fd82445c3ad11f4090334116"},
{file = "importlib_metadata-4.13.0.tar.gz", hash = "sha256:dd0173e8f150d6815e098fd354f6414b0f079af4644ddfe90c71e2fc6174346d"},
]
inflection = [
{file = "inflection-0.5.1-py2.py3-none-any.whl", hash = "sha256:f38b2b640938a4f35ade69ac3d053042959b62a0f1076a5bbaa1b9526605a8a2"},
@ -2938,7 +2946,10 @@ orjson = [
{file = "orjson-3.8.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:b68a42a31f8429728183c21fb440c21de1b62e5378d0d73f280e2d894ef8942e"},
{file = "orjson-3.8.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:ff13410ddbdda5d4197a4a4c09969cb78c722a67550f0a63c02c07aadc624833"},
{file = "orjson-3.8.0-cp310-none-win_amd64.whl", hash = "sha256:2d81e6e56bbea44be0222fb53f7b255b4e7426290516771592738ca01dbd053b"},
{file = "orjson-3.8.0-cp311-cp311-macosx_10_7_x86_64.whl", hash = "sha256:200eae21c33f1f8b02a11f5d88d76950cd6fd986d88f1afe497a8ae2627c49aa"},
{file = "orjson-3.8.0-cp311-cp311-macosx_10_9_x86_64.macosx_11_0_arm64.macosx_10_9_universal2.whl", hash = "sha256:9529990f3eab54b976d327360aa1ff244a4b12cb5e4c5b3712fcdd96e8fe56d4"},
{file = "orjson-3.8.0-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:e2defd9527651ad39ec20ae03c812adf47ef7662bdd6bc07dabb10888d70dc62"},
{file = "orjson-3.8.0-cp311-none-win_amd64.whl", hash = "sha256:b21c7af0ff6228ca7105f54f0800636eb49201133e15ddb80ac20c1ce973ef07"},
{file = "orjson-3.8.0-cp37-cp37m-macosx_10_7_x86_64.whl", hash = "sha256:9e6ac22cec72d5b39035b566e4b86c74b84866f12b5b0b6541506a080fb67d6d"},
{file = "orjson-3.8.0-cp37-cp37m-macosx_10_9_x86_64.macosx_11_0_arm64.macosx_10_9_universal2.whl", hash = "sha256:e2f4a5542f50e3d336a18cb224fc757245ca66b1fd0b70b5dd4471b8ff5f2b0e"},
{file = "orjson-3.8.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e1418feeb8b698b9224b1f024555895169d481604d5d884498c1838d7412794c"},
@ -3130,8 +3141,8 @@ python-keycloak = [
{file = "python_keycloak-2.6.0-py3-none-any.whl", hash = "sha256:a1ce102b978beb56d385319b3ca20992b915c2c12d15a2d0c23f1104882f3fb6"},
]
pytz = [
{file = "pytz-2022.5-py2.py3-none-any.whl", hash = "sha256:335ab46900b1465e714b4fda4963d87363264eb662aab5e65da039c25f1f5b22"},
{file = "pytz-2022.5.tar.gz", hash = "sha256:c4d88f472f54d615e9cd582a5004d1e5f624854a6a27a6211591c251f22a6914"},
{file = "pytz-2022.6-py2.py3-none-any.whl", hash = "sha256:222439474e9c98fced559f1709d89e6c9cbf8d79c794ff3eb9f8800064291427"},
{file = "pytz-2022.6.tar.gz", hash = "sha256:e89512406b793ca39f5971bc999cc538ce125c0e51c27941bef4568b460095e2"},
]
pytz-deprecation-shim = [
{file = "pytz_deprecation_shim-0.1.0.post0-py2.py3-none-any.whl", hash = "sha256:8314c9692a636c8eb3bda879b9f119e350e93223ae83e70e80c31675a0fdc1a6"},
@ -3539,6 +3550,10 @@ types-click = [
{file = "types-click-7.1.8.tar.gz", hash = "sha256:b6604968be6401dc516311ca50708a0a28baa7a0cb840efd7412f0dbbff4e092"},
{file = "types_click-7.1.8-py3-none-any.whl", hash = "sha256:8cb030a669e2e927461be9827375f83c16b8178c365852c060a34e24871e7e81"},
]
types-dateparser = [
{file = "types-dateparser-1.1.4.1.tar.gz", hash = "sha256:0f76578bbae15c8b8701b5efd94db98a97ce0a27aedfe6f14a531170de6db97d"},
{file = "types_dateparser-1.1.4.1-py3-none-any.whl", hash = "sha256:dd7b2343bb06225c0e358533609b66a8edfb95e5426d8f658664e7d0f27dea68"},
]
types-Flask = [
{file = "types-Flask-1.1.6.tar.gz", hash = "sha256:aac777b3abfff9436e6b01f6d08171cf23ea6e5be71cbf773aaabb1c5763e9cf"},
{file = "types_Flask-1.1.6-py3-none-any.whl", hash = "sha256:6ab8a9a5e258b76539d652f6341408867298550b19b81f0e41e916825fc39087"},

View File

@ -28,6 +28,7 @@ flask-migrate = "*"
flask-restful = "*"
werkzeug = "*"
SpiffWorkflow = {git = "https://github.com/sartography/SpiffWorkflow", rev = "main"}
#SpiffWorkflow = {develop = true, path = "../SpiffWorkflow" }
sentry-sdk = "^1.10"
sphinx-autoapi = "^2.0"
flask-bpmn = {git = "https://github.com/sartography/flask-bpmn", rev = "main"}
@ -68,6 +69,9 @@ types-pytz = "^2022.1.1"
# for now use my fork
sqlalchemy-stubs = { git = "https://github.com/burnettk/sqlalchemy-stubs.git", rev = "scoped-session-delete" }
simplejson = "^3.17.6"
pytz = "^2022.6"
dateparser = "^1.1.2"
types-dateparser = "^1.1.4.1"
[tool.poetry.dev-dependencies]

View File

@ -21,8 +21,6 @@ from spiffworkflow_backend.models.active_task import ActiveTaskModel # noqa: F4
from spiffworkflow_backend.models.bpmn_process_id_lookup import (
BpmnProcessIdLookup,
) # noqa: F401
from spiffworkflow_backend.models.data_store import DataStoreModel # noqa: F401
from spiffworkflow_backend.models.file import FileModel # noqa: F401
from spiffworkflow_backend.models.message_correlation_property import (
MessageCorrelationPropertyModel,
) # noqa: F401
@ -48,7 +46,6 @@ from spiffworkflow_backend.models.process_instance_report import (
from spiffworkflow_backend.models.refresh_token import RefreshTokenModel # noqa: F401
from spiffworkflow_backend.models.secret_model import SecretModel # noqa: F401
from spiffworkflow_backend.models.spiff_logging import SpiffLoggingModel # noqa: F401
from spiffworkflow_backend.models.task_event import TaskEventModel # noqa: F401
from spiffworkflow_backend.models.user import UserModel # noqa: F401
from spiffworkflow_backend.models.group import GroupModel # noqa: F401

View File

@ -1,31 +0,0 @@
"""Data_store."""
from flask_bpmn.models.db import db
from flask_bpmn.models.db import SpiffworkflowBaseDBModel
from flask_marshmallow.sqla import SQLAlchemyAutoSchema # type: ignore
class DataStoreModel(SpiffworkflowBaseDBModel):
"""DataStoreModel."""
__tablename__ = "data_store"
id = db.Column(db.Integer, primary_key=True)
updated_at_in_seconds = db.Column(db.Integer)
key = db.Column(db.String(50), nullable=False)
process_instance_id = db.Column(db.Integer)
task_spec = db.Column(db.String(50))
spec_id = db.Column(db.String(50))
user_id = db.Column(db.String(50), nullable=True)
file_id = db.Column(db.Integer, db.ForeignKey("file.id"), nullable=True)
value = db.Column(db.String(50))
class DataStoreSchema(SQLAlchemyAutoSchema): # type: ignore
"""DataStoreSchema."""
class Meta:
"""Meta."""
model = DataStoreModel
load_instance = True
include_fk = True
sqla_session = db.session

View File

@ -4,40 +4,10 @@ from dataclasses import field
from datetime import datetime
from typing import Optional
from flask_bpmn.models.db import db
from flask_bpmn.models.db import SpiffworkflowBaseDBModel
from marshmallow import INCLUDE
from marshmallow import Schema
from sqlalchemy.orm import deferred
from sqlalchemy.orm import relationship
from spiffworkflow_backend.helpers.spiff_enum import SpiffEnum
from spiffworkflow_backend.models.data_store import DataStoreModel
class FileModel(SpiffworkflowBaseDBModel):
"""FileModel."""
__tablename__ = "file"
id = db.Column(db.Integer, primary_key=True)
name = db.Column(db.String(50), nullable=False)
type = db.Column(db.String(50), nullable=False)
content_type = db.Column(db.String(50), nullable=False)
process_instance_id = db.Column(
db.Integer, db.ForeignKey("process_instance.id"), nullable=True
)
task_spec = db.Column(db.String(50), nullable=True)
irb_doc_code = db.Column(
db.String(50), nullable=False
) # Code reference to the documents.xlsx reference file.
data_stores = relationship(DataStoreModel, cascade="all,delete", backref="file")
md5_hash = db.Column(db.String(50), unique=False, nullable=False)
data = deferred(db.Column(db.LargeBinary)) # type: ignore
size = db.Column(db.Integer, default=0)
updated_at_in_seconds = db.Column(db.Integer)
created_at_in_seconds = db.Column(db.Integer)
user_uid = db.Column(db.String(50), db.ForeignKey("user.uid"), nullable=True)
archived = db.Column(db.Boolean, default=False)
class FileType(SpiffEnum):

View File

@ -78,7 +78,6 @@ class ProcessInstanceModel(SpiffworkflowBaseDBModel):
process_initiator = relationship("UserModel")
active_tasks = relationship("ActiveTaskModel", cascade="delete") # type: ignore
task_events = relationship("TaskEventModel", cascade="delete") # type: ignore
spiff_logs = relationship("SpiffLoggingModel", cascade="delete") # type: ignore
message_instances = relationship("MessageInstanceModel", cascade="delete") # type: ignore
message_correlations = relationship("MessageCorrelationModel", cascade="delete") # type: ignore

View File

@ -1,100 +0,0 @@
"""Task_event."""
from __future__ import annotations
import enum
from typing import TYPE_CHECKING
from flask_bpmn.models.db import db
from flask_bpmn.models.db import SpiffworkflowBaseDBModel
from marshmallow import fields
from marshmallow import INCLUDE
from marshmallow import Schema
from sqlalchemy import func
if TYPE_CHECKING:
from spiffworkflow_backend.models.process_instance import (
ProcessInstanceModel,
) # noqa: F401
class TaskAction(enum.Enum):
"""TaskAction."""
COMPLETE = "COMPLETE"
TOKEN_RESET = "TOKEN_RESET" # noqa: S105
HARD_RESET = "HARD_RESET"
SOFT_RESET = "SOFT_RESET"
ASSIGNMENT = "ASSIGNMENT" # Whenever the lane changes between tasks we assign the task to specific user.
class TaskEventModel(SpiffworkflowBaseDBModel):
"""TaskEventModel."""
__tablename__ = "task_event"
id = db.Column(db.Integer, primary_key=True)
user_id = db.Column(
db.Integer, db.ForeignKey("user.id"), nullable=False
) # In some cases the unique user id may not exist in the db yet.
process_instance_id = db.Column(
db.Integer, db.ForeignKey("process_instance.id"), nullable=False
)
spec_version = db.Column(db.String(50))
action = db.Column(db.String(50))
task_id = db.Column(db.String(50))
task_name = db.Column(db.String(50))
task_title = db.Column(db.String(50))
task_type = db.Column(db.String(50))
task_state = db.Column(db.String(50))
task_lane = db.Column(db.String(50))
form_data = db.Column(
db.JSON
) # And form data submitted when the task was completed.
mi_type = db.Column(db.String(50))
mi_count = db.Column(db.Integer)
mi_index = db.Column(db.Integer)
process_name = db.Column(db.String(50))
date = db.Column(db.DateTime(timezone=True), default=func.now())
class TaskEvent:
"""TaskEvent."""
def __init__(self, model: TaskEventModel, process_instance: ProcessInstanceModel):
"""__init__."""
self.id = model.id
self.process_instance = process_instance
self.user_id = model.user_id
self.action = model.action
self.task_id = model.task_id
self.task_title = model.task_title
self.task_name = model.task_name
self.task_type = model.task_type
self.task_state = model.task_state
self.task_lane = model.task_lane
self.date = model.date
class TaskEventSchema(Schema):
"""TaskEventSchema."""
process_instance = fields.Nested("ProcessInstanceMetadataSchema", dump_only=True)
task_lane = fields.String(allow_none=True, required=False)
class Meta:
"""Meta."""
model = TaskEvent
additional = [
"id",
"user_id",
"action",
"task_id",
"task_title",
"task_name",
"task_type",
"task_state",
"task_lane",
"date",
]
unknown = INCLUDE

View File

@ -112,12 +112,3 @@ class UserModelSchema(Schema):
id = marshmallow.fields.String(required=True)
username = marshmallow.fields.String(required=True)
class AdminSessionModel(SpiffworkflowBaseDBModel):
"""AdminSessionModel."""
__tablename__ = "admin_session"
id = db.Column(db.Integer, primary_key=True)
token = db.Column(db.String(50), unique=True)
admin_impersonate_uid = db.Column(db.String(50))

View File

@ -424,7 +424,6 @@ def process_instance_run(
task=task,
) from e
processor.save()
ProcessInstanceService.update_task_assignments(processor)
if not current_app.config["RUN_BACKGROUND_SCHEDULER"]:
MessageService.process_message_instances()
@ -1071,7 +1070,9 @@ def task_show(process_instance_id: int, task_id: str) -> flask.wrappers.Response
task.form_ui_schema = ui_form_contents
if task.properties and task.data and "instructionsForEndUser" in task.properties:
print(f"task.properties['instructionsForEndUser']: {task.properties['instructionsForEndUser']}")
print(
f"task.properties['instructionsForEndUser']: {task.properties['instructionsForEndUser']}"
)
if task.properties["instructionsForEndUser"]:
task.properties["instructionsForEndUser"] = render_jinja_template(
task.properties["instructionsForEndUser"], task.data
@ -1121,8 +1122,6 @@ def task_submit(
# last_index = next_task.task_info()["mi_index"]
# next_task = processor.next_task()
ProcessInstanceService.update_task_assignments(processor)
next_active_task_assigned_to_me = (
ActiveTaskModel.query.filter_by(process_instance_id=process_instance_id)
.order_by(asc(ActiveTaskModel.id)) # type: ignore

View File

@ -1,5 +1,6 @@
"""Get_env."""
from typing import Any
from flask import current_app
from spiffworkflow_backend.models.script_attributes_context import (
@ -22,4 +23,4 @@ class GetFrontendUrl(Script):
**kwargs: Any
) -> Any:
"""Run."""
return current_app.config['SPIFFWORKFLOW_FRONTEND_URL']
return current_app.config["SPIFFWORKFLOW_FRONTEND_URL"]

View File

@ -7,6 +7,7 @@ import os
import re
import time
from datetime import datetime
from datetime import timedelta
from typing import Any
from typing import Callable
from typing import Dict
@ -17,6 +18,8 @@ from typing import Tuple
from typing import TypedDict
from typing import Union
import dateparser
import pytz
from flask import current_app
from flask_bpmn.api.api_error import ApiError
from flask_bpmn.models.db import db
@ -25,7 +28,6 @@ from RestrictedPython import safe_globals # type: ignore
from SpiffWorkflow.bpmn.exceptions import WorkflowTaskExecException # type: ignore
from SpiffWorkflow.bpmn.parser.ValidationException import ValidationException # type: ignore
from SpiffWorkflow.bpmn.PythonScriptEngine import Box # type: ignore
from SpiffWorkflow.bpmn.PythonScriptEngine import DEFAULT_GLOBALS
from SpiffWorkflow.bpmn.PythonScriptEngine import PythonScriptEngine
from SpiffWorkflow.bpmn.serializer import BpmnWorkflowSerializer # type: ignore
from SpiffWorkflow.bpmn.specs.BpmnProcessSpec import BpmnProcessSpec # type: ignore
@ -77,8 +79,6 @@ from spiffworkflow_backend.models.process_model import ProcessModelInfo
from spiffworkflow_backend.models.script_attributes_context import (
ScriptAttributesContext,
)
from spiffworkflow_backend.models.task_event import TaskAction
from spiffworkflow_backend.models.task_event import TaskEventModel
from spiffworkflow_backend.models.user import UserModel
from spiffworkflow_backend.models.user import UserModelSchema
from spiffworkflow_backend.scripts.script import Script
@ -98,19 +98,6 @@ def _import(name: str, glbls: Dict[str, Any], *args: Any) -> None:
raise ImportError(f"Import not allowed: {name}", name=name)
DEFAULT_GLOBALS.update(
{
"datetime": datetime,
"time": time,
"decimal": decimal,
"_strptime": _strptime,
}
)
# This will overwrite the standard builtins
DEFAULT_GLOBALS.update(safe_globals)
DEFAULT_GLOBALS["__builtins__"]["__import__"] = _import
class PotentialOwnerIdList(TypedDict):
"""PotentialOwnerIdList."""
@ -143,7 +130,21 @@ class CustomBpmnScriptEngine(PythonScriptEngine): # type: ignore
def __init__(self) -> None:
"""__init__."""
super().__init__(default_globals=DEFAULT_GLOBALS)
default_globals = {
"timedelta": timedelta,
"datetime": datetime,
"dateparser": dateparser,
"pytz": pytz,
"time": time,
"decimal": decimal,
"_strptime": _strptime,
}
# This will overwrite the standard builtins
default_globals.update(safe_globals)
default_globals["__builtins__"]["__import__"] = _import
super().__init__(default_globals=default_globals)
def __get_augment_methods(self, task: SpiffTask) -> Dict[str, Callable]:
"""__get_augment_methods."""
@ -416,7 +417,7 @@ class ProcessInstanceProcessor:
"""Add_user_info_to_process_instance."""
current_user = None
if UserService.has_user():
current_user = UserService.current_user(allow_admin_impersonate=True)
current_user = UserService.current_user()
# fall back to initiator if g.user is not set
# this is for background processes when there will not be a user
@ -430,59 +431,6 @@ class ProcessInstanceProcessor:
for task in tasks:
task.data["current_user"] = current_user_data
@staticmethod
def reset(
process_instance_model: ProcessInstanceModel, clear_data: bool = False
) -> None:
"""Resets the process_instance back to an unstarted state - where nothing has happened yet.
If clear_data is set to false, then the information
previously used in forms will be re-populated when the form is re-
displayed, and any files that were updated will remain in place, otherwise
files will also be cleared out.
"""
# Try to execute a cancel notify
try:
bpmn_process_instance = (
ProcessInstanceProcessor.__get_bpmn_process_instance(
process_instance_model
)
)
ProcessInstanceProcessor.__cancel_notify(bpmn_process_instance)
except Exception as e:
db.session.rollback() # in case the above left the database with a bad transaction
current_app.logger.error(
"Unable to send a cancel notify for process_instance %s during a reset."
" Continuing with the reset anyway so we don't get in an unresolvable"
" state. An %s error occured with the following information: %s"
% (process_instance_model.id, e.__class__.__name__, str(e))
)
process_instance_model.bpmn_json = None
process_instance_model.status = ProcessInstanceStatus.not_started.value
# clear out any task assignments
db.session.query(TaskEventModel).filter(
TaskEventModel.process_instance_id == process_instance_model.id
).filter(TaskEventModel.action == TaskAction.ASSIGNMENT.value).delete()
if clear_data:
# Clear out data in previous task events
task_events = (
db.session.query(TaskEventModel)
.filter(TaskEventModel.process_instance_id == process_instance_model.id)
.all()
)
for task_event in task_events:
task_event.form_data = {}
db.session.add(task_event)
# Remove any uploaded files.
# TODO: grab UserFileService
# files = FileModel.query.filter(FileModel.process_instance_id == process_instance_model.id).all()
# for file in files:
# UserFileService().delete_file(file.id)
db.session.commit()
@staticmethod
def get_bpmn_process_instance_from_workflow_spec(
spec: BpmnProcessSpec,

View File

@ -1,7 +1,6 @@
"""Process_instance_service."""
import time
from typing import Any
from typing import Dict
from typing import List
from typing import Optional
@ -9,15 +8,12 @@ from flask import current_app
from flask_bpmn.api.api_error import ApiError
from flask_bpmn.models.db import db
from SpiffWorkflow.task import Task as SpiffTask # type: ignore
from SpiffWorkflow.util.deep_merge import DeepMerge # type: ignore
from spiffworkflow_backend.models.process_instance import ProcessInstanceApi
from spiffworkflow_backend.models.process_instance import ProcessInstanceModel
from spiffworkflow_backend.models.process_instance import ProcessInstanceStatus
from spiffworkflow_backend.models.task import MultiInstanceType
from spiffworkflow_backend.models.task import Task
from spiffworkflow_backend.models.task_event import TaskAction
from spiffworkflow_backend.models.task_event import TaskEventModel
from spiffworkflow_backend.models.user import UserModel
from spiffworkflow_backend.services.authorization_service import AuthorizationService
from spiffworkflow_backend.services.git_service import GitService
@ -108,70 +104,9 @@ class ProcessInstanceService:
is_review=is_review_value,
title=title_value,
)
next_task_trying_again = next_task
if (
not next_task
): # The Next Task can be requested to be a certain task, useful for parallel tasks.
# This may or may not work, sometimes there is no next task to complete.
next_task_trying_again = processor.next_task()
if next_task_trying_again is not None:
previous_form_data = ProcessInstanceService.get_previously_submitted_data(
processor.process_instance_model.id, next_task_trying_again
)
# DeepMerge.merge(next_task_trying_again.data, previous_form_data)
next_task_trying_again.data = DeepMerge.merge(
previous_form_data, next_task_trying_again.data
)
process_instance_api.next_task = (
ProcessInstanceService.spiff_task_to_api_task(
next_task_trying_again, add_docs_and_forms=True
)
)
# TODO: Hack for now, until we decide how to implment forms
process_instance_api.next_task.form = None
# Update the state of the task to locked if the current user does not own the task.
# user_uids = WorkflowService.get_users_assigned_to_task(processor, next_task)
# if not UserService.in_list(user_uids, allow_admin_impersonate=True):
# workflow_api.next_task.state = WorkflowService.TASK_STATE_LOCKED
return process_instance_api
@staticmethod
def get_previously_submitted_data(
process_instance_id: int, spiff_task: SpiffTask
) -> Dict[Any, Any]:
"""If the user has completed this task previously, find the form data for the last submission."""
query = (
db.session.query(TaskEventModel)
.filter_by(process_instance_id=process_instance_id)
.filter_by(task_name=spiff_task.task_spec.name)
.filter_by(action=TaskAction.COMPLETE.value)
)
if (
hasattr(spiff_task, "internal_data")
and "runtimes" in spiff_task.internal_data
):
query = query.filter_by(mi_index=spiff_task.internal_data["runtimes"])
latest_event = query.order_by(TaskEventModel.date.desc()).first()
if latest_event:
if latest_event.form_data is not None:
return latest_event.form_data # type: ignore
else:
missing_form_error = (
f"We have lost data for workflow {process_instance_id}, "
f"task {spiff_task.task_spec.name}, it is not in the task event model, "
f"and it should be."
)
current_app.logger.exception("missing_form_data", missing_form_error)
return {}
else:
return {}
def get_process_instance(self, process_instance_id: int) -> Any:
"""Get_process_instance."""
result = (
@ -181,30 +116,6 @@ class ProcessInstanceService:
)
return result
@staticmethod
def update_task_assignments(processor: ProcessInstanceProcessor) -> None:
"""For every upcoming user task, log a task action that connects the assigned user(s) to that task.
All existing assignment actions for this workflow are removed from the database,
so that only the current valid actions are available. update_task_assignments
should be called whenever progress is made on a workflow.
"""
db.session.query(TaskEventModel).filter(
TaskEventModel.process_instance_id == processor.process_instance_model.id
).filter(TaskEventModel.action == TaskAction.ASSIGNMENT.value).delete()
db.session.commit()
tasks = processor.get_current_user_tasks()
for task in tasks:
user_ids = ProcessInstanceService.get_users_assigned_to_task(
processor, task
)
for user_id in user_ids:
ProcessInstanceService().log_task_action(
user_id, processor, task, TaskAction.ASSIGNMENT.value
)
@staticmethod
def get_users_assigned_to_task(
processor: ProcessInstanceProcessor, spiff_task: SpiffTask
@ -279,52 +190,8 @@ class ProcessInstanceService:
spiff_task.update_data(dot_dct)
# ProcessInstanceService.post_process_form(spiff_task) # some properties may update the data store.
processor.complete_task(spiff_task)
# Log the action before doing the engine steps, as doing so could affect the state of the task
# the workflow could wrap around in the engine steps, and the task could jump from being completed to
# another state. What we are logging here is the completion.
ProcessInstanceService.log_task_action(
user.id, processor, spiff_task, TaskAction.COMPLETE.value
)
processor.do_engine_steps(save=True)
@staticmethod
def log_task_action(
user_id: int,
processor: ProcessInstanceProcessor,
spiff_task: SpiffTask,
action: str,
) -> None:
"""Log_task_action."""
task = ProcessInstanceService.spiff_task_to_api_task(spiff_task)
form_data = ProcessInstanceService.extract_form_data(
spiff_task.data, spiff_task
)
multi_instance_type_value = ""
if task.multi_instance_type:
multi_instance_type_value = task.multi_instance_type.value
task_event = TaskEventModel(
# study_id=processor.workflow_model.study_id,
user_id=user_id,
process_instance_id=processor.process_instance_model.id,
# workflow_spec_id=processor.workflow_model.workflow_spec_id,
action=action,
task_id=str(task.id),
task_name=task.name,
task_title=task.title,
task_type=str(task.type),
task_state=task.state,
task_lane=task.lane,
form_data=form_data,
mi_type=multi_instance_type_value, # Some tasks have a repeat behavior.
mi_count=task.multi_instance_count, # This is the number of times the task could repeat.
mi_index=task.multi_instance_index, # And the index of the currently repeating task.
process_name=task.process_name,
# date=datetime.utcnow(), <=== For future reference, NEVER do this. Let the database set the time.
)
db.session.add(task_event)
db.session.commit()
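Illustrative only, reusing the query style from get_previously_submitted_data above: reading back the completion events this method writes. No new API is assumed; every attribute used here appears elsewhere in this section:

    # Fetch completion events for this instance, newest first.
    completion_events = (
        db.session.query(TaskEventModel)
        .filter_by(process_instance_id=processor.process_instance_model.id)
        .filter_by(action=TaskAction.COMPLETE.value)
        .order_by(TaskEventModel.date.desc())
        .all()
    )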
@staticmethod
def extract_form_data(latest_data: dict, task: SpiffTask) -> dict:
"""Extracts data from the latest_data that is directly related to the form that is being submitted."""

View File

@ -11,7 +11,6 @@ from spiffworkflow_backend.models.active_task import ActiveTaskModel
from spiffworkflow_backend.models.active_task_user import ActiveTaskUserModel
from spiffworkflow_backend.models.group import GroupModel
from spiffworkflow_backend.models.principal import PrincipalModel
from spiffworkflow_backend.models.user import AdminSessionModel
from spiffworkflow_backend.models.user import UserModel
from spiffworkflow_backend.models.user_group_assignment import UserGroupAssignmentModel
@ -103,27 +102,6 @@ class UserService:
"""Has_user."""
return "token" in g and bool(g.token) and "user" in g and bool(g.user)
# Returns true if the current user is an admin.
@staticmethod
def user_is_admin() -> bool:
"""User_is_admin."""
return UserService.has_user() and g.user.is_admin()
# Returns true if the current admin user is impersonating another user.
@staticmethod
def admin_is_impersonating() -> bool:
"""Admin_is_impersonating."""
if UserService.user_is_admin():
admin_session = UserService.get_admin_session()
return admin_session is not None
else:
raise ApiError(
"unauthorized",
"You do not have permissions to do this.",
status_code=403,
)
# Returns true if the given user uid is different from the current user's uid.
@staticmethod
def is_different_user(uid: str) -> bool:
@ -131,84 +109,16 @@ class UserService:
return UserService.has_user() and uid is not None and uid != g.user.uid
@staticmethod
def current_user(allow_admin_impersonate: bool = False) -> Any:
def current_user() -> Any:
"""Current_user."""
if not UserService.has_user():
raise ApiError(
"logged_out", "You are no longer logged in.", status_code=401
)
# Admins can pretend to be different users and act on a user's behalf in
# some circumstances.
if (
UserService.user_is_admin()
and allow_admin_impersonate
and UserService.admin_is_impersonating()
):
return UserService.get_admin_session_user()
else:
return g.user
# Admins can pretend to be different users and act on a user's behalf in some circumstances.
# This method allows an admin user to start impersonating another user with the given uid.
# Stops impersonating if the uid is None or invalid.
@staticmethod
def start_impersonating(uid: Optional[str] = None) -> None:
"""Start_impersonating."""
if not UserService.has_user():
raise ApiError(
"logged_out", "You are no longer logged in.", status_code=401
)
if not UserService.user_is_admin():
raise ApiError(
"unauthorized",
"You do not have permissions to do this.",
status_code=403,
)
if uid is None:
raise ApiError("invalid_uid", "Please provide a valid user uid.")
if UserService.is_different_user(uid):
# Impersonate the user if the given uid is valid.
impersonate_user = (
db.session.query(UserModel).filter(UserModel.uid == uid).first()
)
if impersonate_user is not None:
g.impersonate_user = impersonate_user
# Store the uid and user session token.
db.session.query(AdminSessionModel).filter(
AdminSessionModel.token == g.token
).delete()
db.session.add(
AdminSessionModel(token=g.token, admin_impersonate_uid=uid)
)
db.session.commit()
else:
raise ApiError("invalid_uid", "The uid provided is not valid.")
return g.user
@staticmethod
def stop_impersonating() -> None:
"""Stop_impersonating."""
if not UserService.has_user():
raise ApiError(
"logged_out", "You are no longer logged in.", status_code=401
)
# Clear out the current impersonating user.
if "impersonate_user" in g:
del g.impersonate_user
admin_session = UserService.get_admin_session()
if admin_session:
db.session.delete(admin_session)
db.session.commit()
@staticmethod
def in_list(uids: list[str], allow_admin_impersonate: bool = False) -> bool:
def in_list(uids: list[str]) -> bool:
"""Returns true if the current user's id is in the given list of ids.
False if there is no user, or the user is not in the list.
@ -216,46 +126,11 @@ class UserService:
if (
UserService.has_user()
): # If someone is logged in, lock tasks that don't belong to them.
user = UserService.current_user(allow_admin_impersonate)
user = UserService.current_user()
if user.uid in uids:
return True
return False
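A sketch of the lock check hinted at in the commented-out code near the top of this section, updated for the new in_list signature; `next_task` and the locked-state value are assumptions, not taken from this commit:

    # Hypothetical: lock the next task if it is not assigned to the current user.
    user_uids = ProcessInstanceService.get_users_assigned_to_task(processor, next_task)
    if not UserService.in_list(user_uids):
        process_instance_api.next_task.state = "LOCKED"  # state constant assumed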
@staticmethod
def get_admin_session() -> Any:
"""Get_admin_session."""
if UserService.user_is_admin():
return (
db.session.query(AdminSessionModel)
.filter(AdminSessionModel.token == g.token)
.first()
)
else:
raise ApiError(
"unauthorized",
"You do not have permissions to do this.",
status_code=403,
)
@staticmethod
def get_admin_session_user() -> Any:
"""Get_admin_session_user."""
if UserService.user_is_admin():
admin_session = UserService.get_admin_session()
if admin_session is not None:
return (
db.session.query(UserModel)
.filter(UserModel.uid == admin_session.admin_impersonate_uid)
.first()
)
else:
raise ApiError(
"unauthorized",
"You do not have permissions to do this.",
status_code=403,
)
@staticmethod
def get_principal_by_user_id(user_id: int) -> PrincipalModel:
"""Get_principal_by_user_id."""

View File

@ -11,13 +11,13 @@
<bpmn:scriptTask id="get_group_a">
<bpmn:incoming>Flow_1j4jzft</bpmn:incoming>
<bpmn:outgoing>Flow_10xyk22</bpmn:outgoing>
<bpmn:script>members_a = get_group_members("GroupA")</bpmn:script>
<bpmn:script>members_a = get_group_members("groupA")</bpmn:script>
</bpmn:scriptTask>
<bpmn:sequenceFlow id="Flow_10xyk22" sourceRef="get_group_a" targetRef="get_group_b" />
<bpmn:scriptTask id="get_group_b">
<bpmn:incoming>Flow_10xyk22</bpmn:incoming>
<bpmn:outgoing>Flow_01xr2ac</bpmn:outgoing>
<bpmn:script>members_b = get_group_members("GroupB")</bpmn:script>
<bpmn:script>members_b = get_group_members("groupB")</bpmn:script>
</bpmn:scriptTask>
<bpmn:sequenceFlow id="Flow_01xr2ac" sourceRef="get_group_b" targetRef="Event_1s123jg" />
</bpmn:process>
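The script tasks above assume a get_group_members helper is exposed to the script engine's evaluation scope. A hedged sketch of such a helper; the GroupModel field names (identifier, users) and the username attribute are assumptions, not taken from this commit:

    def get_group_members(group_identifier: str) -> list[str]:
        # Resolve a group name like "groupA" to its member usernames (fields assumed).
        group = (
            db.session.query(GroupModel)
            .filter_by(identifier=group_identifier)
            .first()
        )
        return [user.username for user in group.users] if group else []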

View File

@ -275,25 +275,6 @@ class BaseTest:
user: UserModel, _redirect_url: str = "http://some/frontend/url"
) -> Dict[str, str]:
"""Logged_in_headers."""
# if user is None:
# uid = 'test_user'
# user_info = {'uid': 'test_user'}
# else:
# uid = user.uid
# user_info = {'uid': user.uid}
# query_string = user_info_to_query_string(user_info, redirect_url)
# rv = self.app.get("/v1.0/login%s" % query_string, follow_redirects=False)
# self.assertTrue(rv.status_code == 302)
# self.assertTrue(str.startswith(rv.location, redirect_url))
#
# user_model = session.query(UserModel).filter_by(uid=uid).first()
# self.assertIsNotNone(user_model.ldap_info.display_name)
# self.assertEqual(user_model.uid, uid)
# self.assertTrue('user' in g, 'User should be in Flask globals')
# user = UserService.current_user(allow_admin_impersonate=True)
# self.assertEqual(uid, user.uid, 'Logged in user should match given user uid')
return dict(Authorization="Bearer " + user.encode_auth_token())
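Hypothetical usage in a test; the endpoint path is an assumption, but the header handling matches the client calls elsewhere in this file:

    headers = self.logged_in_headers(with_super_admin_user)
    response = client.get("/v1.0/process-groups", headers=headers)  # path assumed
    assert response.status_code == 200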
def get_test_data_file_contents(

View File

@ -25,7 +25,6 @@ from spiffworkflow_backend.models.process_instance_report import (
)
from spiffworkflow_backend.models.process_model import NotificationType
from spiffworkflow_backend.models.process_model import ProcessModelInfoSchema
from spiffworkflow_backend.models.task_event import TaskEventModel
from spiffworkflow_backend.models.user import UserModel
from spiffworkflow_backend.services.authorization_service import AuthorizationService
from spiffworkflow_backend.services.file_system_service import FileSystemService
@ -1088,16 +1087,7 @@ class TestProcessApi(BaseTest):
f"/v1.0/process-models/{process_group_id}/{process_model_id}/process-instances/{process_instance_id}/run",
headers=self.logged_in_headers(with_super_admin_user),
)
assert response.json is not None
task_events = (
db.session.query(TaskEventModel)
.filter(TaskEventModel.process_instance_id == process_instance_id)
.all()
)
assert len(task_events) == 1
task_event = task_events[0]
assert task_event.user_id == with_super_admin_user.id
delete_response = client.delete(
f"/v1.0/process-models/{process_group_id}/{process_model_id}/process-instances/{process_instance_id}",
@ -1105,40 +1095,6 @@ class TestProcessApi(BaseTest):
)
assert delete_response.status_code == 200
def test_process_instance_run_user_task_creates_task_event(
self,
app: Flask,
client: FlaskClient,
with_db_and_bpmn_file_cleanup: None,
with_super_admin_user: UserModel,
) -> None:
"""Test_process_instance_run_user_task."""
process_group_id = "my_process_group"
process_model_id = "user_task"
headers = self.logged_in_headers(with_super_admin_user)
response = self.create_process_instance(
client, process_group_id, process_model_id, headers
)
assert response.json is not None
process_instance_id = response.json["id"]
response = client.post(
f"/v1.0/process-models/{process_group_id}/{process_model_id}/process-instances/{process_instance_id}/run",
headers=self.logged_in_headers(with_super_admin_user),
)
assert response.json is not None
task_events = (
db.session.query(TaskEventModel)
.filter(TaskEventModel.process_instance_id == process_instance_id)
.all()
)
assert len(task_events) == 1
task_event = task_events[0]
assert task_event.user_id == with_super_admin_user.id
# TODO: When user tasks work, we need to add some more assertions for action, task_state, etc.
def test_task_show(
self,
app: Flask,