Mirror of https://github.com/sartography/spiff-arena.git (synced 2025-02-23 06:38:24 +00:00)

SpiffWorkflow cold start improvements (#13)

This commit is contained in:
parent cf8312cf36
commit fdec3e1e12
@@ -3,23 +3,10 @@ import ast
import copy
import sys
import traceback
import datetime

import dateparser
import pytz

from SpiffWorkflow.bpmn.exceptions import WorkflowTaskExecException
from ..operators import Operator

# Would love to get rid of this altogether, as it rightly belongs in the
# backend, but leaving it here because that's the path of least resistance.
DEFAULT_GLOBALS = {
    'timedelta': datetime.timedelta,
    'datetime': datetime,
    'dateparser': dateparser,
    'pytz': pytz,
}


# Copyright (C) 2020 Kelly McDonald
#
@@ -112,7 +99,7 @@ class PythonScriptEngine(object):

    def __init__(self, default_globals=None, scripting_additions=None):

        self.globals = default_globals or DEFAULT_GLOBALS
        self.globals = default_globals or {}
        self.globals.update(scripting_additions or {})
        self.error_tasks = {}
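These hunks drop the module-level DEFAULT_GLOBALS dictionary (and the dateparser and pytz imports that fed it), so importing PythonScriptEngine no longer pays that cost at start-up, and a freshly constructed engine begins with empty globals. A minimal sketch of what a caller that relied on the old defaults now does instead, mirroring the tests and backend changes later in this diff:

    import datetime

    import dateparser  # only needed if workflow scripts actually call dateparser
    import pytz        # only needed if workflow scripts actually use pytz

    from SpiffWorkflow.bpmn.PythonScriptEngine import PythonScriptEngine

    # Globals are now opt-in: pass exactly what your process scripts need.
    engine = PythonScriptEngine(default_globals={
        "datetime": datetime,
        "timedelta": datetime.timedelta,
        "dateparser": dateparser,
        "pytz": pytz,
    })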
@@ -25,13 +25,6 @@ from .base import TaskSpec
from ..operators import valueof, Attrib, PathAttrib
from ..util.deep_merge import DeepMerge

try:
    from celery.app import default_app
except ImportError:
    have_celery = False
else:
    have_celery = True

logger = logging.getLogger('spiff')


@@ -111,7 +104,10 @@ class Celery(TaskSpec):
        :type kwargs: dict
        :param kwargs: kwargs to pass to celery task.
        """
        if not have_celery:

        try:
            from celery.app import default_app
        except ImportError:
            raise Exception("Unable to import python-celery imports.")
        assert wf_spec is not None
        assert name is not None
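The Celery task spec gets the same treatment: celery is no longer imported when the module loads, only when a Celery task spec is actually constructed. The general shape of that deferred-import pattern, as a sketch rather than the exact SpiffWorkflow code:

    class Celery:
        def __init__(self):
            # Defer the heavy import so merely importing this module stays cheap;
            # the cost (and the hard dependency) is paid only when the feature is used.
            try:
                from celery.app import default_app
            except ImportError as exc:
                raise RuntimeError("celery must be installed to use the Celery task spec") from exc
            self.app = default_app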
@@ -74,7 +74,7 @@ Serialization

.. warning::

   Serialization Changed in Version 1.1.7.
   Support for pre-1.1.7 serialization will be dropped in a future release.
   The old serialization method still works but it is deprecated.
   To migrate your system to the new version, see "Migrating between
@@ -85,8 +85,8 @@ setting. This may not always be the case, we may be executing the workflow in th
may have a user request a web page where we open a specific workflow that we may be in the middle of, do one step of
that workflow and then the user may be back in a few minutes, or maybe a few hours depending on the application.

The :code:`BpmnWorkflowSerializer` class contains a serializer for a workflow containing only standard BPMN Tasks.
Since we are using custom task classes (the Camunda :code:`UserTask` and the DMN :code:`BusinessRuleTask`),
we'll need to supply serializers for those task specs as well.

Strictly speaking, these are not serializers per se: they actually convert the tasks into dictionaries of
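For the custom task specs mentioned above, the SpiffWorkflow documentation of this era wires the converters in roughly as follows; the import paths for UserTaskConverter and BusinessRuleTaskConverter are an assumption here and may differ between releases:

    from SpiffWorkflow.bpmn.serializer import BpmnWorkflowSerializer
    # Assumed converter locations for this SpiffWorkflow version.
    from SpiffWorkflow.camunda.serializer import UserTaskConverter
    from SpiffWorkflow.dmn.serializer import BusinessRuleTaskConverter

    wf_spec_converter = BpmnWorkflowSerializer.configure_workflow_spec_converter(
        [UserTaskConverter, BusinessRuleTaskConverter])
    serializer = BpmnWorkflowSerializer(wf_spec_converter)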
@@ -138,7 +138,7 @@ two components:
- a data converter (which handles workflow and task data).

The default workflow spec converter is likely to meet your needs, either on its own, or with the inclusion of
:code:`UserTask` and :code:`BusinessRuleTask` in the :code:`camunda` or :code:`spiff` and :code:`dmn` subpackages
of this library, and all you'll need to do is add them to the list of task converters, as we did above.

However, the default data converter is very simple, adding only JSON-serializable conversions of :code:`datetime`
@@ -180,7 +180,7 @@ If you have written any custom task specs, you'll need to implement task spec co

Task Spec converters are also based on the :code:`DictionaryConverter`. You should be able to use the
`BpmnTaskSpecConverter <https://github.com/sartography/SpiffWorkflow/blob/main/SpiffWorkflow/bpmn/serializer/bpmn_converters.py>`_
as a basis for your custom specs. It provides some methods for extracting attributes from Spiff base classes as well as
standard BPMN attributes from tasks that inherit from :code:`BpmnSpecMixin`.

The `Camunda User Task Converter <https://github.com/sartography/SpiffWorkflow/blob/main/SpiffWorkflow/camunda/serializer/task_spec_converters.py>`_
@@ -221,7 +221,7 @@ serialize the workflow in the new format:

    new_json = serializer.serialize_json(workflow)

However, if you use custom tasks or data serialization, you'll also need to specify workflow spec or data
serializers, as in the examples in the previous section, before you'll be able to serialize with the new serializer.
The code would then look more like this:

.. code:: python

@@ -244,7 +244,7 @@ The code would then look more like this:

    new_json = serializer.serialize_json(workflow)

Because the serializer is highly customizable, we've made it possible for you to manage your own versions of the
serialization. You can do this by passing a version number into the serializer, which will be embedded in the
json of all workflows. This allows you to modify the serialization and customize it over time, and still manage
the different forms as you make adjustments without leaving people behind.
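Passing your own version marker is just a constructor argument; this is a sketch based on SpiffWorkflow releases from around the time of this commit, so treat the keyword name as an assumption:

    from SpiffWorkflow.bpmn.serializer import BpmnWorkflowSerializer

    # The version string is embedded in every serialized workflow, so your own
    # older formats can be recognised and migrated later.
    serializer = BpmnWorkflowSerializer(version="my-app-1.0")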
@@ -253,11 +253,11 @@ Versioned Serializer

As we make changes to Spiff, we may change the serialization format. For example, in 1.1.8, we changed
how subprocesses were handled internally in BPMN workflows and updated how they are serialized. If you have
not overridden our version number with one of your own, the serializer will transform the 1.0 format to the
new 1.1 format.

If you've overridden the serializer version, you may need to incorporate our serialization changes with
your own. You can find our conversions in
`version_migration.py <https://github.com/sartography/SpiffWorkflow/blob/main/SpiffWorkflow/bpmn/serializer/version_migration.py>`_
Custom Script Engines

@@ -277,14 +277,9 @@ We'll cover a simple extension of custom script engine here. There is also an e
a similar engine based on `RestrictedPython <https://restrictedpython.readthedocs.io/en/latest/>`_
included alongside this example.

The default script engine imports the following objects:
The default script engine does not import any objects.

- :code:`timedelta`
- :code:`datetime`
- :code:`dateparser`
- :code:`pytz`

You could add other functions or classes from the standard python modules or any code you've
You could add functions or classes from the standard python modules or any code you've
implemented yourself. Your global environment can be passed in using the `default_globals`
argument when initializing the script engine. In our RestrictedPython example, we use their
`safe_globals` which prevents users from executing some potentially unsafe operations.
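The tests touched later in this diff show exactly the pattern the documentation describes: build the engine with the globals your process scripts expect and hand it to the workflow. A minimal sketch grounded in those tests, where 'spec' and 'subprocesses' are whatever your parser returned and are hypothetical here:

    from datetime import timedelta

    from SpiffWorkflow.bpmn.PythonScriptEngine import PythonScriptEngine
    from SpiffWorkflow.bpmn.workflow import BpmnWorkflow

    # Expose only what the BPMN scripts and timer expressions actually use.
    script_engine = PythonScriptEngine(default_globals={"timedelta": timedelta})
    workflow = BpmnWorkflow(spec, subprocesses, script_engine=script_engine)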
@@ -3,6 +3,4 @@
celery==5.2.3
coverage
lxml
dateparser
pytz
.
@@ -22,7 +22,7 @@ setup(name='SpiffWorkflow',
      license='lGPLv2',
      packages=find_packages(exclude=['tests', 'tests.*']),
      package_data={'SpiffWorkflow.bpmn.parser.schema': ['*.xsd']},
      install_requires=['configparser', 'lxml', 'celery', 'dateparser', 'pytz',
      install_requires=['configparser', 'lxml', 'celery',
                        # required for python 3.7 - https://stackoverflow.com/a/73932581
                        'importlib-metadata<5.0; python_version <= "3.7"'],
      keywords='spiff workflow bpmn engine',
@@ -5,6 +5,7 @@ import datetime
import time
from SpiffWorkflow.task import TaskState
from SpiffWorkflow.bpmn.workflow import BpmnWorkflow
from SpiffWorkflow.bpmn.PythonScriptEngine import PythonScriptEngine
from tests.SpiffWorkflow.bpmn.BpmnWorkflowTestCase import BpmnWorkflowTestCase

__author__ = 'kellym'

@@ -15,11 +16,12 @@ class NITimerDurationTest(BpmnWorkflowTestCase):
    Non-Interrupting Timer boundary test
    """
    def setUp(self):
        self.script_engine = PythonScriptEngine(default_globals={"timedelta": datetime.timedelta})
        spec, subprocesses = self.load_workflow_spec('timer-non-interrupt-boundary.bpmn', 'NonInterruptTimer')
        self.workflow = BpmnWorkflow(spec, subprocesses)
        self.workflow = BpmnWorkflow(spec, subprocesses, script_engine=self.script_engine)

    def load_spec(self):
        return

    def testRunThroughHappy(self):
        self.actual_test(save_restore=False)
@@ -28,7 +30,7 @@ class NITimerDurationTest(BpmnWorkflowTestCase):
        self.actual_test(save_restore=True)

    def actual_test(self,save_restore = False):

        ready_tasks = self.workflow.get_tasks(TaskState.READY)
        self.assertEqual(1, len(ready_tasks))
        self.workflow.complete_task_from_id(ready_tasks[0].id)
@@ -47,7 +49,9 @@ class NITimerDurationTest(BpmnWorkflowTestCase):
            ready_tasks = self.workflow.get_tasks(TaskState.READY)
            if len(ready_tasks) > 1:
                break
            if save_restore: self.save_restore()
            if save_restore:
                self.save_restore()
                self.workflow.script_engine = self.script_engine
            #self.assertEqual(1, len(self.workflow.get_tasks(Task.WAITING)))
            time.sleep(0.1)
        self.workflow.complete_task_from_id(ready_tasks[0].id)
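A detail worth noting, since it recurs in every timer and messaging test below: the script engine is not part of the serialized workflow, so after a save/restore round trip the restored workflow comes back with a default engine and the configured one has to be reattached. The recurring two-line pattern from these tests:

    if save_restore:
        self.save_restore()                               # serialize and reload the workflow
        self.workflow.script_engine = self.script_engine  # the engine is not serialized; reattach it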
@@ -24,32 +24,11 @@ class PythonScriptEngineTest(BpmnWorkflowTestCase):
        workflow.do_engine_steps()
        self.task = workflow.last_task

    def testDateTimeExpressions(self):
        """Basically, assure that we can use datetime, dateparser, and pytz"""
        script = """
# Create Current Date as UTC
now_utc = datetime.datetime.now(datetime.timezone.utc)
# Create Current Date at EST
now_est = now_utc.astimezone(pytz.timezone('US/Eastern'))

# Format a date from a date String in UTC
datestr = "2021-09-23 16:11:00 -0000"  # 12 pm EST, 4pm UTC
dt = dateparser.parse(datestr)
localtime = dt.astimezone(pytz.timezone('US/Eastern'))
localtime_str = localtime.strftime("%Y-%m-%d %H:%M:%S")
"""
        self.expressionEngine.execute(self.task, script)
        self.assertEqual(self.task.data['now_utc'].utcoffset().days, 0)
        self.assertEqual(self.task.data['now_est'].tzinfo.zone, "US/Eastern")
        self.assertEqual(self.task.data['localtime_str'], "2021-09-23 12:11:00")
        self.assertTrue(True)

    def testFunctionsAndGlobalsAreRemoved(self):
        self.assertIn('testvar', self.task.data)
        self.assertIn('testvar2', self.task.data)
        self.assertIn('sample', self.task.data)
        self.assertNotIn('my_function', self.task.data)
        self.assertNotIn('datetime', self.task.data)

def suite():
    return unittest.TestLoader().loadTestsFromTestCase(PythonScriptEngineTest)
@@ -1,5 +1,6 @@
# -*- coding: utf-8 -*-

import datetime
import unittest

from SpiffWorkflow.bpmn.PythonScriptEngine import PythonScriptEngine
@@ -8,6 +9,15 @@ from tests.SpiffWorkflow.bpmn.BpmnWorkflowTestCase import BpmnWorkflowTestCase

__author__ = 'sartography'

class CustomScriptEngine(PythonScriptEngine):
    """This is a custom script processor that can be easily injected into Spiff Workflow.
    It will execute python code read in from the bpmn. It will also make any scripts in the
    scripts directory available for execution. """
    def __init__(self):
        augment_methods = {
            'timedelta': datetime.timedelta,
        }
        super().__init__(scripting_additions=augment_methods)

class TooManyLoopsTest(BpmnWorkflowTestCase):

@@ -23,7 +33,7 @@ class TooManyLoopsTest(BpmnWorkflowTestCase):

    def actual_test(self,save_restore = False):
        spec, subprocesses = self.load_workflow_spec('too_many_loops*.bpmn', 'loops')
        self.workflow = BpmnWorkflow(spec, subprocesses, script_engine=PythonScriptEngine())
        self.workflow = BpmnWorkflow(spec, subprocesses, script_engine=CustomScriptEngine())
        counter = 0
        data = {}
        while not self.workflow.is_completed():
@@ -34,6 +44,7 @@ class TooManyLoopsTest(BpmnWorkflowTestCase):
            counter += 1  # There is a 10 millisecond wait task.
            if save_restore:
                self.save_restore()
                self.workflow.script_engine = CustomScriptEngine()
        self.assertEqual(20, self.workflow.last_task.data['counter'])

    def test_with_sub_process(self):
@@ -41,7 +52,7 @@ class TooManyLoopsTest(BpmnWorkflowTestCase):
        # right after a sub-process. So assuring this is fixed.
        counter = 0
        spec, subprocesses = self.load_workflow_spec('too_many_loops_sub_process.bpmn', 'loops_sub')
        self.workflow = BpmnWorkflow(spec, subprocesses, script_engine=PythonScriptEngine())
        self.workflow = BpmnWorkflow(spec, subprocesses, script_engine=CustomScriptEngine())
        data = {}
        while not self.workflow.is_completed():
            self.workflow.do_engine_steps()
@@ -57,7 +68,7 @@ class TooManyLoopsTest(BpmnWorkflowTestCase):

    def test_with_two_call_activities(self):
        spec, subprocess = self.load_workflow_spec('sub_in_loop*.bpmn', 'main')
        self.workflow = BpmnWorkflow(spec, subprocess)
        self.workflow = BpmnWorkflow(spec, subprocess, script_engine=CustomScriptEngine())
        self.workflow.do_engine_steps()
        for loop in range(3):
            ready = self.workflow.get_ready_user_tasks()
@@ -66,6 +77,7 @@ class TooManyLoopsTest(BpmnWorkflowTestCase):
            self.workflow.refresh_waiting_tasks()
            self.workflow.do_engine_steps()
        self.save_restore()
        self.workflow.script_engine = CustomScriptEngine()

def suite():
    return unittest.TestLoader().loadTestsFromTestCase(TooManyLoopsTest)
@@ -7,8 +7,8 @@
      <bpmn:scriptTask id="Activity_1q1wged" name="Set Future Date">
        <bpmn:incoming>Flow_1i73q45</bpmn:incoming>
        <bpmn:outgoing>Flow_00e79cz</bpmn:outgoing>
        <bpmn:script>futuredate = dateparser.parse('in 1 second') - timedelta(seconds=.95)
futuredate2 = dateparser.parse('September 1 2021 at 10am EDT')</bpmn:script>
        <bpmn:script>futuredate = datetime.now() + timedelta(0, 1) - timedelta(seconds=.95)
futuredate2 = datetime.strptime('2021-09-01 10:00','%Y-%m-%d %H:%M')</bpmn:script>
      </bpmn:scriptTask>
      <bpmn:sequenceFlow id="Flow_1i73q45" sourceRef="Event_0u1rmur" targetRef="Activity_1q1wged" />
      <bpmn:sequenceFlow id="Flow_00e79cz" sourceRef="Activity_1q1wged" targetRef="Event_0eb0w95" />
@@ -1,5 +1,6 @@
# -*- coding: utf-8 -*-

import datetime
import unittest
import time

@@ -21,7 +22,10 @@ class CustomScriptEngine(PythonScriptEngine):
    It will execute python code read in from the bpmn. It will also make any scripts in the
    scripts directory available for execution. """
    def __init__(self):
        augment_methods = {'custom_function': my_custom_function}
        augment_methods = {
            'custom_function': my_custom_function,
            'timedelta': datetime.timedelta,
        }
        super().__init__(scripting_additions=augment_methods)

@@ -1,5 +1,6 @@
# -*- coding: utf-8 -*-

import datetime
import unittest
import time

@@ -21,7 +22,10 @@ class CustomScriptEngine(PythonScriptEngine):
    It will execute python code read in from the bpmn. It will also make any scripts in the
    scripts directory available for execution. """
    def __init__(self):
        augment_methods = {'custom_function': my_custom_function}
        augment_methods = {
            'custom_function': my_custom_function,
            'timedelta': datetime.timedelta,
        }
        super().__init__(scripting_additions=augment_methods)
@@ -3,10 +3,10 @@
import unittest
import datetime
import time
import pytz

from SpiffWorkflow.task import TaskState
from SpiffWorkflow.bpmn.workflow import BpmnWorkflow
from SpiffWorkflow.bpmn.PythonScriptEngine import PythonScriptEngine
from tests.SpiffWorkflow.bpmn.BpmnWorkflowTestCase import BpmnWorkflowTestCase

__author__ = 'kellym'

@@ -15,8 +15,12 @@ __author__ = 'kellym'

class TimerDateTest(BpmnWorkflowTestCase):

    def setUp(self):
        self.script_engine = PythonScriptEngine(default_globals={
            "datetime": datetime.datetime,
            "timedelta": datetime.timedelta,
        })
        self.spec, self.subprocesses = self.load_workflow_spec('timer-date-start.bpmn', 'date_timer')
        self.workflow = BpmnWorkflow(self.spec, self.subprocesses)
        self.workflow = BpmnWorkflow(self.spec, self.subprocesses, script_engine=self.script_engine)

    def testRunThroughHappy(self):
        self.actual_test(save_restore=False)
@@ -42,6 +46,7 @@ class TimerDateTest(BpmnWorkflowTestCase):
                break
            if save_restore:
                self.save_restore()
                self.workflow.script_engine = self.script_engine

        waiting_tasks = self.workflow.get_tasks(TaskState.WAITING)
@@ -50,8 +55,7 @@ class TimerDateTest(BpmnWorkflowTestCase):
            loopcount = loopcount +1
        endtime = datetime.datetime.now()
        self.workflow.do_engine_steps()
        tz = pytz.timezone('US/Eastern')
        testdate = tz.localize(datetime.datetime.strptime('2021-09-01 10:00','%Y-%m-%d %H:%M'))
        testdate = datetime.datetime.strptime('2021-09-01 10:00','%Y-%m-%d %H:%M')
        self.assertEqual(self.workflow.last_task.data['futuredate2'],testdate)
        self.assertTrue('completed' in self.workflow.last_task.data)
        self.assertTrue(self.workflow.last_task.data['completed'])
@@ -3,10 +3,12 @@
import unittest
import datetime
import time
from datetime import timedelta

from SpiffWorkflow.bpmn.specs.events import EndEvent
from SpiffWorkflow.task import TaskState
from SpiffWorkflow.bpmn.workflow import BpmnWorkflow
from SpiffWorkflow.bpmn.PythonScriptEngine import PythonScriptEngine
from tests.SpiffWorkflow.bpmn.BpmnWorkflowTestCase import BpmnWorkflowTestCase
__author__ = 'kellym'

@@ -14,8 +16,9 @@ __author__ = 'kellym'

class TimerDurationTest(BpmnWorkflowTestCase):

    def setUp(self):
        self.script_engine = PythonScriptEngine(default_globals={"timedelta": timedelta})
        self.spec, self.subprocesses = self.load_workflow_spec('boundary_timer_on_task.bpmn', 'test_timer')
        self.workflow = BpmnWorkflow(self.spec, self.subprocesses)
        self.workflow = BpmnWorkflow(self.spec, self.subprocesses, script_engine=self.script_engine)

    def testRunThroughHappy(self):
        self.actual_test(save_restore=False)
@@ -43,9 +46,11 @@ class TimerDurationTest(BpmnWorkflowTestCase):

        starttime = datetime.datetime.now()
        self.workflow = BpmnWorkflow(self.spec)
        self.workflow.script_engine = self.script_engine
        self.workflow.do_engine_steps()
        if save_restore:
            self.save_restore()
            self.workflow.script_engine = self.script_engine
        time.sleep(0.1)
        self.workflow.refresh_waiting_tasks()
        self.workflow.do_engine_steps()
@@ -3,8 +3,10 @@
import unittest
import datetime
import time
from datetime import timedelta
from SpiffWorkflow.task import TaskState
from SpiffWorkflow.bpmn.workflow import BpmnWorkflow
from SpiffWorkflow.bpmn.PythonScriptEngine import PythonScriptEngine
from tests.SpiffWorkflow.bpmn.BpmnWorkflowTestCase import BpmnWorkflowTestCase

__author__ = 'kellym'
@@ -13,8 +15,9 @@ __author__ = 'kellym'

class TimerDurationTest(BpmnWorkflowTestCase):

    def setUp(self):
        self.script_engine = PythonScriptEngine(default_globals={"timedelta": timedelta})
        self.spec, self.subprocesses = self.load_workflow_spec('timer.bpmn', 'timer')
        self.workflow = BpmnWorkflow(self.spec, self.subprocesses)
        self.workflow = BpmnWorkflow(self.spec, self.subprocesses, script_engine=self.script_engine)

    def testRunThroughHappy(self):
        self.actual_test(save_restore=False)
@@ -40,7 +43,9 @@ class TimerDurationTest(BpmnWorkflowTestCase):

        while loopcount < 10:
            if len(self.workflow.get_tasks(TaskState.READY)) >= 1:
                break
            if save_restore: self.save_restore()
            if save_restore:
                self.save_restore()
                self.workflow.script_engine = self.script_engine
            self.assertEqual(1, len(self.workflow.get_tasks(TaskState.WAITING)))
            time.sleep(0.1)
            self.workflow.refresh_waiting_tasks()
@@ -24,7 +24,7 @@ class ExternalMessageBoundaryTest(BaseTestCase):

    def actual_test(self,save_restore = False):

        self.workflow.do_engine_steps()
        ready_tasks = self.workflow.get_tasks(TaskState.READY)
        self.assertEqual(1, len(ready_tasks),'Expected to have only one ready task')
@@ -3,8 +3,10 @@

import unittest
import time
from datetime import timedelta
from SpiffWorkflow.task import TaskState
from SpiffWorkflow.bpmn.workflow import BpmnWorkflow
from SpiffWorkflow.bpmn.PythonScriptEngine import PythonScriptEngine
from .BaseTestCase import BaseTestCase

__author__ = 'kellym'
@@ -13,8 +15,9 @@ __author__ = 'kellym'

class MessageBoundaryTest(BaseTestCase):

    def setUp(self):
        self.script_engine = PythonScriptEngine(default_globals={"timedelta": timedelta})
        self.spec, self.subprocesses = self.load_workflow_spec('MessageBoundary.bpmn', 'Process_1kjyavs')
        self.workflow = BpmnWorkflow(self.spec, self.subprocesses)
        self.workflow = BpmnWorkflow(self.spec, self.subprocesses, script_engine=self.script_engine)

    def testRunThroughHappy(self):
        self.actual_test(save_restore=False)
@@ -41,7 +44,9 @@ class MessageBoundaryTest(BaseTestCase):
            self.workflow.do_engine_steps()
            time.sleep(.01)
            self.workflow.refresh_waiting_tasks()
            if save_restore: self.save_restore()
            if save_restore:
                self.save_restore()
                self.workflow.script_engine = self.script_engine
            ready_tasks = self.workflow.get_tasks(TaskState.READY)
        time.sleep(.01)
        self.workflow.refresh_waiting_tasks()
@@ -1,3 +1,4 @@
import datetime
from decimal import Decimal

from SpiffWorkflow.bpmn.PythonScriptEngine import PythonScriptEngine
@@ -7,4 +8,5 @@ from ..DecisionRunner import DecisionRunner

class PythonDecisionRunner(DecisionRunner):

    def __init__(self, filename):
        super().__init__(PythonScriptEngine(scripting_additions={'Decimal': Decimal}), filename, 'python_engine')
        scripting_additions={'Decimal': Decimal, 'datetime': datetime}
        super().__init__(PythonScriptEngine(scripting_additions=scripting_additions), filename, 'python_engine')
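Because the new PythonScriptEngine constructor (first hunk in this diff) simply merges scripting_additions over an initially empty globals dictionary, the decision runner above ends up with Decimal and datetime visible to DMN expressions. A small sketch of that behavior:

    import datetime
    from decimal import Decimal

    from SpiffWorkflow.bpmn.PythonScriptEngine import PythonScriptEngine

    engine = PythonScriptEngine(scripting_additions={'Decimal': Decimal, 'datetime': datetime})
    # default_globals defaults to {}, and the additions are merged on top.
    assert engine.globals == {'Decimal': Decimal, 'datetime': datetime}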
spiffworkflow-backend/poetry.lock (generated, 48 changed lines)
@@ -410,7 +410,7 @@ python-versions = ">=3.6,<4.0"

[[package]]
name = "dateparser"
version = "1.1.1"
version = "1.1.2"
description = "Date parsing library designed to parse dates from HTML pages"
category = "main"
optional = false
@@ -639,7 +639,7 @@ werkzeug = "*"
type = "git"
url = "https://github.com/sartography/flask-bpmn"
reference = "main"
resolved_reference = "cedc5253add81a18a274f2cd3289fe36bb138f8b"
resolved_reference = "191f0f32798720c9ce1e5307732c90ac26433298"

[[package]]
name = "Flask-Cors"
@@ -820,7 +820,7 @@ python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*"

[[package]]
name = "importlib-metadata"
version = "5.0.0"
version = "4.13.0"
description = "Read metadata from Python packages"
category = "main"
optional = false
@@ -1441,7 +1441,7 @@ docs = ["Sphinx (>=5.0.2,<6.0.0)", "alabaster (>=0.7.12,<0.8.0)", "commonmark (>

[[package]]
name = "pytz"
version = "2022.5"
version = "2022.6"
description = "World timezone definitions, modern and historical"
category = "main"
optional = false
@@ -1873,7 +1873,7 @@ pytz = "*"
type = "git"
url = "https://github.com/sartography/SpiffWorkflow"
reference = "main"
resolved_reference = "2d3bd00854ab483e823c4b386430abc9267f536b"
resolved_reference = "5cdb881edc4621502bfd61ce67565cf1148199f0"

[[package]]
name = "SQLAlchemy"
@@ -2000,6 +2000,14 @@ category = "main"
optional = false
python-versions = "*"

[[package]]
name = "types-dateparser"
version = "1.1.4.1"
description = "Typing stubs for dateparser"
category = "main"
optional = false
python-versions = "*"

[[package]]
name = "types-Flask"
version = "1.1.6"
@@ -2248,7 +2256,7 @@ testing = ["flake8 (<5)", "func-timeout", "jaraco.functools", "jaraco.itertools"

[metadata]
lock-version = "1.1"
python-versions = ">=3.9,<3.11"
content-hash = "bfb51ebc4ef76d4a74f670f44dc4d7ca7e91874b096f56521c2776f1837f6a63"
content-hash = "995be3a9a60b515b281f017ff32ff27a52ca178b1980611b348dccac6afb6b89"

[metadata.files]
alabaster = [
@@ -2454,8 +2462,8 @@ darglint = [
    {file = "darglint-1.8.1.tar.gz", hash = "sha256:080d5106df149b199822e7ee7deb9c012b49891538f14a11be681044f0bb20da"},
]
dateparser = [
    {file = "dateparser-1.1.1-py2.py3-none-any.whl", hash = "sha256:9600874312ff28a41f96ec7ccdc73be1d1c44435719da47fea3339d55ff5a628"},
    {file = "dateparser-1.1.1.tar.gz", hash = "sha256:038196b1f12c7397e38aad3d61588833257f6f552baa63a1499e6987fa8d42d9"},
    {file = "dateparser-1.1.2-py2.py3-none-any.whl", hash = "sha256:d31659dc806a7d88e2b510b2c74f68b525ae531f145c62a57a99bd616b7f90cf"},
    {file = "dateparser-1.1.2.tar.gz", hash = "sha256:3821bf191f95b2658c4abd91571c09821ce7a2bc179bf6cefd8b4515c3ccf9ef"},
]
distlib = [
    {file = "distlib-0.3.6-py2.py3-none-any.whl", hash = "sha256:f35c4b692542ca110de7ef0bea44d73981caeb34ca0b9b6b2e6d7790dda8f80e"},
@@ -2613,7 +2621,6 @@ greenlet = [
    {file = "greenlet-1.1.3.post0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:0a954002064ee919b444b19c1185e8cce307a1f20600f47d6f4b6d336972c809"},
    {file = "greenlet-1.1.3.post0-cp39-cp39-win32.whl", hash = "sha256:2ccdc818cc106cc238ff7eba0d71b9c77be868fdca31d6c3b1347a54c9b187b2"},
    {file = "greenlet-1.1.3.post0-cp39-cp39-win_amd64.whl", hash = "sha256:91a84faf718e6f8b888ca63d0b2d6d185c8e2a198d2a7322d75c303e7097c8b7"},
    {file = "greenlet-1.1.3.post0.tar.gz", hash = "sha256:f5e09dc5c6e1796969fd4b775ea1417d70e49a5df29aaa8e5d10675d9e11872c"},
]
gunicorn = [
    {file = "gunicorn-20.1.0-py3-none-any.whl", hash = "sha256:9dcc4547dbb1cb284accfb15ab5667a0e5d1881cc443e0677b4882a4067a807e"},
@@ -2632,8 +2639,8 @@ imagesize = [
    {file = "imagesize-1.4.1.tar.gz", hash = "sha256:69150444affb9cb0d5cc5a92b3676f0b2fb7cd9ae39e947a5e11a36b4497cd4a"},
]
importlib-metadata = [
    {file = "importlib_metadata-5.0.0-py3-none-any.whl", hash = "sha256:ddb0e35065e8938f867ed4928d0ae5bf2a53b7773871bfe6bcc7e4fcdc7dea43"},
    {file = "importlib_metadata-5.0.0.tar.gz", hash = "sha256:da31db32b304314d044d3c12c79bd59e307889b287ad12ff387b3500835fc2ab"},
    {file = "importlib_metadata-4.13.0-py3-none-any.whl", hash = "sha256:8a8a81bcf996e74fee46f0d16bd3eaa382a7eb20fd82445c3ad11f4090334116"},
    {file = "importlib_metadata-4.13.0.tar.gz", hash = "sha256:dd0173e8f150d6815e098fd354f6414b0f079af4644ddfe90c71e2fc6174346d"},
]
inflection = [
    {file = "inflection-0.5.1-py2.py3-none-any.whl", hash = "sha256:f38b2b640938a4f35ade69ac3d053042959b62a0f1076a5bbaa1b9526605a8a2"},
@@ -3051,7 +3058,18 @@ py = [
    {file = "py-1.11.0.tar.gz", hash = "sha256:51c75c4126074b472f746a24399ad32f6053d1b34b68d2fa41e558e6f4a98719"},
]
pyasn1 = [
    {file = "pyasn1-0.4.8-py2.4.egg", hash = "sha256:fec3e9d8e36808a28efb59b489e4528c10ad0f480e57dcc32b4de5c9d8c9fdf3"},
    {file = "pyasn1-0.4.8-py2.5.egg", hash = "sha256:0458773cfe65b153891ac249bcf1b5f8f320b7c2ce462151f8fa74de8934becf"},
    {file = "pyasn1-0.4.8-py2.6.egg", hash = "sha256:5c9414dcfede6e441f7e8f81b43b34e834731003427e5b09e4e00e3172a10f00"},
    {file = "pyasn1-0.4.8-py2.7.egg", hash = "sha256:6e7545f1a61025a4e58bb336952c5061697da694db1cae97b116e9c46abcf7c8"},
    {file = "pyasn1-0.4.8-py2.py3-none-any.whl", hash = "sha256:39c7e2ec30515947ff4e87fb6f456dfc6e84857d34be479c9d4a4ba4bf46aa5d"},
    {file = "pyasn1-0.4.8-py3.1.egg", hash = "sha256:78fa6da68ed2727915c4767bb386ab32cdba863caa7dbe473eaae45f9959da86"},
    {file = "pyasn1-0.4.8-py3.2.egg", hash = "sha256:08c3c53b75eaa48d71cf8c710312316392ed40899cb34710d092e96745a358b7"},
    {file = "pyasn1-0.4.8-py3.3.egg", hash = "sha256:03840c999ba71680a131cfaee6fab142e1ed9bbd9c693e285cc6aca0d555e576"},
    {file = "pyasn1-0.4.8-py3.4.egg", hash = "sha256:7ab8a544af125fb704feadb008c99a88805126fb525280b2270bb25cc1d78a12"},
    {file = "pyasn1-0.4.8-py3.5.egg", hash = "sha256:e89bf84b5437b532b0803ba5c9a5e054d21fec423a89952a74f87fa2c9b7bce2"},
    {file = "pyasn1-0.4.8-py3.6.egg", hash = "sha256:014c0e9976956a08139dc0712ae195324a75e142284d5f87f1a87ee1b068a359"},
    {file = "pyasn1-0.4.8-py3.7.egg", hash = "sha256:99fcc3c8d804d1bc6d9a099921e39d827026409a58f2a720dcdb89374ea0c776"},
    {file = "pyasn1-0.4.8.tar.gz", hash = "sha256:aef77c9fb94a3ac588e87841208bdec464471d9871bd5050a287cc9a475cd0ba"},
]
pycodestyle = [
@@ -3130,8 +3148,8 @@ python-keycloak = [
    {file = "python_keycloak-2.6.0-py3-none-any.whl", hash = "sha256:a1ce102b978beb56d385319b3ca20992b915c2c12d15a2d0c23f1104882f3fb6"},
]
pytz = [
    {file = "pytz-2022.5-py2.py3-none-any.whl", hash = "sha256:335ab46900b1465e714b4fda4963d87363264eb662aab5e65da039c25f1f5b22"},
    {file = "pytz-2022.5.tar.gz", hash = "sha256:c4d88f472f54d615e9cd582a5004d1e5f624854a6a27a6211591c251f22a6914"},
    {file = "pytz-2022.6-py2.py3-none-any.whl", hash = "sha256:222439474e9c98fced559f1709d89e6c9cbf8d79c794ff3eb9f8800064291427"},
    {file = "pytz-2022.6.tar.gz", hash = "sha256:e89512406b793ca39f5971bc999cc538ce125c0e51c27941bef4568b460095e2"},
]
pytz-deprecation-shim = [
    {file = "pytz_deprecation_shim-0.1.0.post0-py2.py3-none-any.whl", hash = "sha256:8314c9692a636c8eb3bda879b9f119e350e93223ae83e70e80c31675a0fdc1a6"},
@@ -3539,6 +3557,10 @@ types-click = [
    {file = "types-click-7.1.8.tar.gz", hash = "sha256:b6604968be6401dc516311ca50708a0a28baa7a0cb840efd7412f0dbbff4e092"},
    {file = "types_click-7.1.8-py3-none-any.whl", hash = "sha256:8cb030a669e2e927461be9827375f83c16b8178c365852c060a34e24871e7e81"},
]
types-dateparser = [
    {file = "types-dateparser-1.1.4.1.tar.gz", hash = "sha256:0f76578bbae15c8b8701b5efd94db98a97ce0a27aedfe6f14a531170de6db97d"},
    {file = "types_dateparser-1.1.4.1-py3-none-any.whl", hash = "sha256:dd7b2343bb06225c0e358533609b66a8edfb95e5426d8f658664e7d0f27dea68"},
]
types-Flask = [
    {file = "types-Flask-1.1.6.tar.gz", hash = "sha256:aac777b3abfff9436e6b01f6d08171cf23ea6e5be71cbf773aaabb1c5763e9cf"},
    {file = "types_Flask-1.1.6-py3-none-any.whl", hash = "sha256:6ab8a9a5e258b76539d652f6341408867298550b19b81f0e41e916825fc39087"},
@@ -28,6 +28,7 @@ flask-migrate = "*"
flask-restful = "*"
werkzeug = "*"
SpiffWorkflow = {git = "https://github.com/sartography/SpiffWorkflow", rev = "main"}
#SpiffWorkflow = {develop = true, path = "../SpiffWorkflow" }
sentry-sdk = "^1.10"
sphinx-autoapi = "^2.0"
flask-bpmn = {git = "https://github.com/sartography/flask-bpmn", rev = "main"}
@@ -68,6 +69,9 @@ types-pytz = "^2022.1.1"
# for now use my fork
sqlalchemy-stubs = { git = "https://github.com/burnettk/sqlalchemy-stubs.git", rev = "scoped-session-delete" }
simplejson = "^3.17.6"
pytz = "^2022.6"
dateparser = "^1.1.2"
types-dateparser = "^1.1.4.1"


[tool.poetry.dev-dependencies]
@@ -1071,7 +1071,9 @@ def task_show(process_instance_id: int, task_id: str) -> flask.wrappers.Response
        task.form_ui_schema = ui_form_contents

    if task.properties and task.data and "instructionsForEndUser" in task.properties:
        print(f"task.properties['instructionsForEndUser']: {task.properties['instructionsForEndUser']}")
        print(
            f"task.properties['instructionsForEndUser']: {task.properties['instructionsForEndUser']}"
        )
        if task.properties["instructionsForEndUser"]:
            task.properties["instructionsForEndUser"] = render_jinja_template(
                task.properties["instructionsForEndUser"], task.data
@@ -1,5 +1,6 @@
"""Get_env."""
from typing import Any

from flask import current_app

from spiffworkflow_backend.models.script_attributes_context import (
@@ -22,4 +23,4 @@ class GetFrontendUrl(Script):
        **kwargs: Any
    ) -> Any:
        """Run."""
        return current_app.config['SPIFFWORKFLOW_FRONTEND_URL']
        return current_app.config["SPIFFWORKFLOW_FRONTEND_URL"]
@@ -7,6 +7,7 @@ import os
import re
import time
from datetime import datetime
from datetime import timedelta
from typing import Any
from typing import Callable
from typing import Dict
@@ -17,6 +18,8 @@ from typing import Tuple
from typing import TypedDict
from typing import Union

import dateparser
import pytz
from flask import current_app
from flask_bpmn.api.api_error import ApiError
from flask_bpmn.models.db import db
@@ -25,7 +28,6 @@ from RestrictedPython import safe_globals  # type: ignore
from SpiffWorkflow.bpmn.exceptions import WorkflowTaskExecException  # type: ignore
from SpiffWorkflow.bpmn.parser.ValidationException import ValidationException  # type: ignore
from SpiffWorkflow.bpmn.PythonScriptEngine import Box  # type: ignore
from SpiffWorkflow.bpmn.PythonScriptEngine import DEFAULT_GLOBALS
from SpiffWorkflow.bpmn.PythonScriptEngine import PythonScriptEngine
from SpiffWorkflow.bpmn.serializer import BpmnWorkflowSerializer  # type: ignore
from SpiffWorkflow.bpmn.specs.BpmnProcessSpec import BpmnProcessSpec  # type: ignore
@@ -98,19 +100,6 @@ def _import(name: str, glbls: Dict[str, Any], *args: Any) -> None:
    raise ImportError(f"Import not allowed: {name}", name=name)


DEFAULT_GLOBALS.update(
    {
        "datetime": datetime,
        "time": time,
        "decimal": decimal,
        "_strptime": _strptime,
    }
)
# This will overwrite the standard builtins
DEFAULT_GLOBALS.update(safe_globals)
DEFAULT_GLOBALS["__builtins__"]["__import__"] = _import


class PotentialOwnerIdList(TypedDict):
    """PotentialOwnerIdList."""

@@ -143,7 +132,21 @@ class CustomBpmnScriptEngine(PythonScriptEngine):  # type: ignore

    def __init__(self) -> None:
        """__init__."""
        super().__init__(default_globals=DEFAULT_GLOBALS)
        default_globals = {
            "timedelta": timedelta,
            "datetime": datetime,
            "dateparser": dateparser,
            "pytz": pytz,
            "time": time,
            "decimal": decimal,
            "_strptime": _strptime,
        }

        # This will overwrite the standard builtins
        default_globals.update(safe_globals)
        default_globals["__builtins__"]["__import__"] = _import

        super().__init__(default_globals=default_globals)

    def __get_augment_methods(self, task: SpiffTask) -> Dict[str, Callable]:
        """__get_augment_methods."""
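The _import hook installed into __builtins__ above is what keeps workflow scripts from importing arbitrary modules; only the names placed in default_globals (plus safe_globals) are reachable. A rough illustration of the effect, where 'task' stands in for a real SpiffTask from a running process instance:

    engine = CustomBpmnScriptEngine()

    # A script that tries to import anything goes through the overridden
    # __import__ and is rejected with "Import not allowed: os".
    engine.execute(task, "import os")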