Merge branch 'main' into feature/nested-groups
# Conflicts:
#	spiffworkflow-backend/src/spiffworkflow_backend/models/process_model.py
#	spiffworkflow-backend/src/spiffworkflow_backend/services/process_instance_processor.py
#	spiffworkflow-backend/tests/spiffworkflow_backend/integration/test_process_api.py
commit 23517b8953
@@ -3,23 +3,10 @@ import ast
 import copy
 import sys
 import traceback
-import datetime
 
-import dateparser
-import pytz
-
 from SpiffWorkflow.bpmn.exceptions import WorkflowTaskExecException
 from ..operators import Operator
 
-# Would love to get rid of this altogether, as it rightly belongs in the
-# backend, but leaving it here because that's the path of least resistance.
-DEFAULT_GLOBALS = {
-    'timedelta': datetime.timedelta,
-    'datetime': datetime,
-    'dateparser': dateparser,
-    'pytz': pytz,
-}
-
 
 # Copyright (C) 2020 Kelly McDonald
 #
@@ -112,7 +99,7 @@ class PythonScriptEngine(object):
 
     def __init__(self, default_globals=None, scripting_additions=None):
 
-        self.globals = default_globals or DEFAULT_GLOBALS
+        self.globals = default_globals or {}
         self.globals.update(scripting_additions or {})
         self.error_tasks = {}
 
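With DEFAULT_GLOBALS removed, scripts no longer see timedelta, datetime, dateparser, or pytz by default. Downstream code that relied on those names can restore the old behavior by passing an equivalent mapping explicitly; a minimal sketch (dateparser and pytz are assumptions here, since they are no longer installed as SpiffWorkflow dependencies):

    import datetime

    import dateparser  # now a dependency you must declare yourself
    import pytz

    from SpiffWorkflow.bpmn.PythonScriptEngine import PythonScriptEngine

    # Recreate the mapping the engine previously shipped as DEFAULT_GLOBALS.
    script_engine = PythonScriptEngine(default_globals={
        'timedelta': datetime.timedelta,
        'datetime': datetime,
        'dateparser': dateparser,
        'pytz': pytz,
    })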
@@ -27,8 +27,10 @@ from SpiffWorkflow.bpmn.specs.events.event_definitions import NoneEventDefinition
 
 from .ValidationException import ValidationException
 from ..specs.BpmnProcessSpec import BpmnProcessSpec
-from ..specs.events import StartEvent, EndEvent, BoundaryEvent, IntermediateCatchEvent, IntermediateThrowEvent
-from ..specs.events import SendTask, ReceiveTask
+from ..specs.events.EndEvent import EndEvent
+from ..specs.events.StartEvent import StartEvent
+from ..specs.events.IntermediateEvent import BoundaryEvent, IntermediateCatchEvent, IntermediateThrowEvent
+from ..specs.events.IntermediateEvent import SendTask, ReceiveTask
 from ..specs.SubWorkflowTask import CallActivity, SubWorkflowTask, TransactionSubprocess
 from ..specs.ExclusiveGateway import ExclusiveGateway
 from ..specs.InclusiveGateway import InclusiveGateway
@@ -23,7 +23,8 @@ from .ValidationException import ValidationException
 from ..specs.NoneTask import NoneTask
 from ..specs.ScriptTask import ScriptTask
 from ..specs.UserTask import UserTask
-from ..specs.events import _BoundaryEventParent, CancelEventDefinition
+from ..specs.events.IntermediateEvent import _BoundaryEventParent
+from ..specs.events.event_definitions import CancelEventDefinition
 from ..specs.MultiInstanceTask import getDynamicMIClass
 from ..specs.SubWorkflowTask import CallActivity, TransactionSubprocess, SubWorkflowTask
 from ..specs.ExclusiveGateway import ExclusiveGateway
@@ -5,7 +5,7 @@ from SpiffWorkflow.bpmn.specs.events.event_definitions import CorrelationProperty
 from .ValidationException import ValidationException
 from .TaskParser import TaskParser
 from .util import first, one
-from ..specs.events import (TimerEventDefinition, MessageEventDefinition,
+from ..specs.events.event_definitions import (TimerEventDefinition, MessageEventDefinition,
                             ErrorEventDefinition, EscalationEventDefinition,
                             SignalEventDefinition,
                             CancelEventDefinition, CycleTimerEventDefinition,
@@ -35,7 +35,8 @@ class NodeParser:
         return expression.text if expression is not None else None
 
     def parse_documentation(self, sequence_flow=None):
-        documentation_node = first(self._xpath(sequence_flow or self.node, './/bpmn:documentation'))
+        node = sequence_flow if sequence_flow is not None else self.node
+        documentation_node = first(self._xpath(node, './/bpmn:documentation'))
         return None if documentation_node is None else documentation_node.text
 
     def parse_incoming_data_references(self):
@@ -23,7 +23,7 @@ from builtins import object
 from collections import deque
 import json
 from ...task import TaskState
-from ...specs import SubWorkflow
+from ...specs.SubWorkflow import SubWorkflow
 from ...serializer.base import Serializer
 from ..workflow import BpmnWorkflow
 
@@ -15,6 +15,3 @@
 # License along with this library; if not, write to the Free Software
 # Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
 # 02110-1301 USA
-
-from .workflow import BpmnWorkflowSerializer
-from .bpmn_converters import BpmnDataConverter
@@ -7,9 +7,9 @@ from SpiffWorkflow.bpmn.specs.BpmnProcessSpec import BpmnDataSpecification
 
 from .dictionary import DictionaryConverter
 
-from ..specs.events import SignalEventDefinition, MessageEventDefinition, NoneEventDefinition
-from ..specs.events import TimerEventDefinition, CycleTimerEventDefinition, TerminateEventDefinition
-from ..specs.events import ErrorEventDefinition, EscalationEventDefinition, CancelEventDefinition
+from ..specs.events.event_definitions import SignalEventDefinition, MessageEventDefinition, NoneEventDefinition
+from ..specs.events.event_definitions import TimerEventDefinition, CycleTimerEventDefinition, TerminateEventDefinition
+from ..specs.events.event_definitions import ErrorEventDefinition, EscalationEventDefinition, CancelEventDefinition
 from ..specs.events.event_definitions import CorrelationProperty, NamedEventDefinition
 
 from ..specs.BpmnSpecMixin import BpmnSpecMixin, SequenceFlow
@@ -2,7 +2,7 @@ from uuid import UUID
 
 from .bpmn_converters import BpmnTaskSpecConverter
 
-from ...specs import StartTask
+from ...specs.StartTask import StartTask
 from ...specs.Simple import Simple
 from ...specs.LoopResetTask import LoopResetTask
 
@@ -19,7 +19,9 @@ from ..specs.ExclusiveGateway import ExclusiveGateway
 from ..specs.InclusiveGateway import InclusiveGateway
 from ..specs.ParallelGateway import ParallelGateway
 
-from ..specs.events import StartEvent, EndEvent, BoundaryEvent, IntermediateCatchEvent, IntermediateThrowEvent
+from ..specs.events.StartEvent import StartEvent
+from ..specs.events.EndEvent import EndEvent
+from ..specs.events.IntermediateEvent import BoundaryEvent, IntermediateCatchEvent, IntermediateThrowEvent
 from ..specs.events.IntermediateEvent import _BoundaryEventParent, SendTask, ReceiveTask
 
 from ..workflow import BpmnWorkflow
@@ -19,7 +19,7 @@
 
 from ...task import TaskState
 from ...operators import Operator
-from ...specs import TaskSpec
+from ...specs.base import TaskSpec
 
 
 class _BpmnCondition(Operator):
@@ -19,7 +19,7 @@
 from ...exceptions import WorkflowException
 
 from .BpmnSpecMixin import BpmnSpecMixin
-from ...specs import TaskSpec
+from ...specs.base import TaskSpec
 from ...specs.ExclusiveChoice import ExclusiveChoice
 
 
@@ -18,7 +18,7 @@
 # 02110-1301 USA
 from ...bpmn.specs.BpmnSpecMixin import BpmnSpecMixin
 
-from ...specs import Simple
+from ...specs.Simple import Simple
 
 
 class ManualTask(Simple, BpmnSpecMixin):
@@ -29,7 +29,7 @@ from .ScriptTask import ScriptTask
 from .ExclusiveGateway import ExclusiveGateway
 from ...dmn.specs.BusinessRuleTask import BusinessRuleTask
 from ...operators import valueof, is_number
-from ...specs import SubWorkflow
+from ...specs.SubWorkflow import SubWorkflow
 from ...specs.base import TaskSpec
 from ...util.impl import get_class
 from ...task import Task, TaskState
@@ -16,7 +16,7 @@
 # License along with this library; if not, write to the Free Software
 # Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
 # 02110-1301 USA
-from ...specs import Simple
+from ...specs.Simple import Simple
 
 from ...bpmn.specs.BpmnSpecMixin import BpmnSpecMixin
 
@@ -3,7 +3,7 @@ from copy import deepcopy
 
 from SpiffWorkflow.task import TaskState
 from .BpmnSpecMixin import BpmnSpecMixin
-from ...specs import TaskSpec
+from ...specs.base import TaskSpec
 
 
 class SubWorkflowTask(BpmnSpecMixin):
@@ -1,5 +0,0 @@
-from .StartEvent import StartEvent
-from .EndEvent import EndEvent
-from .IntermediateEvent import IntermediateCatchEvent, IntermediateThrowEvent, BoundaryEvent, _BoundaryEventParent, SendTask, ReceiveTask
-from .event_definitions import (NoneEventDefinition, CancelEventDefinition, ErrorEventDefinition, EscalationEventDefinition, MessageEventDefinition,
-                                SignalEventDefinition, TimerEventDefinition, CycleTimerEventDefinition, TerminateEventDefinition)
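Because the events package no longer re-exports its contents, imports like the ones removed above must point at the defining modules, mirroring the parser changes earlier in this diff. A sketch of the migration for client code (the caller is hypothetical, not part of this diff):

    # Before: names re-exported by SpiffWorkflow.bpmn.specs.events
    # from SpiffWorkflow.bpmn.specs.events import StartEvent, EndEvent, BoundaryEvent

    # After: import each class from the module that defines it.
    from SpiffWorkflow.bpmn.specs.events.StartEvent import StartEvent
    from SpiffWorkflow.bpmn.specs.events.EndEvent import EndEvent
    from SpiffWorkflow.bpmn.specs.events.IntermediateEvent import BoundaryEvent
    from SpiffWorkflow.bpmn.specs.events.event_definitions import MessageEventDefinition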
@@ -161,7 +161,9 @@ class BpmnWorkflow(Workflow):
             event_definition.payload = payload
         self.catch(event_definition, correlations=correlations)
 
-    def do_engine_steps(self, exit_at = None):
+    def do_engine_steps(self, exit_at = None,
+                        will_complete_task=None,
+                        did_complete_task=None):
         """
         Execute any READY tasks that are engine specific (for example, gateways
         or script tasks). This is done in a loop, so it will keep completing
@@ -169,6 +171,8 @@ class BpmnWorkflow(Workflow):
         left.
 
         :param exit_at: After executing a task with a name matching this param return the task object
+        :param will_complete_task: Callback that will be called prior to completing a task
+        :param did_complete_task: Callback that will be called after completing a task
         """
         assert not self.read_only
         engine_steps = list(
@@ -176,21 +180,34 @@ class BpmnWorkflow(Workflow):
              if self._is_engine_task(t.task_spec)])
         while engine_steps:
             for task in engine_steps:
+                if will_complete_task is not None:
+                    will_complete_task(task)
                 task.complete()
+                if did_complete_task is not None:
+                    did_complete_task(task)
                 if task.task_spec.name == exit_at:
                     return task
             engine_steps = list(
                 [t for t in self.get_tasks(TaskState.READY)
                  if self._is_engine_task(t.task_spec)])
 
-    def refresh_waiting_tasks(self):
+    def refresh_waiting_tasks(self,
+                              will_refresh_task=None,
+                              did_refresh_task=None):
         """
         Refresh the state of all WAITING tasks. This will, for example, update
         Catching Timer Events whose waiting time has passed.
+
+        :param will_refresh_task: Callback that will be called prior to refreshing a task
+        :param did_refresh_task: Callback that will be called after refreshing a task
         """
         assert not self.read_only
         for my_task in self.get_tasks(TaskState.WAITING):
+            if will_refresh_task is not None:
+                will_refresh_task(my_task)
             my_task.task_spec._update(my_task)
+            if did_refresh_task is not None:
+                did_refresh_task(my_task)
 
     def get_tasks_from_spec_name(self, name, workflow=None):
         return [t for t in self.get_tasks(workflow=workflow) if t.task_spec.name == name]
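The new will_/did_ callbacks let callers observe engine execution without subclassing BpmnWorkflow. A minimal sketch of wiring them up (the logger name and the already-constructed workflow are assumptions):

    import logging

    logger = logging.getLogger('spiff_engine')

    def will_complete_task(task):
        logger.info('starting %s', task.task_spec.name)

    def did_complete_task(task):
        logger.info('finished %s', task.task_spec.name)

    # workflow is an already-constructed BpmnWorkflow
    workflow.do_engine_steps(will_complete_task=will_complete_task,
                             did_complete_task=did_complete_task)
    workflow.refresh_waiting_tasks(
        will_refresh_task=lambda t: logger.info('refreshing %s', t.task_spec.name),
        did_refresh_task=lambda t: logger.info('refreshed %s', t.task_spec.name))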
@@ -6,7 +6,9 @@ from SpiffWorkflow.dmn.parser.BpmnDmnParser import BpmnDmnParser
 from SpiffWorkflow.dmn.specs.BusinessRuleTask import BusinessRuleTask
 from SpiffWorkflow.camunda.parser.task_spec import BusinessRuleTaskParser
 
-from SpiffWorkflow.bpmn.specs.events import EndEvent, IntermediateThrowEvent, StartEvent, IntermediateCatchEvent, BoundaryEvent
+from SpiffWorkflow.bpmn.specs.events.StartEvent import StartEvent
+from SpiffWorkflow.bpmn.specs.events.EndEvent import EndEvent
+from SpiffWorkflow.bpmn.specs.events.IntermediateEvent import IntermediateThrowEvent, IntermediateCatchEvent, BoundaryEvent
 from .event_parsers import CamundaStartEventParser, CamundaEndEventParser, \
     CamundaIntermediateCatchEventParser, CamundaIntermediateThrowEventParser, CamundaBoundaryEventParser
 
@@ -1,2 +0,0 @@
-from .task_spec_converters import UserTaskConverter, StartEventConverter, EndEventConverter, \
-    BoundaryEventConverter, IntermediateCatchEventConverter, IntermediateThrowEventConverter
@@ -1,6 +1,8 @@
 from functools import partial
 
-from SpiffWorkflow.bpmn.specs.events import EndEvent, IntermediateThrowEvent, StartEvent, IntermediateCatchEvent, BoundaryEvent
+from SpiffWorkflow.bpmn.specs.events.StartEvent import StartEvent
+from SpiffWorkflow.bpmn.specs.events.EndEvent import EndEvent
+from SpiffWorkflow.bpmn.specs.events.IntermediateEvent import IntermediateThrowEvent, IntermediateCatchEvent, BoundaryEvent
 from ..specs.events.event_definitions import MessageEventDefinition
 from ...bpmn.serializer.bpmn_converters import BpmnTaskSpecConverter
 
@@ -1 +0,0 @@
-from .task_spec_converters import BusinessRuleTaskConverter
@@ -1,6 +1,6 @@
 from SpiffWorkflow.bpmn.exceptions import WorkflowTaskExecException
 
-from ...specs import Simple
+from ...specs.Simple import Simple
 
 from ...bpmn.specs.BpmnSpecMixin import BpmnSpecMixin
 from ...util.deep_merge import DeepMerge
@@ -16,6 +16,71 @@ from builtins import object
 # Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
 # 02110-1301 USA
 
+import re
+from .. import operators
+from .. import specs
+from ..specs.AcquireMutex import AcquireMutex
+from ..specs.Cancel import Cancel
+from ..specs.CancelTask import CancelTask
+from ..specs.Celery import Celery
+from ..specs.Choose import Choose
+from ..specs.ExclusiveChoice import ExclusiveChoice
+from ..specs.Execute import Execute
+from ..specs.Gate import Gate
+from ..specs.Join import Join
+from ..specs.Merge import Merge
+from ..specs.MultiChoice import MultiChoice
+from ..specs.MultiInstance import MultiInstance
+from ..specs.ReleaseMutex import ReleaseMutex
+from ..specs.Simple import Simple
+from ..specs.StartTask import StartTask
+from ..specs.SubWorkflow import SubWorkflow
+from ..specs.ThreadStart import ThreadStart
+from ..specs.ThreadMerge import ThreadMerge
+from ..specs.ThreadSplit import ThreadSplit
+from ..specs.Transform import Transform
+from ..specs.Trigger import Trigger
+from ..specs.WorkflowSpec import WorkflowSpec
+from ..specs.LoopResetTask import LoopResetTask
+
+# Create a list of tag names out of the spec names.
+def spec_map():
+    return {
+        'acquire-mutex': AcquireMutex,
+        'cancel': Cancel,
+        'cancel-task': CancelTask,
+        'celery': Celery,
+        'choose': Choose,
+        'exclusive-choice': ExclusiveChoice,
+        'execute': Execute,
+        'gate': Gate,
+        'join': Join,
+        'merge': Merge,
+        'multi-choice': MultiChoice,
+        'multi-instance': MultiInstance,
+        'release-mutex': ReleaseMutex,
+        'simple': Simple,
+        'start-task': StartTask,
+        'sub-workflow': SubWorkflow,
+        'thread-start': ThreadStart,
+        'thread-merge': ThreadMerge,
+        'thread-split': ThreadSplit,
+        'transform': Transform,
+        'trigger': Trigger,
+        'workflow-spec': WorkflowSpec,
+        'loop-reset-task': LoopResetTask,
+        'task': Simple,
+    }
+
+def op_map():
+    return {
+        'equals': operators.Equal,
+        'not-equals': operators.NotEqual,
+        'less-than': operators.LessThan,
+        'greater-than': operators.GreaterThan,
+        'matches': operators.Match
+    }
+
 
 class Serializer(object):
 
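Hoisting these tables into spec_map() and op_map() gives both XML serializers one shared definition in place of the dir()/re reflection they previously built, as the serializer hunks below show. A sketch of the intended lookup:

    from SpiffWorkflow.serializer.base import spec_map, op_map

    _spec_map = spec_map()  # tag name -> task spec class
    _op_map = op_map()      # tag name -> operator class

    exclusive_choice_cls = _spec_map['exclusive-choice']
    equal_op = _op_map['equals']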
@@ -24,11 +24,30 @@ from ..util.impl import get_class
 from ..task import Task
 from ..operators import (Attrib, PathAttrib, Equal, NotEqual,
                          Operator, GreaterThan, LessThan, Match)
-from ..specs import (Cancel, AcquireMutex, CancelTask, Celery, Choose,
-                     ExclusiveChoice, Execute, Gate, Join, MultiChoice,
-                     MultiInstance, ReleaseMutex, Simple, WorkflowSpec,
-                     TaskSpec, SubWorkflow, StartTask, ThreadMerge,
-                     ThreadSplit, ThreadStart, Merge, Trigger, LoopResetTask)
+from ..specs.base import TaskSpec
+from ..specs.AcquireMutex import AcquireMutex
+from ..specs.Cancel import Cancel
+from ..specs.CancelTask import CancelTask
+from ..specs.Celery import Celery
+from ..specs.Choose import Choose
+from ..specs.ExclusiveChoice import ExclusiveChoice
+from ..specs.Execute import Execute
+from ..specs.Gate import Gate
+from ..specs.Join import Join
+from ..specs.Merge import Merge
+from ..specs.MultiChoice import MultiChoice
+from ..specs.MultiInstance import MultiInstance
+from ..specs.ReleaseMutex import ReleaseMutex
+from ..specs.Simple import Simple
+from ..specs.StartTask import StartTask
+from ..specs.SubWorkflow import SubWorkflow
+from ..specs.ThreadStart import ThreadStart
+from ..specs.ThreadMerge import ThreadMerge
+from ..specs.ThreadSplit import ThreadSplit
+from ..specs.Transform import Transform
+from ..specs.Trigger import Trigger
+from ..specs.WorkflowSpec import WorkflowSpec
+from ..specs.LoopResetTask import LoopResetTask
 from .base import Serializer
 from .exceptions import TaskNotSupportedError, MissingSpecError
 import warnings
@@ -18,25 +18,15 @@
 # 02110-1301 USA
 import re
 import xml.dom.minidom as minidom
-from .. import operators, specs
+from .. import operators
+from ..specs.Simple import Simple
+from ..specs.WorkflowSpec import WorkflowSpec
 from ..exceptions import StorageException
-from .base import Serializer
+from .base import Serializer, spec_map, op_map
 
 # Create a list of tag names out of the spec names.
-_spec_map = dict()
-for name in dir(specs):
-    if name.startswith('_'):
-        continue
-    module = specs.__dict__[name]
-    name = re.sub(r'(.)([A-Z])', r'\1-\2', name).lower()
-    _spec_map[name] = module
-_spec_map['task'] = specs.Simple
-
-_op_map = {'equals': operators.Equal,
-           'not-equals': operators.NotEqual,
-           'less-than': operators.LessThan,
-           'greater-than': operators.GreaterThan,
-           'matches': operators.Match}
+_spec_map = spec_map()
+_op_map = op_map()
 
 _exc = StorageException
 
@@ -299,9 +289,9 @@ class XmlSerializer(Serializer):
             _exc('%s without a name attribute' % node.nodeName)
 
         # Read all task specs and create a list of successors.
-        workflow_spec = specs.WorkflowSpec(name, filename)
+        workflow_spec = WorkflowSpec(name, filename)
         del workflow_spec.task_specs['Start']
-        end = specs.Simple(workflow_spec, 'End'), []
+        end = Simple(workflow_spec, 'End'), []
         read_specs = dict(end=end)
         for child_node in node.childNodes:
             if child_node.nodeType != minidom.Node.ELEMENT_NODE:
@@ -24,29 +24,35 @@ from .. import specs, operators
 from ..task import Task, TaskStateNames
 from ..operators import (Attrib, Assign, PathAttrib, Equal, NotEqual,
                          GreaterThan, LessThan, Match)
-from ..specs import (Cancel, AcquireMutex, CancelTask, Celery, Choose,
-                     ExclusiveChoice, Execute, Gate, Join, MultiChoice,
-                     MultiInstance, ReleaseMutex, Simple, WorkflowSpec,
-                     SubWorkflow, StartTask, ThreadMerge,
-                     ThreadSplit, ThreadStart, Merge, Trigger, LoopResetTask)
-from .base import Serializer
+from ..specs.AcquireMutex import AcquireMutex
+from ..specs.Cancel import Cancel
+from ..specs.CancelTask import CancelTask
+from ..specs.Celery import Celery
+from ..specs.Choose import Choose
+from ..specs.ExclusiveChoice import ExclusiveChoice
+from ..specs.Execute import Execute
+from ..specs.Gate import Gate
+from ..specs.Join import Join
+from ..specs.Merge import Merge
+from ..specs.MultiChoice import MultiChoice
+from ..specs.MultiInstance import MultiInstance
+from ..specs.ReleaseMutex import ReleaseMutex
+from ..specs.Simple import Simple
+from ..specs.StartTask import StartTask
+from ..specs.SubWorkflow import SubWorkflow
+from ..specs.ThreadStart import ThreadStart
+from ..specs.ThreadMerge import ThreadMerge
+from ..specs.ThreadSplit import ThreadSplit
+from ..specs.Transform import Transform
+from ..specs.Trigger import Trigger
+from ..specs.WorkflowSpec import WorkflowSpec
+from ..specs.LoopResetTask import LoopResetTask
+from .base import Serializer, spec_map, op_map
 from .exceptions import TaskNotSupportedError
 
 # Create a list of tag names out of the spec names.
-_spec_map = dict()
-for name in dir(specs):
-    if name.startswith('_'):
-        continue
-    module = specs.__dict__[name]
-    name = re.sub(r'(.)([A-Z])', r'\1-\2', name).lower()
-    _spec_map[name] = module
-_spec_map['task'] = specs.Simple
-
-_op_map = {'equals': operators.Equal,
-           'not-equals': operators.NotEqual,
-           'less-than': operators.LessThan,
-           'greater-than': operators.GreaterThan,
-           'matches': operators.Match}
+_spec_map = spec_map()
+_op_map = op_map()
 
 
 class XmlSerializer(Serializer):
@@ -1 +0,0 @@
-from .tasks import CallActivityParser
@@ -25,13 +25,6 @@ from .base import TaskSpec
 from ..operators import valueof, Attrib, PathAttrib
 from ..util.deep_merge import DeepMerge
 
-try:
-    from celery.app import default_app
-except ImportError:
-    have_celery = False
-else:
-    have_celery = True
-
 logger = logging.getLogger('spiff')
 
 
@@ -111,7 +104,10 @@ class Celery(TaskSpec):
         :type kwargs: dict
         :param kwargs: kwargs to pass to celery task.
         """
-        if not have_celery:
+        try:
+            from celery.app import default_app
+        except ImportError:
             raise Exception("Unable to import python-celery imports.")
         assert wf_spec is not None
         assert name is not None
@@ -18,7 +18,7 @@
 # 02110-1301 USA
 import os
 
-from . import StartTask
+from .StartTask import StartTask
 from .base import TaskSpec
 from ..task import TaskState
 from ..exceptions import WorkflowException
@@ -87,7 +87,7 @@ class SubWorkflow(TaskSpec):
 
     def _create_subworkflow(self, my_task):
         from ..serializer.prettyxml import XmlSerializer
-        from ..specs import WorkflowSpec
+        from ..specs.WorkflowSpec import WorkflowSpec
         from ..workflow import Workflow
         file_name = valueof(my_task, self.file)
         serializer = XmlSerializer()
@@ -19,7 +19,7 @@
 from ..task import TaskState
 from ..exceptions import WorkflowException
 from ..operators import valueof
-from ..specs import Join
+from ..specs.Join import Join
 
 
 class ThreadMerge(Join):
@@ -17,7 +17,7 @@
 # Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
 # 02110-1301 USA
 
-from ..specs import StartTask
+from ..specs.StartTask import StartTask
 
 
 class WorkflowSpec(object):
@@ -82,7 +82,7 @@ class WorkflowSpec(object):
         :returns: empty list if valid, a list of errors if not
         """
         results = []
-        from ..specs import Join
+        from ..specs.Join import Join
 
         def recursive_find_loop(task, history):
             current = history[:]
@@ -1,31 +0,0 @@
-# -*- coding: utf-8 -*-
-# flake8: noqa
-
-from .base import TaskSpec
-from .AcquireMutex import AcquireMutex
-from .Cancel import Cancel
-from .CancelTask import CancelTask
-from .Celery import Celery
-from .Choose import Choose
-from .ExclusiveChoice import ExclusiveChoice
-from .Execute import Execute
-from .Gate import Gate
-from .Join import Join
-from .Merge import Merge
-from .MultiChoice import MultiChoice
-from .MultiInstance import MultiInstance
-from .ReleaseMutex import ReleaseMutex
-from .Simple import Simple
-from .StartTask import StartTask
-from .SubWorkflow import SubWorkflow
-from .ThreadStart import ThreadStart
-from .ThreadMerge import ThreadMerge
-from .ThreadSplit import ThreadSplit
-from .Transform import Transform
-from .Trigger import Trigger
-from .WorkflowSpec import WorkflowSpec
-from .LoopResetTask import LoopResetTask
-
-import inspect
-__all__ = [name for name, obj in list(locals().items())
-           if not (name.startswith('_') or inspect.ismodule(obj))]
@@ -1 +0,0 @@
-from .process import SpiffBpmnParser, VALIDATOR
@@ -3,8 +3,15 @@ import os
 from SpiffWorkflow.dmn.parser.BpmnDmnParser import BpmnDmnParser
 from SpiffWorkflow.bpmn.parser.BpmnParser import BpmnValidator, full_tag
 
-from SpiffWorkflow.bpmn.specs.events import StartEvent, EndEvent, IntermediateThrowEvent, BoundaryEvent, IntermediateCatchEvent
-from SpiffWorkflow.spiff.specs import NoneTask, ManualTask, UserTask, ScriptTask, SubWorkflowTask, TransactionSubprocess, CallActivity, ServiceTask
+from SpiffWorkflow.bpmn.specs.events.EndEvent import EndEvent
+from SpiffWorkflow.bpmn.specs.events.StartEvent import StartEvent
+from SpiffWorkflow.bpmn.specs.events.IntermediateEvent import IntermediateThrowEvent, BoundaryEvent, IntermediateCatchEvent
+from SpiffWorkflow.spiff.specs.none_task import NoneTask
+from SpiffWorkflow.spiff.specs.manual_task import ManualTask
+from SpiffWorkflow.spiff.specs.user_task import UserTask
+from SpiffWorkflow.spiff.specs.script_task import ScriptTask
+from SpiffWorkflow.spiff.specs.subworkflow_task import SubWorkflowTask, TransactionSubprocess, CallActivity
+from SpiffWorkflow.spiff.specs.service_task import ServiceTask
 from SpiffWorkflow.spiff.specs.events.event_types import SendTask, ReceiveTask
 from SpiffWorkflow.spiff.parser.task_spec import SpiffTaskParser, SubWorkflowParser, CallActivityParser, ServiceTaskParser, ScriptTaskParser
 from SpiffWorkflow.spiff.parser.event_parsers import (SpiffStartEventParser, SpiffEndEventParser, SpiffBoundaryEventParser,
@@ -1,4 +0,0 @@
-from .task_spec_converters import NoneTaskConverter, ManualTaskConverter, UserTaskConverter, ScriptTaskConverter
-from .task_spec_converters import TransactionSubprocessConverter, CallActivityTaskConverter, SubWorkflowTaskConverter
-from .task_spec_converters import StartEventConverter, EndEventConverter, IntermediateCatchEventConverter, IntermediateThrowEventConverter, \
-    BoundaryEventConverter, SendTaskConverter, ReceiveTaskConverter, ServiceTaskConverter
@@ -1,9 +1,16 @@
 from functools import partial
 
 from SpiffWorkflow.bpmn.serializer.bpmn_converters import BpmnTaskSpecConverter
-from SpiffWorkflow.bpmn.specs.events import EndEvent, StartEvent, IntermediateThrowEvent, IntermediateCatchEvent, BoundaryEvent
-from SpiffWorkflow.spiff.specs import NoneTask, ManualTask, UserTask, ScriptTask, ServiceTask, SubWorkflowTask, TransactionSubprocess, CallActivity
-from SpiffWorkflow.spiff.specs.events import SendTask, ReceiveTask
+from SpiffWorkflow.bpmn.specs.events.StartEvent import StartEvent
+from SpiffWorkflow.bpmn.specs.events.EndEvent import EndEvent
+from SpiffWorkflow.bpmn.specs.events.IntermediateEvent import IntermediateThrowEvent, IntermediateCatchEvent, BoundaryEvent
+from SpiffWorkflow.spiff.specs.none_task import NoneTask
+from SpiffWorkflow.spiff.specs.manual_task import ManualTask
+from SpiffWorkflow.spiff.specs.user_task import UserTask
+from SpiffWorkflow.spiff.specs.script_task import ScriptTask
+from SpiffWorkflow.spiff.specs.service_task import ServiceTask
+from SpiffWorkflow.spiff.specs.subworkflow_task import SubWorkflowTask, TransactionSubprocess, CallActivity
+from SpiffWorkflow.spiff.specs.events.event_types import SendTask, ReceiveTask
 from SpiffWorkflow.spiff.specs.events.event_definitions import MessageEventDefinition
 
 
@@ -1,6 +0,0 @@
-from .manual_task import ManualTask
-from .none_task import NoneTask
-from .subworkflow_task import SubWorkflowTask, TransactionSubprocess, CallActivity
-from .user_task import UserTask
-from .script_task import ScriptTask
-from .service_task import ServiceTask
@@ -1 +0,0 @@
-from .event_types import SendTask, ReceiveTask
@@ -19,7 +19,7 @@
 
 import logging
 
-from . import specs
+from .specs.Simple import Simple
 from .specs.LoopResetTask import LoopResetTask
 from .task import Task, TaskState
 from .util.compat import mutex
@@ -61,7 +61,7 @@ class Workflow(object):
         if 'Root' in workflow_spec.task_specs:
             root = workflow_spec.task_specs['Root']
         else:
-            root = specs.Simple(workflow_spec, 'Root')
+            root = Simple(workflow_spec, 'Root')
         logger.info('Initialize', extra=self.log_info())
 
         # Setting TaskState.COMPLETED prevents the root task from being executed.
@@ -277,14 +277,9 @@ We'll cover a simple extension of custom script engine here. There is also an example of
 a similar engine based on `RestrictedPython <https://restrictedpython.readthedocs.io/en/latest/>`_
 included alongside this example.
 
-The default script engine imports the following objects:
+The default script engine does not import any objects.
 
-- :code:`timedelta`
-- :code:`datetime`
-- :code:`dateparser`
-- :code:`pytz`
-
-You could add other functions or classes from the standard python modules or any code you've
+You could add functions or classes from the standard python modules or any code you've
 implemented yourself. Your global environment can be passed in using the `default_globals`
 argument when initializing the script engine. In our RestrictedPython example, we use their
 `safe_globals` which prevents users from executing some potentially unsafe operations.
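Per the updated documentation, the migration path is to hand the engine an explicit global environment, as the RestrictedPython example does with safe_globals. A minimal sketch, assuming RestrictedPython is installed and exports safe_globals (the full example alongside the docs also compiles scripts with RestrictedPython's restricted compiler):

    from RestrictedPython import safe_globals
    from SpiffWorkflow.bpmn.PythonScriptEngine import PythonScriptEngine

    # A restricted global environment instead of the now-empty default.
    restricted_engine = PythonScriptEngine(default_globals=safe_globals)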
@@ -1,6 +1,6 @@
 import json
 from SpiffWorkflow.workflow import Workflow
-from SpiffWorkflow.specs import WorkflowSpec
+from SpiffWorkflow.specs.WorkflowSpec import WorkflowSpec
 from SpiffWorkflow.serializer.json import JSONSerializer
 
 # Load from JSON
@@ -3,6 +3,4 @@
 celery==5.2.3
 coverage
 lxml
-dateparser
-pytz
 .
@@ -22,7 +22,7 @@ setup(name='SpiffWorkflow',
       license='lGPLv2',
       packages=find_packages(exclude=['tests', 'tests.*']),
       package_data={'SpiffWorkflow.bpmn.parser.schema': ['*.xsd']},
-      install_requires=['configparser', 'lxml', 'celery', 'dateparser', 'pytz',
+      install_requires=['configparser', 'lxml', 'celery',
                         # required for python 3.7 - https://stackoverflow.com/a/73932581
                         'importlib-metadata<5.0; python_version <= "3.7"'],
       keywords='spiff workflow bpmn engine',
@@ -6,7 +6,7 @@ import unittest
 import os
 sys.path.insert(0, os.path.join(os.path.dirname(__file__), '..', '..'))
 
-from SpiffWorkflow.specs import WorkflowSpec
+from SpiffWorkflow.specs.WorkflowSpec import WorkflowSpec
 from SpiffWorkflow.task import Task
 from SpiffWorkflow.serializer.prettyxml import XmlSerializer
 from tests.SpiffWorkflow.util import run_workflow
@@ -6,7 +6,9 @@ import os.path
 sys.path.insert(0, os.path.join(os.path.dirname(__file__), '..', '..'))
 
 from SpiffWorkflow.workflow import Workflow
-from SpiffWorkflow.specs import Join, MultiChoice, WorkflowSpec
+from SpiffWorkflow.specs.Join import Join
+from SpiffWorkflow.specs.MultiChoice import MultiChoice
+from SpiffWorkflow.specs.WorkflowSpec import WorkflowSpec
 from SpiffWorkflow.operators import Attrib, Equal, PathAttrib
 from SpiffWorkflow.task import TaskState
 from SpiffWorkflow.specs.Simple import Simple
@@ -7,7 +7,8 @@ import os.path
 sys.path.insert(0, os.path.join(os.path.dirname(__file__), '..', '..'))
 
 from SpiffWorkflow.task import Task, TaskState, updateDotDict
-from SpiffWorkflow.specs import WorkflowSpec, Simple
+from SpiffWorkflow.specs.WorkflowSpec import WorkflowSpec
+from SpiffWorkflow.specs.Simple import Simple
 
 
 class MockWorkflow(object):
@@ -7,7 +7,9 @@ data_dir = os.path.join(os.path.dirname(__file__), 'data')
 sys.path.insert(0, os.path.join(os.path.dirname(__file__), '..', '..'))
 
 from SpiffWorkflow.workflow import Workflow
-from SpiffWorkflow.specs import Cancel, Simple, WorkflowSpec
+from SpiffWorkflow.specs.Cancel import Cancel
+from SpiffWorkflow.specs.Simple import Simple
+from SpiffWorkflow.specs.WorkflowSpec import WorkflowSpec
 from SpiffWorkflow.task import TaskState
 from SpiffWorkflow.serializer.prettyxml import XmlSerializer
 
@@ -2,7 +2,7 @@
 
 import unittest
 from SpiffWorkflow.bpmn.workflow import BpmnWorkflow
-from SpiffWorkflow.bpmn.specs.events import MessageEventDefinition
+from SpiffWorkflow.bpmn.specs.events.event_definitions import MessageEventDefinition
 from tests.SpiffWorkflow.bpmn.BpmnWorkflowTestCase import BpmnWorkflowTestCase
 
 __author__ = 'matth'
@@ -10,7 +10,7 @@ from SpiffWorkflow.bpmn.serializer.bpmn_converters import BpmnTaskSpecConverter
 
 # Many of our tests relied on the Packager to set the calledElement attribute on
 # Call Activities. I've moved that code to a customized parser.
-from SpiffWorkflow.signavio.parser import CallActivityParser
+from SpiffWorkflow.signavio.parser.tasks import CallActivityParser
 from SpiffWorkflow.bpmn.specs.SubWorkflowTask import CallActivity
 
 __author__ = 'matth'
@@ -6,7 +6,7 @@ from uuid import uuid4
 from SpiffWorkflow.task import TaskState
 from SpiffWorkflow.bpmn.PythonScriptEngine import PythonScriptEngine
 from SpiffWorkflow.bpmn.parser.BpmnParser import BpmnParser
-from SpiffWorkflow.bpmn.serializer import BpmnWorkflowSerializer
+from SpiffWorkflow.bpmn.serializer.workflow import BpmnWorkflowSerializer
 from SpiffWorkflow.bpmn.serializer.BpmnSerializer import BpmnSerializer
 from SpiffWorkflow.bpmn.workflow import BpmnWorkflow
 from tests.SpiffWorkflow.bpmn.BpmnLoaderForTests import TestUserTaskConverter
@@ -7,7 +7,7 @@ from SpiffWorkflow.bpmn.parser.BpmnParser import BpmnValidator
 
 from SpiffWorkflow.task import TaskState
 
-from SpiffWorkflow.bpmn.serializer import BpmnWorkflowSerializer
+from SpiffWorkflow.bpmn.serializer.workflow import BpmnWorkflowSerializer
 from .BpmnLoaderForTests import TestUserTaskConverter, TestBpmnParser
 
 __author__ = 'matth'
@@ -5,6 +5,7 @@ import datetime
 import time
 from SpiffWorkflow.task import TaskState
 from SpiffWorkflow.bpmn.workflow import BpmnWorkflow
+from SpiffWorkflow.bpmn.PythonScriptEngine import PythonScriptEngine
 from tests.SpiffWorkflow.bpmn.BpmnWorkflowTestCase import BpmnWorkflowTestCase
 
 __author__ = 'kellym'
@@ -15,8 +16,9 @@ class NITimerDurationTest(BpmnWorkflowTestCase):
     Non-Interrupting Timer boundary test
     """
     def setUp(self):
+        self.script_engine = PythonScriptEngine(default_globals={"timedelta": datetime.timedelta})
         spec, subprocesses = self.load_workflow_spec('timer-non-interrupt-boundary.bpmn', 'NonInterruptTimer')
-        self.workflow = BpmnWorkflow(spec, subprocesses)
+        self.workflow = BpmnWorkflow(spec, subprocesses, script_engine=self.script_engine)
 
     def load_spec(self):
         return
@@ -47,7 +49,9 @@ class NITimerDurationTest(BpmnWorkflowTestCase):
             ready_tasks = self.workflow.get_tasks(TaskState.READY)
             if len(ready_tasks) > 1:
                 break
-            if save_restore: self.save_restore()
+            if save_restore:
+                self.save_restore()
+                self.workflow.script_engine = self.script_engine
             #self.assertEqual(1, len(self.workflow.get_tasks(Task.WAITING)))
             time.sleep(0.1)
             self.workflow.complete_task_from_id(ready_tasks[0].id)
@@ -3,7 +3,7 @@ import os
 import unittest
 
 from SpiffWorkflow.camunda.parser.CamundaParser import CamundaParser
-from SpiffWorkflow.spiff.parser import SpiffBpmnParser
+from SpiffWorkflow.spiff.parser.process import SpiffBpmnParser
 
 from tests.SpiffWorkflow.bpmn.BpmnWorkflowTestCase import BpmnWorkflowTestCase
 
@@ -24,32 +24,11 @@ class PythonScriptEngineTest(BpmnWorkflowTestCase):
         workflow.do_engine_steps()
         self.task = workflow.last_task
 
-    def testDateTimeExpressions(self):
-        """Basically, assure that we can use datime, dateutils, and pytz"""
-        script = """
-# Create Current Date as UTC
-now_utc = datetime.datetime.now(datetime.timezone.utc)
-# Create Current Date at EST
-now_est = now_utc.astimezone(pytz.timezone('US/Eastern'))
-
-# Format a date from a date String in UTC
-datestr = "2021-09-23 16:11:00 -0000"  # 12 pm EST, 4pm UTC
-dt = dateparser.parse(datestr)
-localtime = dt.astimezone(pytz.timezone('US/Eastern'))
-localtime_str = localtime.strftime("%Y-%m-%d %H:%M:%S")
-"""
-        self.expressionEngine.execute(self.task, script)
-        self.assertEqual(self.task.data['now_utc'].utcoffset().days, 0)
-        self.assertEqual(self.task.data['now_est'].tzinfo.zone, "US/Eastern")
-        self.assertEqual(self.task.data['localtime_str'], "2021-09-23 12:11:00")
-        self.assertTrue(True)
-
     def testFunctionsAndGlobalsAreRemoved(self):
         self.assertIn('testvar', self.task.data)
         self.assertIn('testvar2', self.task.data)
         self.assertIn('sample', self.task.data)
         self.assertNotIn('my_function', self.task.data)
+        self.assertNotIn('datetime', self.task.data)
 
 def suite():
     return unittest.TestLoader().loadTestsFromTestCase(PythonScriptEngineTest)
@ -1,5 +1,6 @@
|
|||||||
# -*- coding: utf-8 -*-
|
# -*- coding: utf-8 -*-
|
||||||
|
|
||||||
|
import datetime
|
||||||
import unittest
|
import unittest
|
||||||
|
|
||||||
from SpiffWorkflow.bpmn.PythonScriptEngine import PythonScriptEngine
|
from SpiffWorkflow.bpmn.PythonScriptEngine import PythonScriptEngine
|
||||||
@ -8,6 +9,15 @@ from tests.SpiffWorkflow.bpmn.BpmnWorkflowTestCase import BpmnWorkflowTestCase
|
|||||||
|
|
||||||
__author__ = 'sartography'
|
__author__ = 'sartography'
|
||||||
|
|
||||||
|
class CustomScriptEngine(PythonScriptEngine):
|
||||||
|
"""This is a custom script processor that can be easily injected into Spiff Workflow.
|
||||||
|
It will execute python code read in from the bpmn. It will also make any scripts in the
|
||||||
|
scripts directory available for execution. """
|
||||||
|
def __init__(self):
|
||||||
|
augment_methods = {
|
||||||
|
'timedelta': datetime.timedelta,
|
||||||
|
}
|
||||||
|
super().__init__(scripting_additions=augment_methods)
|
||||||
|
|
||||||
class TooManyLoopsTest(BpmnWorkflowTestCase):
|
class TooManyLoopsTest(BpmnWorkflowTestCase):
|
||||||
|
|
||||||
@ -23,7 +33,7 @@ class TooManyLoopsTest(BpmnWorkflowTestCase):
|
|||||||
|
|
||||||
def actual_test(self,save_restore = False):
|
def actual_test(self,save_restore = False):
|
||||||
spec, subprocesses = self.load_workflow_spec('too_many_loops*.bpmn', 'loops')
|
spec, subprocesses = self.load_workflow_spec('too_many_loops*.bpmn', 'loops')
|
||||||
self.workflow = BpmnWorkflow(spec, subprocesses, script_engine=PythonScriptEngine())
|
self.workflow = BpmnWorkflow(spec, subprocesses, script_engine=CustomScriptEngine())
|
||||||
counter = 0
|
counter = 0
|
||||||
data = {}
|
data = {}
|
||||||
while not self.workflow.is_completed():
|
while not self.workflow.is_completed():
|
||||||
@ -34,6 +44,7 @@ class TooManyLoopsTest(BpmnWorkflowTestCase):
|
|||||||
counter += 1 # There is a 10 millisecond wait task.
|
counter += 1 # There is a 10 millisecond wait task.
|
||||||
if save_restore:
|
if save_restore:
|
||||||
self.save_restore()
|
self.save_restore()
|
||||||
|
self.workflow.script_engine = CustomScriptEngine()
|
||||||
self.assertEqual(20, self.workflow.last_task.data['counter'])
|
self.assertEqual(20, self.workflow.last_task.data['counter'])
|
||||||
|
|
||||||
def test_with_sub_process(self):
|
def test_with_sub_process(self):
|
||||||
@ -41,7 +52,7 @@ class TooManyLoopsTest(BpmnWorkflowTestCase):
|
|||||||
# right after a sub-process. So assuring this is fixed.
|
# right after a sub-process. So assuring this is fixed.
|
||||||
counter = 0
|
counter = 0
|
||||||
spec, subprocesses = self.load_workflow_spec('too_many_loops_sub_process.bpmn', 'loops_sub')
|
spec, subprocesses = self.load_workflow_spec('too_many_loops_sub_process.bpmn', 'loops_sub')
|
||||||
self.workflow = BpmnWorkflow(spec, subprocesses, script_engine=PythonScriptEngine())
|
self.workflow = BpmnWorkflow(spec, subprocesses, script_engine=CustomScriptEngine())
|
||||||
data = {}
|
data = {}
|
||||||
while not self.workflow.is_completed():
|
while not self.workflow.is_completed():
|
||||||
self.workflow.do_engine_steps()
|
self.workflow.do_engine_steps()
|
||||||
@ -57,7 +68,7 @@ class TooManyLoopsTest(BpmnWorkflowTestCase):
|
|||||||
|
|
||||||
def test_with_two_call_activities(self):
|
def test_with_two_call_activities(self):
|
||||||
spec, subprocess = self.load_workflow_spec('sub_in_loop*.bpmn', 'main')
|
spec, subprocess = self.load_workflow_spec('sub_in_loop*.bpmn', 'main')
|
||||||
self.workflow = BpmnWorkflow(spec, subprocess)
|
self.workflow = BpmnWorkflow(spec, subprocess, script_engine=CustomScriptEngine())
|
||||||
self.workflow.do_engine_steps()
|
self.workflow.do_engine_steps()
|
||||||
for loop in range(3):
|
for loop in range(3):
|
||||||
ready = self.workflow.get_ready_user_tasks()
|
ready = self.workflow.get_ready_user_tasks()
|
||||||
@ -66,6 +77,7 @@ class TooManyLoopsTest(BpmnWorkflowTestCase):
|
|||||||
self.workflow.refresh_waiting_tasks()
|
self.workflow.refresh_waiting_tasks()
|
||||||
self.workflow.do_engine_steps()
|
self.workflow.do_engine_steps()
|
||||||
self.save_restore()
|
self.save_restore()
|
||||||
|
self.workflow.script_engine = CustomScriptEngine()
|
||||||
|
|
||||||
def suite():
|
def suite():
|
||||||
return unittest.TestLoader().loadTestsFromTestCase(TooManyLoopsTest)
|
return unittest.TestLoader().loadTestsFromTestCase(TooManyLoopsTest)
|
||||||
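Aside (not part of the commit): the pattern above recurs through this whole change set. With the old DEFAULT_GLOBALS gone from PythonScriptEngine, any name a BPMN-embedded script needs (here timedelta) must be supplied explicitly through scripting_additions or default_globals, and the engine is handed to the workflow at construction time. A minimal sketch of that injection, assuming spec and subprocesses come from a parser or a test helper such as load_workflow_spec(); run_to_completion is a made-up name for illustration:

import datetime

from SpiffWorkflow.bpmn.PythonScriptEngine import PythonScriptEngine
from SpiffWorkflow.bpmn.workflow import BpmnWorkflow


class CustomScriptEngine(PythonScriptEngine):
    """Expose datetime.timedelta to scripts embedded in the BPMN XML."""

    def __init__(self):
        super().__init__(scripting_additions={'timedelta': datetime.timedelta})


def run_to_completion(spec, subprocesses):
    # The engine must be passed at construction; scripts run inside
    # do_engine_steps() and would otherwise fail on the missing name.
    workflow = BpmnWorkflow(spec, subprocesses, script_engine=CustomScriptEngine())
    while not workflow.is_completed():
        workflow.do_engine_steps()
        workflow.refresh_waiting_tasks()
    return workflow.last_task.data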
@@ -7,8 +7,8 @@
     <bpmn:scriptTask id="Activity_1q1wged" name="Set Future Date">
       <bpmn:incoming>Flow_1i73q45</bpmn:incoming>
       <bpmn:outgoing>Flow_00e79cz</bpmn:outgoing>
-      <bpmn:script>futuredate = dateparser.parse('in 1 second') - timedelta(seconds=.95)
-futuredate2 = dateparser.parse('September 1 2021 at 10am EDT')</bpmn:script>
+      <bpmn:script>futuredate = datetime.now() + timedelta(0, 1) - timedelta(seconds=.95)
+futuredate2 = datetime.strptime('2021-09-01 10:00','%Y-%m-%d %H:%M')</bpmn:script>
     </bpmn:scriptTask>
     <bpmn:sequenceFlow id="Flow_1i73q45" sourceRef="Event_0u1rmur" targetRef="Activity_1q1wged" />
     <bpmn:sequenceFlow id="Flow_00e79cz" sourceRef="Activity_1q1wged" targetRef="Event_0eb0w95" />
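The rewritten script task drops the dateparser and pytz dependencies in favour of the standard library. As a plain-Python check (datetime and timedelta being exactly the names the tests now inject through the engine's globals), the two assignments evaluate to roughly "now plus 0.05 seconds" and a fixed naive timestamp:

from datetime import datetime, timedelta

# One second ahead, pulled back by 0.95 s: about 50 ms in the future.
futuredate = datetime.now() + timedelta(0, 1) - timedelta(seconds=.95)

# A fixed, timezone-naive timestamp replaces the parsed 'EDT' wall time.
futuredate2 = datetime.strptime('2021-09-01 10:00', '%Y-%m-%d %H:%M')

print(futuredate, futuredate2)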
@@ -1,7 +1,7 @@
 # -*- coding: utf-8 -*-
 import unittest
 
-from SpiffWorkflow.bpmn.specs.events import CancelEventDefinition, SignalEventDefinition
+from SpiffWorkflow.bpmn.specs.events.event_definitions import CancelEventDefinition, SignalEventDefinition
 from SpiffWorkflow.task import TaskState
 from SpiffWorkflow.bpmn.workflow import BpmnWorkflow
 from tests.SpiffWorkflow.bpmn.BpmnWorkflowTestCase import BpmnWorkflowTestCase
@@ -1,5 +1,6 @@
 # -*- coding: utf-8 -*-
 
+import datetime
 import unittest
 import time
 
@@ -21,7 +22,10 @@ class CustomScriptEngine(PythonScriptEngine):
     It will execute python code read in from the bpmn. It will also make any scripts in the
     scripts directory available for execution. """
     def __init__(self):
-        augment_methods = {'custom_function': my_custom_function}
+        augment_methods = {
+            'custom_function': my_custom_function,
+            'timedelta': datetime.timedelta,
+        }
         super().__init__(scripting_additions=augment_methods)
 
 
@@ -1,5 +1,6 @@
 # -*- coding: utf-8 -*-
 
+import datetime
 import unittest
 import time
 
@@ -21,7 +22,10 @@ class CustomScriptEngine(PythonScriptEngine):
     It will execute python code read in from the bpmn. It will also make any scripts in the
     scripts directory available for execution. """
     def __init__(self):
-        augment_methods = {'custom_function': my_custom_function}
+        augment_methods = {
+            'custom_function': my_custom_function,
+            'timedelta': datetime.timedelta,
+        }
         super().__init__(scripting_additions=augment_methods)
 
 
@@ -3,10 +3,10 @@
 import unittest
 import datetime
 import time
-import pytz
 
 from SpiffWorkflow.task import TaskState
 from SpiffWorkflow.bpmn.workflow import BpmnWorkflow
+from SpiffWorkflow.bpmn.PythonScriptEngine import PythonScriptEngine
 from tests.SpiffWorkflow.bpmn.BpmnWorkflowTestCase import BpmnWorkflowTestCase
 
 __author__ = 'kellym'
@@ -15,8 +15,12 @@ __author__ = 'kellym'
 class TimerDateTest(BpmnWorkflowTestCase):
 
     def setUp(self):
+        self.script_engine = PythonScriptEngine(default_globals={
+            "datetime": datetime.datetime,
+            "timedelta": datetime.timedelta,
+        })
         self.spec, self.subprocesses = self.load_workflow_spec('timer-date-start.bpmn', 'date_timer')
-        self.workflow = BpmnWorkflow(self.spec, self.subprocesses)
+        self.workflow = BpmnWorkflow(self.spec, self.subprocesses, script_engine=self.script_engine)
 
     def testRunThroughHappy(self):
         self.actual_test(save_restore=False)
@@ -42,6 +46,7 @@ class TimerDateTest(BpmnWorkflowTestCase):
                 break
             if save_restore:
                 self.save_restore()
+                self.workflow.script_engine = self.script_engine
 
 
         waiting_tasks = self.workflow.get_tasks(TaskState.WAITING)
@@ -50,8 +55,7 @@ class TimerDateTest(BpmnWorkflowTestCase):
             loopcount = loopcount +1
         endtime = datetime.datetime.now()
         self.workflow.do_engine_steps()
-        tz = pytz.timezone('US/Eastern')
-        testdate = tz.localize(datetime.datetime.strptime('2021-09-01 10:00','%Y-%m-%d %H:%M'))
+        testdate = datetime.datetime.strptime('2021-09-01 10:00','%Y-%m-%d %H:%M')
         self.assertEqual(self.workflow.last_task.data['futuredate2'],testdate)
         self.assertTrue('completed' in self.workflow.last_task.data)
         self.assertTrue(self.workflow.last_task.data['completed'])
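The save_restore() branches added in these tests all end the same way because the script engine is not part of the serialized workflow state: after a round trip through the serializer, a restored workflow comes back without the configured globals and must have the engine re-attached before timers or scripts can run again. A hedged sketch of that rule; resume() is a made-up helper, and the workflow argument is assumed to come back from a serializer round trip (the tests use BpmnWorkflowTestCase.save_restore() for this):

import datetime

from SpiffWorkflow.bpmn.PythonScriptEngine import PythonScriptEngine

script_engine = PythonScriptEngine(default_globals={
    "datetime": datetime.datetime,
    "timedelta": datetime.timedelta,
})


def resume(workflow):
    # Re-attach the engine to the deserialized workflow, then let any
    # waiting timer events fire and advance the engine.
    workflow.script_engine = script_engine
    workflow.refresh_waiting_tasks()
    workflow.do_engine_steps()
    return workflow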
@@ -3,10 +3,12 @@
 import unittest
 import datetime
 import time
+from datetime import timedelta
 
-from SpiffWorkflow.bpmn.specs.events import EndEvent
+from SpiffWorkflow.bpmn.specs.events.EndEvent import EndEvent
 from SpiffWorkflow.task import TaskState
 from SpiffWorkflow.bpmn.workflow import BpmnWorkflow
+from SpiffWorkflow.bpmn.PythonScriptEngine import PythonScriptEngine
 from tests.SpiffWorkflow.bpmn.BpmnWorkflowTestCase import BpmnWorkflowTestCase
 __author__ = 'kellym'
 
@@ -14,8 +16,9 @@ __author__ = 'kellym'
 class TimerDurationTest(BpmnWorkflowTestCase):
 
     def setUp(self):
+        self.script_engine = PythonScriptEngine(default_globals={"timedelta": timedelta})
         self.spec, self.subprocesses = self.load_workflow_spec('boundary_timer_on_task.bpmn', 'test_timer')
-        self.workflow = BpmnWorkflow(self.spec, self.subprocesses)
+        self.workflow = BpmnWorkflow(self.spec, self.subprocesses, script_engine=self.script_engine)
 
     def testRunThroughHappy(self):
         self.actual_test(save_restore=False)
@@ -43,9 +46,11 @@ class TimerDurationTest(BpmnWorkflowTestCase):
 
         starttime = datetime.datetime.now()
         self.workflow = BpmnWorkflow(self.spec)
+        self.workflow.script_engine = self.script_engine
         self.workflow.do_engine_steps()
         if save_restore:
             self.save_restore()
+            self.workflow.script_engine = self.script_engine
         time.sleep(0.1)
         self.workflow.refresh_waiting_tasks()
         self.workflow.do_engine_steps()
@@ -3,8 +3,10 @@
 import unittest
 import datetime
 import time
+from datetime import timedelta
 from SpiffWorkflow.task import TaskState
 from SpiffWorkflow.bpmn.workflow import BpmnWorkflow
+from SpiffWorkflow.bpmn.PythonScriptEngine import PythonScriptEngine
 from tests.SpiffWorkflow.bpmn.BpmnWorkflowTestCase import BpmnWorkflowTestCase
 
 __author__ = 'kellym'
@@ -13,8 +15,9 @@ __author__ = 'kellym'
 class TimerDurationTest(BpmnWorkflowTestCase):
 
     def setUp(self):
+        self.script_engine = PythonScriptEngine(default_globals={"timedelta": timedelta})
         self.spec, self.subprocesses = self.load_workflow_spec('timer.bpmn', 'timer')
-        self.workflow = BpmnWorkflow(self.spec, self.subprocesses)
+        self.workflow = BpmnWorkflow(self.spec, self.subprocesses, script_engine=self.script_engine)
 
     def testRunThroughHappy(self):
         self.actual_test(save_restore=False)
@@ -40,7 +43,9 @@ class TimerDurationTest(BpmnWorkflowTestCase):
         while loopcount < 10:
             if len(self.workflow.get_tasks(TaskState.READY)) >= 1:
                 break
-            if save_restore: self.save_restore()
+            if save_restore:
+                self.save_restore()
+                self.workflow.script_engine = self.script_engine
             self.assertEqual(1, len(self.workflow.get_tasks(TaskState.WAITING)))
             time.sleep(0.1)
             self.workflow.refresh_waiting_tasks()
@@ -1,12 +1,12 @@
 # -*- coding: utf-8 -*-
 import os
 
-from SpiffWorkflow.bpmn.serializer import BpmnWorkflowSerializer
+from SpiffWorkflow.bpmn.serializer.workflow import BpmnWorkflowSerializer
 from SpiffWorkflow.camunda.parser.CamundaParser import CamundaParser
-from SpiffWorkflow.camunda.serializer import UserTaskConverter, StartEventConverter, EndEventConverter, \
+from SpiffWorkflow.camunda.serializer.task_spec_converters import UserTaskConverter, StartEventConverter, EndEventConverter, \
     IntermediateCatchEventConverter, IntermediateThrowEventConverter, BoundaryEventConverter
 
-from SpiffWorkflow.dmn.serializer import BusinessRuleTaskConverter
+from SpiffWorkflow.dmn.serializer.task_spec_converters import BusinessRuleTaskConverter
 
 from tests.SpiffWorkflow.bpmn.BpmnWorkflowTestCase import BpmnWorkflowTestCase
 
@@ -3,8 +3,10 @@
 
 import unittest
 import time
+from datetime import timedelta
 from SpiffWorkflow.task import TaskState
 from SpiffWorkflow.bpmn.workflow import BpmnWorkflow
+from SpiffWorkflow.bpmn.PythonScriptEngine import PythonScriptEngine
 from .BaseTestCase import BaseTestCase
 
 __author__ = 'kellym'
@@ -13,8 +15,9 @@ __author__ = 'kellym'
 class MessageBoundaryTest(BaseTestCase):
 
     def setUp(self):
+        self.script_engine = PythonScriptEngine(default_globals={"timedelta": timedelta})
         self.spec, self.subprocesses = self.load_workflow_spec('MessageBoundary.bpmn', 'Process_1kjyavs')
-        self.workflow = BpmnWorkflow(self.spec, self.subprocesses)
+        self.workflow = BpmnWorkflow(self.spec, self.subprocesses, script_engine=self.script_engine)
 
     def testRunThroughHappy(self):
         self.actual_test(save_restore=False)
@@ -41,7 +44,9 @@ class MessageBoundaryTest(BaseTestCase):
             self.workflow.do_engine_steps()
             time.sleep(.01)
             self.workflow.refresh_waiting_tasks()
-            if save_restore: self.save_restore()
+            if save_restore:
+                self.save_restore()
+                self.workflow.script_engine = self.script_engine
             ready_tasks = self.workflow.get_tasks(TaskState.READY)
             time.sleep(.01)
             self.workflow.refresh_waiting_tasks()
@@ -3,7 +3,8 @@ import unittest
 
 from SpiffWorkflow.camunda.specs.UserTask import FormField, UserTask, Form, \
     EnumFormField
-from SpiffWorkflow.specs import WorkflowSpec, TaskSpec
+from SpiffWorkflow.specs.base import TaskSpec
+from SpiffWorkflow.specs.WorkflowSpec import WorkflowSpec
 
 
 class UserTaskSpecTest(unittest.TestCase):
@@ -1,6 +1,11 @@
 # -*- coding: utf-8 -*-
 
-from SpiffWorkflow.specs import ExclusiveChoice, Join, MultiChoice, MultiInstance, Simple, WorkflowSpec
+from SpiffWorkflow.specs.ExclusiveChoice import ExclusiveChoice
+from SpiffWorkflow.specs.Join import Join
+from SpiffWorkflow.specs.MultiChoice import MultiChoice
+from SpiffWorkflow.specs.MultiInstance import MultiInstance
+from SpiffWorkflow.specs.Simple import Simple
+from SpiffWorkflow.specs.WorkflowSpec import WorkflowSpec
 from SpiffWorkflow.operators import Attrib, Equal, NotEqual
 
 
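Aside: the long run of import rewrites above and below is mechanical. The package-level re-exports (from SpiffWorkflow.specs import ...) were evidently removed upstream, so each class is now imported from its defining module, and TaskSpec moved to SpiffWorkflow.specs.base. A tiny sketch of a spec built with the new-style imports, assuming the classic 1.x API these tests exercise; the task name 'do_something' is hypothetical:

from SpiffWorkflow.specs.Simple import Simple
from SpiffWorkflow.specs.WorkflowSpec import WorkflowSpec
from SpiffWorkflow.workflow import Workflow

spec = WorkflowSpec()
task = Simple(spec, 'do_something')  # a no-op task spec
spec.start.connect(task)

workflow = Workflow(spec)
workflow.complete_all()              # run every runnable task
print(workflow.is_completed())       # expected: True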
@@ -1,3 +1,4 @@
+import datetime
 from decimal import Decimal
 
 from SpiffWorkflow.bpmn.PythonScriptEngine import PythonScriptEngine
@@ -7,4 +8,5 @@ from ..DecisionRunner import DecisionRunner
 class PythonDecisionRunner(DecisionRunner):
 
     def __init__(self, filename):
-        super().__init__(PythonScriptEngine(scripting_additions={'Decimal': Decimal}), filename, 'python_engine')
+        scripting_additions={'Decimal': Decimal, 'datetime': datetime}
+        super().__init__(PythonScriptEngine(scripting_additions=scripting_additions), filename, 'python_engine')
@@ -11,7 +11,7 @@ sys.path.insert(0, os.path.join(dirname, '..'))
 
 from PatternTest import run_workflow, PatternTest
 from SpiffWorkflow.serializer.base import Serializer
-from SpiffWorkflow.specs import WorkflowSpec
+from SpiffWorkflow.specs.WorkflowSpec import WorkflowSpec
 from SpiffWorkflow.workflow import Workflow
 from SpiffWorkflow.serializer.exceptions import TaskNotSupportedError
 
@@ -6,7 +6,8 @@ import unittest
 import pickle
 sys.path.insert(0, os.path.join(os.path.dirname(__file__), '..', '..', '..'))
 from .TaskSpecTest import TaskSpecTest
-from SpiffWorkflow.specs import Celery, WorkflowSpec
+from SpiffWorkflow.specs.Celery import Celery
+from SpiffWorkflow.specs.WorkflowSpec import WorkflowSpec
 from SpiffWorkflow.operators import Attrib
 from SpiffWorkflow.serializer.dict import DictionarySerializer
 from base64 import b64encode
@@ -10,7 +10,7 @@ sys.path.insert(0, os.path.join(os.path.dirname(__file__), '..', '..', '..'))
 from tests.SpiffWorkflow.util import run_workflow
 from .TaskSpecTest import TaskSpecTest
 from SpiffWorkflow.task import TaskState
-from SpiffWorkflow.specs import Execute
+from SpiffWorkflow.specs.Execute import Execute
 
 
 class ExecuteTest(TaskSpecTest):
@@ -8,7 +8,7 @@ import unittest
 sys.path.insert(0, os.path.join(os.path.dirname(__file__), '..', '..', '..'))
 
 from .TaskSpecTest import TaskSpecTest
-from SpiffWorkflow.specs import Join
+from SpiffWorkflow.specs.Join import Join
 
 
 class JoinTest(TaskSpecTest):
@@ -8,7 +8,9 @@ import unittest
 sys.path.insert(0, os.path.join(os.path.dirname(__file__), '..', '..', '..'))
 
 from .JoinTest import JoinTest
-from SpiffWorkflow.specs import Merge, WorkflowSpec, Simple
+from SpiffWorkflow.specs.Merge import Merge
+from SpiffWorkflow.specs.WorkflowSpec import WorkflowSpec
+from SpiffWorkflow.specs.Simple import Simple
 from SpiffWorkflow.workflow import Workflow
 
 
@@ -6,7 +6,7 @@ import os
 
 sys.path.insert(0, os.path.join(os.path.dirname(__file__), '..', '..', '..'))
 
-from SpiffWorkflow.specs import WorkflowSpec
+from SpiffWorkflow.specs.WorkflowSpec import WorkflowSpec
 from SpiffWorkflow.specs.SubWorkflow import SubWorkflow
 from SpiffWorkflow.serializer.prettyxml import XmlSerializer
 from SpiffWorkflow.task import TaskState
@@ -5,9 +5,11 @@ import unittest
 import os
 sys.path.insert(0, os.path.join(os.path.dirname(__file__), '..', '..', '..'))
 
-from SpiffWorkflow.specs import WorkflowSpec, Simple, Join
+from SpiffWorkflow.specs.Join import Join
+from SpiffWorkflow.specs.Simple import Simple
+from SpiffWorkflow.specs.WorkflowSpec import WorkflowSpec
 from SpiffWorkflow.exceptions import WorkflowException
-from SpiffWorkflow.specs import TaskSpec
+from SpiffWorkflow.specs.base import TaskSpec
 from SpiffWorkflow.serializer.dict import DictionarySerializer
 
 
@@ -9,7 +9,8 @@ sys.path.insert(0, os.path.join(os.path.dirname(__file__), '..', '..', '..'))
 
 from tests.SpiffWorkflow.util import run_workflow
 from .TaskSpecTest import TaskSpecTest
-from SpiffWorkflow.specs import Transform, Simple
+from SpiffWorkflow.specs.Transform import Transform
+from SpiffWorkflow.specs.Simple import Simple
 
 
 class TransformTest(TaskSpecTest):
@@ -15,7 +15,8 @@ try:
 except ImportError as e:
     from tests.SpiffWorkflow.util import track_workflow
 from SpiffWorkflow.workflow import Workflow
-from SpiffWorkflow.specs import Join, WorkflowSpec
+from SpiffWorkflow.specs.Join import Join
+from SpiffWorkflow.specs.WorkflowSpec import WorkflowSpec
 from SpiffWorkflow.serializer.prettyxml import XmlSerializer
 
 serializer = XmlSerializer()
@ -1,8 +1,8 @@
|
|||||||
# -*- coding: utf-8 -*-
|
# -*- coding: utf-8 -*-
|
||||||
import os
|
import os
|
||||||
|
|
||||||
from SpiffWorkflow.spiff.parser import SpiffBpmnParser, VALIDATOR
|
from SpiffWorkflow.spiff.parser.process import SpiffBpmnParser, VALIDATOR
|
||||||
from SpiffWorkflow.spiff.serializer import NoneTaskConverter, \
|
from SpiffWorkflow.spiff.serializer.task_spec_converters import NoneTaskConverter, \
|
||||||
ManualTaskConverter, UserTaskConverter, ScriptTaskConverter, \
|
ManualTaskConverter, UserTaskConverter, ScriptTaskConverter, \
|
||||||
SubWorkflowTaskConverter, TransactionSubprocessConverter, \
|
SubWorkflowTaskConverter, TransactionSubprocessConverter, \
|
||||||
CallActivityTaskConverter, \
|
CallActivityTaskConverter, \
|
||||||
@ -11,7 +11,7 @@ from SpiffWorkflow.spiff.serializer import NoneTaskConverter, \
|
|||||||
IntermediateCatchEventConverter, IntermediateThrowEventConverter, \
|
IntermediateCatchEventConverter, IntermediateThrowEventConverter, \
|
||||||
ServiceTaskConverter
|
ServiceTaskConverter
|
||||||
from SpiffWorkflow.dmn.serializer.task_spec_converters import BusinessRuleTaskConverter
|
from SpiffWorkflow.dmn.serializer.task_spec_converters import BusinessRuleTaskConverter
|
||||||
from SpiffWorkflow.bpmn.serializer import BpmnWorkflowSerializer
|
from SpiffWorkflow.bpmn.serializer.workflow import BpmnWorkflowSerializer
|
||||||
|
|
||||||
from tests.SpiffWorkflow.bpmn.BpmnWorkflowTestCase import BpmnWorkflowTestCase
|
from tests.SpiffWorkflow.bpmn.BpmnWorkflowTestCase import BpmnWorkflowTestCase
|
||||||
|
|
||||||
|
flask-bpmn/poetry.lock (generated, 18 lines changed)
@@ -667,7 +667,7 @@ SQLAlchemy = ">=0.8.0"
 
 [[package]]
 name = "furo"
-version = "2022.6.21"
+version = "2022.9.29"
 description = "A clean customisable Sphinx documentation theme."
 category = "dev"
 optional = false
@@ -675,7 +675,7 @@ python-versions = ">=3.7"
 
 [package.dependencies]
 beautifulsoup4 = "*"
-pygments = "*"
+pygments = ">=2.7"
 sphinx = ">=4.0,<6.0"
 sphinx-basic-ng = "*"
 
@@ -1206,7 +1206,7 @@ tzdata = {version = "*", markers = "python_version >= \"3.6\""}
 
 [[package]]
 name = "pyupgrade"
-version = "3.1.0"
+version = "3.2.0"
 description = "A tool to automatically upgrade syntax for newer versions."
 category = "dev"
 optional = false
@@ -1578,7 +1578,7 @@ pytz = "*"
 type = "git"
 url = "https://github.com/sartography/SpiffWorkflow"
 reference = "main"
-resolved_reference = "2d3bd00854ab483e823c4b386430abc9267f536b"
+resolved_reference = "5cdb881edc4621502bfd61ce67565cf1148199f0"
 
 [[package]]
 name = "sqlalchemy"
@@ -1853,7 +1853,7 @@ testing = ["func-timeout", "jaraco.itertools", "pytest (>=6)", "pytest-black (>=
 [metadata]
 lock-version = "1.1"
 python-versions = "^3.7"
-content-hash = "f5c0fcc30ff491c23da05e4d24c2dc9c66f43a2dfde028345f9dffd5e91f3f0a"
+content-hash = "7d1d5e13f2546566277c6f0b5935753c89804db2abb7a1e76498b582f40f9a01"
 
 [metadata.files]
 alabaster = [
@@ -2191,8 +2191,8 @@ flask-sqlalchemy = [
     {file = "Flask_SQLAlchemy-2.5.1-py2.py3-none-any.whl", hash = "sha256:f12c3d4cc5cc7fdcc148b9527ea05671718c3ea45d50c7e732cceb33f574b390"},
 ]
 furo = [
-    {file = "furo-2022.6.21-py3-none-any.whl", hash = "sha256:061b68e323345e27fcba024cf33a1e77f3dfd8d9987410be822749a706e2add6"},
-    {file = "furo-2022.6.21.tar.gz", hash = "sha256:9aa983b7488a4601d13113884bfb7254502c8729942e073a0acb87a5512af223"},
+    {file = "furo-2022.9.29-py3-none-any.whl", hash = "sha256:559ee17999c0f52728481dcf6b1b0cf8c9743e68c5e3a18cb45a7992747869a9"},
+    {file = "furo-2022.9.29.tar.gz", hash = "sha256:d4238145629c623609c2deb5384f8d036e2a1ee2a101d64b67b4348112470dbd"},
 ]
 gitdb = [
     {file = "gitdb-4.0.9-py3-none-any.whl", hash = "sha256:8033ad4e853066ba6ca92050b9df2f89301b8fc8bf7e9324d412a63f8bf1a8fd"},
@@ -2608,8 +2608,8 @@ pytz-deprecation-shim = [
     {file = "pytz_deprecation_shim-0.1.0.post0.tar.gz", hash = "sha256:af097bae1b616dde5c5744441e2ddc69e74dfdcb0c263129610d85b87445a59d"},
 ]
 pyupgrade = [
-    {file = "pyupgrade-3.1.0-py2.py3-none-any.whl", hash = "sha256:77c6101a710be3e24804891e43388cedbee617258e93b09c8c5e58de08617758"},
-    {file = "pyupgrade-3.1.0.tar.gz", hash = "sha256:7a8d393d85e15e0e2753e90b7b2e173b9d29dfd71e61f93d93e985b242627ed3"},
+    {file = "pyupgrade-3.2.0-py2.py3-none-any.whl", hash = "sha256:2aa6c40e49ea5a350e6e45b8c7847b1741aef274a35d4f0b2bf91731ec8ab796"},
+    {file = "pyupgrade-3.2.0.tar.gz", hash = "sha256:70e1ac1e6b34a90fb21f5cada1907ef035b12dfc1d9f13cefd367acf3b530310"},
 ]
 pyyaml = [
     {file = "PyYAML-6.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:d4db7c7aef085872ef65a8fd7d6d09a14ae91f691dec3e87ee5ee0539d516f53"},
@@ -61,7 +61,7 @@ reorder-python-imports = "^3.9.0"
 pre-commit-hooks = "^4.3.0"
 sphinx-click = "^4.3.0"
 Pygments = "^2.13.0"
-pyupgrade = "^3.1.0"
+pyupgrade = "^3.2.0"
 furo = ">=2021.11.12"
 MonkeyType = "^22.2.0"
 
@@ -1,8 +1,8 @@
 """empty message
 
-Revision ID: 3bd6b0b1b8ae
+Revision ID: b1647eff45c9
 Revises:
-Create Date: 2022-10-25 12:31:50.177599
+Create Date: 2022-11-02 14:25:09.992800
 
 """
 from alembic import op
@@ -10,7 +10,7 @@ import sqlalchemy as sa
 
 
 # revision identifiers, used by Alembic.
-revision = '3bd6b0b1b8ae'
+revision = 'b1647eff45c9'
 down_revision = None
 branch_labels = None
 depends_on = None
@@ -18,13 +18,6 @@ depends_on = None
 
 def upgrade():
     # ### commands auto generated by Alembic - please adjust! ###
-    op.create_table('admin_session',
-    sa.Column('id', sa.Integer(), nullable=False),
-    sa.Column('token', sa.String(length=50), nullable=True),
-    sa.Column('admin_impersonate_uid', sa.String(length=50), nullable=True),
-    sa.PrimaryKeyConstraint('id'),
-    sa.UniqueConstraint('token')
-    )
     op.create_table('bpmn_process_id_lookup',
     sa.Column('id', sa.Integer(), nullable=False),
     sa.Column('bpmn_process_identifier', sa.String(length=255), nullable=True),
@@ -113,6 +106,7 @@ def upgrade():
     sa.Column('status', sa.String(length=50), nullable=True),
    sa.Column('bpmn_version_control_type', sa.String(length=50), nullable=True),
     sa.Column('bpmn_version_control_identifier', sa.String(length=255), nullable=True),
+    sa.Column('spiff_step', sa.Integer(), nullable=True),
     sa.ForeignKeyConstraint(['process_initiator_id'], ['user.id'], ),
     sa.PrimaryKeyConstraint('id')
     )
@@ -183,25 +177,6 @@ def upgrade():
     sa.PrimaryKeyConstraint('id'),
     sa.UniqueConstraint('task_id', 'process_instance_id', name='active_task_unique')
     )
-    op.create_table('file',
-    sa.Column('id', sa.Integer(), nullable=False),
-    sa.Column('name', sa.String(length=50), nullable=False),
-    sa.Column('type', sa.String(length=50), nullable=False),
-    sa.Column('content_type', sa.String(length=50), nullable=False),
-    sa.Column('process_instance_id', sa.Integer(), nullable=True),
-    sa.Column('task_spec', sa.String(length=50), nullable=True),
-    sa.Column('irb_doc_code', sa.String(length=50), nullable=False),
-    sa.Column('md5_hash', sa.String(length=50), nullable=False),
-    sa.Column('data', sa.LargeBinary(), nullable=True),
-    sa.Column('size', sa.Integer(), nullable=True),
-    sa.Column('updated_at_in_seconds', sa.Integer(), nullable=True),
-    sa.Column('created_at_in_seconds', sa.Integer(), nullable=True),
-    sa.Column('user_uid', sa.String(length=50), nullable=True),
-    sa.Column('archived', sa.Boolean(), nullable=True),
-    sa.ForeignKeyConstraint(['process_instance_id'], ['process_instance.id'], ),
-    sa.ForeignKeyConstraint(['user_uid'], ['user.uid'], ),
-    sa.PrimaryKeyConstraint('id')
-    )
     op.create_table('message_correlation',
     sa.Column('id', sa.Integer(), nullable=False),
     sa.Column('process_instance_id', sa.Integer(), nullable=False),
@@ -255,30 +230,20 @@ def upgrade():
     sa.Column('timestamp', sa.DECIMAL(precision=17, scale=6), nullable=False),
     sa.Column('message', sa.String(length=255), nullable=True),
     sa.Column('current_user_id', sa.Integer(), nullable=True),
+    sa.Column('spiff_step', sa.Integer(), nullable=False),
     sa.ForeignKeyConstraint(['current_user_id'], ['user.id'], ),
     sa.ForeignKeyConstraint(['process_instance_id'], ['process_instance.id'], ),
     sa.PrimaryKeyConstraint('id')
     )
-    op.create_table('task_event',
+    op.create_table('spiff_step_details',
     sa.Column('id', sa.Integer(), nullable=False),
-    sa.Column('user_id', sa.Integer(), nullable=False),
     sa.Column('process_instance_id', sa.Integer(), nullable=False),
-    sa.Column('spec_version', sa.String(length=50), nullable=True),
-    sa.Column('action', sa.String(length=50), nullable=True),
-    sa.Column('task_id', sa.String(length=50), nullable=True),
-    sa.Column('task_name', sa.String(length=50), nullable=True),
-    sa.Column('task_title', sa.String(length=50), nullable=True),
-    sa.Column('task_type', sa.String(length=50), nullable=True),
-    sa.Column('task_state', sa.String(length=50), nullable=True),
-    sa.Column('task_lane', sa.String(length=50), nullable=True),
-    sa.Column('form_data', sa.JSON(), nullable=True),
-    sa.Column('mi_type', sa.String(length=50), nullable=True),
-    sa.Column('mi_count', sa.Integer(), nullable=True),
-    sa.Column('mi_index', sa.Integer(), nullable=True),
-    sa.Column('process_name', sa.String(length=50), nullable=True),
-    sa.Column('date', sa.DateTime(timezone=True), nullable=True),
+    sa.Column('spiff_step', sa.Integer(), nullable=False),
+    sa.Column('task_json', sa.JSON(), nullable=False),
+    sa.Column('timestamp', sa.DECIMAL(precision=17, scale=6), nullable=False),
+    sa.Column('completed_by_user_id', sa.Integer(), nullable=True),
+    sa.ForeignKeyConstraint(['completed_by_user_id'], ['user.id'], ),
     sa.ForeignKeyConstraint(['process_instance_id'], ['process_instance.id'], ),
-    sa.ForeignKeyConstraint(['user_id'], ['user.id'], ),
     sa.PrimaryKeyConstraint('id')
     )
     op.create_table('active_task_user',
@@ -292,19 +257,6 @@ def upgrade():
     )
     op.create_index(op.f('ix_active_task_user_active_task_id'), 'active_task_user', ['active_task_id'], unique=False)
     op.create_index(op.f('ix_active_task_user_user_id'), 'active_task_user', ['user_id'], unique=False)
-    op.create_table('data_store',
-    sa.Column('id', sa.Integer(), nullable=False),
-    sa.Column('updated_at_in_seconds', sa.Integer(), nullable=True),
-    sa.Column('key', sa.String(length=50), nullable=False),
-    sa.Column('process_instance_id', sa.Integer(), nullable=True),
-    sa.Column('task_spec', sa.String(length=50), nullable=True),
-    sa.Column('spec_id', sa.String(length=50), nullable=True),
-    sa.Column('user_id', sa.String(length=50), nullable=True),
-    sa.Column('file_id', sa.Integer(), nullable=True),
-    sa.Column('value', sa.String(length=50), nullable=True),
-    sa.ForeignKeyConstraint(['file_id'], ['file.id'], ),
-    sa.PrimaryKeyConstraint('id')
-    )
     op.create_table('message_correlation_message_instance',
     sa.Column('id', sa.Integer(), nullable=False),
     sa.Column('message_instance_id', sa.Integer(), nullable=False),
@@ -324,11 +276,10 @@ def downgrade():
     op.drop_index(op.f('ix_message_correlation_message_instance_message_instance_id'), table_name='message_correlation_message_instance')
     op.drop_index(op.f('ix_message_correlation_message_instance_message_correlation_id'), table_name='message_correlation_message_instance')
     op.drop_table('message_correlation_message_instance')
-    op.drop_table('data_store')
     op.drop_index(op.f('ix_active_task_user_user_id'), table_name='active_task_user')
     op.drop_index(op.f('ix_active_task_user_active_task_id'), table_name='active_task_user')
     op.drop_table('active_task_user')
-    op.drop_table('task_event')
+    op.drop_table('spiff_step_details')
     op.drop_table('spiff_logging')
     op.drop_table('permission_assignment')
     op.drop_table('message_instance')
@@ -337,7 +288,6 @@ def downgrade():
     op.drop_index(op.f('ix_message_correlation_name'), table_name='message_correlation')
     op.drop_index(op.f('ix_message_correlation_message_correlation_property_id'), table_name='message_correlation')
     op.drop_table('message_correlation')
-    op.drop_table('file')
     op.drop_table('active_task')
     op.drop_table('user_group_assignment')
     op.drop_table('secret')
@@ -363,5 +313,4 @@ def downgrade():
     op.drop_table('group')
     op.drop_index(op.f('ix_bpmn_process_id_lookup_bpmn_process_identifier'), table_name='bpmn_process_id_lookup')
     op.drop_table('bpmn_process_id_lookup')
-    op.drop_table('admin_session')
     # ### end Alembic commands ###
spiffworkflow-backend/poetry.lock (generated, 74 lines changed)
@@ -95,7 +95,7 @@ python-versions = ">=3.5"
 dev = ["cloudpickle", "coverage[toml] (>=5.0.2)", "furo", "hypothesis", "mypy (>=0.900,!=0.940)", "pre-commit", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins", "sphinx", "sphinx-notfound-page", "zope.interface"]
 docs = ["furo", "sphinx", "sphinx-notfound-page", "zope.interface"]
 tests = ["cloudpickle", "coverage[toml] (>=5.0.2)", "hypothesis", "mypy (>=0.900,!=0.940)", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins", "zope.interface"]
-tests-no-zope = ["cloudpickle", "coverage[toml] (>=5.0.2)", "hypothesis", "mypy (>=0.900,!=0.940)", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins"]
+tests_no_zope = ["cloudpickle", "coverage[toml] (>=5.0.2)", "hypothesis", "mypy (>=0.900,!=0.940)", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins"]
 
 [[package]]
 name = "Babel"
@@ -268,7 +268,7 @@ optional = false
 python-versions = ">=3.6.0"
 
 [package.extras]
-unicode-backport = ["unicodedata2"]
+unicode_backport = ["unicodedata2"]
 
 [[package]]
 name = "classify-imports"
@@ -410,7 +410,7 @@ python-versions = ">=3.6,<4.0"
 
 [[package]]
 name = "dateparser"
-version = "1.1.1"
+version = "1.1.2"
 description = "Date parsing library designed to parse dates from HTML pages"
 category = "main"
 optional = false
@@ -639,7 +639,7 @@ werkzeug = "*"
 type = "git"
 url = "https://github.com/sartography/flask-bpmn"
 reference = "main"
-resolved_reference = "cedc5253add81a18a274f2cd3289fe36bb138f8b"
+resolved_reference = "191f0f32798720c9ce1e5307732c90ac26433298"
 
 [[package]]
 name = "Flask-Cors"
@@ -820,7 +820,7 @@ python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*"
 
 [[package]]
 name = "importlib-metadata"
-version = "5.0.0"
+version = "4.13.0"
 description = "Read metadata from Python packages"
 category = "main"
 optional = false
@@ -1441,7 +1441,7 @@ docs = ["Sphinx (>=5.0.2,<6.0.0)", "alabaster (>=0.7.12,<0.8.0)", "commonmark (>
 
 [[package]]
 name = "pytz"
-version = "2022.5"
+version = "2022.6"
 description = "World timezone definitions, modern and historical"
 category = "main"
 optional = false
@@ -1512,7 +1512,7 @@ urllib3 = ">=1.21.1,<1.27"
 
 [package.extras]
 socks = ["PySocks (>=1.5.6,!=1.5.7)"]
-use-chardet-on-py3 = ["chardet (>=3.0.2,<6)"]
+use_chardet_on_py3 = ["chardet (>=3.0.2,<6)"]
 
 [[package]]
 name = "requests-toolbelt"
@@ -1625,7 +1625,7 @@ falcon = ["falcon (>=1.4)"]
 fastapi = ["fastapi (>=0.79.0)"]
 flask = ["blinker (>=1.1)", "flask (>=0.11)"]
 httpx = ["httpx (>=0.16.0)"]
-pure-eval = ["asttokens", "executing", "pure-eval"]
+pure_eval = ["asttokens", "executing", "pure-eval"]
 pyspark = ["pyspark (>=2.4.4)"]
 quart = ["blinker (>=1.1)", "quart (>=0.16.1)"]
 rq = ["rq (>=0.6)"]
@@ -1865,15 +1865,13 @@ develop = false
 [package.dependencies]
 celery = "*"
 configparser = "*"
-dateparser = "*"
 lxml = "*"
-pytz = "*"
 
 [package.source]
 type = "git"
 url = "https://github.com/sartography/SpiffWorkflow"
 reference = "main"
-resolved_reference = "2d3bd00854ab483e823c4b386430abc9267f536b"
+resolved_reference = "8d820dce1f439bb76bc07e39629832d998d6f634"
 
 [[package]]
 name = "SQLAlchemy"
@@ -1891,19 +1889,19 @@ aiomysql = ["aiomysql", "greenlet (!=0.4.17)"]
 aiosqlite = ["aiosqlite", "greenlet (!=0.4.17)", "typing_extensions (!=3.10.0.1)"]
 asyncio = ["greenlet (!=0.4.17)"]
 asyncmy = ["asyncmy (>=0.2.3,!=0.2.4)", "greenlet (!=0.4.17)"]
-mariadb-connector = ["mariadb (>=1.0.1,!=1.1.2)"]
+mariadb_connector = ["mariadb (>=1.0.1,!=1.1.2)"]
 mssql = ["pyodbc"]
-mssql-pymssql = ["pymssql"]
-mssql-pyodbc = ["pyodbc"]
+mssql_pymssql = ["pymssql"]
+mssql_pyodbc = ["pyodbc"]
 mypy = ["mypy (>=0.910)", "sqlalchemy2-stubs"]
 mysql = ["mysqlclient (>=1.4.0)", "mysqlclient (>=1.4.0,<2)"]
-mysql-connector = ["mysql-connector-python"]
+mysql_connector = ["mysql-connector-python"]
 oracle = ["cx_oracle (>=7)", "cx_oracle (>=7,<8)"]
 postgresql = ["psycopg2 (>=2.7)"]
-postgresql-asyncpg = ["asyncpg", "greenlet (!=0.4.17)"]
-postgresql-pg8000 = ["pg8000 (>=1.16.6,!=1.29.0)"]
-postgresql-psycopg2binary = ["psycopg2-binary"]
-postgresql-psycopg2cffi = ["psycopg2cffi"]
+postgresql_asyncpg = ["asyncpg", "greenlet (!=0.4.17)"]
+postgresql_pg8000 = ["pg8000 (>=1.16.6,!=1.29.0)"]
+postgresql_psycopg2binary = ["psycopg2-binary"]
+postgresql_psycopg2cffi = ["psycopg2cffi"]
 pymysql = ["pymysql", "pymysql (<1)"]
 sqlcipher = ["sqlcipher3_binary"]
 
@@ -2000,6 +1998,14 @@ category = "main"
 optional = false
 python-versions = "*"
 
+[[package]]
+name = "types-dateparser"
+version = "1.1.4.1"
+description = "Typing stubs for dateparser"
+category = "main"
+optional = false
+python-versions = "*"
+
 [[package]]
 name = "types-Flask"
 version = "1.1.6"
@@ -2248,7 +2254,7 @@ testing = ["flake8 (<5)", "func-timeout", "jaraco.functools", "jaraco.itertools"
 [metadata]
 lock-version = "1.1"
 python-versions = ">=3.9,<3.11"
-content-hash = "bfb51ebc4ef76d4a74f670f44dc4d7ca7e91874b096f56521c2776f1837f6a63"
+content-hash = "995be3a9a60b515b281f017ff32ff27a52ca178b1980611b348dccac6afb6b89"
 
 [metadata.files]
 alabaster = [
@@ -2454,8 +2460,8 @@ darglint = [
     {file = "darglint-1.8.1.tar.gz", hash = "sha256:080d5106df149b199822e7ee7deb9c012b49891538f14a11be681044f0bb20da"},
 ]
 dateparser = [
-    {file = "dateparser-1.1.1-py2.py3-none-any.whl", hash = "sha256:9600874312ff28a41f96ec7ccdc73be1d1c44435719da47fea3339d55ff5a628"},
-    {file = "dateparser-1.1.1.tar.gz", hash = "sha256:038196b1f12c7397e38aad3d61588833257f6f552baa63a1499e6987fa8d42d9"},
+    {file = "dateparser-1.1.2-py2.py3-none-any.whl", hash = "sha256:d31659dc806a7d88e2b510b2c74f68b525ae531f145c62a57a99bd616b7f90cf"},
+    {file = "dateparser-1.1.2.tar.gz", hash = "sha256:3821bf191f95b2658c4abd91571c09821ce7a2bc179bf6cefd8b4515c3ccf9ef"},
 ]
 distlib = [
     {file = "distlib-0.3.6-py2.py3-none-any.whl", hash = "sha256:f35c4b692542ca110de7ef0bea44d73981caeb34ca0b9b6b2e6d7790dda8f80e"},
@@ -2613,7 +2619,6 @@ greenlet = [
     {file = "greenlet-1.1.3.post0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:0a954002064ee919b444b19c1185e8cce307a1f20600f47d6f4b6d336972c809"},
     {file = "greenlet-1.1.3.post0-cp39-cp39-win32.whl", hash = "sha256:2ccdc818cc106cc238ff7eba0d71b9c77be868fdca31d6c3b1347a54c9b187b2"},
     {file = "greenlet-1.1.3.post0-cp39-cp39-win_amd64.whl", hash = "sha256:91a84faf718e6f8b888ca63d0b2d6d185c8e2a198d2a7322d75c303e7097c8b7"},
-    {file = "greenlet-1.1.3.post0.tar.gz", hash = "sha256:f5e09dc5c6e1796969fd4b775ea1417d70e49a5df29aaa8e5d10675d9e11872c"},
 ]
 gunicorn = [
     {file = "gunicorn-20.1.0-py3-none-any.whl", hash = "sha256:9dcc4547dbb1cb284accfb15ab5667a0e5d1881cc443e0677b4882a4067a807e"},
@@ -2632,8 +2637,8 @@ imagesize = [
     {file = "imagesize-1.4.1.tar.gz", hash = "sha256:69150444affb9cb0d5cc5a92b3676f0b2fb7cd9ae39e947a5e11a36b4497cd4a"},
 ]
 importlib-metadata = [
-    {file = "importlib_metadata-5.0.0-py3-none-any.whl", hash = "sha256:ddb0e35065e8938f867ed4928d0ae5bf2a53b7773871bfe6bcc7e4fcdc7dea43"},
-    {file = "importlib_metadata-5.0.0.tar.gz", hash = "sha256:da31db32b304314d044d3c12c79bd59e307889b287ad12ff387b3500835fc2ab"},
+    {file = "importlib_metadata-4.13.0-py3-none-any.whl", hash = "sha256:8a8a81bcf996e74fee46f0d16bd3eaa382a7eb20fd82445c3ad11f4090334116"},
+    {file = "importlib_metadata-4.13.0.tar.gz", hash = "sha256:dd0173e8f150d6815e098fd354f6414b0f079af4644ddfe90c71e2fc6174346d"},
 ]
 inflection = [
     {file = "inflection-0.5.1-py2.py3-none-any.whl", hash = "sha256:f38b2b640938a4f35ade69ac3d053042959b62a0f1076a5bbaa1b9526605a8a2"},
@@ -3051,7 +3056,18 @@ py = [
     {file = "py-1.11.0.tar.gz", hash = "sha256:51c75c4126074b472f746a24399ad32f6053d1b34b68d2fa41e558e6f4a98719"},
 ]
 pyasn1 = [
+    {file = "pyasn1-0.4.8-py2.4.egg", hash = "sha256:fec3e9d8e36808a28efb59b489e4528c10ad0f480e57dcc32b4de5c9d8c9fdf3"},
+    {file = "pyasn1-0.4.8-py2.5.egg", hash = "sha256:0458773cfe65b153891ac249bcf1b5f8f320b7c2ce462151f8fa74de8934becf"},
+    {file = "pyasn1-0.4.8-py2.6.egg", hash = "sha256:5c9414dcfede6e441f7e8f81b43b34e834731003427e5b09e4e00e3172a10f00"},
+    {file = "pyasn1-0.4.8-py2.7.egg", hash = "sha256:6e7545f1a61025a4e58bb336952c5061697da694db1cae97b116e9c46abcf7c8"},
     {file = "pyasn1-0.4.8-py2.py3-none-any.whl", hash = "sha256:39c7e2ec30515947ff4e87fb6f456dfc6e84857d34be479c9d4a4ba4bf46aa5d"},
+    {file = "pyasn1-0.4.8-py3.1.egg", hash = "sha256:78fa6da68ed2727915c4767bb386ab32cdba863caa7dbe473eaae45f9959da86"},
+    {file = "pyasn1-0.4.8-py3.2.egg", hash = "sha256:08c3c53b75eaa48d71cf8c710312316392ed40899cb34710d092e96745a358b7"},
+    {file = "pyasn1-0.4.8-py3.3.egg", hash = "sha256:03840c999ba71680a131cfaee6fab142e1ed9bbd9c693e285cc6aca0d555e576"},
+    {file = "pyasn1-0.4.8-py3.4.egg", hash = "sha256:7ab8a544af125fb704feadb008c99a88805126fb525280b2270bb25cc1d78a12"},
+    {file = "pyasn1-0.4.8-py3.5.egg", hash = "sha256:e89bf84b5437b532b0803ba5c9a5e054d21fec423a89952a74f87fa2c9b7bce2"},
+    {file = "pyasn1-0.4.8-py3.6.egg", hash = "sha256:014c0e9976956a08139dc0712ae195324a75e142284d5f87f1a87ee1b068a359"},
+    {file = "pyasn1-0.4.8-py3.7.egg", hash = "sha256:99fcc3c8d804d1bc6d9a099921e39d827026409a58f2a720dcdb89374ea0c776"},
     {file = "pyasn1-0.4.8.tar.gz", hash = "sha256:aef77c9fb94a3ac588e87841208bdec464471d9871bd5050a287cc9a475cd0ba"},
 ]
 pycodestyle = [
@@ -3130,8 +3146,8 @@ python-keycloak = [
     {file = "python_keycloak-2.6.0-py3-none-any.whl", hash = "sha256:a1ce102b978beb56d385319b3ca20992b915c2c12d15a2d0c23f1104882f3fb6"},
 ]
 pytz = [
-    {file = "pytz-2022.5-py2.py3-none-any.whl", hash = "sha256:335ab46900b1465e714b4fda4963d87363264eb662aab5e65da039c25f1f5b22"},
-    {file = "pytz-2022.5.tar.gz", hash = "sha256:c4d88f472f54d615e9cd582a5004d1e5f624854a6a27a6211591c251f22a6914"},
+    {file = "pytz-2022.6-py2.py3-none-any.whl", hash = "sha256:222439474e9c98fced559f1709d89e6c9cbf8d79c794ff3eb9f8800064291427"},
+    {file = "pytz-2022.6.tar.gz", hash = "sha256:e89512406b793ca39f5971bc999cc538ce125c0e51c27941bef4568b460095e2"},
 ]
 pytz-deprecation-shim = [
     {file = "pytz_deprecation_shim-0.1.0.post0-py2.py3-none-any.whl", hash = "sha256:8314c9692a636c8eb3bda879b9f119e350e93223ae83e70e80c31675a0fdc1a6"},
@@ -3539,6 +3555,10 @@ types-click = [
     {file = "types-click-7.1.8.tar.gz", hash = "sha256:b6604968be6401dc516311ca50708a0a28baa7a0cb840efd7412f0dbbff4e092"},
     {file = "types_click-7.1.8-py3-none-any.whl", hash = "sha256:8cb030a669e2e927461be9827375f83c16b8178c365852c060a34e24871e7e81"},
 ]
+types-dateparser = [
+    {file = "types-dateparser-1.1.4.1.tar.gz", hash = "sha256:0f76578bbae15c8b8701b5efd94db98a97ce0a27aedfe6f14a531170de6db97d"},
+    {file = "types_dateparser-1.1.4.1-py3-none-any.whl", hash = "sha256:dd7b2343bb06225c0e358533609b66a8edfb95e5426d8f658664e7d0f27dea68"},
+]
 types-Flask = [
     {file = "types-Flask-1.1.6.tar.gz", hash = "sha256:aac777b3abfff9436e6b01f6d08171cf23ea6e5be71cbf773aaabb1c5763e9cf"},
     {file = "types_Flask-1.1.6-py3-none-any.whl", hash = "sha256:6ab8a9a5e258b76539d652f6341408867298550b19b81f0e41e916825fc39087"},
|
@@ -28,6 +28,7 @@ flask-migrate = "*"
 flask-restful = "*"
 werkzeug = "*"
 SpiffWorkflow = {git = "https://github.com/sartography/SpiffWorkflow", rev = "main"}
+#SpiffWorkflow = {develop = true, path = "../SpiffWorkflow" }
 sentry-sdk = "^1.10"
 sphinx-autoapi = "^2.0"
 flask-bpmn = {git = "https://github.com/sartography/flask-bpmn", rev = "main"}
@@ -68,6 +69,9 @@ types-pytz = "^2022.1.1"
 # for now use my fork
 sqlalchemy-stubs = { git = "https://github.com/burnettk/sqlalchemy-stubs.git", rev = "scoped-session-delete" }
 simplejson = "^3.17.6"
+pytz = "^2022.6"
+dateparser = "^1.1.2"
+types-dateparser = "^1.1.4.1"
 
 
 [tool.poetry.dev-dependencies]
@@ -153,7 +153,6 @@ paths:
         description: The number of groups to show per page. Defaults to page 10.
         schema:
           type: integer
-    # process_groups_list
     get:
       operationId: spiffworkflow_backend.routes.process_api_blueprint.process_groups_list
       summary: get list
@@ -168,7 +167,6 @@ paths:
                 type: array
                 items:
                   $ref: "#/components/schemas/ProcessModelCategory"
-    # process_group_add
     post:
       operationId: spiffworkflow_backend.routes.process_api_blueprint.process_group_add
       summary: Add process group
@@ -400,7 +398,7 @@ paths:
         description: For filtering - beginning of start window - in seconds since epoch
         schema:
           type: integer
-      - name: start_till
+      - name: start_to
        in: query
         required: false
         description: For filtering - end of start window - in seconds since epoch
@@ -412,7 +410,7 @@ paths:
         description: For filtering - beginning of end window - in seconds since epoch
         schema:
           type: integer
-      - name: end_till
+      - name: end_to
         in: query
         required: false
         description: For filtering - end of end window - in seconds since epoch
@@ -960,6 +958,12 @@ paths:
         description: If true, this wil return all tasks associated with the process instance and not just user tasks.
         schema:
           type: boolean
+      - name: spiff_step
+        in: query
+        required: false
+        description: If set will return the tasks as they were during a specific step of execution.
+        schema:
+          type: integer
     get:
       tags:
         - Process Instances
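Note: a minimal client-side sketch of the renamed start_to/end_to filters and the new spiff_step parameter above. The base URL, the instance id, and the exact endpoint paths are assumptions, not taken from this diff:

import requests  # assumes the requests package is installed

BASE_URL = "http://localhost:7000/v1.0"  # hypothetical backend location

# Filter process instances by start/end windows, in seconds since epoch.
instances = requests.get(
    f"{BASE_URL}/process-instances",
    params={"start_from": 1667000000, "start_to": 1668000000},
).json()

# Fetch the task list for instance 42 as it looked at spiff step 3.
tasks = requests.get(
    f"{BASE_URL}/process-instances/42/tasks",
    params={"all_tasks": "true", "spiff_step": 3},
).json()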
@@ -21,8 +21,6 @@ from spiffworkflow_backend.models.active_task import ActiveTaskModel  # noqa: F4
 from spiffworkflow_backend.models.bpmn_process_id_lookup import (
     BpmnProcessIdLookup,
 )  # noqa: F401
-from spiffworkflow_backend.models.data_store import DataStoreModel  # noqa: F401
-from spiffworkflow_backend.models.file import FileModel  # noqa: F401
 from spiffworkflow_backend.models.message_correlation_property import (
     MessageCorrelationPropertyModel,
 )  # noqa: F401
@@ -48,7 +46,9 @@ from spiffworkflow_backend.models.process_instance_report import (
 from spiffworkflow_backend.models.refresh_token import RefreshTokenModel  # noqa: F401
 from spiffworkflow_backend.models.secret_model import SecretModel  # noqa: F401
 from spiffworkflow_backend.models.spiff_logging import SpiffLoggingModel  # noqa: F401
-from spiffworkflow_backend.models.task_event import TaskEventModel  # noqa: F401
+from spiffworkflow_backend.models.spiff_step_details import (
+    SpiffStepDetailsModel,
+)  # noqa: F401
 from spiffworkflow_backend.models.user import UserModel  # noqa: F401
 from spiffworkflow_backend.models.group import GroupModel  # noqa: F401
 
@@ -1,31 +0,0 @@
-"""Data_store."""
-from flask_bpmn.models.db import db
-from flask_bpmn.models.db import SpiffworkflowBaseDBModel
-from flask_marshmallow.sqla import SQLAlchemyAutoSchema  # type: ignore
-
-
-class DataStoreModel(SpiffworkflowBaseDBModel):
-    """DataStoreModel."""
-
-    __tablename__ = "data_store"
-    id = db.Column(db.Integer, primary_key=True)
-    updated_at_in_seconds = db.Column(db.Integer)
-    key = db.Column(db.String(50), nullable=False)
-    process_instance_id = db.Column(db.Integer)
-    task_spec = db.Column(db.String(50))
-    spec_id = db.Column(db.String(50))
-    user_id = db.Column(db.String(50), nullable=True)
-    file_id = db.Column(db.Integer, db.ForeignKey("file.id"), nullable=True)
-    value = db.Column(db.String(50))
-
-
-class DataStoreSchema(SQLAlchemyAutoSchema):  # type: ignore
-    """DataStoreSchema."""
-
-    class Meta:
-        """Meta."""
-
-        model = DataStoreModel
-        load_instance = True
-        include_fk = True
-        sqla_session = db.session
@@ -4,40 +4,10 @@ from dataclasses import field
 from datetime import datetime
 from typing import Optional
 
-from flask_bpmn.models.db import db
-from flask_bpmn.models.db import SpiffworkflowBaseDBModel
 from marshmallow import INCLUDE
 from marshmallow import Schema
-from sqlalchemy.orm import deferred
-from sqlalchemy.orm import relationship
 
 from spiffworkflow_backend.helpers.spiff_enum import SpiffEnum
-from spiffworkflow_backend.models.data_store import DataStoreModel
 
 
-class FileModel(SpiffworkflowBaseDBModel):
-    """FileModel."""
-
-    __tablename__ = "file"
-    id = db.Column(db.Integer, primary_key=True)
-    name = db.Column(db.String(50), nullable=False)
-    type = db.Column(db.String(50), nullable=False)
-    content_type = db.Column(db.String(50), nullable=False)
-    process_instance_id = db.Column(
-        db.Integer, db.ForeignKey("process_instance.id"), nullable=True
-    )
-    task_spec = db.Column(db.String(50), nullable=True)
-    irb_doc_code = db.Column(
-        db.String(50), nullable=False
-    )  # Code reference to the documents.xlsx reference file.
-    data_stores = relationship(DataStoreModel, cascade="all,delete", backref="file")
-    md5_hash = db.Column(db.String(50), unique=False, nullable=False)
-    data = deferred(db.Column(db.LargeBinary))  # type: ignore
-    size = db.Column(db.Integer, default=0)
-    updated_at_in_seconds = db.Column(db.Integer)
-    created_at_in_seconds = db.Column(db.Integer)
-    user_uid = db.Column(db.String(50), db.ForeignKey("user.uid"), nullable=True)
-    archived = db.Column(db.Boolean, default=False)
-
-
 class FileType(SpiffEnum):
@@ -1,6 +1,7 @@
 """Process_group."""
 from __future__ import annotations
 
+import dataclasses
 from dataclasses import dataclass
 from dataclasses import field
 from typing import Any
@@ -20,6 +21,7 @@ class ProcessGroup:
 
     id: str  # A unique string name, lower case, under scores (ie, 'my_group')
     display_name: str
+    description: str | None = None
     display_order: int | None = 0
     admin: bool | None = False
     process_models: list[ProcessModelInfo] = field(
@@ -38,6 +40,12 @@ class ProcessGroup:
             return True
         return False
 
+    @property
+    def serialized(self) -> dict:
+        """Serialized."""
+        original_dict = dataclasses.asdict(self)
+        return {x: original_dict[x] for x in original_dict if x not in ["sort_index"]}
+
 
 class ProcessGroupSchema(Schema):
     """ProcessGroupSchema."""
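Note: the serialized property added above leans on dataclasses.asdict; a self-contained sketch of the same filtering idiom (the class and field names here are stand-ins, not the real model):

import dataclasses
from dataclasses import dataclass


@dataclass
class Group:  # hypothetical stand-in for ProcessGroup
    id: str
    display_name: str
    sort_index: int = 0

    @property
    def serialized(self) -> dict:
        # asdict() walks the dataclass; the comprehension then drops keys
        # that should not be exposed to callers.
        original_dict = dataclasses.asdict(self)
        return {k: v for k, v in original_dict.items() if k not in ["sort_index"]}


print(Group(id="g1", display_name="Group One").serialized)
# -> {'id': 'g1', 'display_name': 'Group One'}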
@@ -78,10 +78,10 @@ class ProcessInstanceModel(SpiffworkflowBaseDBModel):
     process_initiator = relationship("UserModel")
 
     active_tasks = relationship("ActiveTaskModel", cascade="delete")  # type: ignore
-    task_events = relationship("TaskEventModel", cascade="delete")  # type: ignore
     spiff_logs = relationship("SpiffLoggingModel", cascade="delete")  # type: ignore
     message_instances = relationship("MessageInstanceModel", cascade="delete")  # type: ignore
     message_correlations = relationship("MessageCorrelationModel", cascade="delete")  # type: ignore
+    spiff_step_details = relationship("SpiffStepDetailsModel", cascade="delete")  # type: ignore
 
     bpmn_json: str | None = deferred(db.Column(db.JSON))  # type: ignore
     start_in_seconds: int | None = db.Column(db.Integer)
@@ -93,6 +93,7 @@ class ProcessInstanceModel(SpiffworkflowBaseDBModel):
     bpmn_xml_file_contents: bytes | None = None
     bpmn_version_control_type: str = db.Column(db.String(50))
     bpmn_version_control_identifier: str = db.Column(db.String(255))
+    spiff_step: int = db.Column(db.Integer)
 
     @property
     def serialized(self) -> dict[str, Any]:
@@ -111,6 +112,7 @@ class ProcessInstanceModel(SpiffworkflowBaseDBModel):
             "end_in_seconds": self.end_in_seconds,
             "process_initiator_id": self.process_initiator_id,
             "bpmn_xml_file_contents": local_bpmn_xml_file_contents,
+            "spiff_step": self.spiff_step,
         }
 
     @property
@@ -29,7 +29,6 @@ class ProcessModelInfo:
     id: str
     display_name: str
     description: str
-    # process_group_id: str = ""
     process_group: Any | None = None
     primary_file_name: str | None = None
     primary_process_id: str | None = None
@@ -25,3 +25,4 @@ class SpiffLoggingModel(SpiffworkflowBaseDBModel):
     timestamp: float = db.Column(db.DECIMAL(17, 6), nullable=False)
     message: Optional[str] = db.Column(db.String(255), nullable=True)
     current_user_id: int = db.Column(ForeignKey(UserModel.id), nullable=True)
+    spiff_step: int = db.Column(db.Integer, nullable=False)
@@ -0,0 +1,23 @@
+"""Spiff_step_details."""
+from dataclasses import dataclass
+
+from flask_bpmn.models.db import db
+from flask_bpmn.models.db import SpiffworkflowBaseDBModel
+from sqlalchemy import ForeignKey
+from sqlalchemy.orm import deferred
+
+from spiffworkflow_backend.models.process_instance import ProcessInstanceModel
+from spiffworkflow_backend.models.user import UserModel
+
+
+@dataclass
+class SpiffStepDetailsModel(SpiffworkflowBaseDBModel):
+    """SpiffStepDetailsModel."""
+
+    __tablename__ = "spiff_step_details"
+    id: int = db.Column(db.Integer, primary_key=True)
+    process_instance_id: int = db.Column(ForeignKey(ProcessInstanceModel.id), nullable=False)  # type: ignore
+    spiff_step: int = db.Column(db.Integer, nullable=False)
+    task_json: str | None = deferred(db.Column(db.JSON, nullable=False))  # type: ignore
+    timestamp: float = db.Column(db.DECIMAL(17, 6), nullable=False)
+    completed_by_user_id: int = db.Column(ForeignKey(UserModel.id), nullable=True)
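Note: a sketch of how the new table might be read back, assuming a Flask app context and the db/SpiffStepDetailsModel names defined above; the helper itself is hypothetical:

def step_details_for_instance(process_instance_id: int) -> list:
    # One row per engine step, ordered so callers can replay the history.
    return (
        db.session.query(SpiffStepDetailsModel)
        .filter(SpiffStepDetailsModel.process_instance_id == process_instance_id)
        .order_by(SpiffStepDetailsModel.spiff_step)
        .all()
    )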
@@ -1,100 +0,0 @@
-"""Task_event."""
-from __future__ import annotations
-
-import enum
-from typing import TYPE_CHECKING
-
-from flask_bpmn.models.db import db
-from flask_bpmn.models.db import SpiffworkflowBaseDBModel
-from marshmallow import fields
-from marshmallow import INCLUDE
-from marshmallow import Schema
-from sqlalchemy import func
-
-
-if TYPE_CHECKING:
-    from spiffworkflow_backend.models.process_instance import (
-        ProcessInstanceModel,
-    )  # noqa: F401
-
-
-class TaskAction(enum.Enum):
-    """TaskAction."""
-
-    COMPLETE = "COMPLETE"
-    TOKEN_RESET = "TOKEN_RESET"  # noqa: S105
-    HARD_RESET = "HARD_RESET"
-    SOFT_RESET = "SOFT_RESET"
-    ASSIGNMENT = "ASSIGNMENT"  # Whenever the lane changes between tasks we assign the task to specific user.
-
-
-class TaskEventModel(SpiffworkflowBaseDBModel):
-    """TaskEventModel."""
-
-    __tablename__ = "task_event"
-    id = db.Column(db.Integer, primary_key=True)
-    user_id = db.Column(
-        db.Integer, db.ForeignKey("user.id"), nullable=False
-    )  # In some cases the unique user id may not exist in the db yet.
-    process_instance_id = db.Column(
-        db.Integer, db.ForeignKey("process_instance.id"), nullable=False
-    )
-    spec_version = db.Column(db.String(50))
-    action = db.Column(db.String(50))
-    task_id = db.Column(db.String(50))
-    task_name = db.Column(db.String(50))
-    task_title = db.Column(db.String(50))
-    task_type = db.Column(db.String(50))
-    task_state = db.Column(db.String(50))
-    task_lane = db.Column(db.String(50))
-    form_data = db.Column(
-        db.JSON
-    )  # And form data submitted when the task was completed.
-    mi_type = db.Column(db.String(50))
-    mi_count = db.Column(db.Integer)
-    mi_index = db.Column(db.Integer)
-    process_name = db.Column(db.String(50))
-    date = db.Column(db.DateTime(timezone=True), default=func.now())
-
-
-class TaskEvent:
-    """TaskEvent."""
-
-    def __init__(self, model: TaskEventModel, process_instance: ProcessInstanceModel):
-        """__init__."""
-        self.id = model.id
-        self.process_instance = process_instance
-        self.user_id = model.user_id
-        self.action = model.action
-        self.task_id = model.task_id
-        self.task_title = model.task_title
-        self.task_name = model.task_name
-        self.task_type = model.task_type
-        self.task_state = model.task_state
-        self.task_lane = model.task_lane
-        self.date = model.date
-
-
-class TaskEventSchema(Schema):
-    """TaskEventSchema."""
-
-    process_instance = fields.Nested("ProcessInstanceMetadataSchema", dump_only=True)
-    task_lane = fields.String(allow_none=True, required=False)
-
-    class Meta:
-        """Meta."""
-
-        model = TaskEvent
-        additional = [
-            "id",
-            "user_id",
-            "action",
-            "task_id",
-            "task_title",
-            "task_name",
-            "task_type",
-            "task_state",
-            "task_lane",
-            "date",
-        ]
-        unknown = INCLUDE
@@ -112,12 +112,3 @@ class UserModelSchema(Schema):
 
     id = marshmallow.fields.String(required=True)
     username = marshmallow.fields.String(required=True)
-
-
-class AdminSessionModel(SpiffworkflowBaseDBModel):
-    """AdminSessionModel."""
-
-    __tablename__ = "admin_session"
-    id = db.Column(db.Integer, primary_key=True)
-    token = db.Column(db.String(50), unique=True)
-    admin_impersonate_uid = db.Column(db.String(50))
@@ -43,6 +43,7 @@ from spiffworkflow_backend.models.message_triggerable_process_model import (
     MessageTriggerableProcessModel,
 )
 from spiffworkflow_backend.models.principal import PrincipalModel
+from spiffworkflow_backend.models.process_group import ProcessGroup
 from spiffworkflow_backend.models.process_group import ProcessGroupSchema
 from spiffworkflow_backend.models.process_instance import ProcessInstanceApiSchema
 from spiffworkflow_backend.models.process_instance import ProcessInstanceModel
@@ -56,6 +57,7 @@ from spiffworkflow_backend.models.process_model import ProcessModelInfoSchema
 from spiffworkflow_backend.models.secret_model import SecretModel
 from spiffworkflow_backend.models.secret_model import SecretModelSchema
 from spiffworkflow_backend.models.spiff_logging import SpiffLoggingModel
+from spiffworkflow_backend.models.spiff_step_details import SpiffStepDetailsModel
 from spiffworkflow_backend.models.user import UserModel
 from spiffworkflow_backend.routes.user import verify_token
 from spiffworkflow_backend.services.authorization_service import AuthorizationService
@@ -134,18 +136,12 @@ def permissions_check(body: Dict[str, Dict[str, list[str]]]) -> flask.wrappers.R
     return make_response(jsonify({"results": response_dict}), 200)
 
 
-def process_group_add(
-    body: Dict[str, Union[str, bool, int]]
-) -> flask.wrappers.Response:
+def process_group_add(body: dict) -> flask.wrappers.Response:
     """Add_process_group."""
     process_model_service = ProcessModelService()
-    process_group = ProcessGroupSchema().load(body)
+    process_group = ProcessGroup(**body)
     process_model_service.add_process_group(process_group)
-    return Response(
-        json.dumps(ProcessGroupSchema().dump(process_group)),
-        status=201,
-        mimetype="application/json",
-    )
+    return make_response(jsonify(process_group), 201)
 
 
 def process_group_delete(process_group_id: str) -> flask.wrappers.Response:
@@ -154,13 +150,18 @@ def process_group_delete(process_group_id: str) -> flask.wrappers.Response:
     return Response(json.dumps({"ok": True}), status=200, mimetype="application/json")
 
 
-def process_group_update(
-    process_group_id: str, body: Dict[str, Union[str, bool, int]]
-) -> Dict[str, Union[str, bool, int]]:
+def process_group_update(process_group_id: str, body: dict) -> flask.wrappers.Response:
     """Process Group Update."""
-    process_group = ProcessGroupSchema().load(body)
+    body_include_list = ["display_name", "description"]
+    body_filtered = {
+        include_item: body[include_item]
+        for include_item in body_include_list
+        if include_item in body
+    }
+
+    process_group = ProcessGroup(id=process_group_id, **body_filtered)
     ProcessModelService().update_process_group(process_group)
-    return ProcessGroupSchema().dump(process_group)  # type: ignore
+    return make_response(jsonify(process_group), 200)
 
 
 def process_groups_list(page: int = 1, per_page: int = 100) -> flask.wrappers.Response:
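Note: process_group_update above filters the request body against an allow-list before building the dataclass; the same idiom in isolation (function name hypothetical):

def filter_body(body: dict, allowed: list) -> dict:
    # Keep only keys that are both allowed and actually present in the request.
    return {key: body[key] for key in allowed if key in body}


print(filter_body({"display_name": "New Name", "id": "ignored"}, ["display_name", "description"]))
# -> {'display_name': 'New Name'}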
@@ -173,6 +174,7 @@ def process_groups_list(page: int = 1, per_page: int = 100) -> flask.wrappers.Re
     remainder = len(process_groups) % per_page
     if remainder > 0:
         pages += 1
+
     response_json = {
         "results": ProcessGroupSchema(many=True).dump(batch),
         "pagination": {
@@ -198,7 +200,7 @@ def process_group_show(
                 status_code=400,
             )
         ) from exception
-    return ProcessGroupSchema().dump(process_group)
+    return make_response(jsonify(process_group), 200)
 
 
 def process_model_add(
@@ -225,7 +227,6 @@ def process_model_add(
             status_code=400,
         )
 
-    process_model_info.process_group = process_group
     process_model_service.add_spec(process_model_info)
     return Response(
         json.dumps(ProcessModelInfoSchema().dump(process_model_info)),
@@ -438,7 +439,6 @@ def process_instance_run(
             task=task,
         ) from e
     processor.save()
-    ProcessInstanceService.update_task_assignments(processor)
 
     if not current_app.config["RUN_BACKGROUND_SCHEDULER"]:
         MessageService.process_message_instances()
@@ -656,9 +656,9 @@ def process_instance_list(
     page: int = 1,
     per_page: int = 100,
     start_from: Optional[int] = None,
-    start_till: Optional[int] = None,
+    start_to: Optional[int] = None,
     end_from: Optional[int] = None,
-    end_till: Optional[int] = None,
+    end_to: Optional[int] = None,
     process_status: Optional[str] = None,
 ) -> flask.wrappers.Response:
     """Process_instance_list."""
@@ -689,17 +689,17 @@ def process_instance_list(
         process_instance_query = process_instance_query.filter(
             ProcessInstanceModel.start_in_seconds >= start_from
         )
-    if start_till is not None:
+    if start_to is not None:
         process_instance_query = process_instance_query.filter(
-            ProcessInstanceModel.start_in_seconds <= start_till
+            ProcessInstanceModel.start_in_seconds <= start_to
         )
     if end_from is not None:
         process_instance_query = process_instance_query.filter(
             ProcessInstanceModel.end_in_seconds >= end_from
         )
-    if end_till is not None:
+    if end_to is not None:
         process_instance_query = process_instance_query.filter(
-            ProcessInstanceModel.end_in_seconds <= end_till
+            ProcessInstanceModel.end_in_seconds <= end_to
         )
     if process_status is not None:
         process_status_array = process_status.split(",")
@@ -959,10 +959,23 @@ def task_list_my_tasks(page: int = 1, per_page: int = 100) -> flask.wrappers.Res
 
 
 def process_instance_task_list(
-    process_instance_id: int, all_tasks: bool = False
+    process_instance_id: int, all_tasks: bool = False, spiff_step: int = 0
 ) -> flask.wrappers.Response:
     """Process_instance_task_list."""
     process_instance = find_process_instance_by_id_or_raise(process_instance_id)
+
+    if spiff_step > 0:
+        step_detail = (
+            db.session.query(SpiffStepDetailsModel)
+            .filter(
+                SpiffStepDetailsModel.process_instance_id == process_instance.id,
+                SpiffStepDetailsModel.spiff_step == spiff_step,
+            )
+            .first()
+        )
+        if step_detail is not None:
+            process_instance.bpmn_json = json.dumps(step_detail.task_json)
+
     processor = ProcessInstanceProcessor(process_instance)
 
     spiff_tasks = None
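Note: the rewind added above, restated compactly — look up the persisted snapshot for a step and swap it in before the processor deserializes the instance. A hedged sketch assuming the names used in the hunk; the helper itself is hypothetical:

def rewind_to_step(process_instance, spiff_step: int) -> None:
    step_detail = (
        db.session.query(SpiffStepDetailsModel)
        .filter(
            SpiffStepDetailsModel.process_instance_id == process_instance.id,
            SpiffStepDetailsModel.spiff_step == spiff_step,
        )
        .first()
    )
    if step_detail is not None:
        # The processor reads bpmn_json, so overwriting it rewinds the view.
        process_instance.bpmn_json = json.dumps(step_detail.task_json)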
@@ -1074,7 +1087,9 @@ def task_show(process_instance_id: int, task_id: str) -> flask.wrappers.Response
         task.form_ui_schema = ui_form_contents
 
     if task.properties and task.data and "instructionsForEndUser" in task.properties:
-        print(f"task.properties['instructionsForEndUser']: {task.properties['instructionsForEndUser']}")
+        print(
+            f"task.properties['instructionsForEndUser']: {task.properties['instructionsForEndUser']}"
+        )
         if task.properties["instructionsForEndUser"]:
             task.properties["instructionsForEndUser"] = render_jinja_template(
                 task.properties["instructionsForEndUser"], task.data
@@ -1124,8 +1139,6 @@ def task_submit(
     # last_index = next_task.task_info()["mi_index"]
     # next_task = processor.next_task()
-
-    ProcessInstanceService.update_task_assignments(processor)
 
     next_active_task_assigned_to_me = (
         ActiveTaskModel.query.filter_by(process_instance_id=process_instance_id)
         .order_by(asc(ActiveTaskModel.id))  # type: ignore
@@ -1238,6 +1251,7 @@ def script_unit_test_run(
     """Script_unit_test_run."""
     # FIXME: We should probably clear this somewhere else but this works
     current_app.config["THREAD_LOCAL_DATA"].process_instance_id = None
+    current_app.config["THREAD_LOCAL_DATA"].spiff_step = None
 
     python_script = _get_required_parameter_or_raise("python_script", body)
     input_json = _get_required_parameter_or_raise("input_json", body)
@@ -9,8 +9,8 @@ from spiffworkflow_backend.models.script_attributes_context import (
 from spiffworkflow_backend.scripts.script import Script
 
 
-class GetUser(Script):
-    """GetUser."""
+class GetCurrentUser(Script):
+    """GetCurrentUser."""
 
     def get_description(self) -> str:
         """Get_description."""
@@ -1,5 +1,6 @@
 """Get_env."""
 from typing import Any
+
 from flask import current_app
 
 from spiffworkflow_backend.models.script_attributes_context import (
@@ -22,4 +23,4 @@ class GetFrontendUrl(Script):
         **kwargs: Any
     ) -> Any:
         """Run."""
-        return current_app.config['SPIFFWORKFLOW_FRONTEND_URL']
+        return current_app.config["SPIFFWORKFLOW_FRONTEND_URL"]
@@ -8,7 +8,7 @@ from spiffworkflow_backend.scripts.script import Script
 
 
 class GetProcessInfo(Script):
-    """GetUser."""
+    """GetProcessInfo."""
 
     def get_description(self) -> str:
         """Get_description."""
@@ -108,6 +108,8 @@ class SpiffFilter(logging.Filter):
         if hasattr(tld, "process_instance_id"):
             process_instance_id = tld.process_instance_id
         setattr(record, "process_instance_id", process_instance_id)  # noqa: B010
+        if hasattr(tld, "spiff_step"):
+            setattr(record, "spiff_step", tld.spiff_step)  # noqa: 8010
         if hasattr(g, "user") and g.user:
             setattr(record, "current_user_id", g.user.id)  # noqa: B010
         return True
@@ -204,6 +206,11 @@ class DBHandler(logging.Handler):
         timestamp = record.created
         message = record.msg if hasattr(record, "msg") else None
         current_user_id = record.current_user_id if hasattr(record, "current_user_id") else None  # type: ignore
+        spiff_step = (
+            record.spiff_step  # type: ignore
+            if hasattr(record, "spiff_step") and record.spiff_step is not None  # type: ignore
+            else 1
+        )
         spiff_log = SpiffLoggingModel(
             process_instance_id=record.process_instance_id,  # type: ignore
             bpmn_process_identifier=bpmn_process_identifier,
@@ -214,6 +221,7 @@ class DBHandler(logging.Handler):
             message=message,
             timestamp=timestamp,
             current_user_id=current_user_id,
+            spiff_step=spiff_step,
         )
         db.session.add(spiff_log)
         db.session.commit()
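Note: the filter above copies thread-local state onto each log record so the handler can persist it; a minimal standalone sketch of that pattern (names here are illustrative, not the app's):

import logging
import threading

tld = threading.local()  # stand-in for the app's THREAD_LOCAL_DATA


class StepFilter(logging.Filter):
    def filter(self, record: logging.LogRecord) -> bool:
        # Attach the current step, if any, so downstream handlers can read it.
        if hasattr(tld, "spiff_step"):
            record.spiff_step = tld.spiff_step
        return True


handler = logging.StreamHandler()
handler.addFilter(StepFilter())
logger = logging.getLogger("spiff.demo")
logger.addHandler(handler)

tld.spiff_step = 3
logger.warning("task completed")  # the emitted record carries spiff_step == 3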
@@ -7,6 +7,7 @@ import os
 import re
 import time
 from datetime import datetime
+from datetime import timedelta
 from typing import Any
 from typing import Callable
 from typing import Dict
@@ -17,6 +18,8 @@ from typing import Tuple
 from typing import TypedDict
 from typing import Union
 
+import dateparser
+import pytz
 from flask import current_app
 from flask_bpmn.api.api_error import ApiError
 from flask_bpmn.models.db import db
@@ -25,33 +28,40 @@ from RestrictedPython import safe_globals  # type: ignore
 from SpiffWorkflow.bpmn.exceptions import WorkflowTaskExecException  # type: ignore
 from SpiffWorkflow.bpmn.parser.ValidationException import ValidationException  # type: ignore
 from SpiffWorkflow.bpmn.PythonScriptEngine import Box  # type: ignore
-from SpiffWorkflow.bpmn.PythonScriptEngine import DEFAULT_GLOBALS
 from SpiffWorkflow.bpmn.PythonScriptEngine import PythonScriptEngine
-from SpiffWorkflow.bpmn.serializer import BpmnWorkflowSerializer  # type: ignore
+from SpiffWorkflow.bpmn.serializer.workflow import BpmnWorkflowSerializer  # type: ignore
 from SpiffWorkflow.bpmn.specs.BpmnProcessSpec import BpmnProcessSpec  # type: ignore
-from SpiffWorkflow.bpmn.specs.events import CancelEventDefinition  # type: ignore
-from SpiffWorkflow.bpmn.specs.events import EndEvent
+from SpiffWorkflow.bpmn.specs.events.EndEvent import EndEvent  # type: ignore
+from SpiffWorkflow.bpmn.specs.events.event_definitions import CancelEventDefinition  # type: ignore
 from SpiffWorkflow.bpmn.workflow import BpmnWorkflow  # type: ignore
 from SpiffWorkflow.dmn.parser.BpmnDmnParser import BpmnDmnParser  # type: ignore
-from SpiffWorkflow.dmn.serializer import BusinessRuleTaskConverter  # type: ignore
+from SpiffWorkflow.dmn.serializer.task_spec_converters import BusinessRuleTaskConverter  # type: ignore
 from SpiffWorkflow.exceptions import WorkflowException  # type: ignore
 from SpiffWorkflow.serializer.exceptions import MissingSpecError  # type: ignore
 from SpiffWorkflow.spiff.parser.process import SpiffBpmnParser  # type: ignore
-from SpiffWorkflow.spiff.serializer import BoundaryEventConverter  # type: ignore
-from SpiffWorkflow.spiff.serializer import CallActivityTaskConverter
-from SpiffWorkflow.spiff.serializer import EndEventConverter
-from SpiffWorkflow.spiff.serializer import IntermediateCatchEventConverter
-from SpiffWorkflow.spiff.serializer import IntermediateThrowEventConverter
-from SpiffWorkflow.spiff.serializer import ManualTaskConverter
-from SpiffWorkflow.spiff.serializer import NoneTaskConverter
-from SpiffWorkflow.spiff.serializer import ReceiveTaskConverter
-from SpiffWorkflow.spiff.serializer import ScriptTaskConverter
-from SpiffWorkflow.spiff.serializer import SendTaskConverter
-from SpiffWorkflow.spiff.serializer import ServiceTaskConverter
-from SpiffWorkflow.spiff.serializer import StartEventConverter
-from SpiffWorkflow.spiff.serializer import SubWorkflowTaskConverter
-from SpiffWorkflow.spiff.serializer import TransactionSubprocessConverter
-from SpiffWorkflow.spiff.serializer import UserTaskConverter
+from SpiffWorkflow.spiff.serializer.task_spec_converters import BoundaryEventConverter  # type: ignore
+from SpiffWorkflow.spiff.serializer.task_spec_converters import (
+    CallActivityTaskConverter,
+)
+from SpiffWorkflow.spiff.serializer.task_spec_converters import EndEventConverter
+from SpiffWorkflow.spiff.serializer.task_spec_converters import (
+    IntermediateCatchEventConverter,
+)
+from SpiffWorkflow.spiff.serializer.task_spec_converters import (
+    IntermediateThrowEventConverter,
+)
+from SpiffWorkflow.spiff.serializer.task_spec_converters import ManualTaskConverter
+from SpiffWorkflow.spiff.serializer.task_spec_converters import NoneTaskConverter
+from SpiffWorkflow.spiff.serializer.task_spec_converters import ReceiveTaskConverter
+from SpiffWorkflow.spiff.serializer.task_spec_converters import ScriptTaskConverter
+from SpiffWorkflow.spiff.serializer.task_spec_converters import SendTaskConverter
+from SpiffWorkflow.spiff.serializer.task_spec_converters import ServiceTaskConverter
+from SpiffWorkflow.spiff.serializer.task_spec_converters import StartEventConverter
+from SpiffWorkflow.spiff.serializer.task_spec_converters import SubWorkflowTaskConverter
+from SpiffWorkflow.spiff.serializer.task_spec_converters import (
+    TransactionSubprocessConverter,
+)
+from SpiffWorkflow.spiff.serializer.task_spec_converters import UserTaskConverter
 from SpiffWorkflow.task import Task as SpiffTask  # type: ignore
 from SpiffWorkflow.task import TaskState
 from SpiffWorkflow.util.deep_merge import DeepMerge  # type: ignore
@@ -77,14 +87,16 @@ from spiffworkflow_backend.models.process_model import ProcessModelInfo
 from spiffworkflow_backend.models.script_attributes_context import (
     ScriptAttributesContext,
 )
-from spiffworkflow_backend.models.task_event import TaskAction
-from spiffworkflow_backend.models.task_event import TaskEventModel
+from spiffworkflow_backend.models.spiff_step_details import SpiffStepDetailsModel
 from spiffworkflow_backend.models.user import UserModel
 from spiffworkflow_backend.models.user import UserModelSchema
 from spiffworkflow_backend.scripts.script import Script
 from spiffworkflow_backend.services.file_system_service import FileSystemService
 from spiffworkflow_backend.services.process_model_service import ProcessModelService
 from spiffworkflow_backend.services.service_task_service import ServiceTaskDelegate
+from spiffworkflow_backend.services.spec_file_service import (
+    ProcessModelFileNotFoundError,
+)
 from spiffworkflow_backend.services.spec_file_service import SpecFileService
 from spiffworkflow_backend.services.user_service import UserService
 
@@ -98,19 +110,6 @@ def _import(name: str, glbls: Dict[str, Any], *args: Any) -> None:
         raise ImportError(f"Import not allowed: {name}", name=name)
 
 
-DEFAULT_GLOBALS.update(
-    {
-        "datetime": datetime,
-        "time": time,
-        "decimal": decimal,
-        "_strptime": _strptime,
-    }
-)
-# This will overwrite the standard builtins
-DEFAULT_GLOBALS.update(safe_globals)
-DEFAULT_GLOBALS["__builtins__"]["__import__"] = _import
-
-
 class PotentialOwnerIdList(TypedDict):
     """PotentialOwnerIdList."""
 
@@ -143,7 +142,21 @@ class CustomBpmnScriptEngine(PythonScriptEngine):  # type: ignore
 
     def __init__(self) -> None:
         """__init__."""
-        super().__init__(default_globals=DEFAULT_GLOBALS)
+        default_globals = {
+            "timedelta": timedelta,
+            "datetime": datetime,
+            "dateparser": dateparser,
+            "pytz": pytz,
+            "time": time,
+            "decimal": decimal,
+            "_strptime": _strptime,
+        }
+
+        # This will overwrite the standard builtins
+        default_globals.update(safe_globals)
+        default_globals["__builtins__"]["__import__"] = _import
+
+        super().__init__(default_globals=default_globals)
 
     def __get_augment_methods(self, task: SpiffTask) -> Dict[str, Callable]:
         """__get_augment_methods."""
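Note: the engine's sandbox above is RestrictedPython's safe_globals plus an import blocker; a standalone sketch of the same construction (helper names hypothetical, RestrictedPython assumed installed):

import datetime
from typing import Any, Dict

from RestrictedPython import safe_globals


def _blocked_import(name: str, *args: Any) -> None:
    raise ImportError(f"Import not allowed: {name}", name=name)


def build_script_globals() -> Dict[str, Any]:
    script_globals: Dict[str, Any] = {"datetime": datetime}
    # safe_globals supplies a restricted __builtins__, overwriting the default,
    # and the import hook is then replaced so scripts cannot import anything.
    script_globals.update(safe_globals)
    script_globals["__builtins__"]["__import__"] = _blocked_import
    return script_globals


exec("year = datetime.datetime(2022, 11, 1).year", build_script_globals())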
@@ -275,9 +288,9 @@ class ProcessInstanceProcessor:
         self, process_instance_model: ProcessInstanceModel, validate_only: bool = False
     ) -> None:
         """Create a Workflow Processor based on the serialized information available in the process_instance model."""
-        current_app.config[
-            "THREAD_LOCAL_DATA"
-        ].process_instance_id = process_instance_model.id
+        tld = current_app.config["THREAD_LOCAL_DATA"]
+        tld.process_instance_id = process_instance_model.id
+        tld.spiff_step = process_instance_model.spiff_step
 
         # we want this to be the fully qualified path to the process model including all group subcomponents
         current_app.config["THREAD_LOCAL_DATA"].process_model_identifier = (
@@ -408,13 +421,11 @@ class ProcessInstanceProcessor:
             bpmn_process_spec, subprocesses
         )
 
-    def add_user_info_to_process_instance(
-        self, bpmn_process_instance: BpmnWorkflow
-    ) -> None:
-        """Add_user_info_to_process_instance."""
+    def current_user(self) -> Any:
+        """Current_user."""
         current_user = None
         if UserService.has_user():
-            current_user = UserService.current_user(allow_admin_impersonate=True)
+            current_user = UserService.current_user()
 
         # fall back to initiator if g.user is not set
         # this is for background processes when there will not be a user
@@ -422,65 +433,20 @@ class ProcessInstanceProcessor:
         elif self.process_instance_model.process_initiator_id:
             current_user = self.process_instance_model.process_initiator
 
+        return current_user
+
+    def add_user_info_to_process_instance(
+        self, bpmn_process_instance: BpmnWorkflow
+    ) -> None:
+        """Add_user_info_to_process_instance."""
+        current_user = self.current_user()
+
         if current_user:
             current_user_data = UserModelSchema().dump(current_user)
             tasks = bpmn_process_instance.get_tasks(TaskState.READY)
             for task in tasks:
                 task.data["current_user"] = current_user_data
 
-    @staticmethod
-    def reset(
-        process_instance_model: ProcessInstanceModel, clear_data: bool = False
-    ) -> None:
-        """Resets the process_instance back to an unstarted state - where nothing has happened yet.
-
-        If clear_data is set to false, then the information
-        previously used in forms will be re-populated when the form is re-
-        displayed, and any files that were updated will remain in place, otherwise
-        files will also be cleared out.
-        """
-        # Try to execute a cancel notify
-        try:
-            bpmn_process_instance = (
-                ProcessInstanceProcessor.__get_bpmn_process_instance(
-                    process_instance_model
-                )
-            )
-            ProcessInstanceProcessor.__cancel_notify(bpmn_process_instance)
-        except Exception as e:
-            db.session.rollback()  # in case the above left the database with a bad transaction
-            current_app.logger.error(
-                "Unable to send a cancel notify for process_instance %s during a reset."
-                " Continuing with the reset anyway so we don't get in an unresolvable"
-                " state. An %s error occured with the following information: %s"
-                % (process_instance_model.id, e.__class__.__name__, str(e))
-            )
-        process_instance_model.bpmn_json = None
-        process_instance_model.status = ProcessInstanceStatus.not_started.value
-
-        # clear out any task assignments
-        db.session.query(TaskEventModel).filter(
-            TaskEventModel.process_instance_id == process_instance_model.id
-        ).filter(TaskEventModel.action == TaskAction.ASSIGNMENT.value).delete()
-
-        if clear_data:
-            # Clear out data in previous task events
-            task_events = (
-                db.session.query(TaskEventModel)
-                .filter(TaskEventModel.process_instance_id == process_instance_model.id)
-                .all()
-            )
-            for task_event in task_events:
-                task_event.form_data = {}
-                db.session.add(task_event)
-            # Remove any uploaded files.
-
-            # TODO: grab UserFileService
-            # files = FileModel.query.filter(FileModel.process_instance_id == process_instance_model.id).all()
-            # for file in files:
-            #     UserFileService().delete_file(file.id)
-        db.session.commit()
-
     @staticmethod
     def get_bpmn_process_instance_from_workflow_spec(
         spec: BpmnProcessSpec,
@@ -592,9 +558,31 @@ class ProcessInstanceProcessor:
             "lane_assignment_id": lane_assignment_id,
         }
 
+    def save_spiff_step_details(self) -> None:
+        """SaveSpiffStepDetails."""
+        bpmn_json = self.serialize()
+        wf_json = json.loads(bpmn_json)
+        task_json = "{}"
+        if "tasks" in wf_json:
+            task_json = json.dumps(wf_json["tasks"])
+
+        # TODO want to just save the tasks, something wasn't immediately working
+        # so after the flow works with the full wf_json revisit this
+        task_json = wf_json
+        details_model = SpiffStepDetailsModel(
+            process_instance_id=self.process_instance_model.id,
+            spiff_step=self.process_instance_model.spiff_step or 1,
+            task_json=task_json,
+            timestamp=round(time.time()),
+            completed_by_user_id=self.current_user().id,
+        )
+        db.session.add(details_model)
+        db.session.commit()
+
     def save(self) -> None:
         """Saves the current state of this processor to the database."""
         self.process_instance_model.bpmn_json = self.serialize()
+
         complete_states = [TaskState.CANCELLED, TaskState.COMPLETED]
         user_tasks = list(self.get_all_user_tasks())
         self.process_instance_model.status = self.get_status().value
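Note: save_spiff_step_details above boils down to serialize, pick out the task state, and persist it with the step counter; in outline (hypothetical helper that returns the row contents instead of writing the model):

import json
import time


def snapshot_step(serialized_workflow: str, spiff_step: int) -> dict:
    wf_json = json.loads(serialized_workflow)
    return {
        "spiff_step": spiff_step,
        "task_json": wf_json,  # full workflow JSON for now; see the TODO above
        "timestamp": round(time.time()),
    }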
@@ -682,10 +670,14 @@ class ProcessInstanceProcessor:
         process_models = ProcessModelService().get_process_models()
         for process_model in process_models:
             if process_model.primary_file_name:
+                try:
                 etree_element = SpecFileService.get_etree_element_from_file_name(
                     process_model, process_model.primary_file_name
                 )
                 bpmn_process_identifiers = []
+                except ProcessModelFileNotFoundError:
+                    # if primary_file_name doesn't actually exist on disk, then just go on to the next process_model
+                    continue
+
         try:
             bpmn_process_identifiers = (
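
Rendered flat, the re-indentation in this hunk is easy to misread; the resulting guard is the standard skip-and-continue pattern, condensed here for clarity (names are exactly those in the hunk above):

    for process_model in process_models:
        if process_model.primary_file_name:
            try:
                etree_element = SpecFileService.get_etree_element_from_file_name(
                    process_model, process_model.primary_file_name
                )
            except ProcessModelFileNotFoundError:
                # the model lists a primary file that is absent on disk; skip it
                continue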
@@ -713,7 +705,11 @@ class ProcessInstanceProcessor:
         bpmn_process_identifier: str,
     ) -> str:
         """Bpmn_file_full_path_from_bpmn_process_identifier."""
-        db.session.flush()
+        if bpmn_process_identifier is None:
+            raise ValueError(
+                "bpmn_file_full_path_from_bpmn_process_identifier: bpmn_process_identifier is unexpectedly None"
+            )
+
         bpmn_process_id_lookup = BpmnProcessIdLookup.query.filter_by(
             bpmn_process_identifier=bpmn_process_identifier
         ).first()
@@ -746,6 +742,10 @@ class ProcessInstanceProcessor:
         if processed_identifiers is None:
             processed_identifiers = set()
         processor_dependencies = parser.get_process_dependencies()
+
+        # since get_process_dependencies() returns a set with None sometimes, we need to remove it
+        processor_dependencies = processor_dependencies - {None}
+
         processor_dependencies_new = processor_dependencies - processed_identifiers
         bpmn_process_identifiers_in_parser = parser.get_process_ids()

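
The "- {None}" line above is ordinary Python set difference, which drops a stray None member without a loop or an explicit membership test:

    deps = {"Process_A", None, "Process_B"}
    deps = deps - {None}  # set difference removes the None member
    assert deps == {"Process_A", "Process_B"}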
@@ -981,11 +981,29 @@ class ProcessInstanceProcessor:

         db.session.commit()

+    def increment_spiff_step(self) -> None:
+        """Spiff_step++."""
+        spiff_step = self.process_instance_model.spiff_step or 0
+        spiff_step += 1
+        self.process_instance_model.spiff_step = spiff_step
+        current_app.config["THREAD_LOCAL_DATA"].spiff_step = spiff_step
+        db.session.add(self.process_instance_model)
+        db.session.commit()
+
     def do_engine_steps(self, exit_at: None = None, save: bool = False) -> None:
         """Do_engine_steps."""
         try:
-            self.bpmn_process_instance.refresh_waiting_tasks()
-            self.bpmn_process_instance.do_engine_steps(exit_at=exit_at)
+            self.bpmn_process_instance.refresh_waiting_tasks(
+                will_refresh_task=lambda t: self.increment_spiff_step(),
+                did_refresh_task=lambda t: self.save_spiff_step_details(),
+            )
+
+            self.bpmn_process_instance.do_engine_steps(
+                exit_at=exit_at,
+                will_complete_task=lambda t: self.increment_spiff_step(),
+                did_complete_task=lambda t: self.save_spiff_step_details(),
+            )
+
             self.process_bpmn_messages()
             self.queue_waiting_receive_messages()

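
The will_*/did_* keyword arguments above are per-task callbacks: the engine calls the will_ hook just before it processes a task and the did_ hook just after, which is what lets the processor number every step and snapshot it as it happens. A stripped-down sketch of the hook shape (illustrative only, not SpiffWorkflow's actual internals):

    from typing import Callable, Iterable, Optional

    def run_steps(
        tasks: Iterable,
        will_complete_task: Optional[Callable] = None,
        did_complete_task: Optional[Callable] = None,
    ) -> None:
        """Illustrative engine loop showing where the hooks fire."""
        for task in tasks:
            if will_complete_task:
                will_complete_task(task)  # here: increment_spiff_step()
            task.run()  # stand-in for the engine's real work
            if did_complete_task:
                did_complete_task(task)  # here: save_spiff_step_details()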
@@ -1007,6 +1025,7 @@ class ProcessInstanceProcessor:
             # A little hackly, but make the bpmn_process_instance catch a cancel event.
             bpmn_process_instance.signal("cancel")  # generate a cancel signal.
             bpmn_process_instance.catch(CancelEventDefinition())
+            # Due to this being static, can't save granular step details in this case
             bpmn_process_instance.do_engine_steps()
         except WorkflowTaskExecException as we:
             raise ApiError.from_workflow_exception("task_error", str(we), we) from we
@@ -1105,7 +1124,9 @@ class ProcessInstanceProcessor:

     def complete_task(self, task: SpiffTask) -> None:
         """Complete_task."""
+        self.increment_spiff_step()
         self.bpmn_process_instance.complete_task_from_id(task.id)
+        self.save_spiff_step_details()

     def get_data(self) -> dict[str, Any]:
         """Get_data."""
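
complete_task is bracketed the same way as the engine-step callbacks, and the ordering is the point: save_spiff_step_details files the snapshot under process_instance_model.spiff_step, so the counter has to move first. The invariant, spelled out:

    self.increment_spiff_step()  # counter moves from N to N + 1 first
    self.bpmn_process_instance.complete_task_from_id(task.id)
    self.save_spiff_step_details()  # snapshot is therefore filed under step N + 1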
@@ -1,7 +1,6 @@
 """Process_instance_service."""
 import time
 from typing import Any
-from typing import Dict
 from typing import List
 from typing import Optional

@@ -9,15 +8,12 @@ from flask import current_app
 from flask_bpmn.api.api_error import ApiError
 from flask_bpmn.models.db import db
 from SpiffWorkflow.task import Task as SpiffTask  # type: ignore
-from SpiffWorkflow.util.deep_merge import DeepMerge  # type: ignore

 from spiffworkflow_backend.models.process_instance import ProcessInstanceApi
 from spiffworkflow_backend.models.process_instance import ProcessInstanceModel
 from spiffworkflow_backend.models.process_instance import ProcessInstanceStatus
 from spiffworkflow_backend.models.task import MultiInstanceType
 from spiffworkflow_backend.models.task import Task
-from spiffworkflow_backend.models.task_event import TaskAction
-from spiffworkflow_backend.models.task_event import TaskEventModel
 from spiffworkflow_backend.models.user import UserModel
 from spiffworkflow_backend.services.authorization_service import AuthorizationService
 from spiffworkflow_backend.services.git_service import GitService
@@ -107,70 +103,9 @@ class ProcessInstanceService:
             is_review=is_review_value,
             title=title_value,
         )
-        next_task_trying_again = next_task
-        if (
-            not next_task
-        ):  # The Next Task can be requested to be a certain task, useful for parallel tasks.
-            # This may or may not work, sometimes there is no next task to complete.
-            next_task_trying_again = processor.next_task()
-
-        if next_task_trying_again is not None:
-            previous_form_data = ProcessInstanceService.get_previously_submitted_data(
-                processor.process_instance_model.id, next_task_trying_again
-            )
-            # DeepMerge.merge(next_task_trying_again.data, previous_form_data)
-            next_task_trying_again.data = DeepMerge.merge(
-                previous_form_data, next_task_trying_again.data
-            )
-
-            process_instance_api.next_task = (
-                ProcessInstanceService.spiff_task_to_api_task(
-                    next_task_trying_again, add_docs_and_forms=True
-                )
-            )
-            # TODO: Hack for now, until we decide how to implment forms
-            process_instance_api.next_task.form = None
-
-            # Update the state of the task to locked if the current user does not own the task.
-            # user_uids = WorkflowService.get_users_assigned_to_task(processor, next_task)
-            # if not UserService.in_list(user_uids, allow_admin_impersonate=True):
-            #     workflow_api.next_task.state = WorkflowService.TASK_STATE_LOCKED
-
         return process_instance_api

-    @staticmethod
-    def get_previously_submitted_data(
-        process_instance_id: int, spiff_task: SpiffTask
-    ) -> Dict[Any, Any]:
-        """If the user has completed this task previously, find the form data for the last submission."""
-        query = (
-            db.session.query(TaskEventModel)
-            .filter_by(process_instance_id=process_instance_id)
-            .filter_by(task_name=spiff_task.task_spec.name)
-            .filter_by(action=TaskAction.COMPLETE.value)
-        )
-
-        if (
-            hasattr(spiff_task, "internal_data")
-            and "runtimes" in spiff_task.internal_data
-        ):
-            query = query.filter_by(mi_index=spiff_task.internal_data["runtimes"])
-
-        latest_event = query.order_by(TaskEventModel.date.desc()).first()
-        if latest_event:
-            if latest_event.form_data is not None:
-                return latest_event.form_data  # type: ignore
-            else:
-                missing_form_error = (
-                    f"We have lost data for workflow {process_instance_id}, "
-                    f"task {spiff_task.task_spec.name}, it is not in the task event model, "
-                    f"and it should be."
-                )
-                current_app.logger.exception("missing_form_data", missing_form_error)
-                return {}
-        else:
-            return {}
-
     def get_process_instance(self, process_instance_id: int) -> Any:
         """Get_process_instance."""
         result = (
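
The removed block leaned on SpiffWorkflow's DeepMerge to fold previously submitted form data back into the next task. Assuming the conventional second-argument-wins merge semantics (which is what the deleted code's argument order suggests: previous data first, so live task data takes precedence), the behavior looks like this sketch:

    from SpiffWorkflow.util.deep_merge import DeepMerge

    previous_form_data = {"name": "saved earlier", "age": 30}
    task_data = {"name": "current"}
    task_data = DeepMerge.merge(previous_form_data, task_data)
    # assuming second-argument-wins semantics: {"name": "current", "age": 30}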
@@ -180,30 +115,6 @@ class ProcessInstanceService:
         )
         return result

-    @staticmethod
-    def update_task_assignments(processor: ProcessInstanceProcessor) -> None:
-        """For every upcoming user task, log a task action that connects the assigned user(s) to that task.
-
-        All existing assignment actions for this workflow are removed from the database,
-        so that only the current valid actions are available. update_task_assignments
-        should be called whenever progress is made on a workflow.
-        """
-        db.session.query(TaskEventModel).filter(
-            TaskEventModel.process_instance_id == processor.process_instance_model.id
-        ).filter(TaskEventModel.action == TaskAction.ASSIGNMENT.value).delete()
-        db.session.commit()
-
-        tasks = processor.get_current_user_tasks()
-        for task in tasks:
-            user_ids = ProcessInstanceService.get_users_assigned_to_task(
-                processor, task
-            )
-
-            for user_id in user_ids:
-                ProcessInstanceService().log_task_action(
-                    user_id, processor, task, TaskAction.ASSIGNMENT.value
-                )
-
     @staticmethod
     def get_users_assigned_to_task(
         processor: ProcessInstanceProcessor, spiff_task: SpiffTask
@@ -278,52 +189,8 @@ class ProcessInstanceService:
         spiff_task.update_data(dot_dct)
         # ProcessInstanceService.post_process_form(spiff_task)  # some properties may update the data store.
         processor.complete_task(spiff_task)
-        # Log the action before doing the engine steps, as doing so could effect the state of the task
-        # the workflow could wrap around in the ngine steps, and the task could jump from being completed to
-        # another state. What we are logging here is the completion.
-        ProcessInstanceService.log_task_action(
-            user.id, processor, spiff_task, TaskAction.COMPLETE.value
-        )
         processor.do_engine_steps(save=True)

-    @staticmethod
-    def log_task_action(
-        user_id: int,
-        processor: ProcessInstanceProcessor,
-        spiff_task: SpiffTask,
-        action: str,
-    ) -> None:
-        """Log_task_action."""
-        task = ProcessInstanceService.spiff_task_to_api_task(spiff_task)
-        form_data = ProcessInstanceService.extract_form_data(
-            spiff_task.data, spiff_task
-        )
-        multi_instance_type_value = ""
-        if task.multi_instance_type:
-            multi_instance_type_value = task.multi_instance_type.value
-
-        task_event = TaskEventModel(
-            # study_id=processor.workflow_model.study_id,
-            user_id=user_id,
-            process_instance_id=processor.process_instance_model.id,
-            # workflow_spec_id=processor.workflow_model.workflow_spec_id,
-            action=action,
-            task_id=str(task.id),
-            task_name=task.name,
-            task_title=task.title,
-            task_type=str(task.type),
-            task_state=task.state,
-            task_lane=task.lane,
-            form_data=form_data,
-            mi_type=multi_instance_type_value,  # Some tasks have a repeat behavior.
-            mi_count=task.multi_instance_count,  # This is the number of times the task could repeat.
-            mi_index=task.multi_instance_index,  # And the index of the currently repeating task.
-            process_name=task.process_name,
-            # date=datetime.utcnow(), <=== For future reference, NEVER do this. Let the database set the time.
-        )
-        db.session.add(task_event)
-        db.session.commit()
-
     @staticmethod
     def extract_form_data(latest_data: dict, task: SpiffTask) -> dict:
         """Extracts data from the latest_data that is directly related to the form that is being submitted."""
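
With log_task_action and the TaskEventModel imports gone, the per-action audit trail is effectively replaced by the per-step snapshots that process_instance_processor.py now writes. A hedged sketch of reconstructing a history from those snapshots (field names are the ones the new SpiffStepDetailsModel rows carry; the helper is hypothetical):

    def step_history(process_instance_id: int) -> list:
        """Hypothetical helper listing (spiff_step, completed_by_user_id, timestamp)."""
        rows = (
            db.session.query(SpiffStepDetailsModel)
            .filter_by(process_instance_id=process_instance_id)
            .order_by(SpiffStepDetailsModel.spiff_step)
            .all()
        )
        return [(r.spiff_step, r.completed_by_user_id, r.timestamp) for r in rows]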
Some files were not shown because too many files have changed in this diff.