merged in main and resolved conflicts

commit 23550583b2

@@ -11,6 +11,12 @@ repos:
         require_serial: true
-        # exclude: ^migrations/
+        exclude: "/migrations/"
+
+        # otherwise it will not fix long lines if the long lines contain long strings
+        # https://github.com/psf/black/pull/1132
+        # https://github.com/psf/black/pull/1609
+        args: [--preview]

       - id: check-added-large-files
+        files: ^spiffworkflow-backend/
         name: Check for added large files

@@ -36,8 +36,16 @@ uninstall:

 .PHONY : tests
 tests:
-	cd tests/$(NAME)
-	PYTHONPATH=../.. python -m unittest discover -v . "*Test.py"
+	python -m unittest discover -vs tests/SpiffWorkflow -p \*Test.py -t .
+
+.PHONY : tests-par
+tests-par:
+	@if ! command -v unittest-parallel >/dev/null 2>&1; then \
+		echo "unittest-parallel not found. Please install it with:"; \
+		echo " pip install unittest-parallel"; \
+		exit 1; \
+	fi
+	unittest-parallel --module-fixtures -vs tests/SpiffWorkflow -p \*Test.py -t .

 .PHONY : tests-cov
 tests-cov:

@@ -72,7 +72,7 @@ coverage run --source=SpiffWorkflow -m unittest discover -v . "*Test.py"
 ```

 ## Support
-You can find us on Discord at https://discord.gg/zDEBEnrF
+You can find us on Discord at https://discord.gg/BYHcc7PpUC

 Commercial support for SpiffWorkflow is available from
 [Sartography](https://sartography.com)

@@ -29,7 +29,7 @@ from .ValidationException import ValidationException
 from ..specs.BpmnProcessSpec import BpmnProcessSpec
 from ..specs.events.EndEvent import EndEvent
 from ..specs.events.StartEvent import StartEvent
-from ..specs.events.IntermediateEvent import BoundaryEvent, IntermediateCatchEvent, IntermediateThrowEvent
+from ..specs.events.IntermediateEvent import BoundaryEvent, IntermediateCatchEvent, IntermediateThrowEvent, EventBasedGateway
 from ..specs.events.IntermediateEvent import SendTask, ReceiveTask
 from ..specs.SubWorkflowTask import CallActivity, SubWorkflowTask, TransactionSubprocess
 from ..specs.ExclusiveGateway import ExclusiveGateway

@@ -47,7 +47,7 @@ from .task_parsers import (UserTaskParser, NoneTaskParser, ManualTaskParser,
                            ExclusiveGatewayParser, ParallelGatewayParser, InclusiveGatewayParser,
                            CallActivityParser, ScriptTaskParser, SubWorkflowParser,
                            ServiceTaskParser)
-from .event_parsers import (StartEventParser, EndEventParser, BoundaryEventParser,
+from .event_parsers import (EventBasedGatewayParser, StartEventParser, EndEventParser, BoundaryEventParser,
                             IntermediateCatchEventParser, IntermediateThrowEventParser,
                             SendTaskParser, ReceiveTaskParser)

@@ -57,7 +57,8 @@ XSD_PATH = os.path.join(os.path.dirname(__file__), 'schema', 'BPMN20.xsd')
 class BpmnValidator:

     def __init__(self, xsd_path=XSD_PATH, imports=None):
-        schema = etree.parse(open(xsd_path))
+        with open(xsd_path) as xsd:
+            schema = etree.parse(xsd)
         if imports is not None:
             for ns, fn in imports.items():
                 elem = etree.Element(

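The hunk above swaps a leaked file handle for a context manager: `etree.parse(open(xsd_path))` relies on the garbage collector to close the file, while the `with` form closes it as soon as the schema is parsed. A minimal sketch of the pattern, assuming lxml and a hypothetical schema path:

```python
from lxml import etree

XSD_PATH = 'schema/BPMN20.xsd'  # hypothetical path, mirroring the constant above

# Before: the handle returned by open() is never explicitly closed.
# schema = etree.parse(open(XSD_PATH))

# After: the handle is closed deterministically when the block exits.
with open(XSD_PATH) as xsd:
    schema = etree.parse(xsd)
```
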
@@ -104,6 +105,7 @@ class BpmnParser(object):
         full_tag('boundaryEvent'): (BoundaryEventParser, BoundaryEvent),
         full_tag('receiveTask'): (ReceiveTaskParser, ReceiveTask),
         full_tag('sendTask'): (SendTaskParser, SendTask),
+        full_tag('eventBasedGateway'): (EventBasedGatewayParser, EventBasedGateway),
     }

     OVERRIDE_PARSER_CLASSES = {}

@@ -121,6 +121,25 @@ class TaskParser(NodeParser):
         elif len(self.xpath('./bpmn:standardLoopCharacteristics')) > 0:
             self._set_multiinstance_attributes(True, 25, STANDARDLOOPCOUNT, loop_task=True)

+    def _add_boundary_event(self, children):
+
+        parent = _BoundaryEventParent(
+            self.spec, '%s.BoundaryEventParent' % self.get_id(),
+            self.task, lane=self.task.lane)
+        self.process_parser.parsed_nodes[self.node.get('id')] = parent
+        parent.connect_outgoing(self.task, '%s.FromBoundaryEventParent' % self.get_id(), None, None)
+        for event in children:
+            child = self.process_parser.parse_node(event)
+            if isinstance(child.event_definition, CancelEventDefinition) \
+                    and not isinstance(self.task, TransactionSubprocess):
+                raise ValidationException('Cancel Events may only be used with transactions',
+                                          node=self.node,
+                                          filename=self.filename)
+            parent.connect_outgoing(child,
+                                    '%s.FromBoundaryEventParent' % event.get('id'),
+                                    None, None)
+        return parent
+
     def parse_node(self):
         """
         Parse this node, and all children, returning the connected task spec.

@@ -139,30 +158,9 @@ class TaskParser(NodeParser):

             boundary_event_nodes = self.doc_xpath('.//bpmn:boundaryEvent[@attachedToRef="%s"]' % self.get_id())
             if boundary_event_nodes:
-                parent_task = _BoundaryEventParent(
-                    self.spec, '%s.BoundaryEventParent' % self.get_id(),
-                    self.task, lane=self.task.lane)
-                self.process_parser.parsed_nodes[
-                    self.node.get('id')] = parent_task
-                parent_task.connect_outgoing(
-                    self.task, '%s.FromBoundaryEventParent' % self.get_id(),
-                    None, None)
-                for boundary_event in boundary_event_nodes:
-                    b = self.process_parser.parse_node(boundary_event)
-                    if isinstance(b.event_definition, CancelEventDefinition) \
-                            and not isinstance(self.task, TransactionSubprocess):
-                        raise ValidationException(
-                            'Cancel Events may only be used with transactions',
-                            node=self.node,
-                            filename=self.filename)
-                    parent_task.connect_outgoing(
-                        b,
-                        '%s.FromBoundaryEventParent' % boundary_event.get(
-                            'id'),
-                        None, None)
+                parent = self._add_boundary_event(boundary_event_nodes)
             else:
-                self.process_parser.parsed_nodes[
-                    self.node.get('id')] = self.task
+                self.process_parser.parsed_nodes[self.node.get('id')] = self.task

             children = []
             outgoing = self.doc_xpath('.//bpmn:sequenceFlow[@sourceRef="%s"]' % self.get_id())

@@ -202,7 +200,7 @@ class TaskParser(NodeParser):
                         c, target_node, sequence_flow,
                         sequence_flow.get('id') == default_outgoing)

-            return parent_task if boundary_event_nodes else self.task
+            return parent if boundary_event_nodes else self.task
         except ValidationException:
             raise
         except Exception as ex:

@@ -5,7 +5,7 @@ from SpiffWorkflow.bpmn.specs.events.event_definitions import CorrelationPropert
 from .ValidationException import ValidationException
 from .TaskParser import TaskParser
 from .util import first, one
-from ..specs.events.event_definitions import (TimerEventDefinition, MessageEventDefinition,
+from ..specs.events.event_definitions import (MultipleEventDefinition, TimerEventDefinition, MessageEventDefinition,
                                               ErrorEventDefinition, EscalationEventDefinition,
                                               SignalEventDefinition,
                                               CancelEventDefinition, CycleTimerEventDefinition,

@@ -81,17 +81,18 @@ class EventDefinitionParser(TaskParser):
         """Parse the timerEventDefinition node and return an instance of TimerEventDefinition."""

         try:
+            label = self.node.get('name', self.node.get('id'))
             time_date = first(self.xpath('.//bpmn:timeDate'))
             if time_date is not None:
-                return TimerEventDefinition(self.node.get('name'), time_date.text)
+                return TimerEventDefinition(label, time_date.text)

             time_duration = first(self.xpath('.//bpmn:timeDuration'))
             if time_duration is not None:
-                return TimerEventDefinition(self.node.get('name'), time_duration.text)
+                return TimerEventDefinition(label, time_duration.text)

             time_cycle = first(self.xpath('.//bpmn:timeCycle'))
             if time_cycle is not None:
-                return CycleTimerEventDefinition(self.node.get('name'), time_cycle.text)
+                return CycleTimerEventDefinition(label, time_cycle.text)
             raise ValidationException("Unknown Time Specification", node=self.node, filename=self.filename)
         except Exception as e:
             raise ValidationException("Time Specification Error. " + str(e), node=self.node, filename=self.filename)

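The new `label` variable changes only the fallback: a timer with no `name` attribute is now labeled with its element id rather than `None`. In dict terms (a stand-in for the lxml node, whose `.get()` behaves the same way):

```python
# Stand-in dict for an unnamed BPMN timer element.
node = {'id': 'TimerEventDefinition_0reo0gl'}  # no 'name' attribute
label = node.get('name', node.get('id'))
print(label)  # -> 'TimerEventDefinition_0reo0gl' rather than None
```
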
@@ -109,7 +110,7 @@ class EventDefinitionParser(TaskParser):
             correlations.append(CorrelationProperty(key, expression, used_by))
         return correlations

-    def _create_task(self, event_definition, cancel_activity=None):
+    def _create_task(self, event_definition, cancel_activity=None, parallel=None):

         if isinstance(event_definition, MessageEventDefinition):
             for prop in event_definition.correlation_properties:

@@ -126,28 +127,40 @@ class EventDefinitionParser(TaskParser):
         }
         if cancel_activity is not None:
             kwargs['cancel_activity'] = cancel_activity
+        if parallel is not None:
+            kwargs['parallel'] = parallel
         return self.spec_class(self.spec, self.get_task_spec_name(), event_definition, **kwargs)

     def get_event_definition(self, xpaths):
-        """Returns the first event definition it can find in given list of xpaths"""
+        """Returns all event definitions it can find in given list of xpaths"""
+
+        event_definitions = []
         for path in xpaths:
-            event = first(self.xpath(path))
-            if event is not None:
+            for event in self.xpath(path):
                 if path == MESSAGE_EVENT_XPATH:
-                    return self.parse_message_event(event)
+                    event_definitions.append(self.parse_message_event(event))
                 elif path == SIGNAL_EVENT_XPATH:
-                    return self.parse_signal_event(event)
+                    event_definitions.append(self.parse_signal_event(event))
                 elif path == TIMER_EVENT_XPATH:
-                    return self.parse_timer_event()
+                    event_definitions.append(self.parse_timer_event())
                 elif path == CANCEL_EVENT_XPATH:
-                    return self.parse_cancel_event()
+                    event_definitions.append(self.parse_cancel_event())
                 elif path == ERROR_EVENT_XPATH:
-                    return self.parse_error_event(event)
+                    event_definitions.append(self.parse_error_event(event))
                 elif path == ESCALATION_EVENT_XPATH:
-                    return self.parse_escalation_event(event)
+                    event_definitions.append(self.parse_escalation_event(event))
                 elif path == TERMINATION_EVENT_XPATH:
-                    return self.parse_terminate_event()
-        return NoneEventDefinition()
+                    event_definitions.append(self.parse_terminate_event())
+
+        parallel = self.node.get('parallelMultiple') == 'true'
+
+        if len(event_definitions) == 0:
+            return NoneEventDefinition()
+        elif len(event_definitions) == 1:
+            return event_definitions[0]
+        else:
+            return MultipleEventDefinition(event_definitions, parallel)


 class StartEventParser(EventDefinitionParser):
     """Parses a Start Event, and connects it to the internal spec.start task.

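Rather than returning on the first match, the parser now gathers every definition on the node and decides afterwards: none found means a plain NoneEventDefinition, exactly one is returned unchanged, and two or more are wrapped in a MultipleEventDefinition whose `parallel` flag comes from the BPMN `parallelMultiple` attribute. A standalone sketch of that selection rule, with stand-in classes rather than the library's:

```python
class NoneEventDefinition:
    """Stand-in for an event with no definition."""

class MultipleEventDefinition:
    """Stand-in aggregate of several definitions."""
    def __init__(self, event_definitions, parallel=False):
        self.event_definitions = event_definitions
        self.parallel = parallel

def select(event_definitions, parallel_multiple):
    # Mirrors the branch added above.
    if len(event_definitions) == 0:
        return NoneEventDefinition()
    elif len(event_definitions) == 1:
        return event_definitions[0]
    return MultipleEventDefinition(event_definitions, parallel_multiple)

print(select(['message_1'], False))                   # passed through unchanged
combined = select(['message_1', 'message_2'], True)
print(combined.parallel, combined.event_definitions)  # True ['message_1', 'message_2']
```
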
@@ -158,8 +171,7 @@ class StartEventParser(EventDefinitionParser):
         task = self._create_task(event_definition)
         self.spec.start.connect(task)
         if isinstance(event_definition, CycleTimerEventDefinition):
-            # We are misusing cycle timers, so this is a hack whereby we will
-            # revisit ourselves if we fire.
+            # We are misusing cycle timers, so this is a hack whereby we will revisit ourselves if we fire.
             task.connect(task)
         return task

@@ -229,3 +241,22 @@ class BoundaryEventParser(EventDefinitionParser):
         if isinstance(event_definition, NoneEventDefinition):
             raise NotImplementedError('Unsupported Catch Event: %r', etree.tostring(self.node))
         return self._create_task(event_definition, cancel_activity)
+
+
+class EventBasedGatewayParser(EventDefinitionParser):
+
+    def create_task(self):
+        return self._create_task(MultipleEventDefinition())
+
+    def handles_multiple_outgoing(self):
+        return True
+
+    def connect_outgoing(self, outgoing_task, outgoing_task_node, sequence_flow_node, is_default):
+        self.task.event_definition.event_definitions.append(outgoing_task.event_definition)
+        self.task.connect_outgoing(
+            outgoing_task,
+            sequence_flow_node.get('id'),
+            sequence_flow_node.get('name', None),
+            self.parse_documentation(sequence_flow_node)
+        )

@@ -7,7 +7,7 @@ from SpiffWorkflow.bpmn.specs.BpmnProcessSpec import BpmnDataSpecification

 from .dictionary import DictionaryConverter

-from ..specs.events.event_definitions import SignalEventDefinition, MessageEventDefinition, NoneEventDefinition
+from ..specs.events.event_definitions import MultipleEventDefinition, SignalEventDefinition, MessageEventDefinition, NoneEventDefinition
 from ..specs.events.event_definitions import TimerEventDefinition, CycleTimerEventDefinition, TerminateEventDefinition
 from ..specs.events.event_definitions import ErrorEventDefinition, EscalationEventDefinition, CancelEventDefinition
 from ..specs.events.event_definitions import CorrelationProperty, NamedEventDefinition

@@ -91,7 +91,7 @@ class BpmnTaskSpecConverter(DictionaryConverter):

         event_definitions = [ NoneEventDefinition, CancelEventDefinition, TerminateEventDefinition,
             SignalEventDefinition, MessageEventDefinition, ErrorEventDefinition, EscalationEventDefinition,
-            TimerEventDefinition, CycleTimerEventDefinition ]
+            TimerEventDefinition, CycleTimerEventDefinition , MultipleEventDefinition]

         for event_definition in event_definitions:
             self.register(

@@ -257,6 +257,9 @@ class BpmnTaskSpecConverter(DictionaryConverter):
             dct['error_code'] = event_definition.error_code
         if isinstance(event_definition, EscalationEventDefinition):
             dct['escalation_code'] = event_definition.escalation_code
+        if isinstance(event_definition, MultipleEventDefinition):
+            dct['event_definitions'] = [self.convert(e) for e in event_definition.event_definitions]
+            dct['parallel'] = event_definition.parallel

         return dct

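A serialized MultipleEventDefinition therefore carries its members as nested dicts plus the `parallel` flag, which is exactly what the `restore` branch in the next hunk consumes. The shape is roughly this (keys from the hunk; payload values illustrative only, and real entries also carry whatever typename/metadata the registered converters add):

```python
dct = {
    'internal': False,
    'external': True,
    'event_definitions': [
        {'name': 'message_1', 'internal': False, 'external': True},
        {'label': 'timer', 'internal': True, 'external': False},
    ],
    'parallel': False,
}
```
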
@@ -273,6 +276,8 @@ class BpmnTaskSpecConverter(DictionaryConverter):
         internal, external = dct.pop('internal'), dct.pop('external')
         if 'correlation_properties' in dct:
             dct['correlation_properties'] = [CorrelationProperty(**prop) for prop in dct['correlation_properties']]
+        if 'event_definitions' in dct:
+            dct['event_definitions'] = [self.restore(d) for d in dct['event_definitions']]
         event_definition = definition_class(**dct)
         event_definition.internal = internal
         event_definition.external = external

@@ -21,7 +21,7 @@ from ..specs.ParallelGateway import ParallelGateway

 from ..specs.events.StartEvent import StartEvent
 from ..specs.events.EndEvent import EndEvent
-from ..specs.events.IntermediateEvent import BoundaryEvent, IntermediateCatchEvent, IntermediateThrowEvent
+from ..specs.events.IntermediateEvent import BoundaryEvent, EventBasedGateway, IntermediateCatchEvent, IntermediateThrowEvent
 from ..specs.events.IntermediateEvent import _BoundaryEventParent, SendTask, ReceiveTask

 from ..workflow import BpmnWorkflow

@@ -52,6 +52,7 @@ class StartTaskConverter(BpmnTaskSpecConverter):
     def from_dict(self, dct):
         return self.task_spec_from_dict(dct)

+
 class LoopResetTaskConverter(BpmnTaskSpecConverter):

     def __init__(self, data_converter=None, typename=None):

@@ -70,6 +71,7 @@ class LoopResetTaskConverter(BpmnTaskSpecConverter):
         spec.destination_id = UUID(spec.destination_id)
         return spec

+
 class EndJoinConverter(BpmnTaskSpecConverter):

     def __init__(self, data_converter=None, typename=None):

@@ -310,3 +312,9 @@ class BoundaryEventParentConverter(BpmnTaskSpecConverter):

     def from_dict(self, dct):
         return self.task_spec_from_dict(dct)
+
+
+class EventBasedGatewayConverter(EventConverter):
+
+    def __init__(self, data_converter=None, typename=None):
+        super().__init__(EventBasedGateway, data_converter, typename)

@@ -17,7 +17,7 @@ from .task_spec_converters import SimpleTaskConverter, StartTaskConverter, EndJo
 from .task_spec_converters import NoneTaskConverter, UserTaskConverter, ManualTaskConverter, ScriptTaskConverter
 from .task_spec_converters import CallActivityTaskConverter, TransactionSubprocessTaskConverter
 from .task_spec_converters import StartEventConverter, EndEventConverter
-from .task_spec_converters import IntermediateCatchEventConverter, IntermediateThrowEventConverter
+from .task_spec_converters import IntermediateCatchEventConverter, IntermediateThrowEventConverter, EventBasedGatewayConverter
 from .task_spec_converters import SendTaskConverter, ReceiveTaskConverter
 from .task_spec_converters import BoundaryEventConverter, BoundaryEventParentConverter
 from .task_spec_converters import ParallelGatewayConverter, ExclusiveGatewayConverter, InclusiveGatewayConverter

@@ -27,7 +27,7 @@ DEFAULT_TASK_SPEC_CONVERTER_CLASSES = [
     NoneTaskConverter, UserTaskConverter, ManualTaskConverter, ScriptTaskConverter,
     CallActivityTaskConverter, TransactionSubprocessTaskConverter,
     StartEventConverter, EndEventConverter, SendTaskConverter, ReceiveTaskConverter,
-    IntermediateCatchEventConverter, IntermediateThrowEventConverter,
+    IntermediateCatchEventConverter, IntermediateThrowEventConverter, EventBasedGatewayConverter,
     BoundaryEventConverter, BoundaryEventParentConverter,
     ParallelGatewayConverter, ExclusiveGatewayConverter, InclusiveGatewayConverter
 ]

@@ -111,6 +111,7 @@ class _BoundaryEventParent(Simple, BpmnSpecMixin):
     def deserialize(cls, serializer, wf_spec, s_state):
         return serializer.deserialize_boundary_event_parent(wf_spec, s_state, cls)

+
 class BoundaryEvent(CatchingEvent):
     """Task Spec for a bpmn:boundaryEvent node."""

@@ -128,7 +129,6 @@ class BoundaryEvent(CatchingEvent):
         interrupting = 'Interrupting' if self.cancel_activity else 'Non-Interrupting'
         return f'{interrupting} {self.event_definition.event_type} Event'

-
     def catches(self, my_task, event_definition, correlations=None):
         # Boundary events should only be caught while waiting
         return super(BoundaryEvent, self).catches(my_task, event_definition, correlations) and my_task.state == TaskState.WAITING

@@ -148,3 +148,19 @@ class BoundaryEvent(CatchingEvent):
     @classmethod
     def deserialize(cls, serializer, wf_spec, s_state):
         return serializer.deserialize_boundary_event(wf_spec, s_state, cls)
+
+
+class EventBasedGateway(CatchingEvent):
+
+    @property
+    def spec_type(self):
+        return 'Event Based Gateway'
+
+    def _predict_hook(self, my_task):
+        my_task._sync_children(self.outputs, state=TaskState.MAYBE)
+
+    def _on_complete_hook(self, my_task):
+        for child in my_task.children:
+            if not child.task_spec.event_definition.has_fired(child):
+                child.cancel()
+        return super()._on_complete_hook(my_task)

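The completion hook above is what gives the gateway its first-event-wins semantics: when one branch's event fires and the gateway completes, every child whose event has not fired is cancelled. A stand-in demonstration of that loop (stub classes, not the library's):

```python
class StubEventDefinition:
    def __init__(self, fired):
        self.fired = fired
    def has_fired(self, task):
        return self.fired

class StubTask:
    def __init__(self, name, fired):
        self.name = name
        self.task_spec = type('Spec', (), {})()
        self.task_spec.event_definition = StubEventDefinition(fired)
        self.cancelled = False
    def cancel(self):
        self.cancelled = True

children = [StubTask('message_1_event', True), StubTask('message_2_event', False)]
for child in children:  # the loop from _on_complete_hook above
    if not child.task_spec.event_definition.has_fired(child):
        child.cancel()
print([(c.name, c.cancelled) for c in children])
# -> [('message_1_event', False), ('message_2_event', True)]
```
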
@@ -20,6 +20,7 @@
 import datetime
 from copy import deepcopy

+from SpiffWorkflow.task import TaskState

 class EventDefinition(object):
     """

@@ -69,10 +70,10 @@ class EventDefinition(object):
         # We also don't have a more sophisticated method for addressing events to
         # a particular process, but this at least provides a mechanism for distinguishing
         # between processes and subprocesses.
-        if self.internal:
-            workflow.catch(event)
         if self.external:
             outer_workflow.catch(event, correlations)
+        if self.internal and (self.external and workflow != outer_workflow):
+            workflow.catch(event)

     def __eq__(self, other):
         return self.__class__.__name__ == other.__class__.__name__

@@ -92,6 +93,7 @@ class EventDefinition(object):
         obj.internal, obj.external = internal, external
         return obj

+
 class NamedEventDefinition(EventDefinition):
     """
     Extend the base event class to provide a name for the event. Most throw/catch events

@@ -115,7 +117,6 @@ class NamedEventDefinition(EventDefinition):
         retdict['name'] = self.name
         return retdict

-
 class CancelEventDefinition(EventDefinition):
     """
     Cancel events are only handled by the outerworkflow, as they can only be used inside

@@ -307,6 +308,11 @@ class TimerEventDefinition(EventDefinition):
         The Timer is considered to have fired if the evaluated dateTime
         expression is before datetime.datetime.now()
         """
+
+        if my_task.internal_data.get('event_fired'):
+            # If we manually send this event, this will be set
+            return True
+
         dt = my_task.workflow.script_engine.evaluate(my_task, self.dateTime)
         if isinstance(dt,datetime.timedelta):
             if my_task._get_internal_data('start_time',None) is not None:

@@ -330,6 +336,9 @@ class TimerEventDefinition(EventDefinition):
             now = datetime.date.today()
         return now > dt

+    def __eq__(self, other):
+        return self.__class__.__name__ == other.__class__.__name__ and self.label == other.label
+
     def serialize(self):
         retdict = super(TimerEventDefinition, self).serialize()
         retdict['label'] = self.label

@@ -363,6 +372,10 @@ class CycleTimerEventDefinition(EventDefinition):
         # We will fire this timer whenever a cycle completes
         # The task itself will manage counting how many times it fires

+        if my_task.internal_data.get('event_fired'):
+            # If we manually send this event, this will be set
+            return True
+
         repeat, delta = my_task.workflow.script_engine.evaluate(my_task, self.cycle_definition)

         # This is the first time we've entered this event

@@ -393,8 +406,65 @@ class CycleTimerEventDefinition(EventDefinition):
         my_task.internal_data['start_time'] = None
         super(CycleTimerEventDefinition, self).reset(my_task)

+    def __eq__(self, other):
+        return self.__class__.__name__ == other.__class__.__name__ and self.label == other.label
+
     def serialize(self):
         retdict = super(CycleTimerEventDefinition, self).serialize()
         retdict['label'] = self.label
         retdict['cycle_definition'] = self.cycle_definition
         return retdict
+
+
+class MultipleEventDefinition(EventDefinition):
+
+    def __init__(self, event_definitions=None, parallel=False):
+        super().__init__()
+        self.event_definitions = event_definitions or []
+        self.parallel = parallel
+
+    @property
+    def event_type(self):
+        return 'Multiple'
+
+    def has_fired(self, my_task):
+
+        seen_events = my_task.internal_data.get('seen_events', [])
+        for event in self.event_definitions:
+            if isinstance(event, (TimerEventDefinition, CycleTimerEventDefinition)):
+                child = [c for c in my_task.children if c.task_spec.event_definition == event]
+                child[0].task_spec._update_hook(child[0])
+                child[0]._set_state(TaskState.MAYBE)
+                if event.has_fired(my_task):
+                    seen_events.append(event)
+
+        if self.parallel:
+            # Parallel multiple need to match all events
+            return all(event in seen_events for event in self.event_definitions)
+        else:
+            return len(seen_events) > 0
+
+    def catch(self, my_task, event_definition=None):
+        event_definition.catch(my_task, event_definition)
+        seen_events = my_task.internal_data.get('seen_events', []) + [event_definition]
+        my_task._set_internal_data(seen_events=seen_events)
+
+    def reset(self, my_task):
+        my_task.internal_data.pop('seen_events', None)
+        super().reset(my_task)
+
+    def __eq__(self, other):
+        # This event can catch any of the events associated with it
+        for event in self.event_definitions:
+            if event == other:
+                return True
+        return False
+
+    def throw(self, my_task):
+        # Mutiple events throw all associated events when they fire
+        for event_definition in self.event_definitions:
+            self._throw(
+                event=event_definition,
+                workflow=my_task.workflow,
+                outer_workflow=my_task.workflow.outer_workflow
+            )

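The overridden `__eq__` is the key trick in this class: a MultipleEventDefinition compares equal to any one of its member definitions, so an event addressed to a single message definition still matches the aggregate that wraps it; `has_fired` then requires all members (parallel) or any member (non-parallel). A stand-in demonstration of the equality rule:

```python
class Named:
    def __init__(self, name):
        self.name = name
    def __eq__(self, other):
        return isinstance(other, Named) and self.name == other.name

class Multiple:
    """Stand-in mirroring MultipleEventDefinition.__eq__ above."""
    def __init__(self, event_definitions):
        self.event_definitions = event_definitions
    def __eq__(self, other):
        return any(event == other for event in self.event_definitions)

aggregate = Multiple([Named('message_1'), Named('message_2')])
print(aggregate == Named('message_1'))  # True: equal to either member
print(aggregate == Named('message_3'))  # False
```
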
@@ -54,7 +54,7 @@ class CatchingEvent(Simple, BpmnSpecMixin):
             my_task._ready()
         super(CatchingEvent, self)._update_hook(my_task)

-    def _on_ready(self, my_task):
+    def _on_ready_hook(self, my_task):

         # None events don't propogate, so as soon as we're ready, we fire our event
         if isinstance(self.event_definition, NoneEventDefinition):

@@ -63,7 +63,7 @@ class CatchingEvent(Simple, BpmnSpecMixin):
         # If we have not seen the event we're waiting for, enter the waiting state
         if not self.event_definition.has_fired(my_task):
             my_task._set_state(TaskState.WAITING)
-        super(CatchingEvent, self)._on_ready(my_task)
+        super(CatchingEvent, self)._on_ready_hook(my_task)

     def _on_complete_hook(self, my_task):

@@ -16,7 +16,7 @@
 # Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
 # 02110-1301 USA

-from SpiffWorkflow.bpmn.specs.events.event_definitions import MessageEventDefinition
+from SpiffWorkflow.bpmn.specs.events.event_definitions import MessageEventDefinition, MultipleEventDefinition
 from .PythonScriptEngine import PythonScriptEngine
 from .specs.events.event_types import CatchingEvent
 from .specs.events.StartEvent import StartEvent

@@ -113,6 +113,14 @@ class BpmnWorkflow(Workflow):
             workflow = workflow.outer_workflow
         return workflow

+    def _get_or_create_subprocess(self, task_spec, wf_spec):
+        if isinstance(task_spec.event_definition, MultipleEventDefinition):
+            for sp in self.subprocesses.values():
+                start = sp.get_tasks_from_spec_name(task_spec.name)
+                if len(start) and start[0].state == TaskState.WAITING:
+                    return sp
+        return self.add_subprocess(wf_spec.name, f'{wf_spec.name}_{len(self.subprocesses)}')
+
     def catch(self, event_definition, correlations=None):
         """
         Send an event definition to any tasks that catch it.

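For a parallel multiple start event, the first message has to create the subprocess and later messages have to land in the same, still-waiting instance rather than spawning a fresh one each time; that is the reuse rule `_get_or_create_subprocess` adds. A stand-in sketch of the decision (hypothetical names, plain data instead of workflow objects):

```python
def get_or_create(start_state_by_instance, multiple):
    # Reuse an instance whose start task is still waiting for more events;
    # otherwise mint a new name the way add_subprocess above does.
    if multiple:
        for name, state in start_state_by_instance.items():
            if state == 'WAITING':
                return name
    return 'main_%d' % len(start_state_by_instance)

print(get_or_create({'main_0': 'WAITING'}, True))    # -> main_0 (reused)
print(get_or_create({'main_0': 'COMPLETED'}, True))  # -> main_1 (new instance)
```
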
@@ -134,10 +142,8 @@ class BpmnWorkflow(Workflow):
         for name, spec in self.subprocess_specs.items():
             for task_spec in list(spec.task_specs.values()):
                 if isinstance(task_spec, StartEvent) and task_spec.event_definition == event_definition:
-                    subprocess = self.add_subprocess(spec.name, f'{spec.name}_{len(self.subprocesses)}')
-                    subprocess.correlations = correlations or {}
-                    start = self.get_tasks_from_spec_name(task_spec.name, workflow=subprocess)[0]
-                    task_spec.event_definition.catch(start, event_definition)
+                    subprocess = self._get_or_create_subprocess(task_spec, spec)
+                    subprocess.correlations.update(correlations or {})

         # We need to get all the tasks that catch an event before completing any of them
         # in order to prevent the scenario where multiple boundary events catch the

@@ -30,9 +30,7 @@ class DMNEngine:
         a given task."""
         result = {}
         matched_rules = self.decide(task)
-        if len(matched_rules) == 1:
-            result = matched_rules[0].output_as_dict(task)
-        elif len(matched_rules) > 1: # This must be a multi-output
+        if self.decision_table.hit_policy == HitPolicy.COLLECT.value:
             # each output will be an array of values, all outputs will
             # be placed in a dict, which we will then merge.
             for rule in matched_rules:

@@ -41,6 +39,8 @@ class DMNEngine:
                     if not key in result:
                         result[key] = []
                     result[key].append(rule_output[key])
+        elif len(matched_rules) > 0:
+            result = matched_rules[0].output_as_dict(task)
         return result

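With the COLLECT hit policy, every matched rule contributes and each output key accumulates a list of values; under any other policy only the first matched rule's outputs are used. A plain-dict sketch of the COLLECT merge performed above (rule outputs illustrative):

```python
matched_outputs = [  # output_as_dict results for two matched rules
    {'discount': 5, 'reason': 'loyalty'},
    {'discount': 10, 'reason': 'volume'},
]

result = {}
for rule_output in matched_outputs:
    for key, value in rule_output.items():
        result.setdefault(key, []).append(value)

print(result)  # -> {'discount': [5, 10], 'reason': ['loyalty', 'volume']}
```
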
@@ -21,6 +21,7 @@ class BusinessRuleTaskConverter(BpmnTaskSpecConverter):
         return {
             'id': table.id,
             'name': table.name,
+            'hit_policy': table.hit_policy,
             'inputs': [val.__dict__ for val in table.inputs],
             'outputs': [val.__dict__ for val in table.outputs],
             'rules': [self.rule_to_dict(rule) for rule in table.rules],

@@ -3,7 +3,7 @@ from functools import partial
 from SpiffWorkflow.bpmn.serializer.bpmn_converters import BpmnTaskSpecConverter
 from SpiffWorkflow.bpmn.specs.events.StartEvent import StartEvent
 from SpiffWorkflow.bpmn.specs.events.EndEvent import EndEvent
-from SpiffWorkflow.bpmn.specs.events.IntermediateEvent import IntermediateThrowEvent, IntermediateCatchEvent, BoundaryEvent
+from SpiffWorkflow.bpmn.specs.events.IntermediateEvent import IntermediateThrowEvent, IntermediateCatchEvent, BoundaryEvent, EventBasedGateway
 from SpiffWorkflow.spiff.specs.none_task import NoneTask
 from SpiffWorkflow.spiff.specs.manual_task import ManualTask
 from SpiffWorkflow.spiff.specs.user_task import UserTask

@@ -164,3 +164,7 @@ class ReceiveTaskConverter(SpiffEventConverter):
         dct['prescript'] = spec.prescript
         dct['postscript'] = spec.postscript
         return dct
+
+class EventBasedGatewayConverter(SpiffEventConverter):
+    def __init__(self, data_converter=None, typename=None):
+        super().__init__(EventBasedGateway, data_converter, typename)

@@ -36,7 +36,7 @@ Spiff Workflow is published under the terms of the

 Support
 -------
-You can find us on `our Discord Channel <https://discord.gg/zDEBEnrF>`_
+You can find us on `our Discord Channel <https://discord.gg/BYHcc7PpUC>`_

 Commercial support for SpiffWorkflow is available from
 `Sartography <https://sartography.com>`_

@@ -0,0 +1,107 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<bpmn:definitions xmlns:bpmn="http://www.omg.org/spec/BPMN/20100524/MODEL" xmlns:bpmndi="http://www.omg.org/spec/BPMN/20100524/DI" xmlns:dc="http://www.omg.org/spec/DD/20100524/DC" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xmlns:di="http://www.omg.org/spec/DD/20100524/DI" xmlns:modeler="http://camunda.org/schema/modeler/1.0" id="Definitions_1jaorpt" targetNamespace="http://bpmn.io/schema/bpmn" exporter="Camunda Modeler" exporterVersion="4.11.1" modeler:executionPlatform="Camunda Platform" modeler:executionPlatformVersion="7.15.0">
+  <bpmn:process id="Process_0pvx19v" isExecutable="true">
+    <bpmn:startEvent id="StartEvent_1">
+      <bpmn:outgoing>Flow_0w4b5t2</bpmn:outgoing>
+    </bpmn:startEvent>
+    <bpmn:sequenceFlow id="Flow_0w4b5t2" sourceRef="StartEvent_1" targetRef="Gateway_1434v9l" />
+    <bpmn:eventBasedGateway id="Gateway_1434v9l">
+      <bpmn:incoming>Flow_0w4b5t2</bpmn:incoming>
+      <bpmn:outgoing>Flow_0gge7fn</bpmn:outgoing>
+      <bpmn:outgoing>Flow_0px7ksu</bpmn:outgoing>
+      <bpmn:outgoing>Flow_1rfbrlf</bpmn:outgoing>
+    </bpmn:eventBasedGateway>
+    <bpmn:intermediateCatchEvent id="message_1_event">
+      <bpmn:incoming>Flow_0gge7fn</bpmn:incoming>
+      <bpmn:outgoing>Flow_1g4g85l</bpmn:outgoing>
+      <bpmn:messageEventDefinition id="MessageEventDefinition_158nhox" messageRef="Message_0lyfmat" />
+    </bpmn:intermediateCatchEvent>
+    <bpmn:sequenceFlow id="Flow_0gge7fn" sourceRef="Gateway_1434v9l" targetRef="message_1_event" />
+    <bpmn:intermediateCatchEvent id="message_2_event">
+      <bpmn:incoming>Flow_0px7ksu</bpmn:incoming>
+      <bpmn:outgoing>Flow_18v90rx</bpmn:outgoing>
+      <bpmn:messageEventDefinition id="MessageEventDefinition_1w1pnze" messageRef="Message_1ntpwce" />
+    </bpmn:intermediateCatchEvent>
+    <bpmn:sequenceFlow id="Flow_0px7ksu" sourceRef="Gateway_1434v9l" targetRef="message_2_event" />
+    <bpmn:intermediateCatchEvent id="timer_event">
+      <bpmn:incoming>Flow_1rfbrlf</bpmn:incoming>
+      <bpmn:outgoing>Flow_0mppjk9</bpmn:outgoing>
+      <bpmn:timerEventDefinition id="TimerEventDefinition_0reo0gl">
+        <bpmn:timeDuration xsi:type="bpmn:tFormalExpression">timedelta(seconds=1)</bpmn:timeDuration>
+      </bpmn:timerEventDefinition>
+    </bpmn:intermediateCatchEvent>
+    <bpmn:sequenceFlow id="Flow_1rfbrlf" sourceRef="Gateway_1434v9l" targetRef="timer_event" />
+    <bpmn:endEvent id="timer">
+      <bpmn:incoming>Flow_0mppjk9</bpmn:incoming>
+    </bpmn:endEvent>
+    <bpmn:sequenceFlow id="Flow_0mppjk9" sourceRef="timer_event" targetRef="timer" />
+    <bpmn:endEvent id="message_1_end">
+      <bpmn:incoming>Flow_1g4g85l</bpmn:incoming>
+    </bpmn:endEvent>
+    <bpmn:sequenceFlow id="Flow_1g4g85l" sourceRef="message_1_event" targetRef="message_1_end" />
+    <bpmn:endEvent id="message_2_end">
+      <bpmn:incoming>Flow_18v90rx</bpmn:incoming>
+    </bpmn:endEvent>
+    <bpmn:sequenceFlow id="Flow_18v90rx" sourceRef="message_2_event" targetRef="message_2_end" />
+  </bpmn:process>
+  <bpmn:message id="Message_0lyfmat" name="message_1" />
+  <bpmn:message id="Message_1ntpwce" name="message_2" />
+  <bpmndi:BPMNDiagram id="BPMNDiagram_1">
+    <bpmndi:BPMNPlane id="BPMNPlane_1" bpmnElement="Process_0pvx19v">
+      <bpmndi:BPMNEdge id="Flow_18v90rx_di" bpmnElement="Flow_18v90rx">
+        <di:waypoint x="408" y="230" />
+        <di:waypoint x="472" y="230" />
+      </bpmndi:BPMNEdge>
+      <bpmndi:BPMNEdge id="Flow_1g4g85l_di" bpmnElement="Flow_1g4g85l">
+        <di:waypoint x="408" y="117" />
+        <di:waypoint x="472" y="117" />
+      </bpmndi:BPMNEdge>
+      <bpmndi:BPMNEdge id="Flow_0mppjk9_di" bpmnElement="Flow_0mppjk9">
+        <di:waypoint x="408" y="340" />
+        <di:waypoint x="472" y="340" />
+      </bpmndi:BPMNEdge>
+      <bpmndi:BPMNEdge id="Flow_1rfbrlf_di" bpmnElement="Flow_1rfbrlf">
+        <di:waypoint x="290" y="142" />
+        <di:waypoint x="290" y="340" />
+        <di:waypoint x="372" y="340" />
+      </bpmndi:BPMNEdge>
+      <bpmndi:BPMNEdge id="Flow_0px7ksu_di" bpmnElement="Flow_0px7ksu">
+        <di:waypoint x="290" y="142" />
+        <di:waypoint x="290" y="230" />
+        <di:waypoint x="372" y="230" />
+      </bpmndi:BPMNEdge>
+      <bpmndi:BPMNEdge id="Flow_0gge7fn_di" bpmnElement="Flow_0gge7fn">
+        <di:waypoint x="315" y="117" />
+        <di:waypoint x="372" y="117" />
+      </bpmndi:BPMNEdge>
+      <bpmndi:BPMNEdge id="Flow_0w4b5t2_di" bpmnElement="Flow_0w4b5t2">
+        <di:waypoint x="215" y="117" />
+        <di:waypoint x="265" y="117" />
+      </bpmndi:BPMNEdge>
+      <bpmndi:BPMNShape id="_BPMNShape_StartEvent_2" bpmnElement="StartEvent_1">
+        <dc:Bounds x="179" y="99" width="36" height="36" />
+      </bpmndi:BPMNShape>
+      <bpmndi:BPMNShape id="Gateway_0gplu2e_di" bpmnElement="Gateway_1434v9l">
+        <dc:Bounds x="265" y="92" width="50" height="50" />
+      </bpmndi:BPMNShape>
+      <bpmndi:BPMNShape id="Event_1og7irs_di" bpmnElement="message_1_event">
+        <dc:Bounds x="372" y="99" width="36" height="36" />
+      </bpmndi:BPMNShape>
+      <bpmndi:BPMNShape id="Event_0quxwe6_di" bpmnElement="message_2_event">
+        <dc:Bounds x="372" y="212" width="36" height="36" />
+      </bpmndi:BPMNShape>
+      <bpmndi:BPMNShape id="Event_1ea7gov_di" bpmnElement="timer_event">
+        <dc:Bounds x="372" y="322" width="36" height="36" />
+      </bpmndi:BPMNShape>
+      <bpmndi:BPMNShape id="Event_0emrepu_di" bpmnElement="timer">
+        <dc:Bounds x="472" y="322" width="36" height="36" />
+      </bpmndi:BPMNShape>
+      <bpmndi:BPMNShape id="Event_0x07cac_di" bpmnElement="message_1_end">
+        <dc:Bounds x="472" y="99" width="36" height="36" />
+      </bpmndi:BPMNShape>
+      <bpmndi:BPMNShape id="Event_1p7slpj_di" bpmnElement="message_2_end">
+        <dc:Bounds x="472" y="212" width="36" height="36" />
+      </bpmndi:BPMNShape>
+    </bpmndi:BPMNPlane>
+  </bpmndi:BPMNDiagram>
+</bpmn:definitions>

@@ -0,0 +1,43 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<bpmn:definitions xmlns:bpmn="http://www.omg.org/spec/BPMN/20100524/MODEL" xmlns:bpmndi="http://www.omg.org/spec/BPMN/20100524/DI" xmlns:dc="http://www.omg.org/spec/DD/20100524/DC" xmlns:di="http://www.omg.org/spec/DD/20100524/DI" xmlns:camunda="http://camunda.org/schema/1.0/bpmn" xmlns:modeler="http://camunda.org/schema/modeler/1.0" id="Definitions_19o7vxg" targetNamespace="http://bpmn.io/schema/bpmn" exporter="Camunda Modeler" exporterVersion="5.0.0" modeler:executionPlatform="Camunda Platform" modeler:executionPlatformVersion="7.17.0">
+  <bpmn:process id="main" name="Main" isExecutable="true">
+    <bpmn:startEvent id="StartEvent_1" parallelMultiple="true">
+      <bpmn:outgoing>Flow_1tr2mqr</bpmn:outgoing>
+      <bpmn:messageEventDefinition id="MessageEventDefinition_158nhox" messageRef="Message_0lyfmat" />
+      <bpmn:messageEventDefinition id="MessageEventDefinition_1w1pnze" messageRef="Message_1ntpwce" />
+    </bpmn:startEvent>
+    <bpmn:task id="any_task" name="Any Task">
+      <bpmn:incoming>Flow_1tr2mqr</bpmn:incoming>
+      <bpmn:outgoing>Flow_1qjctmo</bpmn:outgoing>
+    </bpmn:task>
+    <bpmn:sequenceFlow id="Flow_1tr2mqr" sourceRef="StartEvent_1" targetRef="any_task" />
+    <bpmn:endEvent id="Event_0hamwsf">
+      <bpmn:incoming>Flow_1qjctmo</bpmn:incoming>
+    </bpmn:endEvent>
+    <bpmn:sequenceFlow id="Flow_1qjctmo" sourceRef="any_task" targetRef="Event_0hamwsf" />
+  </bpmn:process>
+  <bpmn:message id="Message_0lyfmat" name="message_1" />
+  <bpmn:message id="Message_1ntpwce" name="message_2" />
+  <bpmndi:BPMNDiagram id="BPMNDiagram_1">
+    <bpmndi:BPMNPlane id="BPMNPlane_1" bpmnElement="main">
+      <bpmndi:BPMNShape id="Event_1mddj6x_di" bpmnElement="StartEvent_1">
+        <dc:Bounds x="179" y="169" width="36" height="36" />
+      </bpmndi:BPMNShape>
+      <bpmndi:BPMNShape id="Activity_1bn6xph_di" bpmnElement="any_task">
+        <dc:Bounds x="270" y="147" width="100" height="80" />
+        <bpmndi:BPMNLabel />
+      </bpmndi:BPMNShape>
+      <bpmndi:BPMNShape id="Event_0hamwsf_di" bpmnElement="Event_0hamwsf">
+        <dc:Bounds x="432" y="169" width="36" height="36" />
+      </bpmndi:BPMNShape>
+      <bpmndi:BPMNEdge id="Flow_1tr2mqr_di" bpmnElement="Flow_1tr2mqr">
+        <di:waypoint x="215" y="187" />
+        <di:waypoint x="270" y="187" />
+      </bpmndi:BPMNEdge>
+      <bpmndi:BPMNEdge id="Flow_1qjctmo_di" bpmnElement="Flow_1qjctmo">
+        <di:waypoint x="370" y="187" />
+        <di:waypoint x="432" y="187" />
+      </bpmndi:BPMNEdge>
+    </bpmndi:BPMNPlane>
+  </bpmndi:BPMNDiagram>
+</bpmn:definitions>

@@ -0,0 +1,43 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<bpmn:definitions xmlns:bpmn="http://www.omg.org/spec/BPMN/20100524/MODEL" xmlns:bpmndi="http://www.omg.org/spec/BPMN/20100524/DI" xmlns:dc="http://www.omg.org/spec/DD/20100524/DC" xmlns:di="http://www.omg.org/spec/DD/20100524/DI" xmlns:camunda="http://camunda.org/schema/1.0/bpmn" xmlns:modeler="http://camunda.org/schema/modeler/1.0" id="Definitions_19o7vxg" targetNamespace="http://bpmn.io/schema/bpmn" exporter="Camunda Modeler" exporterVersion="5.0.0" modeler:executionPlatform="Camunda Platform" modeler:executionPlatformVersion="7.17.0">
+  <bpmn:process id="main" name="Main" isExecutable="true">
+    <bpmn:startEvent id="StartEvent_1">
+      <bpmn:outgoing>Flow_1tr2mqr</bpmn:outgoing>
+      <bpmn:messageEventDefinition id="MessageEventDefinition_158nhox" messageRef="Message_0lyfmat" />
+      <bpmn:messageEventDefinition id="MessageEventDefinition_1w1pnze" messageRef="Message_1ntpwce" />
+    </bpmn:startEvent>
+    <bpmn:task id="any_task" name="Any Task">
+      <bpmn:incoming>Flow_1tr2mqr</bpmn:incoming>
+      <bpmn:outgoing>Flow_1qjctmo</bpmn:outgoing>
+    </bpmn:task>
+    <bpmn:sequenceFlow id="Flow_1tr2mqr" sourceRef="StartEvent_1" targetRef="any_task" />
+    <bpmn:endEvent id="Event_0hamwsf">
+      <bpmn:incoming>Flow_1qjctmo</bpmn:incoming>
+    </bpmn:endEvent>
+    <bpmn:sequenceFlow id="Flow_1qjctmo" sourceRef="any_task" targetRef="Event_0hamwsf" />
+  </bpmn:process>
+  <bpmn:message id="Message_0lyfmat" name="message_1" />
+  <bpmn:message id="Message_1ntpwce" name="message_2" />
+  <bpmndi:BPMNDiagram id="BPMNDiagram_1">
+    <bpmndi:BPMNPlane id="BPMNPlane_1" bpmnElement="main">
+      <bpmndi:BPMNShape id="Event_1mddj6x_di" bpmnElement="StartEvent_1">
+        <dc:Bounds x="179" y="169" width="36" height="36" />
+      </bpmndi:BPMNShape>
+      <bpmndi:BPMNShape id="Activity_1bn6xph_di" bpmnElement="any_task">
+        <dc:Bounds x="270" y="147" width="100" height="80" />
+        <bpmndi:BPMNLabel />
+      </bpmndi:BPMNShape>
+      <bpmndi:BPMNShape id="Event_0hamwsf_di" bpmnElement="Event_0hamwsf">
+        <dc:Bounds x="432" y="169" width="36" height="36" />
+      </bpmndi:BPMNShape>
+      <bpmndi:BPMNEdge id="Flow_1tr2mqr_di" bpmnElement="Flow_1tr2mqr">
+        <di:waypoint x="215" y="187" />
+        <di:waypoint x="270" y="187" />
+      </bpmndi:BPMNEdge>
+      <bpmndi:BPMNEdge id="Flow_1qjctmo_di" bpmnElement="Flow_1qjctmo">
+        <di:waypoint x="370" y="187" />
+        <di:waypoint x="432" y="187" />
+      </bpmndi:BPMNEdge>
+    </bpmndi:BPMNPlane>
+  </bpmndi:BPMNDiagram>
+</bpmn:definitions>

@@ -0,0 +1,88 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<bpmn:definitions xmlns:bpmn="http://www.omg.org/spec/BPMN/20100524/MODEL" xmlns:bpmndi="http://www.omg.org/spec/BPMN/20100524/DI" xmlns:dc="http://www.omg.org/spec/DD/20100524/DC" xmlns:di="http://www.omg.org/spec/DD/20100524/DI" xmlns:camunda="http://camunda.org/schema/1.0/bpmn" xmlns:modeler="http://camunda.org/schema/modeler/1.0" id="Definitions_19o7vxg" targetNamespace="http://bpmn.io/schema/bpmn" exporter="Camunda Modeler" exporterVersion="5.0.0" modeler:executionPlatform="Camunda Platform" modeler:executionPlatformVersion="7.17.0">
+  <bpmn:message id="Message_0lyfmat" name="message_1" />
+  <bpmn:message id="Message_1ntpwce" name="message_2" />
+  <bpmn:collaboration id="top">
+    <bpmn:participant id="responder" name="Responder" processRef="respond" />
+    <bpmn:participant id="initiator" name="Initiator" processRef="initiate" />
+  </bpmn:collaboration>
+  <bpmn:process id="respond" name="Respond" isExecutable="true">
+    <bpmn:startEvent id="Event_07g2lnb" parallelMultiple="true">
+      <bpmn:outgoing>Flow_04uk4n8</bpmn:outgoing>
+      <bpmn:messageEventDefinition id="MessageEventDefinition_158nhox" messageRef="Message_0lyfmat" />
+      <bpmn:messageEventDefinition id="MessageEventDefinition_1w1pnze" messageRef="Message_1ntpwce" />
+    </bpmn:startEvent>
+    <bpmn:endEvent id="Event_0hamwsf">
+      <bpmn:incoming>Flow_08al33k</bpmn:incoming>
+    </bpmn:endEvent>
+    <bpmn:task id="any_task" name="Any Task">
+      <bpmn:incoming>Flow_04uk4n8</bpmn:incoming>
+      <bpmn:outgoing>Flow_08al33k</bpmn:outgoing>
+    </bpmn:task>
+    <bpmn:sequenceFlow id="Flow_04uk4n8" sourceRef="Event_07g2lnb" targetRef="any_task" />
+    <bpmn:sequenceFlow id="Flow_08al33k" sourceRef="any_task" targetRef="Event_0hamwsf" />
+  </bpmn:process>
+  <bpmn:process id="initiate" name="Initiate" isExecutable="true">
+    <bpmn:startEvent id="Event_0jamixt">
+      <bpmn:outgoing>Flow_1wgdi4h</bpmn:outgoing>
+    </bpmn:startEvent>
+    <bpmn:sequenceFlow id="Flow_1wgdi4h" sourceRef="Event_0jamixt" targetRef="Event_0n8a7vh" />
+    <bpmn:sequenceFlow id="Flow_1wxjn4e" sourceRef="Event_0n8a7vh" targetRef="Event_0vork94" />
+    <bpmn:endEvent id="Event_0vork94">
+      <bpmn:incoming>Flow_1wxjn4e</bpmn:incoming>
+    </bpmn:endEvent>
+    <bpmn:intermediateThrowEvent id="Event_0n8a7vh">
+      <bpmn:incoming>Flow_1wgdi4h</bpmn:incoming>
+      <bpmn:outgoing>Flow_1wxjn4e</bpmn:outgoing>
+      <bpmn:messageEventDefinition id="MessageEventDefinition_158nhox_throw" messageRef="Message_0lyfmat" />
+      <bpmn:messageEventDefinition id="MessageEventDefinition_1w1pnze_throw" messageRef="Message_1ntpwce" />
+    </bpmn:intermediateThrowEvent>
+  </bpmn:process>
+  <bpmndi:BPMNDiagram id="BPMNDiagram_1">
+    <bpmndi:BPMNPlane id="BPMNPlane_1" bpmnElement="top">
+      <bpmndi:BPMNShape id="Participant_0ctz0ow_di" bpmnElement="responder" isHorizontal="true">
+        <dc:Bounds x="120" y="62" width="430" height="250" />
+        <bpmndi:BPMNLabel />
+      </bpmndi:BPMNShape>
+      <bpmndi:BPMNShape id="Event_07g2lnb_di" bpmnElement="Event_07g2lnb">
+        <dc:Bounds x="192" y="169" width="36" height="36" />
+      </bpmndi:BPMNShape>
+      <bpmndi:BPMNShape id="Event_0hamwsf_di" bpmnElement="Event_0hamwsf">
+        <dc:Bounds x="432" y="169" width="36" height="36" />
+      </bpmndi:BPMNShape>
+      <bpmndi:BPMNShape id="Activity_1yaipjy_di" bpmnElement="any_task">
+        <dc:Bounds x="280" y="147" width="100" height="80" />
+        <bpmndi:BPMNLabel />
+      </bpmndi:BPMNShape>
+      <bpmndi:BPMNEdge id="Flow_04uk4n8_di" bpmnElement="Flow_04uk4n8">
+        <di:waypoint x="228" y="187" />
+        <di:waypoint x="280" y="187" />
+      </bpmndi:BPMNEdge>
+      <bpmndi:BPMNEdge id="Flow_08al33k_di" bpmnElement="Flow_08al33k">
+        <di:waypoint x="380" y="187" />
+        <di:waypoint x="432" y="187" />
+      </bpmndi:BPMNEdge>
+      <bpmndi:BPMNShape id="Participant_0gyj8ha_di" bpmnElement="initiator" isHorizontal="true">
+        <dc:Bounds x="120" y="350" width="430" height="250" />
+        <bpmndi:BPMNLabel />
+      </bpmndi:BPMNShape>
+      <bpmndi:BPMNShape id="Event_0jamixt_di" bpmnElement="Event_0jamixt">
+        <dc:Bounds x="192" y="452" width="36" height="36" />
+      </bpmndi:BPMNShape>
+      <bpmndi:BPMNShape id="Event_0vork94_di" bpmnElement="Event_0vork94">
+        <dc:Bounds x="432" y="452" width="36" height="36" />
+      </bpmndi:BPMNShape>
+      <bpmndi:BPMNShape id="Event_0n8a7vh_di" bpmnElement="Event_0n8a7vh">
+        <dc:Bounds x="322" y="452" width="36" height="36" />
+      </bpmndi:BPMNShape>
+      <bpmndi:BPMNEdge id="Flow_1wgdi4h_di" bpmnElement="Flow_1wgdi4h">
+        <di:waypoint x="228" y="470" />
+        <di:waypoint x="322" y="470" />
+      </bpmndi:BPMNEdge>
+      <bpmndi:BPMNEdge id="Flow_1wxjn4e_di" bpmnElement="Flow_1wxjn4e">
+        <di:waypoint x="358" y="470" />
+        <di:waypoint x="432" y="470" />
+      </bpmndi:BPMNEdge>
+    </bpmndi:BPMNPlane>
+  </bpmndi:BPMNDiagram>
+</bpmn:definitions>

@@ -0,0 +1,87 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<bpmn:definitions xmlns:bpmn="http://www.omg.org/spec/BPMN/20100524/MODEL" xmlns:bpmndi="http://www.omg.org/spec/BPMN/20100524/DI" xmlns:dc="http://www.omg.org/spec/DD/20100524/DC" xmlns:di="http://www.omg.org/spec/DD/20100524/DI" xmlns:camunda="http://camunda.org/schema/1.0/bpmn" xmlns:modeler="http://camunda.org/schema/modeler/1.0" id="Definitions_19o7vxg" targetNamespace="http://bpmn.io/schema/bpmn" exporter="Camunda Modeler" exporterVersion="5.0.0" modeler:executionPlatform="Camunda Platform" modeler:executionPlatformVersion="7.17.0">
+  <bpmn:message id="Message_0lyfmat" name="message_1" />
+  <bpmn:message id="Message_1ntpwce" name="message_2" />
+  <bpmn:collaboration id="top">
+    <bpmn:participant id="responder" name="Responder" processRef="respond" />
+    <bpmn:participant id="initiator" name="Initiator" processRef="initiate" />
+  </bpmn:collaboration>
+  <bpmn:process id="respond" name="Respond" isExecutable="true">
+    <bpmn:endEvent id="Event_0hamwsf">
+      <bpmn:incoming>Flow_1tr2mqr</bpmn:incoming>
+    </bpmn:endEvent>
+    <bpmn:sequenceFlow id="Flow_1tr2mqr" sourceRef="StartEvent_1" targetRef="Event_0hamwsf" />
+    <bpmn:intermediateCatchEvent id="StartEvent_1" parallelMultiple="true">
+      <bpmn:incoming>Flow_1wohnl8</bpmn:incoming>
+      <bpmn:outgoing>Flow_1tr2mqr</bpmn:outgoing>
+      <bpmn:messageEventDefinition id="MessageEventDefinition_158nhox" messageRef="Message_0lyfmat" />
+      <bpmn:messageEventDefinition id="MessageEventDefinition_1w1pnze" messageRef="Message_1ntpwce" />
+    </bpmn:intermediateCatchEvent>
+    <bpmn:startEvent id="Event_07g2lnb">
+      <bpmn:outgoing>Flow_1wohnl8</bpmn:outgoing>
+    </bpmn:startEvent>
+    <bpmn:sequenceFlow id="Flow_1wohnl8" sourceRef="Event_07g2lnb" targetRef="StartEvent_1" />
+  </bpmn:process>
+  <bpmn:process id="initiate" name="Initiate" isExecutable="true">
+    <bpmn:startEvent id="Event_0jamixt">
+      <bpmn:outgoing>Flow_1wgdi4h</bpmn:outgoing>
+    </bpmn:startEvent>
+    <bpmn:sequenceFlow id="Flow_1wgdi4h" sourceRef="Event_0jamixt" targetRef="Event_0n8a7vh" />
+    <bpmn:sequenceFlow id="Flow_1wxjn4e" sourceRef="Event_0n8a7vh" targetRef="Event_0vork94" />
+    <bpmn:endEvent id="Event_0vork94">
+      <bpmn:incoming>Flow_1wxjn4e</bpmn:incoming>
+    </bpmn:endEvent>
+    <bpmn:intermediateThrowEvent id="Event_0n8a7vh">
+      <bpmn:incoming>Flow_1wgdi4h</bpmn:incoming>
+      <bpmn:outgoing>Flow_1wxjn4e</bpmn:outgoing>
+      <bpmn:messageEventDefinition id="MessageEventDefinition_158nhox_throw" messageRef="Message_0lyfmat" />
+      <bpmn:messageEventDefinition id="MessageEventDefinition_1w1pnze_throw" messageRef="Message_1ntpwce" />
+    </bpmn:intermediateThrowEvent>
+  </bpmn:process>
+  <bpmndi:BPMNDiagram id="BPMNDiagram_1">
+    <bpmndi:BPMNPlane id="BPMNPlane_1" bpmnElement="top">
+      <bpmndi:BPMNShape id="Participant_0ctz0ow_di" bpmnElement="responder" isHorizontal="true">
+        <dc:Bounds x="120" y="62" width="430" height="250" />
+        <bpmndi:BPMNLabel />
+      </bpmndi:BPMNShape>
+      <bpmndi:BPMNShape id="Event_0hamwsf_di" bpmnElement="Event_0hamwsf">
+        <dc:Bounds x="432" y="169" width="36" height="36" />
+      </bpmndi:BPMNShape>
+      <bpmndi:BPMNShape id="Event_1mddj6x_di" bpmnElement="StartEvent_1">
+        <dc:Bounds x="312" y="169" width="36" height="36" />
+      </bpmndi:BPMNShape>
+      <bpmndi:BPMNShape id="Event_07g2lnb_di" bpmnElement="Event_07g2lnb">
+        <dc:Bounds x="192" y="169" width="36" height="36" />
+      </bpmndi:BPMNShape>
+      <bpmndi:BPMNEdge id="Flow_1tr2mqr_di" bpmnElement="Flow_1tr2mqr">
+        <di:waypoint x="348" y="187" />
+        <di:waypoint x="432" y="187" />
+      </bpmndi:BPMNEdge>
+      <bpmndi:BPMNEdge id="Flow_1wohnl8_di" bpmnElement="Flow_1wohnl8">
+        <di:waypoint x="228" y="187" />
+        <di:waypoint x="312" y="187" />
+      </bpmndi:BPMNEdge>
+      <bpmndi:BPMNShape id="Participant_0gyj8ha_di" bpmnElement="initiator" isHorizontal="true">
+        <dc:Bounds x="120" y="350" width="430" height="250" />
+        <bpmndi:BPMNLabel />
+      </bpmndi:BPMNShape>
+      <bpmndi:BPMNShape id="Event_0jamixt_di" bpmnElement="Event_0jamixt">
+        <dc:Bounds x="192" y="452" width="36" height="36" />
+      </bpmndi:BPMNShape>
+      <bpmndi:BPMNShape id="Event_0vork94_di" bpmnElement="Event_0vork94">
+        <dc:Bounds x="432" y="452" width="36" height="36" />
+      </bpmndi:BPMNShape>
+      <bpmndi:BPMNShape id="Event_0n8a7vh_di" bpmnElement="Event_0n8a7vh">
+        <dc:Bounds x="322" y="452" width="36" height="36" />
+      </bpmndi:BPMNShape>
+      <bpmndi:BPMNEdge id="Flow_1wgdi4h_di" bpmnElement="Flow_1wgdi4h">
+        <di:waypoint x="228" y="470" />
+        <di:waypoint x="322" y="470" />
+      </bpmndi:BPMNEdge>
+      <bpmndi:BPMNEdge id="Flow_1wxjn4e_di" bpmnElement="Flow_1wxjn4e">
+        <di:waypoint x="358" y="470" />
+        <di:waypoint x="432" y="470" />
+      </bpmndi:BPMNEdge>
+    </bpmndi:BPMNPlane>
+  </bpmndi:BPMNDiagram>
+</bpmn:definitions>

@@ -0,0 +1,60 @@
+from datetime import timedelta
+
+from SpiffWorkflow.bpmn.workflow import BpmnWorkflow
+from SpiffWorkflow.bpmn.PythonScriptEngine import PythonScriptEngine
+from SpiffWorkflow.bpmn.specs.events.event_definitions import MessageEventDefinition
+from SpiffWorkflow.task import TaskState
+
+from ..BpmnWorkflowTestCase import BpmnWorkflowTestCase
+
+class EventBsedGatewayTest(BpmnWorkflowTestCase):
+
+    def setUp(self):
+        self.spec, self.subprocesses = self.load_workflow_spec('event-gateway.bpmn', 'Process_0pvx19v')
+        self.script_engine = PythonScriptEngine(default_globals={"timedelta": timedelta})
+        self.workflow = BpmnWorkflow(self.spec, script_engine=self.script_engine)
+
+    def testEventBasedGateway(self):
+        self.actual_test()
+
+    def testEventBasedGatewaySaveRestore(self):
+        self.actual_test(True)
+
+    def actual_test(self, save_restore=False):
+
+        self.workflow.do_engine_steps()
+        waiting_tasks = self.workflow.get_waiting_tasks()
+        if save_restore:
+            self.save_restore()
+            self.workflow.script_engine = self.script_engine
+        self.assertEqual(len(waiting_tasks), 1)
+        self.workflow.catch(MessageEventDefinition('message_1'))
+        self.workflow.refresh_waiting_tasks()
+        self.workflow.do_engine_steps()
+        self.assertEqual(self.workflow.is_completed(), True)
+        self.assertEqual(self.workflow.get_tasks_from_spec_name('message_1_event')[0].state, TaskState.COMPLETED)
+        self.assertEqual(self.workflow.get_tasks_from_spec_name('message_2_event')[0].state, TaskState.CANCELLED)
+        self.assertEqual(self.workflow.get_tasks_from_spec_name('timer_event')[0].state, TaskState.CANCELLED)
+
+    def testTimeout(self):
+
+        self.workflow.do_engine_steps()
+        waiting_tasks = self.workflow.get_waiting_tasks()
+        self.assertEqual(len(waiting_tasks), 1)
+        timer_event = waiting_tasks[0].task_spec.event_definition.event_definitions[-1]
+        self.workflow.catch(timer_event)
+        self.workflow.refresh_waiting_tasks()
+        self.workflow.do_engine_steps()
+        self.assertEqual(self.workflow.is_completed(), True)
+        self.assertEqual(self.workflow.get_tasks_from_spec_name('message_1_event')[0].state, TaskState.CANCELLED)
+        self.assertEqual(self.workflow.get_tasks_from_spec_name('message_2_event')[0].state, TaskState.CANCELLED)
+        self.assertEqual(self.workflow.get_tasks_from_spec_name('timer_event')[0].state, TaskState.COMPLETED)
+
+    def testMultipleStart(self):
+        spec, subprocess = self.load_workflow_spec('multiple-start-parallel.bpmn', 'main')
+        workflow = BpmnWorkflow(spec)
+        workflow.do_engine_steps()
+        workflow.catch(MessageEventDefinition('message_1'))
+        workflow.catch(MessageEventDefinition('message_2'))
+        workflow.refresh_waiting_tasks()
+        workflow.do_engine_steps()

@@ -0,0 +1,81 @@
+from SpiffWorkflow.bpmn.workflow import BpmnWorkflow
+from SpiffWorkflow.bpmn.specs.events.event_definitions import MessageEventDefinition
+
+from ..BpmnWorkflowTestCase import BpmnWorkflowTestCase
+
+
+class MultipleStartEventTest(BpmnWorkflowTestCase):
+
+    def setUp(self):
+        self.spec, self.subprocesses = self.load_workflow_spec('multiple-start.bpmn', 'main')
+        self.workflow = BpmnWorkflow(self.spec)
+
+    def testMultipleStartEvent(self):
+        self.actual_test()
+
+    def testMultipleStartEventSaveRestore(self):
+        self.actual_test(True)
+
+    def actual_test(self, save_restore=False):
+
+        self.workflow.do_engine_steps()
+        waiting_tasks = self.workflow.get_waiting_tasks()
+
+        if save_restore:
+            self.save_restore()
+
+        # The start event should be waiting
+        self.assertEqual(len(waiting_tasks), 1)
+        self.assertEqual(waiting_tasks[0].task_spec.name, 'StartEvent_1')
+
+        self.workflow.catch(MessageEventDefinition('message_1'))
+        self.workflow.refresh_waiting_tasks()
+        self.workflow.do_engine_steps()
+
+        # Now the first task should be ready
+        ready_tasks = self.workflow.get_ready_user_tasks()
+        self.assertEqual(len(ready_tasks), 1)
+        self.assertEqual(ready_tasks[0].task_spec.name, 'any_task')
+
+
+class ParallelStartEventTest(BpmnWorkflowTestCase):
+
+    def setUp(self):
+        self.spec, self.subprocesses = self.load_workflow_spec('multiple-start-parallel.bpmn', 'main')
+        self.workflow = BpmnWorkflow(self.spec)
+
+    def testParallelStartEvent(self):
+        self.actual_test()
+
+    def testParallelStartEventSaveRestore(self):
+        self.actual_test(True)
+
+    def actual_test(self, save_restore=False):
+
+        self.workflow.do_engine_steps()
+        waiting_tasks = self.workflow.get_waiting_tasks()
+
+        if save_restore:
+            self.save_restore()
+
+        # The start event should be waiting
+        self.assertEqual(len(waiting_tasks), 1)
+        self.assertEqual(waiting_tasks[0].task_spec.name, 'StartEvent_1')
+
+        self.workflow.catch(MessageEventDefinition('message_1'))
+        self.workflow.refresh_waiting_tasks()
+        self.workflow.do_engine_steps()
+
+        # It should still be waiting because it has to receive both messages
+        waiting_tasks = self.workflow.get_waiting_tasks()
+        self.assertEqual(len(waiting_tasks), 1)
+        self.assertEqual(waiting_tasks[0].task_spec.name, 'StartEvent_1')
+
+        self.workflow.catch(MessageEventDefinition('message_2'))
+        self.workflow.refresh_waiting_tasks()
+        self.workflow.do_engine_steps()
+
+        # Now the first task should be ready
+        ready_tasks = self.workflow.get_ready_user_tasks()
+        self.assertEqual(len(ready_tasks), 1)
+        self.assertEqual(ready_tasks[0].task_spec.name, 'any_task')

@ -0,0 +1,47 @@
from SpiffWorkflow.bpmn.workflow import BpmnWorkflow

from ..BpmnWorkflowTestCase import BpmnWorkflowTestCase


class MultipleThrowEventIntermediateCatchTest(BpmnWorkflowTestCase):

    def setUp(self):
        self.spec, subprocesses = self.load_collaboration('multiple-throw.bpmn', 'top')
        self.workflow = BpmnWorkflow(self.spec, subprocesses)

    def testMultipleThrowEventIntermediateCatch(self):
        self.actual_test()

    def testMultipleThrowEventIntermediateCatchSaveRestore(self):
        self.actual_test(True)

    def actual_test(self, save_restore=False):
        if save_restore:
            self.save_restore()
        self.workflow.do_engine_steps()
        self.assertEqual(len(self.workflow.get_waiting_tasks()), 0)
        self.assertEqual(self.workflow.is_completed(), True)


class MultipleThrowEventStartsEventTest(BpmnWorkflowTestCase):

    def setUp(self):
        specs = self.get_all_specs('multiple-throw-start.bpmn')
        self.spec = specs.pop('initiate')
        self.workflow = BpmnWorkflow(self.spec, specs)

    def testMultipleThrowEventStartEvent(self):
        self.actual_test()

    def testMultipleThrowEventStartEventSaveRestore(self):
        self.actual_test(True)

    def actual_test(self, save_restore=False):
        if save_restore:
            self.save_restore()
        self.workflow.do_engine_steps()
        ready_tasks = self.workflow.get_ready_user_tasks()
        self.assertEqual(len(ready_tasks), 1)
        ready_tasks[0].complete()
        self.workflow.do_engine_steps()
        self.assertEqual(self.workflow.is_completed(), True)
@ -3,6 +3,8 @@ import unittest

from SpiffWorkflow.dmn.engine.DMNEngine import DMNEngine
from SpiffWorkflow.dmn.parser.BpmnDmnParser import BpmnDmnParser
from SpiffWorkflow.dmn.serializer.task_spec_converters import \
    BusinessRuleTaskConverter
from tests.SpiffWorkflow.bpmn.BpmnWorkflowTestCase import BpmnWorkflowTestCase
from tests.SpiffWorkflow.dmn.DecisionRunner import DecisionRunner
from tests.SpiffWorkflow.dmn.python_engine.PythonDecisionRunner import \

@ -27,7 +29,18 @@ class HitPolicyTest(BpmnWorkflowTestCase):
        self.assertEqual('COLLECT', decision_table.hit_policy)
        res = runner.result({'type': 'stooge'})
        self.assertEqual(4, len(res['name']))
        res = runner.result({'type': 'farmer'})
        self.assertEqual(1, len(res['name']))
        self.assertEqual('Elmer Fudd', res['name'][0])

    def testSerializeHitPolicy(self):
        file_name = os.path.join(os.path.dirname(__file__), 'data', 'collect_hit.dmn')
        runner = PythonDecisionRunner(file_name)
        decision_table = runner.decision_table
        self.assertEqual("COLLECT", decision_table.hit_policy)
        dict = BusinessRuleTaskConverter().decision_table_to_dict(decision_table)
        new_table = BusinessRuleTaskConverter().decision_table_from_dict(dict)
        self.assertEqual("COLLECT", new_table.hit_policy)

def suite():
    return unittest.TestLoader().loadTestsFromTestCase(HitPolicyTest)
@ -3,9 +3,7 @@

import os
import sys
import unittest
sys.path.insert(0, os.path.join(os.path.dirname(__file__), '..', '..', '..'))

from tests.SpiffWorkflow.util import run_workflow
from .TaskSpecTest import TaskSpecTest

@ -25,7 +23,8 @@ class ExecuteTest(TaskSpecTest):
                         args=self.cmd_args)

    def setUp(self):
        self.cmd_args = ["python", "ExecuteProcessMock.py"]
        script_path = os.path.join(os.path.dirname(__file__), '..', 'ExecuteProcessMock.py')
        self.cmd_args = ["python", script_path]
        TaskSpecTest.setUp(self)

    def testConstructor(self):
@ -0,0 +1,11 @@
#!/usr/bin/env bash

function error_handler() {
  >&2 echo "Exited with BAD EXIT CODE '${2}' in ${0} script at line: ${1}."
  exit "$2"
}
trap 'error_handler ${LINENO} $?' ERR
set -o errtrace -o errexit -o nounset -o pipefail

script_dir="$( cd -- "$(dirname "$0")" >/dev/null 2>&1 ; pwd -P )"
"${script_dir}/run_pyl" pre

bin/run_pyl
@ -16,6 +16,17 @@ react_projects=(
  spiffworkflow-frontend
)

subcommand="${1:-}"

if [[ "$subcommand" == "pre" ]]; then
  if [[ -n "$(git status --porcelain SpiffWorkflow)" ]]; then
    echo "SpiffWorkflow has uncommitted changes. Running its test suite."
    pushd SpiffWorkflow
    make tests-par # run tests in parallel
    popd
  fi
fi

function get_python_dirs() {
  (git ls-tree -r HEAD --name-only | grep -E '\.py$' | awk -F '/' '{print $1}' | sort | uniq | grep -v '\.' | grep -Ev '^(bin|migrations)$') || echo ''
}

@ -50,23 +61,34 @@ function run_pre_commmit() {
}

for react_project in "${react_projects[@]}" ; do
  pushd "$react_project"
  npm run lint:fix
  popd
  # if pre, only do stuff when there are changes
  if [[ "$subcommand" != "pre" ]] || [[ -n "$(git status --porcelain "$react_project")" ]]; then
    pushd "$react_project"
    npm run lint:fix
    popd
  fi
done

for python_project in "${python_projects[@]}" ; do
  pushd "$python_project"
  run_fix_docstrings || run_fix_docstrings
  run_autoflake || run_autoflake
  popd
  if [[ "$subcommand" != "pre" ]] || [[ -n "$(git status --porcelain "$python_project")" ]]; then
    pushd "$python_project"
    run_fix_docstrings || run_fix_docstrings
    run_autoflake || run_autoflake
    popd
  fi
done
run_pre_commmit || run_pre_commmit

for python_project in "${python_projects[@]}"; do
  pushd "$python_project"
  poetry install
  poetry run mypy $(get_python_dirs)
  poetry run coverage run --parallel -m pytest
  popd
if [[ "$subcommand" != "pre" ]] || [[ -n "$(git status --porcelain "spiffworkflow-backend")" ]]; then
  # run_pre_commmit only applies to spiffworkflow-backend at the moment
  run_pre_commmit || run_pre_commmit
fi

for python_project in "${python_projects[@]}"; do
  if [[ "$subcommand" != "pre" ]] || [[ -n "$(git status --porcelain "$python_project")" ]]; then
    pushd "$python_project"
    poetry install
    poetry run mypy $(get_python_dirs)
    poetry run coverage run --parallel -m pytest
    popd
  fi
done
@ -35,6 +35,7 @@ module.exports = {
    new CopyWebpackPlugin({
      patterns: [
        { from: 'assets/**', to: 'vendor/bpmn-js', context: 'node_modules/bpmn-js/dist/' },
        { from: '*.css', to: 'vendor/bpmn-js-color-picker', context: 'node_modules/bpmn-js-color-picker/colors' },
        {
          from: 'assets/**',
          to: 'vendor/bpmn-js-properties-panel',
@ -10,9 +10,9 @@ services:
    environment:
      - MYSQL_DATABASE=spiffworkflow_backend_development
      - MYSQL_ROOT_PASSWORD=my-secret-pw
      - MYSQL_TCP_PORT=7003
      - MYSQL_TCP_PORT=8003
    ports:
      - "7003"
      - "8003"
    healthcheck:
      test: mysql --user=root --password=my-secret-pw -e 'select 1' spiffworkflow_backend_development
      interval: 10s

@ -30,12 +30,12 @@ services:
      - SPIFFWORKFLOW_BACKEND_ENV=development
      - FLASK_DEBUG=0
      - FLASK_SESSION_SECRET_KEY=super_secret_key
      - OPEN_ID_SERVER_URL=http://localhost:7000/openid
      - SPIFFWORKFLOW_FRONTEND_URL=http://localhost:7001
      - SPIFFWORKFLOW_BACKEND_URL=http://localhost:7000
      - SPIFFWORKFLOW_BACKEND_PORT=7000
      - OPEN_ID_SERVER_URL=http://localhost:8000/openid
      - SPIFFWORKFLOW_FRONTEND_URL=http://localhost:8001
      - SPIFFWORKFLOW_BACKEND_URL=http://localhost:8000
      - SPIFFWORKFLOW_BACKEND_PORT=8000
      - SPIFFWORKFLOW_BACKEND_UPGRADE_DB=true
      - SPIFFWORKFLOW_BACKEND_DATABASE_URI=mysql+mysqlconnector://root:my-secret-pw@spiffworkflow-db:7003/spiffworkflow_backend_development
      - SPIFFWORKFLOW_BACKEND_DATABASE_URI=mysql+mysqlconnector://root:my-secret-pw@spiffworkflow-db:8003/spiffworkflow_backend_development
      - BPMN_SPEC_ABSOLUTE_DIR=/app/process_models
      - SPIFFWORKFLOW_BACKEND_LOAD_FIXTURE_DATA=false
      - SPIFFWORKFLOW_BACKEND_PERMISSIONS_FILE_NAME=example.yml

@ -43,12 +43,12 @@ services:
      - OPEN_ID_CLIENT_ID=spiffworkflow-backend
      - OPEN_ID_CLIENT_SECRET_KEY=my_open_id_secret_key
    ports:
      - "7000:7000"
      - "8000:8000"
    volumes:
      - ./process_models:/app/process_models
      - ./log:/app/log
    healthcheck:
      test: curl localhost:7000/v1.0/status --fail
      test: curl localhost:8000/v1.0/status --fail
      interval: 10s
      timeout: 5s
      retries: 20

@ -58,9 +58,9 @@ services:
    image: ghcr.io/sartography/spiffworkflow-frontend
    environment:
      - APPLICATION_ROOT=/
      - PORT0=7001
      - PORT0=8001
    ports:
      - "7001:7001"
      - "8001:8001"

  spiffworkflow-connector:
    container_name: spiffworkflow-connector

@ -69,10 +69,11 @@ services:
      - FLASK_ENV=${FLASK_ENV:-development}
      - FLASK_DEBUG=0
      - FLASK_SESSION_SECRET_KEY=${FLASK_SESSION_SECRET_KEY:-super_secret_key}
      - CONNECTOR_PROXY_PORT=8004
    ports:
      - "7004:7004"
      - "8004:8004"
    healthcheck:
      test: curl localhost:7004/liveness --fail
      test: curl localhost:8004/liveness --fail
      interval: 10s
      timeout: 5s
      retries: 20
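The hunks above move every local service from the 7xxx port range to 8xxx (MySQL 7003 to 8003, backend 7000 to 8000, frontend 7001 to 8001, connector 7004 to 8004). A quick, purely illustrative way to confirm the relocated endpoints answer, using the URLs from the updated `test:` healthcheck lines:

```
import urllib.request

# Endpoints taken from the compose healthchecks above; both should return 200.
for url in (
    "http://localhost:8000/v1.0/status",  # backend (was 7000)
    "http://localhost:8004/liveness",     # connector proxy (was 7004)
):
    with urllib.request.urlopen(url, timeout=5) as resp:
        print(url, resp.status)
```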
@ -813,22 +813,6 @@ category = "main"
|
|||
optional = false
|
||||
python-versions = ">=3.6"
|
||||
|
||||
[[package]]
|
||||
name = "libcst"
|
||||
version = "0.4.3"
|
||||
description = "A concrete syntax tree with AST-like properties for Python 3.5, 3.6, 3.7, 3.8, 3.9, and 3.10 programs."
|
||||
category = "dev"
|
||||
optional = false
|
||||
python-versions = ">=3.7"
|
||||
|
||||
[package.dependencies]
|
||||
pyyaml = ">=5.2"
|
||||
typing-extensions = ">=3.7.4.2"
|
||||
typing-inspect = ">=0.4.0"
|
||||
|
||||
[package.extras]
|
||||
dev = ["black (==22.3.0)", "coverage (>=4.5.4)", "fixit (==0.1.1)", "flake8 (>=3.7.8)", "hypothesis (>=4.36.0)", "hypothesmith (>=0.0.4)", "jinja2 (==3.0.3)", "jupyter (>=1.0.0)", "maturin (>=0.8.3,<0.9)", "nbsphinx (>=0.4.2)", "prompt-toolkit (>=2.0.9)", "pyre-check (==0.9.9)", "setuptools-rust (>=0.12.1)", "setuptools-scm (>=6.0.1)", "slotscheck (>=0.7.1)", "sphinx-rtd-theme (>=0.4.3)", "ufmt (==1.3)", "usort (==1.0.0rc1)"]
|
||||
|
||||
[[package]]
|
||||
name = "livereload"
|
||||
version = "2.6.3"
|
||||
|
@ -905,18 +889,6 @@ category = "dev"
|
|||
optional = false
|
||||
python-versions = "*"
|
||||
|
||||
[[package]]
|
||||
name = "monkeytype"
|
||||
version = "22.2.0"
|
||||
description = "Generating type annotations from sampled production types"
|
||||
category = "dev"
|
||||
optional = false
|
||||
python-versions = ">=3.6"
|
||||
|
||||
[package.dependencies]
|
||||
libcst = ">=0.3.7"
|
||||
mypy-extensions = "*"
|
||||
|
||||
[[package]]
|
||||
name = "mypy"
|
||||
version = "0.991"
|
||||
|
@ -1504,7 +1476,7 @@ test = ["pytest"]
|
|||
[[package]]
|
||||
name = "SpiffWorkflow"
|
||||
version = "1.2.1"
|
||||
description = ""
|
||||
description = "A workflow framework and BPMN/DMN Processor"
|
||||
category = "main"
|
||||
optional = false
|
||||
python-versions = "*"
|
||||
|
@ -1520,7 +1492,7 @@ lxml = "*"
|
|||
type = "git"
|
||||
url = "https://github.com/sartography/SpiffWorkflow"
|
||||
reference = "main"
|
||||
resolved_reference = "025bc30f27366e06dd1286b7563e4b1cb04c1c46"
|
||||
resolved_reference = "841bd63017bb1d92858456393f144b4e5b23c994"
|
||||
|
||||
[[package]]
|
||||
name = "sqlalchemy"
|
||||
|
@ -1627,18 +1599,6 @@ category = "main"
|
|||
optional = false
|
||||
python-versions = ">=3.7"
|
||||
|
||||
[[package]]
|
||||
name = "typing-inspect"
|
||||
version = "0.7.1"
|
||||
description = "Runtime inspection utilities for typing module."
|
||||
category = "dev"
|
||||
optional = false
|
||||
python-versions = "*"
|
||||
|
||||
[package.dependencies]
|
||||
mypy-extensions = ">=0.3.0"
|
||||
typing-extensions = ">=3.7.4"
|
||||
|
||||
[[package]]
|
||||
name = "unidecode"
|
||||
version = "1.3.4"
|
||||
|
@ -1770,7 +1730,7 @@ testing = ["func-timeout", "jaraco.itertools", "pytest (>=6)", "pytest-black (>=
|
|||
[metadata]
|
||||
lock-version = "1.1"
|
||||
python-versions = "^3.7"
|
||||
content-hash = "6dfda037ebb3024834a45670108756a3057fff1b6fb5b916d222d3a162509b7d"
|
||||
content-hash = "45cac5741fa47e44710f5aae6dfdb4636fc4d60df2d6aba467052fdd5199e791"
|
||||
|
||||
[metadata.files]
|
||||
alabaster = [
|
||||
|
@ -2234,32 +2194,6 @@ lazy-object-proxy = [
|
|||
{file = "lazy_object_proxy-1.7.1-cp39-cp39-win_amd64.whl", hash = "sha256:677ea950bef409b47e51e733283544ac3d660b709cfce7b187f5ace137960d61"},
|
||||
{file = "lazy_object_proxy-1.7.1-pp37.pp38-none-any.whl", hash = "sha256:d66906d5785da8e0be7360912e99c9188b70f52c422f9fc18223347235691a84"},
|
||||
]
|
||||
libcst = [
|
||||
{file = "libcst-0.4.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:bea98a8be2b1725784ae01e89519121eba7d81280dcbee40ae03ececd7277cf3"},
|
||||
{file = "libcst-0.4.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:3d9191c764645dddf94d49885e590433fa0ee6d347b07eec86566786e6d2ada5"},
|
||||
{file = "libcst-0.4.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c0f22e9787e44304e7cd9744e543602ab2c1bca8b922cb6237ea08d9a0be3fdd"},
|
||||
{file = "libcst-0.4.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ff147dd77b6ea72e4f2f0abfcd1be11a3108c28cb65e6da666c0b77142033f7c"},
|
||||
{file = "libcst-0.4.3-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d744d4a6301c75322f1d88365dccfe402a51e724583a2edc4cba474462cc9419"},
|
||||
{file = "libcst-0.4.3-cp310-cp310-win_amd64.whl", hash = "sha256:ed0f15545eddfdd6270069ce0b2d4c253298817bd676a1a6adddaa1d66c7e28b"},
|
||||
{file = "libcst-0.4.3-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:a6f57056a743853c01bbd21bfd96c2a1b4c317bbc66920f5f2c9999b3dca7233"},
|
||||
{file = "libcst-0.4.3-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1c3d33da8f9b088e118bfc6ecacdd627ac237baeb490f4d7a383af4df4ea4f82"},
|
||||
{file = "libcst-0.4.3-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:df5f51a837fc10cdbf5c61acb467f6c15d5f9ca1d94a84a6a29c4f20ce7b437e"},
|
||||
{file = "libcst-0.4.3-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f744f60057c8998b856d9baf28765c65574992f4a49830ca350010fc31f4eac4"},
|
||||
{file = "libcst-0.4.3-cp37-cp37m-win_amd64.whl", hash = "sha256:88ab371aab82f7241448e263ec42abced649a77cdd21df960268e6df70b3f3f7"},
|
||||
{file = "libcst-0.4.3-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:826ea5f10a84625db861ccf35946317f4f29e575261e44c0cd6c24c4dde5c2bb"},
|
||||
{file = "libcst-0.4.3-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:cab5b23796ce66303398bb7b2d27bcb17d2416dacd3d00229c961aed87d79a3b"},
|
||||
{file = "libcst-0.4.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:afc793c95af79e5adc5905713ccddff034d0de3e3da748424b722edf890227de"},
|
||||
{file = "libcst-0.4.3-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0c982387b8e23ad18efbd0287004924931a0b05c91ed5630453faf224bb0b185"},
|
||||
{file = "libcst-0.4.3-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:bc4c25aca45df5f86a6a1c8c219e8c7a90acdaef02b53eb01eafa563381cb0ce"},
|
||||
{file = "libcst-0.4.3-cp38-cp38-win_amd64.whl", hash = "sha256:1a395129ecf6c6ce429427f34100ccd99f35898a98187764a4559d9f92166cd0"},
|
||||
{file = "libcst-0.4.3-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:ca00819affafccb02b2582ec47706712b995c9887cad02bb8efe94a066830f37"},
|
||||
{file = "libcst-0.4.3-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:231a9ca446570f9b63d8c2c6dbf6c796fb939a5e4ef9dc0dd9304a21a6c0da16"},
|
||||
{file = "libcst-0.4.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b08e7a56950479c856183ad6fdf0a21df028d6732e1d19822ec1593e32f700ca"},
|
||||
{file = "libcst-0.4.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3cb70e7e5118234e75d309fcf04931e20f282f16c80dda464fc1b88ef02e52e4"},
|
||||
{file = "libcst-0.4.3-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6c8c00b24ab39facff463b18b9abc8df7dd063ae0ce9fe2e78e199c9a8572e37"},
|
||||
{file = "libcst-0.4.3-cp39-cp39-win_amd64.whl", hash = "sha256:28f35b9a21b2f8982a8ed3f53b1fdbc5435252409d34d061a3229dc4b413b8c7"},
|
||||
{file = "libcst-0.4.3.tar.gz", hash = "sha256:f79ab61287505d97ed57ead14b78777f48cd6ec5339ca4978987e4c35957a465"},
|
||||
]
|
||||
livereload = [
|
||||
{file = "livereload-2.6.3.tar.gz", hash = "sha256:776f2f865e59fde56490a56bcc6773b6917366bce0c267c60ee8aaf1a0959869"},
|
||||
]
|
||||
|
@ -2389,10 +2323,6 @@ mccabe = [
|
|||
{file = "mccabe-0.6.1-py2.py3-none-any.whl", hash = "sha256:ab8a6258860da4b6677da4bd2fe5dc2c659cff31b3ee4f7f5d64e79735b80d42"},
|
||||
{file = "mccabe-0.6.1.tar.gz", hash = "sha256:dd8d182285a0fe56bace7f45b5e7d1a6ebcbf524e8f3bd87eb0f125271b8831f"},
|
||||
]
|
||||
monkeytype = [
|
||||
{file = "MonkeyType-22.2.0-py3-none-any.whl", hash = "sha256:3d0815c7e98a18e9267990a452548247f6775fd636e65df5a7d77100ea7ad282"},
|
||||
{file = "MonkeyType-22.2.0.tar.gz", hash = "sha256:6b0c00b49dcc5095a2c08d28246cf005e05673fc51f64d203f9a6bca2036dfab"},
|
||||
]
|
||||
mypy = [
|
||||
{file = "mypy-0.991-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:7d17e0a9707d0772f4a7b878f04b4fd11f6f5bcb9b3813975a9b13c9332153ab"},
|
||||
{file = "mypy-0.991-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:0714258640194d75677e86c786e80ccf294972cc76885d3ebbb560f11db0003d"},
|
||||
|
@ -2808,11 +2738,6 @@ typing-extensions = [
|
|||
{file = "typing_extensions-4.2.0-py3-none-any.whl", hash = "sha256:6657594ee297170d19f67d55c05852a874e7eb634f4f753dbd667855e07c1708"},
|
||||
{file = "typing_extensions-4.2.0.tar.gz", hash = "sha256:f1c24655a0da0d1b67f07e17a5e6b2a105894e6824b92096378bb3668ef02376"},
|
||||
]
|
||||
typing-inspect = [
|
||||
{file = "typing_inspect-0.7.1-py2-none-any.whl", hash = "sha256:b1f56c0783ef0f25fb064a01be6e5407e54cf4a4bf4f3ba3fe51e0bd6dcea9e5"},
|
||||
{file = "typing_inspect-0.7.1-py3-none-any.whl", hash = "sha256:3cd7d4563e997719a710a3bfe7ffb544c6b72069b6812a02e9b414a8fa3aaa6b"},
|
||||
{file = "typing_inspect-0.7.1.tar.gz", hash = "sha256:047d4097d9b17f46531bf6f014356111a1b6fb821a24fe7ac909853ca2a782aa"},
|
||||
]
|
||||
unidecode = [
|
||||
{file = "Unidecode-1.3.4-py3-none-any.whl", hash = "sha256:afa04efcdd818a93237574791be9b2817d7077c25a068b00f8cff7baa4e59257"},
|
||||
{file = "Unidecode-1.3.4.tar.gz", hash = "sha256:8e4352fb93d5a735c788110d2e7ac8e8031eb06ccbfe8d324ab71735015f9342"},
|
||||
|
|
|
@ -170,15 +170,21 @@ def set_user_sentry_context() -> None:
def handle_exception(exception: Exception) -> flask.wrappers.Response:
    """Handles unexpected exceptions."""
    set_user_sentry_context()
    id = capture_exception(exception)

    organization_slug = current_app.config.get("SENTRY_ORGANIZATION_SLUG")
    project_slug = current_app.config.get("SENTRY_PROJECT_SLUG")
    sentry_link = None
    if organization_slug and project_slug:
        sentry_link = (
            f"https://sentry.io/{organization_slug}/{project_slug}/events/{id}"
        )
    if not isinstance(exception, ApiError) or exception.error_code != "invalid_token":
        id = capture_exception(exception)

        if isinstance(exception, ApiError):
            current_app.logger.info(
                f"Sending ApiError exception to sentry: {exception} with error code {exception.error_code}")

        organization_slug = current_app.config.get("SENTRY_ORGANIZATION_SLUG")
        project_slug = current_app.config.get("SENTRY_PROJECT_SLUG")
        if organization_slug and project_slug:
            sentry_link = (
                f"https://sentry.io/{organization_slug}/{project_slug}/events/{id}"
            )

    # !!!NOTE!!!: do this after sentry stuff since calling logger.exception
    # seems to break the sentry sdk context where we no longer get back
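The hunk above changes `handle_exception` so that `invalid_token` `ApiError`s are no longer reported to Sentry, and the Sentry event link is only built when something was actually captured. A minimal sketch of the guard being introduced, for illustration only (it assumes the `ApiError` class the module already imports):

```
def should_capture(exception: Exception) -> bool:
    # invalid_token ApiErrors are expected noise (e.g. expired tokens),
    # so skip Sentry capture for them and report everything else.
    return not (
        isinstance(exception, ApiError) and exception.error_code == "invalid_token"
    )
```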
@ -163,7 +163,7 @@ python-versions = "*"
|
|||
|
||||
[[package]]
|
||||
name = "black"
|
||||
version = "22.10.0"
|
||||
version = "23.1a1"
|
||||
description = "The uncompromising code formatter."
|
||||
category = "dev"
|
||||
optional = false
|
||||
|
@ -614,7 +614,7 @@ werkzeug = "*"
|
|||
type = "git"
|
||||
url = "https://github.com/sartography/flask-bpmn"
|
||||
reference = "main"
|
||||
resolved_reference = "860f2387bebdaa9220e9fbf6f8fa7f74e805d0d4"
|
||||
resolved_reference = "c79c1e0b6d34ec05d82cce888b5e57b33d24403b"
|
||||
|
||||
[[package]]
|
||||
name = "flask-cors"
|
||||
|
@ -1760,7 +1760,7 @@ lxml = "*"
|
|||
type = "git"
|
||||
url = "https://github.com/sartography/SpiffWorkflow"
|
||||
reference = "main"
|
||||
resolved_reference = "bba7ddf5478af579b891ca63c50babbfccf6b7a4"
|
||||
resolved_reference = "80640024a8030481645f0c34f34c57e88f7b4f0c"
|
||||
|
||||
[[package]]
|
||||
name = "sqlalchemy"
|
||||
|
@ -2182,27 +2182,18 @@ billiard = [
|
|||
{file = "billiard-3.6.4.0.tar.gz", hash = "sha256:299de5a8da28a783d51b197d496bef4f1595dd023a93a4f59dde1886ae905547"},
|
||||
]
|
||||
black = [
|
||||
{file = "black-22.10.0-1fixedarch-cp310-cp310-macosx_11_0_x86_64.whl", hash = "sha256:5cc42ca67989e9c3cf859e84c2bf014f6633db63d1cbdf8fdb666dcd9e77e3fa"},
|
||||
{file = "black-22.10.0-1fixedarch-cp311-cp311-macosx_11_0_x86_64.whl", hash = "sha256:5d8f74030e67087b219b032aa33a919fae8806d49c867846bfacde57f43972ef"},
|
||||
{file = "black-22.10.0-1fixedarch-cp37-cp37m-macosx_10_16_x86_64.whl", hash = "sha256:197df8509263b0b8614e1df1756b1dd41be6738eed2ba9e9769f3880c2b9d7b6"},
|
||||
{file = "black-22.10.0-1fixedarch-cp38-cp38-macosx_10_16_x86_64.whl", hash = "sha256:2644b5d63633702bc2c5f3754b1b475378fbbfb481f62319388235d0cd104c2d"},
|
||||
{file = "black-22.10.0-1fixedarch-cp39-cp39-macosx_11_0_x86_64.whl", hash = "sha256:e41a86c6c650bcecc6633ee3180d80a025db041a8e2398dcc059b3afa8382cd4"},
|
||||
{file = "black-22.10.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:2039230db3c6c639bd84efe3292ec7b06e9214a2992cd9beb293d639c6402edb"},
|
||||
{file = "black-22.10.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:14ff67aec0a47c424bc99b71005202045dc09270da44a27848d534600ac64fc7"},
|
||||
{file = "black-22.10.0-cp310-cp310-win_amd64.whl", hash = "sha256:819dc789f4498ecc91438a7de64427c73b45035e2e3680c92e18795a839ebb66"},
|
||||
{file = "black-22.10.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:5b9b29da4f564ba8787c119f37d174f2b69cdfdf9015b7d8c5c16121ddc054ae"},
|
||||
{file = "black-22.10.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b8b49776299fece66bffaafe357d929ca9451450f5466e997a7285ab0fe28e3b"},
|
||||
{file = "black-22.10.0-cp311-cp311-win_amd64.whl", hash = "sha256:21199526696b8f09c3997e2b4db8d0b108d801a348414264d2eb8eb2532e540d"},
|
||||
{file = "black-22.10.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1e464456d24e23d11fced2bc8c47ef66d471f845c7b7a42f3bd77bf3d1789650"},
|
||||
{file = "black-22.10.0-cp37-cp37m-win_amd64.whl", hash = "sha256:9311e99228ae10023300ecac05be5a296f60d2fd10fff31cf5c1fa4ca4b1988d"},
|
||||
{file = "black-22.10.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:fba8a281e570adafb79f7755ac8721b6cf1bbf691186a287e990c7929c7692ff"},
|
||||
{file = "black-22.10.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:915ace4ff03fdfff953962fa672d44be269deb2eaf88499a0f8805221bc68c87"},
|
||||
{file = "black-22.10.0-cp38-cp38-win_amd64.whl", hash = "sha256:444ebfb4e441254e87bad00c661fe32df9969b2bf224373a448d8aca2132b395"},
|
||||
{file = "black-22.10.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:974308c58d057a651d182208a484ce80a26dac0caef2895836a92dd6ebd725e0"},
|
||||
{file = "black-22.10.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:72ef3925f30e12a184889aac03d77d031056860ccae8a1e519f6cbb742736383"},
|
||||
{file = "black-22.10.0-cp39-cp39-win_amd64.whl", hash = "sha256:432247333090c8c5366e69627ccb363bc58514ae3e63f7fc75c54b1ea80fa7de"},
|
||||
{file = "black-22.10.0-py3-none-any.whl", hash = "sha256:c957b2b4ea88587b46cf49d1dc17681c1e672864fd7af32fc1e9664d572b3458"},
|
||||
{file = "black-22.10.0.tar.gz", hash = "sha256:f513588da599943e0cde4e32cc9879e825d58720d6557062d1098c5ad80080e1"},
|
||||
{file = "black-23.1a1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5fb7641d442ede92538bc70fa0201f884753a7d0f62f26c722b7b00301b95902"},
|
||||
{file = "black-23.1a1-cp310-cp310-win_amd64.whl", hash = "sha256:88288a645402106b8eb9f50d7340ae741e16240bb01c2eed8466549153daa96e"},
|
||||
{file = "black-23.1a1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4db1d8027ce7ae53f0ccf02b0be0b8808fefb291d6cb1543420f4165d96d364c"},
|
||||
{file = "black-23.1a1-cp311-cp311-win_amd64.whl", hash = "sha256:88ec25a64063945b4591b6378bead544c5d3260de1c93ad96f3ad2d76ddd76fd"},
|
||||
{file = "black-23.1a1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8dff6f0157e47fbbeada046fca144b6557d3be2fb2602d668881cd179f04a352"},
|
||||
{file = "black-23.1a1-cp37-cp37m-win_amd64.whl", hash = "sha256:ca658b69260a18bf7aa0b0a6562dbbd304a737487d1318998aaca5a75901fd2c"},
|
||||
{file = "black-23.1a1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:85dede655442f5e246e7abd667fe07e14916897ba52f3640b5489bf11f7dbf67"},
|
||||
{file = "black-23.1a1-cp38-cp38-win_amd64.whl", hash = "sha256:ddbf9da228726d46f45c29024263e160d41030a415097254817d65127012d1a2"},
|
||||
{file = "black-23.1a1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:63330069d8ec909cf4e2c4d43a7f00aeb03335430ef9fec6cd2328e6ebde8a77"},
|
||||
{file = "black-23.1a1-cp39-cp39-win_amd64.whl", hash = "sha256:793c9176beb2adf295f6b863d9a4dc953fe2ac359ca3da108d71d14cb2c09e52"},
|
||||
{file = "black-23.1a1-py3-none-any.whl", hash = "sha256:e88e4b633d64b9e7adc4a6b922f52bb204af9f90d7b1e3317e6490f2b598b1ea"},
|
||||
{file = "black-23.1a1.tar.gz", hash = "sha256:0b945a5a1e5a5321f884de0061d5a8585d947c9b608e37b6d26ceee4dfdf4b62"},
|
||||
]
|
||||
blinker = [
|
||||
{file = "blinker-1.5-py2.py3-none-any.whl", hash = "sha256:1eb563df6fdbc39eeddc177d953203f99f097e9bf0e2b8f9f3cf18b6ca425e36"},
|
||||
|
@ -2857,7 +2848,18 @@ psycopg2 = [
|
|||
{file = "psycopg2-2.9.5.tar.gz", hash = "sha256:a5246d2e683a972e2187a8714b5c2cf8156c064629f9a9b1a873c1730d9e245a"},
|
||||
]
|
||||
pyasn1 = [
|
||||
{file = "pyasn1-0.4.8-py2.4.egg", hash = "sha256:fec3e9d8e36808a28efb59b489e4528c10ad0f480e57dcc32b4de5c9d8c9fdf3"},
|
||||
{file = "pyasn1-0.4.8-py2.5.egg", hash = "sha256:0458773cfe65b153891ac249bcf1b5f8f320b7c2ce462151f8fa74de8934becf"},
|
||||
{file = "pyasn1-0.4.8-py2.6.egg", hash = "sha256:5c9414dcfede6e441f7e8f81b43b34e834731003427e5b09e4e00e3172a10f00"},
|
||||
{file = "pyasn1-0.4.8-py2.7.egg", hash = "sha256:6e7545f1a61025a4e58bb336952c5061697da694db1cae97b116e9c46abcf7c8"},
|
||||
{file = "pyasn1-0.4.8-py2.py3-none-any.whl", hash = "sha256:39c7e2ec30515947ff4e87fb6f456dfc6e84857d34be479c9d4a4ba4bf46aa5d"},
|
||||
{file = "pyasn1-0.4.8-py3.1.egg", hash = "sha256:78fa6da68ed2727915c4767bb386ab32cdba863caa7dbe473eaae45f9959da86"},
|
||||
{file = "pyasn1-0.4.8-py3.2.egg", hash = "sha256:08c3c53b75eaa48d71cf8c710312316392ed40899cb34710d092e96745a358b7"},
|
||||
{file = "pyasn1-0.4.8-py3.3.egg", hash = "sha256:03840c999ba71680a131cfaee6fab142e1ed9bbd9c693e285cc6aca0d555e576"},
|
||||
{file = "pyasn1-0.4.8-py3.4.egg", hash = "sha256:7ab8a544af125fb704feadb008c99a88805126fb525280b2270bb25cc1d78a12"},
|
||||
{file = "pyasn1-0.4.8-py3.5.egg", hash = "sha256:e89bf84b5437b532b0803ba5c9a5e054d21fec423a89952a74f87fa2c9b7bce2"},
|
||||
{file = "pyasn1-0.4.8-py3.6.egg", hash = "sha256:014c0e9976956a08139dc0712ae195324a75e142284d5f87f1a87ee1b068a359"},
|
||||
{file = "pyasn1-0.4.8-py3.7.egg", hash = "sha256:99fcc3c8d804d1bc6d9a099921e39d827026409a58f2a720dcdb89374ea0c776"},
|
||||
{file = "pyasn1-0.4.8.tar.gz", hash = "sha256:aef77c9fb94a3ac588e87841208bdec464471d9871bd5050a287cc9a475cd0ba"},
|
||||
]
|
||||
pycodestyle = [
|
||||
|
|
|
@ -1,7 +1,7 @@
.mypy_cache/
/.idea/
/.coverage
/.coverage.*
.coverage.*
/.nox/
/.python-version
/.pytype/
@ -9,7 +9,7 @@ set -o errtrace -o errexit -o nounset -o pipefail

if [[ -z "${BPMN_SPEC_ABSOLUTE_DIR:-}" ]]; then
  script_dir="$( cd -- "$(dirname "$0")" >/dev/null 2>&1 ; pwd -P )"
  export BPMN_SPEC_ABSOLUTE_DIR="$script_dir/../../sample-process-models"
  export BPMN_SPEC_ABSOLUTE_DIR="$script_dir/../../../sample-process-models"
fi

if [[ -z "${SPIFFWORKFLOW_BACKEND_DOCKER_COMPOSE_PROFILE:-}" ]]; then
@ -7,7 +7,8 @@ def main() -> None:
|
|||
"""Main."""
|
||||
app = get_hacked_up_app_for_script()
|
||||
with app.app_context():
|
||||
AuthorizationService.delete_all_permissions_and_recreate()
|
||||
AuthorizationService.delete_all_permissions()
|
||||
AuthorizationService.import_permissions_from_yaml_file()
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
|
|
|
@ -1,5 +1,4 @@
"""Get the bpmn process json for a given process instance id and store it in /tmp."""
#!/usr/bin/env python
import os
import sys

@ -18,15 +17,17 @@ def main(process_instance_id: str):
        id=process_instance_id
    ).first()

    file_path = f"/tmp/{process_instance_id}_bpmn_json.json"
    if not process_instance:
        raise Exception(
            f"Could not find a process instance with id: {process_instance_id}"
        )

    with open(
        f"/tmp/{process_instance_id}_bpmn_json.json", "w", encoding="utf-8"
        file_path, "w", encoding="utf-8"
    ) as f:
        f.write(process_instance.bpmn_json)
    print(f"Saved to {file_path}")


if len(sys.argv) < 2:
@ -7,4 +7,5 @@ function error_handler() {
trap 'error_handler ${LINENO} $?' ERR
set -o errtrace -o errexit -o nounset -o pipefail

docker compose logs "$@"
# "docker compose logs" is only getting the db logs so specify them both
docker compose logs db spiffworkflow-backend
@ -0,0 +1,11 @@
#!/usr/bin/env bash

function error_handler() {
  >&2 echo "Exited with BAD EXIT CODE '${2}' in ${0} script at line: ${1}."
  exit "$2"
}
trap 'error_handler ${LINENO} $?' ERR
set -o errtrace -o errexit -o nounset -o pipefail

set -x
mysql -uroot spiffworkflow_backend_development -e 'select pa.id, g.identifier group_identifier, pt.uri, permission from permission_assignment pa join principal p on p.id = pa.principal_id join `group` g on g.id = p.group_id join permission_target pt on pt.id = pa.permission_target_id;'
@ -11,26 +11,42 @@ set -o errtrace -o errexit -o nounset -o pipefail

bpmn_models_absolute_dir="$1"
git_commit_message="$2"
git_commit_username="$3"
git_commit_email="$4"
git_branch="$3"
git_commit_username="$4"
git_commit_email="$5"
git_commit_password="$6"

if [[ -z "${2:-}" ]]; then
  >&2 echo "usage: $(basename "$0") [bpmn_models_absolute_dir] [git_commit_message]"
if [[ -z "${6:-}" ]]; then
  >&2 echo "usage: $(basename "$0") [bpmn_models_absolute_dir] [git_commit_message] [git_branch] [git_commit_username] [git_commit_email]"
  exit 1
fi

cd "$bpmn_models_absolute_dir"
git add .
function failed_to_get_lock() {
  >&2 echo "ERROR: Failed to get lock."
  exit 1
}

function run() {
  cd "$bpmn_models_absolute_dir"
  git add .

  # https://unix.stackexchange.com/a/155077/456630
  if [ -z "$(git status --porcelain)" ]; then
    echo "No changes to commit"
  else
    PAT="${git_commit_username}:${git_commit_password}"
    AUTH=$(echo -n "$PAT" | openssl base64 | tr -d '\n')

# https://unix.stackexchange.com/a/155077/456630
if [ -z "$(git status --porcelain)" ]; then
  echo "No changes to commit"
else
  if [[ -n "$git_commit_username" ]]; then
    git config --local user.name "$git_commit_username"
  fi
  if [[ -n "$git_commit_email" ]]; then
    git config --local user.email "$git_commit_email"
    git config --local http.extraHeader "Authorization: Basic $AUTH"
    git commit -m "$git_commit_message"
    git push --set-upstream origin "$git_branch"
    git config --unset --local http.extraHeader
  fi
  git commit -m "$git_commit_message"
fi
}

exec {lock_fd}>/var/lock/mylockfile || failed_to_get_lock
flock --timeout 60 "$lock_fd" || failed_to_get_lock
run
flock -u "$lock_fd"
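The hunk above serializes concurrent invocations by taking an exclusive `flock` on `/var/lock/mylockfile` before any git operations run. A sketch of the same advisory-lock pattern in Python, for illustration only (`do_git_work` is a hypothetical placeholder for the guarded git add/commit/push sequence):

```
import fcntl

def do_git_work() -> None:
    # Hypothetical placeholder for the guarded git operations.
    ...

with open("/var/lock/mylockfile", "w") as lock_fd:
    # LOCK_EX blocks until the lock is free; the shell script instead uses
    # `flock --timeout 60` so it gives up after a minute.
    fcntl.flock(lock_fd, fcntl.LOCK_EX)
    try:
        do_git_work()
    finally:
        fcntl.flock(lock_fd, fcntl.LOCK_UN)
```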
@ -27,7 +27,6 @@ def main():
    """Main."""
    app = get_hacked_up_app_for_script()
    with app.app_context():

        process_model_identifier_ticket = "ticket"
        db.session.query(ProcessInstanceModel).filter(
            ProcessInstanceModel.process_model_identifier
@ -40,7 +40,8 @@ def hello_world():
        return (
            'Hello, %s, <a href="/private">See private</a> '
            '<a href="/logout">Log out</a>'
        ) % oidc.user_getfield("preferred_username")
            % oidc.user_getfield("preferred_username")
        )
    else:
        return 'Welcome anonymous, <a href="/private">Log in</a>'
@ -61,3 +61,7 @@ for task in $tasks; do
done

SPIFFWORKFLOW_BACKEND_ENV=testing FLASK_APP=src/spiffworkflow_backend poetry run flask db upgrade
if [[ -n "${SPIFFWORKFLOW_BACKEND_ENV:-}" ]] && ! grep -Eq '^(development|testing)$' <<< "$SPIFFWORKFLOW_BACKEND_ENV"; then
  mysql -uroot -e "CREATE DATABASE IF NOT EXISTS spiffworkflow_backend_$SPIFFWORKFLOW_BACKEND_ENV"
  FLASK_APP=src/spiffworkflow_backend poetry run flask db upgrade
fi
@ -426,6 +426,7 @@
    "emailVerified" : false,
    "firstName" : "",
    "lastName" : "",
    "email" : "admin@spiffworkflow.org",
    "credentials" : [ {
      "id" : "ef435043-ef0c-407a-af5b-ced13182a408",
      "type" : "password",

@ -446,6 +447,7 @@
    "enabled" : true,
    "totp" : false,
    "emailVerified" : false,
    "email" : "alex@sartography.com",
    "credentials" : [ {
      "id" : "81a61a3b-228d-42b3-b39a-f62d8e7f57ca",
      "type" : "password",

@ -465,6 +467,7 @@
    "enabled" : true,
    "totp" : false,
    "emailVerified" : false,
    "email" : "amir@status.im",
    "credentials" : [ {
      "id" : "e589f3ad-bf7b-4756-89f7-7894c03c2831",
      "type" : "password",

@ -484,6 +487,9 @@
    "enabled" : true,
    "totp" : false,
    "emailVerified" : false,
    "firstName" : "",
    "lastName" : "",
    "email" : "ciadmin1@spiffworkflow.org",
    "credentials" : [ {
      "id" : "111b5ea1-c2ab-470a-a16b-2373bc94de7a",
      "type" : "password",

@ -499,28 +505,6 @@
    },
    "notBefore" : 0,
    "groups" : [ ]
  }, {
    "id" : "56457e8f-47c6-4f9f-a72b-473dea5edfeb",
    "createdTimestamp" : 1657139955336,
    "username" : "ciuser1",
    "enabled" : true,
    "totp" : false,
    "emailVerified" : false,
    "credentials" : [ {
      "id" : "762f36e9-47af-44da-8520-cf09d752497a",
      "type" : "password",
      "createdDate" : 1657139966468,
      "secretData" : "{\"value\":\"Dpn9QBJSxvl54b0Fu+OKrKRwmDJbk28FQ3xhlOdJPvZVJU/SpdrcsH7ktYAIkVLkRC5qILSZuNPQ3vDGzE2r1Q==\",\"salt\":\"yXd7N8XIQBkJ7swHDeRzXw==\",\"additionalParameters\":{}}",
      "credentialData" : "{\"hashIterations\":27500,\"algorithm\":\"pbkdf2-sha256\",\"additionalParameters\":{}}"
    } ],
    "disableableCredentialTypes" : [ ],
    "requiredActions" : [ ],
    "realmRoles" : [ "default-roles-spiffworkflow" ],
    "clientRoles" : {
      "spiffworkflow-backend" : [ "uma_protection" ]
    },
    "notBefore" : 0,
    "groups" : [ ]
  }, {
    "id" : "d58b61cc-a77e-488f-a427-05f4e0572e20",
    "createdTimestamp" : 1669132945413,

@ -530,6 +514,7 @@
    "emailVerified" : false,
    "firstName" : "",
    "lastName" : "",
    "email" : "core@status.im",
    "credentials" : [ {
      "id" : "ee80092b-8ee6-4699-8492-566e088b48f5",
      "type" : "password",

@ -550,6 +535,7 @@
    "enabled" : true,
    "totp" : false,
    "emailVerified" : false,
    "email" : "dan@sartography.com",
    "credentials" : [ {
      "id" : "d517c520-f500-4542-80e5-7144daef1e32",
      "type" : "password",

@ -569,6 +555,7 @@
    "enabled" : true,
    "totp" : false,
    "emailVerified" : false,
    "email" : "daniel@sartography.com",
    "credentials" : [ {
      "id" : "f240495c-265b-42fc-99db-46928580d07d",
      "type" : "password",

@ -588,6 +575,7 @@
    "enabled" : true,
    "totp" : false,
    "emailVerified" : false,
    "email" : "elizabeth@sartography.com",
    "credentials" : [ {
      "id" : "ae951ec8-9fc9-4f1b-b340-bbbe463ae5c2",
      "type" : "password",

@ -609,6 +597,7 @@
    "emailVerified" : false,
    "firstName" : "",
    "lastName" : "",
    "email" : "fin@status.im",
    "credentials" : [ {
      "id" : "2379940c-98b4-481a-b629-0bd1a4e91acf",
      "type" : "password",

@ -631,6 +620,7 @@
    "emailVerified" : false,
    "firstName" : "",
    "lastName" : "",
    "email" : "fin1@status.im",
    "credentials" : [ {
      "id" : "96216746-ff72-454e-8288-232428d10b42",
      "type" : "password",

@ -651,6 +641,7 @@
    "enabled" : true,
    "totp" : false,
    "emailVerified" : false,
    "email" : "finance_user1@status.im",
    "credentials" : [ {
      "id" : "f14722ec-13a7-4d35-a4ec-0475d405ae58",
      "type" : "password",

@ -670,6 +661,7 @@
    "enabled" : true,
    "totp" : false,
    "emailVerified" : false,
    "email" : "harmeet@status.im",
    "credentials" : [ {
      "id" : "89c26090-9bd3-46ac-b038-883d02e3f125",
      "type" : "password",

@ -691,6 +683,7 @@
    "emailVerified" : false,
    "firstName" : "",
    "lastName" : "",
    "email" : "j@status.im",
    "credentials" : [ {
      "id" : "e71ec785-9133-4b7d-8015-1978379af0bb",
      "type" : "password",

@ -711,6 +704,7 @@
    "enabled" : true,
    "totp" : false,
    "emailVerified" : false,
    "email" : "jakub@status.im",
    "credentials" : [ {
      "id" : "ce141fa5-b8d5-4bbe-93e7-22e7119f97c2",
      "type" : "password",

@ -730,6 +724,7 @@
    "enabled" : true,
    "totp" : false,
    "emailVerified" : false,
    "email" : "jarrad@status.im",
    "credentials" : [ {
      "id" : "113e0343-1069-476d-83f9-21d98edb9cfa",
      "type" : "password",

@ -749,6 +744,7 @@
    "enabled" : true,
    "totp" : false,
    "emailVerified" : false,
    "email" : "jason@sartography.com",
    "credentials" : [ {
      "id" : "40abf32e-f0cc-4a17-8231-1a69a02c1b0b",
      "type" : "password",

@ -768,6 +764,7 @@
    "enabled" : true,
    "totp" : false,
    "emailVerified" : false,
    "email" : "jon@sartography.com",
    "credentials" : [ {
      "id" : "8b520e01-5b9b-44ab-9ee8-505bd0831a45",
      "type" : "password",

@ -787,6 +784,7 @@
    "enabled" : true,
    "totp" : false,
    "emailVerified" : false,
    "email" : "kb@sartography.com",
    "credentials" : [ {
      "id" : "2c0be363-038f-48f1-86d6-91fdd28657cf",
      "type" : "password",

@ -808,6 +806,7 @@
    "emailVerified" : false,
    "firstName" : "",
    "lastName" : "",
    "email" : "lead@status.im",
    "credentials" : [ {
      "id" : "96e836a4-1a84-45c5-a9ed-651b0c90195e",
      "type" : "password",

@ -830,6 +829,7 @@
    "emailVerified" : false,
    "firstName" : "",
    "lastName" : "",
    "email" : "lead1@status.im",
    "credentials" : [ {
      "id" : "4e17388b-6c44-44e1-b20a-a873c0feb9a8",
      "type" : "password",

@ -850,6 +850,7 @@
    "enabled" : true,
    "totp" : false,
    "emailVerified" : false,
    "email" : "manuchehr@status.im",
    "credentials" : [ {
      "id" : "07dabf55-b5d3-4f98-abba-3334086ecf5e",
      "type" : "password",

@ -869,6 +870,7 @@
    "enabled" : true,
    "totp" : false,
    "emailVerified" : false,
    "email" : "mike@sartography.com",
    "credentials" : [ {
      "id" : "1ed375fb-0f1a-4c2a-9243-2477242cf7bd",
      "type" : "password",

@ -887,7 +889,10 @@
    "username" : "natalia",
    "enabled" : true,
    "totp" : false,
    "emailVerified" : false,
    "emailVerified" : true,
    "firstName" : "",
    "lastName" : "",
    "email" : "natalia@sartography.com",
    "credentials" : [ {
      "id" : "b6aa9936-39cc-4931-bfeb-60e6753de5ba",
      "type" : "password",

@ -907,6 +912,7 @@
    "enabled" : true,
    "totp" : false,
    "emailVerified" : false,
    "email" : "sasha@status.im",
    "credentials" : [ {
      "id" : "4a170af4-6f0c-4e7b-b70c-e674edf619df",
      "type" : "password",

@ -926,6 +932,7 @@
    "enabled" : true,
    "totp" : false,
    "emailVerified" : false,
    "email" : "service-account@status.im",
    "serviceAccountClientId" : "spiffworkflow-backend",
    "credentials" : [ ],
    "disableableCredentialTypes" : [ ],

@ -943,6 +950,7 @@
    "enabled" : true,
    "totp" : false,
    "emailVerified" : false,
    "email" : "service-account-withauth@status.im",
    "serviceAccountClientId" : "withAuth",
    "credentials" : [ ],
    "disableableCredentialTypes" : [ ],

@ -2166,7 +2174,7 @@
    "subType" : "authenticated",
    "subComponents" : { },
    "config" : {
      "allowed-protocol-mapper-types" : [ "oidc-usermodel-attribute-mapper", "oidc-sha256-pairwise-sub-mapper", "saml-user-property-mapper", "oidc-usermodel-property-mapper", "saml-role-list-mapper", "saml-user-attribute-mapper", "oidc-full-name-mapper", "oidc-address-mapper" ]
      "allowed-protocol-mapper-types" : [ "saml-user-property-mapper", "oidc-full-name-mapper", "saml-user-attribute-mapper", "oidc-usermodel-property-mapper", "oidc-address-mapper", "oidc-sha256-pairwise-sub-mapper", "oidc-usermodel-attribute-mapper", "saml-role-list-mapper" ]
    }
  }, {
    "id" : "d68e938d-dde6-47d9-bdc8-8e8523eb08cd",

@ -2184,7 +2192,7 @@
    "subType" : "anonymous",
    "subComponents" : { },
    "config" : {
      "allowed-protocol-mapper-types" : [ "oidc-address-mapper", "oidc-full-name-mapper", "oidc-usermodel-attribute-mapper", "saml-user-property-mapper", "saml-user-attribute-mapper", "oidc-sha256-pairwise-sub-mapper", "oidc-usermodel-property-mapper", "saml-role-list-mapper" ]
      "allowed-protocol-mapper-types" : [ "oidc-sha256-pairwise-sub-mapper", "oidc-full-name-mapper", "saml-user-property-mapper", "oidc-usermodel-property-mapper", "oidc-usermodel-attribute-mapper", "saml-role-list-mapper", "oidc-address-mapper", "saml-user-attribute-mapper" ]
    }
  }, {
    "id" : "3854361d-3fe5-47fb-9417-a99592e3dc5c",

@ -2274,7 +2282,7 @@
  "internationalizationEnabled" : false,
  "supportedLocales" : [ ],
  "authenticationFlows" : [ {
    "id" : "b896c673-57ab-4f24-bbb1-334bdadbecd3",
    "id" : "76ae522e-7ab3-48dc-af76-9cb8069368a2",
    "alias" : "Account verification options",
    "description" : "Method with which to verity the existing account",
    "providerId" : "basic-flow",

@ -2296,7 +2304,7 @@
      "userSetupAllowed" : false
    } ]
  }, {
    "id" : "4da99e29-371e-4f4b-a863-e5079f30a714",
    "id" : "ddf80243-ec40-4c21-ae94-2967d841f84c",
    "alias" : "Authentication Options",
    "description" : "Authentication options.",
    "providerId" : "basic-flow",

@ -2325,7 +2333,7 @@
      "userSetupAllowed" : false
    } ]
  }, {
    "id" : "d398c928-e201-4e8b-ab09-289bb351cd2e",
    "id" : "4f075680-46b7-49eb-b94c-d7425f105cb9",
    "alias" : "Browser - Conditional OTP",
    "description" : "Flow to determine if the OTP is required for the authentication",
    "providerId" : "basic-flow",

@ -2347,7 +2355,7 @@
      "userSetupAllowed" : false
    } ]
  }, {
    "id" : "663b7aa3-84f6-4347-8ed4-588c2464b75d",
    "id" : "a0467c77-c3dc-4df6-acd2-c05ca13601ed",
    "alias" : "Direct Grant - Conditional OTP",
    "description" : "Flow to determine if the OTP is required for the authentication",
    "providerId" : "basic-flow",

@ -2369,7 +2377,7 @@
      "userSetupAllowed" : false
    } ]
  }, {
    "id" : "98013bc1-e4dd-41f7-9849-1f898143b944",
    "id" : "07536fec-8d41-4c73-845f-ca85002022e0",
    "alias" : "First broker login - Conditional OTP",
    "description" : "Flow to determine if the OTP is required for the authentication",
    "providerId" : "basic-flow",

@ -2391,7 +2399,7 @@
      "userSetupAllowed" : false
    } ]
  }, {
    "id" : "b77e7545-9e39-4d72-93f8-1b38c954c2e2",
    "id" : "f123f912-71fb-4596-97f9-c0628a59413d",
    "alias" : "Handle Existing Account",
    "description" : "Handle what to do if there is existing account with same email/username like authenticated identity provider",
    "providerId" : "basic-flow",

@ -2413,7 +2421,7 @@
      "userSetupAllowed" : false
    } ]
  }, {
    "id" : "2470e6f4-9a01-476a-9057-75d78e577182",
    "id" : "03c26cc5-366b-462d-9297-b4016f8d7c57",
    "alias" : "Reset - Conditional OTP",
    "description" : "Flow to determine if the OTP should be reset or not. Set to REQUIRED to force.",
    "providerId" : "basic-flow",

@ -2435,7 +2443,7 @@
      "userSetupAllowed" : false
    } ]
  }, {
    "id" : "8e7dad0b-f4e1-4534-b618-b635b0a0e4f9",
    "id" : "1b4f474e-aa64-45cc-90f1-63504585d89c",
    "alias" : "User creation or linking",
    "description" : "Flow for the existing/non-existing user alternatives",
    "providerId" : "basic-flow",

@ -2458,7 +2466,7 @@
      "userSetupAllowed" : false
    } ]
  }, {
    "id" : "97c83e43-cba8-4d92-b108-9181bca07a1e",
    "id" : "38024dd6-daff-45de-8782-06b07b7bfa56",
    "alias" : "Verify Existing Account by Re-authentication",
    "description" : "Reauthentication of existing account",
    "providerId" : "basic-flow",

@ -2480,7 +2488,7 @@
      "userSetupAllowed" : false
    } ]
  }, {
    "id" : "fbabd64c-20de-4b8c-bfd2-be6822572278",
    "id" : "b7e30fca-e4ac-4886-a2e7-642fe2a27ee7",
    "alias" : "browser",
    "description" : "browser based authentication",
    "providerId" : "basic-flow",

@ -2516,7 +2524,7 @@
      "userSetupAllowed" : false
    } ]
  }, {
    "id" : "0628a99f-b194-495d-8e54-cc4ca8684956",
    "id" : "92e3571d-ac3e-4e79-a391-5315954e866f",
    "alias" : "clients",
    "description" : "Base authentication for clients",
    "providerId" : "client-flow",

@ -2552,7 +2560,7 @@
      "userSetupAllowed" : false
    } ]
  }, {
    "id" : "ce6bf7af-3bff-48ce-b214-7fed08503a2a",
    "id" : "5093dd2d-fe5d-4f41-a54d-03cd648d9b7f",
    "alias" : "direct grant",
    "description" : "OpenID Connect Resource Owner Grant",
    "providerId" : "basic-flow",

@ -2581,7 +2589,7 @@
      "userSetupAllowed" : false
    } ]
  }, {
    "id" : "60ce729b-d055-4ae7-83cb-85dbcf8cfdaa",
    "id" : "95d2f1ff-6907-47ce-a93c-db462fe04844",
    "alias" : "docker auth",
    "description" : "Used by Docker clients to authenticate against the IDP",
    "providerId" : "basic-flow",

@ -2596,7 +2604,7 @@
      "userSetupAllowed" : false
    } ]
  }, {
    "id" : "0bd3cf93-7f33-46b2-ad1f-85cdfb0a87f9",
    "id" : "27405ee8-5730-419c-944c-a7c67edd91ce",
    "alias" : "first broker login",
    "description" : "Actions taken after first broker login with identity provider account, which is not yet linked to any Keycloak account",
    "providerId" : "basic-flow",

@ -2619,7 +2627,7 @@
      "userSetupAllowed" : false
    } ]
  }, {
    "id" : "3e52f178-9b9d-4a62-97d5-f9f3f872bcd9",
    "id" : "fce6d926-3a99-40ee-b79e-cae84493dbd8",
    "alias" : "forms",
    "description" : "Username, password, otp and other auth forms.",
    "providerId" : "basic-flow",

@ -2641,7 +2649,7 @@
      "userSetupAllowed" : false
    } ]
  }, {
    "id" : "3f5fd6cc-2935-45d8-9bef-6857bba3657a",
    "id" : "75d93596-b7fb-4a2c-a780-e6a038e66fe9",
    "alias" : "http challenge",
    "description" : "An authentication flow based on challenge-response HTTP Authentication Schemes",
    "providerId" : "basic-flow",

@ -2663,7 +2671,7 @@
      "userSetupAllowed" : false
    } ]
  }, {
    "id" : "2c2b32dd-57dc-45d7-9a24-b4a253cb6a03",
    "id" : "04cdc1ac-c58d-4f8c-bc10-7d5e2bb99485",
    "alias" : "registration",
    "description" : "registration flow",
    "providerId" : "basic-flow",

@ -2679,7 +2687,7 @@
      "userSetupAllowed" : false
    } ]
  }, {
    "id" : "dbc28b13-dba7-42a0-a8ab-faa8762979c3",
    "id" : "99593c1e-f2a5-4198-ad41-634694259110",
    "alias" : "registration form",
    "description" : "registration form",
    "providerId" : "form-flow",

@ -2715,7 +2723,7 @@
      "userSetupAllowed" : false
    } ]
  }, {
    "id" : "b4a901d5-e7b9-4eb6-9f8e-1d3305846828",
    "id" : "7d53f026-b05e-4a9c-aba6-23b17826a4d4",
    "alias" : "reset credentials",
    "description" : "Reset credentials for a user if they forgot their password or something",
    "providerId" : "basic-flow",

@ -2751,7 +2759,7 @@
      "userSetupAllowed" : false
    } ]
  }, {
    "id" : "824fe757-cc5c-4e13-ab98-9a2132e10f5c",
    "id" : "7ca17e64-f916-4d6c-91f0-815ec66f50e8",
    "alias" : "saml ecp",
    "description" : "SAML ECP Profile Authentication Flow",
    "providerId" : "basic-flow",

@ -2767,13 +2775,13 @@
    } ]
  } ],
  "authenticatorConfig" : [ {
    "id" : "817a93da-29df-447f-ab05-cd9557e66745",
    "id" : "9b71d817-b999-479d-97f8-07e39dd9e9fa",
    "alias" : "create unique user config",
    "config" : {
      "require.password.update.after.registration" : "false"
    }
  }, {
    "id" : "4a8a9659-fa0d-4da8-907b-3b6daec1c878",
    "id" : "f9f13ba1-6a17-436b-a80b-6ccc042f9fc2",
    "alias" : "review profile config",
    "config" : {
      "update.profile.on.first.login" : "missing"
@ -18,7 +18,19 @@ set -o errtrace -o errexit -o nounset -o pipefail
if ! docker network inspect spiffworkflow > /dev/null 2>&1; then
  docker network create spiffworkflow
fi
docker rm keycloak 2>/dev/null || echo 'no keycloak container found, safe to start new container'

# https://stackoverflow.com/a/60579344/6090676
container_name="keycloak"
if [[ -n "$(docker ps -qa -f name=$container_name)" ]]; then
  echo ":: Found container - $container_name"
  if [[ -n "$(docker ps -q -f name=$container_name)" ]]; then
    echo ":: Stopping running container - $container_name"
    docker stop $container_name
  fi
  echo ":: Removing stopped container - $container_name"
  docker rm $container_name
fi

docker run \
  -p 7002:8080 \
  -d \
@ -9,7 +9,7 @@ from flask_bpmn.models.db import db
from flask_bpmn.models.db import SpiffworkflowBaseDBModel
from tests.spiffworkflow_backend.helpers.base_test import BaseTest

from spiffworkflow_backend.models.active_task_user import ActiveTaskUserModel
from spiffworkflow_backend.models.human_task_user import HumanTaskUserModel
from spiffworkflow_backend.models.process_instance import ProcessInstanceModel
from spiffworkflow_backend.models.user import UserModel
from spiffworkflow_backend.services.process_instance_processor import (

@ -47,7 +47,7 @@ def app() -> Flask:
@pytest.fixture()
def with_db_and_bpmn_file_cleanup() -> None:
    """Process_group_resource."""
    db.session.query(ActiveTaskUserModel).delete()
    db.session.query(HumanTaskUserModel).delete()

    for model in SpiffworkflowBaseDBModel._all_subclasses():
        db.session.query(model).delete()
@ -68,7 +68,7 @@ services:
      - "7000:7000"
    network_mode: host
    volumes:
      - ${BPMN_SPEC_ABSOLUTE_DIR:-./../sample-process-models}:/app/process_models
      - ${BPMN_SPEC_ABSOLUTE_DIR:-../../sample-process-models}:/app/process_models
      - ./log:/app/log
    healthcheck:
      test: curl localhost:7000/v1.0/status --fail

@ -82,7 +82,7 @@ services:
    profiles:
      - debug
    volumes:
      - ${BPMN_SPEC_ABSOLUTE_DIR:-./../sample-process-models}:/app/process_models
      - ${BPMN_SPEC_ABSOLUTE_DIR:-../../sample-process-models}:/app/process_models
      - ./:/app
    command: /app/bin/boot_in_docker_debug_mode
@@ -1,8 +1,8 @@
"""empty message

Revision ID: 4d75421c0af0
Revision ID: 907bcf0c3d75
Revises:
Create Date: 2022-12-06 17:42:56.417673
Create Date: 2022-12-28 13:52:13.030028

"""
from alembic import op

@@ -10,7 +10,7 @@ import sqlalchemy as sa


# revision identifiers, used by Alembic.
revision = '4d75421c0af0'
revision = '907bcf0c3d75'
down_revision = None
branch_labels = None
depends_on = None

@@ -72,14 +72,15 @@ def upgrade():
    op.create_table('user',
    sa.Column('id', sa.Integer(), nullable=False),
    sa.Column('username', sa.String(length=255), nullable=False),
    sa.Column('uid', sa.String(length=50), nullable=True),
    sa.Column('service', sa.String(length=50), nullable=False),
    sa.Column('service', sa.String(length=255), nullable=False),
    sa.Column('service_id', sa.String(length=255), nullable=False),
    sa.Column('name', sa.String(length=255), nullable=True),
    sa.Column('display_name', sa.String(length=255), nullable=True),
    sa.Column('email', sa.String(length=255), nullable=True),
    sa.Column('updated_at_in_seconds', sa.Integer(), nullable=True),
    sa.Column('created_at_in_seconds', sa.Integer(), nullable=True),
    sa.PrimaryKeyConstraint('id'),
    sa.UniqueConstraint('service', 'service_id', name='service_key'),
    sa.UniqueConstraint('uid')
    sa.UniqueConstraint('username')
    )
    op.create_table('message_correlation_property',
    sa.Column('id', sa.Integer(), nullable=False),

@@ -174,11 +175,20 @@ def upgrade():
    sa.PrimaryKeyConstraint('id'),
    sa.UniqueConstraint('user_id', 'group_id', name='user_group_assignment_unique')
    )
    op.create_table('active_task',
    op.create_table('user_group_assignment_waiting',
    sa.Column('id', sa.Integer(), nullable=False),
    sa.Column('username', sa.String(length=255), nullable=False),
    sa.Column('group_id', sa.Integer(), nullable=False),
    sa.ForeignKeyConstraint(['group_id'], ['group.id'], ),
    sa.PrimaryKeyConstraint('id'),
    sa.UniqueConstraint('username', 'group_id', name='user_group_assignment_staged_unique')
    )
    op.create_table('human_task',
    sa.Column('id', sa.Integer(), nullable=False),
    sa.Column('process_instance_id', sa.Integer(), nullable=False),
    sa.Column('actual_owner_id', sa.Integer(), nullable=True),
    sa.Column('lane_assignment_id', sa.Integer(), nullable=True),
    sa.Column('completed_by_user_id', sa.Integer(), nullable=True),
    sa.Column('actual_owner_id', sa.Integer(), nullable=True),
    sa.Column('form_file_name', sa.String(length=50), nullable=True),
    sa.Column('ui_form_file_name', sa.String(length=50), nullable=True),
    sa.Column('updated_at_in_seconds', sa.Integer(), nullable=True),

@@ -189,12 +199,15 @@ def upgrade():
    sa.Column('task_type', sa.String(length=50), nullable=True),
    sa.Column('task_status', sa.String(length=50), nullable=True),
    sa.Column('process_model_display_name', sa.String(length=255), nullable=True),
    sa.Column('completed', sa.Boolean(), nullable=False),
    sa.ForeignKeyConstraint(['actual_owner_id'], ['user.id'], ),
    sa.ForeignKeyConstraint(['completed_by_user_id'], ['user.id'], ),
    sa.ForeignKeyConstraint(['lane_assignment_id'], ['group.id'], ),
    sa.ForeignKeyConstraint(['process_instance_id'], ['process_instance.id'], ),
    sa.PrimaryKeyConstraint('id'),
    sa.UniqueConstraint('task_id', 'process_instance_id', name='active_task_unique')
    sa.UniqueConstraint('task_id', 'process_instance_id', name='human_task_unique')
    )
    op.create_index(op.f('ix_human_task_completed'), 'human_task', ['completed'], unique=False)
    op.create_table('message_correlation',
    sa.Column('id', sa.Integer(), nullable=False),
    sa.Column('process_instance_id', sa.Integer(), nullable=False),

@@ -255,23 +268,20 @@ def upgrade():
    sa.Column('spiff_step', sa.Integer(), nullable=False),
    sa.Column('task_json', sa.JSON(), nullable=False),
    sa.Column('timestamp', sa.DECIMAL(precision=17, scale=6), nullable=False),
    sa.Column('completed_by_user_id', sa.Integer(), nullable=True),
    sa.Column('lane_assignment_id', sa.Integer(), nullable=True),
    sa.ForeignKeyConstraint(['lane_assignment_id'], ['group.id'], ),
    sa.ForeignKeyConstraint(['process_instance_id'], ['process_instance.id'], ),
    sa.PrimaryKeyConstraint('id')
    )
    op.create_table('active_task_user',
    op.create_table('human_task_user',
    sa.Column('id', sa.Integer(), nullable=False),
    sa.Column('active_task_id', sa.Integer(), nullable=False),
    sa.Column('human_task_id', sa.Integer(), nullable=False),
    sa.Column('user_id', sa.Integer(), nullable=False),
    sa.ForeignKeyConstraint(['active_task_id'], ['active_task.id'], ),
    sa.ForeignKeyConstraint(['human_task_id'], ['human_task.id'], ),
    sa.ForeignKeyConstraint(['user_id'], ['user.id'], ),
    sa.PrimaryKeyConstraint('id'),
    sa.UniqueConstraint('active_task_id', 'user_id', name='active_task_user_unique')
    sa.UniqueConstraint('human_task_id', 'user_id', name='human_task_user_unique')
    )
    op.create_index(op.f('ix_active_task_user_active_task_id'), 'active_task_user', ['active_task_id'], unique=False)
    op.create_index(op.f('ix_active_task_user_user_id'), 'active_task_user', ['user_id'], unique=False)
    op.create_index(op.f('ix_human_task_user_human_task_id'), 'human_task_user', ['human_task_id'], unique=False)
    op.create_index(op.f('ix_human_task_user_user_id'), 'human_task_user', ['user_id'], unique=False)
    op.create_table('message_correlation_message_instance',
    sa.Column('id', sa.Integer(), nullable=False),
    sa.Column('message_instance_id', sa.Integer(), nullable=False),

@@ -291,9 +301,9 @@ def downgrade():
    op.drop_index(op.f('ix_message_correlation_message_instance_message_instance_id'), table_name='message_correlation_message_instance')
    op.drop_index(op.f('ix_message_correlation_message_instance_message_correlation_id'), table_name='message_correlation_message_instance')
    op.drop_table('message_correlation_message_instance')
    op.drop_index(op.f('ix_active_task_user_user_id'), table_name='active_task_user')
    op.drop_index(op.f('ix_active_task_user_active_task_id'), table_name='active_task_user')
    op.drop_table('active_task_user')
    op.drop_index(op.f('ix_human_task_user_user_id'), table_name='human_task_user')
    op.drop_index(op.f('ix_human_task_user_human_task_id'), table_name='human_task_user')
    op.drop_table('human_task_user')
    op.drop_table('spiff_step_details')
    op.drop_index(op.f('ix_process_instance_metadata_key'), table_name='process_instance_metadata')
    op.drop_table('process_instance_metadata')

@@ -304,7 +314,9 @@ def downgrade():
    op.drop_index(op.f('ix_message_correlation_name'), table_name='message_correlation')
    op.drop_index(op.f('ix_message_correlation_message_correlation_property_id'), table_name='message_correlation')
    op.drop_table('message_correlation')
    op.drop_table('active_task')
    op.drop_index(op.f('ix_human_task_completed'), table_name='human_task')
    op.drop_table('human_task')
    op.drop_table('user_group_assignment_waiting')
    op.drop_table('user_group_assignment')
    op.drop_table('secret')
    op.drop_table('refresh_token')
@@ -654,7 +654,7 @@ werkzeug = "*"
type = "git"
url = "https://github.com/sartography/flask-bpmn"
reference = "main"
resolved_reference = "860f2387bebdaa9220e9fbf6f8fa7f74e805d0d4"
resolved_reference = "c79c1e0b6d34ec05d82cce888b5e57b33d24403b"

[[package]]
name = "Flask-Cors"

@@ -1851,7 +1851,7 @@ lxml = "*"
type = "git"
url = "https://github.com/sartography/SpiffWorkflow"
reference = "main"
resolved_reference = "ffb1686757f944065580dd2db8def73d6c1f0134"
resolved_reference = "80640024a8030481645f0c34f34c57e88f7b4f0c"

[[package]]
name = "SQLAlchemy"
Binary file not shown.
Binary file not shown.
Binary file not shown.
@@ -18,11 +18,11 @@ from werkzeug.exceptions import NotFound

import spiffworkflow_backend.load_database_models  # noqa: F401
from spiffworkflow_backend.config import setup_config
from spiffworkflow_backend.helpers.api_version import V1_API_PATH_PREFIX
from spiffworkflow_backend.routes.admin_blueprint.admin_blueprint import admin_blueprint
from spiffworkflow_backend.routes.openid_blueprint.openid_blueprint import (
    openid_blueprint,
)
from spiffworkflow_backend.routes.process_api_blueprint import process_api_blueprint
from spiffworkflow_backend.routes.user import verify_token
from spiffworkflow_backend.routes.user_blueprint import user_blueprint
from spiffworkflow_backend.services.authorization_service import AuthorizationService

@@ -93,7 +93,8 @@ def create_app() -> flask.app.Flask:

    if os.environ.get("FLASK_SESSION_SECRET_KEY") is None:
        raise KeyError(
            "Cannot find the secret_key from the environment. Please set FLASK_SESSION_SECRET_KEY"
            "Cannot find the secret_key from the environment. Please set"
            " FLASK_SESSION_SECRET_KEY"
        )

    app.secret_key = os.environ.get("FLASK_SESSION_SECRET_KEY")

@@ -103,7 +104,6 @@ def create_app() -> flask.app.Flask:
    migrate.init_app(app, db)

    app.register_blueprint(user_blueprint)
    app.register_blueprint(process_api_blueprint)
    app.register_blueprint(api_error_blueprint)
    app.register_blueprint(admin_blueprint, url_prefix="/admin")
    app.register_blueprint(openid_blueprint, url_prefix="/openid")

@@ -117,7 +117,7 @@ def create_app() -> flask.app.Flask:
    ]
    CORS(app, origins=origins_re, max_age=3600)

    connexion_app.add_api("api.yml", base_path="/v1.0")
    connexion_app.add_api("api.yml", base_path=V1_API_PATH_PREFIX)

    mail = Mail(app)
    app.config["MAIL_APP"] = mail
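A note on the hunk above: create_app() now refuses to boot unless FLASK_SESSION_SECRET_KEY is present. A minimal sketch of satisfying the check locally (the import path is assumed; the fallback value is illustrative only, never for production):

```python
import os

# create_app() raises KeyError without this; the value here is a dev-only placeholder.
os.environ.setdefault("FLASK_SESSION_SECRET_KEY", "dev-only-secret")

from spiffworkflow_backend import create_app  # assumed import path

app = create_app()
```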
File diff suppressed because it is too large
@@ -17,21 +17,21 @@ def setup_database_uri(app: Flask) -> None:
    if app.config.get("SPIFFWORKFLOW_BACKEND_DATABASE_URI") is None:
        database_name = f"spiffworkflow_backend_{app.config['ENV_IDENTIFIER']}"
        if app.config.get("SPIFF_DATABASE_TYPE") == "sqlite":
            app.config[
                "SQLALCHEMY_DATABASE_URI"
            ] = f"sqlite:///{app.instance_path}/db_{app.config['ENV_IDENTIFIER']}.sqlite3"
            app.config["SQLALCHEMY_DATABASE_URI"] = (
                f"sqlite:///{app.instance_path}/db_{app.config['ENV_IDENTIFIER']}.sqlite3"
            )
        elif app.config.get("SPIFF_DATABASE_TYPE") == "postgres":
            app.config[
                "SQLALCHEMY_DATABASE_URI"
            ] = f"postgresql://spiffworkflow_backend:spiffworkflow_backend@localhost:5432/{database_name}"
            app.config["SQLALCHEMY_DATABASE_URI"] = (
                f"postgresql://spiffworkflow_backend:spiffworkflow_backend@localhost:5432/{database_name}"
            )
        else:
            # use pswd to trick flake8 with hardcoded passwords
            db_pswd = os.environ.get("DB_PASSWORD")
            if db_pswd is None:
                db_pswd = ""
            app.config[
                "SQLALCHEMY_DATABASE_URI"
            ] = f"mysql+mysqlconnector://root:{db_pswd}@localhost/{database_name}"
            app.config["SQLALCHEMY_DATABASE_URI"] = (
                f"mysql+mysqlconnector://root:{db_pswd}@localhost/{database_name}"
            )
    else:
        app.config["SQLALCHEMY_DATABASE_URI"] = app.config.get(
            "SPIFFWORKFLOW_BACKEND_DATABASE_URI"

@@ -42,6 +42,7 @@ def load_config_file(app: Flask, env_config_module: str) -> None:
    """Load_config_file."""
    try:
        app.config.from_object(env_config_module)
        print(f"loaded config: {env_config_module}")
    except ImportStringError as exception:
        if os.environ.get("TERRAFORM_DEPLOYED_ENVIRONMENT") != "true":
            raise ModuleNotFoundError(

@@ -62,6 +63,7 @@ def setup_config(app: Flask) -> None:
    )
    app.config["SQLALCHEMY_TRACK_MODIFICATIONS"] = False
    app.config.from_object("spiffworkflow_backend.config.default")
    print("loaded config: default")

    env_config_prefix = "spiffworkflow_backend.config."
    if (

@@ -69,6 +71,7 @@ def setup_config(app: Flask) -> None:
        and os.environ.get("SPIFFWORKFLOW_BACKEND_ENV") is not None
    ):
        load_config_file(app, f"{env_config_prefix}terraform_deployed_environment")
        print("loaded config: terraform_deployed_environment")

    env_config_module = env_config_prefix + app.config["ENV_IDENTIFIER"]
    load_config_file(app, env_config_module)

@@ -87,6 +90,14 @@ def setup_config(app: Flask) -> None:
        "permissions",
        app.config["SPIFFWORKFLOW_BACKEND_PERMISSIONS_FILE_NAME"],
    )
    print(
        "set permissions file name config:"
        f" {app.config['SPIFFWORKFLOW_BACKEND_PERMISSIONS_FILE_NAME']}"
    )
    print(
        "set permissions file name full path:"
        f" {app.config['PERMISSIONS_FILE_FULLPATH']}"
    )

    # unversioned (see .gitignore) config that can override everything and include secrets.
    # src/spiffworkflow_backend/config/secrets.py
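For reference, the setup_database_uri() branches above produce one URI shape per SPIFF_DATABASE_TYPE. A sketch of the resulting values, assuming ENV_IDENTIFIER is "local_development" and DB_PASSWORD is unset (names come from the diff; the dict itself is illustrative only):

```python
# Illustrative mapping of SPIFF_DATABASE_TYPE to the URI shapes built above.
db_name = "spiffworkflow_backend_local_development"
expected_uris = {
    "sqlite": "sqlite:///<instance_path>/db_local_development.sqlite3",
    "postgres": f"postgresql://spiffworkflow_backend:spiffworkflow_backend@localhost:5432/{db_name}",
    "mysql (the default)": f"mysql+mysqlconnector://root:@localhost/{db_name}",
}
```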
@@ -27,8 +27,6 @@ CONNECTOR_PROXY_URL = environ.get(
    "CONNECTOR_PROXY_URL", default="http://localhost:7004"
)

GIT_COMMIT_ON_SAVE = environ.get("GIT_COMMIT_ON_SAVE", default="false") == "true"

# Open ID server
OPEN_ID_SERVER_URL = environ.get(
    "OPEN_ID_SERVER_URL", default="http://localhost:7002/realms/spiffworkflow"

@@ -63,7 +61,10 @@ SPIFFWORKFLOW_BACKEND_LOG_LEVEL = environ.get(

# When a user clicks on the `Publish` button, this is the default branch this server merges into.
# I.e., dev server could have `staging` here. Staging server might have `production` here.
GIT_MERGE_BRANCH = environ.get("GIT_MERGE_BRANCH", default="staging")
GIT_BRANCH_TO_PUBLISH_TO = environ.get("GIT_BRANCH_TO_PUBLISH_TO")
GIT_BRANCH = environ.get("GIT_BRANCH")
GIT_CLONE_URL_FOR_PUBLISHING = environ.get("GIT_CLONE_URL")
GIT_COMMIT_ON_SAVE = environ.get("GIT_COMMIT_ON_SAVE", default="false") == "true"

# Datbase Configuration
SPIFF_DATABASE_TYPE = environ.get(
@@ -1,8 +1,9 @@
"""Dev."""
from os import environ

GIT_MERGE_BRANCH = environ.get("GIT_MERGE_BRANCH", default="staging")
GIT_BRANCH_TO_PUBLISH_TO = environ.get("GIT_BRANCH_TO_PUBLISH_TO", default="staging")
GIT_USERNAME = environ.get("GIT_USERNAME", default="sartography-automated-committer")
GIT_USER_EMAIL = environ.get(
    "GIT_USER_EMAIL", default="sartography-automated-committer@users.noreply.github.com"
)
SPIFFWORKFLOW_BACKEND_PERMISSIONS_FILE_NAME = "dev.yml"
@@ -12,3 +12,8 @@ SPIFFWORKFLOW_BACKEND_LOG_LEVEL = environ.get(
RUN_BACKGROUND_SCHEDULER = (
    environ.get("RUN_BACKGROUND_SCHEDULER", default="true") == "true"
)
GIT_CLONE_URL_FOR_PUBLISHING = environ.get(
    "GIT_CLONE_URL", default="https://github.com/sartography/sample-process-models.git"
)
GIT_USERNAME = "sartography-automated-committer"
GIT_USER_EMAIL = f"{GIT_USERNAME}@users.noreply.github.com"
@@ -1,13 +1,10 @@
groups:
  admin:
    users: [ciadmin1]

  common-user:
    users: [ciuser1]
    users: [ciadmin1@spiffworkflow.org]

permissions:
  admin:
    groups: [admin, common-user]
    groups: [admin]
    users: []
    allowed_permissions: [create, read, update, delete, list, instantiate]
    allowed_permissions: [create, read, update, delete]
    uri: /*
@@ -0,0 +1,151 @@
default_group: everybody

groups:
  admin:
    users:
      [
        admin@spiffworkflow.org,
        jakub@status.im,
        jarrad@status.im,
        kb@sartography.com,
        alex@sartography.com,
        dan@sartography.com,
        mike@sartography.com,
        jason@sartography.com,
        j@sartography.com,
        elizabeth@sartography.com,
        jon@sartography.com,
      ]

  Finance Team:
    users:
      [
        jakub@status.im,
        amir@status.im,
        jarrad@status.im,
        sasha@status.im,
        fin@status.im,
        fin1@status.im,
        alex@sartography.com,
        dan@sartography.com,
        mike@sartography.com,
        jason@sartography.com,
        j@sartography.com,
        elizabeth@sartography.com,
        jon@sartography.com,
      ]

  demo:
    users:
      [
        harmeet@status.im,
        sasha@status.im,
        manuchehr@status.im,
        core@status.im,
        fin@status.im,
        fin1@status.im,
        lead@status.im,
        lead1@status.im,
      ]

  test:
    users:
      [
        natalia@sartography.com,
      ]

permissions:
  admin:
    groups: [admin]
    users: []
    allowed_permissions: [create, read, update, delete]
    uri: /*

  # open system defaults for everybody
  read-all-process-groups:
    groups: [everybody]
    users: []
    allowed_permissions: [read]
    uri: /process-groups/*
  read-all-process-models:
    groups: [everybody]
    users: []
    allowed_permissions: [read]
    uri: /process-models/*

  # basic perms for everybody
  read-all-process-instances-for-me:
    groups: [everybody]
    users: []
    allowed_permissions: [read]
    uri: /process-instances/for-me/*
  read-process-instance-reports:
    groups: [everybody]
    users: []
    allowed_permissions: [create, read, update, delete]
    uri: /process-instances/reports/*
  processes-read:
    groups: [everybody]
    users: []
    allowed_permissions: [read]
    uri: /processes
  service-tasks:
    groups: [everybody]
    users: []
    allowed_permissions: [read]
    uri: /service-tasks
  tasks-crud:
    groups: [everybody]
    users: []
    allowed_permissions: [create, read, update, delete]
    uri: /tasks/*
  user-groups-for-current-user:
    groups: [everybody]
    users: []
    allowed_permissions: [read]
    uri: /user-groups/for-current-user


  finance-admin:
    groups: ["Finance Team"]
    users: []
    allowed_permissions: [create, read, update, delete]
    uri: /process-groups/manage-procurement:procurement:*

  manage-revenue-streams-instances:
    groups: ["demo"]
    users: []
    allowed_permissions: [create]
    uri: /process-instances/manage-revenue-streams:product-revenue-streams:customer-contracts-trade-terms/*
  manage-procurement-invoice-instances:
    groups: ["demo"]
    users: []
    allowed_permissions: [create]
    uri: /process-instances/manage-procurement:procurement:core-contributor-invoice-management:*
  manage-procurement-instances:
    groups: ["demo"]
    users: []
    allowed_permissions: [create]
    uri: /process-instances/manage-procurement:vendor-lifecycle-management:*

  manage-revenue-streams-instances-for-me:
    groups: ["demo"]
    users: []
    allowed_permissions: [read]
    uri: /process-instances/for-me/manage-revenue-streams:product-revenue-streams:customer-contracts-trade-terms/*
  manage-procurement-invoice-instances-for-me:
    groups: ["demo"]
    users: []
    allowed_permissions: [read]
    uri: /process-instances/for-me/manage-procurement:procurement:core-contributor-invoice-management:*
  manage-procurement-instances-for-me:
    groups: ["demo"]
    users: []
    allowed_permissions: [read]
    uri: /process-instances/for-me/manage-procurement:vendor-lifecycle-management:*

  create-test-instances:
    groups: ["test"]
    users: []
    allowed_permissions: [create, read]
    uri: /process-instances/misc:test:*
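The permission entries above pair groups with an allowed_permissions list and a uri that may end in "*". As a rough sketch only (the real check lives in AuthorizationService and may differ), a trailing "*" presumably matches any suffix of the request path:

```python
# Hypothetical sketch of wildcard URI matching; not the backend's actual logic.
def uri_matches(pattern: str, path: str) -> bool:
    if pattern.endswith("*"):
        return path.startswith(pattern[:-1])
    return path == pattern

assert uri_matches("/process-groups/*", "/process-groups/finance")
assert not uri_matches("/processes", "/processes/123")
```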
@@ -10,57 +10,60 @@ groups:
  admin:
    users:
      [
        admin,
        jakub,
        kb,
        alex,
        dan,
        mike,
        jason,
        j,
        jarrad,
        elizabeth,
        jon,
        natalia,
        admin@spiffworkflow.org,
        jakub@status.im,
        jarrad@status.im,
        kb@sartography.com,
        alex@sartography.com,
        dan@sartography.com,
        mike@sartography.com,
        jason@sartography.com,
        j@sartography.com,
        elizabeth@sartography.com,
        jon@sartography.com,
      ]

  Finance Team:
    users:
      [
        jakub,
        alex,
        dan,
        mike,
        jason,
        j,
        amir,
        jarrad,
        elizabeth,
        jon,
        natalia,
        sasha,
        fin,
        fin1,
        jakub@status.im,
        amir@status.im,
        jarrad@status.im,
        sasha@status.im,
        fin@status.im,
        fin1@status.im,
        alex@sartography.com,
        dan@sartography.com,
        mike@sartography.com,
        jason@sartography.com,
        j@sartography.com,
        elizabeth@sartography.com,
        jon@sartography.com,
      ]

  demo:
    users:
      [
        core,
        fin,
        fin1,
        harmeet,
        sasha,
        manuchehr,
        lead,
        lead1
        harmeet@status.im,
        sasha@status.im,
        manuchehr@status.im,
        core@status.im,
        fin@status.im,
        fin1@status.im,
        lead@status.im,
        lead1@status.im,
      ]

  core-contributor:
  test:
    users:
      [
        core,
        harmeet,
        natalia@sartography.com,
      ]

  admin-ro:
    users:
      [
        j@sartography.com,
      ]

permissions:

@@ -69,135 +72,102 @@ permissions:
    users: []
    allowed_permissions: [create, read, update, delete]
    uri: /*

  tasks-crud:
    groups: [everybody]
    users: []
    allowed_permissions: [create, read, update, delete]
    uri: /v1.0/tasks/*
  service-tasks:
    groups: [everybody]
  admin-readonly:
    groups: [admin-ro]
    users: []
    allowed_permissions: [read]
    uri: /v1.0/service-tasks
    uri: /*
  admin-process-instances-for-readonly:
    groups: [admin-ro]
    users: []
    allowed_permissions: [create, read, update, delete]
    uri: /process-instances/*


  # read all for everybody
  # open system defaults for everybody
  read-all-process-groups:
    groups: [everybody]
    users: []
    allowed_permissions: [read]
    uri: /v1.0/process-groups/*
    uri: /process-groups/*
  read-all-process-models:
    groups: [everybody]
    users: []
    allowed_permissions: [read]
    uri: /v1.0/process-models/*
  read-all-process-instance:
    uri: /process-models/*

  # basic perms for everybody
  read-all-process-instances-for-me:
    groups: [everybody]
    users: []
    allowed_permissions: [read]
    uri: /v1.0/process-instances/*
    uri: /process-instances/for-me/*
  read-process-instance-reports:
    groups: [everybody]
    users: []
    allowed_permissions: [read]
    uri: /v1.0/process-instances/reports/*
    allowed_permissions: [create, read, update, delete]
    uri: /process-instances/reports/*
  processes-read:
    groups: [everybody]
    users: []
    allowed_permissions: [read]
    uri: /v1.0/processes

  task-data-read:
    groups: [demo]
    uri: /processes
  service-tasks:
    groups: [everybody]
    users: []
    allowed_permissions: [read]
    uri: /v1.0/task-data/*
    uri: /service-tasks
  tasks-crud:
    groups: [everybody]
    users: []
    allowed_permissions: [create, read, update, delete]
    uri: /tasks/*
  user-groups-for-current-user:
    groups: [everybody]
    users: []
    allowed_permissions: [read]
    uri: /user-groups/for-current-user


  manage-procurement-admin:
    groups: ["Project Lead"]
    users: []
    allowed_permissions: [create, read, update, delete]
    uri: /v1.0/process-groups/manage-procurement:*
  manage-procurement-admin-slash:
    groups: ["Project Lead"]
    users: []
    allowed_permissions: [create, read, update, delete]
    uri: /v1.0/process-groups/manage-procurement/*
  manage-procurement-admin-models:
    groups: ["Project Lead"]
    users: []
    allowed_permissions: [create, read, update, delete]
    uri: /v1.0/process-models/manage-procurement:*
  manage-procurement-admin-models-slash:
    groups: ["Project Lead"]
    users: []
    allowed_permissions: [create, read, update, delete]
    uri: /v1.0/process-models/manage-procurement/*
  manage-procurement-admin-instances:
    groups: ["Project Lead"]
    users: []
    allowed_permissions: [create, read, update, delete]
    uri: /v1.0/process-instances/manage-procurement:*
  manage-procurement-admin-instances-slash:
    groups: ["Project Lead"]
    users: []
    allowed_permissions: [create, read, update, delete]
    uri: /v1.0/process-instances/manage-procurement/*

  finance-admin:
    groups: ["Finance Team"]
    users: []
    allowed_permissions: [create, read, update, delete]
    uri: /v1.0/process-groups/manage-procurement:procurement:*
    uri: /process-groups/manage-procurement:procurement:*

  manage-revenue-streams-instantiate:
    groups: ["core-contributor", "demo"]
    users: []
    allowed_permissions: [create]
    uri: /v1.0/process-models/manage-revenue-streams:product-revenue-streams:customer-contracts-trade-terms/*
  manage-revenue-streams-instances:
    groups: ["core-contributor", "demo"]
    users: []
    allowed_permissions: [create, read]
    uri: /v1.0/process-instances/manage-revenue-streams:product-revenue-streams:customer-contracts-trade-terms/*

  manage-procurement-invoice-instantiate:
    groups: ["core-contributor", "demo"]
    groups: ["demo"]
    users: []
    allowed_permissions: [create]
    uri: /v1.0/process-models/manage-procurement:procurement:core-contributor-invoice-management:*
    uri: /process-instances/manage-revenue-streams:product-revenue-streams:customer-contracts-trade-terms/*
  manage-procurement-invoice-instances:
    groups: ["core-contributor", "demo"]
    users: []
    allowed_permissions: [create, read]
    uri: /v1.0/process-instances/manage-procurement:procurement:core-contributor-invoice-management:*

  manage-procurement-instantiate:
    groups: ["core-contributor", "demo"]
    groups: ["demo"]
    users: []
    allowed_permissions: [create]
    uri: /v1.0/process-models/manage-procurement:vendor-lifecycle-management:*
    uri: /process-instances/manage-procurement:procurement:core-contributor-invoice-management:*
  manage-procurement-instances:
    groups: ["core-contributor", "demo"]
    users: []
    allowed_permissions: [create, read]
    uri: /v1.0/process-instances/manage-procurement:vendor-lifecycle-management:*

  core1-admin-models-instantiate:
    groups: ["core-contributor", "Finance Team"]
    groups: ["demo"]
    users: []
    allowed_permissions: [create]
    uri: /v1.0/process-models/misc:category_number_one:process-model-with-form/process-instances
  core1-admin-instances:
    groups: ["core-contributor", "Finance Team"]
    uri: /process-instances/manage-procurement:vendor-lifecycle-management:*

  manage-revenue-streams-instances-for-me:
    groups: ["demo"]
    users: []
    allowed_permissions: [read]
    uri: /process-instances/for-me/manage-revenue-streams:product-revenue-streams:customer-contracts-trade-terms/*
  manage-procurement-invoice-instances-for-me:
    groups: ["demo"]
    users: []
    allowed_permissions: [read]
    uri: /process-instances/for-me/manage-procurement:procurement:core-contributor-invoice-management:*
  manage-procurement-instances-for-me:
    groups: ["demo"]
    users: []
    allowed_permissions: [read]
    uri: /process-instances/for-me/manage-procurement:vendor-lifecycle-management:*

  create-test-instances:
    groups: ["test"]
    users: []
    allowed_permissions: [create, read]
    uri: /v1.0/process-instances/misc:category_number_one:process-model-with-form:*
  core1-admin-instances-slash:
    groups: ["core-contributor", "Finance Team"]
    users: []
    allowed_permissions: [create, read]
    uri: /v1.0/process-instances/misc:category_number_one:process-model-with-form/*
    uri: /process-instances/misc:test:*
@@ -2,14 +2,17 @@ default_group: everybody

users:
  admin:
    service: local_open_id
    email: admin@spiffworkflow.org
    password: admin
    preferred_username: Admin
  nelson:
    service: local_open_id
    email: nelson@spiffworkflow.org
    password: nelson
    preferred_username: Nelson
  malala:
    service: local_open_id
    email: malala@spiffworkflow.org
    password: malala
    preferred_username: Malala

@@ -18,17 +21,17 @@ groups:
  admin:
    users:
      [
        admin,
        admin@spiffworkflow.org,
      ]
  Education:
    users:
      [
        malala
        malala@spiffworkflow.org
      ]
  President:
    users:
      [
        nelson
        nelson@spiffworkflow.org
      ]

permissions:

@@ -44,45 +47,44 @@ permissions:
    groups: [everybody]
    users: []
    allowed_permissions: [create, read, update, delete]
    uri: /v1.0/tasks/*
    uri: /tasks/*

  # Everyone can see everything (all groups, and processes are visible)
  read-all-process-groups:
    groups: [ everybody ]
    users: [ ]
    allowed_permissions: [ read ]
    uri: /v1.0/process-groups/*
    uri: /process-groups/*
  read-all-process-models:
    groups: [ everybody ]
    users: [ ]
    allowed_permissions: [ read ]
    uri: /v1.0/process-models/*
    uri: /process-models/*
  read-all-process-instance:
    groups: [ everybody ]
    users: [ ]
    allowed_permissions: [ read ]
    uri: /v1.0/process-instances/*
    uri: /process-instances/*
  read-process-instance-reports:
    groups: [ everybody ]
    users: [ ]
    allowed_permissions: [ read ]
    uri: /v1.0/process-instances/reports/*
    uri: /process-instances/reports/*
  processes-read:
    groups: [ everybody ]
    users: [ ]
    allowed_permissions: [ read ]
    uri: /v1.0/processes

  # Members of the Education group can change they processes work.
    uri: /processes
  # Members of the Education group can change the processes under "education".
  education-admin:
    groups: ["Education", "President"]
    users: []
    allowed_permissions: [create, read, update, delete]
    uri: /v1.0/process-groups/education:*
    uri: /process-groups/education:*

  # Anyone can start an education process.
  education-everybody:
    groups: [everybody]
    users: []
    allowed_permissions: [create, read]
    uri: /v1.0/process-instances/misc:category_number_one:process-model-with-form/*
    uri: /process-instances/misc:category_number_one:process-model-with-form/*
@@ -0,0 +1,12 @@
default_group: everybody

groups:
  admin:
    users: [admin@spiffworkflow.org]

permissions:
  admin:
    groups: [admin]
    users: []
    allowed_permissions: [create, read, update, delete]
    uri: /*
@@ -0,0 +1,148 @@
default_group: everybody

groups:
  admin:
    users:
      [
        admin@spiffworkflow.org,
        jakub@status.im,
        jarrad@status.im,
        kb@sartography.com,
        alex@sartography.com,
        dan@sartography.com,
        mike@sartography.com,
        jason@sartography.com,
        j@sartography.com,
        elizabeth@sartography.com,
        jon@sartography.com,
      ]

  Finance Team:
    users:
      [
        jakub@status.im,
        amir@status.im,
        jarrad@status.im,
        sasha@status.im,
        fin@status.im,
        fin1@status.im,
        alex@sartography.com,
        dan@sartography.com,
        mike@sartography.com,
        jason@sartography.com,
        j@sartography.com,
        elizabeth@sartography.com,
        jon@sartography.com,
      ]

  demo:
    users:
      [
        harmeet@status.im,
        sasha@status.im,
        manuchehr@status.im,
        core@status.im,
        fin@status.im,
        fin1@status.im,
        lead@status.im,
        lead1@status.im,
      ]
  test:
    users:
      [
        natalia@sartography.com,
      ]

permissions:
  admin:
    groups: [admin]
    users: []
    allowed_permissions: [read]
    uri: /*
  admin-process-instances:
    groups: [admin]
    users: []
    allowed_permissions: [create, read, update, delete]
    uri: /process-instances/*

  # open system defaults for everybody
  read-all-process-groups:
    groups: [everybody]
    users: []
    allowed_permissions: [read]
    uri: /process-groups/*
  read-all-process-models:
    groups: [everybody]
    users: []
    allowed_permissions: [read]
    uri: /process-models/*

  # basic perms for everybody
  read-all-process-instances-for-me:
    groups: [everybody]
    users: []
    allowed_permissions: [read]
    uri: /process-instances/for-me/*
  read-process-instance-reports:
    groups: [everybody]
    users: []
    allowed_permissions: [create, read, update, delete]
    uri: /process-instances/reports/*
  processes-read:
    groups: [everybody]
    users: []
    allowed_permissions: [read]
    uri: /processes
  service-tasks:
    groups: [everybody]
    users: []
    allowed_permissions: [read]
    uri: /service-tasks
  tasks-crud:
    groups: [everybody]
    users: []
    allowed_permissions: [create, read, update, delete]
    uri: /tasks/*
  user-groups-for-current-user:
    groups: [everybody]
    users: []
    allowed_permissions: [read]
    uri: /user-groups/for-current-user

  manage-revenue-streams-instances:
    groups: ["demo"]
    users: []
    allowed_permissions: [create]
    uri: /process-instances/manage-revenue-streams:product-revenue-streams:customer-contracts-trade-terms/*
  manage-procurement-invoice-instances:
    groups: ["demo"]
    users: []
    allowed_permissions: [create]
    uri: /process-instances/manage-procurement:procurement:core-contributor-invoice-management:*
  manage-procurement-instances:
    groups: ["demo"]
    users: []
    allowed_permissions: [create]
    uri: /process-instances/manage-procurement:vendor-lifecycle-management:*

  manage-revenue-streams-instances-for-me:
    groups: ["demo"]
    users: []
    allowed_permissions: [read]
    uri: /process-instances/for-me/manage-revenue-streams:product-revenue-streams:customer-contracts-trade-terms/*
  manage-procurement-invoice-instances-for-me:
    groups: ["demo"]
    users: []
    allowed_permissions: [read]
    uri: /process-instances/for-me/manage-procurement:procurement:core-contributor-invoice-management:*
  manage-procurement-instances-for-me:
    groups: ["demo"]
    users: []
    allowed_permissions: [read]
    uri: /process-instances/for-me/manage-procurement:vendor-lifecycle-management:*

  create-test-instances:
    groups: ["test"]
    users: []
    allowed_permissions: [create, read]
    uri: /process-instances/misc:test:*
@@ -2,60 +2,7 @@ default_group: everybody

groups:
  admin:
    users:
      [
        admin,
        jakub,
        kb,
        alex,
        dan,
        mike,
        jason,
        j,
        jarrad,
        elizabeth,
        jon,
        natalia,
      ]

  Finance Team:
    users:
      [
        jakub,
        alex,
        dan,
        mike,
        jason,
        j,
        amir,
        jarrad,
        elizabeth,
        jon,
        natalia,
        sasha,
        fin,
        fin1,
      ]

  demo:
    users:
      [
        core,
        fin,
        fin1,
        harmeet,
        sasha,
        manuchehr,
        lead,
        lead1
      ]

  core-contributor:
    users:
      [
        core,
        harmeet,
      ]
    users: [admin@spiffworkflow.org]

permissions:
  admin:

@@ -63,120 +10,3 @@ permissions:
    users: []
    allowed_permissions: [create, read, update, delete]
    uri: /*

  tasks-crud:
    groups: [everybody]
    users: []
    allowed_permissions: [create, read, update, delete]
    uri: /v1.0/tasks/*

  service-tasks:
    groups: [everybody]
    users: []
    allowed_permissions: [read]
    uri: /v1.0/service-tasks


  # read all for everybody
  read-all-process-groups:
    groups: [everybody]
    users: []
    allowed_permissions: [read]
    uri: /v1.0/process-groups/*
  read-all-process-models:
    groups: [everybody]
    users: []
    allowed_permissions: [read]
    uri: /v1.0/process-models/*
  read-all-process-instance:
    groups: [everybody]
    users: []
    allowed_permissions: [read]
    uri: /v1.0/process-instances/*
  read-process-instance-reports:
    groups: [everybody]
    users: []
    allowed_permissions: [read]
    uri: /v1.0/process-instances/reports/*
  processes-read:
    groups: [everybody]
    users: []
    allowed_permissions: [read]
    uri: /v1.0/processes

  task-data-read:
    groups: [demo]
    users: []
    allowed_permissions: [read]
    uri: /v1.0/task-data/*


  manage-procurement-admin:
    groups: ["Project Lead"]
    users: []
    allowed_permissions: [create, read, update, delete]
    uri: /v1.0/process-groups/manage-procurement:*
  manage-procurement-admin-slash:
    groups: ["Project Lead"]
    users: []
    allowed_permissions: [create, read, update, delete]
    uri: /v1.0/process-groups/manage-procurement/*
  manage-procurement-admin-models:
    groups: ["Project Lead"]
    users: []
    allowed_permissions: [create, read, update, delete]
    uri: /v1.0/process-models/manage-procurement:*
  manage-procurement-admin-models-slash:
    groups: ["Project Lead"]
    users: []
    allowed_permissions: [create, read, update, delete]
    uri: /v1.0/process-models/manage-procurement/*
  manage-procurement-admin-instances:
    groups: ["Project Lead"]
    users: []
    allowed_permissions: [create, read, update, delete]
    uri: /v1.0/process-instances/manage-procurement:*
  manage-procurement-admin-instances-slash:
    groups: ["Project Lead"]
    users: []
    allowed_permissions: [create, read, update, delete]
    uri: /v1.0/process-instances/manage-procurement/*

  finance-admin:
    groups: ["Finance Team"]
    users: []
    allowed_permissions: [create, read, update, delete]
    uri: /v1.0/process-groups/manage-procurement:procurement:*

  manage-revenue-streams-instantiate:
    groups: ["core-contributor", "demo"]
    users: []
    allowed_permissions: [create]
    uri: /v1.0/process-models/manage-revenue-streams:product-revenue-streams:customer-contracts-trade-terms/*
  manage-revenue-streams-instances:
    groups: ["core-contributor", "demo"]
    users: []
    allowed_permissions: [create, read]
    uri: /v1.0/process-instances/manage-revenue-streams:product-revenue-streams:customer-contracts-trade-terms/*

  manage-procurement-invoice-instantiate:
    groups: ["core-contributor", "demo"]
    users: []
    allowed_permissions: [create]
    uri: /v1.0/process-models/manage-procurement:procurement:core-contributor-invoice-management:*
  manage-procurement-invoice-instances:
    groups: ["core-contributor", "demo"]
    users: []
    allowed_permissions: [create, read]
    uri: /v1.0/process-instances/manage-procurement:procurement:core-contributor-invoice-management:*

  manage-procurement-instantiate:
    groups: ["core-contributor", "demo"]
    users: []
    allowed_permissions: [create]
    uri: /v1.0/process-models/manage-procurement:vendor-lifecycle-management:*
  manage-procurement-instances:
    groups: ["core-contributor", "demo"]
    users: []
    allowed_permissions: [create, read]
    uri: /v1.0/process-instances/manage-procurement:vendor-lifecycle-management:*
@@ -1,5 +1,12 @@
default_group: everybody

users:
  testadmin1:
    service: https://testing/openid/thing
    email: testadmin1@spiffworkflow.org
    password: admin
    preferred_username: El administrador de la muerte

groups:
  admin:
    users: [testadmin1, testadmin2]

@@ -14,7 +21,7 @@ permissions:
  admin:
    groups: [admin]
    users: []
    allowed_permissions: [create, read, update, delete, list, instantiate]
    allowed_permissions: [create, read, update, delete]
    uri: /*

  read-all:

@@ -27,29 +34,29 @@ permissions:
    groups: [everybody]
    users: []
    allowed_permissions: [create, read, update, delete]
    uri: /v1.0/tasks/*
    uri: /tasks/*

  # TODO: all uris should really have the same structure
  finance-admin-group:
    groups: ["Finance Team"]
    users: [testuser4]
    allowed_permissions: [create, read, update, delete]
    uri: /v1.0/process-groups/finance/*
    uri: /process-groups/finance/*

  finance-admin-model:
    groups: ["Finance Team"]
    users: [testuser4]
    allowed_permissions: [create, read, update, delete]
    uri: /v1.0/process-models/finance/*
    uri: /process-models/finance/*

  finance-admin-model-lanes:
    groups: ["Finance Team"]
    users: [testuser4]
    allowed_permissions: [create, read, update, delete]
    uri: /v1.0/process-models/finance:model_with_lanes/*
    uri: /process-models/finance:model_with_lanes/*

  finance-admin-instance-run:
    groups: ["Finance Team"]
    users: [testuser4]
    allowed_permissions: [create, read, update, delete]
    uri: /v1.0/process-instances/*
    uri: /process-instances/*
@@ -0,0 +1,11 @@
"""Qa1."""
from os import environ

GIT_BRANCH_TO_PUBLISH_TO = environ.get("GIT_BRANCH_TO_PUBLISH_TO", default="qa2")
GIT_USERNAME = environ.get("GIT_USERNAME", default="sartography-automated-committer")
GIT_USER_EMAIL = environ.get(
    "GIT_USER_EMAIL", default="sartography-automated-committer@users.noreply.github.com"
)
SPIFFWORKFLOW_BACKEND_PERMISSIONS_FILE_NAME = environ.get(
    "SPIFFWORKFLOW_BACKEND_PERMISSIONS_FILE_NAME", default="qa1.yml"
)
@@ -0,0 +1,7 @@
"""Staging."""
from os import environ

GIT_BRANCH = environ.get("GIT_BRANCH", default="staging")
GIT_BRANCH_TO_PUBLISH_TO = environ.get("GIT_BRANCH_TO_PUBLISH_TO", default="main")
GIT_COMMIT_ON_SAVE = False
SPIFFWORKFLOW_BACKEND_PERMISSIONS_FILE_NAME = "staging.yml"
@@ -5,8 +5,8 @@ from os import environ
environment_identifier_for_this_config_file_only = environ["SPIFFWORKFLOW_BACKEND_ENV"]

GIT_COMMIT_ON_SAVE = True
GIT_USERNAME = environment_identifier_for_this_config_file_only
GIT_USER_EMAIL = f"{environment_identifier_for_this_config_file_only}@example.com"
GIT_USERNAME = "sartography-automated-committer"
GIT_USER_EMAIL = f"{GIT_USERNAME}@users.noreply.github.com"
SPIFFWORKFLOW_BACKEND_PERMISSIONS_FILE_NAME = environ.get(
    "SPIFFWORKFLOW_BACKEND_PERMISSIONS_FILE_NAME",
    default="terraform_deployed_environment.yml",

@@ -24,3 +24,6 @@ SPIFFWORKFLOW_BACKEND_URL = (
    f"https://api.{environment_identifier_for_this_config_file_only}.spiffworkflow.org"
)
CONNECTOR_PROXY_URL = f"https://connector-proxy.{environment_identifier_for_this_config_file_only}.spiffworkflow.org"
GIT_CLONE_URL_FOR_PUBLISHING = environ.get(
    "GIT_CLONE_URL", default="https://github.com/sartography/sample-process-models.git"
)
@@ -15,6 +15,7 @@ SPIFFWORKFLOW_BACKEND_PERMISSIONS_FILE_NAME = environ.get(
SPIFFWORKFLOW_BACKEND_LOG_LEVEL = environ.get(
    "SPIFFWORKFLOW_BACKEND_LOG_LEVEL", default="debug"
)
GIT_COMMIT_ON_SAVE = False

# NOTE: set this here since nox shoves tests and src code to
# different places and this allows us to know exactly where we are at the start
@@ -0,0 +1,2 @@
"""Api_version."""
V1_API_PATH_PREFIX = "/v1.0"
@@ -17,7 +17,7 @@ from spiffworkflow_backend.models.user_group_assignment import (
from spiffworkflow_backend.models.principal import PrincipalModel  # noqa: F401


from spiffworkflow_backend.models.active_task import ActiveTaskModel  # noqa: F401
from spiffworkflow_backend.models.human_task import HumanTaskModel  # noqa: F401
from spiffworkflow_backend.models.spec_reference import (
    SpecReferenceCache,
)  # noqa: F401
@@ -27,6 +27,9 @@ class GroupModel(FlaskBpmnGroupModel):
    identifier = db.Column(db.String(255))

    user_group_assignments = relationship("UserGroupAssignmentModel", cascade="delete")
    user_group_assignments_waiting = relationship(  # type: ignore
        "UserGroupAssignmentWaitingModel", cascade="delete"
    )
    users = relationship(  # type: ignore
        "UserModel",
        viewonly=True,
@@ -1,4 +1,4 @@
"""Active_task."""
"""Human_task."""
from __future__ import annotations

from dataclasses import dataclass

@@ -8,7 +8,6 @@ from flask_bpmn.models.db import db
from flask_bpmn.models.db import SpiffworkflowBaseDBModel
from sqlalchemy import ForeignKey
from sqlalchemy.orm import relationship
from sqlalchemy.orm import RelationshipProperty

from spiffworkflow_backend.models.group import GroupModel
from spiffworkflow_backend.models.process_instance import ProcessInstanceModel

@@ -17,29 +16,30 @@ from spiffworkflow_backend.models.user import UserModel


if TYPE_CHECKING:
    from spiffworkflow_backend.models.active_task_user import (  # noqa: F401
        ActiveTaskUserModel,
    from spiffworkflow_backend.models.human_task_user import (  # noqa: F401
        HumanTaskUserModel,
    )


@dataclass
class ActiveTaskModel(SpiffworkflowBaseDBModel):
    """ActiveTaskModel."""
class HumanTaskModel(SpiffworkflowBaseDBModel):
    """HumanTaskModel."""

    __tablename__ = "active_task"
    __tablename__ = "human_task"
    __table_args__ = (
        db.UniqueConstraint(
            "task_id", "process_instance_id", name="active_task_unique"
        ),
        db.UniqueConstraint("task_id", "process_instance_id", name="human_task_unique"),
    )

    actual_owner: RelationshipProperty[UserModel] = relationship(UserModel)
    id: int = db.Column(db.Integer, primary_key=True)
    process_instance_id: int = db.Column(
        ForeignKey(ProcessInstanceModel.id), nullable=False  # type: ignore
    )
    actual_owner_id: int = db.Column(ForeignKey(UserModel.id))
    lane_assignment_id: int | None = db.Column(ForeignKey(GroupModel.id))
    completed_by_user_id: int = db.Column(ForeignKey(UserModel.id), nullable=True)

    actual_owner_id: int = db.Column(ForeignKey(UserModel.id))
    # actual_owner: RelationshipProperty[UserModel] = relationship(UserModel)

    form_file_name: str | None = db.Column(db.String(50))
    ui_form_file_name: str | None = db.Column(db.String(50))

@@ -52,17 +52,18 @@ class ActiveTaskModel(SpiffworkflowBaseDBModel):
    task_type: str = db.Column(db.String(50))
    task_status: str = db.Column(db.String(50))
    process_model_display_name: str = db.Column(db.String(255))
    completed: bool = db.Column(db.Boolean, default=False, nullable=False, index=True)

    active_task_users = relationship("ActiveTaskUserModel", cascade="delete")
    human_task_users = relationship("HumanTaskUserModel", cascade="delete")
    potential_owners = relationship(  # type: ignore
        "UserModel",
        viewonly=True,
        secondary="active_task_user",
        overlaps="active_task_user,users",
        secondary="human_task_user",
        overlaps="human_task_user,users",
    )

    @classmethod
    def to_task(cls, task: ActiveTaskModel) -> Task:
    def to_task(cls, task: HumanTaskModel) -> Task:
        """To_task."""
        new_task = Task(
            task.task_id,

@@ -79,7 +80,7 @@ class ActiveTaskModel(SpiffworkflowBaseDBModel):
        if hasattr(task, "process_model_identifier"):
            new_task.process_model_identifier = task.process_model_identifier

        # active tasks only have status when getting the list on the home page
        # human tasks only have status when getting the list on the home page
        # and it comes from the process_instance. it should not be confused with task_status.
        if hasattr(task, "status"):
            new_task.process_instance_status = task.status
@@ -1,4 +1,4 @@
"""Active_task_user."""
"""Human_task_user."""
from __future__ import annotations

from dataclasses import dataclass

@@ -7,26 +7,26 @@ from flask_bpmn.models.db import db
from flask_bpmn.models.db import SpiffworkflowBaseDBModel
from sqlalchemy import ForeignKey

from spiffworkflow_backend.models.active_task import ActiveTaskModel
from spiffworkflow_backend.models.human_task import HumanTaskModel
from spiffworkflow_backend.models.user import UserModel


@dataclass
class ActiveTaskUserModel(SpiffworkflowBaseDBModel):
    """ActiveTaskUserModel."""
class HumanTaskUserModel(SpiffworkflowBaseDBModel):
    """HumanTaskUserModel."""

    __tablename__ = "active_task_user"
    __tablename__ = "human_task_user"

    __table_args__ = (
        db.UniqueConstraint(
            "active_task_id",
            "human_task_id",
            "user_id",
            name="active_task_user_unique",
            name="human_task_user_unique",
        ),
    )

    id = db.Column(db.Integer, primary_key=True)
    active_task_id = db.Column(
        ForeignKey(ActiveTaskModel.id), nullable=False, index=True  # type: ignore
    human_task_id = db.Column(
        ForeignKey(HumanTaskModel.id), nullable=False, index=True  # type: ignore
    )
    user_id = db.Column(ForeignKey(UserModel.id), nullable=False, index=True)
@@ -86,5 +86,6 @@ def ensure_failure_cause_is_set_if_message_instance_failed(
    if isinstance(instance, MessageInstanceModel):
        if instance.status == "failed" and instance.failure_cause is None:
            raise ValueError(
                f"{instance.__class__.__name__}: failure_cause must be set if status is failed"
                f"{instance.__class__.__name__}: failure_cause must be set if"
                " status is failed"
            )
@@ -32,14 +32,6 @@ class Permission(enum.Enum):
    update = "update"
    delete = "delete"

    # maybe read to GET process_model/process-instances instead?
    list = "list"

    # maybe use create instead on
    # POST http://localhost:7000/v1.0/process-models/category_number_one/call-activity/process-instances/*
    # POST http://localhost:7000/v1.0/process-models/category_number_one/call-activity/process-instances/332/run
    instantiate = "instantiate"  # this is something you do to a process model


class PermissionAssignmentModel(SpiffworkflowBaseDBModel):
    """PermissionAssignmentModel."""
@ -26,34 +26,12 @@ class ProcessInstanceNotFoundError(Exception):
|
|||
"""ProcessInstanceNotFoundError."""
|
||||
|
||||
|
||||
class NavigationItemSchema(Schema):
|
||||
"""NavigationItemSchema."""
|
||||
class ProcessInstanceTaskDataCannotBeUpdatedError(Exception):
|
||||
"""ProcessInstanceTaskDataCannotBeUpdatedError."""
|
||||
|
||||
class Meta:
|
||||
"""Meta."""
|
||||
|
||||
fields = [
|
||||
"spec_id",
|
||||
"name",
|
||||
"spec_type",
|
||||
"task_id",
|
||||
"description",
|
||||
"backtracks",
|
||||
"indent",
|
||||
"lane",
|
||||
"state",
|
||||
"children",
|
||||
]
|
||||
unknown = INCLUDE
|
||||
|
||||
state = marshmallow.fields.String(required=False, allow_none=True)
|
||||
description = marshmallow.fields.String(required=False, allow_none=True)
|
||||
backtracks = marshmallow.fields.String(required=False, allow_none=True)
|
||||
lane = marshmallow.fields.String(required=False, allow_none=True)
|
||||
task_id = marshmallow.fields.String(required=False, allow_none=True)
|
||||
children = marshmallow.fields.List(
|
||||
marshmallow.fields.Nested(lambda: NavigationItemSchema())
|
||||
)
|
||||
class ProcessInstanceCannotBeDeletedError(Exception):
|
||||
"""ProcessInstanceCannotBeDeletedError."""
|
||||
|
||||
|
||||
class ProcessInstanceStatus(SpiffEnum):
|
||||
|
@ -82,7 +60,19 @@ class ProcessInstanceModel(SpiffworkflowBaseDBModel):
|
|||
process_initiator_id: int = db.Column(ForeignKey(UserModel.id), nullable=False)
|
||||
process_initiator = relationship("UserModel")
|
||||
|
||||
active_tasks = relationship("ActiveTaskModel", cascade="delete") # type: ignore
|
||||
active_human_tasks = relationship(
|
||||
"HumanTaskModel",
|
||||
primaryjoin=(
|
||||
"and_(HumanTaskModel.process_instance_id==ProcessInstanceModel.id,"
|
||||
" HumanTaskModel.completed == False)"
|
||||
),
|
||||
) # type: ignore
|
||||
|
||||
human_tasks = relationship(
|
||||
"HumanTaskModel",
|
||||
cascade="delete",
|
||||
overlaps="active_human_tasks",
|
||||
) # type: ignore
|
||||
message_instances = relationship("MessageInstanceModel", cascade="delete") # type: ignore
|
||||
message_correlations = relationship("MessageCorrelationModel", cascade="delete") # type: ignore
|
||||
|
||||
|
@ -93,7 +83,7 @@ class ProcessInstanceModel(SpiffworkflowBaseDBModel):
|
|||
created_at_in_seconds: int = db.Column(db.Integer)
|
||||
status: str = db.Column(db.String(50))
|
||||
|
||||
bpmn_xml_file_contents: bytes | None = None
|
||||
bpmn_xml_file_contents: str | None = None
|
||||
bpmn_version_control_type: str = db.Column(db.String(50))
|
||||
bpmn_version_control_identifier: str = db.Column(db.String(255))
|
||||
spiff_step: int = db.Column(db.Integer)
|
||||
|
@ -101,9 +91,6 @@ class ProcessInstanceModel(SpiffworkflowBaseDBModel):
|
|||
@property
|
||||
def serialized(self) -> dict[str, Any]:
|
||||
"""Return object data in serializeable format."""
|
||||
local_bpmn_xml_file_contents = ""
|
||||
if self.bpmn_xml_file_contents:
|
||||
local_bpmn_xml_file_contents = self.bpmn_xml_file_contents.decode("utf-8")
|
||||
return {
|
||||
"id": self.id,
|
||||
"process_model_identifier": self.process_model_identifier,
|
||||
|
@ -112,7 +99,7 @@ class ProcessInstanceModel(SpiffworkflowBaseDBModel):
|
|||
"start_in_seconds": self.start_in_seconds,
|
||||
"end_in_seconds": self.end_in_seconds,
|
||||
"process_initiator_id": self.process_initiator_id,
|
||||
"bpmn_xml_file_contents": local_bpmn_xml_file_contents,
|
||||
"bpmn_xml_file_contents": self.bpmn_xml_file_contents,
|
||||
"bpmn_version_control_identifier": self.bpmn_version_control_identifier,
|
||||
"bpmn_version_control_type": self.bpmn_version_control_type,
|
||||
"spiff_step": self.spiff_step,
|
||||
|
@ -134,6 +121,19 @@ class ProcessInstanceModel(SpiffworkflowBaseDBModel):
|
|||
"""Validate_status."""
|
||||
return self.validate_enum_field(key, value, ProcessInstanceStatus)
|
||||
|
||||
def can_submit_task(self) -> bool:
|
||||
"""Can_submit_task."""
|
||||
return not self.has_terminal_status() and self.status != "suspended"
|
||||
|
||||
def has_terminal_status(self) -> bool:
|
||||
"""Has_terminal_status."""
|
||||
return self.status in self.terminal_statuses()
|
||||
|
||||
@classmethod
|
||||
def terminal_statuses(cls) -> list[str]:
|
||||
"""Terminal_statuses."""
|
||||
return ["complete", "error", "terminated"]
|
||||
|
||||
|
||||
class ProcessInstanceModelSchema(Schema):
|
||||
"""ProcessInstanceModelSchema."""
|
||||
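
The terminal-status helpers above are what the API layer keys off. A minimal sketch of that pattern (the guard function below is hypothetical; only the model methods and exception come from this commit):

from spiffworkflow_backend.models.process_instance import (
    ProcessInstanceCannotBeDeletedError,
    ProcessInstanceModel,
)


def ensure_deletable(process_instance: ProcessInstanceModel) -> None:
    # Hypothetical guard: deletion is only safe once the instance has reached
    # one of terminal_statuses(); task submission additionally requires that
    # the instance is not suspended (see can_submit_task above).
    if not process_instance.has_terminal_status():
        raise ProcessInstanceCannotBeDeletedError(
            f"Process instance ({process_instance.id}) is still {process_instance.status}."
        )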

@@ -1,4 +1,4 @@
"""Spiff_step_details."""
|
||||
"""Process_instance_metadata."""
|
||||
from dataclasses import dataclass
|
||||
|
||||
from flask_bpmn.models.db import db
|
||||
|
|
|
@ -8,6 +8,10 @@ from marshmallow import INCLUDE
|
|||
from sqlalchemy import UniqueConstraint


class SpecReferenceNotFoundError(Exception):
    """SpecReferenceNotFoundError."""


@dataclass()
class SpecReference:
    """File Reference Information.

@@ -8,7 +8,7 @@ from flask_bpmn.models.db import SpiffworkflowBaseDBModel

@dataclass
class SpiffLoggingModel(SpiffworkflowBaseDBModel):
    """LoggingModel."""
    """SpiffLoggingModel."""

    __tablename__ = "spiff_logging"
    id: int = db.Column(db.Integer, primary_key=True)

@@ -1,13 +1,11 @@
"""Spiff_step_details."""
|
||||
from dataclasses import dataclass
|
||||
from typing import Optional
|
||||
|
||||
from flask_bpmn.models.db import db
|
||||
from flask_bpmn.models.db import SpiffworkflowBaseDBModel
|
||||
from sqlalchemy import ForeignKey
|
||||
from sqlalchemy.orm import deferred
|
||||
|
||||
from spiffworkflow_backend.models.group import GroupModel
|
||||
from spiffworkflow_backend.models.process_instance import ProcessInstanceModel
|
||||
|
||||
|
||||
|
@ -20,10 +18,13 @@ class SpiffStepDetailsModel(SpiffworkflowBaseDBModel):
|
|||
    process_instance_id: int = db.Column(
        ForeignKey(ProcessInstanceModel.id), nullable=False  # type: ignore
    )
    # human_task_id: int = db.Column(
    #     ForeignKey(HumanTaskModel.id)  # type: ignore
    # )
    spiff_step: int = db.Column(db.Integer, nullable=False)
    task_json: str = deferred(db.Column(db.JSON, nullable=False))  # type: ignore
    task_json: dict = deferred(db.Column(db.JSON, nullable=False))  # type: ignore
    timestamp: float = db.Column(db.DECIMAL(17, 6), nullable=False)
    completed_by_user_id: int = db.Column(db.Integer, nullable=True)
    lane_assignment_id: Optional[int] = db.Column(
        ForeignKey(GroupModel.id), nullable=True
    )
    # completed_by_user_id: int = db.Column(db.Integer, nullable=True)
    # lane_assignment_id: Optional[int] = db.Column(
    #     ForeignKey(GroupModel.id), nullable=True
    # )

@@ -43,8 +43,8 @@ class Task:
    FIELD_TYPE_EMAIL = "email"  # email: Email address
    FIELD_TYPE_URL = "url"  # url: Website address

    FIELD_PROP_AUTO_COMPLETE_MAX = (
        "autocomplete_num"  # Not used directly, passed in from the front end.
    FIELD_PROP_AUTO_COMPLETE_MAX = (  # Not used directly, passed in from the front end.
        "autocomplete_num"
    )

    # Required field

@@ -77,8 +77,8 @@ class Task:

    # File specific field properties
    FIELD_PROP_DOC_CODE = "doc_code"  # to associate a file upload field with a doc code
    FIELD_PROP_FILE_DATA = (
        "file_data"  # to associate a bit of data with a specific file upload file.
    FIELD_PROP_FILE_DATA = (  # to associate a bit of data with a specific file upload file.
        "file_data"
    )

    # Additional properties

@@ -108,7 +108,7 @@ class Task:
        multi_instance_type: Union[MultiInstanceType, None] = None,
        multi_instance_count: str = "",
        multi_instance_index: str = "",
        process_name: str = "",
        process_identifier: str = "",
        properties: Union[dict, None] = None,
        process_instance_id: Union[int, None] = None,
        process_instance_status: Union[str, None] = None,

@@ -118,6 +118,8 @@ class Task:
        form_schema: Union[str, None] = None,
        form_ui_schema: Union[str, None] = None,
        parent: Optional[str] = None,
        event_definition: Union[dict[str, Any], None] = None,
        call_activity_process_identifier: Optional[str] = None,
    ):
        """__init__."""
        self.id = id

@@ -129,6 +131,8 @@ class Task:
        self.documentation = documentation
        self.lane = lane
        self.parent = parent
        self.event_definition = event_definition
        self.call_activity_process_identifier = call_activity_process_identifier

        self.data = data
        if self.data is None:

@@ -151,7 +155,7 @@ class Task:
        self.multi_instance_index = (
            multi_instance_index  # And the index of the currently repeating task.
        )
        self.process_name = process_name
        self.process_identifier = process_identifier

        self.properties = properties  # Arbitrary extension properties from BPMN editor.
        if self.properties is None:

@@ -177,7 +181,7 @@ class Task:
"multi_instance_type": multi_instance_type,
|
||||
"multi_instance_count": self.multi_instance_count,
|
||||
"multi_instance_index": self.multi_instance_index,
|
||||
"process_name": self.process_name,
|
||||
"process_identifier": self.process_identifier,
|
||||
"properties": self.properties,
|
||||
"process_instance_id": self.process_instance_id,
|
||||
"process_instance_status": self.process_instance_status,
|
||||
|
@ -187,6 +191,8 @@ class Task:
|
|||
"form_schema": self.form_schema,
|
||||
"form_ui_schema": self.form_ui_schema,
|
||||
"parent": self.parent,
|
||||
"event_definition": self.event_definition,
|
||||
"call_activity_process_identifier": self.call_activity_process_identifier,
|
||||
}
|
||||
|
||||
@classmethod
|
||||
|
@ -282,18 +288,19 @@ class TaskSchema(Schema):
|
|||
"multi_instance_type",
|
||||
"multi_instance_count",
|
||||
"multi_instance_index",
|
||||
"process_name",
|
||||
"process_identifier",
|
||||
"properties",
|
||||
"process_instance_id",
|
||||
"form_schema",
|
||||
"form_ui_schema",
|
||||
"event_definition",
|
||||
]
|
||||
|
||||
multi_instance_type = EnumField(MultiInstanceType)
|
||||
documentation = marshmallow.fields.String(required=False, allow_none=True)
|
||||
# form = marshmallow.fields.Nested(FormSchema, required=False, allow_none=True)
|
||||
title = marshmallow.fields.String(required=False, allow_none=True)
|
||||
process_name = marshmallow.fields.String(required=False, allow_none=True)
|
||||
process_identifier = marshmallow.fields.String(required=False, allow_none=True)
|
||||
lane = marshmallow.fields.String(required=False, allow_none=True)
|
||||
|
||||
@marshmallow.post_load
|
||||
|
|
|
@ -1,22 +1,15 @@
|
|||
"""User."""
|
||||
from __future__ import annotations
|
||||
|
||||
from typing import Any
|
||||
|
||||
import jwt
|
||||
import marshmallow
|
||||
from flask import current_app
|
||||
from flask_bpmn.api.api_error import ApiError
|
||||
from flask_bpmn.models.db import db
|
||||
from flask_bpmn.models.db import SpiffworkflowBaseDBModel
|
||||
from marshmallow import Schema
|
||||
from sqlalchemy.orm import relationship
|
||||
from sqlalchemy.orm import validates
|
||||
|
||||
from spiffworkflow_backend.models.group import GroupModel
|
||||
from spiffworkflow_backend.services.authentication_service import (
|
||||
AuthenticationProviderTypes,
|
||||
)
|
||||
|
||||
|
||||
class UserNotFoundError(Exception):
|
||||
|
@ -28,15 +21,18 @@ class UserModel(SpiffworkflowBaseDBModel):
|
|||

    __tablename__ = "user"
    __table_args__ = (db.UniqueConstraint("service", "service_id", name="service_key"),)

    id = db.Column(db.Integer, primary_key=True)
    # server and service id must be unique, not username.
    username = db.Column(db.String(255), nullable=False, unique=False)
    uid = db.Column(db.String(50), unique=True)
    service = db.Column(db.String(50), nullable=False, unique=False)
    username = db.Column(
        db.String(255), nullable=False, unique=True
    )  # should always be a unique value
    service = db.Column(
        db.String(255), nullable=False, unique=False
    )  # not 'openid' -- google, aws
    service_id = db.Column(db.String(255), nullable=False, unique=False)
    name = db.Column(db.String(255))
    display_name = db.Column(db.String(255))
    email = db.Column(db.String(255))
    updated_at_in_seconds: int = db.Column(db.Integer)
    created_at_in_seconds: int = db.Column(db.Integer)

    user_group_assignments = relationship("UserGroupAssignmentModel", cascade="delete")  # type: ignore
    groups = relationship(  # type: ignore

@@ -47,21 +43,6 @@ class UserModel(SpiffworkflowBaseDBModel):
    )
    principal = relationship("PrincipalModel", uselist=False)  # type: ignore

    @validates("service")
    def validate_service(self, key: str, value: Any) -> str:
        """Validate_service."""
        try:
            ap_type = getattr(AuthenticationProviderTypes, value, None)
        except Exception as e:
            raise ValueError(f"invalid service type: {value}") from e
        if ap_type is not None:
            ap_value: str = ap_type.value
            return ap_value
        raise ApiError(
            error_code="invalid_service",
            message=f"Could not validate service with value: {value}",
        )

    def encode_auth_token(self) -> str:
        """Generate the Auth Token.

@@ -0,0 +1,34 @@
"""UserGroupAssignment."""
|
||||
from flask_bpmn.models.db import db
|
||||
from flask_bpmn.models.db import SpiffworkflowBaseDBModel
|
||||
from sqlalchemy import ForeignKey
|
||||
from sqlalchemy.orm import relationship
|
||||
|
||||
from spiffworkflow_backend.models.group import GroupModel
|
||||
|
||||
|
||||
class UserGroupAssignmentWaitingModel(SpiffworkflowBaseDBModel):
|
||||
"""When a user is assigned to a group, but that username does not exist.
|
||||
|
||||
We cache it here to be applied in the event the user does log in to the system.
|
||||
"""
|
||||
|
||||
MATCH_ALL_USERS = "*"
|
||||
__tablename__ = "user_group_assignment_waiting"
|
||||
__table_args__ = (
|
||||
db.UniqueConstraint(
|
||||
"username", "group_id", name="user_group_assignment_staged_unique"
|
||||
),
|
||||
)
|
||||
|
||||
id = db.Column(db.Integer, primary_key=True)
|
||||
username = db.Column(db.String(255), nullable=False)
|
||||
group_id = db.Column(ForeignKey(GroupModel.id), nullable=False)
|
||||
|
||||
group = relationship("GroupModel", overlaps="groups,user_group_assignments_waiting,users") # type: ignore
|
||||
|
||||
def is_match_all(self) -> bool:
|
||||
"""Is_match_all."""
|
||||
if self.username == self.MATCH_ALL_USERS:
|
||||
return True
|
||||
return False
|
|
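
A rough sketch of how these waiting rows could be consumed when a user eventually logs in (the helper name and import path are assumptions, not part of this commit; MATCH_ALL_USERS rows are left in place so they keep matching future users):

from flask_bpmn.models.db import db

from spiffworkflow_backend.models.user import UserModel
from spiffworkflow_backend.models.user_group_assignment import (
    UserGroupAssignmentWaitingModel,
)


def apply_waiting_group_assignments(user: UserModel) -> None:
    # Hypothetical login hook: pick up rows staged for this username or for "*".
    waiting = UserGroupAssignmentWaitingModel.query.filter(
        UserGroupAssignmentWaitingModel.username.in_(
            [user.username, UserGroupAssignmentWaitingModel.MATCH_ALL_USERS]
        )
    ).all()
    for assignment in waiting:
        user.groups.append(assignment.group)
        if not assignment.is_match_all():
            # One-shot assignments are consumed; "*" rows keep matching new users.
            db.session.delete(assignment)
    db.session.commit()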
@@ -141,7 +141,7 @@ def process_model_save(process_model_id: str, file_name: str) -> Union[str, Resp
@admin_blueprint.route("/process-models/<process_model_id>/run", methods=["GET"])
def process_model_run(process_model_id: str) -> Union[str, Response]:
    """Process_model_run."""
    user = UserService.create_user("internal", "Mr. Test", username="Mr. Test")
    user = UserService.create_user("Mr. Test", "internal", "Mr. Test")
    process_instance = (
        ProcessInstanceService.create_process_instance_from_process_model_identifier(
            process_model_id, user

@@ -0,0 +1,13 @@
"""APIs for dealing with process groups, process models, and process instances."""
|
||||
import json
|
||||
|
||||
import flask.wrappers
|
||||
from flask.wrappers import Response
|
||||
|
||||
from spiffworkflow_backend.models.process_instance import ProcessInstanceModel
|
||||
|
||||
|
||||
def status() -> flask.wrappers.Response:
|
||||
"""Status."""
|
||||
ProcessInstanceModel.query.filter().first()
|
||||
return Response(json.dumps({"ok": True}), status=200, mimetype="application/json")
|
|
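
status() doubles as a database health check, since the throwaway query fails if the connection is down. A minimal probe of the endpoint might look like this (host, port, and path prefix are assumptions, not part of this commit):

import requests

# Expect {"ok": true} with a 200 once the backend and its database are up.
response = requests.get("http://localhost:7000/v1.0/status", timeout=5)
assert response.status_code == 200
assert response.json() == {"ok": True}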

@@ -0,0 +1,176 @@
"""APIs for dealing with process groups, process models, and process instances."""
|
||||
import json
|
||||
from typing import Any
|
||||
from typing import Dict
|
||||
from typing import Optional
|
||||
|
||||
import flask.wrappers
|
||||
from flask import g
|
||||
from flask import jsonify
|
||||
from flask import make_response
|
||||
from flask.wrappers import Response
|
||||
from flask_bpmn.api.api_error import ApiError
|
||||
|
||||
from spiffworkflow_backend.models.message_correlation import MessageCorrelationModel
|
||||
from spiffworkflow_backend.models.message_instance import MessageInstanceModel
|
||||
from spiffworkflow_backend.models.message_model import MessageModel
|
||||
from spiffworkflow_backend.models.message_triggerable_process_model import (
|
||||
MessageTriggerableProcessModel,
|
||||
)
|
||||
from spiffworkflow_backend.models.process_instance import ProcessInstanceModel
|
||||
from spiffworkflow_backend.models.process_instance import ProcessInstanceModelSchema
|
||||
from spiffworkflow_backend.routes.process_api_blueprint import (
|
||||
_find_process_instance_by_id_or_raise,
|
||||
)
|
||||
from spiffworkflow_backend.services.message_service import MessageService
|
||||
|
||||
|
||||
def message_instance_list(
|
||||
process_instance_id: Optional[int] = None,
|
||||
page: int = 1,
|
||||
per_page: int = 100,
|
||||
) -> flask.wrappers.Response:
|
||||
"""Message_instance_list."""
|
||||
# to make sure the process instance exists
|
||||
message_instances_query = MessageInstanceModel.query
|
||||
|
||||
if process_instance_id:
|
||||
message_instances_query = message_instances_query.filter_by(
|
||||
process_instance_id=process_instance_id
|
||||
)
|
||||
|
||||
message_instances = (
|
||||
message_instances_query.order_by(
|
||||
MessageInstanceModel.created_at_in_seconds.desc(), # type: ignore
|
||||
MessageInstanceModel.id.desc(), # type: ignore
|
||||
)
|
||||
.join(MessageModel, MessageModel.id == MessageInstanceModel.message_model_id)
|
||||
.join(ProcessInstanceModel)
|
||||
.add_columns(
|
||||
MessageModel.identifier.label("message_identifier"),
|
||||
ProcessInstanceModel.process_model_identifier,
|
||||
ProcessInstanceModel.process_model_display_name,
|
||||
)
|
||||
.paginate(page=page, per_page=per_page, error_out=False)
|
||||
)
|
||||
|
||||
for message_instance in message_instances:
|
||||
message_correlations: dict = {}
|
||||
for (
|
||||
mcmi
|
||||
) in (
|
||||
message_instance.MessageInstanceModel.message_correlations_message_instances
|
||||
):
|
||||
mc = MessageCorrelationModel.query.filter_by(
|
||||
id=mcmi.message_correlation_id
|
||||
).all()
|
||||
for m in mc:
|
||||
if m.name not in message_correlations:
|
||||
message_correlations[m.name] = {}
|
||||
message_correlations[m.name][
|
||||
m.message_correlation_property.identifier
|
||||
] = m.value
|
||||
message_instance.MessageInstanceModel.message_correlations = (
|
||||
message_correlations
|
||||
)
|
||||
|
||||
response_json = {
|
||||
"results": message_instances.items,
|
||||
"pagination": {
|
||||
"count": len(message_instances.items),
|
||||
"total": message_instances.total,
|
||||
"pages": message_instances.pages,
|
||||
},
|
||||
}
|
||||
|
||||
return make_response(jsonify(response_json), 200)
|
||||
|
||||
|
||||
# body: {
|
||||
# payload: dict,
|
||||
# process_instance_id: Optional[int],
|
||||
# }
|
||||
def message_start(
|
||||
message_identifier: str,
|
||||
body: Dict[str, Any],
|
||||
) -> flask.wrappers.Response:
|
||||
"""Message_start."""
|
||||
message_model = MessageModel.query.filter_by(identifier=message_identifier).first()
|
||||
if message_model is None:
|
||||
raise (
|
||||
ApiError(
|
||||
error_code="unknown_message",
|
||||
message=f"Could not find message with identifier: {message_identifier}",
|
||||
status_code=404,
|
||||
)
|
||||
)
|
||||
|
||||
if "payload" not in body:
|
||||
raise (
|
||||
ApiError(
|
||||
error_code="missing_payload",
|
||||
message="Body is missing payload.",
|
||||
status_code=400,
|
||||
)
|
||||
)
|
||||
|
||||
process_instance = None
|
||||
if "process_instance_id" in body:
|
||||
# to make sure we have a valid process_instance_id
|
||||
process_instance = _find_process_instance_by_id_or_raise(
|
||||
body["process_instance_id"]
|
||||
)
|
||||
|
||||
message_instance = MessageInstanceModel.query.filter_by(
|
||||
process_instance_id=process_instance.id,
|
||||
message_model_id=message_model.id,
|
||||
message_type="receive",
|
||||
status="ready",
|
||||
).first()
|
||||
if message_instance is None:
|
||||
raise (
|
||||
ApiError(
|
||||
error_code="cannot_find_waiting_message",
|
||||
message=(
|
||||
"Could not find waiting message for identifier"
|
||||
f" {message_identifier} and process instance"
|
||||
f" {process_instance.id}"
|
||||
),
|
||||
status_code=400,
|
||||
)
|
||||
)
|
||||
MessageService.process_message_receive(
|
||||
message_instance, message_model.name, body["payload"]
|
||||
)
|
||||
|
||||
else:
|
||||
message_triggerable_process_model = (
|
||||
MessageTriggerableProcessModel.query.filter_by(
|
||||
message_model_id=message_model.id
|
||||
).first()
|
||||
)
|
||||
|
||||
if message_triggerable_process_model is None:
|
||||
raise (
|
||||
ApiError(
|
||||
error_code="cannot_start_message",
|
||||
message=(
|
||||
"Message with identifier cannot be start with message:"
|
||||
f" {message_identifier}"
|
||||
),
|
||||
status_code=400,
|
||||
)
|
||||
)
|
||||
|
||||
process_instance = MessageService.process_message_triggerable_process_model(
|
||||
message_triggerable_process_model,
|
||||
message_model.name,
|
||||
body["payload"],
|
||||
g.user,
|
||||
)
|
||||
|
||||
return Response(
|
||||
json.dumps(ProcessInstanceModelSchema().dump(process_instance)),
|
||||
status=200,
|
||||
mimetype="application/json",
|
||||
)
|
|
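
For illustration, a hedged sketch of driving message_start over HTTP (the route shape and host are assumptions; the body keys come from the comment above message_start):

import requests

# With process_instance_id: deliver the payload to a waiting receive message.
# Without it: start a new instance of the message-triggerable process model.
body = {"payload": {"po_number": 1001}, "process_instance_id": 42}
response = requests.post(
    "http://localhost:7000/v1.0/messages/order_received",
    json=body,
    timeout=10,
)
process_instance = response.json()  # serialized via ProcessInstanceModelSchema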
@@ -111,6 +111,7 @@ def token() -> dict:
            "iat": time.time(),
            "exp": time.time() + 86400,  # Expire after a day.
            "sub": user_name,
            "email": user_details["email"],
            "preferred_username": user_details.get("preferred_username", user_name),
        },
        client_secret,

File diff suppressed because it is too large
@@ -0,0 +1,130 @@
"""APIs for dealing with process groups, process models, and process instances."""
import json
from typing import Any
from typing import Optional

import flask.wrappers
from flask import g
from flask import jsonify
from flask import make_response
from flask.wrappers import Response
from flask_bpmn.api.api_error import ApiError

from spiffworkflow_backend.exceptions.process_entity_not_found_error import (
    ProcessEntityNotFoundError,
)
from spiffworkflow_backend.models.process_group import ProcessGroup
from spiffworkflow_backend.models.process_group import ProcessGroupSchema
from spiffworkflow_backend.routes.process_api_blueprint import _commit_and_push_to_git
from spiffworkflow_backend.routes.process_api_blueprint import (
    _un_modify_modified_process_model_id,
)
from spiffworkflow_backend.services.process_model_service import ProcessModelService


def process_group_create(body: dict) -> flask.wrappers.Response:
    """Add_process_group."""
    process_group = ProcessGroup(**body)
    ProcessModelService.add_process_group(process_group)
    _commit_and_push_to_git(
        f"User: {g.user.username} added process group {process_group.id}"
    )
    return make_response(jsonify(process_group), 201)


def process_group_delete(modified_process_group_id: str) -> flask.wrappers.Response:
    """Process_group_delete."""
    process_group_id = _un_modify_modified_process_model_id(modified_process_group_id)
    ProcessModelService().process_group_delete(process_group_id)
    _commit_and_push_to_git(
        f"User: {g.user.username} deleted process group {process_group_id}"
    )
    return Response(json.dumps({"ok": True}), status=200, mimetype="application/json")


def process_group_update(
    modified_process_group_id: str, body: dict
) -> flask.wrappers.Response:
    """Process Group Update."""
    body_include_list = ["display_name", "description"]
    body_filtered = {
        include_item: body[include_item]
        for include_item in body_include_list
        if include_item in body
    }

    process_group_id = _un_modify_modified_process_model_id(modified_process_group_id)
    process_group = ProcessGroup(id=process_group_id, **body_filtered)
    ProcessModelService.update_process_group(process_group)
    _commit_and_push_to_git(
        f"User: {g.user.username} updated process group {process_group_id}"
    )
    return make_response(jsonify(process_group), 200)


def process_group_list(
    process_group_identifier: Optional[str] = None, page: int = 1, per_page: int = 100
) -> flask.wrappers.Response:
    """Process_group_list."""
    if process_group_identifier is not None:
        process_groups = ProcessModelService.get_process_groups(
            process_group_identifier
        )
    else:
        process_groups = ProcessModelService.get_process_groups()
    batch = ProcessModelService().get_batch(
        items=process_groups, page=page, per_page=per_page
    )
    pages = len(process_groups) // per_page
    remainder = len(process_groups) % per_page
    if remainder > 0:
        pages += 1

    response_json = {
        "results": ProcessGroupSchema(many=True).dump(batch),
        "pagination": {
            "count": len(batch),
            "total": len(process_groups),
            "pages": pages,
        },
    }
    return Response(json.dumps(response_json), status=200, mimetype="application/json")


def process_group_show(
    modified_process_group_id: str,
) -> Any:
    """Process_group_show."""
    process_group_id = _un_modify_modified_process_model_id(modified_process_group_id)
    try:
        process_group = ProcessModelService.get_process_group(process_group_id)
    except ProcessEntityNotFoundError as exception:
        raise (
            ApiError(
                error_code="process_group_cannot_be_found",
                message=f"Process group cannot be found: {process_group_id}",
                status_code=400,
            )
        ) from exception

    process_group.parent_groups = ProcessModelService.get_parent_group_array(
        process_group.id
    )
    return make_response(jsonify(process_group), 200)


def process_group_move(
    modified_process_group_identifier: str, new_location: str
) -> flask.wrappers.Response:
    """Process_group_move."""
    original_process_group_id = _un_modify_modified_process_model_id(
        modified_process_group_identifier
    )
    new_process_group = ProcessModelService().process_group_move(
        original_process_group_id, new_location
    )
    _commit_and_push_to_git(
        f"User: {g.user.username} moved process group {original_process_group_id} to"
        f" {new_process_group.id}"
    )
    return make_response(jsonify(new_process_group), 200)
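
The page arithmetic in process_group_list (floor division plus a remainder bump) is ceiling division written out by hand; a minimal equivalent sketch:

import math


def page_count(total: int, per_page: int) -> int:
    # math.ceil(130 / 100) == 2, matching pages=1 plus the remainder bump above.
    return math.ceil(total / per_page)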
@@ -0,0 +1,693 @@
"""APIs for dealing with process groups, process models, and process instances."""
import json
from typing import Any
from typing import Dict
from typing import Optional

import flask.wrappers
from flask import current_app
from flask import g
from flask import jsonify
from flask import make_response
from flask import request
from flask.wrappers import Response
from flask_bpmn.api.api_error import ApiError
from flask_bpmn.models.db import db
from SpiffWorkflow.task import TaskState  # type: ignore
from sqlalchemy import and_
from sqlalchemy import or_

from spiffworkflow_backend.models.human_task import HumanTaskModel
from spiffworkflow_backend.models.human_task_user import HumanTaskUserModel
from spiffworkflow_backend.models.process_instance import ProcessInstanceApiSchema
from spiffworkflow_backend.models.process_instance import (
    ProcessInstanceCannotBeDeletedError,
)
from spiffworkflow_backend.models.process_instance import ProcessInstanceModel
from spiffworkflow_backend.models.process_instance import ProcessInstanceModelSchema
from spiffworkflow_backend.models.process_instance_metadata import (
    ProcessInstanceMetadataModel,
)
from spiffworkflow_backend.models.process_instance_report import (
    ProcessInstanceReportModel,
)
from spiffworkflow_backend.models.spec_reference import SpecReferenceCache
from spiffworkflow_backend.models.spec_reference import SpecReferenceNotFoundError
from spiffworkflow_backend.models.spiff_logging import SpiffLoggingModel
from spiffworkflow_backend.models.spiff_step_details import SpiffStepDetailsModel
from spiffworkflow_backend.models.user import UserModel
from spiffworkflow_backend.routes.process_api_blueprint import (
    _find_process_instance_by_id_or_raise,
)
from spiffworkflow_backend.routes.process_api_blueprint import _get_process_model
from spiffworkflow_backend.routes.process_api_blueprint import (
    _un_modify_modified_process_model_id,
)
from spiffworkflow_backend.services.error_handling_service import ErrorHandlingService
from spiffworkflow_backend.services.git_service import GitCommandError
from spiffworkflow_backend.services.git_service import GitService
from spiffworkflow_backend.services.message_service import MessageService
from spiffworkflow_backend.services.process_instance_processor import (
    ProcessInstanceProcessor,
)
from spiffworkflow_backend.services.process_instance_report_service import (
    ProcessInstanceReportFilter,
)
from spiffworkflow_backend.services.process_instance_report_service import (
    ProcessInstanceReportService,
)
from spiffworkflow_backend.services.process_instance_service import (
    ProcessInstanceService,
)
from spiffworkflow_backend.services.process_model_service import ProcessModelService
from spiffworkflow_backend.services.spec_file_service import SpecFileService

def process_instance_create(
    modified_process_model_identifier: str,
) -> flask.wrappers.Response:
    """Create_process_instance."""
    process_model_identifier = _un_modify_modified_process_model_id(
        modified_process_model_identifier
    )
    process_instance = (
        ProcessInstanceService.create_process_instance_from_process_model_identifier(
            process_model_identifier, g.user
        )
    )
    return Response(
        json.dumps(ProcessInstanceModelSchema().dump(process_instance)),
        status=201,
        mimetype="application/json",
    )


def process_instance_run(
    modified_process_model_identifier: str,
    process_instance_id: int,
    do_engine_steps: bool = True,
) -> flask.wrappers.Response:
    """Process_instance_run."""
    process_instance = ProcessInstanceService().get_process_instance(
        process_instance_id
    )
    if process_instance.status != "not_started":
        raise ApiError(
            error_code="process_instance_not_runnable",
            message=(
                f"Process Instance ({process_instance.id}) is currently running or has"
                " already run."
            ),
            status_code=400,
        )

    processor = ProcessInstanceProcessor(process_instance)

    if do_engine_steps:
        try:
            processor.do_engine_steps(save=True)
        except ApiError as e:
            ErrorHandlingService().handle_error(processor, e)
            raise e
        except Exception as e:
            ErrorHandlingService().handle_error(processor, e)
            task = processor.bpmn_process_instance.last_task
            raise ApiError.from_task(
                error_code="unknown_exception",
                message=f"An unknown error occurred. Original error: {e}",
                status_code=400,
                task=task,
            ) from e

        if not current_app.config["RUN_BACKGROUND_SCHEDULER"]:
            MessageService.process_message_instances()

    process_instance_api = ProcessInstanceService.processor_to_process_instance_api(
        processor
    )
    process_instance_data = processor.get_data()
    process_instance_metadata = ProcessInstanceApiSchema().dump(process_instance_api)
    process_instance_metadata["data"] = process_instance_data
    return Response(
        json.dumps(process_instance_metadata), status=200, mimetype="application/json"
    )

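Taken together, the two endpoints above implement a create-then-run sequence; a condensed sketch using the same service calls (the driver function itself is hypothetical, not part of this commit):

def create_and_run(process_model_identifier: str, user) -> None:
    # Mirrors process_instance_create followed by process_instance_run.
    process_instance = (
        ProcessInstanceService.create_process_instance_from_process_model_identifier(
            process_model_identifier, user
        )
    )
    processor = ProcessInstanceProcessor(process_instance)
    # Failures here are caught and wrapped in ApiError by the endpoint above.
    processor.do_engine_steps(save=True)
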
def process_instance_terminate(
    process_instance_id: int,
    modified_process_model_identifier: str,
) -> flask.wrappers.Response:
    """Process_instance_terminate."""
    process_instance = ProcessInstanceService().get_process_instance(
        process_instance_id
    )
    processor = ProcessInstanceProcessor(process_instance)
    processor.terminate()
    return Response(json.dumps({"ok": True}), status=200, mimetype="application/json")


def process_instance_suspend(
    process_instance_id: int,
    modified_process_model_identifier: str,
) -> flask.wrappers.Response:
    """Process_instance_suspend."""
    process_instance = ProcessInstanceService().get_process_instance(
        process_instance_id
    )
    processor = ProcessInstanceProcessor(process_instance)
    processor.suspend()
    return Response(json.dumps({"ok": True}), status=200, mimetype="application/json")


def process_instance_resume(
    process_instance_id: int,
    modified_process_model_identifier: str,
) -> flask.wrappers.Response:
    """Process_instance_resume."""
    process_instance = ProcessInstanceService().get_process_instance(
        process_instance_id
    )
    processor = ProcessInstanceProcessor(process_instance)
    processor.resume()
    return Response(json.dumps({"ok": True}), status=200, mimetype="application/json")


def process_instance_log_list(
    modified_process_model_identifier: str,
    process_instance_id: int,
    page: int = 1,
    per_page: int = 100,
    detailed: bool = False,
) -> flask.wrappers.Response:
    """Process_instance_log_list."""
    # to make sure the process instance exists
    process_instance = _find_process_instance_by_id_or_raise(process_instance_id)

    log_query = SpiffLoggingModel.query.filter(
        SpiffLoggingModel.process_instance_id == process_instance.id
    )
    if not detailed:
        log_query = log_query.filter(SpiffLoggingModel.message.in_(["State change to COMPLETED"]))  # type: ignore

    logs = (
        log_query.order_by(SpiffLoggingModel.timestamp.desc())  # type: ignore
        .join(
            UserModel, UserModel.id == SpiffLoggingModel.current_user_id, isouter=True
        )  # isouter since if we don't have a user, we still want the log
        .add_columns(
            UserModel.username,
        )
        .paginate(page=page, per_page=per_page, error_out=False)
    )

    response_json = {
        "results": logs.items,
        "pagination": {
            "count": len(logs.items),
            "total": logs.total,
            "pages": logs.pages,
        },
    }

    return make_response(jsonify(response_json), 200)

def process_instance_list_for_me(
    process_model_identifier: Optional[str] = None,
    page: int = 1,
    per_page: int = 100,
    start_from: Optional[int] = None,
    start_to: Optional[int] = None,
    end_from: Optional[int] = None,
    end_to: Optional[int] = None,
    process_status: Optional[str] = None,
    user_filter: Optional[bool] = False,
    report_identifier: Optional[str] = None,
    report_id: Optional[int] = None,
    user_group_identifier: Optional[str] = None,
) -> flask.wrappers.Response:
    """Process_instance_list_for_me."""
    return process_instance_list(
        process_model_identifier=process_model_identifier,
        page=page,
        per_page=per_page,
        start_from=start_from,
        start_to=start_to,
        end_from=end_from,
        end_to=end_to,
        process_status=process_status,
        user_filter=user_filter,
        report_identifier=report_identifier,
        report_id=report_id,
        user_group_identifier=user_group_identifier,
        with_relation_to_me=True,
    )


def process_instance_list(
    process_model_identifier: Optional[str] = None,
    page: int = 1,
    per_page: int = 100,
    start_from: Optional[int] = None,
    start_to: Optional[int] = None,
    end_from: Optional[int] = None,
    end_to: Optional[int] = None,
    process_status: Optional[str] = None,
    with_relation_to_me: Optional[bool] = None,
    user_filter: Optional[bool] = False,
    report_identifier: Optional[str] = None,
    report_id: Optional[int] = None,
    user_group_identifier: Optional[str] = None,
) -> flask.wrappers.Response:
    """Process_instance_list."""
    process_instance_report = ProcessInstanceReportService.report_with_identifier(
        g.user, report_id, report_identifier
    )

    if user_filter:
        report_filter = ProcessInstanceReportFilter(
            process_model_identifier=process_model_identifier,
            user_group_identifier=user_group_identifier,
            start_from=start_from,
            start_to=start_to,
            end_from=end_from,
            end_to=end_to,
            with_relation_to_me=with_relation_to_me,
            process_status=process_status.split(",") if process_status else None,
        )
    else:
        report_filter = (
            ProcessInstanceReportService.filter_from_metadata_with_overrides(
                process_instance_report=process_instance_report,
                process_model_identifier=process_model_identifier,
                user_group_identifier=user_group_identifier,
                start_from=start_from,
                start_to=start_to,
                end_from=end_from,
                end_to=end_to,
                process_status=process_status,
                with_relation_to_me=with_relation_to_me,
            )
        )

    response_json = ProcessInstanceReportService.run_process_instance_report(
        report_filter=report_filter,
        process_instance_report=process_instance_report,
        page=page,
        per_page=per_page,
        user=g.user,
    )

    return make_response(jsonify(response_json), 200)

def process_instance_report_column_list() -> flask.wrappers.Response:
    """Process_instance_report_column_list."""
    table_columns = ProcessInstanceReportService.builtin_column_options()
    columns_for_metadata = (
        db.session.query(ProcessInstanceMetadataModel.key)
        .order_by(ProcessInstanceMetadataModel.key)
        .distinct()  # type: ignore
        .all()
    )
    columns_for_metadata_strings = [
        {"Header": i[0], "accessor": i[0], "filterable": True}
        for i in columns_for_metadata
    ]
    return make_response(jsonify(table_columns + columns_for_metadata_strings), 200)


def process_instance_show_for_me(
    modified_process_model_identifier: str,
    process_instance_id: int,
    process_identifier: Optional[str] = None,
) -> flask.wrappers.Response:
    """Process_instance_show_for_me."""
    process_instance = _find_process_instance_for_me_or_raise(process_instance_id)
    return _get_process_instance(
        process_instance=process_instance,
        modified_process_model_identifier=modified_process_model_identifier,
        process_identifier=process_identifier,
    )


def process_instance_show(
    modified_process_model_identifier: str,
    process_instance_id: int,
    process_identifier: Optional[str] = None,
) -> flask.wrappers.Response:
    """Process_instance_show."""
    process_instance = _find_process_instance_by_id_or_raise(process_instance_id)
    return _get_process_instance(
        process_instance=process_instance,
        modified_process_model_identifier=modified_process_model_identifier,
        process_identifier=process_identifier,
    )

def process_instance_delete(
    process_instance_id: int, modified_process_model_identifier: str
) -> flask.wrappers.Response:
    """Process_instance_delete."""
    process_instance = _find_process_instance_by_id_or_raise(process_instance_id)

    if not process_instance.has_terminal_status():
        raise ProcessInstanceCannotBeDeletedError(
            f"Process instance ({process_instance.id}) cannot be deleted since it does"
            f" not have a terminal status. Current status is {process_instance.status}."
        )

    # (Pdb) db.session.delete
    # <bound method delete of <sqlalchemy.orm.scoping.scoped_session object at 0x103eaab30>>
    db.session.query(SpiffLoggingModel).filter_by(
        process_instance_id=process_instance.id
    ).delete()
    db.session.query(SpiffStepDetailsModel).filter_by(
        process_instance_id=process_instance.id
    ).delete()
    db.session.delete(process_instance)
    db.session.commit()
    return Response(json.dumps({"ok": True}), status=200, mimetype="application/json")

def process_instance_report_list(
    page: int = 1, per_page: int = 100
) -> flask.wrappers.Response:
    """Process_instance_report_list."""
    process_instance_reports = ProcessInstanceReportModel.query.filter_by(
        created_by_id=g.user.id,
    ).all()

    return make_response(jsonify(process_instance_reports), 200)


def process_instance_report_create(body: Dict[str, Any]) -> flask.wrappers.Response:
    """Process_instance_report_create."""
    process_instance_report = ProcessInstanceReportModel.create_report(
        identifier=body["identifier"],
        user=g.user,
        report_metadata=body["report_metadata"],
    )

    return make_response(jsonify(process_instance_report), 201)


def process_instance_report_update(
    report_id: int,
    body: Dict[str, Any],
) -> flask.wrappers.Response:
    """Process_instance_report_update."""
    process_instance_report = ProcessInstanceReportModel.query.filter_by(
        id=report_id,
        created_by_id=g.user.id,
    ).first()
    if process_instance_report is None:
        raise ApiError(
            error_code="unknown_process_instance_report",
            message="Unknown process instance report",
            status_code=404,
        )

    process_instance_report.report_metadata = body["report_metadata"]
    db.session.commit()

    return make_response(jsonify(process_instance_report), 201)


def process_instance_report_delete(
    report_id: int,
) -> flask.wrappers.Response:
    """Process_instance_report_delete."""
    process_instance_report = ProcessInstanceReportModel.query.filter_by(
        id=report_id,
        created_by_id=g.user.id,
    ).first()
    if process_instance_report is None:
        raise ApiError(
            error_code="unknown_process_instance_report",
            message="Unknown process instance report",
            status_code=404,
        )

    db.session.delete(process_instance_report)
    db.session.commit()

    return Response(json.dumps({"ok": True}), status=200, mimetype="application/json")

def process_instance_report_show(
    report_id: int,
    page: int = 1,
    per_page: int = 100,
) -> flask.wrappers.Response:
    """Process_instance_report_show."""
    process_instances = ProcessInstanceModel.query.order_by(
        ProcessInstanceModel.start_in_seconds.desc(), ProcessInstanceModel.id.desc()  # type: ignore
    ).paginate(page=page, per_page=per_page, error_out=False)

    process_instance_report = ProcessInstanceReportModel.query.filter_by(
        id=report_id,
        created_by_id=g.user.id,
    ).first()
    if process_instance_report is None:
        raise ApiError(
            error_code="unknown_process_instance_report",
            message="Unknown process instance report",
            status_code=404,
        )

    substitution_variables = request.args.to_dict()
    result_dict = process_instance_report.generate_report(
        process_instances.items, substitution_variables
    )

    # update this if we go back to a database query instead of filtering in memory
    result_dict["pagination"] = {
        "count": len(result_dict["results"]),
        "total": len(result_dict["results"]),
        "pages": 1,
    }

    return Response(json.dumps(result_dict), status=200, mimetype="application/json")

def process_instance_task_list_without_task_data_for_me(
    modified_process_model_identifier: str,
    process_instance_id: int,
    all_tasks: bool = False,
    spiff_step: int = 0,
) -> flask.wrappers.Response:
    """Process_instance_task_list_without_task_data_for_me."""
    process_instance = _find_process_instance_for_me_or_raise(process_instance_id)
    return process_instance_task_list(
        modified_process_model_identifier,
        process_instance,
        all_tasks,
        spiff_step,
        get_task_data=False,
    )


def process_instance_task_list_without_task_data(
    modified_process_model_identifier: str,
    process_instance_id: int,
    all_tasks: bool = False,
    spiff_step: int = 0,
) -> flask.wrappers.Response:
    """Process_instance_task_list_without_task_data."""
    process_instance = _find_process_instance_by_id_or_raise(process_instance_id)
    return process_instance_task_list(
        modified_process_model_identifier,
        process_instance,
        all_tasks,
        spiff_step,
        get_task_data=False,
    )


def process_instance_task_list_with_task_data(
    modified_process_model_identifier: str,
    process_instance_id: int,
    all_tasks: bool = False,
    spiff_step: int = 0,
) -> flask.wrappers.Response:
    """Process_instance_task_list_with_task_data."""
    process_instance = _find_process_instance_by_id_or_raise(process_instance_id)
    return process_instance_task_list(
        modified_process_model_identifier,
        process_instance,
        all_tasks,
        spiff_step,
        get_task_data=True,
    )

def process_instance_task_list(
    _modified_process_model_identifier: str,
    process_instance: ProcessInstanceModel,
    all_tasks: bool = False,
    spiff_step: int = 0,
    get_task_data: bool = False,
) -> flask.wrappers.Response:
    """Process_instance_task_list."""
    if spiff_step > 0:
        step_detail = (
            db.session.query(SpiffStepDetailsModel)
            .filter(
                SpiffStepDetailsModel.process_instance_id == process_instance.id,
                SpiffStepDetailsModel.spiff_step == spiff_step,
            )
            .first()
        )
        if step_detail is not None and process_instance.bpmn_json is not None:
            bpmn_json = json.loads(process_instance.bpmn_json)
            bpmn_json["tasks"] = step_detail.task_json["tasks"]
            bpmn_json["subprocesses"] = step_detail.task_json["subprocesses"]
            process_instance.bpmn_json = json.dumps(bpmn_json)

    processor = ProcessInstanceProcessor(process_instance)

    spiff_tasks = None
    if all_tasks:
        spiff_tasks = processor.bpmn_process_instance.get_tasks(TaskState.ANY_MASK)
    else:
        spiff_tasks = processor.get_all_user_tasks()

    tasks = []
    for spiff_task in spiff_tasks:
        task = ProcessInstanceService.spiff_task_to_api_task(processor, spiff_task)
        if get_task_data:
            task.data = spiff_task.data
        tasks.append(task)

    return make_response(jsonify(tasks), 200)

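The spiff_step branch above rewinds the serialized workflow by splicing a historical step's task_json over the live bpmn_json before re-hydrating the processor; condensed as a standalone sketch (the function name is hypothetical):

import json


def splice_step(bpmn_json_string: str, step_task_json: dict) -> str:
    # Only task state and subprocess state are rewound; all other
    # serialized workflow fields stay at their current values.
    bpmn_json = json.loads(bpmn_json_string)
    bpmn_json["tasks"] = step_task_json["tasks"]
    bpmn_json["subprocesses"] = step_task_json["subprocesses"]
    return json.dumps(bpmn_json)
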
def process_instance_reset(
    process_instance_id: int,
    modified_process_model_identifier: str,
    spiff_step: int = 0,
) -> flask.wrappers.Response:
    """Process_instance_reset."""
    process_instance = ProcessInstanceService().get_process_instance(
        process_instance_id
    )
    step_detail = (
        db.session.query(SpiffStepDetailsModel)
        .filter(
            SpiffStepDetailsModel.process_instance_id == process_instance.id,
            SpiffStepDetailsModel.spiff_step == spiff_step,
        )
        .first()
    )
    if step_detail is not None and process_instance.bpmn_json is not None:
        bpmn_json = json.loads(process_instance.bpmn_json)
        bpmn_json["tasks"] = step_detail.task_json["tasks"]
        bpmn_json["subprocesses"] = step_detail.task_json["subprocesses"]
        process_instance.bpmn_json = json.dumps(bpmn_json)

    db.session.add(process_instance)
    try:
        db.session.commit()
    except Exception as e:
        db.session.rollback()
        raise ApiError(
            error_code="reset_process_instance_error",
            message=f"Could not update the Instance. Original error is {e}",
        ) from e

    return Response(
        json.dumps(ProcessInstanceModelSchema().dump(process_instance)),
        status=200,
        mimetype="application/json",
    )

def _get_process_instance(
    modified_process_model_identifier: str,
    process_instance: ProcessInstanceModel,
    process_identifier: Optional[str] = None,
) -> flask.wrappers.Response:
    """_get_process_instance."""
    process_model_identifier = modified_process_model_identifier.replace(":", "/")
    try:
        current_version_control_revision = GitService.get_current_revision()
    except GitCommandError:
        current_version_control_revision = ""

    process_model_with_diagram = None
    name_of_file_with_diagram = None
    if process_identifier:
        spec_reference = SpecReferenceCache.query.filter_by(
            identifier=process_identifier, type="process"
        ).first()
        if spec_reference is None:
            raise SpecReferenceNotFoundError(
                "Could not find given process identifier in the cache:"
                f" {process_identifier}"
            )

        process_model_with_diagram = ProcessModelService.get_process_model(
            spec_reference.process_model_id
        )
        name_of_file_with_diagram = spec_reference.file_name
    else:
        process_model_with_diagram = _get_process_model(process_model_identifier)
        if process_model_with_diagram.primary_file_name:
            name_of_file_with_diagram = process_model_with_diagram.primary_file_name

    if process_model_with_diagram and name_of_file_with_diagram:
        if (
            process_instance.bpmn_version_control_identifier
            == current_version_control_revision
        ):
            bpmn_xml_file_contents = SpecFileService.get_data(
                process_model_with_diagram, name_of_file_with_diagram
            ).decode("utf-8")
        else:
            bpmn_xml_file_contents = GitService.get_instance_file_contents_for_revision(
                process_model_with_diagram,
                process_instance.bpmn_version_control_identifier,
                file_name=name_of_file_with_diagram,
            )
        process_instance.bpmn_xml_file_contents = bpmn_xml_file_contents

    return make_response(jsonify(process_instance), 200)

def _find_process_instance_for_me_or_raise(
    process_instance_id: int,
) -> ProcessInstanceModel:
    """_find_process_instance_for_me_or_raise."""
    process_instance: ProcessInstanceModel = (
        ProcessInstanceModel.query.filter_by(id=process_instance_id)
        .outerjoin(HumanTaskModel)
        .outerjoin(
            HumanTaskUserModel,
            and_(
                HumanTaskModel.id == HumanTaskUserModel.human_task_id,
                HumanTaskUserModel.user_id == g.user.id,
            ),
        )
        .filter(
            or_(
                HumanTaskUserModel.id.is_not(None),
                ProcessInstanceModel.process_initiator_id == g.user.id,
            )
        )
        .first()
    )

    if process_instance is None:
        raise (
            ApiError(
                error_code="process_instance_cannot_be_found",
                message=(
                    f"Process instance with id {process_instance_id} that is"
                    " associated with you cannot be found."
                ),
                status_code=400,
            )
        )

    return process_instance

@@ -0,0 +1,481 @@
"""APIs for dealing with process groups, process models, and process instances."""
|
||||
import json
|
||||
import os
|
||||
import re
|
||||
from typing import Any
|
||||
from typing import Dict
|
||||
from typing import Optional
|
||||
from typing import Union
|
||||
|
||||
import connexion # type: ignore
|
||||
import flask.wrappers
|
||||
from flask import current_app
|
||||
from flask import g
|
||||
from flask import jsonify
|
||||
from flask import make_response
|
||||
from flask.wrappers import Response
|
||||
from flask_bpmn.api.api_error import ApiError
|
||||
|
||||
from spiffworkflow_backend.models.file import FileSchema
|
||||
from spiffworkflow_backend.models.process_group import ProcessGroup
|
||||
from spiffworkflow_backend.models.process_instance_report import (
|
||||
ProcessInstanceReportModel,
|
||||
)
|
||||
from spiffworkflow_backend.models.process_model import ProcessModelInfo
|
||||
from spiffworkflow_backend.models.process_model import ProcessModelInfoSchema
|
||||
from spiffworkflow_backend.routes.process_api_blueprint import _commit_and_push_to_git
|
||||
from spiffworkflow_backend.routes.process_api_blueprint import _get_process_model
|
||||
from spiffworkflow_backend.routes.process_api_blueprint import (
|
||||
_un_modify_modified_process_model_id,
|
||||
)
|
||||
from spiffworkflow_backend.services.git_service import GitService
|
||||
from spiffworkflow_backend.services.git_service import MissingGitConfigsError
|
||||
from spiffworkflow_backend.services.process_instance_report_service import (
|
||||
ProcessInstanceReportService,
|
||||
)
|
||||
from spiffworkflow_backend.services.process_model_service import ProcessModelService
|
||||
from spiffworkflow_backend.services.spec_file_service import SpecFileService
|
||||
|
||||
|
||||
def process_model_create(
    modified_process_group_id: str, body: Dict[str, Union[str, bool, int]]
) -> flask.wrappers.Response:
    """Process_model_create."""
    body_include_list = [
        "id",
        "display_name",
        "primary_file_name",
        "primary_process_id",
        "description",
        "metadata_extraction_paths",
    ]
    body_filtered = {
        include_item: body[include_item]
        for include_item in body_include_list
        if include_item in body
    }

    _get_process_group_from_modified_identifier(modified_process_group_id)

    process_model_info = ProcessModelInfo(**body_filtered)  # type: ignore
    if process_model_info is None:
        raise ApiError(
            error_code="process_model_could_not_be_created",
            message=f"Process Model could not be created from given body: {body}",
            status_code=400,
        )

    ProcessModelService.add_process_model(process_model_info)
    _commit_and_push_to_git(
        f"User: {g.user.username} created process model {process_model_info.id}"
    )
    return Response(
        json.dumps(ProcessModelInfoSchema().dump(process_model_info)),
        status=201,
        mimetype="application/json",
    )

def process_model_delete(
    modified_process_model_identifier: str,
) -> flask.wrappers.Response:
    """Process_model_delete."""
    process_model_identifier = modified_process_model_identifier.replace(":", "/")
    ProcessModelService().process_model_delete(process_model_identifier)
    _commit_and_push_to_git(
        f"User: {g.user.username} deleted process model {process_model_identifier}"
    )
    return Response(json.dumps({"ok": True}), status=200, mimetype="application/json")


def process_model_update(
    modified_process_model_identifier: str, body: Dict[str, Union[str, bool, int]]
) -> Any:
    """Process_model_update."""
    process_model_identifier = modified_process_model_identifier.replace(":", "/")
    body_include_list = [
        "display_name",
        "primary_file_name",
        "primary_process_id",
        "description",
        "metadata_extraction_paths",
    ]
    body_filtered = {
        include_item: body[include_item]
        for include_item in body_include_list
        if include_item in body
    }

    process_model = _get_process_model(process_model_identifier)
    ProcessModelService.update_process_model(process_model, body_filtered)
    _commit_and_push_to_git(
        f"User: {g.user.username} updated process model {process_model_identifier}"
    )
    return ProcessModelInfoSchema().dump(process_model)

def process_model_show(modified_process_model_identifier: str) -> Any:
    """Process_model_show."""
    process_model_identifier = modified_process_model_identifier.replace(":", "/")
    process_model = _get_process_model(process_model_identifier)
    files = sorted(
        SpecFileService.get_files(process_model),
        key=lambda f: "" if f.name == process_model.primary_file_name else f.sort_index,
    )
    process_model.files = files
    for file in process_model.files:
        file.references = SpecFileService.get_references_for_file(file, process_model)

    process_model.parent_groups = ProcessModelService.get_parent_group_array(
        process_model.id
    )
    return make_response(jsonify(process_model), 200)

def process_model_move(
    modified_process_model_identifier: str, new_location: str
) -> flask.wrappers.Response:
    """Process_model_move."""
    original_process_model_id = _un_modify_modified_process_model_id(
        modified_process_model_identifier
    )
    new_process_model = ProcessModelService().process_model_move(
        original_process_model_id, new_location
    )
    _commit_and_push_to_git(
        f"User: {g.user.username} moved process model {original_process_model_id} to"
        f" {new_process_model.id}"
    )
    return make_response(jsonify(new_process_model), 200)


def process_model_publish(
    modified_process_model_identifier: str, branch_to_update: Optional[str] = None
) -> flask.wrappers.Response:
    """Process_model_publish."""
    if branch_to_update is None:
        branch_to_update = current_app.config["GIT_BRANCH_TO_PUBLISH_TO"]
    if branch_to_update is None:
        raise MissingGitConfigsError(
            "Missing config for GIT_BRANCH_TO_PUBLISH_TO. "
            "This is required for publishing process models"
        )
    process_model_identifier = _un_modify_modified_process_model_id(
        modified_process_model_identifier
    )
    pr_url = GitService().publish(process_model_identifier, branch_to_update)
    data = {"ok": True, "pr_url": pr_url}
    return Response(json.dumps(data), status=200, mimetype="application/json")

def process_model_list(
    process_group_identifier: Optional[str] = None,
    recursive: Optional[bool] = False,
    filter_runnable_by_user: Optional[bool] = False,
    page: int = 1,
    per_page: int = 100,
) -> flask.wrappers.Response:
    """Process model list!"""
    process_models = ProcessModelService.get_process_models(
        process_group_id=process_group_identifier,
        recursive=recursive,
        filter_runnable_by_user=filter_runnable_by_user,
    )
    batch = ProcessModelService().get_batch(
        process_models, page=page, per_page=per_page
    )
    pages = len(process_models) // per_page
    remainder = len(process_models) % per_page
    if remainder > 0:
        pages += 1
    response_json = {
        "results": ProcessModelInfoSchema(many=True).dump(batch),
        "pagination": {
            "count": len(batch),
            "total": len(process_models),
            "pages": pages,
        },
    }
    return Response(json.dumps(response_json), status=200, mimetype="application/json")

def process_model_file_update(
    modified_process_model_identifier: str, file_name: str
) -> flask.wrappers.Response:
    """Process_model_file_update."""
    process_model_identifier = modified_process_model_identifier.replace(":", "/")
    process_model = _get_process_model(process_model_identifier)

    request_file = _get_file_from_request()
    request_file_contents = request_file.stream.read()
    if not request_file_contents:
        raise ApiError(
            error_code="file_contents_empty",
            message="Given request file does not have any content",
            status_code=400,
        )

    SpecFileService.update_file(process_model, file_name, request_file_contents)
    _commit_and_push_to_git(
        f"User: {g.user.username} clicked save for"
        f" {process_model_identifier}/{file_name}"
    )

    return Response(json.dumps({"ok": True}), status=200, mimetype="application/json")


def process_model_file_delete(
    modified_process_model_identifier: str, file_name: str
) -> flask.wrappers.Response:
    """Process_model_file_delete."""
    process_model_identifier = modified_process_model_identifier.replace(":", "/")
    process_model = _get_process_model(process_model_identifier)
    try:
        SpecFileService.delete_file(process_model, file_name)
    except FileNotFoundError as exception:
        raise (
            ApiError(
                error_code="process_model_file_cannot_be_found",
                message=f"Process model file cannot be found: {file_name}",
                status_code=400,
            )
        ) from exception

    _commit_and_push_to_git(
        f"User: {g.user.username} deleted process model file"
        f" {process_model_identifier}/{file_name}"
    )
    return Response(json.dumps({"ok": True}), status=200, mimetype="application/json")

def process_model_file_create(
|
||||
modified_process_model_identifier: str,
|
||||
) -> flask.wrappers.Response:
|
||||
"""Process_model_file_create."""
|
||||
process_model_identifier = modified_process_model_identifier.replace(":", "/")
|
||||
process_model = _get_process_model(process_model_identifier)
|
||||
request_file = _get_file_from_request()
|
||||
if not request_file.filename:
|
||||
raise ApiError(
|
||||
error_code="could_not_get_filename",
|
||||
message="Could not get filename from request",
|
||||
status_code=400,
|
||||
)
|
||||
|
||||
file = SpecFileService.add_file(
|
||||
process_model, request_file.filename, request_file.stream.read()
|
||||
)
|
||||
file_contents = SpecFileService.get_data(process_model, file.name)
|
||||
file.file_contents = file_contents
|
||||
file.process_model_id = process_model.id
|
||||
_commit_and_push_to_git(
|
||||
f"User: {g.user.username} added process model file"
|
||||
f" {process_model_identifier}/{file.name}"
|
||||
)
|
||||
return Response(
|
||||
json.dumps(FileSchema().dump(file)), status=201, mimetype="application/json"
|
||||
)
|
||||
|
||||
|
||||
def process_model_file_show(
|
||||
modified_process_model_identifier: str, file_name: str
|
||||
) -> Any:
|
||||
"""Process_model_file_show."""
|
||||
process_model_identifier = modified_process_model_identifier.replace(":", "/")
|
||||
process_model = _get_process_model(process_model_identifier)
|
||||
files = SpecFileService.get_files(process_model, file_name)
|
||||
if len(files) == 0:
|
||||
raise ApiError(
|
||||
error_code="unknown file",
|
||||
message=(
                f"No information exists for file {file_name};"
                f" it does not exist in workflow {process_model_identifier}."
            ),
            status_code=404,
        )

    file = files[0]
    file_contents = SpecFileService.get_data(process_model, file.name)
    file.file_contents = file_contents
    file.process_model_id = process_model.id
    return FileSchema().dump(file)


# {
#     "natural_language_text": "Create a bug tracker process model \
#         with a bug-details form that collects summary, description, and priority"
# }
def process_model_create_with_natural_language(
    modified_process_group_id: str, body: Dict[str, str]
) -> flask.wrappers.Response:
    """Process_model_create_with_natural_language."""
    pattern = re.compile(
        r"Create a (?P<pm_name>.*?) process model with a (?P<form_name>.*?) form that"
        r" collects (?P<columns>.*)"
    )
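    # for the example body above, the named groups capture:
    #   pm_name -> "bug tracker", form_name -> "bug-details",
    #   columns -> "summary, description, and priority"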
    match = pattern.match(body["natural_language_text"])
    if match is None:
        raise ApiError(
            error_code="natural_language_text_not_yet_supported",
            message=(
                "Natural language text is not yet supported. Please use the form:"
                f" {pattern.pattern}"
            ),
            status_code=400,
        )
    process_model_display_name = match.group("pm_name")
    process_model_identifier = re.sub(r"[ _]", "-", process_model_display_name)
    process_model_identifier = re.sub(r"-{2,}", "-", process_model_identifier).lower()

    form_name = match.group("form_name")
    form_identifier = re.sub(r"[ _]", "-", form_name)
    form_identifier = re.sub(r"-{2,}", "-", form_identifier).lower()

    column_names = match.group("columns")
    columns = re.sub(r"(, (and )?)", ",", column_names).split(",")
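    # e.g. "summary, description, and priority" -> ["summary", "description", "priority"]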

    process_group = _get_process_group_from_modified_identifier(
        modified_process_group_id
    )
    qualified_process_model_identifier = (
        f"{process_group.id}/{process_model_identifier}"
    )

    metadata_extraction_paths = []
    for column in columns:
        metadata_extraction_paths.append({"key": column, "path": column})
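        # e.g. {"key": "summary", "path": "summary"}, so each column is
        # extracted as instance metadata from the same-named task-data key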

    process_model_attributes = {
        "id": qualified_process_model_identifier,
        "display_name": process_model_display_name,
        "description": None,
        "metadata_extraction_paths": metadata_extraction_paths,
    }

    process_model_info = ProcessModelInfo(**process_model_attributes)  # type: ignore
    if process_model_info is None:
        raise ApiError(
            error_code="process_model_could_not_be_created",
            message=f"Process Model could not be created from given body: {body}",
            status_code=400,
        )

    bpmn_template_file = os.path.join(
        current_app.root_path, "templates", "basic_with_user_task_template.bpmn"
    )
    if not os.path.exists(bpmn_template_file):
        raise ApiError(
            error_code="bpmn_template_file_does_not_exist",
            message="Could not find the bpmn template file to create process model.",
            status_code=500,
        )

    ProcessModelService.add_process_model(process_model_info)
    bpmn_process_identifier = f"{process_model_identifier}_process"
    bpmn_template_contents = ""
    with open(bpmn_template_file, encoding="utf-8") as f:
        bpmn_template_contents = f.read()

    bpmn_template_contents = bpmn_template_contents.replace(
        "natural_language_process_id_template", bpmn_process_identifier
    )
    bpmn_template_contents = bpmn_template_contents.replace(
        "form-identifier-id-template", form_identifier
    )

    form_uischema_json: dict = {"ui:order": columns}

    form_properties: dict = {}
    for column in columns:
        form_properties[column] = {
            "type": "string",
            "title": column,
        }
    form_schema_json = {
        "title": form_identifier,
        "description": "",
        "properties": form_properties,
        "required": [],
    }

    SpecFileService.add_file(
        process_model_info,
        f"{process_model_identifier}.bpmn",
        str.encode(bpmn_template_contents),
    )
    SpecFileService.add_file(
        process_model_info,
        f"{form_identifier}-schema.json",
        str.encode(json.dumps(form_schema_json)),
    )
    SpecFileService.add_file(
        process_model_info,
        f"{form_identifier}-uischema.json",
        str.encode(json.dumps(form_uischema_json)),
    )

    _commit_and_push_to_git(
        f"User: {g.user.username} created process model via natural language:"
        f" {process_model_info.id}"
    )

    default_report_metadata = ProcessInstanceReportService.system_metadata_map(
        "default"
    )
    for column in columns:
        default_report_metadata["columns"].append(
            {"Header": column, "accessor": column, "filterable": True}
        )
    ProcessInstanceReportModel.create_report(
        identifier=process_model_identifier,
        user=g.user,
        report_metadata=default_report_metadata,
    )

    return Response(
        json.dumps(ProcessModelInfoSchema().dump(process_model_info)),
        status=201,
        mimetype="application/json",
    )


def _get_file_from_request() -> Any:
    """Get_file_from_request."""
    request_file = connexion.request.files.get("file")
    if not request_file:
        raise ApiError(
            error_code="no_file_given",
            message="Given request does not contain a file",
            status_code=400,
        )
    return request_file


def _get_process_group_from_modified_identifier(
    modified_process_group_id: str,
) -> ProcessGroup:
    """_get_process_group_from_modified_identifier."""
    if modified_process_group_id is None:
        raise ApiError(
            error_code="process_group_id_not_specified",
            message=(
                "Process Model could not be created when process_group_id path param is"
                " unspecified"
            ),
            status_code=400,
        )

    unmodified_process_group_id = _un_modify_modified_process_model_id(
        modified_process_group_id
    )
    process_group = ProcessModelService.get_process_group(unmodified_process_group_id)
    if process_group is None:
        raise ApiError(
            error_code="process_model_could_not_be_created",
            message=(
                "Process Model could not be created from given body because Process"
                f" Group could not be found: {unmodified_process_group_id}"
            ),
            status_code=400,
        )
    return process_group

@@ -0,0 +1,134 @@
"""APIs for dealing with process groups, process models, and process instances."""
|
||||
import json
|
||||
import random
|
||||
import string
|
||||
from typing import Dict
|
||||
from typing import Union
|
||||
|
||||
import flask.wrappers
|
||||
from flask import current_app
|
||||
from flask import jsonify
|
||||
from flask import make_response
|
||||
from flask.wrappers import Response
|
||||
from flask_bpmn.api.api_error import ApiError
|
||||
from lxml import etree # type: ignore
|
||||
from lxml.builder import ElementMaker # type: ignore
|
||||
|
||||
from spiffworkflow_backend.routes.process_api_blueprint import _get_process_model
|
||||
from spiffworkflow_backend.routes.process_api_blueprint import (
|
||||
_get_required_parameter_or_raise,
|
||||
)
|
||||
from spiffworkflow_backend.services.script_unit_test_runner import ScriptUnitTestRunner
|
||||
from spiffworkflow_backend.services.spec_file_service import SpecFileService
|
||||
|
||||
|
||||
def script_unit_test_create(
|
||||
modified_process_model_identifier: str, body: Dict[str, Union[str, bool, int]]
|
||||
) -> flask.wrappers.Response:
|
||||
"""Script_unit_test_create."""
|
||||
bpmn_task_identifier = _get_required_parameter_or_raise(
|
||||
"bpmn_task_identifier", body
|
||||
)
|
||||
input_json = _get_required_parameter_or_raise("input_json", body)
|
||||
expected_output_json = _get_required_parameter_or_raise(
|
||||
"expected_output_json", body
|
||||
)
|
||||
|
||||
process_model_identifier = modified_process_model_identifier.replace(":", "/")
|
||||
process_model = _get_process_model(process_model_identifier)
|
||||
file = SpecFileService.get_files(process_model, process_model.primary_file_name)[0]
|
||||
if file is None:
|
||||
raise ApiError(
|
||||
error_code="cannot_find_file",
|
||||
message=(
|
||||
"Could not find the primary bpmn file for process_model:"
|
||||
f" {process_model.id}"
|
||||
),
|
||||
status_code=404,
|
||||
)
|
||||
|
||||
# TODO: move this to an xml service or something
|
||||
file_contents = SpecFileService.get_data(process_model, file.name)
|
||||
bpmn_etree_element = etree.fromstring(file_contents)
|
||||
|
||||
nsmap = bpmn_etree_element.nsmap
|
||||
spiff_element_maker = ElementMaker(
|
||||
namespace="http://spiffworkflow.org/bpmn/schema/1.0/core", nsmap=nsmap
|
||||
)
|
||||
|
||||
script_task_elements = bpmn_etree_element.xpath(
|
||||
f"//bpmn:scriptTask[@id='{bpmn_task_identifier}']",
|
||||
namespaces={"bpmn": "http://www.omg.org/spec/BPMN/20100524/MODEL"},
|
||||
)
|
||||
if len(script_task_elements) == 0:
|
||||
raise ApiError(
|
||||
error_code="missing_script_task",
|
||||
message=f"Cannot find a script task with id: {bpmn_task_identifier}",
|
||||
status_code=404,
|
||||
)
|
||||
script_task_element = script_task_elements[0]
|
||||
|
||||
extension_elements = None
|
||||
extension_elements_array = script_task_element.xpath(
|
||||
".//bpmn:extensionElements",
|
||||
namespaces={"bpmn": "http://www.omg.org/spec/BPMN/20100524/MODEL"},
|
||||
)
|
||||
if len(extension_elements_array) == 0:
|
||||
bpmn_element_maker = ElementMaker(
|
||||
namespace="http://www.omg.org/spec/BPMN/20100524/MODEL", nsmap=nsmap
|
||||
)
|
||||
extension_elements = bpmn_element_maker("extensionElements")
|
||||
script_task_element.append(extension_elements)
|
||||
else:
|
||||
extension_elements = extension_elements_array[0]
|
||||
|
||||
unit_test_elements = None
    unit_test_elements_array = extension_elements.xpath(
        # search relative to this task's extensionElements rather than the
        # whole document so we do not pick up unitTests from other tasks
        ".//spiffworkflow:unitTests",
        namespaces={"spiffworkflow": "http://spiffworkflow.org/bpmn/schema/1.0/core"},
    )
    if len(unit_test_elements_array) == 0:
        unit_test_elements = spiff_element_maker("unitTests")
        extension_elements.append(unit_test_elements)
    else:
        unit_test_elements = unit_test_elements_array[0]

    fuzz = "".join(
        random.choice(string.ascii_uppercase + string.digits)  # noqa: S311
        for _ in range(7)
    )
    unit_test_id = f"unit_test_{fuzz}"
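    # e.g. unit_test_id == "unit_test_X7K2P9Q" (sample value; the 7-character
    # suffix is random so repeated saves get unique ids)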

    input_json_element = spiff_element_maker("inputJson", json.dumps(input_json))
    expected_output_json_element = spiff_element_maker(
        "expectedOutputJson", json.dumps(expected_output_json)
    )
    unit_test_element = spiff_element_maker("unitTest", id=unit_test_id)
    unit_test_element.append(input_json_element)
    unit_test_element.append(expected_output_json_element)
    unit_test_elements.append(unit_test_element)
    SpecFileService.update_file(
        process_model, file.name, etree.tostring(bpmn_etree_element)
    )

    return Response(json.dumps({"ok": True}), status=202, mimetype="application/json")


def script_unit_test_run(
    modified_process_model_identifier: str, body: Dict[str, Union[str, bool, int]]
) -> flask.wrappers.Response:
    """Script_unit_test_run."""
    # FIXME: We should probably clear this somewhere else but this works
    current_app.config["THREAD_LOCAL_DATA"].process_instance_id = None
    current_app.config["THREAD_LOCAL_DATA"].spiff_step = None

    python_script = _get_required_parameter_or_raise("python_script", body)
    input_json = _get_required_parameter_or_raise("input_json", body)
    expected_output_json = _get_required_parameter_or_raise(
        "expected_output_json", body
    )

    result = ScriptUnitTestRunner.run_with_script_and_pre_post_contexts(
        python_script, input_json, expected_output_json
    )
    return make_response(jsonify(result), 200)

@@ -0,0 +1,67 @@
"""APIs for dealing with process groups, process models, and process instances."""
|
||||
import json
|
||||
from typing import Dict
|
||||
from typing import Optional
|
||||
|
||||
from flask import g
|
||||
from flask import jsonify
|
||||
from flask import make_response
|
||||
from flask.wrappers import Response
|
||||
|
||||
from spiffworkflow_backend.models.secret_model import SecretModel
|
||||
from spiffworkflow_backend.models.secret_model import SecretModelSchema
|
||||
from spiffworkflow_backend.models.user import UserModel
|
||||
from spiffworkflow_backend.services.secret_service import SecretService
|
||||
from spiffworkflow_backend.services.user_service import UserService
|
||||
|
||||
|
||||
def secret_show(key: str) -> Optional[str]:
|
||||
"""Secret_show."""
|
||||
return SecretService.get_secret(key)
|
||||
|
||||
|
||||
def secret_list(
|
||||
page: int = 1,
|
||||
per_page: int = 100,
|
||||
) -> Response:
|
||||
"""Secret_list."""
|
||||
secrets = (
|
||||
SecretModel.query.order_by(SecretModel.key)
|
||||
.join(UserModel)
|
||||
.add_columns(
|
||||
UserModel.username,
|
||||
)
|
||||
.paginate(page=page, per_page=per_page, error_out=False)
|
||||
)
|
||||
response_json = {
|
||||
"results": secrets.items,
|
||||
"pagination": {
|
||||
"count": len(secrets.items),
|
||||
"total": secrets.total,
|
||||
"pages": secrets.pages,
|
||||
},
|
||||
}
|
||||
return make_response(jsonify(response_json), 200)
|
||||
|
||||
|
||||
def secret_create(body: Dict) -> Response:
|
||||
"""Add secret."""
|
||||
secret_model = SecretService().add_secret(body["key"], body["value"], g.user.id)
|
||||
return Response(
|
||||
json.dumps(SecretModelSchema().dump(secret_model)),
|
||||
status=201,
|
||||
mimetype="application/json",
|
||||
)
|
||||
|
||||
|
||||
def secret_update(key: str, body: dict) -> Response:
|
||||
"""Update secret."""
|
||||
SecretService().update_secret(key, body["value"], g.user.id)
|
||||
return Response(json.dumps({"ok": True}), status=200, mimetype="application/json")
|
||||
|
||||
|
||||
def secret_delete(key: str) -> Response:
|
||||
"""Delete secret."""
|
||||
current_user = UserService.current_user()
|
||||
SecretService.delete_secret(key, current_user.id)
|
||||
return Response(json.dumps({"ok": True}), status=200, mimetype="application/json")

@@ -0,0 +1,49 @@
"""APIs for dealing with process groups, process models, and process instances."""
|
||||
import json
|
||||
|
||||
import flask.wrappers
|
||||
import werkzeug
|
||||
from flask import current_app
|
||||
from flask import g
|
||||
from flask import redirect
|
||||
from flask import request
|
||||
from flask.wrappers import Response
|
||||
|
||||
from spiffworkflow_backend.routes.user import verify_token
|
||||
from spiffworkflow_backend.services.secret_service import SecretService
|
||||
from spiffworkflow_backend.services.service_task_service import ServiceTaskService
|
||||
|
||||
|
||||
def service_task_list() -> flask.wrappers.Response:
|
||||
"""Service_task_list."""
|
||||
available_connectors = ServiceTaskService.available_connectors()
|
||||
return Response(
|
||||
json.dumps(available_connectors), status=200, mimetype="application/json"
|
||||
)
|
||||
|
||||
|
||||
def authentication_list() -> flask.wrappers.Response:
|
||||
"""Authentication_list."""
|
||||
available_authentications = ServiceTaskService.authentication_list()
|
||||
response_json = {
|
||||
"results": available_authentications,
|
||||
"connector_proxy_base_url": current_app.config["CONNECTOR_PROXY_URL"],
|
||||
"redirect_url": f"{current_app.config['SPIFFWORKFLOW_BACKEND_URL']}/v1.0/authentication_callback",
|
||||
}
|
||||
|
||||
return Response(json.dumps(response_json), status=200, mimetype="application/json")
|
||||
|
||||
|
||||
def authentication_callback(
|
||||
service: str,
|
||||
auth_method: str,
|
||||
) -> werkzeug.wrappers.Response:
|
||||
"""Authentication_callback."""
|
||||
verify_token(request.args.get("token"), force_run=True)
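    # re-verify the token passed back in the query string before storing the
    # auth response as a secret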
    response = request.args["response"]
    SecretService().update_secret(
        f"{service}/{auth_method}", response, g.user.id, create_if_not_exists=True
    )
    return redirect(
        f"{current_app.config['SPIFFWORKFLOW_FRONTEND_URL']}/admin/configuration"
    )

@@ -0,0 +1,563 @@
"""APIs for dealing with process groups, process models, and process instances."""
|
||||
import json
|
||||
import os
|
||||
import uuid
|
||||
from typing import Any
|
||||
from typing import Dict
|
||||
from typing import Optional
|
||||
from typing import TypedDict
|
||||
from typing import Union
|
||||
|
||||
import flask.wrappers
|
||||
import jinja2
|
||||
from flask import g
|
||||
from flask import jsonify
|
||||
from flask import make_response
|
||||
from flask.wrappers import Response
|
||||
from flask_bpmn.api.api_error import ApiError
|
||||
from flask_bpmn.models.db import db
|
||||
from SpiffWorkflow.task import Task as SpiffTask # type: ignore
|
||||
from SpiffWorkflow.task import TaskState
|
||||
from sqlalchemy import and_
|
||||
from sqlalchemy import asc
|
||||
from sqlalchemy import desc
|
||||
from sqlalchemy import func
|
||||
from sqlalchemy.orm import aliased
|
||||
|
||||
from spiffworkflow_backend.models.group import GroupModel
|
||||
from spiffworkflow_backend.models.human_task import HumanTaskModel
|
||||
from spiffworkflow_backend.models.human_task_user import HumanTaskUserModel
|
||||
from spiffworkflow_backend.models.process_instance import ProcessInstanceModel
|
||||
from spiffworkflow_backend.models.process_instance import ProcessInstanceStatus
|
||||
from spiffworkflow_backend.models.process_model import ProcessModelInfo
|
||||
from spiffworkflow_backend.models.user import UserModel
|
||||
from spiffworkflow_backend.routes.process_api_blueprint import (
|
||||
_find_principal_or_raise,
|
||||
)
|
||||
from spiffworkflow_backend.routes.process_api_blueprint import (
|
||||
_find_process_instance_by_id_or_raise,
|
||||
)
|
||||
from spiffworkflow_backend.routes.process_api_blueprint import _get_process_model
|
||||
from spiffworkflow_backend.services.authorization_service import AuthorizationService
|
||||
from spiffworkflow_backend.services.file_system_service import FileSystemService
|
||||
from spiffworkflow_backend.services.process_instance_processor import (
|
||||
ProcessInstanceProcessor,
|
||||
)
|
||||
from spiffworkflow_backend.services.process_instance_service import (
|
||||
ProcessInstanceService,
|
||||
)
|
||||
from spiffworkflow_backend.services.process_model_service import ProcessModelService
|
||||
from spiffworkflow_backend.services.spec_file_service import SpecFileService
|
||||
|
||||
|
||||
class TaskDataSelectOption(TypedDict):
|
||||
"""TaskDataSelectOption."""
|
||||
|
||||
value: str
|
||||
label: str
|
||||
|
||||
|
||||
class ReactJsonSchemaSelectOption(TypedDict):
|
||||
"""ReactJsonSchemaSelectOption."""
|
||||
|
||||
type: str
|
||||
title: str
|
||||
enum: list[str]
|
||||
|
||||
|
||||
# TODO: see comment for before_request
|
||||
# @process_api_blueprint.route("/v1.0/tasks", methods=["GET"])
|
||||
def task_list_my_tasks(page: int = 1, per_page: int = 100) -> flask.wrappers.Response:
|
||||
"""Task_list_my_tasks."""
|
||||
principal = _find_principal_or_raise()
|
||||
human_tasks = (
|
||||
HumanTaskModel.query.order_by(desc(HumanTaskModel.id)) # type: ignore
|
||||
.join(ProcessInstanceModel)
|
||||
.join(HumanTaskUserModel)
|
||||
.filter_by(user_id=principal.user_id)
|
||||
.filter(HumanTaskModel.completed == False) # noqa: E712
|
||||
# just need this add_columns to add the process_model_identifier. Then add everything back that was removed.
|
||||
.add_columns(
|
||||
ProcessInstanceModel.process_model_identifier,
|
||||
ProcessInstanceModel.process_model_display_name,
|
||||
ProcessInstanceModel.status,
|
||||
HumanTaskModel.task_name,
|
||||
HumanTaskModel.task_title,
|
||||
HumanTaskModel.task_type,
|
||||
HumanTaskModel.task_status,
|
||||
HumanTaskModel.task_id,
|
||||
HumanTaskModel.id,
|
||||
HumanTaskModel.process_model_display_name,
|
||||
HumanTaskModel.process_instance_id,
|
||||
)
|
||||
.paginate(page=page, per_page=per_page, error_out=False)
|
||||
)
|
||||
tasks = [HumanTaskModel.to_task(human_task) for human_task in human_tasks.items]
|
||||
|
||||
response_json = {
|
||||
"results": tasks,
|
||||
"pagination": {
|
||||
"count": len(human_tasks.items),
|
||||
"total": human_tasks.total,
|
||||
"pages": human_tasks.pages,
|
||||
},
|
||||
}
|
||||
|
||||
return make_response(jsonify(response_json), 200)
|
||||
|
||||
|
||||
def task_list_for_my_open_processes(
|
||||
page: int = 1, per_page: int = 100
|
||||
) -> flask.wrappers.Response:
|
||||
"""Task_list_for_my_open_processes."""
|
||||
return _get_tasks(page=page, per_page=per_page)
|
||||
|
||||
|
||||
def task_list_for_me(page: int = 1, per_page: int = 100) -> flask.wrappers.Response:
|
||||
"""Task_list_for_me."""
|
||||
return _get_tasks(
|
||||
processes_started_by_user=False,
|
||||
has_lane_assignment_id=False,
|
||||
page=page,
|
||||
per_page=per_page,
|
||||
)
|
||||
|
||||
|
||||
def task_list_for_my_groups(
|
||||
user_group_identifier: Optional[str] = None, page: int = 1, per_page: int = 100
|
||||
) -> flask.wrappers.Response:
|
||||
"""Task_list_for_my_groups."""
|
||||
return _get_tasks(
|
||||
user_group_identifier=user_group_identifier,
|
||||
processes_started_by_user=False,
|
||||
page=page,
|
||||
per_page=per_page,
|
||||
)
|
||||
|
||||
|
||||
def task_show(process_instance_id: int, task_id: str) -> flask.wrappers.Response:
|
||||
"""Task_show."""
|
||||
process_instance = _find_process_instance_by_id_or_raise(process_instance_id)
|
||||
|
||||
if process_instance.status == ProcessInstanceStatus.suspended.value:
|
||||
raise ApiError(
|
||||
error_code="error_suspended",
|
||||
message="The process instance is suspended",
|
||||
status_code=400,
|
||||
)
|
||||
|
||||
process_model = _get_process_model(
|
||||
process_instance.process_model_identifier,
|
||||
)
|
||||
|
||||
human_task = HumanTaskModel.query.filter_by(
|
||||
process_instance_id=process_instance_id, task_id=task_id
|
||||
).first()
|
||||
if human_task is None:
|
||||
raise (
|
||||
ApiError(
|
||||
error_code="no_human_task",
|
||||
message=(
|
||||
f"Cannot find a task to complete for task id '{task_id}' and"
|
||||
f" process instance {process_instance_id}."
|
||||
),
|
||||
status_code=500,
|
||||
)
|
||||
)
|
||||
|
||||
form_schema_file_name = ""
|
||||
form_ui_schema_file_name = ""
|
||||
spiff_task = _get_spiff_task_from_process_instance(task_id, process_instance)
|
||||
extensions = spiff_task.task_spec.extensions
|
||||
|
||||
if "properties" in extensions:
|
||||
properties = extensions["properties"]
|
||||
if "formJsonSchemaFilename" in properties:
|
||||
form_schema_file_name = properties["formJsonSchemaFilename"]
|
||||
if "formUiSchemaFilename" in properties:
|
||||
form_ui_schema_file_name = properties["formUiSchemaFilename"]
|
||||
processor = ProcessInstanceProcessor(process_instance)
|
||||
task = ProcessInstanceService.spiff_task_to_api_task(processor, spiff_task)
|
||||
task.data = spiff_task.data
|
||||
task.process_model_display_name = process_model.display_name
|
||||
task.process_model_identifier = process_model.id
|
||||
|
||||
process_model_with_form = process_model
|
||||
refs = SpecFileService.get_references_for_process(process_model_with_form)
|
||||
all_processes = [i.identifier for i in refs]
|
||||
if task.process_identifier not in all_processes:
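        # the task belongs to a process defined outside this process model
        # (e.g. a call activity), so look up the model that holds its files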
        bpmn_file_full_path = (
            ProcessInstanceProcessor.bpmn_file_full_path_from_bpmn_process_identifier(
                task.process_identifier
            )
        )
        relative_path = os.path.relpath(
            bpmn_file_full_path, start=FileSystemService.root_path()
        )
        process_model_relative_path = os.path.dirname(relative_path)
        process_model_with_form = (
            ProcessModelService.get_process_model_from_relative_path(
                process_model_relative_path
            )
        )

    if task.type == "User Task":
        if not form_schema_file_name:
            raise (
                ApiError(
                    error_code="missing_form_file",
                    message=(
                        "Cannot find a form file for process_instance_id:"
                        f" {process_instance_id}, task_id: {task_id}"
                    ),
                    status_code=400,
                )
            )

        form_contents = _prepare_form_data(
            form_schema_file_name,
            task.data,
            process_model_with_form,
        )

        try:
            # form_contents is a str
            form_dict = json.loads(form_contents)
        except Exception as exception:
            raise (
                ApiError(
                    error_code="error_loading_form",
                    message=(
                        f"Could not load form schema from: {form_schema_file_name}."
                        f" Error was: {str(exception)}"
                    ),
                    status_code=400,
                )
            ) from exception

        if task.data:
            _update_form_schema_with_task_data_as_needed(form_dict, task.data)

        if form_contents:
            task.form_schema = form_dict

        if form_ui_schema_file_name:
            ui_form_contents = _prepare_form_data(
                form_ui_schema_file_name,
                task.data,
                process_model_with_form,
            )
            if ui_form_contents:
                task.form_ui_schema = ui_form_contents

    if task.properties and task.data and "instructionsForEndUser" in task.properties:
        if task.properties["instructionsForEndUser"]:
            task.properties["instructionsForEndUser"] = _render_jinja_template(
                task.properties["instructionsForEndUser"], task.data
            )
    return make_response(jsonify(task), 200)


def process_data_show(
    process_instance_id: int,
    process_data_identifier: str,
    modified_process_model_identifier: str,
) -> flask.wrappers.Response:
    """Process_data_show."""
    process_instance = _find_process_instance_by_id_or_raise(process_instance_id)
    processor = ProcessInstanceProcessor(process_instance)
    all_process_data = processor.get_data()
    process_data_value = None
    if process_data_identifier in all_process_data:
        process_data_value = all_process_data[process_data_identifier]

    return make_response(
        jsonify(
            {
                "process_data_identifier": process_data_identifier,
                "process_data_value": process_data_value,
            }
        ),
        200,
    )


def task_submit(
    process_instance_id: int,
    task_id: str,
    body: Dict[str, Any],
    terminate_loop: bool = False,
) -> flask.wrappers.Response:
    """Task_submit."""
    principal = _find_principal_or_raise()
    process_instance = _find_process_instance_by_id_or_raise(process_instance_id)
    if not process_instance.can_submit_task():
        raise ApiError(
            error_code="process_instance_not_runnable",
            message=(
                f"Process Instance ({process_instance.id}) has status "
                f"{process_instance.status} which does not allow tasks to be submitted."
            ),
            status_code=400,
        )

    processor = ProcessInstanceProcessor(process_instance)
    spiff_task = _get_spiff_task_from_process_instance(
        task_id, process_instance, processor=processor
    )
    AuthorizationService.assert_user_can_complete_spiff_task(
        process_instance.id, spiff_task, principal.user
    )

    if spiff_task.state != TaskState.READY:
        raise (
            ApiError(
                error_code="invalid_state",
                message="You may not update a task unless it is in the READY state.",
                status_code=400,
            )
        )

    if terminate_loop and spiff_task.is_looping():
        spiff_task.terminate_loop()

    human_task = HumanTaskModel.query.filter_by(
        process_instance_id=process_instance_id, task_id=task_id, completed=False
    ).first()
    if human_task is None:
        raise (
            ApiError(
                error_code="no_human_task",
                message=(
                    f"Cannot find a task to complete for task id '{task_id}' and"
                    f" process instance {process_instance_id}."
                ),
                status_code=500,
            )
        )

    ProcessInstanceService.complete_form_task(
        processor=processor,
        spiff_task=spiff_task,
        data=body,
        user=g.user,
        human_task=human_task,
    )

    # If we need to update all tasks, then get the next ready task and if it
    # is a multi-instance with the same task spec, complete that form as well.
    # if update_all:
    #     last_index = spiff_task.task_info()["mi_index"]
    #     next_task = processor.next_task()
    #     while next_task and next_task.task_info()["mi_index"] > last_index:
    #         __update_task(processor, next_task, form_data, user)
    #         last_index = next_task.task_info()["mi_index"]
    #         next_task = processor.next_task()
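    # if this user already has another ready task in this process instance,
    # return it so the frontend can jump straight to the next form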

    next_human_task_assigned_to_me = (
        HumanTaskModel.query.filter_by(
            process_instance_id=process_instance_id, completed=False
        )
        .order_by(asc(HumanTaskModel.id))  # type: ignore
        .join(HumanTaskUserModel)
        .filter_by(user_id=principal.user_id)
        .first()
    )
    if next_human_task_assigned_to_me:
        return make_response(
            jsonify(HumanTaskModel.to_task(next_human_task_assigned_to_me)), 200
        )

    return Response(json.dumps({"ok": True}), status=202, mimetype="application/json")


def _get_tasks(
    processes_started_by_user: bool = True,
    has_lane_assignment_id: bool = True,
    page: int = 1,
    per_page: int = 100,
    user_group_identifier: Optional[str] = None,
) -> flask.wrappers.Response:
    """Get_tasks."""
    user_id = g.user.id

    # use distinct to ensure we only get one row per human task otherwise
    # we can get back multiple for the same human task row which throws off
    # pagination later on
    # https://stackoverflow.com/q/34582014/6090676
    human_tasks_query = (
        db.session.query(HumanTaskModel)
        .group_by(HumanTaskModel.id)  # type: ignore
        .outerjoin(GroupModel, GroupModel.id == HumanTaskModel.lane_assignment_id)
        .join(ProcessInstanceModel)
        .join(UserModel, UserModel.id == ProcessInstanceModel.process_initiator_id)
        .filter(HumanTaskModel.completed == False)  # noqa: E712
    )

    assigned_user = aliased(UserModel)
    if processes_started_by_user:
        human_tasks_query = (
            human_tasks_query.filter(
                ProcessInstanceModel.process_initiator_id == user_id
            )
            .outerjoin(
                HumanTaskUserModel,
                HumanTaskModel.id == HumanTaskUserModel.human_task_id,
            )
            .outerjoin(assigned_user, assigned_user.id == HumanTaskUserModel.user_id)
        )
    else:
        human_tasks_query = human_tasks_query.filter(
            ProcessInstanceModel.process_initiator_id != user_id
        ).join(
            HumanTaskUserModel,
            and_(
                HumanTaskUserModel.user_id == user_id,
                HumanTaskModel.id == HumanTaskUserModel.human_task_id,
            ),
        )
        if has_lane_assignment_id:
            if user_group_identifier:
                human_tasks_query = human_tasks_query.filter(
                    GroupModel.identifier == user_group_identifier
                )
            else:
                human_tasks_query = human_tasks_query.filter(
                    HumanTaskModel.lane_assignment_id.is_not(None)  # type: ignore
                )
        else:
            human_tasks_query = human_tasks_query.filter(HumanTaskModel.lane_assignment_id.is_(None))  # type: ignore

    human_tasks = (
        human_tasks_query.add_columns(
            ProcessInstanceModel.process_model_identifier,
            ProcessInstanceModel.status.label("process_instance_status"),  # type: ignore
            ProcessInstanceModel.updated_at_in_seconds,
            ProcessInstanceModel.created_at_in_seconds,
            UserModel.username.label("process_initiator_username"),
            GroupModel.identifier.label("assigned_user_group_identifier"),
            HumanTaskModel.task_name,
            HumanTaskModel.task_title,
            HumanTaskModel.process_model_display_name,
            HumanTaskModel.process_instance_id,
            func.group_concat(assigned_user.username.distinct()).label(
                "potential_owner_usernames"
            ),
        )
        .order_by(desc(HumanTaskModel.id))  # type: ignore
        .paginate(page=page, per_page=per_page, error_out=False)
    )

    response_json = {
        "results": human_tasks.items,
        "pagination": {
            "count": len(human_tasks.items),
            "total": human_tasks.total,
            "pages": human_tasks.pages,
        },
    }

    return make_response(jsonify(response_json), 200)


def _prepare_form_data(
    form_file: str, task_data: Union[dict, None], process_model: ProcessModelInfo
) -> str:
    """Prepare_form_data."""
    if task_data is None:
        return ""

    file_contents = SpecFileService.get_data(process_model, form_file).decode("utf-8")
    return _render_jinja_template(file_contents, task_data)


def _render_jinja_template(unprocessed_template: str, data: dict[str, Any]) -> str:
    """Render_jinja_template."""
    jinja_environment = jinja2.Environment(
        autoescape=True, lstrip_blocks=True, trim_blocks=True
    )
    template = jinja_environment.from_string(unprocessed_template)
    return template.render(**data)
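    # e.g. _render_jinja_template("Hello {{ name }}!", {"name": "World"})
    # returns "Hello World!"; autoescape=True HTML-escapes values from data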


def _get_spiff_task_from_process_instance(
    task_id: str,
    process_instance: ProcessInstanceModel,
    processor: Union[ProcessInstanceProcessor, None] = None,
) -> SpiffTask:
    """Get_spiff_task_from_process_instance."""
    if processor is None:
        processor = ProcessInstanceProcessor(process_instance)
    task_uuid = uuid.UUID(task_id)
    spiff_task = processor.bpmn_process_instance.get_task(task_uuid)

    if spiff_task is None:
        raise (
            ApiError(
                error_code="empty_task",
                message="Processor failed to obtain task.",
                status_code=500,
            )
        )
    return spiff_task


# originally from: https://bitcoden.com/answers/python-nested-dictionary-update-value-where-any-nested-key-matches
def _update_form_schema_with_task_data_as_needed(
    in_dict: dict, task_data: dict
) -> None:
    """Update_nested."""
    for k, value in in_dict.items():
        if "anyOf" == k:
            # value will look like the array on the right of
            # "anyOf": ["options_from_task_data_var:awesome_options"]
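            # e.g. with task_data == {"awesome_options": [{"value": "a", "label": "Apple"}]}
            # this rewrites the fragment to
            # "anyOf": [{"type": "string", "enum": ["a"], "title": "Apple"}]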
            if isinstance(value, list):
                if len(value) == 1:
                    first_element_in_value_list = value[0]
                    if isinstance(first_element_in_value_list, str):
                        if first_element_in_value_list.startswith(
                            "options_from_task_data_var:"
                        ):
                            task_data_var = first_element_in_value_list.replace(
                                "options_from_task_data_var:", ""
                            )

                            if task_data_var not in task_data:
                                raise (
                                    ApiError(
                                        error_code="missing_task_data_var",
                                        message=(
                                            "Task data is missing variable:"
                                            f" {task_data_var}"
                                        ),
                                        status_code=500,
                                    )
                                )

                            select_options_from_task_data = task_data.get(task_data_var)
                            if isinstance(select_options_from_task_data, list):
                                if all(
                                    "value" in d and "label" in d
                                    for d in select_options_from_task_data
                                ):

                                    def map_function(
                                        task_data_select_option: TaskDataSelectOption,
                                    ) -> ReactJsonSchemaSelectOption:
                                        """Map_function."""
                                        return {
                                            "type": "string",
                                            "enum": [task_data_select_option["value"]],
                                            "title": task_data_select_option["label"],
                                        }

                                    options_for_react_json_schema_form = list(
                                        map(map_function, select_options_from_task_data)
                                    )

                                    in_dict[k] = options_for_react_json_schema_form
        elif isinstance(value, dict):
            _update_form_schema_with_task_data_as_needed(value, task_data)
        elif isinstance(value, list):
            for o in value:
                if isinstance(o, dict):
                    _update_form_schema_with_task_data_as_needed(o, task_data)

@@ -16,8 +16,9 @@ from flask_bpmn.api.api_error import ApiError
 from werkzeug.wrappers import Response

 from spiffworkflow_backend.models.user import UserModel
-from spiffworkflow_backend.services.authentication_service import AuthenticationService
+from spiffworkflow_backend.services.authentication_service import (
+    AuthenticationService,
+    MissingAccessTokenError,
+)
 from spiffworkflow_backend.services.authorization_service import AuthorizationService
 from spiffworkflow_backend.services.user_service import UserService
@@ -66,16 +67,19 @@ def verify_token(
                     user_model = get_user_from_decoded_internal_token(decoded_token)
                 except Exception as e:
                     current_app.logger.error(
-                        f"Exception in verify_token getting user from decoded internal token. {e}"
+                        "Exception in verify_token getting user from decoded"
+                        f" internal token. {e}"
                     )
         elif "iss" in decoded_token.keys():
             try:
                 if AuthenticationService.validate_id_token(token):
                     user_info = decoded_token
-            except ApiError as ae:  # API Error is only thrown in the token is outdated.
+            except (
+                ApiError
+            ) as ae:  # API Error is only thrown if the token is outdated.
                 # Try to refresh the token
                 user = UserService.get_user_by_service_and_service_id(
-                    "open_id", decoded_token["sub"]
+                    decoded_token["iss"], decoded_token["sub"]
                 )
                 if user:
                     refresh_token = AuthenticationService.get_refresh_token(user.id)
@@ -104,10 +108,12 @@ def verify_token(
             ) from e

     if (
-        user_info is not None and "error" not in user_info
+        user_info is not None
+        and "error" not in user_info
+        and "iss" in user_info
     ):  # not sure what to test yet
         user_model = (
-            UserModel.query.filter(UserModel.service == "open_id")
+            UserModel.query.filter(UserModel.service == user_info["iss"])
             .filter(UserModel.service_id == user_info["sub"])
             .first()
         )
@@ -268,10 +274,10 @@ def login_api_return(code: str, state: str, session_state: str) -> str:
         code, "/v1.0/login_api_return"
     )
     access_token: str = auth_token_object["access_token"]
-    assert access_token  # noqa: S101
+    if access_token is None:
+        raise MissingAccessTokenError("Cannot find the access token for the request")

     return access_token
     # return redirect("localhost:7000/v1.0/ui")
     # return {'uid': 'user_1'}


 def logout(id_token: str, redirect_url: Optional[str]) -> Response:
@@ -292,7 +298,6 @@ def get_decoded_token(token: str) -> Optional[Dict]:
     try:
         decoded_token = jwt.decode(token, options={"verify_signature": False})
     except Exception as e:
-        print(f"Exception in get_token_type: {e}")
         raise ApiError(
             error_code="invalid_token", message="Cannot decode token."
         ) from e
@@ -340,9 +345,5 @@ def get_user_from_decoded_internal_token(decoded_token: dict) -> Optional[UserMo
     )
     if user:
         return user
-    user = UserModel(
-        username=service_id,
-        service=service,
-        service_id=service_id,
-    )
+    user = UserService.create_user(service_id, service, service_id)
     return user

@@ -26,6 +26,7 @@ user_blueprint = Blueprint("main", __name__)
 #     user = UserService.create_user('internal', username)
 #     return Response(json.dumps({"id": user.id}), status=201, mimetype=APPLICATION_JSON)
+

 # def _create_user(username):
 #     user = UserModel.query.filter_by(username=username).first()
 #     if user is not None:

Some files were not shown because too many files have changed in this diff.