Merge branch 'main' into feature/process-navigation

Elizabeth Esswein 2022-12-15 11:28:46 -05:00
commit 33f34ca51d
129 changed files with 4882 additions and 1025 deletions


@@ -27,3 +27,8 @@ per-file-ignores =
# this file overwrites methods from the logging library so we can't change them
# and ignore long comment lines
spiffworkflow-backend/src/spiffworkflow_backend/services/logging_service.py:N802,B950
# TODO: fix the S issues:
# S607 Starting a process with a partial executable path
# S605 Starting a process with a shell: Seems safe, but may be changed in the future, consider rewriting without shell
spiffworkflow-backend/tests/spiffworkflow_backend/integration/test_process_api.py:S607,S101,D103,S605
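
For reference, the two suppressed Bandit checks flag subprocess calls like the ones below (a minimal sketch; the commands are illustrative and not taken from the test file):

    import subprocess

    subprocess.run("echo hello", shell=True)  # S605: starting a process with a shell
    subprocess.run(["echo", "hello"])         # S607: partial executable path

    # The form Bandit accepts without complaint: no shell, absolute path.
    subprocess.run(["/bin/echo", "hello"])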

.github/workflows/release_builds.yml (new file, 117 lines)

@@ -0,0 +1,117 @@
name: Release Builds
on:
push:
tags: [ v* ]
jobs:
create_frontend_docker_container:
runs-on: ubuntu-latest
env:
REGISTRY: ghcr.io
IMAGE_NAME: sartography/spiffworkflow-frontend
permissions:
contents: read
packages: write
steps:
- name: Check out the repository
uses: actions/checkout@v3.0.2
with:
# Disabling shallow clone is recommended for improving relevancy of reporting in sonarcloud
fetch-depth: 0
- name: Log in to the Container registry
uses: docker/login-action@f054a8b539a109f9f41c372932f1ae047eff08c9
with:
registry: ${{ env.REGISTRY }}
username: ${{ github.actor }}
password: ${{ secrets.GITHUB_TOKEN }}
- name: Extract metadata (tags, labels) for Docker
id: meta
uses: docker/metadata-action@98669ae865ea3cffbcbaa878cf57c20bbf1c6c38
with:
context: spiffworkflow-frontend
images: ${{ env.REGISTRY }}/${{ env.IMAGE_NAME }}
- name: Build and push Frontend Docker image
uses: docker/build-push-action@ad44023a93711e3deb337508980b4b5e9bcdc5dc
with:
# this action doesn't seem to respect working-directory so set context
context: spiffworkflow-frontend
push: true
tags: ${{ steps.meta.outputs.tags }}
labels: ${{ steps.meta.outputs.labels }}
create_backend_docker_container:
runs-on: ubuntu-latest
env:
REGISTRY: ghcr.io
IMAGE_NAME: sartography/spiffworkflow-backend
permissions:
contents: read
packages: write
steps:
- name: Check out the repository
uses: actions/checkout@v3.0.2
with:
# Disabling shallow clone is recommended for improving relevancy of reporting in sonarcloud
fetch-depth: 0
- name: Log in to the Container registry
uses: docker/login-action@f054a8b539a109f9f41c372932f1ae047eff08c9
with:
registry: ${{ env.REGISTRY }}
username: ${{ github.actor }}
password: ${{ secrets.GITHUB_TOKEN }}
- name: Extract metadata (tags, labels) for Docker
id: meta
uses: docker/metadata-action@98669ae865ea3cffbcbaa878cf57c20bbf1c6c38
with:
images: ${{ env.REGISTRY }}/${{ env.IMAGE_NAME }}
- name: Build and push Backend Docker image
uses: docker/build-push-action@ad44023a93711e3deb337508980b4b5e9bcdc5dc
with:
# this action doesn't seem to respect working-directory so set context
context: spiffworkflow-backend
push: true
tags: ${{ steps.meta.outputs.tags }}
labels: ${{ steps.meta.outputs.labels }}
# Is this getting updated, I wonder?
create_demo-proxy:
runs-on: ubuntu-latest
env:
REGISTRY: ghcr.io
IMAGE_NAME: sartography/connector-proxy-demo
permissions:
contents: read
packages: write
steps:
- name: Check out the repository
uses: actions/checkout@v3.0.2
with:
# Disabling shallow clone is recommended for improving relevancy of reporting in sonarcloud
fetch-depth: 0
- name: Log in to the Container registry
uses: docker/login-action@f054a8b539a109f9f41c372932f1ae047eff08c9
with:
registry: ${{ env.REGISTRY }}
username: ${{ github.actor }}
password: ${{ secrets.GITHUB_TOKEN }}
- name: Extract metadata (tags, labels) for Docker
id: meta
uses: docker/metadata-action@98669ae865ea3cffbcbaa878cf57c20bbf1c6c38
with:
context: connector-proxy-demo
images: ${{ env.REGISTRY }}/${{ env.IMAGE_NAME }}
- name: Build and push the connector proxy
uses: docker/build-push-action@ad44023a93711e3deb337508980b4b5e9bcdc5dc
with:
# this action doesn't seem to respect working-directory so set context
context: connector-proxy-demo
push: true
tags: ${{ steps.meta.outputs.tags }}
labels: ${{ steps.meta.outputs.labels }}


@@ -36,8 +36,16 @@ uninstall:
.PHONY : tests
tests:
cd tests/$(NAME)
PYTHONPATH=../.. python -m unittest discover -v . "*Test.py"
python -m unittest discover -vs tests/SpiffWorkflow -p \*Test.py -t .
.PHONY : tests-par
tests-par:
@if ! command -v unittest-parallel >/dev/null 2>&1; then \
echo "unittest-parallel not found. Please install it with:"; \
echo " pip install unittest-parallel"; \
exit 1; \
fi
unittest-parallel --module-fixtures -vs tests/SpiffWorkflow -p \*Test.py -t .
.PHONY : tests-cov
tests-cov:


@@ -29,7 +29,7 @@ from .ValidationException import ValidationException
from ..specs.BpmnProcessSpec import BpmnProcessSpec
from ..specs.events.EndEvent import EndEvent
from ..specs.events.StartEvent import StartEvent
from ..specs.events.IntermediateEvent import BoundaryEvent, IntermediateCatchEvent, IntermediateThrowEvent
from ..specs.events.IntermediateEvent import BoundaryEvent, IntermediateCatchEvent, IntermediateThrowEvent, EventBasedGateway
from ..specs.events.IntermediateEvent import SendTask, ReceiveTask
from ..specs.SubWorkflowTask import CallActivity, SubWorkflowTask, TransactionSubprocess
from ..specs.ExclusiveGateway import ExclusiveGateway
@@ -47,7 +47,7 @@ from .task_parsers import (UserTaskParser, NoneTaskParser, ManualTaskParser,
ExclusiveGatewayParser, ParallelGatewayParser, InclusiveGatewayParser,
CallActivityParser, ScriptTaskParser, SubWorkflowParser,
ServiceTaskParser)
from .event_parsers import (StartEventParser, EndEventParser, BoundaryEventParser,
from .event_parsers import (EventBasedGatewayParser, StartEventParser, EndEventParser, BoundaryEventParser,
IntermediateCatchEventParser, IntermediateThrowEventParser,
SendTaskParser, ReceiveTaskParser)
@@ -57,7 +57,8 @@ XSD_PATH = os.path.join(os.path.dirname(__file__), 'schema', 'BPMN20.xsd')
class BpmnValidator:
def __init__(self, xsd_path=XSD_PATH, imports=None):
schema = etree.parse(open(xsd_path))
with open(xsd_path) as xsd:
schema = etree.parse(xsd)
if imports is not None:
for ns, fn in imports.items():
elem = etree.Element(
@@ -104,6 +105,7 @@ class BpmnParser(object):
full_tag('boundaryEvent'): (BoundaryEventParser, BoundaryEvent),
full_tag('receiveTask'): (ReceiveTaskParser, ReceiveTask),
full_tag('sendTask'): (SendTaskParser, SendTask),
full_tag('eventBasedGateway'): (EventBasedGatewayParser, EventBasedGateway),
}
OVERRIDE_PARSER_CLASSES = {}


@@ -121,6 +121,25 @@ class TaskParser(NodeParser):
elif len(self.xpath('./bpmn:standardLoopCharacteristics')) > 0:
self._set_multiinstance_attributes(True, 25, STANDARDLOOPCOUNT, loop_task=True)
def _add_boundary_event(self, children):
parent = _BoundaryEventParent(
self.spec, '%s.BoundaryEventParent' % self.get_id(),
self.task, lane=self.task.lane)
self.process_parser.parsed_nodes[self.node.get('id')] = parent
parent.connect_outgoing(self.task, '%s.FromBoundaryEventParent' % self.get_id(), None, None)
for event in children:
child = self.process_parser.parse_node(event)
if isinstance(child.event_definition, CancelEventDefinition) \
and not isinstance(self.task, TransactionSubprocess):
raise ValidationException('Cancel Events may only be used with transactions',
node=self.node,
filename=self.filename)
parent.connect_outgoing(child,
'%s.FromBoundaryEventParent' % event.get('id'),
None, None)
return parent
def parse_node(self):
"""
Parse this node, and all children, returning the connected task spec.
@@ -139,30 +158,9 @@
boundary_event_nodes = self.doc_xpath('.//bpmn:boundaryEvent[@attachedToRef="%s"]' % self.get_id())
if boundary_event_nodes:
parent_task = _BoundaryEventParent(
self.spec, '%s.BoundaryEventParent' % self.get_id(),
self.task, lane=self.task.lane)
self.process_parser.parsed_nodes[
self.node.get('id')] = parent_task
parent_task.connect_outgoing(
self.task, '%s.FromBoundaryEventParent' % self.get_id(),
None, None)
for boundary_event in boundary_event_nodes:
b = self.process_parser.parse_node(boundary_event)
if isinstance(b.event_definition, CancelEventDefinition) \
and not isinstance(self.task, TransactionSubprocess):
raise ValidationException(
'Cancel Events may only be used with transactions',
node=self.node,
filename=self.filename)
parent_task.connect_outgoing(
b,
'%s.FromBoundaryEventParent' % boundary_event.get(
'id'),
None, None)
parent = self._add_boundary_event(boundary_event_nodes)
else:
self.process_parser.parsed_nodes[
self.node.get('id')] = self.task
self.process_parser.parsed_nodes[self.node.get('id')] = self.task
children = []
outgoing = self.doc_xpath('.//bpmn:sequenceFlow[@sourceRef="%s"]' % self.get_id())
@@ -202,7 +200,7 @@
c, target_node, sequence_flow,
sequence_flow.get('id') == default_outgoing)
return parent_task if boundary_event_nodes else self.task
return parent if boundary_event_nodes else self.task
except ValidationException:
raise
except Exception as ex:


@@ -5,7 +5,7 @@ from SpiffWorkflow.bpmn.specs.events.event_definitions import CorrelationPropert
from .ValidationException import ValidationException
from .TaskParser import TaskParser
from .util import first, one
from ..specs.events.event_definitions import (TimerEventDefinition, MessageEventDefinition,
from ..specs.events.event_definitions import (MultipleEventDefinition, TimerEventDefinition, MessageEventDefinition,
ErrorEventDefinition, EscalationEventDefinition,
SignalEventDefinition,
CancelEventDefinition, CycleTimerEventDefinition,
@@ -109,7 +109,7 @@ class EventDefinitionParser(TaskParser):
correlations.append(CorrelationProperty(key, expression, used_by))
return correlations
def _create_task(self, event_definition, cancel_activity=None):
def _create_task(self, event_definition, cancel_activity=None, parallel=None):
if isinstance(event_definition, MessageEventDefinition):
for prop in event_definition.correlation_properties:
@@ -126,28 +126,40 @@
}
if cancel_activity is not None:
kwargs['cancel_activity'] = cancel_activity
if parallel is not None:
kwargs['parallel'] = parallel
return self.spec_class(self.spec, self.get_task_spec_name(), event_definition, **kwargs)
def get_event_definition(self, xpaths):
"""Returns the first event definition it can find in given list of xpaths"""
"""Returns all event definitions it can find in given list of xpaths"""
event_definitions = []
for path in xpaths:
event = first(self.xpath(path))
if event is not None:
for event in self.xpath(path):
if path == MESSAGE_EVENT_XPATH:
return self.parse_message_event(event)
event_definitions.append(self.parse_message_event(event))
elif path == SIGNAL_EVENT_XPATH:
return self.parse_signal_event(event)
event_definitions.append(self.parse_signal_event(event))
elif path == TIMER_EVENT_XPATH:
return self.parse_timer_event()
event_definitions.append(self.parse_timer_event())
elif path == CANCEL_EVENT_XPATH:
return self.parse_cancel_event()
event_definitions.append(self.parse_cancel_event())
elif path == ERROR_EVENT_XPATH:
return self.parse_error_event(event)
event_definitions.append(self.parse_error_event(event))
elif path == ESCALATION_EVENT_XPATH:
return self.parse_escalation_event(event)
event_definitions.append(self.parse_escalation_event(event))
elif path == TERMINATION_EVENT_XPATH:
return self.parse_terminate_event()
return NoneEventDefinition()
event_definitions.append(self.parse_terminate_event())
parallel = self.node.get('parallelMultiple') == 'true'
if len(event_definitions) == 0:
return NoneEventDefinition()
elif len(event_definitions) == 1:
return event_definitions[0]
else:
return MultipleEventDefinition(event_definitions, parallel)
class StartEventParser(EventDefinitionParser):
"""Parses a Start Event, and connects it to the internal spec.start task.
@@ -158,8 +170,7 @@ class StartEventParser(EventDefinitionParser):
task = self._create_task(event_definition)
self.spec.start.connect(task)
if isinstance(event_definition, CycleTimerEventDefinition):
# We are misusing cycle timers, so this is a hack whereby we will
# revisit ourselves if we fire.
# We are misusing cycle timers, so this is a hack whereby we will revisit ourselves if we fire.
task.connect(task)
return task
@@ -229,3 +240,22 @@ class BoundaryEventParser(EventDefinitionParser):
if isinstance(event_definition, NoneEventDefinition):
raise NotImplementedError('Unsupported Catch Event: %r' % etree.tostring(self.node))
return self._create_task(event_definition, cancel_activity)
class EventBasedGatewayParser(EventDefinitionParser):
def create_task(self):
return self._create_task(MultipleEventDefinition())
def handles_multiple_outgoing(self):
return True
def connect_outgoing(self, outgoing_task, outgoing_task_node, sequence_flow_node, is_default):
self.task.event_definition.event_definitions.append(outgoing_task.event_definition)
self.task.connect_outgoing(
outgoing_task,
sequence_flow_node.get('id'),
sequence_flow_node.get('name', None),
self.parse_documentation(sequence_flow_node)
)


@@ -7,7 +7,7 @@ from SpiffWorkflow.bpmn.specs.BpmnProcessSpec import BpmnDataSpecification
from .dictionary import DictionaryConverter
from ..specs.events.event_definitions import SignalEventDefinition, MessageEventDefinition, NoneEventDefinition
from ..specs.events.event_definitions import MultipleEventDefinition, SignalEventDefinition, MessageEventDefinition, NoneEventDefinition
from ..specs.events.event_definitions import TimerEventDefinition, CycleTimerEventDefinition, TerminateEventDefinition
from ..specs.events.event_definitions import ErrorEventDefinition, EscalationEventDefinition, CancelEventDefinition
from ..specs.events.event_definitions import CorrelationProperty, NamedEventDefinition
@@ -91,7 +91,7 @@ class BpmnTaskSpecConverter(DictionaryConverter):
event_definitions = [ NoneEventDefinition, CancelEventDefinition, TerminateEventDefinition,
SignalEventDefinition, MessageEventDefinition, ErrorEventDefinition, EscalationEventDefinition,
TimerEventDefinition, CycleTimerEventDefinition ]
TimerEventDefinition, CycleTimerEventDefinition, MultipleEventDefinition]
for event_definition in event_definitions:
self.register(
@@ -257,6 +257,9 @@ class BpmnTaskSpecConverter(DictionaryConverter):
dct['error_code'] = event_definition.error_code
if isinstance(event_definition, EscalationEventDefinition):
dct['escalation_code'] = event_definition.escalation_code
if isinstance(event_definition, MultipleEventDefinition):
dct['event_definitions'] = [self.convert(e) for e in event_definition.event_definitions]
dct['parallel'] = event_definition.parallel
return dct
@@ -273,6 +276,8 @@ class BpmnTaskSpecConverter(DictionaryConverter):
internal, external = dct.pop('internal'), dct.pop('external')
if 'correlation_properties' in dct:
dct['correlation_properties'] = [CorrelationProperty(**prop) for prop in dct['correlation_properties']]
if 'event_definitions' in dct:
dct['event_definitions'] = [self.restore(d) for d in dct['event_definitions']]
event_definition = definition_class(**dct)
event_definition.internal = internal
event_definition.external = external


@@ -21,7 +21,7 @@ from ..specs.ParallelGateway import ParallelGateway
from ..specs.events.StartEvent import StartEvent
from ..specs.events.EndEvent import EndEvent
from ..specs.events.IntermediateEvent import BoundaryEvent, IntermediateCatchEvent, IntermediateThrowEvent
from ..specs.events.IntermediateEvent import BoundaryEvent, EventBasedGateway, IntermediateCatchEvent, IntermediateThrowEvent
from ..specs.events.IntermediateEvent import _BoundaryEventParent, SendTask, ReceiveTask
from ..workflow import BpmnWorkflow
@@ -52,6 +52,7 @@ class StartTaskConverter(BpmnTaskSpecConverter):
def from_dict(self, dct):
return self.task_spec_from_dict(dct)
class LoopResetTaskConverter(BpmnTaskSpecConverter):
def __init__(self, data_converter=None, typename=None):
@@ -70,6 +71,7 @@ class LoopResetTaskConverter(BpmnTaskSpecConverter):
spec.destination_id = UUID(spec.destination_id)
return spec
class EndJoinConverter(BpmnTaskSpecConverter):
def __init__(self, data_converter=None, typename=None):
@@ -310,3 +312,9 @@ class BoundaryEventParentConverter(BpmnTaskSpecConverter):
def from_dict(self, dct):
return self.task_spec_from_dict(dct)
class EventBasedGatewayConverter(EventConverter):
def __init__(self, data_converter=None, typename=None):
super().__init__(EventBasedGateway, data_converter, typename)


@@ -17,7 +17,7 @@ from .task_spec_converters import SimpleTaskConverter, StartTaskConverter, EndJo
from .task_spec_converters import NoneTaskConverter, UserTaskConverter, ManualTaskConverter, ScriptTaskConverter
from .task_spec_converters import CallActivityTaskConverter, TransactionSubprocessTaskConverter
from .task_spec_converters import StartEventConverter, EndEventConverter
from .task_spec_converters import IntermediateCatchEventConverter, IntermediateThrowEventConverter
from .task_spec_converters import IntermediateCatchEventConverter, IntermediateThrowEventConverter, EventBasedGatewayConverter
from .task_spec_converters import SendTaskConverter, ReceiveTaskConverter
from .task_spec_converters import BoundaryEventConverter, BoundaryEventParentConverter
from .task_spec_converters import ParallelGatewayConverter, ExclusiveGatewayConverter, InclusiveGatewayConverter
@@ -27,7 +27,7 @@ DEFAULT_TASK_SPEC_CONVERTER_CLASSES = [
NoneTaskConverter, UserTaskConverter, ManualTaskConverter, ScriptTaskConverter,
CallActivityTaskConverter, TransactionSubprocessTaskConverter,
StartEventConverter, EndEventConverter, SendTaskConverter, ReceiveTaskConverter,
IntermediateCatchEventConverter, IntermediateThrowEventConverter,
IntermediateCatchEventConverter, IntermediateThrowEventConverter, EventBasedGatewayConverter,
BoundaryEventConverter, BoundaryEventParentConverter,
ParallelGatewayConverter, ExclusiveGatewayConverter, InclusiveGatewayConverter
]


@@ -111,6 +111,7 @@ class _BoundaryEventParent(Simple, BpmnSpecMixin):
def deserialize(cls, serializer, wf_spec, s_state):
return serializer.deserialize_boundary_event_parent(wf_spec, s_state, cls)
class BoundaryEvent(CatchingEvent):
"""Task Spec for a bpmn:boundaryEvent node."""
@@ -128,7 +129,6 @@
interrupting = 'Interrupting' if self.cancel_activity else 'Non-Interrupting'
return f'{interrupting} {self.event_definition.event_type} Event'
def catches(self, my_task, event_definition, correlations=None):
# Boundary events should only be caught while waiting
return super(BoundaryEvent, self).catches(my_task, event_definition, correlations) and my_task.state == TaskState.WAITING
@@ -148,3 +148,16 @@
@classmethod
def deserialize(cls, serializer, wf_spec, s_state):
return serializer.deserialize_boundary_event(wf_spec, s_state, cls)
class EventBasedGateway(CatchingEvent):
@property
def spec_type(self):
return 'Event Based Gateway'
def _on_complete_hook(self, my_task):
for child in my_task.children:
if not child.task_spec.event_definition.has_fired(child):
child.cancel()
return super()._on_complete_hook(my_task)
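
End to end, the new gateway behaves as in this sketch (it mirrors the EventBasedGatewayTest added below; event-gateway.bpmn is the test diagram, and the custom script engine is only needed because the diagram's timer expression uses timedelta):

    from datetime import timedelta

    from SpiffWorkflow.bpmn.parser.BpmnParser import BpmnParser
    from SpiffWorkflow.bpmn.workflow import BpmnWorkflow
    from SpiffWorkflow.bpmn.PythonScriptEngine import PythonScriptEngine
    from SpiffWorkflow.bpmn.specs.events.event_definitions import MessageEventDefinition

    parser = BpmnParser()
    parser.add_bpmn_file('event-gateway.bpmn')
    spec = parser.get_spec('Process_0pvx19v')

    script_engine = PythonScriptEngine(default_globals={'timedelta': timedelta})
    workflow = BpmnWorkflow(spec, script_engine=script_engine)
    workflow.do_engine_steps()  # stops with the gateway's three catch events WAITING

    # Catching one of the events completes that path; _on_complete_hook above
    # cancels the sibling events that never fired.
    workflow.catch(MessageEventDefinition('message_1'))
    workflow.refresh_waiting_tasks()
    workflow.do_engine_steps()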


@@ -69,10 +69,10 @@ class EventDefinition(object):
# We also don't have a more sophisticated method for addressing events to
# a particular process, but this at least provides a mechanism for distinguishing
# between processes and subprocesses.
if self.internal:
workflow.catch(event)
if self.external:
outer_workflow.catch(event, correlations)
if self.internal and (self.external and workflow != outer_workflow):
workflow.catch(event)
def __eq__(self, other):
return self.__class__.__name__ == other.__class__.__name__
@@ -92,6 +92,7 @@
obj.internal, obj.external = internal, external
return obj
class NamedEventDefinition(EventDefinition):
"""
Extend the base event class to provide a name for the event. Most throw/catch events
@@ -115,7 +116,6 @@
retdict['name'] = self.name
return retdict
class CancelEventDefinition(EventDefinition):
"""
Cancel events are only handled by the outer workflow, as they can only be used inside
@@ -398,3 +398,49 @@ class CycleTimerEventDefinition(EventDefinition):
retdict['label'] = self.label
retdict['cycle_definition'] = self.cycle_definition
return retdict
class MultipleEventDefinition(EventDefinition):
def __init__(self, event_definitions=None, parallel=False):
super().__init__()
self.event_definitions = event_definitions or []
self.parallel = parallel
@property
def event_type(self):
return 'Multiple'
def catch(self, my_task, event_definition=None):
event_definition.catch(my_task, event_definition)
if self.parallel:
# Parallel multiple needs to match all events
seen_events = my_task.internal_data.get('seen_events', []) + [event_definition]
my_task._set_internal_data(seen_events=seen_events)
if all(event in seen_events for event in self.event_definitions):
my_task._set_internal_data(event_fired=True)
else:
my_task._set_internal_data(event_fired=False)
else:
# Otherwise, matching one is sufficient
my_task._set_internal_data(event_fired=True)
def reset(self, my_task):
my_task.internal_data.pop('seen_events', None)
super().reset(my_task)
def __eq__(self, other):
# This event can catch any of the events associated with it
for event in self.event_definitions:
if event == other:
return True
return False
def throw(self, my_task):
# Multiple events throw all associated events when they fire
for event_definition in self.event_definitions:
self._throw(
event=event_definition,
workflow=my_task.workflow,
outer_workflow=my_task.workflow.outer_workflow
)
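
The composite's matching rule can be exercised in isolation (a sketch; only equality and the parallel flag are shown):

    from SpiffWorkflow.bpmn.specs.events.event_definitions import (
        MessageEventDefinition, MultipleEventDefinition)

    multi = MultipleEventDefinition(
        [MessageEventDefinition('message_1'), MessageEventDefinition('message_2')],
        parallel=True)

    # __eq__ above makes the composite equal to any of its members, which is
    # how a catching task recognizes a single incoming event as one of its own.
    assert multi == MessageEventDefinition('message_1')
    assert multi == MessageEventDefinition('message_2')

    # With parallel=True, catch() keeps a 'seen_events' list and only sets
    # event_fired once every member has arrived.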


@@ -16,7 +16,7 @@
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
# 02110-1301 USA
from SpiffWorkflow.bpmn.specs.events.event_definitions import MessageEventDefinition
from SpiffWorkflow.bpmn.specs.events.event_definitions import MessageEventDefinition, MultipleEventDefinition
from .PythonScriptEngine import PythonScriptEngine
from .specs.events.event_types import CatchingEvent
from .specs.events.StartEvent import StartEvent
@@ -113,6 +113,14 @@ class BpmnWorkflow(Workflow):
workflow = workflow.outer_workflow
return workflow
def _get_or_create_subprocess(self, task_spec, wf_spec):
if isinstance(task_spec.event_definition, MultipleEventDefinition):
for sp in self.subprocesses.values():
start = sp.get_tasks_from_spec_name(task_spec.name)
if len(start) and start[0].state == TaskState.WAITING:
return sp
return self.add_subprocess(wf_spec.name, f'{wf_spec.name}_{len(self.subprocesses)}')
def catch(self, event_definition, correlations=None):
"""
Send an event definition to any tasks that catch it.
@@ -134,10 +142,8 @@
for name, spec in self.subprocess_specs.items():
for task_spec in list(spec.task_specs.values()):
if isinstance(task_spec, StartEvent) and task_spec.event_definition == event_definition:
subprocess = self.add_subprocess(spec.name, f'{spec.name}_{len(self.subprocesses)}')
subprocess.correlations = correlations or {}
start = self.get_tasks_from_spec_name(task_spec.name, workflow=subprocess)[0]
task_spec.event_definition.catch(start, event_definition)
subprocess = self._get_or_create_subprocess(task_spec, spec)
subprocess.correlations.update(correlations or {})
# We need to get all the tasks that catch an event before completing any of them
# in order to prevent the scenario where multiple boundary events catch the


@@ -30,9 +30,7 @@ class DMNEngine:
a given task."""
result = {}
matched_rules = self.decide(task)
if len(matched_rules) == 1:
result = matched_rules[0].output_as_dict(task)
elif len(matched_rules) > 1: # This must be a multi-output
if self.decision_table.hit_policy == HitPolicy.COLLECT.value:
# each output will be an array of values, all outputs will
# be placed in a dict, which we will then merge.
for rule in matched_rules:
@@ -41,6 +39,8 @@
if not key in result:
result[key] = []
result[key].append(rule_output[key])
elif len(matched_rules) > 0:
result = matched_rules[0].output_as_dict(task)
return result
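
Put differently: under the COLLECT hit policy every matched rule appends to a per-output list, while any other policy takes only the first match. A toy sketch of the merge, with plain dicts standing in for the real rule outputs:

    matched_outputs = [{'name': 'Larry'}, {'name': 'Curly'}, {'name': 'Moe'}]

    result = {}
    for rule_output in matched_outputs:
        for key, value in rule_output.items():
            result.setdefault(key, []).append(value)

    assert result == {'name': ['Larry', 'Curly', 'Moe']}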


@@ -21,6 +21,7 @@ class BusinessRuleTaskConverter(BpmnTaskSpecConverter):
return {
'id': table.id,
'name': table.name,
'hit_policy': table.hit_policy,
'inputs': [val.__dict__ for val in table.inputs],
'outputs': [val.__dict__ for val in table.outputs],
'rules': [self.rule_to_dict(rule) for rule in table.rules],


@@ -0,0 +1,107 @@
<?xml version="1.0" encoding="UTF-8"?>
<bpmn:definitions xmlns:bpmn="http://www.omg.org/spec/BPMN/20100524/MODEL" xmlns:bpmndi="http://www.omg.org/spec/BPMN/20100524/DI" xmlns:dc="http://www.omg.org/spec/DD/20100524/DC" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xmlns:di="http://www.omg.org/spec/DD/20100524/DI" xmlns:modeler="http://camunda.org/schema/modeler/1.0" id="Definitions_1jaorpt" targetNamespace="http://bpmn.io/schema/bpmn" exporter="Camunda Modeler" exporterVersion="4.11.1" modeler:executionPlatform="Camunda Platform" modeler:executionPlatformVersion="7.15.0">
<bpmn:process id="Process_0pvx19v" isExecutable="true">
<bpmn:startEvent id="StartEvent_1">
<bpmn:outgoing>Flow_0w4b5t2</bpmn:outgoing>
</bpmn:startEvent>
<bpmn:sequenceFlow id="Flow_0w4b5t2" sourceRef="StartEvent_1" targetRef="Gateway_1434v9l" />
<bpmn:eventBasedGateway id="Gateway_1434v9l">
<bpmn:incoming>Flow_0w4b5t2</bpmn:incoming>
<bpmn:outgoing>Flow_0gge7fn</bpmn:outgoing>
<bpmn:outgoing>Flow_0px7ksu</bpmn:outgoing>
<bpmn:outgoing>Flow_1rfbrlf</bpmn:outgoing>
</bpmn:eventBasedGateway>
<bpmn:intermediateCatchEvent id="message_1_event">
<bpmn:incoming>Flow_0gge7fn</bpmn:incoming>
<bpmn:outgoing>Flow_1g4g85l</bpmn:outgoing>
<bpmn:messageEventDefinition id="MessageEventDefinition_158nhox" messageRef="Message_0lyfmat" />
</bpmn:intermediateCatchEvent>
<bpmn:sequenceFlow id="Flow_0gge7fn" sourceRef="Gateway_1434v9l" targetRef="message_1_event" />
<bpmn:intermediateCatchEvent id="message_2_event">
<bpmn:incoming>Flow_0px7ksu</bpmn:incoming>
<bpmn:outgoing>Flow_18v90rx</bpmn:outgoing>
<bpmn:messageEventDefinition id="MessageEventDefinition_1w1pnze" messageRef="Message_1ntpwce" />
</bpmn:intermediateCatchEvent>
<bpmn:sequenceFlow id="Flow_0px7ksu" sourceRef="Gateway_1434v9l" targetRef="message_2_event" />
<bpmn:intermediateCatchEvent id="timer_event">
<bpmn:incoming>Flow_1rfbrlf</bpmn:incoming>
<bpmn:outgoing>Flow_0mppjk9</bpmn:outgoing>
<bpmn:timerEventDefinition id="TimerEventDefinition_0reo0gl">
<bpmn:timeDuration xsi:type="bpmn:tFormalExpression">timedelta(seconds=1)</bpmn:timeDuration>
</bpmn:timerEventDefinition>
</bpmn:intermediateCatchEvent>
<bpmn:sequenceFlow id="Flow_1rfbrlf" sourceRef="Gateway_1434v9l" targetRef="timer_event" />
<bpmn:endEvent id="timer">
<bpmn:incoming>Flow_0mppjk9</bpmn:incoming>
</bpmn:endEvent>
<bpmn:sequenceFlow id="Flow_0mppjk9" sourceRef="timer_event" targetRef="timer" />
<bpmn:endEvent id="message_1_end">
<bpmn:incoming>Flow_1g4g85l</bpmn:incoming>
</bpmn:endEvent>
<bpmn:sequenceFlow id="Flow_1g4g85l" sourceRef="message_1_event" targetRef="message_1_end" />
<bpmn:endEvent id="message_2_end">
<bpmn:incoming>Flow_18v90rx</bpmn:incoming>
</bpmn:endEvent>
<bpmn:sequenceFlow id="Flow_18v90rx" sourceRef="message_2_event" targetRef="message_2_end" />
</bpmn:process>
<bpmn:message id="Message_0lyfmat" name="message_1" />
<bpmn:message id="Message_1ntpwce" name="message_2" />
<bpmndi:BPMNDiagram id="BPMNDiagram_1">
<bpmndi:BPMNPlane id="BPMNPlane_1" bpmnElement="Process_0pvx19v">
<bpmndi:BPMNEdge id="Flow_18v90rx_di" bpmnElement="Flow_18v90rx">
<di:waypoint x="408" y="230" />
<di:waypoint x="472" y="230" />
</bpmndi:BPMNEdge>
<bpmndi:BPMNEdge id="Flow_1g4g85l_di" bpmnElement="Flow_1g4g85l">
<di:waypoint x="408" y="117" />
<di:waypoint x="472" y="117" />
</bpmndi:BPMNEdge>
<bpmndi:BPMNEdge id="Flow_0mppjk9_di" bpmnElement="Flow_0mppjk9">
<di:waypoint x="408" y="340" />
<di:waypoint x="472" y="340" />
</bpmndi:BPMNEdge>
<bpmndi:BPMNEdge id="Flow_1rfbrlf_di" bpmnElement="Flow_1rfbrlf">
<di:waypoint x="290" y="142" />
<di:waypoint x="290" y="340" />
<di:waypoint x="372" y="340" />
</bpmndi:BPMNEdge>
<bpmndi:BPMNEdge id="Flow_0px7ksu_di" bpmnElement="Flow_0px7ksu">
<di:waypoint x="290" y="142" />
<di:waypoint x="290" y="230" />
<di:waypoint x="372" y="230" />
</bpmndi:BPMNEdge>
<bpmndi:BPMNEdge id="Flow_0gge7fn_di" bpmnElement="Flow_0gge7fn">
<di:waypoint x="315" y="117" />
<di:waypoint x="372" y="117" />
</bpmndi:BPMNEdge>
<bpmndi:BPMNEdge id="Flow_0w4b5t2_di" bpmnElement="Flow_0w4b5t2">
<di:waypoint x="215" y="117" />
<di:waypoint x="265" y="117" />
</bpmndi:BPMNEdge>
<bpmndi:BPMNShape id="_BPMNShape_StartEvent_2" bpmnElement="StartEvent_1">
<dc:Bounds x="179" y="99" width="36" height="36" />
</bpmndi:BPMNShape>
<bpmndi:BPMNShape id="Gateway_0gplu2e_di" bpmnElement="Gateway_1434v9l">
<dc:Bounds x="265" y="92" width="50" height="50" />
</bpmndi:BPMNShape>
<bpmndi:BPMNShape id="Event_1og7irs_di" bpmnElement="message_1_event">
<dc:Bounds x="372" y="99" width="36" height="36" />
</bpmndi:BPMNShape>
<bpmndi:BPMNShape id="Event_0quxwe6_di" bpmnElement="message_2_event">
<dc:Bounds x="372" y="212" width="36" height="36" />
</bpmndi:BPMNShape>
<bpmndi:BPMNShape id="Event_1ea7gov_di" bpmnElement="timer_event">
<dc:Bounds x="372" y="322" width="36" height="36" />
</bpmndi:BPMNShape>
<bpmndi:BPMNShape id="Event_0emrepu_di" bpmnElement="timer">
<dc:Bounds x="472" y="322" width="36" height="36" />
</bpmndi:BPMNShape>
<bpmndi:BPMNShape id="Event_0x07cac_di" bpmnElement="message_1_end">
<dc:Bounds x="472" y="99" width="36" height="36" />
</bpmndi:BPMNShape>
<bpmndi:BPMNShape id="Event_1p7slpj_di" bpmnElement="message_2_end">
<dc:Bounds x="472" y="212" width="36" height="36" />
</bpmndi:BPMNShape>
</bpmndi:BPMNPlane>
</bpmndi:BPMNDiagram>
</bpmn:definitions>


@@ -0,0 +1,43 @@
<?xml version="1.0" encoding="UTF-8"?>
<bpmn:definitions xmlns:bpmn="http://www.omg.org/spec/BPMN/20100524/MODEL" xmlns:bpmndi="http://www.omg.org/spec/BPMN/20100524/DI" xmlns:dc="http://www.omg.org/spec/DD/20100524/DC" xmlns:di="http://www.omg.org/spec/DD/20100524/DI" xmlns:camunda="http://camunda.org/schema/1.0/bpmn" xmlns:modeler="http://camunda.org/schema/modeler/1.0" id="Definitions_19o7vxg" targetNamespace="http://bpmn.io/schema/bpmn" exporter="Camunda Modeler" exporterVersion="5.0.0" modeler:executionPlatform="Camunda Platform" modeler:executionPlatformVersion="7.17.0">
<bpmn:process id="main" name="Main" isExecutable="true">
<bpmn:startEvent id="StartEvent_1" parallelMultiple="true">
<bpmn:outgoing>Flow_1tr2mqr</bpmn:outgoing>
<bpmn:messageEventDefinition id="MessageEventDefinition_158nhox" messageRef="Message_0lyfmat" />
<bpmn:messageEventDefinition id="MessageEventDefinition_1w1pnze" messageRef="Message_1ntpwce" />
</bpmn:startEvent>
<bpmn:task id="any_task" name="Any Task">
<bpmn:incoming>Flow_1tr2mqr</bpmn:incoming>
<bpmn:outgoing>Flow_1qjctmo</bpmn:outgoing>
</bpmn:task>
<bpmn:sequenceFlow id="Flow_1tr2mqr" sourceRef="StartEvent_1" targetRef="any_task" />
<bpmn:endEvent id="Event_0hamwsf">
<bpmn:incoming>Flow_1qjctmo</bpmn:incoming>
</bpmn:endEvent>
<bpmn:sequenceFlow id="Flow_1qjctmo" sourceRef="any_task" targetRef="Event_0hamwsf" />
</bpmn:process>
<bpmn:message id="Message_0lyfmat" name="message_1" />
<bpmn:message id="Message_1ntpwce" name="message_2" />
<bpmndi:BPMNDiagram id="BPMNDiagram_1">
<bpmndi:BPMNPlane id="BPMNPlane_1" bpmnElement="main">
<bpmndi:BPMNShape id="Event_1mddj6x_di" bpmnElement="StartEvent_1">
<dc:Bounds x="179" y="169" width="36" height="36" />
</bpmndi:BPMNShape>
<bpmndi:BPMNShape id="Activity_1bn6xph_di" bpmnElement="any_task">
<dc:Bounds x="270" y="147" width="100" height="80" />
<bpmndi:BPMNLabel />
</bpmndi:BPMNShape>
<bpmndi:BPMNShape id="Event_0hamwsf_di" bpmnElement="Event_0hamwsf">
<dc:Bounds x="432" y="169" width="36" height="36" />
</bpmndi:BPMNShape>
<bpmndi:BPMNEdge id="Flow_1tr2mqr_di" bpmnElement="Flow_1tr2mqr">
<di:waypoint x="215" y="187" />
<di:waypoint x="270" y="187" />
</bpmndi:BPMNEdge>
<bpmndi:BPMNEdge id="Flow_1qjctmo_di" bpmnElement="Flow_1qjctmo">
<di:waypoint x="370" y="187" />
<di:waypoint x="432" y="187" />
</bpmndi:BPMNEdge>
</bpmndi:BPMNPlane>
</bpmndi:BPMNDiagram>
</bpmn:definitions>


@@ -0,0 +1,43 @@
<?xml version="1.0" encoding="UTF-8"?>
<bpmn:definitions xmlns:bpmn="http://www.omg.org/spec/BPMN/20100524/MODEL" xmlns:bpmndi="http://www.omg.org/spec/BPMN/20100524/DI" xmlns:dc="http://www.omg.org/spec/DD/20100524/DC" xmlns:di="http://www.omg.org/spec/DD/20100524/DI" xmlns:camunda="http://camunda.org/schema/1.0/bpmn" xmlns:modeler="http://camunda.org/schema/modeler/1.0" id="Definitions_19o7vxg" targetNamespace="http://bpmn.io/schema/bpmn" exporter="Camunda Modeler" exporterVersion="5.0.0" modeler:executionPlatform="Camunda Platform" modeler:executionPlatformVersion="7.17.0">
<bpmn:process id="main" name="Main" isExecutable="true">
<bpmn:startEvent id="StartEvent_1">
<bpmn:outgoing>Flow_1tr2mqr</bpmn:outgoing>
<bpmn:messageEventDefinition id="MessageEventDefinition_158nhox" messageRef="Message_0lyfmat" />
<bpmn:messageEventDefinition id="MessageEventDefinition_1w1pnze" messageRef="Message_1ntpwce" />
</bpmn:startEvent>
<bpmn:task id="any_task" name="Any Task">
<bpmn:incoming>Flow_1tr2mqr</bpmn:incoming>
<bpmn:outgoing>Flow_1qjctmo</bpmn:outgoing>
</bpmn:task>
<bpmn:sequenceFlow id="Flow_1tr2mqr" sourceRef="StartEvent_1" targetRef="any_task" />
<bpmn:endEvent id="Event_0hamwsf">
<bpmn:incoming>Flow_1qjctmo</bpmn:incoming>
</bpmn:endEvent>
<bpmn:sequenceFlow id="Flow_1qjctmo" sourceRef="any_task" targetRef="Event_0hamwsf" />
</bpmn:process>
<bpmn:message id="Message_0lyfmat" name="message_1" />
<bpmn:message id="Message_1ntpwce" name="message_2" />
<bpmndi:BPMNDiagram id="BPMNDiagram_1">
<bpmndi:BPMNPlane id="BPMNPlane_1" bpmnElement="main">
<bpmndi:BPMNShape id="Event_1mddj6x_di" bpmnElement="StartEvent_1">
<dc:Bounds x="179" y="169" width="36" height="36" />
</bpmndi:BPMNShape>
<bpmndi:BPMNShape id="Activity_1bn6xph_di" bpmnElement="any_task">
<dc:Bounds x="270" y="147" width="100" height="80" />
<bpmndi:BPMNLabel />
</bpmndi:BPMNShape>
<bpmndi:BPMNShape id="Event_0hamwsf_di" bpmnElement="Event_0hamwsf">
<dc:Bounds x="432" y="169" width="36" height="36" />
</bpmndi:BPMNShape>
<bpmndi:BPMNEdge id="Flow_1tr2mqr_di" bpmnElement="Flow_1tr2mqr">
<di:waypoint x="215" y="187" />
<di:waypoint x="270" y="187" />
</bpmndi:BPMNEdge>
<bpmndi:BPMNEdge id="Flow_1qjctmo_di" bpmnElement="Flow_1qjctmo">
<di:waypoint x="370" y="187" />
<di:waypoint x="432" y="187" />
</bpmndi:BPMNEdge>
</bpmndi:BPMNPlane>
</bpmndi:BPMNDiagram>
</bpmn:definitions>


@@ -0,0 +1,88 @@
<?xml version="1.0" encoding="UTF-8"?>
<bpmn:definitions xmlns:bpmn="http://www.omg.org/spec/BPMN/20100524/MODEL" xmlns:bpmndi="http://www.omg.org/spec/BPMN/20100524/DI" xmlns:dc="http://www.omg.org/spec/DD/20100524/DC" xmlns:di="http://www.omg.org/spec/DD/20100524/DI" xmlns:camunda="http://camunda.org/schema/1.0/bpmn" xmlns:modeler="http://camunda.org/schema/modeler/1.0" id="Definitions_19o7vxg" targetNamespace="http://bpmn.io/schema/bpmn" exporter="Camunda Modeler" exporterVersion="5.0.0" modeler:executionPlatform="Camunda Platform" modeler:executionPlatformVersion="7.17.0">
<bpmn:message id="Message_0lyfmat" name="message_1" />
<bpmn:message id="Message_1ntpwce" name="message_2" />
<bpmn:collaboration id="top">
<bpmn:participant id="responder" name="Responder" processRef="respond" />
<bpmn:participant id="initiator" name="Initiator" processRef="initiate" />
</bpmn:collaboration>
<bpmn:process id="respond" name="Respond" isExecutable="true">
<bpmn:startEvent id="Event_07g2lnb" parallelMultiple="true">
<bpmn:outgoing>Flow_04uk4n8</bpmn:outgoing>
<bpmn:messageEventDefinition id="MessageEventDefinition_158nhox" messageRef="Message_0lyfmat" />
<bpmn:messageEventDefinition id="MessageEventDefinition_1w1pnze" messageRef="Message_1ntpwce" />
</bpmn:startEvent>
<bpmn:endEvent id="Event_0hamwsf">
<bpmn:incoming>Flow_08al33k</bpmn:incoming>
</bpmn:endEvent>
<bpmn:task id="any_task" name="Any Task">
<bpmn:incoming>Flow_04uk4n8</bpmn:incoming>
<bpmn:outgoing>Flow_08al33k</bpmn:outgoing>
</bpmn:task>
<bpmn:sequenceFlow id="Flow_04uk4n8" sourceRef="Event_07g2lnb" targetRef="any_task" />
<bpmn:sequenceFlow id="Flow_08al33k" sourceRef="any_task" targetRef="Event_0hamwsf" />
</bpmn:process>
<bpmn:process id="initiate" name="Initiate" isExecutable="true">
<bpmn:startEvent id="Event_0jamixt">
<bpmn:outgoing>Flow_1wgdi4h</bpmn:outgoing>
</bpmn:startEvent>
<bpmn:sequenceFlow id="Flow_1wgdi4h" sourceRef="Event_0jamixt" targetRef="Event_0n8a7vh" />
<bpmn:sequenceFlow id="Flow_1wxjn4e" sourceRef="Event_0n8a7vh" targetRef="Event_0vork94" />
<bpmn:endEvent id="Event_0vork94">
<bpmn:incoming>Flow_1wxjn4e</bpmn:incoming>
</bpmn:endEvent>
<bpmn:intermediateThrowEvent id="Event_0n8a7vh">
<bpmn:incoming>Flow_1wgdi4h</bpmn:incoming>
<bpmn:outgoing>Flow_1wxjn4e</bpmn:outgoing>
<bpmn:messageEventDefinition id="MessageEventDefinition_158nhox_throw" messageRef="Message_0lyfmat" />
<bpmn:messageEventDefinition id="MessageEventDefinition_1w1pnze_throw" messageRef="Message_1ntpwce" />
</bpmn:intermediateThrowEvent>
</bpmn:process>
<bpmndi:BPMNDiagram id="BPMNDiagram_1">
<bpmndi:BPMNPlane id="BPMNPlane_1" bpmnElement="top">
<bpmndi:BPMNShape id="Participant_0ctz0ow_di" bpmnElement="responder" isHorizontal="true">
<dc:Bounds x="120" y="62" width="430" height="250" />
<bpmndi:BPMNLabel />
</bpmndi:BPMNShape>
<bpmndi:BPMNShape id="Event_07g2lnb_di" bpmnElement="Event_07g2lnb">
<dc:Bounds x="192" y="169" width="36" height="36" />
</bpmndi:BPMNShape>
<bpmndi:BPMNShape id="Event_0hamwsf_di" bpmnElement="Event_0hamwsf">
<dc:Bounds x="432" y="169" width="36" height="36" />
</bpmndi:BPMNShape>
<bpmndi:BPMNShape id="Activity_1yaipjy_di" bpmnElement="any_task">
<dc:Bounds x="280" y="147" width="100" height="80" />
<bpmndi:BPMNLabel />
</bpmndi:BPMNShape>
<bpmndi:BPMNEdge id="Flow_04uk4n8_di" bpmnElement="Flow_04uk4n8">
<di:waypoint x="228" y="187" />
<di:waypoint x="280" y="187" />
</bpmndi:BPMNEdge>
<bpmndi:BPMNEdge id="Flow_08al33k_di" bpmnElement="Flow_08al33k">
<di:waypoint x="380" y="187" />
<di:waypoint x="432" y="187" />
</bpmndi:BPMNEdge>
<bpmndi:BPMNShape id="Participant_0gyj8ha_di" bpmnElement="initiator" isHorizontal="true">
<dc:Bounds x="120" y="350" width="430" height="250" />
<bpmndi:BPMNLabel />
</bpmndi:BPMNShape>
<bpmndi:BPMNShape id="Event_0jamixt_di" bpmnElement="Event_0jamixt">
<dc:Bounds x="192" y="452" width="36" height="36" />
</bpmndi:BPMNShape>
<bpmndi:BPMNShape id="Event_0vork94_di" bpmnElement="Event_0vork94">
<dc:Bounds x="432" y="452" width="36" height="36" />
</bpmndi:BPMNShape>
<bpmndi:BPMNShape id="Event_0n8a7vh_di" bpmnElement="Event_0n8a7vh">
<dc:Bounds x="322" y="452" width="36" height="36" />
</bpmndi:BPMNShape>
<bpmndi:BPMNEdge id="Flow_1wgdi4h_di" bpmnElement="Flow_1wgdi4h">
<di:waypoint x="228" y="470" />
<di:waypoint x="322" y="470" />
</bpmndi:BPMNEdge>
<bpmndi:BPMNEdge id="Flow_1wxjn4e_di" bpmnElement="Flow_1wxjn4e">
<di:waypoint x="358" y="470" />
<di:waypoint x="432" y="470" />
</bpmndi:BPMNEdge>
</bpmndi:BPMNPlane>
</bpmndi:BPMNDiagram>
</bpmn:definitions>


@@ -0,0 +1,87 @@
<?xml version="1.0" encoding="UTF-8"?>
<bpmn:definitions xmlns:bpmn="http://www.omg.org/spec/BPMN/20100524/MODEL" xmlns:bpmndi="http://www.omg.org/spec/BPMN/20100524/DI" xmlns:dc="http://www.omg.org/spec/DD/20100524/DC" xmlns:di="http://www.omg.org/spec/DD/20100524/DI" xmlns:camunda="http://camunda.org/schema/1.0/bpmn" xmlns:modeler="http://camunda.org/schema/modeler/1.0" id="Definitions_19o7vxg" targetNamespace="http://bpmn.io/schema/bpmn" exporter="Camunda Modeler" exporterVersion="5.0.0" modeler:executionPlatform="Camunda Platform" modeler:executionPlatformVersion="7.17.0">
<bpmn:message id="Message_0lyfmat" name="message_1" />
<bpmn:message id="Message_1ntpwce" name="message_2" />
<bpmn:collaboration id="top">
<bpmn:participant id="responder" name="Responder" processRef="respond" />
<bpmn:participant id="initiator" name="Initiator" processRef="initiate" />
</bpmn:collaboration>
<bpmn:process id="respond" name="Respond" isExecutable="true">
<bpmn:endEvent id="Event_0hamwsf">
<bpmn:incoming>Flow_1tr2mqr</bpmn:incoming>
</bpmn:endEvent>
<bpmn:sequenceFlow id="Flow_1tr2mqr" sourceRef="StartEvent_1" targetRef="Event_0hamwsf" />
<bpmn:intermediateCatchEvent id="StartEvent_1" parallelMultiple="true">
<bpmn:incoming>Flow_1wohnl8</bpmn:incoming>
<bpmn:outgoing>Flow_1tr2mqr</bpmn:outgoing>
<bpmn:messageEventDefinition id="MessageEventDefinition_158nhox" messageRef="Message_0lyfmat" />
<bpmn:messageEventDefinition id="MessageEventDefinition_1w1pnze" messageRef="Message_1ntpwce" />
</bpmn:intermediateCatchEvent>
<bpmn:startEvent id="Event_07g2lnb">
<bpmn:outgoing>Flow_1wohnl8</bpmn:outgoing>
</bpmn:startEvent>
<bpmn:sequenceFlow id="Flow_1wohnl8" sourceRef="Event_07g2lnb" targetRef="StartEvent_1" />
</bpmn:process>
<bpmn:process id="initiate" name="Initiate" isExecutable="true">
<bpmn:startEvent id="Event_0jamixt">
<bpmn:outgoing>Flow_1wgdi4h</bpmn:outgoing>
</bpmn:startEvent>
<bpmn:sequenceFlow id="Flow_1wgdi4h" sourceRef="Event_0jamixt" targetRef="Event_0n8a7vh" />
<bpmn:sequenceFlow id="Flow_1wxjn4e" sourceRef="Event_0n8a7vh" targetRef="Event_0vork94" />
<bpmn:endEvent id="Event_0vork94">
<bpmn:incoming>Flow_1wxjn4e</bpmn:incoming>
</bpmn:endEvent>
<bpmn:intermediateThrowEvent id="Event_0n8a7vh">
<bpmn:incoming>Flow_1wgdi4h</bpmn:incoming>
<bpmn:outgoing>Flow_1wxjn4e</bpmn:outgoing>
<bpmn:messageEventDefinition id="MessageEventDefinition_158nhox_throw" messageRef="Message_0lyfmat" />
<bpmn:messageEventDefinition id="MessageEventDefinition_1w1pnze_throw" messageRef="Message_1ntpwce" />
</bpmn:intermediateThrowEvent>
</bpmn:process>
<bpmndi:BPMNDiagram id="BPMNDiagram_1">
<bpmndi:BPMNPlane id="BPMNPlane_1" bpmnElement="top">
<bpmndi:BPMNShape id="Participant_0ctz0ow_di" bpmnElement="responder" isHorizontal="true">
<dc:Bounds x="120" y="62" width="430" height="250" />
<bpmndi:BPMNLabel />
</bpmndi:BPMNShape>
<bpmndi:BPMNShape id="Event_0hamwsf_di" bpmnElement="Event_0hamwsf">
<dc:Bounds x="432" y="169" width="36" height="36" />
</bpmndi:BPMNShape>
<bpmndi:BPMNShape id="Event_1mddj6x_di" bpmnElement="StartEvent_1">
<dc:Bounds x="312" y="169" width="36" height="36" />
</bpmndi:BPMNShape>
<bpmndi:BPMNShape id="Event_07g2lnb_di" bpmnElement="Event_07g2lnb">
<dc:Bounds x="192" y="169" width="36" height="36" />
</bpmndi:BPMNShape>
<bpmndi:BPMNEdge id="Flow_1tr2mqr_di" bpmnElement="Flow_1tr2mqr">
<di:waypoint x="348" y="187" />
<di:waypoint x="432" y="187" />
</bpmndi:BPMNEdge>
<bpmndi:BPMNEdge id="Flow_1wohnl8_di" bpmnElement="Flow_1wohnl8">
<di:waypoint x="228" y="187" />
<di:waypoint x="312" y="187" />
</bpmndi:BPMNEdge>
<bpmndi:BPMNShape id="Participant_0gyj8ha_di" bpmnElement="initiator" isHorizontal="true">
<dc:Bounds x="120" y="350" width="430" height="250" />
<bpmndi:BPMNLabel />
</bpmndi:BPMNShape>
<bpmndi:BPMNShape id="Event_0jamixt_di" bpmnElement="Event_0jamixt">
<dc:Bounds x="192" y="452" width="36" height="36" />
</bpmndi:BPMNShape>
<bpmndi:BPMNShape id="Event_0vork94_di" bpmnElement="Event_0vork94">
<dc:Bounds x="432" y="452" width="36" height="36" />
</bpmndi:BPMNShape>
<bpmndi:BPMNShape id="Event_0n8a7vh_di" bpmnElement="Event_0n8a7vh">
<dc:Bounds x="322" y="452" width="36" height="36" />
</bpmndi:BPMNShape>
<bpmndi:BPMNEdge id="Flow_1wgdi4h_di" bpmnElement="Flow_1wgdi4h">
<di:waypoint x="228" y="470" />
<di:waypoint x="322" y="470" />
</bpmndi:BPMNEdge>
<bpmndi:BPMNEdge id="Flow_1wxjn4e_di" bpmnElement="Flow_1wxjn4e">
<di:waypoint x="358" y="470" />
<di:waypoint x="432" y="470" />
</bpmndi:BPMNEdge>
</bpmndi:BPMNPlane>
</bpmndi:BPMNDiagram>
</bpmn:definitions>


@@ -0,0 +1,46 @@
from datetime import timedelta
from SpiffWorkflow.bpmn.workflow import BpmnWorkflow
from SpiffWorkflow.bpmn.PythonScriptEngine import PythonScriptEngine
from SpiffWorkflow.bpmn.specs.events.event_definitions import MessageEventDefinition
from SpiffWorkflow.task import TaskState
from ..BpmnWorkflowTestCase import BpmnWorkflowTestCase
class EventBasedGatewayTest(BpmnWorkflowTestCase):
def setUp(self):
self.spec, self.subprocesses = self.load_workflow_spec('event-gateway.bpmn', 'Process_0pvx19v')
self.script_engine = PythonScriptEngine(default_globals={"timedelta": timedelta})
self.workflow = BpmnWorkflow(self.spec, script_engine=self.script_engine)
def testEventBasedGateway(self):
self.actual_test()
def testEventBasedGatewaySaveRestore(self):
self.actual_test(True)
def actual_test(self, save_restore=False):
self.workflow.do_engine_steps()
waiting_tasks = self.workflow.get_waiting_tasks()
if save_restore:
self.save_restore()
self.workflow.script_engine = self.script_engine
self.assertEqual(len(waiting_tasks), 1)
self.workflow.catch(MessageEventDefinition('message_1'))
self.workflow.refresh_waiting_tasks()
self.workflow.do_engine_steps()
self.assertEqual(self.workflow.is_completed(), True)
self.assertEqual(self.workflow.get_tasks_from_spec_name('message_1_event')[0].state, TaskState.COMPLETED)
self.assertEqual(self.workflow.get_tasks_from_spec_name('message_2_event')[0].state, TaskState.CANCELLED)
self.assertEqual(self.workflow.get_tasks_from_spec_name('timer_event')[0].state, TaskState.CANCELLED)
def testMultipleStart(self):
spec, subprocess = self.load_workflow_spec('multiple-start-parallel.bpmn', 'main')
workflow = BpmnWorkflow(spec)
workflow.do_engine_steps()
workflow.catch(MessageEventDefinition('message_1'))
workflow.catch(MessageEventDefinition('message_2'))
workflow.refresh_waiting_tasks()
workflow.do_engine_steps()


@@ -0,0 +1,81 @@
from SpiffWorkflow.bpmn.workflow import BpmnWorkflow
from SpiffWorkflow.bpmn.specs.events.event_definitions import MessageEventDefinition
from ..BpmnWorkflowTestCase import BpmnWorkflowTestCase
class MultipleStartEventTest(BpmnWorkflowTestCase):
def setUp(self):
self.spec, self.subprocesses = self.load_workflow_spec('multiple-start.bpmn', 'main')
self.workflow = BpmnWorkflow(self.spec)
def testMultipleStartEvent(self):
self.actual_test()
def testMultipleStartEventSaveRestore(self):
self.actual_test(True)
def actual_test(self, save_restore=False):
self.workflow.do_engine_steps()
waiting_tasks = self.workflow.get_waiting_tasks()
if save_restore:
self.save_restore()
# The start event should be waiting
self.assertEqual(len(waiting_tasks), 1)
self.assertEqual(waiting_tasks[0].task_spec.name, 'StartEvent_1')
self.workflow.catch(MessageEventDefinition('message_1'))
self.workflow.refresh_waiting_tasks()
self.workflow.do_engine_steps()
# Now the first task should be ready
ready_tasks = self.workflow.get_ready_user_tasks()
self.assertEqual(len(ready_tasks), 1)
self.assertEqual(ready_tasks[0].task_spec.name, 'any_task')
class ParallelStartEventTest(BpmnWorkflowTestCase):
def setUp(self):
self.spec, self.subprocesses = self.load_workflow_spec('multiple-start-parallel.bpmn', 'main')
self.workflow = BpmnWorkflow(self.spec)
def testParallelStartEvent(self):
self.actual_test()
def testParallelStartEventSaveRestore(self):
self.actual_test(True)
def actual_test(self, save_restore=False):
self.workflow.do_engine_steps()
waiting_tasks = self.workflow.get_waiting_tasks()
if save_restore:
self.save_restore()
# The start event should be waiting
self.assertEqual(len(waiting_tasks), 1)
self.assertEqual(waiting_tasks[0].task_spec.name, 'StartEvent_1')
self.workflow.catch(MessageEventDefinition('message_1'))
self.workflow.refresh_waiting_tasks()
self.workflow.do_engine_steps()
# It should still be waiting because it has to receive both messages
waiting_tasks = self.workflow.get_waiting_tasks()
self.assertEqual(len(waiting_tasks), 1)
self.assertEqual(waiting_tasks[0].task_spec.name, 'StartEvent_1')
self.workflow.catch(MessageEventDefinition('message_2'))
self.workflow.refresh_waiting_tasks()
self.workflow.do_engine_steps()
# Now the first task should be ready
ready_tasks = self.workflow.get_ready_user_tasks()
self.assertEqual(len(ready_tasks), 1)
self.assertEqual(ready_tasks[0].task_spec.name, 'any_task')


@@ -0,0 +1,47 @@
from SpiffWorkflow.bpmn.workflow import BpmnWorkflow
from ..BpmnWorkflowTestCase import BpmnWorkflowTestCase
class MultipleThrowEventIntermediateCatchTest(BpmnWorkflowTestCase):
def setUp(self):
self.spec, subprocesses = self.load_collaboration('multiple-throw.bpmn', 'top')
self.workflow = BpmnWorkflow(self.spec, subprocesses)
def testMultipleThrowEventIntermediateCatch(self):
self.actual_test()
def testMultipleThrowEventIntermediateCatchSaveRestore(self):
self.actual_test(True)
def actual_test(self, save_restore=False):
if save_restore:
self.save_restore()
self.workflow.do_engine_steps()
self.assertEqual(len(self.workflow.get_waiting_tasks()), 0)
self.assertEqual(self.workflow.is_completed(), True)
class MultipleThrowEventStartsEventTest(BpmnWorkflowTestCase):
def setUp(self):
specs = self.get_all_specs('multiple-throw-start.bpmn')
self.spec = specs.pop('initiate')
self.workflow = BpmnWorkflow(self.spec, specs)
def testMultipleThrowEventStartEvent(self):
self.actual_test()
def testMultipleThrowEventStartEventSaveRestore(self):
self.actual_test(True)
def actual_test(self, save_restore=False):
if save_restore:
self.save_restore()
self.workflow.do_engine_steps()
ready_tasks = self.workflow.get_ready_user_tasks()
self.assertEqual(len(ready_tasks), 1)
ready_tasks[0].complete()
self.workflow.do_engine_steps()
self.assertEqual(self.workflow.is_completed(), True)


@@ -3,6 +3,8 @@ import unittest
from SpiffWorkflow.dmn.engine.DMNEngine import DMNEngine
from SpiffWorkflow.dmn.parser.BpmnDmnParser import BpmnDmnParser
from SpiffWorkflow.dmn.serializer.task_spec_converters import \
BusinessRuleTaskConverter
from tests.SpiffWorkflow.bpmn.BpmnWorkflowTestCase import BpmnWorkflowTestCase
from tests.SpiffWorkflow.dmn.DecisionRunner import DecisionRunner
from tests.SpiffWorkflow.dmn.python_engine.PythonDecisionRunner import \
@@ -27,7 +29,18 @@ class HitPolicyTest(BpmnWorkflowTestCase):
self.assertEqual('COLLECT', decision_table.hit_policy)
res = runner.result({'type': 'stooge'})
self.assertEqual(4, len(res['name']))
res = runner.result({'type': 'farmer'})
self.assertEqual(1, len(res['name']))
self.assertEqual('Elmer Fudd', res['name'][0])
def testSerializeHitPolicy(self):
file_name = os.path.join(os.path.dirname(__file__), 'data', 'collect_hit.dmn')
runner = PythonDecisionRunner(file_name)
decision_table = runner.decision_table
self.assertEqual("COLLECT", decision_table.hit_policy)
dct = BusinessRuleTaskConverter().decision_table_to_dict(decision_table)
new_table = BusinessRuleTaskConverter().decision_table_from_dict(dct)
self.assertEqual("COLLECT", new_table.hit_policy)
def suite():
return unittest.TestLoader().loadTestsFromTestCase(HitPolicyTest)


@@ -3,9 +3,7 @@
import os
import sys
import unittest
sys.path.insert(0, os.path.join(os.path.dirname(__file__), '..', '..', '..'))
from tests.SpiffWorkflow.util import run_workflow
from .TaskSpecTest import TaskSpecTest
@@ -25,7 +23,8 @@ class ExecuteTest(TaskSpecTest):
args=self.cmd_args)
def setUp(self):
self.cmd_args = ["python", "ExecuteProcessMock.py"]
script_path = os.path.join(os.path.dirname(__file__), '..', 'ExecuteProcessMock.py')
self.cmd_args = ["python", script_path]
TaskSpecTest.setUp(self)
def testConstructor(self):


@@ -0,0 +1,27 @@
FROM ghcr.io/sartography/python:3.11
RUN pip install poetry
RUN useradd _gunicorn --no-create-home --user-group
RUN apt-get update && \
apt-get install -y -q \
gcc libssl-dev \
curl gunicorn3
WORKDIR /app
COPY pyproject.toml poetry.lock /app/
RUN poetry install --without dev
RUN set -xe \
&& apt-get remove -y gcc python3-dev libssl-dev \
&& apt-get autoremove -y \
&& apt-get clean -y \
&& rm -rf /var/lib/apt/lists/*
COPY . /app/
# run poetry install again AFTER copying the app into the image
# otherwise it does not know what the main app module is
RUN poetry install --without dev
CMD ./bin/boot_server_in_docker


@@ -0,0 +1,19 @@
#!/usr/bin/env bash
function error_handler() {
>&2 echo "Exited with BAD EXIT CODE '${2}' in ${0} script at line: ${1}."
exit "$2"
}
trap 'error_handler ${LINENO} $?' ERR
set -o errtrace -o errexit -o nounset -o pipefail
port="${CONNECTOR_PROXY_PORT:-}"
if [[ -z "$port" ]]; then
port=7004
fi
workers=3
# THIS MUST BE THE LAST COMMAND!
# default --limit-request-line is 4094. see https://stackoverflow.com/a/66688382/6090676
exec poetry run gunicorn --bind "0.0.0.0:$port" --workers="$workers" --limit-request-line 8192 --timeout 90 --capture-output --access-logfile '-' --log-level debug app:app


@@ -55,7 +55,7 @@ optional = false
python-versions = ">=3.6.0"
[package.extras]
unicode_backport = ["unicodedata2"]
unicode-backport = ["unicodedata2"]
[[package]]
name = "click"
@@ -127,6 +127,23 @@ Flask = "*"
oauthlib = ">=1.1.2,<2.0.3 || >2.0.3,<2.0.4 || >2.0.4,<2.0.5 || >2.0.5,<3.0.0"
requests-oauthlib = ">=0.6.2,<1.2.0"
[[package]]
name = "gunicorn"
version = "20.1.0"
description = "WSGI HTTP Server for UNIX"
category = "main"
optional = false
python-versions = ">=3.5"
[package.dependencies]
setuptools = ">=3.0"
[package.extras]
eventlet = ["eventlet (>=0.24.1)"]
gevent = ["gevent (>=1.4.0)"]
setproctitle = ["setproctitle"]
tornado = ["tornado (>=0.2)"]
[[package]]
name = "idna"
version = "3.4"
@@ -214,7 +231,7 @@ urllib3 = ">=1.21.1,<1.27"
[package.extras]
socks = ["PySocks (>=1.5.6,!=1.5.7)"]
use_chardet_on_py3 = ["chardet (>=3.0.2,<6)"]
use-chardet-on-py3 = ["chardet (>=3.0.2,<6)"]
[[package]]
name = "requests-oauthlib"
@@ -245,6 +262,19 @@ botocore = ">=1.12.36,<2.0a.0"
[package.extras]
crt = ["botocore[crt] (>=1.20.29,<2.0a.0)"]
[[package]]
name = "setuptools"
version = "65.6.0"
description = "Easily download, build, install, upgrade, and uninstall Python packages"
category = "main"
optional = false
python-versions = ">=3.7"
[package.extras]
docs = ["furo", "jaraco.packaging (>=9)", "jaraco.tidelift (>=1.4)", "pygments-github-lexers (==0.0.5)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-favicon", "sphinx-hoverxref (<2)", "sphinx-inline-tabs", "sphinx-notfound-page (==0.8.3)", "sphinx-reredirects", "sphinxcontrib-towncrier"]
testing = ["build[virtualenv]", "filelock (>=3.4.0)", "flake8 (<5)", "flake8-2020", "ini2toml[lite] (>=0.9)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "pip (>=19.1)", "pip-run (>=8.8)", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=1.3)", "pytest-flake8", "pytest-mypy (>=0.9.1)", "pytest-perf", "pytest-timeout", "pytest-xdist", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel"]
testing-integration = ["build[virtualenv]", "filelock (>=3.4.0)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "pytest", "pytest-enabler", "pytest-xdist", "tomli", "virtualenv (>=13.0.0)", "wheel"]
[[package]]
name = "simplejson"
version = "3.17.6"
@@ -310,7 +340,7 @@ watchdog = ["watchdog"]
[metadata]
lock-version = "1.1"
python-versions = "^3.10"
content-hash = "86cf682d49dc495c8cf6dc60a8aedc31ad32a293e6ceaf7b1428e0c232f8319e"
content-hash = "cc395c0c1ce2b0b7ca063a17617981b2d55db39802265b36f0bc3c4383c89919"
[metadata.files]
boto3 = [
@@ -350,6 +380,10 @@ Flask-OAuthlib = [
{file = "Flask-OAuthlib-0.9.6.tar.gz", hash = "sha256:5bb79c8a8e670c2eb4cb553dfc3283b6c8d1202f674934676dc173cee94fe39c"},
{file = "Flask_OAuthlib-0.9.6-py3-none-any.whl", hash = "sha256:a5c3b62959aa1922470a62b6ebf4273b75f1c29561a7eb4a69cde85d45a1d669"},
]
gunicorn = [
{file = "gunicorn-20.1.0-py3-none-any.whl", hash = "sha256:9dcc4547dbb1cb284accfb15ab5667a0e5d1881cc443e0677b4882a4067a807e"},
{file = "gunicorn-20.1.0.tar.gz", hash = "sha256:e0a968b5ba15f8a328fdfd7ab1fcb5af4470c28aaf7e55df02a99bc13138e6e8"},
]
idna = [
{file = "idna-3.4-py3-none-any.whl", hash = "sha256:90b77e79eaa3eba6de819a0c442c0b4ceefc341a7a2ab77d7562bf49f425c5c2"},
{file = "idna-3.4.tar.gz", hash = "sha256:814f528e8dead7d329833b91c5faa87d60bf71824cd12a7530b5526063d02cb4"},
@@ -428,6 +462,10 @@ s3transfer = [
{file = "s3transfer-0.6.0-py3-none-any.whl", hash = "sha256:06176b74f3a15f61f1b4f25a1fc29a4429040b7647133a463da8fa5bd28d5ecd"},
{file = "s3transfer-0.6.0.tar.gz", hash = "sha256:2ed07d3866f523cc561bf4a00fc5535827981b117dd7876f036b0c1aca42c947"},
]
setuptools = [
{file = "setuptools-65.6.0-py3-none-any.whl", hash = "sha256:6211d2f5eddad8757bd0484923ca7c0a6302ebc4ab32ea5e94357176e0ca0840"},
{file = "setuptools-65.6.0.tar.gz", hash = "sha256:d1eebf881c6114e51df1664bc2c9133d022f78d12d5f4f665b9191f084e2862d"},
]
simplejson = [
{file = "simplejson-3.17.6-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:a89acae02b2975b1f8e4974cb8cdf9bf9f6c91162fb8dec50c259ce700f2770a"},
{file = "simplejson-3.17.6-cp27-cp27m-manylinux1_i686.whl", hash = "sha256:82ff356ff91be0ab2293fc6d8d262451eb6ac4fd999244c4b5f863e049ba219c"},


@@ -5,14 +5,14 @@ description = "An example showing how to use the Spiffworkflow-proxy's Flask Blu
authors = ["Dan <dan@sartography.com>"]
license = "LGPL"
readme = "README.md"
packages = [{include = "connector_proxy_demo", from = "src"}]
#packages = [{include = "connector_proxy_demo", from = "."}]
[tool.poetry.dependencies]
python = "^3.10"
Flask = "^2.2.2"
spiffworkflow-proxy = {git = "https://github.com/sartography/spiffworkflow-proxy"}
connector-aws = { git = "https://github.com/sartography/connector-aws.git"}
gunicorn = "^20.1.0"
[build-system]
requires = ["poetry-core"]
@ -20,5 +20,5 @@ build-backend = "poetry.core.masonry.api"
[tool.pytest.ini_options]
pythonpath = [
".", "src",
"."
]

83
docker-compose.yml Normal file
View File

@ -0,0 +1,83 @@
version: "3.8"
services:
spiffworkflow-db:
container_name: spiffworkflow-db
image: mysql:8.0.29
platform: linux/amd64
cap_add:
- SYS_NICE
restart: "no"
environment:
- MYSQL_DATABASE=spiffworkflow_backend_development
- MYSQL_ROOT_PASSWORD=my-secret-pw
- MYSQL_TCP_PORT=7003
ports:
- "7003"
healthcheck:
test: mysql --user=root --password=my-secret-pw -e 'select 1' spiffworkflow_backend_development
interval: 10s
timeout: 5s
retries: 10
spiffworkflow-backend:
container_name: spiffworkflow-backend
image: ghcr.io/sartography/spiffworkflow-backend:latest
depends_on:
spiffworkflow-db:
condition: service_healthy
environment:
- APPLICATION_ROOT=/
- SPIFFWORKFLOW_BACKEND_ENV=development
- FLASK_DEBUG=0
- FLASK_SESSION_SECRET_KEY=super_secret_key
- OPEN_ID_SERVER_URL=http://localhost:7000/openid
- SPIFFWORKFLOW_FRONTEND_URL=http://localhost:7001
- SPIFFWORKFLOW_BACKEND_URL=http://localhost:7000
- SPIFFWORKFLOW_BACKEND_PORT=7000
- SPIFFWORKFLOW_BACKEND_UPGRADE_DB=true
- SPIFFWORKFLOW_BACKEND_DATABASE_URI=mysql+mysqlconnector://root:my-secret-pw@spiffworkflow-db:7003/spiffworkflow_backend_development
- BPMN_SPEC_ABSOLUTE_DIR=/app/process_models
- SPIFFWORKFLOW_BACKEND_LOAD_FIXTURE_DATA=false
- SPIFFWORKFLOW_BACKEND_PERMISSIONS_FILE_NAME=example.yml
- RUN_BACKGROUND_SCHEDULER=true
- OPEN_ID_CLIENT_ID=spiffworkflow-backend
- OPEN_ID_CLIENT_SECRET_KEY=my_open_id_secret_key
ports:
- "7000:7000"
volumes:
- ./process_models:/app/process_models
- ./log:/app/log
healthcheck:
test: curl localhost:7000/v1.0/status --fail
interval: 10s
timeout: 5s
retries: 20
spiffworkflow-frontend:
container_name: spiffworkflow-frontend
image: ghcr.io/sartography/spiffworkflow-frontend
environment:
- APPLICATION_ROOT=/
- PORT0=7001
ports:
- "7001:7001"
spiffworkflow-connector:
container_name: spiffworkflow-connector
image: ghcr.io/sartography/connector-proxy-demo
environment:
- FLASK_ENV=${FLASK_ENV:-development}
- FLASK_DEBUG=0
- FLASK_SESSION_SECRET_KEY=${FLASK_SESSION_SECRET_KEY:-super_secret_key}
ports:
- "7004:7004"
healthcheck:
test: curl localhost:7004/liveness --fail
interval: 10s
timeout: 5s
retries: 20
volumes:
spiffworkflow_backend:
driver: local
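For reference, the two healthcheck commands above poll the backend's /v1.0/status endpoint and the connector's /liveness endpoint. A minimal sketch of the same probes run from the host, assuming the `requests` package is installed and the published ports (7000 and 7004) from this compose file:

import requests  # assumes the requests package is available on the host

# Poll the same endpoints the compose healthchecks use, from outside the containers.
for name, url in {
    "backend": "http://localhost:7000/v1.0/status",
    "connector": "http://localhost:7004/liveness",
}.items():
    try:
        response = requests.get(url, timeout=5)
        print(f"{name}: HTTP {response.status_code}")
    except requests.exceptions.ConnectionError:
        print(f"{name}: not reachable yet")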

View File

@ -64,7 +64,6 @@ sphinx-click = "^4.3.0"
Pygments = "^2.13.0"
pyupgrade = "^3.2.2"
furo = ">=2021.11.12"
MonkeyType = "^22.2.0"
[tool.poetry.scripts]
flask-bpmn = "flask_bpmn.__main__:main"

83
poetry.lock generated
View File

@ -614,7 +614,7 @@ werkzeug = "*"
type = "git"
url = "https://github.com/sartography/flask-bpmn"
reference = "main"
resolved_reference = "5e40777f4013f71f2c1237f13f7dba1bdd5c0de3"
resolved_reference = "860f2387bebdaa9220e9fbf6f8fa7f74e805d0d4"
[[package]]
name = "flask-cors"
@ -884,22 +884,6 @@ category = "main"
optional = false
python-versions = ">=3.7"
[[package]]
name = "libcst"
version = "0.4.7"
description = "A concrete syntax tree with AST-like properties for Python 3.5, 3.6, 3.7, 3.8, 3.9, and 3.10 programs."
category = "dev"
optional = false
python-versions = ">=3.7"
[package.dependencies]
pyyaml = ">=5.2"
typing-extensions = ">=3.7.4.2"
typing-inspect = ">=0.4.0"
[package.extras]
dev = ["black (==22.3.0)", "coverage (>=4.5.4)", "fixit (==0.1.1)", "flake8 (>=3.7.8)", "hypothesis (>=4.36.0)", "hypothesmith (>=0.0.4)", "jinja2 (==3.0.3)", "jupyter (>=1.0.0)", "maturin (>=0.8.3,<0.9)", "nbsphinx (>=0.4.2)", "prompt-toolkit (>=2.0.9)", "pyre-check (==0.9.9)", "setuptools-rust (>=0.12.1)", "setuptools-scm (>=6.0.1)", "slotscheck (>=0.7.1)", "sphinx-rtd-theme (>=0.4.3)", "ufmt (==1.3)", "usort (==1.0.0rc1)"]
[[package]]
name = "livereload"
version = "2.6.3"
@ -1005,18 +989,6 @@ category = "dev"
optional = false
python-versions = "*"
[[package]]
name = "monkeytype"
version = "22.2.0"
description = "Generating type annotations from sampled production types"
category = "dev"
optional = false
python-versions = ">=3.6"
[package.dependencies]
libcst = ">=0.3.7"
mypy-extensions = "*"
[[package]]
name = "mypy"
version = "0.982"
@ -1788,7 +1760,7 @@ lxml = "*"
type = "git"
url = "https://github.com/sartography/SpiffWorkflow"
reference = "main"
resolved_reference = "580939cc8cb0b7ade1571483bd1e28f554434ac4"
resolved_reference = "bba7ddf5478af579b891ca63c50babbfccf6b7a4"
[[package]]
name = "sqlalchemy"
@ -1998,18 +1970,6 @@ category = "main"
optional = false
python-versions = ">=3.7"
[[package]]
name = "typing-inspect"
version = "0.8.0"
description = "Runtime inspection utilities for typing module."
category = "dev"
optional = false
python-versions = "*"
[package.dependencies]
mypy-extensions = ">=0.3.0"
typing-extensions = ">=3.7.4"
[[package]]
name = "tzdata"
version = "2022.5"
@ -2151,7 +2111,7 @@ tests-strict = ["cmake (==3.21.2)", "codecov (==2.0.15)", "ninja (==1.10.2)", "p
[metadata]
lock-version = "1.1"
python-versions = ">=3.11,<3.12"
content-hash = "8c37333988fdd68bc6868faf474e628a690582acd17ee3b31b18e005a864fecf"
content-hash = "17e037a3784758eb23a5ed9889fd774913ebde97225692dcd9df159f03da8a22"
[metadata.files]
alabaster = [
@ -2484,6 +2444,7 @@ greenlet = [
{file = "greenlet-2.0.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d5b0ff9878333823226d270417f24f4d06f235cb3e54d1103b71ea537a6a86ce"},
{file = "greenlet-2.0.1-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:be9e0fb2ada7e5124f5282d6381903183ecc73ea019568d6d63d33f25b2a9000"},
{file = "greenlet-2.0.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0b493db84d124805865adc587532ebad30efa68f79ad68f11b336e0a51ec86c2"},
{file = "greenlet-2.0.1-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:0459d94f73265744fee4c2d5ec44c6f34aa8a31017e6e9de770f7bcf29710be9"},
{file = "greenlet-2.0.1-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:a20d33124935d27b80e6fdacbd34205732660e0a1d35d8b10b3328179a2b51a1"},
{file = "greenlet-2.0.1-cp37-cp37m-win32.whl", hash = "sha256:ea688d11707d30e212e0110a1aac7f7f3f542a259235d396f88be68b649e47d1"},
{file = "greenlet-2.0.1-cp37-cp37m-win_amd64.whl", hash = "sha256:afe07421c969e259e9403c3bb658968702bc3b78ec0b6fde3ae1e73440529c23"},
@ -2492,6 +2453,7 @@ greenlet = [
{file = "greenlet-2.0.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:659f167f419a4609bc0516fb18ea69ed39dbb25594934bd2dd4d0401660e8a1e"},
{file = "greenlet-2.0.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:356e4519d4dfa766d50ecc498544b44c0249b6de66426041d7f8b751de4d6b48"},
{file = "greenlet-2.0.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:811e1d37d60b47cb8126e0a929b58c046251f28117cb16fcd371eed61f66b764"},
{file = "greenlet-2.0.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:d38ffd0e81ba8ef347d2be0772e899c289b59ff150ebbbbe05dc61b1246eb4e0"},
{file = "greenlet-2.0.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:0109af1138afbfb8ae647e31a2b1ab030f58b21dd8528c27beaeb0093b7938a9"},
{file = "greenlet-2.0.1-cp38-cp38-win32.whl", hash = "sha256:88c8d517e78acdf7df8a2134a3c4b964415b575d2840a2746ddb1cc6175f8608"},
{file = "greenlet-2.0.1-cp38-cp38-win_amd64.whl", hash = "sha256:d6ee1aa7ab36475035eb48c01efae87d37936a8173fc4d7b10bb02c2d75dd8f6"},
@ -2500,6 +2462,7 @@ greenlet = [
{file = "greenlet-2.0.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:505138d4fa69462447a562a7c2ef723c6025ba12ac04478bc1ce2fcc279a2db5"},
{file = "greenlet-2.0.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:cce1e90dd302f45716a7715517c6aa0468af0bf38e814ad4eab58e88fc09f7f7"},
{file = "greenlet-2.0.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9e9744c657d896c7b580455e739899e492a4a452e2dd4d2b3e459f6b244a638d"},
{file = "greenlet-2.0.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:662e8f7cad915ba75d8017b3e601afc01ef20deeeabf281bd00369de196d7726"},
{file = "greenlet-2.0.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:41b825d65f31e394b523c84db84f9383a2f7eefc13d987f308f4663794d2687e"},
{file = "greenlet-2.0.1-cp39-cp39-win32.whl", hash = "sha256:db38f80540083ea33bdab614a9d28bcec4b54daa5aff1668d7827a9fc769ae0a"},
{file = "greenlet-2.0.1-cp39-cp39-win_amd64.whl", hash = "sha256:b23d2a46d53210b498e5b701a1913697671988f4bf8e10f935433f6e7c332fb6"},
@ -2566,32 +2529,6 @@ lazy-object-proxy = [
{file = "lazy_object_proxy-1.8.0-pp38-pypy38_pp73-any.whl", hash = "sha256:7e1561626c49cb394268edd00501b289053a652ed762c58e1081224c8d881cec"},
{file = "lazy_object_proxy-1.8.0-pp39-pypy39_pp73-any.whl", hash = "sha256:ce58b2b3734c73e68f0e30e4e725264d4d6be95818ec0a0be4bb6bf9a7e79aa8"},
]
libcst = [
{file = "libcst-0.4.7-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:dc6f8965b6ca68d47e11321772887d81fa6fd8ea86e6ef87434ca2147de10747"},
{file = "libcst-0.4.7-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:a8f47d809df59fcd83058b777b86a300154ee3a1f1b0523a398a67b5f8affd4c"},
{file = "libcst-0.4.7-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c0d19de56aa733b4ef024527e3ce4896d4b0e9806889797f409ec24caa651a44"},
{file = "libcst-0.4.7-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:31da97bc986dc3f7a97f7d431fa911932aaf716d2f8bcda947fc964afd3b57cd"},
{file = "libcst-0.4.7-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:71b2e2c5e33e53669c20de0853cecfac1ffb8657ee727ab8527140f39049b820"},
{file = "libcst-0.4.7-cp310-cp310-win_amd64.whl", hash = "sha256:76fae68bd6b7ce069e267b3322c806b4305341cea78d161ae40e0ed641c8c660"},
{file = "libcst-0.4.7-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:bac76d69980bb3254f503f52128c256ef4d1bcbaabe4a17c3a9ebcd1fc0472c0"},
{file = "libcst-0.4.7-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:26f86535271eaefe84a99736875566a038449f92e1a2a61ea0b588d8359fbefd"},
{file = "libcst-0.4.7-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:617f7fa2610a8c86cf22d8d03416f25391383d05bd0ad1ca8ef68023ddd6b4f6"},
{file = "libcst-0.4.7-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c3637fffe476c5b4ee2225c6474b83382518f2c1b2fe4771039e06bdd7835a4a"},
{file = "libcst-0.4.7-cp37-cp37m-win_amd64.whl", hash = "sha256:f56565124c2541adee0634e411b2126b3f335306d19e91ed2bfe52efa698b219"},
{file = "libcst-0.4.7-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:0ca2771ff3cfdf1f148349f89fcae64afa365213ed5c2703a69a89319325d0c8"},
{file = "libcst-0.4.7-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:aa438131b7befc7e5a3cbadb5a7b1506305de5d62262ea0556add0152f40925e"},
{file = "libcst-0.4.7-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8c6bd66a8be2ffad7b968d90dae86c62fd4739c0e011d71f3e76544a891ae743"},
{file = "libcst-0.4.7-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:214a9c4f4f90cd5b4bfa18e17877da4dd9a896821d9af9be86fa3effdc289b9b"},
{file = "libcst-0.4.7-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:27a37f2b459a8b51a41e260bd89c24ae41ab1d658f610c91650c79b1bbf27138"},
{file = "libcst-0.4.7-cp38-cp38-win_amd64.whl", hash = "sha256:2f6766391d90472f036b88a95251c87d498ab068c377724f212ab0cc20509a68"},
{file = "libcst-0.4.7-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:234293aa8681a3d47fef1716c5622797a81cbe85a9381fe023815468cfe20eed"},
{file = "libcst-0.4.7-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:fa618dc359663a0a097c633452b104c1ca93365da7a811e655c6944f6b323239"},
{file = "libcst-0.4.7-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3569d9901c18940632414fb7a0943bffd326db9f726a9c041664926820857815"},
{file = "libcst-0.4.7-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:beb5347e46b419f782589da060e9300957e71d561aa5574309883b71f93c1dfe"},
{file = "libcst-0.4.7-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1e541ccfeebda1ae5f005fc120a5bf3e8ac9ccfda405ec3efd3df54fc4688ac3"},
{file = "libcst-0.4.7-cp39-cp39-win_amd64.whl", hash = "sha256:3a2b7253cd2e3f0f8a3e23b5c2acb492811d865ef36e0816091c925f32b713d2"},
{file = "libcst-0.4.7.tar.gz", hash = "sha256:95c52c2130531f6e726a3b077442cfd486975435fecf3db8224d43fba7b85099"},
]
livereload = [
{file = "livereload-2.6.3.tar.gz", hash = "sha256:776f2f865e59fde56490a56bcc6773b6917366bce0c267c60ee8aaf1a0959869"},
]
@ -2729,10 +2666,6 @@ mccabe = [
{file = "mccabe-0.6.1-py2.py3-none-any.whl", hash = "sha256:ab8a6258860da4b6677da4bd2fe5dc2c659cff31b3ee4f7f5d64e79735b80d42"},
{file = "mccabe-0.6.1.tar.gz", hash = "sha256:dd8d182285a0fe56bace7f45b5e7d1a6ebcbf524e8f3bd87eb0f125271b8831f"},
]
monkeytype = [
{file = "MonkeyType-22.2.0-py3-none-any.whl", hash = "sha256:3d0815c7e98a18e9267990a452548247f6775fd636e65df5a7d77100ea7ad282"},
{file = "MonkeyType-22.2.0.tar.gz", hash = "sha256:6b0c00b49dcc5095a2c08d28246cf005e05673fc51f64d203f9a6bca2036dfab"},
]
mypy = [
{file = "mypy-0.982-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:5085e6f442003fa915aeb0a46d4da58128da69325d8213b4b35cc7054090aed5"},
{file = "mypy-0.982-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:41fd1cf9bc0e1c19b9af13a6580ccb66c381a5ee2cf63ee5ebab747a4badeba3"},
@ -3336,10 +3269,6 @@ typing-extensions = [
{file = "typing_extensions-4.4.0-py3-none-any.whl", hash = "sha256:16fa4864408f655d35ec496218b85f79b3437c829e93320c7c9215ccfd92489e"},
{file = "typing_extensions-4.4.0.tar.gz", hash = "sha256:1511434bb92bf8dd198c12b1cc812e800d4181cfcb867674e0f8279cc93087aa"},
]
typing-inspect = [
{file = "typing_inspect-0.8.0-py3-none-any.whl", hash = "sha256:5fbf9c1e65d4fa01e701fe12a5bca6c6e08a4ffd5bc60bfac028253a447c5188"},
{file = "typing_inspect-0.8.0.tar.gz", hash = "sha256:8b1ff0c400943b6145df8119c41c244ca8207f1f10c9c057aeed1560e4806e3d"},
]
tzdata = [
{file = "tzdata-2022.5-py2.py3-none-any.whl", hash = "sha256:323161b22b7802fdc78f20ca5f6073639c64f1a7227c40cd3e19fd1d0ce6650a"},
{file = "tzdata-2022.5.tar.gz", hash = "sha256:e15b2b3005e2546108af42a0eb4ccab4d9e225e2dfbf4f77aad50c70a4b1f3ab"},

View File

@ -99,7 +99,6 @@ sphinx-click = "^4.3.0"
Pygments = "^2.10.0"
pyupgrade = "^3.1.0"
furo = ">=2021.11.12"
MonkeyType = "^22.2.0"
[tool.poetry.scripts]
spiffworkflow-backend = "spiffworkflow_backend.__main__:main"

View File

@ -27,3 +27,5 @@ per-file-ignores =
# this file overwrites methods from the logging library so we can't change them
# and ignore long comment line
src/spiffworkflow_backend/services/logging_service.py:N802,B950
tests/spiffworkflow_backend/integration/test_process_api.py:S607,S101,D103,S605

View File

@ -1,5 +1,5 @@
pip==22.2.2
nox==2022.8.7
nox-poetry==1.0.1
nox==2022.11.21
nox-poetry==1.0.2
poetry==1.2.2
virtualenv==20.16.5

View File

@ -9,7 +9,7 @@ set -o errtrace -o errexit -o nounset -o pipefail
if [[ -z "${BPMN_SPEC_ABSOLUTE_DIR:-}" ]]; then
script_dir="$( cd -- "$(dirname "$0")" >/dev/null 2>&1 ; pwd -P )"
export BPMN_SPEC_ABSOLUTE_DIR="$script_dir/../../sample-process-models"
export BPMN_SPEC_ABSOLUTE_DIR="$script_dir/../../../sample-process-models"
fi
if [[ -z "${SPIFFWORKFLOW_BACKEND_DOCKER_COMPOSE_PROFILE:-}" ]]; then

View File

@ -1,13 +1,13 @@
"""Grabs tickets from csv and makes process instances."""
import os
from spiffworkflow_backend import get_hacked_up_app_for_script
from spiffworkflow_backend import create_app
from spiffworkflow_backend.services.data_setup_service import DataSetupService
def main() -> None:
"""Main."""
app = get_hacked_up_app_for_script()
app = create_app()
with app.app_context():
failing_process_models = DataSetupService.save_all_process_models()
for bpmn_errors in failing_process_models:

View File

@ -1251,12 +1251,17 @@
}, {
"id" : "f44558af-3601-4e54-b854-08396a247544",
"clientId" : "spiffworkflow-backend",
"name" : "",
"description" : "",
"rootUrl" : "",
"adminUrl" : "",
"baseUrl" : "",
"surrogateAuthRequired" : false,
"enabled" : true,
"alwaysDisplayInConsole" : false,
"clientAuthenticatorType" : "client-secret",
"secret" : "JXeQExm0JhQPLumgHtIIqf52bDalHz0q",
"redirectUris" : [ "http://localhost:7000/*", "https://api.unused-for-local-dev.spiffworkflow.org/*", "http://67.205.133.116:7000/*", "http://167.172.242.138:7000/*", "https://api.demo.spiffworkflow.org/*" ],
"redirectUris" : [ "http://localhost:7000/*", "https://api.unused-for-local-dev.spiffworkflow.org/*", "https://api.replace-me-with-spiff-subdomain.spiffworkflow.org/*", "http://67.205.133.116:7000/*", "http://167.172.242.138:7000/*" ],
"webOrigins" : [ ],
"notBefore" : 0,
"bearerOnly" : false,
@ -1273,7 +1278,7 @@
"saml.force.post.binding" : "false",
"saml.multivalued.roles" : "false",
"frontchannel.logout.session.required" : "false",
"post.logout.redirect.uris" : "+",
"post.logout.redirect.uris" : "https://replace-me-with-spiff-subdomain.spiffworkflow.org/*##http://localhost:7001/*",
"oauth2.device.authorization.grant.enabled" : "false",
"backchannel.logout.revoke.offline.tokens" : "false",
"saml.server.signature.keyinfo.ext" : "false",
@ -2161,7 +2166,7 @@
"subType" : "authenticated",
"subComponents" : { },
"config" : {
"allowed-protocol-mapper-types" : [ "oidc-usermodel-attribute-mapper", "oidc-full-name-mapper", "oidc-sha256-pairwise-sub-mapper", "oidc-usermodel-property-mapper", "saml-role-list-mapper", "saml-user-property-mapper", "oidc-address-mapper", "saml-user-attribute-mapper" ]
"allowed-protocol-mapper-types" : [ "oidc-usermodel-attribute-mapper", "oidc-sha256-pairwise-sub-mapper", "saml-user-property-mapper", "oidc-usermodel-property-mapper", "saml-role-list-mapper", "saml-user-attribute-mapper", "oidc-full-name-mapper", "oidc-address-mapper" ]
}
}, {
"id" : "d68e938d-dde6-47d9-bdc8-8e8523eb08cd",
@ -2179,7 +2184,7 @@
"subType" : "anonymous",
"subComponents" : { },
"config" : {
"allowed-protocol-mapper-types" : [ "oidc-sha256-pairwise-sub-mapper", "oidc-usermodel-property-mapper", "saml-role-list-mapper", "saml-user-property-mapper", "saml-user-attribute-mapper", "oidc-address-mapper", "oidc-full-name-mapper", "oidc-usermodel-attribute-mapper" ]
"allowed-protocol-mapper-types" : [ "oidc-address-mapper", "oidc-full-name-mapper", "oidc-usermodel-attribute-mapper", "saml-user-property-mapper", "saml-user-attribute-mapper", "oidc-sha256-pairwise-sub-mapper", "oidc-usermodel-property-mapper", "saml-role-list-mapper" ]
}
}, {
"id" : "3854361d-3fe5-47fb-9417-a99592e3dc5c",
@ -2269,7 +2274,7 @@
"internationalizationEnabled" : false,
"supportedLocales" : [ ],
"authenticationFlows" : [ {
"id" : "b30ab201-b13a-405f-bc57-cb5cd934bdc3",
"id" : "b896c673-57ab-4f24-bbb1-334bdadbecd3",
"alias" : "Account verification options",
"description" : "Method with which to verity the existing account",
"providerId" : "basic-flow",
@ -2291,7 +2296,7 @@
"userSetupAllowed" : false
} ]
}, {
"id" : "7d22faa2-1da8-49ae-a2cc-74e9c9f6ed51",
"id" : "4da99e29-371e-4f4b-a863-e5079f30a714",
"alias" : "Authentication Options",
"description" : "Authentication options.",
"providerId" : "basic-flow",
@ -2320,7 +2325,7 @@
"userSetupAllowed" : false
} ]
}, {
"id" : "ae089cf3-3179-4e12-a683-7969a31be566",
"id" : "d398c928-e201-4e8b-ab09-289bb351cd2e",
"alias" : "Browser - Conditional OTP",
"description" : "Flow to determine if the OTP is required for the authentication",
"providerId" : "basic-flow",
@ -2342,7 +2347,7 @@
"userSetupAllowed" : false
} ]
}, {
"id" : "27a21643-2167-4847-a6b4-b07007671d9a",
"id" : "663b7aa3-84f6-4347-8ed4-588c2464b75d",
"alias" : "Direct Grant - Conditional OTP",
"description" : "Flow to determine if the OTP is required for the authentication",
"providerId" : "basic-flow",
@ -2364,7 +2369,7 @@
"userSetupAllowed" : false
} ]
}, {
"id" : "0ee33ef7-da6b-4248-81c6-9f4f11b58195",
"id" : "98013bc1-e4dd-41f7-9849-1f898143b944",
"alias" : "First broker login - Conditional OTP",
"description" : "Flow to determine if the OTP is required for the authentication",
"providerId" : "basic-flow",
@ -2386,7 +2391,7 @@
"userSetupAllowed" : false
} ]
}, {
"id" : "e1d02af3-2886-42bb-95f4-bfa6f1299edc",
"id" : "b77e7545-9e39-4d72-93f8-1b38c954c2e2",
"alias" : "Handle Existing Account",
"description" : "Handle what to do if there is existing account with same email/username like authenticated identity provider",
"providerId" : "basic-flow",
@ -2408,7 +2413,7 @@
"userSetupAllowed" : false
} ]
}, {
"id" : "35cfc75f-70e3-487c-acd7-0627ab1dbdf1",
"id" : "2470e6f4-9a01-476a-9057-75d78e577182",
"alias" : "Reset - Conditional OTP",
"description" : "Flow to determine if the OTP should be reset or not. Set to REQUIRED to force.",
"providerId" : "basic-flow",
@ -2430,7 +2435,7 @@
"userSetupAllowed" : false
} ]
}, {
"id" : "cc2f7206-8d15-46db-b974-71e67d4d1077",
"id" : "8e7dad0b-f4e1-4534-b618-b635b0a0e4f9",
"alias" : "User creation or linking",
"description" : "Flow for the existing/non-existing user alternatives",
"providerId" : "basic-flow",
@ -2453,7 +2458,7 @@
"userSetupAllowed" : false
} ]
}, {
"id" : "d8314533-eacb-40ef-8f44-7c06321e9793",
"id" : "97c83e43-cba8-4d92-b108-9181bca07a1e",
"alias" : "Verify Existing Account by Re-authentication",
"description" : "Reauthentication of existing account",
"providerId" : "basic-flow",
@ -2475,7 +2480,7 @@
"userSetupAllowed" : false
} ]
}, {
"id" : "d58a5ff1-9a9c-45a9-9f97-1324565e9679",
"id" : "fbabd64c-20de-4b8c-bfd2-be6822572278",
"alias" : "browser",
"description" : "browser based authentication",
"providerId" : "basic-flow",
@ -2511,7 +2516,7 @@
"userSetupAllowed" : false
} ]
}, {
"id" : "3ea2aed9-12d9-4999-a104-67f5c5f7841a",
"id" : "0628a99f-b194-495d-8e54-cc4ca8684956",
"alias" : "clients",
"description" : "Base authentication for clients",
"providerId" : "client-flow",
@ -2547,7 +2552,7 @@
"userSetupAllowed" : false
} ]
}, {
"id" : "c605af3c-bede-4f8f-a5c5-94176171c82c",
"id" : "ce6bf7af-3bff-48ce-b214-7fed08503a2a",
"alias" : "direct grant",
"description" : "OpenID Connect Resource Owner Grant",
"providerId" : "basic-flow",
@ -2576,7 +2581,7 @@
"userSetupAllowed" : false
} ]
}, {
"id" : "901b4d6c-9c27-4d3d-981a-1b5281c1ea2b",
"id" : "60ce729b-d055-4ae7-83cb-85dbcf8cfdaa",
"alias" : "docker auth",
"description" : "Used by Docker clients to authenticate against the IDP",
"providerId" : "basic-flow",
@ -2591,7 +2596,7 @@
"userSetupAllowed" : false
} ]
}, {
"id" : "9d1de1bf-b170-4235-92f1-5dfd3ec31c45",
"id" : "0bd3cf93-7f33-46b2-ad1f-85cdfb0a87f9",
"alias" : "first broker login",
"description" : "Actions taken after first broker login with identity provider account, which is not yet linked to any Keycloak account",
"providerId" : "basic-flow",
@ -2614,7 +2619,7 @@
"userSetupAllowed" : false
} ]
}, {
"id" : "8ee6b54f-4d31-4847-9ddc-36cb4c01b92b",
"id" : "3e52f178-9b9d-4a62-97d5-f9f3f872bcd9",
"alias" : "forms",
"description" : "Username, password, otp and other auth forms.",
"providerId" : "basic-flow",
@ -2636,7 +2641,7 @@
"userSetupAllowed" : false
} ]
}, {
"id" : "76d3380b-218b-443d-a3ea-bea712f4a1f4",
"id" : "3f5fd6cc-2935-45d8-9bef-6857bba3657a",
"alias" : "http challenge",
"description" : "An authentication flow based on challenge-response HTTP Authentication Schemes",
"providerId" : "basic-flow",
@ -2658,7 +2663,7 @@
"userSetupAllowed" : false
} ]
}, {
"id" : "cd756473-4606-4150-9ba5-5b96e6f39c3a",
"id" : "2c2b32dd-57dc-45d7-9a24-b4a253cb6a03",
"alias" : "registration",
"description" : "registration flow",
"providerId" : "basic-flow",
@ -2674,7 +2679,7 @@
"userSetupAllowed" : false
} ]
}, {
"id" : "574fcee6-e152-4069-b328-a7fe33aded3a",
"id" : "dbc28b13-dba7-42a0-a8ab-faa8762979c3",
"alias" : "registration form",
"description" : "registration form",
"providerId" : "form-flow",
@ -2710,7 +2715,7 @@
"userSetupAllowed" : false
} ]
}, {
"id" : "e5a890ee-140a-4ab3-8d79-87e3499385b0",
"id" : "b4a901d5-e7b9-4eb6-9f8e-1d3305846828",
"alias" : "reset credentials",
"description" : "Reset credentials for a user if they forgot their password or something",
"providerId" : "basic-flow",
@ -2746,7 +2751,7 @@
"userSetupAllowed" : false
} ]
}, {
"id" : "6243167c-7e2e-4cc7-b35d-bad7862dc9ef",
"id" : "824fe757-cc5c-4e13-ab98-9a2132e10f5c",
"alias" : "saml ecp",
"description" : "SAML ECP Profile Authentication Flow",
"providerId" : "basic-flow",
@ -2762,13 +2767,13 @@
} ]
} ],
"authenticatorConfig" : [ {
"id" : "ae605746-d169-4a81-8348-b5f52e07ae14",
"id" : "817a93da-29df-447f-ab05-cd9557e66745",
"alias" : "create unique user config",
"config" : {
"require.password.update.after.registration" : "false"
}
}, {
"id" : "c5feb20c-eea5-4556-b9f8-797be4d67e26",
"id" : "4a8a9659-fa0d-4da8-907b-3b6daec1c878",
"alias" : "review profile config",
"config" : {
"update.profile.on.first.login" : "missing"

View File

@ -18,7 +18,19 @@ set -o errtrace -o errexit -o nounset -o pipefail
if ! docker network inspect spiffworkflow > /dev/null 2>&1; then
docker network create spiffworkflow
fi
docker rm keycloak 2>/dev/null || echo 'no keycloak container found, safe to start new container'
# https://stackoverflow.com/a/60579344/6090676
container_name="keycloak"
if [[ -n "$(docker ps -qa -f name=$container_name)" ]]; then
echo ":: Found container - $container_name"
if [[ -n "$(docker ps -q -f name=$container_name)" ]]; then
echo ":: Stopping running container - $container_name"
docker stop $container_name
fi
echo ":: Removing stopped container - $container_name"
docker rm $container_name
fi
docker run \
-p 7002:8080 \
-d \

View File

@ -68,7 +68,7 @@ services:
- "7000:7000"
network_mode: host
volumes:
- ${BPMN_SPEC_ABSOLUTE_DIR:-./../sample-process-models}:/app/process_models
- ${BPMN_SPEC_ABSOLUTE_DIR:-../../sample-process-models}:/app/process_models
- ./log:/app/log
healthcheck:
test: curl localhost:7000/v1.0/status --fail
@ -82,7 +82,7 @@ services:
profiles:
- debug
volumes:
- ${BPMN_SPEC_ABSOLUTE_DIR:-./../sample-process-models}:/app/process_models
- ${BPMN_SPEC_ABSOLUTE_DIR:-../../sample-process-models}:/app/process_models
- ./:/app
command: /app/bin/boot_in_docker_debug_mode

View File

@ -1,8 +1,8 @@
"""empty message
Revision ID: ff1c1628337c
Revision ID: 4d75421c0af0
Revises:
Create Date: 2022-11-28 15:08:52.014254
Create Date: 2022-12-06 17:42:56.417673
"""
from alembic import op
@ -10,7 +10,7 @@ import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = 'ff1c1628337c'
revision = '4d75421c0af0'
down_revision = None
branch_labels = None
depends_on = None
@ -79,8 +79,7 @@ def upgrade():
sa.Column('email', sa.String(length=255), nullable=True),
sa.PrimaryKeyConstraint('id'),
sa.UniqueConstraint('service', 'service_id', name='service_key'),
sa.UniqueConstraint('uid'),
sa.UniqueConstraint('username')
sa.UniqueConstraint('uid')
)
op.create_table('message_correlation_property',
sa.Column('id', sa.Integer(), nullable=False),
@ -249,6 +248,7 @@ def upgrade():
sa.PrimaryKeyConstraint('id'),
sa.UniqueConstraint('process_instance_id', 'key', name='process_instance_metadata_unique')
)
op.create_index(op.f('ix_process_instance_metadata_key'), 'process_instance_metadata', ['key'], unique=False)
op.create_table('spiff_step_details',
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('process_instance_id', sa.Integer(), nullable=False),
@ -295,6 +295,7 @@ def downgrade():
op.drop_index(op.f('ix_active_task_user_active_task_id'), table_name='active_task_user')
op.drop_table('active_task_user')
op.drop_table('spiff_step_details')
op.drop_index(op.f('ix_process_instance_metadata_key'), table_name='process_instance_metadata')
op.drop_table('process_instance_metadata')
op.drop_table('permission_assignment')
op.drop_table('message_instance')

View File

@ -1851,7 +1851,7 @@ lxml = "*"
type = "git"
url = "https://github.com/sartography/SpiffWorkflow"
reference = "main"
resolved_reference = "062eaf15d28c66f8cf07f68409429560251b12c7"
resolved_reference = "ffb1686757f944065580dd2db8def73d6c1f0134"
[[package]]
name = "SQLAlchemy"
@ -2989,7 +2989,18 @@ psycopg2 = [
{file = "psycopg2-2.9.4.tar.gz", hash = "sha256:d529926254e093a1b669f692a3aa50069bc71faf5b0ecd91686a78f62767d52f"},
]
pyasn1 = [
{file = "pyasn1-0.4.8-py2.4.egg", hash = "sha256:fec3e9d8e36808a28efb59b489e4528c10ad0f480e57dcc32b4de5c9d8c9fdf3"},
{file = "pyasn1-0.4.8-py2.5.egg", hash = "sha256:0458773cfe65b153891ac249bcf1b5f8f320b7c2ce462151f8fa74de8934becf"},
{file = "pyasn1-0.4.8-py2.6.egg", hash = "sha256:5c9414dcfede6e441f7e8f81b43b34e834731003427e5b09e4e00e3172a10f00"},
{file = "pyasn1-0.4.8-py2.7.egg", hash = "sha256:6e7545f1a61025a4e58bb336952c5061697da694db1cae97b116e9c46abcf7c8"},
{file = "pyasn1-0.4.8-py2.py3-none-any.whl", hash = "sha256:39c7e2ec30515947ff4e87fb6f456dfc6e84857d34be479c9d4a4ba4bf46aa5d"},
{file = "pyasn1-0.4.8-py3.1.egg", hash = "sha256:78fa6da68ed2727915c4767bb386ab32cdba863caa7dbe473eaae45f9959da86"},
{file = "pyasn1-0.4.8-py3.2.egg", hash = "sha256:08c3c53b75eaa48d71cf8c710312316392ed40899cb34710d092e96745a358b7"},
{file = "pyasn1-0.4.8-py3.3.egg", hash = "sha256:03840c999ba71680a131cfaee6fab142e1ed9bbd9c693e285cc6aca0d555e576"},
{file = "pyasn1-0.4.8-py3.4.egg", hash = "sha256:7ab8a544af125fb704feadb008c99a88805126fb525280b2270bb25cc1d78a12"},
{file = "pyasn1-0.4.8-py3.5.egg", hash = "sha256:e89bf84b5437b532b0803ba5c9a5e054d21fec423a89952a74f87fa2c9b7bce2"},
{file = "pyasn1-0.4.8-py3.6.egg", hash = "sha256:014c0e9976956a08139dc0712ae195324a75e142284d5f87f1a87ee1b068a359"},
{file = "pyasn1-0.4.8-py3.7.egg", hash = "sha256:99fcc3c8d804d1bc6d9a099921e39d827026409a58f2a720dcdb89374ea0c776"},
{file = "pyasn1-0.4.8.tar.gz", hash = "sha256:aef77c9fb94a3ac588e87841208bdec464471d9871bd5050a287cc9a475cd0ba"},
]
pycodestyle = [

View File

@ -19,6 +19,9 @@ from werkzeug.exceptions import NotFound
import spiffworkflow_backend.load_database_models # noqa: F401
from spiffworkflow_backend.config import setup_config
from spiffworkflow_backend.routes.admin_blueprint.admin_blueprint import admin_blueprint
from spiffworkflow_backend.routes.openid_blueprint.openid_blueprint import (
openid_blueprint,
)
from spiffworkflow_backend.routes.process_api_blueprint import process_api_blueprint
from spiffworkflow_backend.routes.user import verify_token
from spiffworkflow_backend.routes.user_blueprint import user_blueprint
@ -103,6 +106,7 @@ def create_app() -> flask.app.Flask:
app.register_blueprint(process_api_blueprint)
app.register_blueprint(api_error_blueprint)
app.register_blueprint(admin_blueprint, url_prefix="/admin")
app.register_blueprint(openid_blueprint, url_prefix="/openid")
# preflight options requests will be allowed if they meet the requirements of the url regex.
# we will add an Access-Control-Max-Age header to the response to tell the browser it doesn't

View File

@ -338,9 +338,9 @@ paths:
schema:
$ref: "#/components/schemas/ProcessModel"
/process-models/{modified_process_model_id}/files:
/process-models/{modified_process_model_identifier}/files:
parameters:
- name: modified_process_model_id
- name: modified_process_model_identifier
in: path
required: true
description: The process_model_id, modified to replace slashes (/)
@ -445,6 +445,32 @@ paths:
schema:
$ref: "#/components/schemas/ProcessModel"
/process-models/{modified_process_model_identifier}/publish:
parameters:
- name: modified_process_model_identifier
in: path
required: true
description: the modified process model id
schema:
type: string
- name: branch_to_update
in: query
required: false
description: the name of the branch we want to merge into
schema:
type: string
post:
operationId: spiffworkflow_backend.routes.process_api_blueprint.process_model_publish
summary: Merge changes from this model to another branch.
tags:
- Process Models
responses:
"200":
description: The process model was published.
content:
application/json:
schema:
type: string
/processes:
get:
@ -464,6 +490,25 @@ paths:
items:
$ref: "#/components/schemas/Process"
/github-webhook-receive:
post:
operationId: spiffworkflow_backend.routes.process_api_blueprint.github_webhook_receive
summary: Receives push webhooks from GitHub so we can keep our process model repo up to date
requestBody:
content:
application/json:
schema:
$ref: "#/components/schemas/ProcessModelCategory"
tags:
- git
responses:
"200":
description: Success
content:
application/json:
schema:
$ref: "#/components/schemas/OkTrue"
/process-instances:
parameters:
- name: process_model_identifier
@ -544,6 +589,12 @@ paths:
description: Specifies the identifier of a report to use, if any
schema:
type: string
- name: report_id
in: query
required: false
description: Specifies the identifier of a report to use, if any
schema:
type: integer
get:
operationId: spiffworkflow_backend.routes.process_api_blueprint.process_instance_list
summary: Returns a list of process instances for a given process model
@ -559,33 +610,6 @@ paths:
items:
$ref: "#/components/schemas/Workflow"
/process-instances/{process_instance_id}/task/{task_id}/update:
parameters:
- name: process_instance_id
in: path
required: true
description: The unique id of the process instance
schema:
type: string
- name: task_id
in: path
required: true
description: The unique id of the task
schema:
type: string
post:
operationId: spiffworkflow_backend.routes.process_api_blueprint.update_task_data
summary: Update the task data for requested instance and task
tags:
- Process Instances
responses:
"200":
description: Task Updated Successfully
content:
application/json:
schema:
$ref: "#/components/schemas/Workflow"
/process-instances/{process_instance_id}/event:
parameters:
- name: process_instance_id
@ -661,15 +685,14 @@ paths:
schema:
$ref: "#/components/schemas/Workflow"
/process-models/{modified_process_model_id}/process-instances:
/process-instances/{modified_process_model_identifier}:
parameters:
- name: modified_process_model_id
- name: modified_process_model_identifier
in: path
required: true
description: The unique id of an existing process model.
schema:
type: string
# process_instance_create
post:
operationId: spiffworkflow_backend.routes.process_api_blueprint.process_instance_create
summary: Creates an process instance from a process model and returns the instance
@ -683,28 +706,7 @@ paths:
schema:
$ref: "#/components/schemas/Workflow"
/process-instances/{process_instance_id}:
parameters:
- name: process_instance_id
in: path
required: true
description: The unique id of an existing process instance.
schema:
type: integer
delete:
operationId: spiffworkflow_backend.routes.process_api_blueprint.process_instance_delete
summary: Deletes a single process instance
tags:
- Process Instances
responses:
"200":
description: The process instance was deleted.
content:
application/json:
schema:
$ref: "#/components/schemas/OkTrue"
/process-models/{modified_process_model_identifier}/process-instances/{process_instance_id}:
/process-instances/{modified_process_model_identifier}/{process_instance_id}:
parameters:
- name: modified_process_model_identifier
in: path
@ -718,6 +720,12 @@ paths:
description: The unique id of an existing process instance.
schema:
type: integer
- name: process_identifier
in: query
required: false
description: The identifier of the process to use for the diagram. Useful for displaying the diagram for a call activity.
schema:
type: string
get:
tags:
- Process Instances
@ -730,6 +738,18 @@ paths:
application/json:
schema:
$ref: "#/components/schemas/Workflow"
delete:
operationId: spiffworkflow_backend.routes.process_api_blueprint.process_instance_delete
summary: Deletes a single process instance
tags:
- Process Instances
responses:
"200":
description: The process instance was deleted.
content:
application/json:
schema:
$ref: "#/components/schemas/OkTrue"
/process-instances/{modified_process_model_identifier}/{process_instance_id}/run:
parameters:
@ -758,7 +778,7 @@ paths:
schema:
$ref: "#/components/schemas/Workflow"
/process-instances/{process_instance_id}/terminate:
/process-instances/{modified_process_model_identifier}/{process_instance_id}/terminate:
parameters:
- name: process_instance_id
in: path
@ -779,7 +799,7 @@ paths:
schema:
$ref: "#/components/schemas/OkTrue"
/process-instances/{process_instance_id}/suspend:
/process-instances/{modified_process_model_identifier}/{process_instance_id}/suspend:
parameters:
- name: process_instance_id
in: path
@ -800,7 +820,7 @@ paths:
schema:
$ref: "#/components/schemas/OkTrue"
/process-instances/{process_instance_id}/resume:
/process-instances/{modified_process_model_identifier}/{process_instance_id}/resume:
parameters:
- name: process_instance_id
in: path
@ -862,14 +882,30 @@ paths:
schema:
$ref: "#/components/schemas/OkTrue"
/process-instances/reports/{report_identifier}:
/process-instances/reports/columns:
get:
operationId: spiffworkflow_backend.routes.process_api_blueprint.process_instance_report_column_list
summary: Returns all available columns for a process instance report.
tags:
- Process Instances
responses:
"200":
description: Workflow.
content:
application/json:
schema:
type: array
items:
$ref: "#/components/schemas/Workflow"
/process-instances/reports/{report_id}:
parameters:
- name: report_identifier
- name: report_id
in: path
required: true
description: The unique id of an existing report
schema:
type: string
type: integer
- name: page
in: query
required: false
@ -921,9 +957,9 @@ paths:
schema:
$ref: "#/components/schemas/OkTrue"
/process-models/{modified_process_model_id}/files/{file_name}:
/process-models/{modified_process_model_identifier}/files/{file_name}:
parameters:
- name: modified_process_model_id
- name: modified_process_model_identifier
in: path
required: true
description: The modified process model id
@ -1100,9 +1136,9 @@ paths:
items:
$ref: "#/components/schemas/Task"
/process-instances/{modified_process_model_id}/{process_instance_id}/tasks:
/task-data/{modified_process_model_identifier}/{process_instance_id}:
parameters:
- name: modified_process_model_id
- name: modified_process_model_identifier
in: path
required: true
description: The modified id of an existing process model
@ -1141,11 +1177,44 @@ paths:
items:
$ref: "#/components/schemas/Task"
/service_tasks:
/task-data/{modified_process_model_identifier}/{process_instance_id}/{task_id}:
parameters:
- name: modified_process_model_identifier
in: path
required: true
description: The modified id of an existing process model
schema:
type: string
- name: process_instance_id
in: path
required: true
description: The unique id of an existing process instance.
schema:
type: integer
- name: task_id
in: path
required: true
description: The unique id of the task.
schema:
type: string
put:
operationId: spiffworkflow_backend.routes.process_api_blueprint.update_task_data
summary: Update the task data for requested instance and task
tags:
- Process Instances
responses:
"200":
description: Task Updated Successfully
content:
application/json:
schema:
$ref: "#/components/schemas/Workflow"
/service-tasks:
get:
tags:
- Service Tasks
operationId: spiffworkflow_backend.routes.process_api_blueprint.service_tasks_show
operationId: spiffworkflow_backend.routes.process_api_blueprint.service_task_list
summary: Gets all available service task connectors
responses:
"200":
@ -1325,7 +1394,7 @@ paths:
schema:
$ref: "#/components/schemas/Workflow"
/process-instances/{process_instance_id}/logs:
/logs/{modified_process_model_identifier}/{process_instance_id}:
parameters:
- name: process_instance_id
in: path
@ -1345,6 +1414,12 @@ paths:
description: The number of items to show per page. Defaults to 10.
schema:
type: integer
- name: detailed
in: query
required: false
description: Show the detailed view, which includes all log entries
schema:
type: boolean
get:
tags:
- Process Instances
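Several of the paths above were renamed to take a modified_process_model_identifier, described as the process_model_id with its slashes replaced. A hedged sketch of that convention; the colon substitution is inferred from the permission URIs elsewhere in this diff (e.g. misc:category_number_one:process-model-with-form), not stated in the spec itself:

# Sketch of the identifier convention assumed by the renamed paths above.
# The ":" substitution is inferred from permission URIs in this diff.
def modify_process_model_id(process_model_id: str) -> str:
    """Make a process model id safe to use as a single URL path segment."""
    return process_model_id.replace("/", ":")


def unmodify_process_model_id(modified_id: str) -> str:
    """Reverse the substitution when reading the path parameter back."""
    return modified_id.replace(":", "/")


assert (
    modify_process_model_id("misc/category_number_one/process-model-with-form")
    == "misc:category_number_one:process-model-with-form"
)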

View File

@ -14,13 +14,13 @@ class ConfigurationError(Exception):
def setup_database_uri(app: Flask) -> None:
"""Setup_database_uri."""
if os.environ.get("SPIFFWORKFLOW_BACKEND_DATABASE_URI") is None:
if app.config.get("SPIFFWORKFLOW_BACKEND_DATABASE_URI") is None:
database_name = f"spiffworkflow_backend_{app.config['ENV_IDENTIFIER']}"
if os.environ.get("SPIFF_DATABASE_TYPE") == "sqlite":
if app.config.get("SPIFF_DATABASE_TYPE") == "sqlite":
app.config[
"SQLALCHEMY_DATABASE_URI"
] = f"sqlite:///{app.instance_path}/db_{app.config['ENV_IDENTIFIER']}.sqlite3"
elif os.environ.get("SPIFF_DATABASE_TYPE") == "postgres":
elif app.config.get("SPIFF_DATABASE_TYPE") == "postgres":
app.config[
"SQLALCHEMY_DATABASE_URI"
] = f"postgresql://spiffworkflow_backend:spiffworkflow_backend@localhost:5432/{database_name}"
@ -33,11 +33,22 @@ def setup_database_uri(app: Flask) -> None:
"SQLALCHEMY_DATABASE_URI"
] = f"mysql+mysqlconnector://root:{db_pswd}@localhost/{database_name}"
else:
app.config["SQLALCHEMY_DATABASE_URI"] = os.environ.get(
app.config["SQLALCHEMY_DATABASE_URI"] = app.config.get(
"SPIFFWORKFLOW_BACKEND_DATABASE_URI"
)
def load_config_file(app: Flask, env_config_module: str) -> None:
"""Load_config_file."""
try:
app.config.from_object(env_config_module)
except ImportStringError as exception:
if os.environ.get("TERRAFORM_DEPLOYED_ENVIRONMENT") != "true":
raise ModuleNotFoundError(
f"Cannot find config module: {env_config_module}"
) from exception
def setup_config(app: Flask) -> None:
"""Setup_config."""
# ensure the instance folder exists
@ -52,30 +63,22 @@ def setup_config(app: Flask) -> None:
app.config["SQLALCHEMY_TRACK_MODIFICATIONS"] = False
app.config.from_object("spiffworkflow_backend.config.default")
env_config_prefix = "spiffworkflow_backend.config."
if (
os.environ.get("TERRAFORM_DEPLOYED_ENVIRONMENT") == "true"
and os.environ.get("SPIFFWORKFLOW_BACKEND_ENV") is not None
):
load_config_file(app, f"{env_config_prefix}terraform_deployed_environment")
env_config_module = env_config_prefix + app.config["ENV_IDENTIFIER"]
load_config_file(app, env_config_module)
# This allows config/testing.py or instance/config.py to override the default config
if "ENV_IDENTIFIER" in app.config and app.config["ENV_IDENTIFIER"] == "testing":
app.config.from_pyfile("config/testing.py", silent=True)
else:
app.config.from_pyfile(f"{app.instance_path}/config.py", silent=True)
env_config_prefix = "spiffworkflow_backend.config."
env_config_module = env_config_prefix + app.config["ENV_IDENTIFIER"]
try:
app.config.from_object(env_config_module)
except ImportStringError as exception:
if (
os.environ.get("TERRAFORM_DEPLOYED_ENVIRONMENT") == "true"
and os.environ.get("SPIFFWORKFLOW_BACKEND_ENV") is not None
):
app.config.from_object("{env_config_prefix}terraform_deployed_environment")
else:
raise ModuleNotFoundError(
f"Cannot find config module: {env_config_module}"
) from exception
setup_database_uri(app)
setup_logger(app)
app.config["PERMISSIONS_FILE_FULLPATH"] = None
if app.config["SPIFFWORKFLOW_BACKEND_PERMISSIONS_FILE_NAME"]:
app.config["PERMISSIONS_FILE_FULLPATH"] = os.path.join(
@ -92,5 +95,8 @@ def setup_config(app: Flask) -> None:
if app.config["BPMN_SPEC_ABSOLUTE_DIR"] is None:
raise ConfigurationError("BPMN_SPEC_ABSOLUTE_DIR config must be set")
setup_database_uri(app)
setup_logger(app)
thread_local_data = threading.local()
app.config["THREAD_LOCAL_DATA"] = thread_local_data

View File

@ -27,12 +27,13 @@ CONNECTOR_PROXY_URL = environ.get(
"CONNECTOR_PROXY_URL", default="http://localhost:7004"
)
GIT_COMMIT_ON_SAVE = environ.get("GIT_COMMIT_ON_SAVE", default="false") == "true"
# Open ID server
OPEN_ID_SERVER_URL = environ.get("OPEN_ID_SERVER_URL", default="http://localhost:7002")
OPEN_ID_SERVER_URL = environ.get(
"OPEN_ID_SERVER_URL", default="http://localhost:7002/realms/spiffworkflow"
)
# Replace above line with this to use the built-in Open ID Server.
# OPEN_ID_SERVER_URL = environ.get("OPEN_ID_SERVER_URL", default="http://localhost:7000/openid")
OPEN_ID_CLIENT_ID = environ.get("OPEN_ID_CLIENT_ID", default="spiffworkflow-backend")
OPEN_ID_REALM_NAME = environ.get("OPEN_ID_REALM_NAME", default="spiffworkflow")
OPEN_ID_CLIENT_SECRET_KEY = environ.get(
"OPEN_ID_CLIENT_SECRET_KEY", default="JXeQExm0JhQPLumgHtIIqf52bDalHz0q"
) # noqa: S105
@ -57,3 +58,19 @@ SENTRY_TRACES_SAMPLE_RATE = environ.get(
SPIFFWORKFLOW_BACKEND_LOG_LEVEL = environ.get(
"SPIFFWORKFLOW_BACKEND_LOG_LEVEL", default="info"
)
# When a user clicks on the `Publish` button, this is the default branch this server merges into.
# E.g., a dev server could have `staging` here; a staging server might have `production` here.
GIT_BRANCH_TO_PUBLISH_TO = environ.get("GIT_BRANCH_TO_PUBLISH_TO")
GIT_BRANCH = environ.get("GIT_BRANCH")
GIT_CLONE_URL_FOR_PUBLISHING = environ.get("GIT_CLONE_URL")
GIT_COMMIT_ON_SAVE = environ.get("GIT_COMMIT_ON_SAVE", default="false") == "true"
# Database configuration
SPIFF_DATABASE_TYPE = environ.get(
"SPIFF_DATABASE_TYPE", default="mysql"
) # can also be sqlite, postgres
# Override the above with a specific SQLAlchemy connection string.
SPIFFWORKFLOW_BACKEND_DATABASE_URI = environ.get(
"SPIFFWORKFLOW_BACKEND_DATABASE_URI", default=None
)

View File

@ -2,8 +2,8 @@
from os import environ
GIT_COMMIT_ON_SAVE = True
GIT_COMMIT_USERNAME = "demo"
GIT_COMMIT_EMAIL = "demo@example.com"
GIT_USERNAME = "demo"
GIT_USER_EMAIL = "demo@example.com"
SPIFFWORKFLOW_BACKEND_PERMISSIONS_FILE_NAME = environ.get(
"SPIFFWORKFLOW_BACKEND_PERMISSIONS_FILE_NAME",
default="terraform_deployed_environment.yml",

View File

@ -0,0 +1,8 @@
"""Dev."""
from os import environ
GIT_BRANCH_TO_PUBLISH_TO = environ.get("GIT_BRANCH_TO_PUBLISH_TO", default="staging")
GIT_USERNAME = environ.get("GIT_USERNAME", default="sartography-automated-committer")
GIT_USER_EMAIL = environ.get(
"GIT_USER_EMAIL", default="sartography-automated-committer@users.noreply.github.com"
)

View File

@ -12,3 +12,10 @@ SPIFFWORKFLOW_BACKEND_LOG_LEVEL = environ.get(
RUN_BACKGROUND_SCHEDULER = (
environ.get("RUN_BACKGROUND_SCHEDULER", default="true") == "true"
)
GIT_CLONE_URL_FOR_PUBLISHING = environ.get(
"GIT_CLONE_URL", default="https://github.com/sartography/sample-process-models.git"
)
GIT_USERNAME = "sartography-automated-committer"
GIT_USER_EMAIL = f"{GIT_USERNAME}@users.noreply.github.com"
GIT_BRANCH_TO_PUBLISH_TO = "main"
GIT_BRANCH = "main"

View File

@ -1,5 +1,11 @@
default_group: everybody
users:
admin:
email: admin@spiffworkflow.org
password: admin
preferred_username: Admin
groups:
admin:
users:
@ -11,8 +17,6 @@ groups:
dan,
mike,
jason,
j,
amir,
jarrad,
elizabeth,
jon,
@ -27,7 +31,6 @@ groups:
dan,
mike,
jason,
j,
amir,
jarrad,
elizabeth,
@ -58,6 +61,12 @@ groups:
harmeet,
]
admin-ro:
users:
[
j,
]
permissions:
admin:
groups: [admin]
@ -65,11 +74,28 @@ permissions:
allowed_permissions: [create, read, update, delete]
uri: /*
admin-readonly:
groups: [admin-ro]
users: []
allowed_permissions: [read]
uri: /*
admin-process-instances-for-readonly:
groups: [admin-ro]
users: []
allowed_permissions: [create, read, update, delete]
uri: /v1.0/process-instances/*
tasks-crud:
groups: [everybody]
users: []
allowed_permissions: [create, read, update, delete]
uri: /v1.0/tasks/*
service-tasks:
groups: [everybody]
users: []
allowed_permissions: [read]
uri: /v1.0/service-tasks
# read all for everybody
read-all-process-groups:
@ -98,6 +124,12 @@ permissions:
allowed_permissions: [read]
uri: /v1.0/processes
task-data-read:
groups: [demo]
users: []
allowed_permissions: [read]
uri: /v1.0/task-data/*
manage-procurement-admin:
groups: ["Project Lead"]
@ -170,17 +202,17 @@ permissions:
uri: /v1.0/process-instances/manage-procurement:vendor-lifecycle-management:*
core1-admin-models-instantiate:
groups: ["core-contributor"]
groups: ["core-contributor", "Finance Team"]
users: []
allowed_permissions: [create]
uri: /v1.0/process-models/misc:category_number_one:process-model-with-form/process-instances
core1-admin-instances:
groups: ["core-contributor"]
groups: ["core-contributor", "Finance Team"]
users: []
allowed_permissions: [create, read]
uri: /v1.0/process-instances/misc:category_number_one:process-model-with-form:*
core1-admin-instances-slash:
groups: ["core-contributor"]
groups: ["core-contributor", "Finance Team"]
users: []
allowed_permissions: [create, read]
uri: /v1.0/process-instances/misc:category_number_one:process-model-with-form/*

View File

@ -0,0 +1,88 @@
default_group: everybody
users:
admin:
email: admin@spiffworkflow.org
password: admin
preferred_username: Admin
nelson:
email: nelson@spiffworkflow.org
password: nelson
preferred_username: Nelson
malala:
email: malala@spiffworkflow.org
password: malala
preferred_username: Malala
groups:
admin:
users:
[
admin,
]
Education:
users:
[
malala
]
President:
users:
[
nelson
]
permissions:
# Admins have access to everything.
admin:
groups: [admin]
users: []
allowed_permissions: [create, read, update, delete]
uri: /*
# Everybody can participate in tasks assigned to them.
tasks-crud:
groups: [everybody]
users: []
allowed_permissions: [create, read, update, delete]
uri: /v1.0/tasks/*
# Everyone can see everything (all groups and processes are visible)
read-all-process-groups:
groups: [ everybody ]
users: [ ]
allowed_permissions: [ read ]
uri: /v1.0/process-groups/*
read-all-process-models:
groups: [ everybody ]
users: [ ]
allowed_permissions: [ read ]
uri: /v1.0/process-models/*
read-all-process-instance:
groups: [ everybody ]
users: [ ]
allowed_permissions: [ read ]
uri: /v1.0/process-instances/*
read-process-instance-reports:
groups: [ everybody ]
users: [ ]
allowed_permissions: [ read ]
uri: /v1.0/process-instances/reports/*
processes-read:
groups: [ everybody ]
users: [ ]
allowed_permissions: [ read ]
uri: /v1.0/processes
# Members of the Education group can change the way their processes work.
education-admin:
groups: ["Education", "President"]
users: []
allowed_permissions: [create, read, update, delete]
uri: /v1.0/process-groups/education:*
# Anyone can start an education process.
education-everybody:
groups: [everybody]
users: []
allowed_permissions: [create, read]
uri: /v1.0/process-instances/misc:category_number_one:process-model-with-form/*
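Each permission entry above pairs groups and users with an allowed_permissions list and a uri that may end in a * wildcard. A simplified illustration of how one entry could be evaluated; this models the file format only, not the backend's actual enforcement code:

# Simplified check of one permission entry from the YAML above.
# Illustrative only; not the backend's real enforcement logic.
def entry_allows(entry: dict, group: str, permission: str, uri: str) -> bool:
    if group not in entry["groups"]:
        return False
    if permission not in entry["allowed_permissions"]:
        return False
    target = entry["uri"]
    if target.endswith("*"):
        return uri.startswith(target[:-1])  # prefix match on wildcard
    return uri == target


education_admin = {
    "groups": ["Education", "President"],
    "allowed_permissions": ["create", "read", "update", "delete"],
    "uri": "/v1.0/process-groups/education:*",
}
assert entry_allows(
    education_admin, "Education", "update", "/v1.0/process-groups/education:history"
)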

View File

@ -0,0 +1,165 @@
default_group: everybody
groups:
admin:
users:
[
admin,
jakub,
kb,
alex,
dan,
mike,
jason,
j,
jarrad,
elizabeth,
jon,
natalia,
]
Finance Team:
users:
[
jakub,
alex,
dan,
mike,
jason,
j,
amir,
jarrad,
elizabeth,
jon,
natalia,
sasha,
fin,
fin1,
]
demo:
users:
[
core,
fin,
fin1,
harmeet,
sasha,
manuchehr,
lead,
lead1
]
core-contributor:
users:
[
core,
harmeet,
]
permissions:
admin:
groups: [admin]
users: []
allowed_permissions: [read]
uri: /*
admin-process-instances:
groups: [admin]
users: []
allowed_permissions: [create, read, update, delete]
uri: /v1.0/process-instances/*
tasks-crud:
groups: [everybody]
users: []
allowed_permissions: [create, read, update, delete]
uri: /v1.0/tasks/*
service-tasks:
groups: [everybody]
users: []
allowed_permissions: [read]
uri: /v1.0/service-tasks
# read all for everybody
read-all-process-groups:
groups: [everybody]
users: []
allowed_permissions: [read]
uri: /v1.0/process-groups/*
read-all-process-models:
groups: [everybody]
users: []
allowed_permissions: [read]
uri: /v1.0/process-models/*
read-all-process-instance:
groups: [everybody]
users: []
allowed_permissions: [read]
uri: /v1.0/process-instances/*
read-process-instance-reports:
groups: [everybody]
users: []
allowed_permissions: [read]
uri: /v1.0/process-instances/reports/*
processes-read:
groups: [everybody]
users: []
allowed_permissions: [read]
uri: /v1.0/processes
manage-procurement-admin-instances:
groups: ["Project Lead"]
users: []
allowed_permissions: [create, read, update, delete]
uri: /v1.0/process-instances/manage-procurement:*
manage-procurement-admin-instances-slash:
groups: ["Project Lead"]
users: []
allowed_permissions: [create, read, update, delete]
uri: /v1.0/process-instances/manage-procurement/*
manage-procurement-admin-instance-logs:
groups: ["Project Lead"]
users: []
allowed_permissions: [read]
uri: /v1.0/logs/manage-procurement:*
manage-procurement-admin-instance-logs-slash:
groups: ["Project Lead"]
users: []
allowed_permissions: [read]
uri: /v1.0/logs/manage-procurement/*
manage-revenue-streams-instances:
groups: ["core-contributor", "demo"]
users: []
allowed_permissions: [create, read]
uri: /v1.0/process-instances/manage-revenue-streams:product-revenue-streams:customer-contracts-trade-terms/*
manage-revenue-streams-instance-logs:
groups: ["core-contributor", "demo"]
users: []
allowed_permissions: [read]
uri: /v1.0/logs/manage-revenue-streams:product-revenue-streams:customer-contracts-trade-terms/*
manage-procurement-invoice-instances:
groups: ["core-contributor", "demo"]
users: []
allowed_permissions: [create, read]
uri: /v1.0/process-instances/manage-procurement:procurement:core-contributor-invoice-management:*
manage-procurement-invoice-instance-logs:
groups: ["core-contributor", "demo"]
users: []
allowed_permissions: [read]
uri: /v1.0/logs/manage-procurement:procurement:core-contributor-invoice-management:*
manage-procurement-instances:
groups: ["core-contributor", "demo"]
users: []
allowed_permissions: [create, read]
uri: /v1.0/process-instances/manage-procurement:vendor-lifecycle-management:*
manage-procurement-instance-logs:
groups: ["core-contributor", "demo"]
users: []
allowed_permissions: [read]
uri: /v1.0/logs/manage-procurement:vendor-lifecycle-management:*

View File

@ -12,7 +12,6 @@ groups:
mike,
jason,
j,
amir,
jarrad,
elizabeth,
jon,
@ -71,6 +70,13 @@ permissions:
allowed_permissions: [create, read, update, delete]
uri: /v1.0/tasks/*
service-tasks:
groups: [everybody]
users: []
allowed_permissions: [read]
uri: /v1.0/service-tasks
# read all for everybody
read-all-process-groups:
groups: [everybody]
@ -98,6 +104,12 @@ permissions:
allowed_permissions: [read]
uri: /v1.0/processes
task-data-read:
groups: [demo]
users: []
allowed_permissions: [read]
uri: /v1.0/task-data/*
manage-procurement-admin:
groups: ["Project Lead"]

View File

@ -1,9 +1,7 @@
"""Staging."""
from os import environ
GIT_COMMIT_ON_SAVE = True
GIT_COMMIT_USERNAME = "staging"
GIT_COMMIT_EMAIL = "staging@example.com"
SPIFFWORKFLOW_BACKEND_PERMISSIONS_FILE_NAME = environ.get(
"SPIFFWORKFLOW_BACKEND_PERMISSIONS_FILE_NAME", default="staging.yml"
)
GIT_BRANCH = environ.get("GIT_BRANCH_TO_PUBLISH_TO", default="staging")
GIT_BRANCH_TO_PUBLISH_TO = environ.get("GIT_BRANCH_TO_PUBLISH_TO", default="main")
GIT_COMMIT_ON_SAVE = False
SPIFFWORKFLOW_BACKEND_PERMISSIONS_FILE_NAME = "staging.yml"

View File

@ -0,0 +1,29 @@
"""Terraform-deployed environment."""
from os import environ
# default.py already ensured that this key existed and was not None
environment_identifier_for_this_config_file_only = environ["SPIFFWORKFLOW_BACKEND_ENV"]
GIT_COMMIT_ON_SAVE = True
GIT_USERNAME = "sartography-automated-committer"
GIT_USER_EMAIL = f"{GIT_USERNAME}@users.noreply.github.com"
SPIFFWORKFLOW_BACKEND_PERMISSIONS_FILE_NAME = environ.get(
"SPIFFWORKFLOW_BACKEND_PERMISSIONS_FILE_NAME",
default="terraform_deployed_environment.yml",
)
RUN_BACKGROUND_SCHEDULER = (
environ.get("RUN_BACKGROUND_SCHEDULER", default="false") == "true"
)
OPEN_ID_SERVER_URL = f"https://keycloak.{environment_identifier_for_this_config_file_only}.spiffworkflow.org/realms/spiffworkflow"
SPIFFWORKFLOW_FRONTEND_URL = (
f"https://{environment_identifier_for_this_config_file_only}.spiffworkflow.org"
)
SPIFFWORKFLOW_BACKEND_URL = (
f"https://api.{environment_identifier_for_this_config_file_only}.spiffworkflow.org"
)
CONNECTOR_PROXY_URL = f"https://connector-proxy.{environment_identifier_for_this_config_file_only}.spiffworkflow.org"
GIT_CLONE_URL_FOR_PUBLISHING = environ.get(
"GIT_CLONE_URL", default="https://github.com/sartography/sample-process-models.git"
)

View File

@ -1,16 +0,0 @@
"""Terraform-deployed environment."""
from os import environ
# default.py already ensured that this key existed and was not None
environment_identifier_for_this_config_file_only = environ["SPIFFWORKFLOW_BACKEND_ENV"]
GIT_COMMIT_ON_SAVE = True
GIT_COMMIT_USERNAME = environment_identifier_for_this_config_file_only
GIT_COMMIT_EMAIL = f"{environment_identifier_for_this_config_file_only}@example.com"
SPIFFWORKFLOW_BACKEND_PERMISSIONS_FILE_NAME = environ.get(
"SPIFFWORKFLOW_BACKEND_PERMISSIONS_FILE_NAME", default="terraform_deployed_environment.yml"
)
RUN_BACKGROUND_SCHEDULER = (
environ.get("RUN_BACKGROUND_SCHEDULER", default="false") == "true"
)

View File

@ -93,7 +93,7 @@ class ProcessInstanceModel(SpiffworkflowBaseDBModel):
created_at_in_seconds: int = db.Column(db.Integer)
status: str = db.Column(db.String(50))
bpmn_xml_file_contents: bytes | None = None
bpmn_xml_file_contents: str | None = None
bpmn_version_control_type: str = db.Column(db.String(50))
bpmn_version_control_identifier: str = db.Column(db.String(255))
spiff_step: int = db.Column(db.Integer)
@ -101,9 +101,6 @@ class ProcessInstanceModel(SpiffworkflowBaseDBModel):
@property
def serialized(self) -> dict[str, Any]:
"""Return object data in serializeable format."""
local_bpmn_xml_file_contents = ""
if self.bpmn_xml_file_contents:
local_bpmn_xml_file_contents = self.bpmn_xml_file_contents.decode("utf-8")
return {
"id": self.id,
"process_model_identifier": self.process_model_identifier,
@ -112,7 +109,7 @@ class ProcessInstanceModel(SpiffworkflowBaseDBModel):
"start_in_seconds": self.start_in_seconds,
"end_in_seconds": self.end_in_seconds,
"process_initiator_id": self.process_initiator_id,
"bpmn_xml_file_contents": local_bpmn_xml_file_contents,
"bpmn_xml_file_contents": self.bpmn_xml_file_contents,
"bpmn_version_control_identifier": self.bpmn_version_control_identifier,
"bpmn_version_control_type": self.bpmn_version_control_type,
"spiff_step": self.spiff_step,

View File

@ -1,4 +1,4 @@
"""Spiff_step_details."""
"""Process_instance_metadata."""
from dataclasses import dataclass
from flask_bpmn.models.db import db
@ -23,7 +23,7 @@ class ProcessInstanceMetadataModel(SpiffworkflowBaseDBModel):
process_instance_id: int = db.Column(
ForeignKey(ProcessInstanceModel.id), nullable=False # type: ignore
)
key: str = db.Column(db.String(255), nullable=False)
key: str = db.Column(db.String(255), nullable=False, index=True)
value: str = db.Column(db.String(255), nullable=False)
updated_at_in_seconds: int = db.Column(db.Integer, nullable=False)

View File

@ -26,6 +26,10 @@ from spiffworkflow_backend.services.process_instance_processor import (
ReportMetadata = dict[str, Any]
class ProcessInstanceReportAlreadyExistsError(Exception):
"""ProcessInstanceReportAlreadyExistsError."""
class ProcessInstanceReportResult(TypedDict):
"""ProcessInstanceReportResult."""
@ -63,7 +67,7 @@ class ProcessInstanceReportModel(SpiffworkflowBaseDBModel):
),
)
id = db.Column(db.Integer, primary_key=True)
id: int = db.Column(db.Integer, primary_key=True)
identifier: str = db.Column(db.String(50), nullable=False, index=True)
report_metadata: dict = deferred(db.Column(db.JSON)) # type: ignore
created_by_id = db.Column(ForeignKey(UserModel.id), nullable=False, index=True)
@ -71,6 +75,11 @@ class ProcessInstanceReportModel(SpiffworkflowBaseDBModel):
created_at_in_seconds = db.Column(db.Integer)
updated_at_in_seconds = db.Column(db.Integer)
@classmethod
def default_order_by(cls) -> list[str]:
"""Default_order_by."""
return ["-start_in_seconds", "-id"]
@classmethod
def add_fixtures(cls) -> None:
"""Add_fixtures."""
@ -120,21 +129,27 @@ class ProcessInstanceReportModel(SpiffworkflowBaseDBModel):
identifier: str,
user: UserModel,
report_metadata: ReportMetadata,
) -> None:
) -> ProcessInstanceReportModel:
"""Make_fixture_report."""
process_instance_report = ProcessInstanceReportModel.query.filter_by(
identifier=identifier,
created_by_id=user.id,
).first()
if process_instance_report is None:
process_instance_report = cls(
identifier=identifier,
created_by_id=user.id,
report_metadata=report_metadata,
if process_instance_report is not None:
raise ProcessInstanceReportAlreadyExistsError(
f"Process instance report with identifier already exists: {identifier}"
)
db.session.add(process_instance_report)
db.session.commit()
process_instance_report = cls(
identifier=identifier,
created_by_id=user.id,
report_metadata=report_metadata,
)
db.session.add(process_instance_report)
db.session.commit()
return process_instance_report # type: ignore
@classmethod
def ticket_for_month_report(cls) -> dict:
@ -204,18 +219,8 @@ class ProcessInstanceReportModel(SpiffworkflowBaseDBModel):
user: UserModel,
) -> ProcessInstanceReportModel:
"""Create_with_attributes."""
# <<<<<<< HEAD
# process_model = ProcessModelService.get_process_model(
# process_model_id=f"{process_model_identifier}"
# )
# process_instance_report = cls(
# identifier=identifier,
# process_group_identifier="process_model.process_group_id",
# process_model_identifier=process_model.id,
# =======
process_instance_report = cls(
identifier=identifier,
# >>>>>>> main
created_by_id=user.id,
report_metadata=report_metadata,
)

View File

@ -38,6 +38,7 @@ class ProcessModelInfo:
fault_or_suspend_on_exception: str = NotificationType.fault.value
exception_notification_addresses: list[str] = field(default_factory=list)
parent_groups: list[dict] | None = None
metadata_extraction_paths: list[dict[str, str]] | None = None
def __post_init__(self) -> None:
"""__post_init__."""
@ -76,6 +77,13 @@ class ProcessModelInfoSchema(Schema):
exception_notification_addresses = marshmallow.fields.List(
marshmallow.fields.String
)
metadata_extraction_paths = marshmallow.fields.List(
marshmallow.fields.Dict(
keys=marshmallow.fields.Str(required=False),
values=marshmallow.fields.Str(required=False),
required=False,
)
)
@post_load
def make_spec(
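For illustration, metadata_extraction_paths is a list of string-to-string dicts; the "key"/"path" field names below are an assumption for the sake of example, since this diff only constrains the types.

# Hypothetical value; field names are assumed, only list[dict[str, str]] is guaranteed.
metadata_extraction_paths = [
    {"key": "invoice_number", "path": "invoice.number"},
    {"key": "vendor_name", "path": "vendor.name"},
]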

View File

@ -8,6 +8,10 @@ from marshmallow import INCLUDE
from sqlalchemy import UniqueConstraint
class SpecReferenceNotFoundError(Exception):
"""SpecReferenceNotFoundError."""
@dataclass()
class SpecReference:
"""File Reference Information.

View File

@ -21,7 +21,7 @@ class SpiffStepDetailsModel(SpiffworkflowBaseDBModel):
ForeignKey(ProcessInstanceModel.id), nullable=False # type: ignore
)
spiff_step: int = db.Column(db.Integer, nullable=False)
task_json: str = deferred(db.Column(db.JSON, nullable=False)) # type: ignore
task_json: dict = deferred(db.Column(db.JSON, nullable=False)) # type: ignore
timestamp: float = db.Column(db.DECIMAL(17, 6), nullable=False)
completed_by_user_id: int = db.Column(db.Integer, nullable=True)
lane_assignment_id: Optional[int] = db.Column(

View File

@ -108,7 +108,7 @@ class Task:
multi_instance_type: Union[MultiInstanceType, None] = None,
multi_instance_count: str = "",
multi_instance_index: str = "",
process_name: str = "",
process_identifier: str = "",
properties: Union[dict, None] = None,
process_instance_id: Union[int, None] = None,
process_instance_status: Union[str, None] = None,
@ -118,7 +118,8 @@ class Task:
form_schema: Union[str, None] = None,
form_ui_schema: Union[str, None] = None,
parent: Optional[str] = None,
event_definition: Union[dict[str, Any], None] = None
event_definition: Union[dict[str, Any], None] = None,
call_activity_process_identifier: Optional[str] = None,
):
"""__init__."""
self.id = id
@ -131,6 +132,7 @@ class Task:
self.lane = lane
self.parent = parent
self.event_definition = event_definition
self.call_activity_process_identifier = call_activity_process_identifier
self.data = data
if self.data is None:
@ -153,7 +155,7 @@ class Task:
self.multi_instance_index = (
multi_instance_index # And the index of the currently repeating task.
)
self.process_name = process_name
self.process_identifier = process_identifier
self.properties = properties # Arbitrary extension properties from BPMN editor.
if self.properties is None:
@ -179,7 +181,7 @@ class Task:
"multi_instance_type": multi_instance_type,
"multi_instance_count": self.multi_instance_count,
"multi_instance_index": self.multi_instance_index,
"process_name": self.process_name,
"process_identifier": self.process_identifier,
"properties": self.properties,
"process_instance_id": self.process_instance_id,
"process_instance_status": self.process_instance_status,
@ -190,6 +192,7 @@ class Task:
"form_ui_schema": self.form_ui_schema,
"parent": self.parent,
"event_definition": self.event_definition,
"call_activity_process_identifier": self.call_activity_process_identifier,
}
@classmethod
@ -285,7 +288,7 @@ class TaskSchema(Schema):
"multi_instance_type",
"multi_instance_count",
"multi_instance_index",
"process_name",
"process_identifier",
"properties",
"process_instance_id",
"form_schema",
@ -297,7 +300,7 @@ class TaskSchema(Schema):
documentation = marshmallow.fields.String(required=False, allow_none=True)
# form = marshmallow.fields.Nested(FormSchema, required=False, allow_none=True)
title = marshmallow.fields.String(required=False, allow_none=True)
process_name = marshmallow.fields.String(required=False, allow_none=True)
process_identifier = marshmallow.fields.String(required=False, allow_none=True)
lane = marshmallow.fields.String(required=False, allow_none=True)
@marshmallow.post_load

View File

@ -30,7 +30,8 @@ class UserModel(SpiffworkflowBaseDBModel):
__table_args__ = (db.UniqueConstraint("service", "service_id", name="service_key"),)
id = db.Column(db.Integer, primary_key=True)
username = db.Column(db.String(255), nullable=False, unique=True)
# service and service_id must be unique together, not username.
username = db.Column(db.String(255), nullable=False, unique=False)
uid = db.Column(db.String(50), unique=True)
service = db.Column(db.String(50), nullable=False, unique=False)
service_id = db.Column(db.String(255), nullable=False, unique=False)
@ -83,10 +84,6 @@ class UserModel(SpiffworkflowBaseDBModel):
algorithm="HS256",
)
def is_admin(self) -> bool:
"""Is_admin."""
return True
# @classmethod
# def from_open_id_user_info(cls, user_info: dict) -> Any:
# """From_open_id_user_info."""

View File

@ -0,0 +1 @@
"""__init__."""

View File

@ -0,0 +1,153 @@
"""OpenID Implementation for demos and local development.
A very insecure and partial OpenID implementation for use in demos and testing.
Provides the bare minimum endpoints required by SpiffWorkflow to
handle OpenID authentication -- definitely not a production-ready system.
This is just here to make local development, testing, and demonstration easier.
"""
import base64
import time
from typing import Any
from urllib.parse import urlencode
import jwt
import yaml
from flask import Blueprint
from flask import current_app
from flask import redirect
from flask import render_template
from flask import request
from flask import url_for
from werkzeug.wrappers import Response
openid_blueprint = Blueprint(
"openid", __name__, template_folder="templates", static_folder="static"
)
OPEN_ID_CODE = ":this_is_not_secure_do_not_use_in_production"
@openid_blueprint.route("/.well-known/openid-configuration", methods=["GET"])
def well_known() -> dict:
"""Open ID Discovery endpoint.
These urls can be very different from one openid impl to the next, this is just a small subset.
"""
host_url = request.host_url.strip("/")
return {
"issuer": f"{host_url}/openid",
"authorization_endpoint": f"{host_url}{url_for('openid.auth')}",
"token_endpoint": f"{host_url}{url_for('openid.token')}",
"end_session_endpoint": f"{host_url}{url_for('openid.end_session')}",
}
@openid_blueprint.route("/auth", methods=["GET"])
def auth() -> str:
"""Accepts a series of parameters."""
return render_template(
"login.html",
state=request.args.get("state"),
response_type=request.args.get("response_type"),
client_id=request.args.get("client_id"),
scope=request.args.get("scope"),
redirect_uri=request.args.get("redirect_uri"),
error_message=request.args.get("error_message", ""),
)
@openid_blueprint.route("/form_submit", methods=["POST"])
def form_submit() -> Any:
"""Handles the login form submission."""
users = get_users()
if (
request.values["Uname"] in users
and request.values["Pass"] == users[request.values["Uname"]]["password"]
):
# Redirect back to the end user with some detailed information
state = request.values.get("state")
data = {
"state": state,
"code": request.values["Uname"] + OPEN_ID_CODE,
"session_state": "",
}
url = request.values.get("redirect_uri") + "?" + urlencode(data)
return redirect(url)
else:
return render_template(
"login.html",
state=request.values.get("state"),
response_type=request.values.get("response_type"),
client_id=request.values.get("client_id"),
scope=request.values.get("scope"),
redirect_uri=request.values.get("redirect_uri"),
error_message="Login failed. Please try again.",
)
@openid_blueprint.route("/token", methods=["POST"])
def token() -> dict:
"""Url that will return a valid token, given the super secret sauce."""
request.values.get("grant_type")
code = request.values.get("code")
request.values.get("redirect_uri")
"""We just stuffed the user name on the front of the code, so grab it."""
user_name, secret_hash = code.split(":")
user_details = get_users()[user_name]
"""Get authentication from headers."""
authorization = request.headers.get("Authorization", "Basic ")
authorization = authorization[6:] # Remove "Basic"
authorization = base64.b64decode(authorization).decode("utf-8")
client_id, client_secret = authorization.split(":")
base_url = request.host_url + "openid"
id_token = jwt.encode(
{
"iss": base_url,
"aud": [client_id, "account"],
"iat": time.time(),
"exp": time.time() + 86400, # Expire after a day.
"sub": user_name,
"preferred_username": user_details.get("preferred_username", user_name),
},
client_secret,
algorithm="HS256",
)
response = {
"access_token": id_token,
"id_token": id_token,
"refresh_token": id_token,
}
return response
@openid_blueprint.route("/end_session", methods=["GET"])
def end_session() -> Response:
"""Logout."""
redirect_url = request.args.get("post_logout_redirect_uri", "http://localhost")
request.args.get("id_token_hint")
return redirect(redirect_url)
@openid_blueprint.route("/refresh", methods=["POST"])
def refresh() -> str:
"""Refresh."""
return ""
permission_cache = None
def get_users() -> Any:
"""Load users from a local configuration file."""
global permission_cache
if not permission_cache:
with open(current_app.config["PERMISSIONS_FILE_FULLPATH"]) as file:
permission_cache = yaml.safe_load(file)
if "users" in permission_cache:
return permission_cache["users"]
else:
return {}
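To make the flow above concrete, here is a rough client-side sketch against this demo provider: discover the endpoints, then exchange the code (which is just the username with OPEN_ID_CODE appended) for a token. The base URL, username, and client credentials are placeholders.

# Rough client sketch; base URL, user, and client credentials are placeholders.
import base64
import requests

base_url = "http://localhost:7000/openid"
config = requests.get(f"{base_url}/.well-known/openid-configuration").json()

code = "user1" + ":this_is_not_secure_do_not_use_in_production"
basic = base64.b64encode(b"demo-client:demo-secret").decode("ascii")
response = requests.post(
    config["token_endpoint"],
    data={"grant_type": "authorization_code", "code": code},
    headers={"Authorization": f"Basic {basic}"},
)
id_token = response.json()["id_token"]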

View File

@ -0,0 +1,112 @@
body{
margin: 0;
padding: 0;
background-color:white;
font-family: 'Arial';
}
header {
width: 100%;
background-color: black;
}
.logo_small {
padding: 5px 20px;
}
.error {
margin: 20px auto;
color: red;
font-weight: bold;
text-align: center;
}
.login{
width: 400px;
overflow: hidden;
margin: 20px auto;
padding: 50px;
background: #fff;
border-radius: 15px ;
}
h2{
text-align: center;
color: #277582;
padding: 20px;
}
label{
color: #fff;
width: 200px;
display: inline-block;
}
#log {
width: 100px;
height: 50px;
border: none;
padding-left: 7px;
background-color:#202020;
color: #DDD;
text-align: left;
}
.cds--btn--primary {
background-color: #0f62fe;
border: 1px solid #0000;
color: #fff;
}
.cds--btn {
align-items: center;
border: 0;
border-radius: 0;
box-sizing: border-box;
cursor: pointer;
display: inline-flex;
flex-shrink: 0;
font-family: inherit;
font-size: 100%;
font-size: .875rem;
font-weight: 400;
justify-content: space-between;
letter-spacing: .16px;
line-height: 1.28572;
margin: 0;
max-width: 20rem;
min-height: 3rem;
outline: none;
padding: calc(0.875rem - 3px) 63px calc(0.875rem - 3px) 15px;
position: relative;
text-align: left;
text-decoration: none;
transition: background 70ms cubic-bezier(0, 0, .38, .9), box-shadow 70ms cubic-bezier(0, 0, .38, .9), border-color 70ms cubic-bezier(0, 0, .38, .9), outline 70ms cubic-bezier(0, 0, .38, .9);
vertical-align: initial;
vertical-align: top;
width: max-content;
}
.cds--btn:hover {
background-color: #0145c5;
}
.cds--btn:focus {
background-color: #01369a;
}
.cds--text-input {
background-color: #eee;
border: none;
border-bottom: 1px solid #8d8d8d;
color: #161616;
font-family: inherit;
font-size: .875rem;
font-weight: 400;
height: 2.5rem;
letter-spacing: .16px;
line-height: 1.28572;
outline: 2px solid #0000;
outline-offset: -2px;
padding: 0 1rem;
transition: background-color 70ms cubic-bezier(.2,0,.38,.9),outline 70ms cubic-bezier(.2,0,.38,.9);
width: 100%;
}
span{
color: white;
font-size: 17px;
}
a{
float: right;
background-color: grey;
}

Binary file not shown.

After  |  Size: 9.9 KiB

Binary file not shown.

After  |  Size: 4.9 KiB

View File

@ -0,0 +1,36 @@
<!DOCTYPE html>
<html>
<head>
<title>Login Form</title>
<link rel="stylesheet" type="text/css" href="{{ url_for('openid.static', filename='login.css') }}">
</head>
<body>
<header>
<img class="logo_small" src="{{ url_for('openid.static', filename='logo_small.png') }}"/>
</header>
<h2>Login</h2>
<div class="error">{{error_message}}</div>
<div class="login">
<form id="login" method="post" action="{{ url_for('openid.form_submit') }}">
<input type="text" class="cds--text-input" name="Uname" id="Uname" placeholder="Username">
<br><br>
<input type="Password" class="cds--text-input" name="Pass" id="Pass" placeholder="Password">
<br><br>
<input type="hidden" name="state" value="{{state}}"/>
<input type="hidden" name="response_type" value="{{response_type}}"/>
<input type="hidden" name="client_id" value="{{client_id}}"/>
<input type="hidden" name="scope" value="{{scope}}"/>
<input type="hidden" name="redirect_uri" value="{{redirect_uri}}"/>
<input type="submit" name="log" class="cds--btn cds--btn--primary" value="Log In">
<br><br>
<!-- should maybe add this stuff in eventually, but this is just for testing.
<input type="checkbox" id="check">
<span>Remember me</span>
<br><br>
Forgot <a href="#">Password</a>
-->
</form>
</div>
</body>
</html>

View File

@ -1,6 +1,7 @@
"""APIs for dealing with process groups, process models, and process instances."""
import json
import random
import re
import string
import uuid
from typing import Any
@ -30,7 +31,9 @@ from SpiffWorkflow.task import TaskState
from sqlalchemy import and_
from sqlalchemy import asc
from sqlalchemy import desc
from sqlalchemy.orm import joinedload
from sqlalchemy import func
from sqlalchemy.orm import aliased
from sqlalchemy.orm import selectinload
from spiffworkflow_backend.exceptions.process_entity_not_found_error import (
ProcessEntityNotFoundError,
@ -52,6 +55,9 @@ from spiffworkflow_backend.models.process_instance import ProcessInstanceApiSche
from spiffworkflow_backend.models.process_instance import ProcessInstanceModel
from spiffworkflow_backend.models.process_instance import ProcessInstanceModelSchema
from spiffworkflow_backend.models.process_instance import ProcessInstanceStatus
from spiffworkflow_backend.models.process_instance_metadata import (
ProcessInstanceMetadataModel,
)
from spiffworkflow_backend.models.process_instance_report import (
ProcessInstanceReportModel,
)
@ -60,6 +66,7 @@ from spiffworkflow_backend.models.process_model import ProcessModelInfoSchema
from spiffworkflow_backend.models.secret_model import SecretModel
from spiffworkflow_backend.models.secret_model import SecretModelSchema
from spiffworkflow_backend.models.spec_reference import SpecReferenceCache
from spiffworkflow_backend.models.spec_reference import SpecReferenceNotFoundError
from spiffworkflow_backend.models.spec_reference import SpecReferenceSchema
from spiffworkflow_backend.models.spiff_logging import SpiffLoggingModel
from spiffworkflow_backend.models.spiff_step_details import SpiffStepDetailsModel
@ -152,9 +159,9 @@ def modify_process_model_id(process_model_id: str) -> str:
return process_model_id.replace("/", ":")
def un_modify_modified_process_model_id(modified_process_model_id: str) -> str:
def un_modify_modified_process_model_id(modified_process_model_identifier: str) -> str:
"""Un_modify_modified_process_model_id."""
return modified_process_model_id.replace(":", "/")
return modified_process_model_identifier.replace(":", "/")
def process_group_add(body: dict) -> flask.wrappers.Response:
@ -256,19 +263,26 @@ def process_model_create(
modified_process_group_id: str, body: Dict[str, Union[str, bool, int]]
) -> flask.wrappers.Response:
"""Process_model_create."""
process_model_info = ProcessModelInfoSchema().load(body)
body_include_list = [
"id",
"display_name",
"primary_file_name",
"primary_process_id",
"description",
"metadata_extraction_paths",
]
body_filtered = {
include_item: body[include_item]
for include_item in body_include_list
if include_item in body
}
if modified_process_group_id is None:
raise ApiError(
error_code="process_group_id_not_specified",
message="Process Model could not be created when process_group_id path param is unspecified",
status_code=400,
)
if process_model_info is None:
raise ApiError(
error_code="process_model_could_not_be_created",
message=f"Process Model could not be created from given body: {body}",
status_code=400,
)
unmodified_process_group_id = un_modify_modified_process_model_id(
modified_process_group_id
@ -281,6 +295,14 @@ def process_model_create(
status_code=400,
)
process_model_info = ProcessModelInfo(**body_filtered) # type: ignore
if process_model_info is None:
raise ApiError(
error_code="process_model_could_not_be_created",
message=f"Process Model could not be created from given body: {body}",
status_code=400,
)
ProcessModelService.add_process_model(process_model_info)
return Response(
json.dumps(ProcessModelInfoSchema().dump(process_model_info)),
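Only the keys in body_include_list survive the filtering above; an illustrative request body (values invented for the example, and the metadata_extraction_paths field names are assumed, as above):

# Illustrative body for process_model_create; unknown keys are silently dropped.
body = {
    "id": "my-group/my-model",
    "display_name": "My Model",
    "description": "An example process model",
    "metadata_extraction_paths": [{"key": "invoice_number", "path": "invoice.number"}],
    "unexpected_field": "dropped by body_filtered",
}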
@ -294,7 +316,6 @@ def process_model_delete(
) -> flask.wrappers.Response:
"""Process_model_delete."""
process_model_identifier = modified_process_model_identifier.replace(":", "/")
# process_model_identifier = f"{process_group_id}/{process_model_id}"
ProcessModelService().process_model_delete(process_model_identifier)
return Response(json.dumps({"ok": True}), status=200, mimetype="application/json")
@ -309,6 +330,7 @@ def process_model_update(
"primary_file_name",
"primary_process_id",
"description",
"metadata_extraction_paths",
]
body_filtered = {
include_item: body[include_item]
@ -316,7 +338,6 @@ def process_model_update(
if include_item in body
}
# process_model_identifier = f"{process_group_id}/{process_model_id}"
process_model = get_process_model(process_model_identifier)
ProcessModelService.update_process_model(process_model, body_filtered)
return ProcessModelInfoSchema().dump(process_model)
@ -325,11 +346,11 @@ def process_model_update(
def process_model_show(modified_process_model_identifier: str) -> Any:
"""Process_model_show."""
process_model_identifier = modified_process_model_identifier.replace(":", "/")
# process_model_identifier = f"{process_group_id}/{process_model_id}"
process_model = get_process_model(process_model_identifier)
# TODO: Temporary. Should not need the next line once models have correct ids
# process_model.id = process_model_identifier
files = sorted(SpecFileService.get_files(process_model))
files = sorted(
SpecFileService.get_files(process_model),
key=lambda f: "" if f.name == process_model.primary_file_name else f.sort_index,
)
process_model.files = files
for file in process_model.files:
file.references = SpecFileService.get_references_for_file(file, process_model)
@ -353,6 +374,20 @@ def process_model_move(
return make_response(jsonify(new_process_model), 201)
def process_model_publish(
modified_process_model_identifier: str, branch_to_update: Optional[str] = None
) -> flask.wrappers.Response:
"""Process_model_publish."""
if branch_to_update is None:
branch_to_update = current_app.config["GIT_BRANCH_TO_PUBLISH_TO"]
process_model_identifier = un_modify_modified_process_model_id(
modified_process_model_identifier
)
pr_url = GitService().publish(process_model_identifier, branch_to_update)
data = {"ok": True, "pr_url": pr_url}
return Response(json.dumps(data), status=200, mimetype="application/json")
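Assuming the route wiring matches this signature, publishing could be driven like so (the exact path and port are assumptions):

# Hypothetical client call; route path and port are assumptions.
import requests

requests.post(
    "http://localhost:7000/v1.0/process-models/my-group:my-model/publish",
    params={"branch_to_update": "main"},
    headers={"Authorization": "Bearer <access_token>"},
)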
def process_model_list(
process_group_identifier: Optional[str] = None,
recursive: Optional[bool] = False,
@ -394,9 +429,9 @@ def process_list() -> Any:
return SpecReferenceSchema(many=True).dump(references)
def get_file(modified_process_model_id: str, file_name: str) -> Any:
def get_file(modified_process_model_identifier: str, file_name: str) -> Any:
"""Get_file."""
process_model_identifier = modified_process_model_id.replace(":", "/")
process_model_identifier = modified_process_model_identifier.replace(":", "/")
process_model = get_process_model(process_model_identifier)
files = SpecFileService.get_files(process_model, file_name)
if len(files) == 0:
@ -416,11 +451,10 @@ def get_file(modified_process_model_id: str, file_name: str) -> Any:
def process_model_file_update(
modified_process_model_id: str, file_name: str
modified_process_model_identifier: str, file_name: str
) -> flask.wrappers.Response:
"""Process_model_file_update."""
process_model_identifier = modified_process_model_id.replace(":", "/")
# process_model_identifier = f"{process_group_id}/{process_model_id}"
process_model_identifier = modified_process_model_identifier.replace(":", "/")
process_model = get_process_model(process_model_identifier)
request_file = get_file_from_request()
@ -446,10 +480,10 @@ def process_model_file_update(
def process_model_file_delete(
modified_process_model_id: str, file_name: str
modified_process_model_identifier: str, file_name: str
) -> flask.wrappers.Response:
"""Process_model_file_delete."""
process_model_identifier = modified_process_model_id.replace(":", "/")
process_model_identifier = modified_process_model_identifier.replace(":", "/")
process_model = get_process_model(process_model_identifier)
try:
SpecFileService.delete_file(process_model, file_name)
@ -465,9 +499,9 @@ def process_model_file_delete(
return Response(json.dumps({"ok": True}), status=200, mimetype="application/json")
def add_file(modified_process_model_id: str) -> flask.wrappers.Response:
def add_file(modified_process_model_identifier: str) -> flask.wrappers.Response:
"""Add_file."""
process_model_identifier = modified_process_model_id.replace(":", "/")
process_model_identifier = modified_process_model_identifier.replace(":", "/")
process_model = get_process_model(process_model_identifier)
request_file = get_file_from_request()
if not request_file.filename:
@ -488,10 +522,12 @@ def add_file(modified_process_model_id: str) -> flask.wrappers.Response:
)
def process_instance_create(modified_process_model_id: str) -> flask.wrappers.Response:
def process_instance_create(
modified_process_model_identifier: str,
) -> flask.wrappers.Response:
"""Create_process_instance."""
process_model_identifier = un_modify_modified_process_model_id(
modified_process_model_id
modified_process_model_identifier
)
process_instance = (
ProcessInstanceService.create_process_instance_from_process_model_identifier(
@ -549,6 +585,7 @@ def process_instance_run(
def process_instance_terminate(
process_instance_id: int,
modified_process_model_identifier: str,
) -> flask.wrappers.Response:
"""Process_instance_run."""
process_instance = ProcessInstanceService().get_process_instance(
@ -561,6 +598,7 @@ def process_instance_terminate(
def process_instance_suspend(
process_instance_id: int,
modified_process_model_identifier: str,
) -> flask.wrappers.Response:
"""Process_instance_suspend."""
process_instance = ProcessInstanceService().get_process_instance(
@ -573,6 +611,7 @@ def process_instance_suspend(
def process_instance_resume(
process_instance_id: int,
modified_process_model_identifier: str,
) -> flask.wrappers.Response:
"""Process_instance_resume."""
process_instance = ProcessInstanceService().get_process_instance(
@ -584,19 +623,24 @@ def process_instance_resume(
def process_instance_log_list(
modified_process_model_identifier: str,
process_instance_id: int,
page: int = 1,
per_page: int = 100,
detailed: bool = False,
) -> flask.wrappers.Response:
"""Process_instance_log_list."""
# to make sure the process instance exists
process_instance = find_process_instance_by_id_or_raise(process_instance_id)
log_query = SpiffLoggingModel.query.filter(
SpiffLoggingModel.process_instance_id == process_instance.id
)
if not detailed:
log_query = log_query.filter(SpiffLoggingModel.message.in_(["State change to COMPLETED"])) # type: ignore
logs = (
SpiffLoggingModel.query.filter(
SpiffLoggingModel.process_instance_id == process_instance.id
)
.order_by(SpiffLoggingModel.timestamp.desc()) # type: ignore
log_query.order_by(SpiffLoggingModel.timestamp.desc()) # type: ignore
.join(
UserModel, UserModel.id == SpiffLoggingModel.current_user_id, isouter=True
) # isouter since if we don't have a user, we still want the log
@ -642,6 +686,7 @@ def message_instance_list(
.add_columns(
MessageModel.identifier.label("message_identifier"),
ProcessInstanceModel.process_model_identifier,
ProcessInstanceModel.process_model_display_name,
)
.paginate(page=page, per_page=per_page, error_out=False)
)
@ -776,10 +821,11 @@ def process_instance_list(
with_tasks_completed_by_my_group: Optional[bool] = None,
user_filter: Optional[bool] = False,
report_identifier: Optional[str] = None,
report_id: Optional[int] = None,
) -> flask.wrappers.Response:
"""Process_instance_list."""
process_instance_report = ProcessInstanceReportService.report_with_identifier(
g.user, report_identifier
g.user, report_id, report_identifier
)
if user_filter:
@ -810,11 +856,10 @@ def process_instance_list(
)
)
# process_model_identifier = un_modify_modified_process_model_id(modified_process_model_identifier)
process_instance_query = ProcessInstanceModel.query
# Always join that hot user table for good performance at serialization time.
process_instance_query = process_instance_query.options(
joinedload(ProcessInstanceModel.process_initiator)
selectinload(ProcessInstanceModel.process_initiator)
)
if report_filter.process_model_identifier is not None:
@ -928,25 +973,78 @@ def process_instance_list(
UserGroupAssignmentModel.user_id == g.user.id
)
instance_metadata_aliases = {}
stock_columns = ProcessInstanceReportService.get_column_names_for_model(
ProcessInstanceModel
)
for column in process_instance_report.report_metadata["columns"]:
if column["accessor"] in stock_columns:
continue
instance_metadata_alias = aliased(ProcessInstanceMetadataModel)
instance_metadata_aliases[column["accessor"]] = instance_metadata_alias
filter_for_column = None
if "filter_by" in process_instance_report.report_metadata:
filter_for_column = next(
(
f
for f in process_instance_report.report_metadata["filter_by"]
if f["field_name"] == column["accessor"]
),
None,
)
isouter = True
conditions = [
ProcessInstanceModel.id == instance_metadata_alias.process_instance_id,
instance_metadata_alias.key == column["accessor"],
]
if filter_for_column:
isouter = False
conditions.append(
instance_metadata_alias.value == filter_for_column["field_value"]
)
process_instance_query = process_instance_query.join(
instance_metadata_alias, and_(*conditions), isouter=isouter
).add_columns(func.max(instance_metadata_alias.value).label(column["accessor"]))
order_by_query_array = []
order_by_array = process_instance_report.report_metadata["order_by"]
if len(order_by_array) < 1:
order_by_array = ProcessInstanceReportModel.default_order_by()
for order_by_option in order_by_array:
attribute = re.sub("^-", "", order_by_option)
if attribute in stock_columns:
if order_by_option.startswith("-"):
order_by_query_array.append(
getattr(ProcessInstanceModel, attribute).desc()
)
else:
order_by_query_array.append(
getattr(ProcessInstanceModel, attribute).asc()
)
elif attribute in instance_metadata_aliases:
if order_by_option.startswith("-"):
order_by_query_array.append(
func.max(instance_metadata_aliases[attribute].value).desc()
)
else:
order_by_query_array.append(
func.max(instance_metadata_aliases[attribute].value).asc()
)
process_instances = (
process_instance_query.group_by(ProcessInstanceModel.id)
.order_by(
ProcessInstanceModel.start_in_seconds.desc(), ProcessInstanceModel.id.desc() # type: ignore
)
.add_columns(ProcessInstanceModel.id)
.order_by(*order_by_query_array)
.paginate(page=page, per_page=per_page, error_out=False)
)
results = list(
map(
ProcessInstanceService.serialize_flat_with_task_data,
process_instances.items,
)
results = ProcessInstanceReportService.add_metadata_columns_to_process_instance(
process_instances.items, process_instance_report.report_metadata["columns"]
)
report_metadata = process_instance_report.report_metadata
response_json = {
"report_identifier": process_instance_report.identifier,
"report_metadata": report_metadata,
"report": process_instance_report,
"results": results,
"filters": report_filter.to_dict(),
"pagination": {
@ -959,33 +1057,74 @@ def process_instance_list(
return make_response(jsonify(response_json), 200)
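Tying the pieces of process_instance_list together: report_metadata supplies the columns (metadata accessors become aliased joins against ProcessInstanceMetadataModel), optional filter_by entries (which force an inner join on an exact value), and order_by strings where a leading "-" means descending. An illustrative shape:

# Illustrative report_metadata consumed by process_instance_list.
report_metadata = {
    "columns": [
        {"Header": "Id", "accessor": "id"},               # stock column on ProcessInstanceModel
        {"Header": "Vendor", "accessor": "vendor_name"},  # metadata column -> aliased join
    ],
    "filter_by": [
        {"field_name": "vendor_name", "field_value": "ACME"}  # inner join, exact match
    ],
    "order_by": ["-start_in_seconds", "-id"],  # "-" prefix sorts descending
}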
def process_instance_report_column_list() -> flask.wrappers.Response:
"""Process_instance_report_column_list."""
table_columns = ProcessInstanceReportService.builtin_column_options()
columns_for_metadata = (
db.session.query(ProcessInstanceMetadataModel.key)
.order_by(ProcessInstanceMetadataModel.key)
.distinct() # type: ignore
.all()
)
columns_for_metadata_strings = [
{"Header": i[0], "accessor": i[0], "filterable": True}
for i in columns_for_metadata
]
return make_response(jsonify(table_columns + columns_for_metadata_strings), 200)
def process_instance_show(
modified_process_model_identifier: str, process_instance_id: int
modified_process_model_identifier: str,
process_instance_id: int,
process_identifier: Optional[str] = None,
) -> flask.wrappers.Response:
"""Create_process_instance."""
process_model_identifier = modified_process_model_identifier.replace(":", "/")
process_instance = find_process_instance_by_id_or_raise(process_instance_id)
current_version_control_revision = GitService.get_current_revision()
process_model = get_process_model(process_model_identifier)
if process_model.primary_file_name:
process_model_with_diagram = None
name_of_file_with_diagram = None
if process_identifier:
spec_reference = SpecReferenceCache.query.filter_by(
identifier=process_identifier
).first()
if spec_reference is None:
raise SpecReferenceNotFoundError(
f"Could not find given process identifier in the cache: {process_identifier}"
)
process_model_with_diagram = ProcessModelService.get_process_model(
spec_reference.process_model_id
)
name_of_file_with_diagram = spec_reference.file_name
else:
process_model_with_diagram = get_process_model(process_model_identifier)
if process_model_with_diagram.primary_file_name:
name_of_file_with_diagram = process_model_with_diagram.primary_file_name
if process_model_with_diagram and name_of_file_with_diagram:
if (
process_instance.bpmn_version_control_identifier
== current_version_control_revision
):
bpmn_xml_file_contents = SpecFileService.get_data(
process_model, process_model.primary_file_name
)
process_model_with_diagram, name_of_file_with_diagram
).decode("utf-8")
else:
bpmn_xml_file_contents = GitService.get_instance_file_contents_for_revision(
process_model, process_instance.bpmn_version_control_identifier
process_model_with_diagram,
process_instance.bpmn_version_control_identifier,
file_name=name_of_file_with_diagram,
)
process_instance.bpmn_xml_file_contents = bpmn_xml_file_contents
return make_response(jsonify(process_instance), 200)
def process_instance_delete(process_instance_id: int) -> flask.wrappers.Response:
def process_instance_delete(
process_instance_id: int, modified_process_model_identifier: str
) -> flask.wrappers.Response:
"""Create_process_instance."""
process_instance = find_process_instance_by_id_or_raise(process_instance_id)
@ -1015,22 +1154,22 @@ def process_instance_report_list(
def process_instance_report_create(body: Dict[str, Any]) -> flask.wrappers.Response:
"""Process_instance_report_create."""
ProcessInstanceReportModel.create_report(
process_instance_report = ProcessInstanceReportModel.create_report(
identifier=body["identifier"],
user=g.user,
report_metadata=body["report_metadata"],
)
return Response(json.dumps({"ok": True}), status=200, mimetype="application/json")
return make_response(jsonify(process_instance_report), 201)
def process_instance_report_update(
report_identifier: str,
report_id: int,
body: Dict[str, Any],
) -> flask.wrappers.Response:
"""Process_instance_report_create."""
process_instance_report = ProcessInstanceReportModel.query.filter_by(
identifier=report_identifier,
id=report_id,
created_by_id=g.user.id,
).first()
if process_instance_report is None:
@ -1043,15 +1182,15 @@ def process_instance_report_update(
process_instance_report.report_metadata = body["report_metadata"]
db.session.commit()
return Response(json.dumps({"ok": True}), status=200, mimetype="application/json")
return make_response(jsonify(process_instance_report), 201)
def process_instance_report_delete(
report_identifier: str,
report_id: int,
) -> flask.wrappers.Response:
"""Process_instance_report_create."""
process_instance_report = ProcessInstanceReportModel.query.filter_by(
identifier=report_identifier,
id=report_id,
created_by_id=g.user.id,
).first()
if process_instance_report is None:
@ -1067,11 +1206,9 @@ def process_instance_report_delete(
return Response(json.dumps({"ok": True}), status=200, mimetype="application/json")
def service_tasks_show() -> flask.wrappers.Response:
"""Service_tasks_show."""
def service_task_list() -> flask.wrappers.Response:
"""Service_task_list."""
available_connectors = ServiceTaskService.available_connectors()
print(available_connectors)
return Response(
json.dumps(available_connectors), status=200, mimetype="application/json"
)
@ -1105,19 +1242,17 @@ def authentication_callback(
def process_instance_report_show(
report_identifier: str,
report_id: int,
page: int = 1,
per_page: int = 100,
) -> flask.wrappers.Response:
"""Process_instance_list."""
process_instances = ProcessInstanceModel.query.order_by( # .filter_by(process_model_identifier=process_model.id)
"""Process_instance_report_show."""
process_instances = ProcessInstanceModel.query.order_by(
ProcessInstanceModel.start_in_seconds.desc(), ProcessInstanceModel.id.desc() # type: ignore
).paginate(
page=page, per_page=per_page, error_out=False
)
).paginate(page=page, per_page=per_page, error_out=False)
process_instance_report = ProcessInstanceReportModel.query.filter_by(
identifier=report_identifier,
id=report_id,
created_by_id=g.user.id,
).first()
if process_instance_report is None:
@ -1279,7 +1414,7 @@ def get_tasks(
def process_instance_task_list(
modified_process_model_id: str,
modified_process_model_identifier: str,
process_instance_id: int,
all_tasks: bool = False,
spiff_step: int = 0,
@ -1298,7 +1433,8 @@ def process_instance_task_list(
)
if step_detail is not None and process_instance.bpmn_json is not None:
bpmn_json = json.loads(process_instance.bpmn_json)
bpmn_json["tasks"] = step_detail.task_json
bpmn_json["tasks"] = step_detail.task_json["tasks"]
bpmn_json["subprocesses"] = step_detail.task_json["subprocesses"]
process_instance.bpmn_json = json.dumps(bpmn_json)
processor = ProcessInstanceProcessor(process_instance)
@ -1396,9 +1532,6 @@ def task_show(process_instance_id: int, task_id: str) -> flask.wrappers.Response
task.form_ui_schema = ui_form_contents
if task.properties and task.data and "instructionsForEndUser" in task.properties:
print(
f"task.properties['instructionsForEndUser']: {task.properties['instructionsForEndUser']}"
)
if task.properties["instructionsForEndUser"]:
task.properties["instructionsForEndUser"] = render_jinja_template(
task.properties["instructionsForEndUser"], task.data
@ -1602,6 +1735,8 @@ def get_file_from_request() -> Any:
return request_file
# process_model_id uses forward slashes on all OSes
# this seems to return an object where process_model.id has backslashes on Windows
def get_process_model(process_model_id: str) -> ProcessModelInfo:
"""Get_process_model."""
process_model = None
@ -1713,9 +1848,26 @@ def get_spiff_task_from_process_instance(
return spiff_task
# sample body:
# {"ref": "refs/heads/main", "repository": {"name": "sample-process-models",
# "full_name": "sartography/sample-process-models", "private": False .... }}
# test with: ngrok http 7000
# where 7000 is the port the app is running on locally
def github_webhook_receive(body: Dict) -> Response:
"""Github_webhook_receive."""
auth_header = request.headers.get("X-Hub-Signature-256")
AuthorizationService.verify_sha256_token(auth_header)
result = GitService.handle_web_hook(body)
return Response(
json.dumps({"git_pull": result}), status=200, mimetype="application/json"
)
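The X-Hub-Signature-256 header checked by AuthorizationService.verify_sha256_token follows GitHub's standard scheme: an HMAC-SHA256 of the raw request body, keyed with the shared webhook secret. A sender-side sketch (secret and payload are placeholders):

# How the X-Hub-Signature-256 header is computed; values are placeholders.
import hmac
from hashlib import sha256

secret = b"my-webhook-secret"  # must match GITHUB_WEBHOOK_SECRET
payload = b'{"ref": "refs/heads/main"}'
signature = "sha256=" + hmac.new(secret, payload, sha256).hexdigest()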
#
# Methods for secrets CRUD - maybe move somewhere else:
#
def get_secret(key: str) -> Optional[str]:
"""Get_secret."""
return SecretService.get_secret(key)
@ -1748,7 +1900,6 @@ def secret_list(
def add_secret(body: Dict) -> Response:
"""Add secret."""
secret_model = SecretService().add_secret(body["key"], body["value"], g.user.id)
assert secret_model # noqa: S101
return Response(
json.dumps(SecretModelSchema().dump(secret_model)),
status=201,
@ -1845,7 +1996,12 @@ def _update_form_schema_with_task_data_as_needed(
_update_form_schema_with_task_data_as_needed(o, task_data)
def update_task_data(process_instance_id: str, task_id: str, body: Dict) -> Response:
def update_task_data(
process_instance_id: str,
modified_process_model_identifier: str,
task_id: str,
body: Dict,
) -> Response:
"""Update task data."""
process_instance = ProcessInstanceModel.query.filter(
ProcessInstanceModel.id == int(process_instance_id)

View File

@ -1,6 +1,7 @@
"""User."""
import ast
import base64
import json
from typing import Any
from typing import Dict
from typing import Optional
@ -15,8 +16,9 @@ from flask_bpmn.api.api_error import ApiError
from werkzeug.wrappers import Response
from spiffworkflow_backend.models.user import UserModel
from spiffworkflow_backend.services.authentication_service import AuthenticationService
from spiffworkflow_backend.services.authentication_service import (
AuthenticationService,
MissingAccessTokenError,
)
from spiffworkflow_backend.services.authorization_service import AuthorizationService
from spiffworkflow_backend.services.user_service import UserService
@ -58,7 +60,6 @@ def verify_token(
decoded_token = get_decoded_token(token)
if decoded_token is not None:
if "token_type" in decoded_token:
token_type = decoded_token["token_type"]
if token_type == "internal": # noqa: S105
@ -68,11 +69,11 @@ def verify_token(
current_app.logger.error(
f"Exception in verify_token getting user from decoded internal token. {e}"
)
elif "iss" in decoded_token.keys():
try:
user_info = AuthenticationService.get_user_info_from_open_id(token)
except ApiError as ae:
if AuthenticationService.validate_id_token(token):
user_info = decoded_token
except ApiError as ae: # ApiError is only thrown if the token is outdated.
# Try to refresh the token
user = UserService.get_user_by_service_and_service_id(
"open_id", decoded_token["sub"]
@ -86,14 +87,9 @@ def verify_token(
)
)
if auth_token and "error" not in auth_token:
# redirect to original url, with auth_token?
user_info = (
AuthenticationService.get_user_info_from_open_id(
auth_token["access_token"]
)
)
if not user_info:
raise ae
# We have the user, but the code below demands a user_info object so it
# can look the user up again, so construct a minimal one here.
user_info = {"sub": user.service_id}
else:
raise ae
else:
@ -203,6 +199,18 @@ def login(redirect_url: str = "/") -> Response:
return redirect(login_redirect_url)
def parse_id_token(token: str) -> Any:
"""Parse the id token."""
parts = token.split(".")
if len(parts) != 3:
raise Exception("Incorrect id token format")
payload = parts[1]
padded = payload + "=" * (-len(payload) % 4)  # re-add stripped base64 padding
decoded = base64.b64decode(padded)
return json.loads(decoded)
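A quick illustration of parse_id_token against a hand-built, unsigned token (illustration only; real tokens come from the OpenID provider):

# Build a throwaway JWT-shaped string and parse its payload back out.
import base64
import json

payload = base64.b64encode(json.dumps({"sub": "user1"}).encode()).rstrip(b"=")
token = b"e30." + payload + b".fake-signature"  # header "{}" . payload . signature
assert parse_id_token(token.decode())["sub"] == "user1"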
def login_return(code: str, state: str, session_state: str) -> Optional[Response]:
"""Login_return."""
state_dict = ast.literal_eval(base64.b64decode(state).decode("utf-8"))
@ -211,10 +219,9 @@ def login_return(code: str, state: str, session_state: str) -> Optional[Response
if "id_token" in auth_token_object:
id_token = auth_token_object["id_token"]
user_info = parse_id_token(id_token)
if AuthenticationService.validate_id_token(id_token):
user_info = AuthenticationService.get_user_info_from_open_id(
auth_token_object["access_token"]
)
if user_info and "error" not in user_info:
user_model = AuthorizationService.create_user_from_sign_in(user_info)
g.user = user_model.id
@ -262,10 +269,10 @@ def login_api_return(code: str, state: str, session_state: str) -> str:
code, "/v1.0/login_api_return"
)
access_token: str = auth_token_object["access_token"]
assert access_token # noqa: S101
if access_token is None:
raise MissingAccessTokenError("Cannot find the access token for the request")
return access_token
# return redirect("localhost:7000/v1.0/ui")
# return {'uid': 'user_1'}
def logout(id_token: str, redirect_url: Optional[str]) -> Response:
@ -332,15 +339,11 @@ def get_user_from_decoded_internal_token(decoded_token: dict) -> Optional[UserMo
.filter(UserModel.service_id == service_id)
.first()
)
# user: UserModel = UserModel.query.filter()
if user:
return user
user = UserModel(
username=service_id,
uid=service_id,
service=service,
service_id=service_id,
name="API User",
)
return user

View File

@ -1,4 +1,4 @@
"""Get_env."""
"""Save process instance metadata."""
from typing import Any
from flask_bpmn.models.db import db

View File

@ -16,6 +16,10 @@ from werkzeug.wrappers import Response
from spiffworkflow_backend.models.refresh_token import RefreshTokenModel
class MissingAccessTokenError(Exception):
"""MissingAccessTokenError."""
class AuthenticationProviderTypes(enum.Enum):
"""AuthenticationServiceProviders."""
@ -26,58 +30,35 @@ class AuthenticationProviderTypes(enum.Enum):
class AuthenticationService:
"""AuthenticationService."""
ENDPOINT_CACHE: dict = (
{}
) # We only need to discover the openid endpoints once, so we cache them.
@staticmethod
def get_open_id_args() -> tuple:
"""Get_open_id_args."""
open_id_server_url = current_app.config["OPEN_ID_SERVER_URL"]
open_id_client_id = current_app.config["OPEN_ID_CLIENT_ID"]
open_id_realm_name = current_app.config["OPEN_ID_REALM_NAME"]
open_id_client_secret_key = current_app.config[
"OPEN_ID_CLIENT_SECRET_KEY"
] # noqa: S105
return (
open_id_server_url,
open_id_client_id,
open_id_realm_name,
open_id_client_secret_key,
)
def client_id() -> str:
"""Returns the client id from the config."""
return current_app.config.get("OPEN_ID_CLIENT_ID", "")
@staticmethod
def server_url() -> str:
"""Returns the server url from the config."""
return current_app.config.get("OPEN_ID_SERVER_URL", "")
@staticmethod
def secret_key() -> str:
"""Returns the secret key from the config."""
return current_app.config.get("OPEN_ID_CLIENT_SECRET_KEY", "")
@classmethod
def get_user_info_from_open_id(cls, token: str) -> dict:
"""The token is an auth_token."""
(
open_id_server_url,
open_id_client_id,
open_id_realm_name,
open_id_client_secret_key,
) = cls.get_open_id_args()
headers = {"Authorization": f"Bearer {token}"}
request_url = f"{open_id_server_url}/realms/{open_id_realm_name}/protocol/openid-connect/userinfo"
try:
request_response = requests.get(request_url, headers=headers)
except Exception as e:
current_app.logger.error(f"Exception in get_user_info_from_id_token: {e}")
raise ApiError(
error_code="token_error",
message=f"Exception in get_user_info_from_id_token: {e}",
status_code=401,
) from e
if request_response.status_code == 401:
raise ApiError(
error_code="invalid_token", message="Please login", status_code=401
)
elif request_response.status_code == 200:
user_info: dict = json.loads(request_response.text)
return user_info
raise ApiError(
error_code="user_info_error",
message="Cannot get user info in get_user_info_from_id_token",
status_code=401,
)
def open_id_endpoint_for_name(cls, name: str) -> str:
"""All openid systems provide a mapping of static names to the full path of that endpoint."""
if name not in AuthenticationService.ENDPOINT_CACHE:
request_url = f"{cls.server_url()}/.well-known/openid-configuration"
response = requests.get(request_url)
AuthenticationService.ENDPOINT_CACHE = response.json()
if name not in AuthenticationService.ENDPOINT_CACHE:
raise Exception(f"Unknown OpenID Endpoint: {name}")
return AuthenticationService.ENDPOINT_CACHE.get(name, "")
@staticmethod
def get_backend_url() -> str:
@ -87,17 +68,10 @@ class AuthenticationService:
def logout(self, id_token: str, redirect_url: Optional[str] = None) -> Response:
"""Logout."""
if redirect_url is None:
redirect_url = "/"
return_redirect_url = f"{self.get_backend_url()}/v1.0/logout_return"
(
open_id_server_url,
open_id_client_id,
open_id_realm_name,
open_id_client_secret_key,
) = AuthenticationService.get_open_id_args()
redirect_url = f"{self.get_backend_url()}/v1.0/logout_return"
request_url = (
f"{open_id_server_url}/realms/{open_id_realm_name}/protocol/openid-connect/logout?"
+ f"post_logout_redirect_uri={return_redirect_url}&"
self.open_id_endpoint_for_name("end_session_endpoint")
+ f"?post_logout_redirect_uri={redirect_url}&"
+ f"id_token_hint={id_token}"
)
@ -113,18 +87,12 @@ class AuthenticationService:
self, state: str, redirect_url: str = "/v1.0/login_return"
) -> str:
"""Get_login_redirect_url."""
(
open_id_server_url,
open_id_client_id,
open_id_realm_name,
open_id_client_secret_key,
) = AuthenticationService.get_open_id_args()
return_redirect_url = f"{self.get_backend_url()}{redirect_url}"
login_redirect_url = (
f"{open_id_server_url}/realms/{open_id_realm_name}/protocol/openid-connect/auth?"
+ f"state={state}&"
self.open_id_endpoint_for_name("authorization_endpoint")
+ f"?state={state}&"
+ "response_type=code&"
+ f"client_id={open_id_client_id}&"
+ f"client_id={self.client_id()}&"
+ "scope=openid&"
+ f"redirect_uri={return_redirect_url}"
)
@ -134,14 +102,7 @@ class AuthenticationService:
self, code: str, redirect_url: str = "/v1.0/login_return"
) -> dict:
"""Get_auth_token_object."""
(
open_id_server_url,
open_id_client_id,
open_id_realm_name,
open_id_client_secret_key,
) = AuthenticationService.get_open_id_args()
backend_basic_auth_string = f"{open_id_client_id}:{open_id_client_secret_key}"
backend_basic_auth_string = f"{self.client_id()}:{self.secret_key()}"
backend_basic_auth_bytes = bytes(backend_basic_auth_string, encoding="ascii")
backend_basic_auth = base64.b64encode(backend_basic_auth_bytes)
headers = {
@ -154,7 +115,7 @@ class AuthenticationService:
"redirect_uri": f"{self.get_backend_url()}{redirect_url}",
}
request_url = f"{open_id_server_url}/realms/{open_id_realm_name}/protocol/openid-connect/token"
request_url = self.open_id_endpoint_for_name("token_endpoint")
response = requests.post(request_url, data=data, headers=headers)
auth_token_object: dict = json.loads(response.text)
@ -165,12 +126,6 @@ class AuthenticationService:
"""Https://openid.net/specs/openid-connect-core-1_0.html#IDTokenValidation."""
valid = True
now = time.time()
(
open_id_server_url,
open_id_client_id,
open_id_realm_name,
open_id_client_secret_key,
) = cls.get_open_id_args()
try:
decoded_token = jwt.decode(id_token, options={"verify_signature": False})
except Exception as e:
@ -179,15 +134,15 @@ class AuthenticationService:
message="Cannot decode id_token",
status_code=401,
) from e
if decoded_token["iss"] != f"{open_id_server_url}/realms/{open_id_realm_name}":
if decoded_token["iss"] != cls.server_url():
valid = False
elif (
open_id_client_id not in decoded_token["aud"]
cls.client_id() not in decoded_token["aud"]
and "account" not in decoded_token["aud"]
):
valid = False
elif "azp" in decoded_token and decoded_token["azp"] not in (
open_id_client_id,
cls.client_id(),
"account",
):
valid = False
@ -235,20 +190,14 @@ class AuthenticationService:
refresh_token_object: RefreshTokenModel = RefreshTokenModel.query.filter(
RefreshTokenModel.user_id == user_id
).first()
assert refresh_token_object # noqa: S101
return refresh_token_object.token
if refresh_token_object:
return refresh_token_object.token
return None
@classmethod
def get_auth_token_from_refresh_token(cls, refresh_token: str) -> dict:
"""Get a new auth_token from a refresh_token."""
(
open_id_server_url,
open_id_client_id,
open_id_realm_name,
open_id_client_secret_key,
) = cls.get_open_id_args()
backend_basic_auth_string = f"{open_id_client_id}:{open_id_client_secret_key}"
"""Converts a refresh token to an Auth Token by calling the openid's auth endpoint."""
backend_basic_auth_string = f"{cls.client_id()}:{cls.secret_key()}"
backend_basic_auth_bytes = bytes(backend_basic_auth_string, encoding="ascii")
backend_basic_auth = base64.b64encode(backend_basic_auth_bytes)
headers = {
@ -259,11 +208,11 @@ class AuthenticationService:
data = {
"grant_type": "refresh_token",
"refresh_token": refresh_token,
"client_id": open_id_client_id,
"client_secret": open_id_client_secret_key,
"client_id": cls.client_id(),
"client_secret": cls.secret_key(),
}
request_url = f"{open_id_server_url}/realms/{open_id_realm_name}/protocol/openid-connect/token"
request_url = cls.open_id_endpoint_for_name("token_endpoint")
response = requests.post(request_url, data=data, headers=headers)
auth_token_object: dict = json.loads(response.text)

View File

@ -1,5 +1,9 @@
"""Authorization_service."""
import inspect
import re
from hashlib import sha256
from hmac import compare_digest
from hmac import HMAC
from typing import Optional
from typing import Union
@ -8,6 +12,7 @@ import yaml
from flask import current_app
from flask import g
from flask import request
from flask import scaffold
from flask_bpmn.api.api_error import ApiError
from flask_bpmn.models.db import db
from SpiffWorkflow.task import Task as SpiffTask # type: ignore
@ -23,6 +28,7 @@ from spiffworkflow_backend.models.principal import PrincipalModel
from spiffworkflow_backend.models.user import UserModel
from spiffworkflow_backend.models.user import UserNotFoundError
from spiffworkflow_backend.models.user_group_assignment import UserGroupAssignmentModel
from spiffworkflow_backend.routes.openid_blueprint import openid_blueprint
from spiffworkflow_backend.services.group_service import GroupService
from spiffworkflow_backend.services.user_service import UserService
@ -42,6 +48,27 @@ class UserDoesNotHaveAccessToTaskError(Exception):
class AuthorizationService:
"""Determine whether a user has permission to perform their request."""
# https://stackoverflow.com/a/71320673/6090676
@classmethod
def verify_sha256_token(cls, auth_header: Optional[str]) -> None:
"""Verify_sha256_token."""
if auth_header is None:
raise ApiError(
error_code="unauthorized",
message="",
status_code=403,
)
received_sign = auth_header.split("sha256=")[-1].strip()
secret = current_app.config["GITHUB_WEBHOOK_SECRET"].encode()
expected_sign = HMAC(key=secret, msg=request.data, digestmod=sha256).hexdigest()
if not compare_digest(received_sign, expected_sign):
raise ApiError(
error_code="unauthorized",
message="",
status_code=403,
)
@classmethod
def has_permission(
cls, principals: list[PrincipalModel], permission: str, target_uri: str
@ -229,7 +256,11 @@ class AuthorizationService:
def should_disable_auth_for_request(cls) -> bool:
"""Should_disable_auth_for_request."""
swagger_functions = ["get_json_spec"]
authentication_exclusion_list = ["status", "authentication_callback"]
authentication_exclusion_list = [
"status",
"authentication_callback",
"github_webhook_receive",
]
if request.method == "OPTIONS":
return True
@ -241,6 +272,7 @@ class AuthorizationService:
return True
api_view_function = current_app.view_functions[request.endpoint]
module = inspect.getmodule(api_view_function)
if (
api_view_function
and api_view_function.__name__.startswith("login")
@ -248,6 +280,8 @@ class AuthorizationService:
or api_view_function.__name__.startswith("console_ui_")
or api_view_function.__name__ in authentication_exclusion_list
or api_view_function.__name__ in swagger_functions
or module == openid_blueprint
or module == scaffold # don't check permissions for static assets
):
return True

View File

@ -1,6 +1,8 @@
"""File_system_service."""
import os
from contextlib import contextmanager
from datetime import datetime
from typing import Generator
from typing import List
from typing import Optional
@ -23,13 +25,25 @@ class FileSystemService:
PROCESS_GROUP_JSON_FILE = "process_group.json"
PROCESS_MODEL_JSON_FILE = "process_model.json"
# https://stackoverflow.com/a/24176022/6090676
@staticmethod
@contextmanager
def cd(newdir: str) -> Generator:
"""Cd."""
prevdir = os.getcwd()
os.chdir(os.path.expanduser(newdir))
try:
yield
finally:
os.chdir(prevdir)
@staticmethod
def root_path() -> str:
"""Root_path."""
# fixme: allow absolute files
dir_name = current_app.config["BPMN_SPEC_ABSOLUTE_DIR"]
app_root = current_app.root_path
return os.path.join(app_root, "..", dir_name)
return os.path.abspath(os.path.join(app_root, "..", dir_name))
@staticmethod
def id_string_to_relative_path(id_string: str) -> str:
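The cd helper above (per the Stack Overflow answer cited) guarantees the previous working directory is restored even if the body raises. A usage sketch:

# Usage sketch: do work inside another directory, then return automatically.
import os

with FileSystemService.cd("/tmp"):
    print(os.getcwd())  # /tmp (or its resolved path, e.g. /private/tmp on macOS)
print(os.getcwd())      # back where we started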

View File

@ -1,56 +1,252 @@
"""Git_service."""
import os
import shutil
import subprocess # noqa we need the subprocess module to safely run the git commands
import uuid
from typing import Optional
from typing import Union
from flask import current_app
from flask import g
from spiffworkflow_backend.config import ConfigurationError
from spiffworkflow_backend.models.process_model import ProcessModelInfo
from spiffworkflow_backend.services.file_system_service import FileSystemService
class MissingGitConfigsError(Exception):
"""MissingGitConfigsError."""
class InvalidGitWebhookBodyError(Exception):
"""InvalidGitWebhookBodyError."""
class GitCloneUrlMismatchError(Exception):
"""GitCloneUrlMismatchError."""
class GitCommandError(Exception):
"""GitCommandError."""
# TODO: check for the existence of git and configs on bootup if publishing is enabled
class GitService:
"""GitService."""
@staticmethod
def get_current_revision() -> str:
@classmethod
def get_current_revision(cls) -> str:
"""Get_current_revision."""
bpmn_spec_absolute_dir = current_app.config["BPMN_SPEC_ABSOLUTE_DIR"]
# The value includes a carriage return character at the end, so we don't grab the last character
current_git_revision = os.popen( # noqa: S605
f"cd {bpmn_spec_absolute_dir} && git rev-parse --short HEAD"
).read()[
:-1
] # noqa: S605
return current_git_revision
with FileSystemService.cd(bpmn_spec_absolute_dir):
return cls.run_shell_command_to_get_stdout(
["git", "rev-parse", "--short", "HEAD"]
)
@staticmethod
@classmethod
def get_instance_file_contents_for_revision(
process_model: ProcessModelInfo, revision: str
) -> bytes:
cls,
process_model: ProcessModelInfo,
revision: str,
file_name: Optional[str] = None,
) -> str:
"""Get_instance_file_contents_for_revision."""
bpmn_spec_absolute_dir = current_app.config["BPMN_SPEC_ABSOLUTE_DIR"]
process_model_relative_path = FileSystemService.process_model_relative_path(
process_model
)
shell_cd_command = f"cd {bpmn_spec_absolute_dir}"
shell_git_command = f"git show {revision}:{process_model_relative_path}/{process_model.primary_file_name}"
shell_command = f"{shell_cd_command} && {shell_git_command}"
# git show 78ae5eb:category_number_one/script-task/script-task.bpmn
file_contents: str = os.popen(shell_command).read()[:-1] # noqa: S605
assert file_contents # noqa: S101
return file_contents.encode("utf-8")
file_name_to_use = file_name
if file_name_to_use is None:
file_name_to_use = process_model.primary_file_name
with FileSystemService.cd(bpmn_spec_absolute_dir):
shell_command = [
"git",
"show",
f"{revision}:{process_model_relative_path}/{file_name_to_use}",
]
return cls.run_shell_command_to_get_stdout(shell_command)
@staticmethod
def commit(message: str) -> str:
@classmethod
def commit(cls, message: str, repo_path: Optional[str] = None) -> str:
"""Commit."""
bpmn_spec_absolute_dir = current_app.config["BPMN_SPEC_ABSOLUTE_DIR"]
repo_path_to_use = repo_path
if repo_path is None:
repo_path_to_use = current_app.config["BPMN_SPEC_ABSOLUTE_DIR"]
if repo_path_to_use is None:
raise ConfigurationError("BPMN_SPEC_ABSOLUTE_DIR config must be set")
git_username = ""
git_email = ""
if (
current_app.config["GIT_COMMIT_USERNAME"]
and current_app.config["GIT_COMMIT_EMAIL"]
):
git_username = current_app.config["GIT_COMMIT_USERNAME"]
git_email = current_app.config["GIT_COMMIT_EMAIL"]
shell_command = f"./bin/git_commit_bpmn_models_repo '{bpmn_spec_absolute_dir}' '{message}' '{git_username}' '{git_email}'"
output = os.popen(shell_command).read() # noqa: S605
return output
if current_app.config["GIT_USERNAME"] and current_app.config["GIT_USER_EMAIL"]:
git_username = current_app.config["GIT_USERNAME"]
git_email = current_app.config["GIT_USER_EMAIL"]
shell_command_path = os.path.join(
current_app.root_path, "..", "..", "bin", "git_commit_bpmn_models_repo"
)
shell_command = [
shell_command_path,
repo_path_to_use,
message,
git_username,
git_email,
]
return cls.run_shell_command_to_get_stdout(shell_command)
@classmethod
def check_for_configs(cls) -> None:
"""Check_for_configs."""
if current_app.config["GIT_BRANCH_TO_PUBLISH_TO"] is None:
raise MissingGitConfigsError(
"Missing config for GIT_BRANCH_TO_PUBLISH_TO. "
"This is required for publishing process models"
)
if current_app.config["GIT_CLONE_URL_FOR_PUBLISHING"] is None:
raise MissingGitConfigsError(
"Missing config for GIT_CLONE_URL_FOR_PUBLISHING. "
"This is required for publishing process models"
)
@classmethod
def run_shell_command_as_boolean(cls, command: list[str]) -> bool:
"""Run_shell_command_as_boolean."""
# we know result will be a bool here
result: bool = cls.run_shell_command(command, return_success_state=True) # type: ignore
return result
@classmethod
def run_shell_command_to_get_stdout(cls, command: list[str]) -> str:
"""Run_shell_command_to_get_stdout."""
# we know result will be a CompletedProcess here
result: subprocess.CompletedProcess[bytes] = cls.run_shell_command(
command, return_success_state=False
) # type: ignore
return result.stdout.decode("utf-8").strip()
@classmethod
def run_shell_command(
cls, command: list[str], return_success_state: bool = False
) -> Union[subprocess.CompletedProcess[bytes], bool]:
"""Run_shell_command."""
# this is fine since we pass the commands directly
result = subprocess.run(command, check=False, capture_output=True) # noqa
if return_success_state:
return result.returncode == 0
if result.returncode != 0:
stdout = result.stdout.decode("utf-8")
stderr = result.stderr.decode("utf-8")
raise GitCommandError(
f"Failed to execute git command: {command} "
f"Stdout: {stdout} "
f"Stderr: {stderr} "
)
return result
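    # Illustrative calls (hypothetical arguments): run_shell_command raises
    # GitCommandError on a nonzero exit code, run_shell_command_as_boolean maps
    # the exit code to True/False, and run_shell_command_to_get_stdout returns
    # stripped stdout.
    #
    #     GitService.run_shell_command(["git", "status"])
    #     clean_tree = GitService.run_shell_command_as_boolean(["git", "diff", "--quiet"])
    #     head = GitService.run_shell_command_to_get_stdout(["git", "rev-parse", "HEAD"])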
# only supports github right now
@classmethod
def handle_web_hook(cls, webhook: dict) -> bool:
"""Handle_web_hook."""
cls.check_for_configs()
if "repository" not in webhook or "clone_url" not in webhook["repository"]:
raise InvalidGitWebhookBodyError(
f"Cannot find required keys of 'repository:clone_url' from webhook body: {webhook}"
)
clone_url = webhook["repository"]["clone_url"]
if clone_url != current_app.config["GIT_CLONE_URL_FOR_PUBLISHING"]:
raise GitCloneUrlMismatchError(
f"Configured clone url does not match clone url from webhook: {clone_url}"
)
if "ref" not in webhook:
raise InvalidGitWebhookBodyError(
f"Could not find the 'ref' arg in the webhook boy: {webhook}"
)
if current_app.config["GIT_BRANCH"] is None:
raise MissingGitConfigsError(
"Missing config for GIT_BRANCH. "
"This is required for updating the repository as a result of the webhook"
)
ref = webhook["ref"]
git_branch = current_app.config["GIT_BRANCH"]
if ref != f"refs/heads/{git_branch}":
return False
with FileSystemService.cd(current_app.config["BPMN_SPEC_ABSOLUTE_DIR"]):
cls.run_shell_command(["git", "pull"])
return True
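    # A minimal sketch of the GitHub push-webhook body this handler inspects,
    # assuming the GIT_* configs are set; only repository.clone_url and ref are
    # read, and refs for other branches return False without pulling:
    #
    #     webhook = {
    #         "repository": {"clone_url": "https://github.com/sartography/sample-process-models.git"},
    #         "ref": "refs/heads/main",
    #     }
    #     GitService.handle_web_hook(webhook)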
@classmethod
def publish(cls, process_model_id: str, branch_to_update: str) -> str:
"""Publish."""
cls.check_for_configs()
source_process_model_root = FileSystemService.root_path()
source_process_model_path = os.path.join(
source_process_model_root, process_model_id
)
unique_hex = uuid.uuid4().hex
clone_dir = f"sample-process-models.{unique_hex}"
# clone new instance of sample-process-models, checkout branch_to_update
        # adding a guid to the path mitigates flake8's hardcoded-/tmp warning
destination_process_root = f"/tmp/{clone_dir}" # noqa
git_clone_url = current_app.config["GIT_CLONE_URL_FOR_PUBLISHING"].replace(
"https://",
f"https://{current_app.config['GIT_USERNAME']}:{current_app.config['GIT_USER_PASSWORD']}@",
)
cmd = ["git", "clone", git_clone_url, destination_process_root]
cls.run_shell_command(cmd)
with FileSystemService.cd(destination_process_root):
# create publish branch from branch_to_update
cls.run_shell_command(["git", "checkout", branch_to_update])
branch_to_pull_request = f"publish-{process_model_id}"
# check if branch exists and checkout appropriately
command = [
"git",
"show-ref",
"--verify",
f"refs/remotes/origin/{branch_to_pull_request}",
]
if cls.run_shell_command_as_boolean(command):
cls.run_shell_command(["git", "checkout", branch_to_pull_request])
else:
cls.run_shell_command(["git", "checkout", "-b", branch_to_pull_request])
# copy files from process model into the new publish branch
destination_process_model_path = os.path.join(
destination_process_root, process_model_id
)
if os.path.exists(destination_process_model_path):
shutil.rmtree(destination_process_model_path)
shutil.copytree(source_process_model_path, destination_process_model_path)
# add and commit files to branch_to_pull_request, then push
commit_message = (
f"Request to publish changes to {process_model_id}, "
f"from {g.user.username} on {current_app.config['ENV_IDENTIFIER']}"
)
cls.commit(commit_message, destination_process_root)
cls.run_shell_command(
["git", "push", "--set-upstream", "origin", branch_to_pull_request]
)
# build url for github page to open PR
git_remote = cls.run_shell_command_to_get_stdout(
["git", "config", "--get", "remote.origin.url"]
)
remote_url = git_remote.strip().replace(".git", "")
pr_url = f"{remote_url}/compare/{branch_to_update}...{branch_to_pull_request}?expand=1"
# try to clean up
if os.path.exists(destination_process_root):
shutil.rmtree(destination_process_root)
return pr_url
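    # e.g. publish("group/model", "main") pushes branch "publish-group/model"
    # and returns a URL like:
    # https://github.com/<owner>/<repo>/compare/main...publish-group/model?expand=1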

View File

@ -80,6 +80,9 @@ from spiffworkflow_backend.models.message_instance import MessageInstanceModel
from spiffworkflow_backend.models.message_instance import MessageModel
from spiffworkflow_backend.models.process_instance import ProcessInstanceModel
from spiffworkflow_backend.models.process_instance import ProcessInstanceStatus
from spiffworkflow_backend.models.process_instance_metadata import (
ProcessInstanceMetadataModel,
)
from spiffworkflow_backend.models.process_model import ProcessModelInfo
from spiffworkflow_backend.models.script_attributes_context import (
ScriptAttributesContext,
@ -96,6 +99,7 @@ from spiffworkflow_backend.services.service_task_service import ServiceTaskDeleg
from spiffworkflow_backend.services.spec_file_service import SpecFileService
from spiffworkflow_backend.services.user_service import UserService
# Sorry about all this crap. I wanted to move this thing to another file, but
# importing a bunch of types causes circular imports.
@ -177,7 +181,12 @@ class CustomBpmnScriptEngine(PythonScriptEngine): # type: ignore
)
return Script.generate_augmented_list(script_attributes_context)
def evaluate(self, task: SpiffTask, expression: str, external_methods=None) -> Any:
def evaluate(
self,
task: SpiffTask,
expression: str,
external_methods: Optional[dict[str, Any]] = None,
) -> Any:
"""Evaluate."""
return self._evaluate(expression, task.data, task, external_methods)
@ -543,7 +552,7 @@ class ProcessInstanceProcessor:
"""SaveSpiffStepDetails."""
bpmn_json = self.serialize()
wf_json = json.loads(bpmn_json)
task_json = wf_json["tasks"]
task_json = {"tasks": wf_json["tasks"], "subprocesses": wf_json["subprocesses"]}
return {
"process_instance_id": self.process_instance_model.id,
@ -572,6 +581,41 @@ class ProcessInstanceProcessor:
db.session.add(details_model)
db.session.commit()
def extract_metadata(self, process_model_info: ProcessModelInfo) -> None:
"""Extract_metadata."""
metadata_extraction_paths = process_model_info.metadata_extraction_paths
if metadata_extraction_paths is None:
return
if len(metadata_extraction_paths) <= 0:
return
current_data = self.get_current_data()
for metadata_extraction_path in metadata_extraction_paths:
key = metadata_extraction_path["key"]
path = metadata_extraction_path["path"]
path_segments = path.split(".")
data_for_key = current_data
for path_segment in path_segments:
if path_segment in data_for_key:
data_for_key = data_for_key[path_segment]
else:
data_for_key = None # type: ignore
break
if data_for_key is not None:
pim = ProcessInstanceMetadataModel.query.filter_by(
process_instance_id=self.process_instance_model.id,
key=key,
).first()
if pim is None:
pim = ProcessInstanceMetadataModel(
process_instance_id=self.process_instance_model.id,
key=key,
)
pim.value = data_for_key
db.session.add(pim)
db.session.commit()
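    # For example, with metadata_extraction_paths of
    # [{"key": "awesome_var", "path": "outer.inner"}], task data of
    # {"outer": {"inner": "sweet2"}} upserts a ProcessInstanceMetadataModel row
    # with key "awesome_var" and value "sweet2"; a missing path segment stores
    # nothing for that key.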
def save(self) -> None:
"""Saves the current state of this processor to the database."""
self.process_instance_model.bpmn_json = self.serialize()
@ -598,6 +642,15 @@ class ProcessInstanceProcessor:
process_instance_id=self.process_instance_model.id
).all()
ready_or_waiting_tasks = self.get_all_ready_or_waiting_tasks()
process_model_display_name = ""
process_model_info = self.process_model_service.get_process_model(
self.process_instance_model.process_model_identifier
)
if process_model_info is not None:
process_model_display_name = process_model_info.display_name
self.extract_metadata(process_model_info)
for ready_or_waiting_task in ready_or_waiting_tasks:
# filter out non-usertasks
task_spec = ready_or_waiting_task.task_spec
@ -616,13 +669,6 @@ class ProcessInstanceProcessor:
if "formUiSchemaFilename" in properties:
ui_form_file_name = properties["formUiSchemaFilename"]
process_model_display_name = ""
process_model_info = self.process_model_service.get_process_model(
self.process_instance_model.process_model_identifier
)
if process_model_info is not None:
process_model_display_name = process_model_info.display_name
active_task = None
for at in active_tasks:
if at.task_id == str(ready_or_waiting_task.id):
@ -1159,8 +1205,8 @@ class ProcessInstanceProcessor:
def get_current_data(self) -> dict[str, Any]:
"""Get the current data for the process.
Return either most recent task data or the process data
if the process instance is complete
        Return either the most recent task data or, if the process instance
        is complete, the process data.
"""
if self.process_instance_model.status == "complete":
return self.get_data()

View File

@ -2,6 +2,9 @@
from dataclasses import dataclass
from typing import Optional
import sqlalchemy
from flask_bpmn.models.db import db
from spiffworkflow_backend.models.process_instance_report import (
ProcessInstanceReportModel,
)
@ -57,12 +60,21 @@ class ProcessInstanceReportService:
@classmethod
def report_with_identifier(
cls, user: UserModel, report_identifier: Optional[str] = None
cls,
user: UserModel,
report_id: Optional[int] = None,
report_identifier: Optional[str] = None,
) -> ProcessInstanceReportModel:
"""Report_with_filter."""
if report_id is not None:
process_instance_report = ProcessInstanceReportModel.query.filter_by(
id=report_id, created_by_id=user.id
).first()
if process_instance_report is not None:
return process_instance_report # type: ignore
if report_identifier is None:
report_identifier = "default"
process_instance_report = ProcessInstanceReportModel.query.filter_by(
identifier=report_identifier, created_by_id=user.id
).first()
@ -73,17 +85,9 @@ class ProcessInstanceReportService:
# TODO replace with system reports that are loaded on launch (or similar)
temp_system_metadata_map = {
"default": {
"columns": [
{"Header": "id", "accessor": "id"},
{
"Header": "process_model_display_name",
"accessor": "process_model_display_name",
},
{"Header": "start_in_seconds", "accessor": "start_in_seconds"},
{"Header": "end_in_seconds", "accessor": "end_in_seconds"},
{"Header": "username", "accessor": "username"},
{"Header": "status", "accessor": "status"},
],
"columns": cls.builtin_column_options(),
"filter_by": [],
"order_by": ["-start_in_seconds", "-id"],
},
"system_report_instances_initiated_by_me": {
"columns": [
@ -97,48 +101,31 @@ class ProcessInstanceReportService:
{"Header": "status", "accessor": "status"},
],
"filter_by": [{"field_name": "initiated_by_me", "field_value": True}],
"order_by": ["-start_in_seconds", "-id"],
},
"system_report_instances_with_tasks_completed_by_me": {
"columns": [
{"Header": "id", "accessor": "id"},
{
"Header": "process_model_display_name",
"accessor": "process_model_display_name",
},
{"Header": "start_in_seconds", "accessor": "start_in_seconds"},
{"Header": "end_in_seconds", "accessor": "end_in_seconds"},
{"Header": "username", "accessor": "username"},
{"Header": "status", "accessor": "status"},
],
"columns": cls.builtin_column_options(),
"filter_by": [
{"field_name": "with_tasks_completed_by_me", "field_value": True}
],
"order_by": ["-start_in_seconds", "-id"],
},
"system_report_instances_with_tasks_completed_by_my_groups": {
"columns": [
{"Header": "id", "accessor": "id"},
{
"Header": "process_model_display_name",
"accessor": "process_model_display_name",
},
{"Header": "start_in_seconds", "accessor": "start_in_seconds"},
{"Header": "end_in_seconds", "accessor": "end_in_seconds"},
{"Header": "username", "accessor": "username"},
{"Header": "status", "accessor": "status"},
],
"columns": cls.builtin_column_options(),
"filter_by": [
{
"field_name": "with_tasks_completed_by_my_group",
"field_value": True,
}
],
"order_by": ["-start_in_seconds", "-id"],
},
}
process_instance_report = ProcessInstanceReportModel(
identifier=report_identifier,
created_by_id=user.id,
report_metadata=temp_system_metadata_map[report_identifier], # type: ignore
report_metadata=temp_system_metadata_map[report_identifier],
)
return process_instance_report # type: ignore
@ -241,3 +228,43 @@ class ProcessInstanceReportService:
)
return report_filter
@classmethod
def add_metadata_columns_to_process_instance(
cls,
process_instance_sqlalchemy_rows: list[sqlalchemy.engine.row.Row], # type: ignore
metadata_columns: list[dict],
) -> list[dict]:
"""Add_metadata_columns_to_process_instance."""
results = []
for process_instance in process_instance_sqlalchemy_rows:
process_instance_dict = process_instance["ProcessInstanceModel"].serialized
for metadata_column in metadata_columns:
if metadata_column["accessor"] not in process_instance_dict:
process_instance_dict[
metadata_column["accessor"]
] = process_instance[metadata_column["accessor"]]
results.append(process_instance_dict)
return results
@classmethod
def get_column_names_for_model(cls, model: db.Model) -> list[str]: # type: ignore
"""Get_column_names_for_model."""
return [i.name for i in model.__table__.columns]
@classmethod
def builtin_column_options(cls) -> list[dict]:
"""Builtin_column_options."""
return [
{"Header": "Id", "accessor": "id", "filterable": False},
{
"Header": "Process",
"accessor": "process_model_display_name",
"filterable": False,
},
{"Header": "Start", "accessor": "start_in_seconds", "filterable": False},
{"Header": "End", "accessor": "end_in_seconds", "filterable": False},
{"Header": "Username", "accessor": "username", "filterable": False},
{"Header": "Status", "accessor": "status", "filterable": False},
]
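    # Process-instance metadata keys are appended to these builtin columns with
    # "filterable": True, e.g. {"Header": "key1", "accessor": "key1", "filterable": True}.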

View File

@ -304,6 +304,11 @@ class ProcessInstanceService:
else:
lane = None
if hasattr(spiff_task.task_spec, "spec"):
call_activity_process_identifier = spiff_task.task_spec.spec
else:
call_activity_process_identifier = None
parent_id = None
if spiff_task.parent:
parent_id = spiff_task.parent.id
@ -320,25 +325,11 @@ class ProcessInstanceService:
multi_instance_type=mi_type,
multi_instance_count=info["mi_count"],
multi_instance_index=info["mi_index"],
process_name=spiff_task.task_spec._wf_spec.description,
process_identifier=spiff_task.task_spec._wf_spec.name,
properties=props,
parent=parent_id,
event_definition=serialized_task_spec.get("event_definition"),
call_activity_process_identifier=call_activity_process_identifier,
)
return task
@staticmethod
def serialize_flat_with_task_data(
process_instance: ProcessInstanceModel,
) -> dict[str, Any]:
"""NOTE: This is crazy slow. Put the latest task data in the database."""
"""Serialize_flat_with_task_data."""
# results = {}
# try:
# processor = ProcessInstanceProcessor(process_instance)
# process_instance.data = processor.get_current_data()
# results = process_instance.serialized_flat
# except ApiError:
results = process_instance.serialized
return results

View File

@ -148,20 +148,18 @@ class ProcessModelService(FileSystemService):
error_code="existing_instances",
message=f"We cannot delete the model `{process_model_id}`, there are existing instances that depend on it.",
)
self.get_process_model(process_model_id)
# path = self.workflow_path(process_model)
path = f"{FileSystemService.root_path()}/{process_model_id}"
process_model = self.get_process_model(process_model_id)
path = self.workflow_path(process_model)
shutil.rmtree(path)
def process_model_move(
self, original_process_model_id: str, new_location: str
) -> ProcessModelInfo:
"""Process_model_move."""
original_model_path = os.path.abspath(
os.path.join(FileSystemService.root_path(), original_process_model_id)
)
process_model = self.get_process_model(original_process_model_id)
original_model_path = self.workflow_path(process_model)
_, model_id = os.path.split(original_model_path)
new_relative_path = f"{new_location}/{model_id}"
new_relative_path = os.path.join(new_location, model_id)
new_model_path = os.path.abspath(
os.path.join(FileSystemService.root_path(), new_relative_path)
)
@ -174,7 +172,6 @@ class ProcessModelService(FileSystemService):
cls, relative_path: str
) -> ProcessModelInfo:
"""Get_process_model_from_relative_path."""
process_group_identifier, _ = os.path.split(relative_path)
path = os.path.join(FileSystemService.root_path(), relative_path)
return cls.__scan_process_model(path)
@ -226,7 +223,7 @@ class ProcessModelService(FileSystemService):
user = UserService.current_user()
new_process_model_list = []
for process_model in process_models:
uri = f"/v1.0/process-models/{process_model.id.replace('/', ':')}/process-instances"
uri = f"/v1.0/process-instances/{process_model.id.replace('/', ':')}"
result = AuthorizationService.user_has_permission(
user=user, permission="create", target_uri=uri
)
@ -245,7 +242,7 @@ class ProcessModelService(FileSystemService):
if full_group_id_path is None:
full_group_id_path = process_group_id_segment
else:
full_group_id_path = f"{full_group_id_path}/{process_group_id_segment}" # type: ignore
full_group_id_path = os.path.join(full_group_id_path, process_group_id_segment) # type: ignore
parent_group = ProcessModelService.get_process_group(full_group_id_path)
if parent_group:
parent_group_array.append(
@ -307,8 +304,8 @@ class ProcessModelService(FileSystemService):
) -> ProcessGroup:
"""Process_group_move."""
original_group_path = self.process_group_path(original_process_group_id)
original_root, original_group_id = os.path.split(original_group_path)
new_root = f"{FileSystemService.root_path()}/{new_location}"
_, original_group_id = os.path.split(original_group_path)
new_root = os.path.join(FileSystemService.root_path(), new_location)
new_group_path = os.path.abspath(
os.path.join(FileSystemService.root_path(), new_root, original_group_id)
)
@ -432,6 +429,9 @@ class ProcessModelService(FileSystemService):
# process_group.process_groups.sort()
return process_group
    # the joined path may contain backslashes on windows; os.path.join inserts
    # the platform separator but leaves forward slashes already present in the
    # relative_path argument as-is:
    # path = os.path.join(FileSystemService.root_path(), relative_path)
@classmethod
def __scan_process_model(
cls,
@ -448,6 +448,10 @@ class ProcessModelService(FileSystemService):
data.pop("process_group_id")
# we don't save `id` in the json file, so we add it back in here.
relative_path = os.path.relpath(path, FileSystemService.root_path())
# even on windows, use forward slashes for ids
relative_path = relative_path.replace("\\", "/")
data["id"] = relative_path
process_model_info = ProcessModelInfo(**data)
if process_model_info is None:

View File

@ -31,7 +31,6 @@ class ServiceTaskDelegate:
if value.startswith(secret_prefix):
key = value.removeprefix(secret_prefix)
secret = SecretService().get_secret(key)
assert secret # noqa: S101
return secret.value
file_prefix = "file:"

View File

@ -171,13 +171,18 @@ class SpecFileService(FileSystemService):
ref.is_primary = True
if ref.is_primary:
ProcessModelService.update_process_model(
process_model_info,
{
"primary_process_id": ref.identifier,
"primary_file_name": file_name,
},
)
update_hash = {}
if not process_model_info.primary_file_name:
update_hash["primary_process_id"] = ref.identifier
update_hash["primary_file_name"] = file_name
elif file_name == process_model_info.primary_file_name:
update_hash["primary_process_id"] = ref.identifier
if len(update_hash) > 0:
ProcessModelService.update_process_model(
process_model_info,
update_hash,
)
SpecFileService.update_caches(ref)
return file

View File

@ -19,7 +19,11 @@
<bpmn:scriptTask id="hot_script_task_OH_YEEEEEEEEEEEEEEEEEEEEAH" name="OHHHHHHHHHHYEEEESSSSSSSSSS">
<bpmn:incoming>Flow_0bazl8x</bpmn:incoming>
<bpmn:outgoing>Flow_1mcaszp</bpmn:outgoing>
<bpmn:script>a = 1</bpmn:script>
<bpmn:script>a = 1
b = 2
outer = {}
outer["inner"] = 'sweet1'
</bpmn:script>
</bpmn:scriptTask>
<bpmn:endEvent id="Event_1vch1y0">
<bpmn:incoming>Flow_1mcaszp</bpmn:incoming>

View File

@ -0,0 +1,56 @@
<?xml version="1.0" encoding="UTF-8"?>
<bpmn:definitions xmlns:bpmn="http://www.omg.org/spec/BPMN/20100524/MODEL" xmlns:bpmndi="http://www.omg.org/spec/BPMN/20100524/DI" xmlns:dc="http://www.omg.org/spec/DD/20100524/DC" xmlns:di="http://www.omg.org/spec/DD/20100524/DI" id="Definitions_96f6665" targetNamespace="http://bpmn.io/schema/bpmn" exporter="Camunda Modeler" exporterVersion="3.0.0-dev">
<bpmn:process id="Process_hk6nsfl" isExecutable="true">
<bpmn:startEvent id="StartEvent_1">
<bpmn:outgoing>Flow_1ohrjz9</bpmn:outgoing>
</bpmn:startEvent>
<bpmn:sequenceFlow id="Flow_1ohrjz9" sourceRef="StartEvent_1" targetRef="Activity_0fah9rm" />
<bpmn:endEvent id="Event_1tk4dsv">
<bpmn:incoming>Flow_1flxgry</bpmn:incoming>
</bpmn:endEvent>
<bpmn:sequenceFlow id="Flow_18gs4jt" sourceRef="Activity_0fah9rm" targetRef="Activity_1bvyv67" />
<bpmn:scriptTask id="Activity_0fah9rm" name="First setting of data">
<bpmn:incoming>Flow_1ohrjz9</bpmn:incoming>
<bpmn:outgoing>Flow_18gs4jt</bpmn:outgoing>
<bpmn:script>outer = {}
invoice_number = 123
outer["inner"] = 'sweet1'
outer['time'] = time.time_ns()</bpmn:script>
</bpmn:scriptTask>
<bpmn:sequenceFlow id="Flow_1flxgry" sourceRef="Activity_1bvyv67" targetRef="Event_1tk4dsv" />
<bpmn:scriptTask id="Activity_1bvyv67" name="First setting of data">
<bpmn:incoming>Flow_18gs4jt</bpmn:incoming>
<bpmn:outgoing>Flow_1flxgry</bpmn:outgoing>
<bpmn:script>outer["inner"] = 'sweet2'</bpmn:script>
</bpmn:scriptTask>
</bpmn:process>
<bpmndi:BPMNDiagram id="BPMNDiagram_1">
<bpmndi:BPMNPlane id="BPMNPlane_1" bpmnElement="Process_hk6nsfl">
<bpmndi:BPMNShape id="_BPMNShape_StartEvent_2" bpmnElement="StartEvent_1">
<dc:Bounds x="179" y="159" width="36" height="36" />
</bpmndi:BPMNShape>
<bpmndi:BPMNShape id="Activity_1c5bi8c_di" bpmnElement="Activity_0fah9rm">
<dc:Bounds x="270" y="137" width="100" height="80" />
<bpmndi:BPMNLabel />
</bpmndi:BPMNShape>
<bpmndi:BPMNShape id="Event_1tk4dsv_di" bpmnElement="Event_1tk4dsv">
<dc:Bounds x="612" y="159" width="36" height="36" />
</bpmndi:BPMNShape>
<bpmndi:BPMNShape id="Activity_1ay4o3w_di" bpmnElement="Activity_1bvyv67">
<dc:Bounds x="430" y="137" width="100" height="80" />
</bpmndi:BPMNShape>
<bpmndi:BPMNEdge id="Flow_1ohrjz9_di" bpmnElement="Flow_1ohrjz9">
<di:waypoint x="215" y="177" />
<di:waypoint x="270" y="177" />
</bpmndi:BPMNEdge>
<bpmndi:BPMNEdge id="Flow_18gs4jt_di" bpmnElement="Flow_18gs4jt">
<di:waypoint x="370" y="177" />
<di:waypoint x="430" y="177" />
</bpmndi:BPMNEdge>
<bpmndi:BPMNEdge id="Flow_1flxgry_di" bpmnElement="Flow_1flxgry">
<di:waypoint x="530" y="177" />
<di:waypoint x="612" y="177" />
</bpmndi:BPMNEdge>
</bpmndi:BPMNPlane>
</bpmndi:BPMNDiagram>
</bpmn:definitions>

View File

@ -265,7 +265,7 @@ class BaseTest:
)
modified_process_model_id = test_process_model_id.replace("/", ":")
response = client.post(
f"/v1.0/process-models/{modified_process_model_id}/process-instances",
f"/v1.0/process-instances/{modified_process_model_id}",
headers=headers,
)
assert response.status_code == 201

View File

@ -57,7 +57,7 @@ class TestLoggingService(BaseTest):
assert response.status_code == 200
log_response = client.get(
f"/v1.0/process-instances/{process_instance_id}/logs",
f"/v1.0/logs/{self.modify_process_identifier_for_path_param(process_model_identifier)}/{process_instance_id}",
headers=headers,
)
assert log_response.status_code == 200

View File

@ -0,0 +1,61 @@
"""Test_authentication."""
from flask import Flask
from flask.testing import FlaskClient
from tests.spiffworkflow_backend.helpers.base_test import BaseTest
class TestFlaskOpenId(BaseTest):
"""An integrated Open ID that responds to openID requests.
By referencing a build in YAML file. Useful for
local development, testing, demos etc...
"""
def test_discovery_of_endpoints(
self,
app: Flask,
client: FlaskClient,
with_db_and_bpmn_file_cleanup: None,
) -> None:
"""Test discovery endpoints."""
response = client.get("/openid/.well-known/openid-configuration")
discovered_urls = response.json
assert "http://localhost/openid" == discovered_urls["issuer"]
assert (
"http://localhost/openid/auth" == discovered_urls["authorization_endpoint"]
)
assert "http://localhost/openid/token" == discovered_urls["token_endpoint"]
def test_get_login_page(
self,
app: Flask,
client: FlaskClient,
with_db_and_bpmn_file_cleanup: None,
) -> None:
"""It should be possible to get to a login page."""
data = {"state": {"bubblegum": 1, "daydream": 2}}
response = client.get("/openid/auth", query_string=data)
assert b"<h2>Login</h2>" in response.data
assert b"bubblegum" in response.data
def test_get_token(
self,
app: Flask,
client: FlaskClient,
with_db_and_bpmn_file_cleanup: None,
) -> None:
"""It should be possible to get a token."""
code = (
"c3BpZmZ3b3JrZmxvdy1iYWNrZW5kOkpYZVFFeG0wSmhRUEx1bWdIdElJcWY1MmJEYWxIejBx"
)
headers = {
"Content-Type": "application/x-www-form-urlencoded",
"Authorization": f"Basic {code}",
}
data = {
"grant_type": "authorization_code",
"code": code,
"redirect_url": "http://localhost:7000/v1.0/login_return",
}
response = client.post("/openid/token", data=data, headers=headers)
assert response

View File

@ -20,6 +20,9 @@ from spiffworkflow_backend.models.group import GroupModel
from spiffworkflow_backend.models.process_group import ProcessGroup
from spiffworkflow_backend.models.process_instance import ProcessInstanceModel
from spiffworkflow_backend.models.process_instance import ProcessInstanceStatus
from spiffworkflow_backend.models.process_instance_metadata import (
ProcessInstanceMetadataModel,
)
from spiffworkflow_backend.models.process_instance_report import (
ProcessInstanceReportModel,
)
@ -330,6 +333,9 @@ class TestProcessApi(BaseTest):
process_model.display_name = "Updated Display Name"
process_model.primary_file_name = "superduper.bpmn"
process_model.primary_process_id = "superduper"
process_model.metadata_extraction_paths = [
{"key": "extraction1", "path": "path1"}
]
modified_process_model_identifier = process_model_identifier.replace("/", ":")
response = client.put(
@ -343,6 +349,9 @@ class TestProcessApi(BaseTest):
assert response.json["display_name"] == "Updated Display Name"
assert response.json["primary_file_name"] == "superduper.bpmn"
assert response.json["primary_process_id"] == "superduper"
assert response.json["metadata_extraction_paths"] == [
{"key": "extraction1", "path": "path1"}
]
def test_process_model_list_all(
self,
@ -903,7 +912,7 @@ class TestProcessApi(BaseTest):
modified_process_model_identifier = process_model_identifier.replace("/", ":")
response = client.post(
f"/v1.0/process-models/{modified_process_model_identifier}/process-instances",
f"/v1.0/process-instances/{modified_process_model_identifier}",
headers=self.logged_in_headers(with_super_admin_user),
)
assert response.status_code == 201
@ -1145,10 +1154,11 @@ class TestProcessApi(BaseTest):
headers=self.logged_in_headers(with_super_admin_user),
)
show_response = client.get(
f"/v1.0/process-models/{modified_process_model_identifier}/process-instances/{process_instance_id}",
f"/v1.0/process-instances/{modified_process_model_identifier}/{process_instance_id}",
headers=self.logged_in_headers(with_super_admin_user),
)
assert show_response.json is not None
assert show_response.status_code == 200
file_system_root = FileSystemService.root_path()
file_path = (
f"{file_system_root}/{process_model_identifier}/{process_model_id}.bpmn"
@ -1157,6 +1167,60 @@ class TestProcessApi(BaseTest):
xml_file_contents = f_open.read()
assert show_response.json["bpmn_xml_file_contents"] == xml_file_contents
def test_process_instance_show_with_specified_process_identifier(
self,
app: Flask,
client: FlaskClient,
with_db_and_bpmn_file_cleanup: None,
with_super_admin_user: UserModel,
) -> None:
"""Test_process_instance_show_with_specified_process_identifier."""
process_model_id = "call_activity_nested"
process_model_identifier = self.create_group_and_model_with_bpmn(
client=client,
user=with_super_admin_user,
process_group_id="test_group_two",
process_model_id=process_model_id,
bpmn_file_location="call_activity_nested",
)
spec_reference = SpecReferenceCache.query.filter_by(
identifier="Level2b"
).first()
assert spec_reference
modified_process_model_identifier = (
self.modify_process_identifier_for_path_param(process_model_identifier)
)
headers = self.logged_in_headers(with_super_admin_user)
create_response = self.create_process_instance_from_process_model_id(
client, process_model_identifier, headers
)
assert create_response.json is not None
assert create_response.status_code == 201
process_instance_id = create_response.json["id"]
client.post(
f"/v1.0/process-instances/{modified_process_model_identifier}/{process_instance_id}/run",
headers=self.logged_in_headers(with_super_admin_user),
)
show_response = client.get(
f"/v1.0/process-instances/{modified_process_model_identifier}/{process_instance_id}?process_identifier={spec_reference.identifier}",
headers=self.logged_in_headers(with_super_admin_user),
)
assert show_response.json is not None
assert show_response.status_code == 200
file_system_root = FileSystemService.root_path()
process_instance_file_path = (
f"{file_system_root}/{process_model_identifier}/{process_model_id}.bpmn"
)
with open(process_instance_file_path) as f_open:
xml_file_contents = f_open.read()
assert show_response.json["bpmn_xml_file_contents"] != xml_file_contents
spec_reference_file_path = os.path.join(
file_system_root, spec_reference.relative_path
)
with open(spec_reference_file_path) as f_open:
xml_file_contents = f_open.read()
assert show_response.json["bpmn_xml_file_contents"] == xml_file_contents
def test_message_start_when_starting_process_instance(
self,
app: Flask,
@ -1311,7 +1375,7 @@ class TestProcessApi(BaseTest):
assert response.json is not None
response = client.post(
f"/v1.0/process-instances/{process_instance_id}/terminate",
f"/v1.0/process-instances/{self.modify_process_identifier_for_path_param(process_model_identifier)}/{process_instance_id}/terminate",
headers=self.logged_in_headers(with_super_admin_user),
)
assert response.status_code == 200
@ -1358,7 +1422,7 @@ class TestProcessApi(BaseTest):
assert response.json is not None
delete_response = client.delete(
f"/v1.0/process-instances/{process_instance_id}",
f"/v1.0/process-instances/{self.modify_process_identifier_for_path_param(process_model_identifier)}/{process_instance_id}",
headers=self.logged_in_headers(with_super_admin_user),
)
assert delete_response.status_code == 200
@ -1723,14 +1787,14 @@ class TestProcessApi(BaseTest):
],
}
ProcessInstanceReportModel.create_with_attributes(
report = ProcessInstanceReportModel.create_with_attributes(
identifier="sure",
report_metadata=report_metadata,
user=with_super_admin_user,
)
response = client.get(
"/v1.0/process-instances/reports/sure",
f"/v1.0/process-instances/reports/{report.id}",
headers=self.logged_in_headers(with_super_admin_user),
)
assert response.status_code == 200
@ -1769,14 +1833,14 @@ class TestProcessApi(BaseTest):
],
}
ProcessInstanceReportModel.create_with_attributes(
report = ProcessInstanceReportModel.create_with_attributes(
identifier="sure",
report_metadata=report_metadata,
user=with_super_admin_user,
)
response = client.get(
"/v1.0/process-instances/reports/sure?grade_level=1",
f"/v1.0/process-instances/reports/{report.id}?grade_level=1",
headers=self.logged_in_headers(with_super_admin_user),
)
assert response.status_code == 200
@ -1791,9 +1855,9 @@ class TestProcessApi(BaseTest):
with_super_admin_user: UserModel,
setup_process_instances_for_reports: list[ProcessInstanceModel],
) -> None:
"""Test_process_instance_report_show_with_default_list."""
"""Test_process_instance_report_show_with_bad_identifier."""
response = client.get(
"/v1.0/process-instances/reports/sure?grade_level=1",
"/v1.0/process-instances/reports/13000000?grade_level=1",
headers=self.logged_in_headers(with_super_admin_user),
)
assert response.status_code == 404
@ -2357,7 +2421,7 @@ class TestProcessApi(BaseTest):
assert process_instance.status == "user_input_required"
client.post(
f"/v1.0/process-instances/{process_instance_id}/suspend",
f"/v1.0/process-instances/{self.modify_process_identifier_for_path_param(process_model_identifier)}/{process_instance_id}/suspend",
headers=self.logged_in_headers(with_super_admin_user),
)
process_instance = ProcessInstanceService().get_process_instance(
@ -2544,3 +2608,313 @@ class TestProcessApi(BaseTest):
# make sure the new subgroup does exist
new_process_group = ProcessModelService.get_process_group(new_sub_path)
assert new_process_group.id == new_sub_path
# this doesn't work in CI
# assert "Initial Commit" in output
# def test_process_model_publish(
# self,
# app: Flask,
# client: FlaskClient,
# with_db_and_bpmn_file_cleanup: None,
# with_super_admin_user: UserModel,
# ) -> None:
# """Test_process_model_publish."""
# bpmn_root = FileSystemService.root_path()
# shell_command = ["git", "init", "--initial-branch=main", bpmn_root]
# output = GitService.run_shell_command_to_get_stdout(shell_command)
# assert output == f"Initialized empty Git repository in {bpmn_root}/.git/\n"
# with FileSystemService.cd(bpmn_root):
# output = GitService.run_shell_command_to_get_stdout(["git", "status"])
# assert "On branch main" in output
# assert "No commits yet" in output
# assert (
# 'nothing to commit (create/copy files and use "git add" to track)'
# in output
# )
#
# process_group_id = "test_group"
# self.create_process_group(
# client, with_super_admin_user, process_group_id, process_group_id
# )
#
# sub_process_group_id = "test_group/test_sub_group"
# process_model_id = "hello_world"
# bpmn_file_name = "hello_world.bpmn"
# bpmn_file_location = "hello_world"
# process_model_identifier = self.create_group_and_model_with_bpmn(
# client=client,
# user=with_super_admin_user,
# process_group_id=sub_process_group_id,
# process_model_id=process_model_id,
# bpmn_file_name=bpmn_file_name,
# bpmn_file_location=bpmn_file_location,
# )
# process_model_absolute_dir = os.path.join(
# bpmn_root, process_model_identifier
# )
#
# output = GitService.run_shell_command_to_get_stdout(["git", "status"])
# test_string = 'Untracked files:\n (use "git add <file>..." to include in what will be committed)\n\ttest_group'
# assert test_string in output
#
# os.system("git add .")
# output = os.popen("git commit -m 'Initial Commit'").read()
# assert "Initial Commit" in output
# assert "4 files changed" in output
# assert "test_group/process_group.json" in output
# assert "test_group/test_sub_group/hello_world/hello_world.bpmn" in output
# assert "test_group/test_sub_group/hello_world/process_model.json" in output
# assert "test_group/test_sub_group/process_group.json" in output
#
# output = GitService.run_shell_command_to_get_stdout(["git", "status"])
# assert "On branch main" in output
# assert "nothing to commit" in output
# assert "working tree clean" in output
#
# output = os.popen("git branch --list").read() # noqa: S605
# assert output == "* main\n"
# os.system("git branch staging")
# output = os.popen("git branch --list").read() # noqa: S605
# assert output == "* main\n staging\n"
#
# os.system("git checkout staging")
#
# output = GitService.run_shell_command_to_get_stdout(["git", "status"])
# assert "On branch staging" in output
# assert "nothing to commit" in output
# assert "working tree clean" in output
#
# # process_model = ProcessModelService.get_process_model(process_model_identifier)
#
# listing = os.listdir(process_model_absolute_dir)
# assert len(listing) == 2
# assert "hello_world.bpmn" in listing
# assert "process_model.json" in listing
#
# os.system("git checkout main")
#
# output = GitService.run_shell_command_to_get_stdout(["git", "status"])
# assert "On branch main" in output
# assert "nothing to commit" in output
# assert "working tree clean" in output
#
# file_data = b"abc123"
# new_file_path = os.path.join(process_model_absolute_dir, "new_file.txt")
# with open(new_file_path, "wb") as f_open:
# f_open.write(file_data)
#
# output = GitService.run_shell_command_to_get_stdout(["git", "status"])
# assert "On branch main" in output
# assert "Untracked files:" in output
# assert "test_group/test_sub_group/hello_world/new_file.txt" in output
#
# os.system(
# "git add test_group/test_sub_group/hello_world/new_file.txt"
# ) # noqa: S605
# output = os.popen("git commit -m 'add new_file.txt'").read() # noqa: S605
#
# assert "add new_file.txt" in output
# assert "1 file changed, 1 insertion(+)" in output
# assert "test_group/test_sub_group/hello_world/new_file.txt" in output
#
# listing = os.listdir(process_model_absolute_dir)
# assert len(listing) == 3
# assert "hello_world.bpmn" in listing
# assert "process_model.json" in listing
# assert "new_file.txt" in listing
#
# # modified_process_model_id = process_model_identifier.replace("/", ":")
# # response = client.post(
# # f"/v1.0/process-models/{modified_process_model_id}/publish?branch_to_update=staging",
# # headers=self.logged_in_headers(with_super_admin_user),
# # )
#
# print("test_process_model_publish")
def test_can_get_process_instance_list_with_report_metadata(
self,
app: Flask,
client: FlaskClient,
with_db_and_bpmn_file_cleanup: None,
with_super_admin_user: UserModel,
) -> None:
"""Test_can_get_process_instance_list_with_report_metadata."""
process_model = load_test_spec(
process_model_id="save_process_instance_metadata/save_process_instance_metadata",
bpmn_file_name="save_process_instance_metadata.bpmn",
process_model_source_directory="save_process_instance_metadata",
)
process_instance = self.create_process_instance_from_process_model(
process_model=process_model, user=with_super_admin_user
)
processor = ProcessInstanceProcessor(process_instance)
processor.do_engine_steps(save=True)
process_instance_metadata = ProcessInstanceMetadataModel.query.filter_by(
process_instance_id=process_instance.id
).all()
assert len(process_instance_metadata) == 3
report_metadata = {
"columns": [
{"Header": "ID", "accessor": "id"},
{"Header": "Status", "accessor": "status"},
{"Header": "Key One", "accessor": "key1"},
{"Header": "Key Two", "accessor": "key2"},
],
"order_by": ["status"],
"filter_by": [],
}
process_instance_report = ProcessInstanceReportModel.create_with_attributes(
identifier="sure",
report_metadata=report_metadata,
user=with_super_admin_user,
)
response = client.get(
f"/v1.0/process-instances?report_identifier={process_instance_report.identifier}",
headers=self.logged_in_headers(with_super_admin_user),
)
assert response.json is not None
assert response.status_code == 200
assert len(response.json["results"]) == 1
assert response.json["results"][0]["status"] == "complete"
assert response.json["results"][0]["id"] == process_instance.id
assert response.json["results"][0]["key1"] == "value1"
assert response.json["results"][0]["key2"] == "value2"
assert response.json["pagination"]["count"] == 1
assert response.json["pagination"]["pages"] == 1
assert response.json["pagination"]["total"] == 1
def test_can_get_process_instance_report_column_list(
self,
app: Flask,
client: FlaskClient,
with_db_and_bpmn_file_cleanup: None,
with_super_admin_user: UserModel,
) -> None:
"""Test_can_get_process_instance_list_with_report_metadata."""
process_model = load_test_spec(
process_model_id="save_process_instance_metadata/save_process_instance_metadata",
bpmn_file_name="save_process_instance_metadata.bpmn",
process_model_source_directory="save_process_instance_metadata",
)
process_instance = self.create_process_instance_from_process_model(
process_model=process_model, user=with_super_admin_user
)
processor = ProcessInstanceProcessor(process_instance)
processor.do_engine_steps(save=True)
process_instance_metadata = ProcessInstanceMetadataModel.query.filter_by(
process_instance_id=process_instance.id
).all()
assert len(process_instance_metadata) == 3
response = client.get(
"/v1.0/process-instances/reports/columns",
headers=self.logged_in_headers(with_super_admin_user),
)
assert response.json is not None
assert response.status_code == 200
assert response.json == [
{"Header": "Id", "accessor": "id", "filterable": False},
{
"Header": "Process",
"accessor": "process_model_display_name",
"filterable": False,
},
{"Header": "Start", "accessor": "start_in_seconds", "filterable": False},
{"Header": "End", "accessor": "end_in_seconds", "filterable": False},
{"Header": "Username", "accessor": "username", "filterable": False},
{"Header": "Status", "accessor": "status", "filterable": False},
{"Header": "key1", "accessor": "key1", "filterable": True},
{"Header": "key2", "accessor": "key2", "filterable": True},
{"Header": "key3", "accessor": "key3", "filterable": True},
]
def test_process_instance_list_can_order_by_metadata(
self,
app: Flask,
client: FlaskClient,
with_db_and_bpmn_file_cleanup: None,
with_super_admin_user: UserModel,
) -> None:
"""Test_process_instance_list_can_order_by_metadata."""
self.create_process_group(
client, with_super_admin_user, "test_group", "test_group"
)
process_model = load_test_spec(
"test_group/hello_world",
process_model_source_directory="nested-task-data-structure",
)
ProcessModelService.update_process_model(
process_model,
{
"metadata_extraction_paths": [
{"key": "time_ns", "path": "outer.time"},
]
},
)
process_instance_one = self.create_process_instance_from_process_model(
process_model
)
processor = ProcessInstanceProcessor(process_instance_one)
processor.do_engine_steps(save=True)
assert process_instance_one.status == "complete"
process_instance_two = self.create_process_instance_from_process_model(
process_model
)
processor = ProcessInstanceProcessor(process_instance_two)
processor.do_engine_steps(save=True)
assert process_instance_two.status == "complete"
report_metadata = {
"columns": [
{"Header": "id", "accessor": "id"},
{"Header": "Time", "accessor": "time_ns"},
],
"order_by": ["time_ns"],
}
report_one = ProcessInstanceReportModel.create_with_attributes(
identifier="report_one",
report_metadata=report_metadata,
user=with_super_admin_user,
)
response = client.get(
f"/v1.0/process-instances?report_id={report_one.id}",
headers=self.logged_in_headers(with_super_admin_user),
)
assert response.status_code == 200
assert response.json is not None
assert len(response.json["results"]) == 2
assert response.json["results"][0]["id"] == process_instance_one.id
assert response.json["results"][1]["id"] == process_instance_two.id
report_metadata = {
"columns": [
{"Header": "id", "accessor": "id"},
{"Header": "Time", "accessor": "time_ns"},
],
"order_by": ["-time_ns"],
}
report_two = ProcessInstanceReportModel.create_with_attributes(
identifier="report_two",
report_metadata=report_metadata,
user=with_super_admin_user,
)
response = client.get(
f"/v1.0/process-instances?report_id={report_two.id}",
headers=self.logged_in_headers(with_super_admin_user),
)
assert response.status_code == 200
assert response.json is not None
assert len(response.json["results"]) == 2
assert response.json["results"][1]["id"] == process_instance_one.id
assert response.json["results"][0]["id"] == process_instance_two.id

View File

@ -0,0 +1,22 @@
"""Process Model."""
from flask.app import Flask
from flask.testing import FlaskClient
from tests.spiffworkflow_backend.helpers.base_test import BaseTest
from spiffworkflow_backend.services.git_service import GitService
class TestGitService(BaseTest):
"""TestGitService."""
def test_strips_output_of_stdout_from_command(
self,
app: Flask,
client: FlaskClient,
with_db_and_bpmn_file_cleanup: None,
) -> None:
"""Test_strips_output_of_stdout_from_command."""
output = GitService.run_shell_command_to_get_stdout(
["echo", " This output should not end in space or newline \n"]
)
assert output == "This output should not end in space or newline"

View File

@ -37,7 +37,7 @@ def test_generate_report_with_filter_by_with_variable_substitution(
with_db_and_bpmn_file_cleanup: None,
setup_process_instances_for_reports: list[ProcessInstanceModel],
) -> None:
"""Test_user_can_be_given_permission_to_administer_process_group."""
"""Test_generate_report_with_filter_by_with_variable_substitution."""
process_instances = setup_process_instances_for_reports
report_metadata = {
"filter_by": [
@ -61,7 +61,7 @@ def test_generate_report_with_order_by_and_one_field(
with_db_and_bpmn_file_cleanup: None,
setup_process_instances_for_reports: list[ProcessInstanceModel],
) -> None:
"""Test_user_can_be_given_permission_to_administer_process_group."""
"""Test_generate_report_with_order_by_and_one_field."""
process_instances = setup_process_instances_for_reports
report_metadata = {"order_by": ["test_score"]}
results = do_report_with_metadata_and_instances(report_metadata, process_instances)
@ -75,7 +75,7 @@ def test_generate_report_with_order_by_and_two_fields(
with_db_and_bpmn_file_cleanup: None,
setup_process_instances_for_reports: list[ProcessInstanceModel],
) -> None:
"""Test_user_can_be_given_permission_to_administer_process_group."""
"""Test_generate_report_with_order_by_and_two_fields."""
process_instances = setup_process_instances_for_reports
report_metadata = {"order_by": ["grade_level", "test_score"]}
results = do_report_with_metadata_and_instances(report_metadata, process_instances)
@ -89,7 +89,7 @@ def test_generate_report_with_order_by_desc(
with_db_and_bpmn_file_cleanup: None,
setup_process_instances_for_reports: list[ProcessInstanceModel],
) -> None:
"""Test_user_can_be_given_permission_to_administer_process_group."""
"""Test_generate_report_with_order_by_desc."""
process_instances = setup_process_instances_for_reports
report_metadata = {"order_by": ["grade_level", "-test_score"]}
results = do_report_with_metadata_and_instances(report_metadata, process_instances)
@ -103,7 +103,7 @@ def test_generate_report_with_columns(
with_db_and_bpmn_file_cleanup: None,
setup_process_instances_for_reports: list[ProcessInstanceModel],
) -> None:
"""Test_user_can_be_given_permission_to_administer_process_group."""
"""Test_generate_report_with_columns."""
process_instances = setup_process_instances_for_reports
report_metadata = {
"columns": [

View File

@ -5,12 +5,16 @@ from flask_bpmn.models.db import db
from tests.spiffworkflow_backend.helpers.base_test import BaseTest
from tests.spiffworkflow_backend.helpers.test_data import load_test_spec
from spiffworkflow_backend.models.process_instance_metadata import (
ProcessInstanceMetadataModel,
)
from spiffworkflow_backend.models.process_model import ProcessModelInfo
from spiffworkflow_backend.models.spec_reference import SpecReferenceCache
from spiffworkflow_backend.models.user import UserModel
from spiffworkflow_backend.services.process_instance_processor import (
ProcessInstanceProcessor,
)
from spiffworkflow_backend.services.process_model_service import ProcessModelService
class TestProcessModel(BaseTest):
@ -122,6 +126,53 @@ class TestProcessModel(BaseTest):
processor.do_engine_steps(save=True)
assert process_instance.status == "complete"
def test_extract_metadata(
self,
app: Flask,
client: FlaskClient,
with_db_and_bpmn_file_cleanup: None,
with_super_admin_user: UserModel,
) -> None:
"""Test_can_run_process_model_with_call_activities."""
self.create_process_group(
client, with_super_admin_user, "test_group", "test_group"
)
process_model = load_test_spec(
"test_group/hello_world",
process_model_source_directory="nested-task-data-structure",
)
ProcessModelService.update_process_model(
process_model,
{
"metadata_extraction_paths": [
{"key": "awesome_var", "path": "outer.inner"},
{"key": "invoice_number", "path": "invoice_number"},
]
},
)
process_instance = self.create_process_instance_from_process_model(
process_model
)
processor = ProcessInstanceProcessor(process_instance)
processor.do_engine_steps(save=True)
assert process_instance.status == "complete"
process_instance_metadata_awesome_var = (
ProcessInstanceMetadataModel.query.filter_by(
process_instance_id=process_instance.id, key="awesome_var"
).first()
)
assert process_instance_metadata_awesome_var is not None
assert process_instance_metadata_awesome_var.value == "sweet2"
process_instance_metadata_awesome_var = (
ProcessInstanceMetadataModel.query.filter_by(
process_instance_id=process_instance.id, key="invoice_number"
).first()
)
assert process_instance_metadata_awesome_var is not None
assert process_instance_metadata_awesome_var.value == "123"
def create_test_process_model(self, id: str, display_name: str) -> ProcessModelInfo:
"""Create_test_process_model."""
return ProcessModelInfo(

View File

@ -8,6 +8,9 @@
# testing
/coverage
# in case we accidentally run backend tests in frontend. :D
/.coverage.*
# production
/build
@ -29,4 +32,4 @@ cypress/screenshots
/test*.json
# Editors
.idea
.idea

View File

@ -1,4 +1,5 @@
import { modifyProcessIdentifierForPathParam } from '../../src/helpers';
import { miscDisplayName } from '../support/helpers';
describe('process-models', () => {
beforeEach(() => {
@ -16,7 +17,7 @@ describe('process-models', () => {
const modelDisplayName = `Test Model 2 ${id}`;
const modelId = `test-model-2-${id}`;
const newModelDisplayName = `${modelDisplayName} edited`;
cy.contains('99-Shared Resources').click();
cy.contains(miscDisplayName).click();
cy.wait(500);
cy.contains(groupDisplayName).click();
cy.createModel(groupId, modelId, modelDisplayName);
@ -34,7 +35,7 @@ describe('process-models', () => {
cy.contains(`Process Model: ${newModelDisplayName}`);
// go back to process model show by clicking on the breadcrumb
cy.contains(modelId).click();
cy.contains(modelDisplayName).click();
cy.getBySel('delete-process-model-button').click();
cy.contains('Are you sure');
@ -46,6 +47,7 @@ describe('process-models', () => {
`process-groups/${modifyProcessIdentifierForPathParam(groupId)}`
);
cy.contains(modelId).should('not.exist');
cy.contains(modelDisplayName).should('not.exist');
});
it('can create new bpmn, dmn, and json files', () => {
@ -61,11 +63,11 @@ describe('process-models', () => {
const dmnFileName = `dmn_test_file_${id}`;
const jsonFileName = `json_test_file_${id}`;
cy.contains('99-Shared Resources').click();
cy.contains(miscDisplayName).click();
cy.wait(500);
cy.contains(groupDisplayName).click();
cy.createModel(groupId, modelId, modelDisplayName);
cy.contains(directParentGroupId).click();
cy.contains(groupDisplayName).click();
cy.contains(modelDisplayName).click();
cy.url().should(
'include',
@ -90,7 +92,7 @@ describe('process-models', () => {
cy.get('input[name=file_name]').type(bpmnFileName);
cy.contains('Save Changes').click();
cy.contains(`Process Model File: ${bpmnFileName}`);
cy.contains(modelId).click();
cy.contains(modelDisplayName).click();
cy.contains(`Process Model: ${modelDisplayName}`);
// cy.getBySel('files-accordion').click();
cy.contains(`${bpmnFileName}.bpmn`).should('exist');
@ -108,7 +110,7 @@ describe('process-models', () => {
cy.get('input[name=file_name]').type(dmnFileName);
cy.contains('Save Changes').click();
cy.contains(`Process Model File: ${dmnFileName}`);
cy.contains(modelId).click();
cy.contains(modelDisplayName).click();
cy.contains(`Process Model: ${modelDisplayName}`);
// cy.getBySel('files-accordion').click();
cy.contains(`${dmnFileName}.dmn`).should('exist');
@ -124,7 +126,7 @@ describe('process-models', () => {
cy.contains(`Process Model File: ${jsonFileName}`);
// wait for json to load before clicking away to avoid network errors
cy.wait(500);
cy.contains(modelId).click();
cy.contains(modelDisplayName).click();
cy.contains(`Process Model: ${modelDisplayName}`);
// cy.getBySel('files-accordion').click();
cy.contains(`${jsonFileName}.json`).should('exist');
@ -151,12 +153,12 @@ describe('process-models', () => {
const modelDisplayName = `Test Model 2 ${id}`;
const modelId = `test-model-2-${id}`;
cy.contains('Add a process group');
cy.contains('99-Shared Resources').click();
cy.contains(miscDisplayName).click();
cy.wait(500);
cy.contains(groupDisplayName).click();
cy.createModel(groupId, modelId, modelDisplayName);
cy.contains(`${directParentGroupId}`).click();
cy.contains(`${groupDisplayName}`).click();
cy.contains('Add a process model');
cy.contains(modelDisplayName).click();
cy.url().should(
@ -186,7 +188,7 @@ describe('process-models', () => {
.click();
// in breadcrumb
cy.contains(modelId).click();
cy.contains(modelDisplayName).click();
cy.getBySel('delete-process-model-button').click();
cy.contains('Are you sure');
@ -203,7 +205,7 @@ describe('process-models', () => {
// process models no longer has pagination post-tiles
// it.only('can paginate items', () => {
// cy.contains('99-Shared Resources').click();
// cy.contains(miscDisplayName).click();
// cy.wait(500);
// cy.contains('Acceptance Tests Group One').click();
// cy.basicPaginationTest();

View File

@ -1,5 +1,6 @@
import { string } from 'prop-types';
import { modifyProcessIdentifierForPathParam } from '../../src/helpers';
import { miscDisplayName } from './helpers';
// ***********************************************
// This example commands.js shows you how to
@ -86,15 +87,15 @@ Cypress.Commands.add('createModel', (groupId, modelId, modelDisplayName) => {
Cypress.Commands.add(
'runPrimaryBpmnFile',
(expectAutoRedirectToHumanTask = false) => {
cy.contains('Run').click();
cy.contains('Start').click();
if (expectAutoRedirectToHumanTask) {
// the url changes immediately, so also make sure we get some content from the next page, "Task:", or else when we try to interact with the page, it'll re-render and we'll get an error with cypress.
cy.url().should('include', `/tasks/`);
cy.contains('Task: ');
} else {
cy.contains(/Process Instance.*kicked off/);
cy.contains(/Process Instance.*[kK]icked [oO]ff/);
cy.reload(true);
cy.contains(/Process Instance.*kicked off/).should('not.exist');
cy.contains(/Process Instance.*[kK]icked [oO]ff/).should('not.exist');
}
}
);
@ -103,8 +104,8 @@ Cypress.Commands.add(
'navigateToProcessModel',
(groupDisplayName, modelDisplayName, modelIdentifier) => {
cy.navigateToAdmin();
cy.contains('99-Shared Resources').click();
cy.contains(`Process Group: 99-Shared Resources`, { timeout: 10000 });
cy.contains(miscDisplayName).click();
cy.contains(`Process Group: ${miscDisplayName}`, { timeout: 10000 });
cy.contains(groupDisplayName).click();
cy.contains(`Process Group: ${groupDisplayName}`);
// https://stackoverflow.com/q/51254946/6090676
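A minimal usage sketch for the two updated commands; the model names and identifier below are hypothetical placeholders, not values from the repo:

// hypothetical spec usage; only 'Acceptance Tests Group One' appears in the real specs
cy.navigateToProcessModel(
  'Acceptance Tests Group One', // groupDisplayName
  'My Model Display Name', // modelDisplayName (hypothetical)
  'my-model-id' // modelIdentifier (hypothetical)
);
// pass true when the primary file's first task is a human task, so the
// command asserts the auto-redirect to /tasks/ instead of the
// "kicked off" notification:
cy.runPrimaryBpmnFile(true);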

View File

@ -0,0 +1 @@
export const miscDisplayName = 'Shared Resources';
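Centralizing the display name here keeps specs and support files in sync if the top-level group is renamed again. Consumers import it relative to their own location, as the commands.js diff above already does:

import { miscDisplayName } from './helpers'; // path varies by file location

cy.contains(miscDisplayName).click();
cy.contains(`Process Group: ${miscDisplayName}`, { timeout: 10000 });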

View File

@ -68,7 +68,7 @@
"@cypress/grep": "^3.1.0",
"@typescript-eslint/eslint-plugin": "^5.30.5",
"@typescript-eslint/parser": "^5.30.6",
"cypress": "^10.8.0",
"cypress": "^12",
"eslint": "^8.19.0",
"eslint_d": "^12.2.0",
"eslint-config-airbnb": "^19.0.4",
@ -9850,9 +9850,9 @@
"integrity": "sha512-NJGVKPS81XejHcLhaLJS7plab0fK3slPh11mESeeDq2W4ZI5kUKK/LRRdVDvjJseojbPB7ZwjnyOybg3Igea/A=="
},
"node_modules/cypress": {
"version": "10.11.0",
"resolved": "https://registry.npmjs.org/cypress/-/cypress-10.11.0.tgz",
"integrity": "sha512-lsaE7dprw5DoXM00skni6W5ElVVLGAdRUUdZjX2dYsGjbY/QnpzWZ95Zom1mkGg0hAaO/QVTZoFVS7Jgr/GUPA==",
"version": "12.1.0",
"resolved": "https://registry.npmjs.org/cypress/-/cypress-12.1.0.tgz",
"integrity": "sha512-7fz8N84uhN1+ePNDsfQvoWEl4P3/VGKKmAg+bJQFY4onhA37Ys+6oBkGbNdwGeC7n2QqibNVPhk8x3YuQLwzfw==",
"dev": true,
"hasInstallScript": true,
"dependencies": {
@ -9903,7 +9903,7 @@
"cypress": "bin/cypress"
},
"engines": {
"node": ">=12.0.0"
"node": "^14.0.0 || ^16.0.0 || >=18.0.0"
}
},
"node_modules/cypress/node_modules/@types/node": {
@ -38586,9 +38586,9 @@
"integrity": "sha512-NJGVKPS81XejHcLhaLJS7plab0fK3slPh11mESeeDq2W4ZI5kUKK/LRRdVDvjJseojbPB7ZwjnyOybg3Igea/A=="
},
"cypress": {
"version": "10.11.0",
"resolved": "https://registry.npmjs.org/cypress/-/cypress-10.11.0.tgz",
"integrity": "sha512-lsaE7dprw5DoXM00skni6W5ElVVLGAdRUUdZjX2dYsGjbY/QnpzWZ95Zom1mkGg0hAaO/QVTZoFVS7Jgr/GUPA==",
"version": "12.1.0",
"resolved": "https://registry.npmjs.org/cypress/-/cypress-12.1.0.tgz",
"integrity": "sha512-7fz8N84uhN1+ePNDsfQvoWEl4P3/VGKKmAg+bJQFY4onhA37Ys+6oBkGbNdwGeC7n2QqibNVPhk8x3YuQLwzfw==",
"dev": true,
"requires": {
"@cypress/request": "^2.88.10",

View File

@ -104,7 +104,7 @@
"@cypress/grep": "^3.1.0",
"@typescript-eslint/eslint-plugin": "^5.30.5",
"@typescript-eslint/parser": "^5.30.6",
"cypress": "^10.8.0",
"cypress": "^12",
"eslint": "^8.19.0",
"eslint_d": "^12.2.0",
"eslint-config-airbnb": "^19.0.4",

View File

@ -41,4 +41,3 @@
-->
</body>
</html>

View File

@ -13,6 +13,7 @@ import AdminRoutes from './routes/AdminRoutes';
import { ErrorForDisplay } from './interfaces';
import { AbilityContext } from './contexts/Can';
import UserService from './services/UserService';
export default function App() {
const [errorMessage, setErrorMessage] = useState<ErrorForDisplay | null>(
@ -24,6 +25,11 @@ export default function App() {
[errorMessage]
);
if (!UserService.isLoggedIn()) {
UserService.doLogin();
return null;
}
const ability = defineAbility(() => {});
let errorTag = null;
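The new guard returns null so nothing renders while the browser redirects to the identity provider. Inferred only from the two calls above (the real service in src/services/UserService may differ), UserService exposes at least:

// hypothetical shape, inferred from usage; not the actual implementation
interface UserServiceLike {
  isLoggedIn: () => boolean; // true once a session/token exists
  doLogin: () => void; // starts the login redirect
}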

View File

@ -0,0 +1,22 @@
import { Link } from 'react-router-dom';
import { modifyProcessIdentifierForPathParam } from '../helpers';
import { MessageInstance, ProcessInstance } from '../interfaces';
export function FormatProcessModelDisplayName(
instanceObject: ProcessInstance | MessageInstance
) {
const {
process_model_identifier: processModelIdentifier,
process_model_display_name: processModelDisplayName,
} = instanceObject;
return (
<Link
to={`/admin/process-models/${modifyProcessIdentifierForPathParam(
processModelIdentifier
)}`}
title={processModelIdentifier}
>
{processModelDisplayName}
</Link>
);
}
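Despite the capitalized name, this is called as a plain function rather than rendered as a JSX element. A hypothetical table-body fragment using it (the processInstances array and its id field are illustrative assumptions):

{/* hypothetical list rendering; assumes each ProcessInstance has an id field */}
{processInstances.map((processInstance: ProcessInstance) => (
  <tr key={processInstance.id}>
    <td>{FormatProcessModelDisplayName(processInstance)}</td>
  </tr>
))}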

Some files were not shown because too many files have changed in this diff