diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 7587ce0d1..3f1c6dbb8 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -11,6 +11,12 @@ repos: require_serial: true # exclude: ^migrations/ exclude: "/migrations/" + + # otherwise it will not fix long lines if the long lines contain long strings + # https://github.com/psf/black/pull/1132 + # https://github.com/psf/black/pull/1609 + args: [--preview] + - id: check-added-large-files files: ^spiffworkflow-backend/ name: Check for added large files diff --git a/SpiffWorkflow/Makefile b/SpiffWorkflow/Makefile index 180691381..61cee6b53 100644 --- a/SpiffWorkflow/Makefile +++ b/SpiffWorkflow/Makefile @@ -36,8 +36,16 @@ uninstall: .PHONY : tests tests: - cd tests/$(NAME) - PYTHONPATH=../.. python -m unittest discover -v . "*Test.py" + python -m unittest discover -vs tests/SpiffWorkflow -p \*Test.py -t . + +.PHONY : tests-par +tests-par: + @if ! command -v unittest-parallel >/dev/null 2>&1; then \ + echo "unittest-parallel not found. Please install it with:"; \ + echo " pip install unittest-parallel"; \ + exit 1; \ + fi + unittest-parallel --module-fixtures -vs tests/SpiffWorkflow -p \*Test.py -t . .PHONY : tests-cov tests-cov: diff --git a/SpiffWorkflow/README.md b/SpiffWorkflow/README.md index a5ba1c772..e00ff6be2 100644 --- a/SpiffWorkflow/README.md +++ b/SpiffWorkflow/README.md @@ -72,7 +72,7 @@ coverage run --source=SpiffWorkflow -m unittest discover -v . 
"*Test.py" ``` ## Support -You can find us on Discord at https://discord.gg/zDEBEnrF +You can find us on Discord at https://discord.gg/BYHcc7PpUC Commercial support for SpiffWorkflow is available from [Sartography](https://sartography.com) diff --git a/SpiffWorkflow/SpiffWorkflow/bpmn/parser/BpmnParser.py b/SpiffWorkflow/SpiffWorkflow/bpmn/parser/BpmnParser.py index 1880eb215..581a00aea 100644 --- a/SpiffWorkflow/SpiffWorkflow/bpmn/parser/BpmnParser.py +++ b/SpiffWorkflow/SpiffWorkflow/bpmn/parser/BpmnParser.py @@ -29,7 +29,7 @@ from .ValidationException import ValidationException from ..specs.BpmnProcessSpec import BpmnProcessSpec from ..specs.events.EndEvent import EndEvent from ..specs.events.StartEvent import StartEvent -from ..specs.events.IntermediateEvent import BoundaryEvent, IntermediateCatchEvent, IntermediateThrowEvent +from ..specs.events.IntermediateEvent import BoundaryEvent, IntermediateCatchEvent, IntermediateThrowEvent, EventBasedGateway from ..specs.events.IntermediateEvent import SendTask, ReceiveTask from ..specs.SubWorkflowTask import CallActivity, SubWorkflowTask, TransactionSubprocess from ..specs.ExclusiveGateway import ExclusiveGateway @@ -47,7 +47,7 @@ from .task_parsers import (UserTaskParser, NoneTaskParser, ManualTaskParser, ExclusiveGatewayParser, ParallelGatewayParser, InclusiveGatewayParser, CallActivityParser, ScriptTaskParser, SubWorkflowParser, ServiceTaskParser) -from .event_parsers import (StartEventParser, EndEventParser, BoundaryEventParser, +from .event_parsers import (EventBasedGatewayParser, StartEventParser, EndEventParser, BoundaryEventParser, IntermediateCatchEventParser, IntermediateThrowEventParser, SendTaskParser, ReceiveTaskParser) @@ -57,7 +57,8 @@ XSD_PATH = os.path.join(os.path.dirname(__file__), 'schema', 'BPMN20.xsd') class BpmnValidator: def __init__(self, xsd_path=XSD_PATH, imports=None): - schema = etree.parse(open(xsd_path)) + with open(xsd_path) as xsd: + schema = etree.parse(xsd) if imports is not None: 
for ns, fn in imports.items(): elem = etree.Element( @@ -104,6 +105,7 @@ class BpmnParser(object): full_tag('boundaryEvent'): (BoundaryEventParser, BoundaryEvent), full_tag('receiveTask'): (ReceiveTaskParser, ReceiveTask), full_tag('sendTask'): (SendTaskParser, SendTask), + full_tag('eventBasedGateway'): (EventBasedGatewayParser, EventBasedGateway), } OVERRIDE_PARSER_CLASSES = {} diff --git a/SpiffWorkflow/SpiffWorkflow/bpmn/parser/TaskParser.py b/SpiffWorkflow/SpiffWorkflow/bpmn/parser/TaskParser.py index 5291b1623..552fe7117 100644 --- a/SpiffWorkflow/SpiffWorkflow/bpmn/parser/TaskParser.py +++ b/SpiffWorkflow/SpiffWorkflow/bpmn/parser/TaskParser.py @@ -121,6 +121,25 @@ class TaskParser(NodeParser): elif len(self.xpath('./bpmn:standardLoopCharacteristics')) > 0: self._set_multiinstance_attributes(True, 25, STANDARDLOOPCOUNT, loop_task=True) + def _add_boundary_event(self, children): + + parent = _BoundaryEventParent( + self.spec, '%s.BoundaryEventParent' % self.get_id(), + self.task, lane=self.task.lane) + self.process_parser.parsed_nodes[self.node.get('id')] = parent + parent.connect_outgoing(self.task, '%s.FromBoundaryEventParent' % self.get_id(), None, None) + for event in children: + child = self.process_parser.parse_node(event) + if isinstance(child.event_definition, CancelEventDefinition) \ + and not isinstance(self.task, TransactionSubprocess): + raise ValidationException('Cancel Events may only be used with transactions', + node=self.node, + filename=self.filename) + parent.connect_outgoing(child, + '%s.FromBoundaryEventParent' % event.get('id'), + None, None) + return parent + def parse_node(self): """ Parse this node, and all children, returning the connected task spec. 
@@ -139,30 +158,9 @@ class TaskParser(NodeParser): boundary_event_nodes = self.doc_xpath('.//bpmn:boundaryEvent[@attachedToRef="%s"]' % self.get_id()) if boundary_event_nodes: - parent_task = _BoundaryEventParent( - self.spec, '%s.BoundaryEventParent' % self.get_id(), - self.task, lane=self.task.lane) - self.process_parser.parsed_nodes[ - self.node.get('id')] = parent_task - parent_task.connect_outgoing( - self.task, '%s.FromBoundaryEventParent' % self.get_id(), - None, None) - for boundary_event in boundary_event_nodes: - b = self.process_parser.parse_node(boundary_event) - if isinstance(b.event_definition, CancelEventDefinition) \ - and not isinstance(self.task, TransactionSubprocess): - raise ValidationException( - 'Cancel Events may only be used with transactions', - node=self.node, - filename=self.filename) - parent_task.connect_outgoing( - b, - '%s.FromBoundaryEventParent' % boundary_event.get( - 'id'), - None, None) + parent = self._add_boundary_event(boundary_event_nodes) else: - self.process_parser.parsed_nodes[ - self.node.get('id')] = self.task + self.process_parser.parsed_nodes[self.node.get('id')] = self.task children = [] outgoing = self.doc_xpath('.//bpmn:sequenceFlow[@sourceRef="%s"]' % self.get_id()) @@ -202,7 +200,7 @@ class TaskParser(NodeParser): c, target_node, sequence_flow, sequence_flow.get('id') == default_outgoing) - return parent_task if boundary_event_nodes else self.task + return parent if boundary_event_nodes else self.task except ValidationException: raise except Exception as ex: diff --git a/SpiffWorkflow/SpiffWorkflow/bpmn/parser/event_parsers.py b/SpiffWorkflow/SpiffWorkflow/bpmn/parser/event_parsers.py index 07cef338e..177d755a4 100644 --- a/SpiffWorkflow/SpiffWorkflow/bpmn/parser/event_parsers.py +++ b/SpiffWorkflow/SpiffWorkflow/bpmn/parser/event_parsers.py @@ -5,7 +5,7 @@ from SpiffWorkflow.bpmn.specs.events.event_definitions import CorrelationPropert from .ValidationException import ValidationException from .TaskParser import 
TaskParser from .util import first, one -from ..specs.events.event_definitions import (TimerEventDefinition, MessageEventDefinition, +from ..specs.events.event_definitions import (MultipleEventDefinition, TimerEventDefinition, MessageEventDefinition, ErrorEventDefinition, EscalationEventDefinition, SignalEventDefinition, CancelEventDefinition, CycleTimerEventDefinition, @@ -81,17 +81,18 @@ class EventDefinitionParser(TaskParser): """Parse the timerEventDefinition node and return an instance of TimerEventDefinition.""" try: + label = self.node.get('name', self.node.get('id')) time_date = first(self.xpath('.//bpmn:timeDate')) if time_date is not None: - return TimerEventDefinition(self.node.get('name'), time_date.text) + return TimerEventDefinition(label, time_date.text) time_duration = first(self.xpath('.//bpmn:timeDuration')) if time_duration is not None: - return TimerEventDefinition(self.node.get('name'), time_duration.text) + return TimerEventDefinition(label, time_duration.text) time_cycle = first(self.xpath('.//bpmn:timeCycle')) if time_cycle is not None: - return CycleTimerEventDefinition(self.node.get('name'), time_cycle.text) + return CycleTimerEventDefinition(label, time_cycle.text) raise ValidationException("Unknown Time Specification", node=self.node, filename=self.filename) except Exception as e: raise ValidationException("Time Specification Error. 
" + str(e), node=self.node, filename=self.filename) @@ -109,7 +110,7 @@ class EventDefinitionParser(TaskParser): correlations.append(CorrelationProperty(key, expression, used_by)) return correlations - def _create_task(self, event_definition, cancel_activity=None): + def _create_task(self, event_definition, cancel_activity=None, parallel=None): if isinstance(event_definition, MessageEventDefinition): for prop in event_definition.correlation_properties: @@ -126,28 +127,40 @@ class EventDefinitionParser(TaskParser): } if cancel_activity is not None: kwargs['cancel_activity'] = cancel_activity + if parallel is not None: + kwargs['parallel'] = parallel return self.spec_class(self.spec, self.get_task_spec_name(), event_definition, **kwargs) def get_event_definition(self, xpaths): - """Returns the first event definition it can find in given list of xpaths""" + """Returns all event definitions it can find in given list of xpaths""" + + event_definitions = [] for path in xpaths: - event = first(self.xpath(path)) - if event is not None: + for event in self.xpath(path): if path == MESSAGE_EVENT_XPATH: - return self.parse_message_event(event) + event_definitions.append(self.parse_message_event(event)) elif path == SIGNAL_EVENT_XPATH: - return self.parse_signal_event(event) + event_definitions.append(self.parse_signal_event(event)) elif path == TIMER_EVENT_XPATH: - return self.parse_timer_event() + event_definitions.append(self.parse_timer_event()) elif path == CANCEL_EVENT_XPATH: - return self.parse_cancel_event() + event_definitions.append(self.parse_cancel_event()) elif path == ERROR_EVENT_XPATH: - return self.parse_error_event(event) + event_definitions.append(self.parse_error_event(event)) elif path == ESCALATION_EVENT_XPATH: - return self.parse_escalation_event(event) + event_definitions.append(self.parse_escalation_event(event)) elif path == TERMINATION_EVENT_XPATH: - return self.parse_terminate_event() - return NoneEventDefinition() + 
event_definitions.append(self.parse_terminate_event()) + + parallel = self.node.get('parallelMultiple') == 'true' + + if len(event_definitions) == 0: + return NoneEventDefinition() + elif len(event_definitions) == 1: + return event_definitions[0] + else: + return MultipleEventDefinition(event_definitions, parallel) + class StartEventParser(EventDefinitionParser): """Parses a Start Event, and connects it to the internal spec.start task. @@ -158,8 +171,7 @@ class StartEventParser(EventDefinitionParser): task = self._create_task(event_definition) self.spec.start.connect(task) if isinstance(event_definition, CycleTimerEventDefinition): - # We are misusing cycle timers, so this is a hack whereby we will - # revisit ourselves if we fire. + # We are misusing cycle timers, so this is a hack whereby we will revisit ourselves if we fire. task.connect(task) return task @@ -229,3 +241,22 @@ class BoundaryEventParser(EventDefinitionParser): if isinstance(event_definition, NoneEventDefinition): raise NotImplementedError('Unsupported Catch Event: %r', etree.tostring(self.node)) return self._create_task(event_definition, cancel_activity) + + +class EventBasedGatewayParser(EventDefinitionParser): + + def create_task(self): + return self._create_task(MultipleEventDefinition()) + + def handles_multiple_outgoing(self): + return True + + def connect_outgoing(self, outgoing_task, outgoing_task_node, sequence_flow_node, is_default): + self.task.event_definition.event_definitions.append(outgoing_task.event_definition) + self.task.connect_outgoing( + outgoing_task, + sequence_flow_node.get('id'), + sequence_flow_node.get('name', None), + self.parse_documentation(sequence_flow_node) + ) + \ No newline at end of file diff --git a/SpiffWorkflow/SpiffWorkflow/bpmn/serializer/bpmn_converters.py b/SpiffWorkflow/SpiffWorkflow/bpmn/serializer/bpmn_converters.py index 1e274c21f..9fa80ad61 100644 --- a/SpiffWorkflow/SpiffWorkflow/bpmn/serializer/bpmn_converters.py +++ 
b/SpiffWorkflow/SpiffWorkflow/bpmn/serializer/bpmn_converters.py @@ -7,7 +7,7 @@ from SpiffWorkflow.bpmn.specs.BpmnProcessSpec import BpmnDataSpecification from .dictionary import DictionaryConverter -from ..specs.events.event_definitions import SignalEventDefinition, MessageEventDefinition, NoneEventDefinition +from ..specs.events.event_definitions import MultipleEventDefinition, SignalEventDefinition, MessageEventDefinition, NoneEventDefinition from ..specs.events.event_definitions import TimerEventDefinition, CycleTimerEventDefinition, TerminateEventDefinition from ..specs.events.event_definitions import ErrorEventDefinition, EscalationEventDefinition, CancelEventDefinition from ..specs.events.event_definitions import CorrelationProperty, NamedEventDefinition @@ -91,7 +91,7 @@ class BpmnTaskSpecConverter(DictionaryConverter): event_definitions = [ NoneEventDefinition, CancelEventDefinition, TerminateEventDefinition, SignalEventDefinition, MessageEventDefinition, ErrorEventDefinition, EscalationEventDefinition, - TimerEventDefinition, CycleTimerEventDefinition ] + TimerEventDefinition, CycleTimerEventDefinition , MultipleEventDefinition] for event_definition in event_definitions: self.register( @@ -257,6 +257,9 @@ class BpmnTaskSpecConverter(DictionaryConverter): dct['error_code'] = event_definition.error_code if isinstance(event_definition, EscalationEventDefinition): dct['escalation_code'] = event_definition.escalation_code + if isinstance(event_definition, MultipleEventDefinition): + dct['event_definitions'] = [self.convert(e) for e in event_definition.event_definitions] + dct['parallel'] = event_definition.parallel return dct @@ -273,6 +276,8 @@ class BpmnTaskSpecConverter(DictionaryConverter): internal, external = dct.pop('internal'), dct.pop('external') if 'correlation_properties' in dct: dct['correlation_properties'] = [CorrelationProperty(**prop) for prop in dct['correlation_properties']] + if 'event_definitions' in dct: + dct['event_definitions'] = 
[self.restore(d) for d in dct['event_definitions']] event_definition = definition_class(**dct) event_definition.internal = internal event_definition.external = external diff --git a/SpiffWorkflow/SpiffWorkflow/bpmn/serializer/task_spec_converters.py b/SpiffWorkflow/SpiffWorkflow/bpmn/serializer/task_spec_converters.py index 852ebe4de..be1d810b8 100644 --- a/SpiffWorkflow/SpiffWorkflow/bpmn/serializer/task_spec_converters.py +++ b/SpiffWorkflow/SpiffWorkflow/bpmn/serializer/task_spec_converters.py @@ -21,7 +21,7 @@ from ..specs.ParallelGateway import ParallelGateway from ..specs.events.StartEvent import StartEvent from ..specs.events.EndEvent import EndEvent -from ..specs.events.IntermediateEvent import BoundaryEvent, IntermediateCatchEvent, IntermediateThrowEvent +from ..specs.events.IntermediateEvent import BoundaryEvent, EventBasedGateway, IntermediateCatchEvent, IntermediateThrowEvent from ..specs.events.IntermediateEvent import _BoundaryEventParent, SendTask, ReceiveTask from ..workflow import BpmnWorkflow @@ -52,6 +52,7 @@ class StartTaskConverter(BpmnTaskSpecConverter): def from_dict(self, dct): return self.task_spec_from_dict(dct) + class LoopResetTaskConverter(BpmnTaskSpecConverter): def __init__(self, data_converter=None, typename=None): @@ -70,6 +71,7 @@ class LoopResetTaskConverter(BpmnTaskSpecConverter): spec.destination_id = UUID(spec.destination_id) return spec + class EndJoinConverter(BpmnTaskSpecConverter): def __init__(self, data_converter=None, typename=None): @@ -310,3 +312,9 @@ class BoundaryEventParentConverter(BpmnTaskSpecConverter): def from_dict(self, dct): return self.task_spec_from_dict(dct) + + +class EventBasedGatewayConverter(EventConverter): + + def __init__(self, data_converter=None, typename=None): + super().__init__(EventBasedGateway, data_converter, typename) diff --git a/SpiffWorkflow/SpiffWorkflow/bpmn/serializer/workflow.py b/SpiffWorkflow/SpiffWorkflow/bpmn/serializer/workflow.py index 1173e7a8b..74d205d62 100644 --- 
a/SpiffWorkflow/SpiffWorkflow/bpmn/serializer/workflow.py +++ b/SpiffWorkflow/SpiffWorkflow/bpmn/serializer/workflow.py @@ -17,7 +17,7 @@ from .task_spec_converters import SimpleTaskConverter, StartTaskConverter, EndJo from .task_spec_converters import NoneTaskConverter, UserTaskConverter, ManualTaskConverter, ScriptTaskConverter from .task_spec_converters import CallActivityTaskConverter, TransactionSubprocessTaskConverter from .task_spec_converters import StartEventConverter, EndEventConverter -from .task_spec_converters import IntermediateCatchEventConverter, IntermediateThrowEventConverter +from .task_spec_converters import IntermediateCatchEventConverter, IntermediateThrowEventConverter, EventBasedGatewayConverter from .task_spec_converters import SendTaskConverter, ReceiveTaskConverter from .task_spec_converters import BoundaryEventConverter, BoundaryEventParentConverter from .task_spec_converters import ParallelGatewayConverter, ExclusiveGatewayConverter, InclusiveGatewayConverter @@ -27,7 +27,7 @@ DEFAULT_TASK_SPEC_CONVERTER_CLASSES = [ NoneTaskConverter, UserTaskConverter, ManualTaskConverter, ScriptTaskConverter, CallActivityTaskConverter, TransactionSubprocessTaskConverter, StartEventConverter, EndEventConverter, SendTaskConverter, ReceiveTaskConverter, - IntermediateCatchEventConverter, IntermediateThrowEventConverter, + IntermediateCatchEventConverter, IntermediateThrowEventConverter, EventBasedGatewayConverter, BoundaryEventConverter, BoundaryEventParentConverter, ParallelGatewayConverter, ExclusiveGatewayConverter, InclusiveGatewayConverter ] diff --git a/SpiffWorkflow/SpiffWorkflow/bpmn/specs/events/IntermediateEvent.py b/SpiffWorkflow/SpiffWorkflow/bpmn/specs/events/IntermediateEvent.py index 28e01f9ef..798c04c6d 100644 --- a/SpiffWorkflow/SpiffWorkflow/bpmn/specs/events/IntermediateEvent.py +++ b/SpiffWorkflow/SpiffWorkflow/bpmn/specs/events/IntermediateEvent.py @@ -111,6 +111,7 @@ class _BoundaryEventParent(Simple, BpmnSpecMixin): def 
deserialize(cls, serializer, wf_spec, s_state): return serializer.deserialize_boundary_event_parent(wf_spec, s_state, cls) + class BoundaryEvent(CatchingEvent): """Task Spec for a bpmn:boundaryEvent node.""" @@ -128,7 +129,6 @@ class BoundaryEvent(CatchingEvent): interrupting = 'Interrupting' if self.cancel_activity else 'Non-Interrupting' return f'{interrupting} {self.event_definition.event_type} Event' - def catches(self, my_task, event_definition, correlations=None): # Boundary events should only be caught while waiting return super(BoundaryEvent, self).catches(my_task, event_definition, correlations) and my_task.state == TaskState.WAITING @@ -148,3 +148,19 @@ class BoundaryEvent(CatchingEvent): @classmethod def deserialize(cls, serializer, wf_spec, s_state): return serializer.deserialize_boundary_event(wf_spec, s_state, cls) + + +class EventBasedGateway(CatchingEvent): + + @property + def spec_type(self): + return 'Event Based Gateway' + + def _predict_hook(self, my_task): + my_task._sync_children(self.outputs, state=TaskState.MAYBE) + + def _on_complete_hook(self, my_task): + for child in my_task.children: + if not child.task_spec.event_definition.has_fired(child): + child.cancel() + return super()._on_complete_hook(my_task) \ No newline at end of file diff --git a/SpiffWorkflow/SpiffWorkflow/bpmn/specs/events/event_definitions.py b/SpiffWorkflow/SpiffWorkflow/bpmn/specs/events/event_definitions.py index 96be3bffb..fc1cb2f06 100644 --- a/SpiffWorkflow/SpiffWorkflow/bpmn/specs/events/event_definitions.py +++ b/SpiffWorkflow/SpiffWorkflow/bpmn/specs/events/event_definitions.py @@ -20,6 +20,7 @@ import datetime from copy import deepcopy +from SpiffWorkflow.task import TaskState class EventDefinition(object): """ @@ -69,10 +70,10 @@ class EventDefinition(object): # We also don't have a more sophisticated method for addressing events to # a particular process, but this at least provides a mechanism for distinguishing # between processes and subprocesses. 
- if self.internal: - workflow.catch(event) if self.external: outer_workflow.catch(event, correlations) + if self.internal and (self.external and workflow != outer_workflow): + workflow.catch(event) def __eq__(self, other): return self.__class__.__name__ == other.__class__.__name__ @@ -92,6 +93,7 @@ class EventDefinition(object): obj.internal, obj.external = internal, external return obj + class NamedEventDefinition(EventDefinition): """ Extend the base event class to provide a name for the event. Most throw/catch events @@ -115,7 +117,6 @@ class NamedEventDefinition(EventDefinition): retdict['name'] = self.name return retdict - class CancelEventDefinition(EventDefinition): """ Cancel events are only handled by the outerworkflow, as they can only be used inside @@ -307,6 +308,11 @@ class TimerEventDefinition(EventDefinition): The Timer is considered to have fired if the evaluated dateTime expression is before datetime.datetime.now() """ + + if my_task.internal_data.get('event_fired'): + # If we manually send this event, this will be set + return True + dt = my_task.workflow.script_engine.evaluate(my_task, self.dateTime) if isinstance(dt,datetime.timedelta): if my_task._get_internal_data('start_time',None) is not None: @@ -330,6 +336,9 @@ class TimerEventDefinition(EventDefinition): now = datetime.date.today() return now > dt + def __eq__(self, other): + return self.__class__.__name__ == other.__class__.__name__ and self.label == other.label + def serialize(self): retdict = super(TimerEventDefinition, self).serialize() retdict['label'] = self.label @@ -363,6 +372,10 @@ class CycleTimerEventDefinition(EventDefinition): # We will fire this timer whenever a cycle completes # The task itself will manage counting how many times it fires + if my_task.internal_data.get('event_fired'): + # If we manually send this event, this will be set + return True + repeat, delta = my_task.workflow.script_engine.evaluate(my_task, self.cycle_definition) # This is the first time we've 
entered this event @@ -393,8 +406,65 @@ class CycleTimerEventDefinition(EventDefinition): my_task.internal_data['start_time'] = None super(CycleTimerEventDefinition, self).reset(my_task) + def __eq__(self, other): + return self.__class__.__name__ == other.__class__.__name__ and self.label == other.label + def serialize(self): retdict = super(CycleTimerEventDefinition, self).serialize() retdict['label'] = self.label retdict['cycle_definition'] = self.cycle_definition return retdict + + +class MultipleEventDefinition(EventDefinition): + + def __init__(self, event_definitions=None, parallel=False): + super().__init__() + self.event_definitions = event_definitions or [] + self.parallel = parallel + + @property + def event_type(self): + return 'Multiple' + + def has_fired(self, my_task): + + seen_events = my_task.internal_data.get('seen_events', []) + for event in self.event_definitions: + if isinstance(event, (TimerEventDefinition, CycleTimerEventDefinition)): + child = [c for c in my_task.children if c.task_spec.event_definition == event] + child[0].task_spec._update_hook(child[0]) + child[0]._set_state(TaskState.MAYBE) + if event.has_fired(my_task): + seen_events.append(event) + + if self.parallel: + # Parallel multiple need to match all events + return all(event in seen_events for event in self.event_definitions) + else: + return len(seen_events) > 0 + + def catch(self, my_task, event_definition=None): + event_definition.catch(my_task, event_definition) + seen_events = my_task.internal_data.get('seen_events', []) + [event_definition] + my_task._set_internal_data(seen_events=seen_events) + + def reset(self, my_task): + my_task.internal_data.pop('seen_events', None) + super().reset(my_task) + + def __eq__(self, other): + # This event can catch any of the events associated with it + for event in self.event_definitions: + if event == other: + return True + return False + + def throw(self, my_task): + # Mutiple events throw all associated events when they fire + for 
event_definition in self.event_definitions: + self._throw( + event=event_definition, + workflow=my_task.workflow, + outer_workflow=my_task.workflow.outer_workflow + ) \ No newline at end of file diff --git a/SpiffWorkflow/SpiffWorkflow/bpmn/specs/events/event_types.py b/SpiffWorkflow/SpiffWorkflow/bpmn/specs/events/event_types.py index 05b973035..f2aa6d21d 100644 --- a/SpiffWorkflow/SpiffWorkflow/bpmn/specs/events/event_types.py +++ b/SpiffWorkflow/SpiffWorkflow/bpmn/specs/events/event_types.py @@ -54,7 +54,7 @@ class CatchingEvent(Simple, BpmnSpecMixin): my_task._ready() super(CatchingEvent, self)._update_hook(my_task) - def _on_ready(self, my_task): + def _on_ready_hook(self, my_task): # None events don't propogate, so as soon as we're ready, we fire our event if isinstance(self.event_definition, NoneEventDefinition): @@ -63,7 +63,7 @@ class CatchingEvent(Simple, BpmnSpecMixin): # If we have not seen the event we're waiting for, enter the waiting state if not self.event_definition.has_fired(my_task): my_task._set_state(TaskState.WAITING) - super(CatchingEvent, self)._on_ready(my_task) + super(CatchingEvent, self)._on_ready_hook(my_task) def _on_complete_hook(self, my_task): diff --git a/SpiffWorkflow/SpiffWorkflow/bpmn/workflow.py b/SpiffWorkflow/SpiffWorkflow/bpmn/workflow.py index d9b3994b2..0f556e31d 100644 --- a/SpiffWorkflow/SpiffWorkflow/bpmn/workflow.py +++ b/SpiffWorkflow/SpiffWorkflow/bpmn/workflow.py @@ -16,7 +16,7 @@ # Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA # 02110-1301 USA -from SpiffWorkflow.bpmn.specs.events.event_definitions import MessageEventDefinition +from SpiffWorkflow.bpmn.specs.events.event_definitions import MessageEventDefinition, MultipleEventDefinition from .PythonScriptEngine import PythonScriptEngine from .specs.events.event_types import CatchingEvent from .specs.events.StartEvent import StartEvent @@ -113,6 +113,14 @@ class BpmnWorkflow(Workflow): workflow = workflow.outer_workflow return workflow + def 
_get_or_create_subprocess(self, task_spec, wf_spec): + if isinstance(task_spec.event_definition, MultipleEventDefinition): + for sp in self.subprocesses.values(): + start = sp.get_tasks_from_spec_name(task_spec.name) + if len(start) and start[0].state == TaskState.WAITING: + return sp + return self.add_subprocess(wf_spec.name, f'{wf_spec.name}_{len(self.subprocesses)}') + def catch(self, event_definition, correlations=None): """ Send an event definition to any tasks that catch it. @@ -134,10 +142,8 @@ class BpmnWorkflow(Workflow): for name, spec in self.subprocess_specs.items(): for task_spec in list(spec.task_specs.values()): if isinstance(task_spec, StartEvent) and task_spec.event_definition == event_definition: - subprocess = self.add_subprocess(spec.name, f'{spec.name}_{len(self.subprocesses)}') - subprocess.correlations = correlations or {} - start = self.get_tasks_from_spec_name(task_spec.name, workflow=subprocess)[0] - task_spec.event_definition.catch(start, event_definition) + subprocess = self._get_or_create_subprocess(task_spec, spec) + subprocess.correlations.update(correlations or {}) # We need to get all the tasks that catch an event before completing any of them # in order to prevent the scenario where multiple boundary events catch the diff --git a/SpiffWorkflow/SpiffWorkflow/dmn/engine/DMNEngine.py b/SpiffWorkflow/SpiffWorkflow/dmn/engine/DMNEngine.py index 68ef38bc8..0d63ac524 100644 --- a/SpiffWorkflow/SpiffWorkflow/dmn/engine/DMNEngine.py +++ b/SpiffWorkflow/SpiffWorkflow/dmn/engine/DMNEngine.py @@ -30,9 +30,7 @@ class DMNEngine: a given task.""" result = {} matched_rules = self.decide(task) - if len(matched_rules) == 1: - result = matched_rules[0].output_as_dict(task) - elif len(matched_rules) > 1: # This must be a multi-output + if self.decision_table.hit_policy == HitPolicy.COLLECT.value: # each output will be an array of values, all outputs will # be placed in a dict, which we will then merge. 
for rule in matched_rules: @@ -41,6 +39,8 @@ class DMNEngine: if not key in result: result[key] = [] result[key].append(rule_output[key]) + elif len(matched_rules) > 0: + result = matched_rules[0].output_as_dict(task) return result diff --git a/SpiffWorkflow/SpiffWorkflow/dmn/serializer/task_spec_converters.py b/SpiffWorkflow/SpiffWorkflow/dmn/serializer/task_spec_converters.py index 4e3cb183b..7c65ab5f4 100644 --- a/SpiffWorkflow/SpiffWorkflow/dmn/serializer/task_spec_converters.py +++ b/SpiffWorkflow/SpiffWorkflow/dmn/serializer/task_spec_converters.py @@ -21,6 +21,7 @@ class BusinessRuleTaskConverter(BpmnTaskSpecConverter): return { 'id': table.id, 'name': table.name, + 'hit_policy': table.hit_policy, 'inputs': [val.__dict__ for val in table.inputs], 'outputs': [val.__dict__ for val in table.outputs], 'rules': [self.rule_to_dict(rule) for rule in table.rules], diff --git a/SpiffWorkflow/SpiffWorkflow/spiff/serializer/task_spec_converters.py b/SpiffWorkflow/SpiffWorkflow/spiff/serializer/task_spec_converters.py index a1c0525c2..abf3614b2 100644 --- a/SpiffWorkflow/SpiffWorkflow/spiff/serializer/task_spec_converters.py +++ b/SpiffWorkflow/SpiffWorkflow/spiff/serializer/task_spec_converters.py @@ -3,7 +3,7 @@ from functools import partial from SpiffWorkflow.bpmn.serializer.bpmn_converters import BpmnTaskSpecConverter from SpiffWorkflow.bpmn.specs.events.StartEvent import StartEvent from SpiffWorkflow.bpmn.specs.events.EndEvent import EndEvent -from SpiffWorkflow.bpmn.specs.events.IntermediateEvent import IntermediateThrowEvent, IntermediateCatchEvent, BoundaryEvent +from SpiffWorkflow.bpmn.specs.events.IntermediateEvent import IntermediateThrowEvent, IntermediateCatchEvent, BoundaryEvent, EventBasedGateway from SpiffWorkflow.spiff.specs.none_task import NoneTask from SpiffWorkflow.spiff.specs.manual_task import ManualTask from SpiffWorkflow.spiff.specs.user_task import UserTask @@ -164,3 +164,7 @@ class ReceiveTaskConverter(SpiffEventConverter): dct['prescript'] = 
spec.prescript dct['postscript'] = spec.postscript return dct + +class EventBasedGatewayConverter(SpiffEventConverter): + def __init__(self, data_converter=None, typename=None): + super().__init__(EventBasedGateway, data_converter, typename) \ No newline at end of file diff --git a/SpiffWorkflow/doc/index.rst b/SpiffWorkflow/doc/index.rst index 74bccd055..e3cd2dbf9 100644 --- a/SpiffWorkflow/doc/index.rst +++ b/SpiffWorkflow/doc/index.rst @@ -36,7 +36,7 @@ Spiff Workflow is published under the terms of the Support ------- -You can find us on `our Discord Channel `_ +You can find us on `our Discord Channel `_ Commercial support for SpiffWorkflow is available from `Sartography `_ diff --git a/SpiffWorkflow/tests/SpiffWorkflow/bpmn/data/event-gateway.bpmn b/SpiffWorkflow/tests/SpiffWorkflow/bpmn/data/event-gateway.bpmn new file mode 100644 index 000000000..afa986ba5 --- /dev/null +++ b/SpiffWorkflow/tests/SpiffWorkflow/bpmn/data/event-gateway.bpmn @@ -0,0 +1,107 @@ + + + + + Flow_0w4b5t2 + + + + Flow_0w4b5t2 + Flow_0gge7fn + Flow_0px7ksu + Flow_1rfbrlf + + + Flow_0gge7fn + Flow_1g4g85l + + + + + Flow_0px7ksu + Flow_18v90rx + + + + + Flow_1rfbrlf + Flow_0mppjk9 + + timedelta(seconds=1) + + + + + Flow_0mppjk9 + + + + Flow_1g4g85l + + + + Flow_18v90rx + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/SpiffWorkflow/tests/SpiffWorkflow/bpmn/data/multiple-start-parallel.bpmn b/SpiffWorkflow/tests/SpiffWorkflow/bpmn/data/multiple-start-parallel.bpmn new file mode 100644 index 000000000..453dde30c --- /dev/null +++ b/SpiffWorkflow/tests/SpiffWorkflow/bpmn/data/multiple-start-parallel.bpmn @@ -0,0 +1,43 @@ + + + + + Flow_1tr2mqr + + + + + Flow_1tr2mqr + Flow_1qjctmo + + + + Flow_1qjctmo + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/SpiffWorkflow/tests/SpiffWorkflow/bpmn/data/multiple-start.bpmn b/SpiffWorkflow/tests/SpiffWorkflow/bpmn/data/multiple-start.bpmn new 
file mode 100644 index 000000000..e9deb9b0f --- /dev/null +++ b/SpiffWorkflow/tests/SpiffWorkflow/bpmn/data/multiple-start.bpmn @@ -0,0 +1,43 @@ + + + + + Flow_1tr2mqr + + + + + Flow_1tr2mqr + Flow_1qjctmo + + + + Flow_1qjctmo + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/SpiffWorkflow/tests/SpiffWorkflow/bpmn/data/multiple-throw-start.bpmn b/SpiffWorkflow/tests/SpiffWorkflow/bpmn/data/multiple-throw-start.bpmn new file mode 100644 index 000000000..a70bb6337 --- /dev/null +++ b/SpiffWorkflow/tests/SpiffWorkflow/bpmn/data/multiple-throw-start.bpmn @@ -0,0 +1,88 @@ + + + + + + + + + + + Flow_04uk4n8 + + + + + Flow_08al33k + + + Flow_04uk4n8 + Flow_08al33k + + + + + + + Flow_1wgdi4h + + + + + Flow_1wxjn4e + + + Flow_1wgdi4h + Flow_1wxjn4e + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/SpiffWorkflow/tests/SpiffWorkflow/bpmn/data/multiple-throw.bpmn b/SpiffWorkflow/tests/SpiffWorkflow/bpmn/data/multiple-throw.bpmn new file mode 100644 index 000000000..721f54a80 --- /dev/null +++ b/SpiffWorkflow/tests/SpiffWorkflow/bpmn/data/multiple-throw.bpmn @@ -0,0 +1,87 @@ + + + + + + + + + + + Flow_1tr2mqr + + + + Flow_1wohnl8 + Flow_1tr2mqr + + + + + Flow_1wohnl8 + + + + + + Flow_1wgdi4h + + + + + Flow_1wxjn4e + + + Flow_1wgdi4h + Flow_1wxjn4e + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/SpiffWorkflow/tests/SpiffWorkflow/bpmn/events/EventBasedGatewayTest.py b/SpiffWorkflow/tests/SpiffWorkflow/bpmn/events/EventBasedGatewayTest.py new file mode 100644 index 000000000..6e5497842 --- /dev/null +++ b/SpiffWorkflow/tests/SpiffWorkflow/bpmn/events/EventBasedGatewayTest.py @@ -0,0 +1,60 @@ +from datetime import timedelta + +from SpiffWorkflow.bpmn.workflow import BpmnWorkflow +from SpiffWorkflow.bpmn.PythonScriptEngine import PythonScriptEngine +from SpiffWorkflow.bpmn.specs.events.event_definitions import MessageEventDefinition 
+from SpiffWorkflow.task import TaskState + +from ..BpmnWorkflowTestCase import BpmnWorkflowTestCase + +class EventBsedGatewayTest(BpmnWorkflowTestCase): + + def setUp(self): + self.spec, self.subprocesses = self.load_workflow_spec('event-gateway.bpmn', 'Process_0pvx19v') + self.script_engine = PythonScriptEngine(default_globals={"timedelta": timedelta}) + self.workflow = BpmnWorkflow(self.spec, script_engine=self.script_engine) + + def testEventBasedGateway(self): + self.actual_test() + + def testEventBasedGatewaySaveRestore(self): + self.actual_test(True) + + def actual_test(self, save_restore=False): + + self.workflow.do_engine_steps() + waiting_tasks = self.workflow.get_waiting_tasks() + if save_restore: + self.save_restore() + self.workflow.script_engine = self.script_engine + self.assertEqual(len(waiting_tasks), 1) + self.workflow.catch(MessageEventDefinition('message_1')) + self.workflow.refresh_waiting_tasks() + self.workflow.do_engine_steps() + self.assertEqual(self.workflow.is_completed(), True) + self.assertEqual(self.workflow.get_tasks_from_spec_name('message_1_event')[0].state, TaskState.COMPLETED) + self.assertEqual(self.workflow.get_tasks_from_spec_name('message_2_event')[0].state, TaskState.CANCELLED) + self.assertEqual(self.workflow.get_tasks_from_spec_name('timer_event')[0].state, TaskState.CANCELLED) + + def testTimeout(self): + + self.workflow.do_engine_steps() + waiting_tasks = self.workflow.get_waiting_tasks() + self.assertEqual(len(waiting_tasks), 1) + timer_event = waiting_tasks[0].task_spec.event_definition.event_definitions[-1] + self.workflow.catch(timer_event) + self.workflow.refresh_waiting_tasks() + self.workflow.do_engine_steps() + self.assertEqual(self.workflow.is_completed(), True) + self.assertEqual(self.workflow.get_tasks_from_spec_name('message_1_event')[0].state, TaskState.CANCELLED) + self.assertEqual(self.workflow.get_tasks_from_spec_name('message_2_event')[0].state, TaskState.CANCELLED) + 
self.assertEqual(self.workflow.get_tasks_from_spec_name('timer_event')[0].state, TaskState.COMPLETED) + + def testMultipleStart(self): + spec, subprocess = self.load_workflow_spec('multiple-start-parallel.bpmn', 'main') + workflow = BpmnWorkflow(spec) + workflow.do_engine_steps() + workflow.catch(MessageEventDefinition('message_1')) + workflow.catch(MessageEventDefinition('message_2')) + workflow.refresh_waiting_tasks() + workflow.do_engine_steps() diff --git a/SpiffWorkflow/tests/SpiffWorkflow/bpmn/events/MultipleCatchEventTest.py b/SpiffWorkflow/tests/SpiffWorkflow/bpmn/events/MultipleCatchEventTest.py new file mode 100644 index 000000000..791b7bd54 --- /dev/null +++ b/SpiffWorkflow/tests/SpiffWorkflow/bpmn/events/MultipleCatchEventTest.py @@ -0,0 +1,81 @@ +from SpiffWorkflow.bpmn.workflow import BpmnWorkflow +from SpiffWorkflow.bpmn.specs.events.event_definitions import MessageEventDefinition + +from ..BpmnWorkflowTestCase import BpmnWorkflowTestCase + + +class MultipleStartEventTest(BpmnWorkflowTestCase): + + def setUp(self): + self.spec, self.subprocesses = self.load_workflow_spec('multiple-start.bpmn', 'main') + self.workflow = BpmnWorkflow(self.spec) + + def testMultipleStartEvent(self): + self.actual_test() + + def testMultipleStartEventSaveRestore(self): + self.actual_test(True) + + def actual_test(self, save_restore=False): + + self.workflow.do_engine_steps() + waiting_tasks = self.workflow.get_waiting_tasks() + + if save_restore: + self.save_restore() + + # The start event should be waiting + self.assertEqual(len(waiting_tasks), 1) + self.assertEqual(waiting_tasks[0].task_spec.name, 'StartEvent_1') + + self.workflow.catch(MessageEventDefinition('message_1')) + self.workflow.refresh_waiting_tasks() + self.workflow.do_engine_steps() + + # Now the first task should be ready + ready_tasks = self.workflow.get_ready_user_tasks() + self.assertEqual(len(ready_tasks), 1) + self.assertEqual(ready_tasks[0].task_spec.name, 'any_task') + + +class 
ParallelStartEventTest(BpmnWorkflowTestCase): + + def setUp(self): + self.spec, self.subprocesses = self.load_workflow_spec('multiple-start-parallel.bpmn', 'main') + self.workflow = BpmnWorkflow(self.spec) + + def testParallelStartEvent(self): + self.actual_test() + + def testParallelStartEventSaveRestore(self): + self.actual_test(True) + + def actual_test(self, save_restore=False): + + self.workflow.do_engine_steps() + waiting_tasks = self.workflow.get_waiting_tasks() + + if save_restore: + self.save_restore() + + # The start event should be waiting + self.assertEqual(len(waiting_tasks), 1) + self.assertEqual(waiting_tasks[0].task_spec.name, 'StartEvent_1') + + self.workflow.catch(MessageEventDefinition('message_1')) + self.workflow.refresh_waiting_tasks() + self.workflow.do_engine_steps() + + # It should still be waiting because it has to receive both messages + waiting_tasks = self.workflow.get_waiting_tasks() + self.assertEqual(len(waiting_tasks), 1) + self.assertEqual(waiting_tasks[0].task_spec.name, 'StartEvent_1') + + self.workflow.catch(MessageEventDefinition('message_2')) + self.workflow.refresh_waiting_tasks() + self.workflow.do_engine_steps() + + # Now the first task should be ready + ready_tasks = self.workflow.get_ready_user_tasks() + self.assertEqual(len(ready_tasks), 1) + self.assertEqual(ready_tasks[0].task_spec.name, 'any_task') \ No newline at end of file diff --git a/SpiffWorkflow/tests/SpiffWorkflow/bpmn/events/MultipleThrowEventTest.py b/SpiffWorkflow/tests/SpiffWorkflow/bpmn/events/MultipleThrowEventTest.py new file mode 100644 index 000000000..087951eb3 --- /dev/null +++ b/SpiffWorkflow/tests/SpiffWorkflow/bpmn/events/MultipleThrowEventTest.py @@ -0,0 +1,47 @@ +from SpiffWorkflow.bpmn.workflow import BpmnWorkflow + +from ..BpmnWorkflowTestCase import BpmnWorkflowTestCase + + +class MultipleThrowEventIntermediateCatchTest(BpmnWorkflowTestCase): + + def setUp(self): + self.spec, subprocesses = 
self.load_collaboration('multiple-throw.bpmn','top') + self.workflow = BpmnWorkflow(self.spec, subprocesses) + + def testMultipleThrowEventIntermediateCatch(self): + self.actual_test() + + def testMultipleThrowEventIntermediateCatchSaveRestore(self): + self.actual_test(True) + + def actual_test(self, save_restore=False): + if save_restore: + self.save_restore() + self.workflow.do_engine_steps() + self.assertEqual(len(self.workflow.get_waiting_tasks()), 0) + self.assertEqual(self.workflow.is_completed(), True) + + +class MultipleThrowEventStartsEventTest(BpmnWorkflowTestCase): + + def setUp(self): + specs = self.get_all_specs('multiple-throw-start.bpmn') + self.spec = specs.pop('initiate') + self.workflow = BpmnWorkflow(self.spec, specs) + + def testMultipleThrowEventStartEvent(self): + self.actual_test() + + def testMultipleThrowEventStartEventSaveRestore(self): + self.actual_test(True) + + def actual_test(self, save_restore=False): + if save_restore: + self.save_restore() + self.workflow.do_engine_steps() + ready_tasks = self.workflow.get_ready_user_tasks() + self.assertEqual(len(ready_tasks), 1) + ready_tasks[0].complete() + self.workflow.do_engine_steps() + self.assertEqual(self.workflow.is_completed(), True) \ No newline at end of file diff --git a/SpiffWorkflow/tests/SpiffWorkflow/dmn/HitPolicyTest.py b/SpiffWorkflow/tests/SpiffWorkflow/dmn/HitPolicyTest.py index a7503732a..061ba660e 100644 --- a/SpiffWorkflow/tests/SpiffWorkflow/dmn/HitPolicyTest.py +++ b/SpiffWorkflow/tests/SpiffWorkflow/dmn/HitPolicyTest.py @@ -3,6 +3,8 @@ import unittest from SpiffWorkflow.dmn.engine.DMNEngine import DMNEngine from SpiffWorkflow.dmn.parser.BpmnDmnParser import BpmnDmnParser +from SpiffWorkflow.dmn.serializer.task_spec_converters import \ + BusinessRuleTaskConverter from tests.SpiffWorkflow.bpmn.BpmnWorkflowTestCase import BpmnWorkflowTestCase from tests.SpiffWorkflow.dmn.DecisionRunner import DecisionRunner from tests.SpiffWorkflow.dmn.python_engine.PythonDecisionRunner 
import \ @@ -27,7 +29,18 @@ class HitPolicyTest(BpmnWorkflowTestCase): self.assertEqual('COLLECT', decision_table.hit_policy) res = runner.result({'type': 'stooge'}) self.assertEqual(4, len(res['name'])) + res = runner.result({'type': 'farmer'}) + self.assertEqual(1, len(res['name'])) + self.assertEqual('Elmer Fudd', res['name'][0]) + def testSerializeHitPolicy(self): + file_name = os.path.join(os.path.dirname(__file__), 'data', 'collect_hit.dmn') + runner = PythonDecisionRunner(file_name) + decision_table = runner.decision_table + self.assertEqual("COLLECT", decision_table.hit_policy) + dict = BusinessRuleTaskConverter().decision_table_to_dict(decision_table) + new_table = BusinessRuleTaskConverter().decision_table_from_dict(dict) + self.assertEqual("COLLECT", new_table.hit_policy) def suite(): return unittest.TestLoader().loadTestsFromTestCase(HitPolicyTest) diff --git a/SpiffWorkflow/tests/SpiffWorkflow/specs/ExecuteTest.py b/SpiffWorkflow/tests/SpiffWorkflow/specs/ExecuteTest.py index fc16db931..05653e185 100644 --- a/SpiffWorkflow/tests/SpiffWorkflow/specs/ExecuteTest.py +++ b/SpiffWorkflow/tests/SpiffWorkflow/specs/ExecuteTest.py @@ -3,9 +3,7 @@ import os -import sys import unittest -sys.path.insert(0, os.path.join(os.path.dirname(__file__), '..', '..', '..')) from tests.SpiffWorkflow.util import run_workflow from .TaskSpecTest import TaskSpecTest @@ -25,7 +23,8 @@ class ExecuteTest(TaskSpecTest): args=self.cmd_args) def setUp(self): - self.cmd_args = ["python", "ExecuteProcessMock.py"] + script_path = os.path.join(os.path.dirname(__file__), '..', 'ExecuteProcessMock.py') + self.cmd_args = ["python", script_path] TaskSpecTest.setUp(self) def testConstructor(self): diff --git a/bin/pre b/bin/pre new file mode 100755 index 000000000..fa89ecc79 --- /dev/null +++ b/bin/pre @@ -0,0 +1,11 @@ +#!/usr/bin/env bash + +function error_handler() { + >&2 echo "Exited with BAD EXIT CODE '${2}' in ${0} script at line: ${1}." + exit "$2" +} +trap 'error_handler ${LINENO} $?' 
ERR +set -o errtrace -o errexit -o nounset -o pipefail + +script_dir="$( cd -- "$(dirname "$0")" >/dev/null 2>&1 ; pwd -P )" +"${script_dir}/run_pyl" pre diff --git a/bin/run_pyl b/bin/run_pyl index d3abd7fc6..64446662a 100755 --- a/bin/run_pyl +++ b/bin/run_pyl @@ -16,6 +16,17 @@ react_projects=( spiffworkflow-frontend ) +subcommand="${1:-}" + +if [[ "$subcommand" == "pre" ]]; then + if [[ -n "$(git status --porcelain SpiffWorkflow)" ]]; then + echo "SpiffWorkflow has uncommitted changes. Running its test suite." + pushd SpiffWorkflow + make tests-par # run tests in parallel + popd + fi +fi + function get_python_dirs() { (git ls-tree -r HEAD --name-only | grep -E '\.py$' | awk -F '/' '{print $1}' | sort | uniq | grep -v '\.' | grep -Ev '^(bin|migrations)$') || echo '' } @@ -50,23 +61,34 @@ function run_pre_commmit() { } for react_project in "${react_projects[@]}" ; do - pushd "$react_project" - npm run lint:fix - popd + # if pre, only do stuff when there are changes + if [[ "$subcommand" != "pre" ]] || [[ -n "$(git status --porcelain "$react_project")" ]]; then + pushd "$react_project" + npm run lint:fix + popd + fi done for python_project in "${python_projects[@]}" ; do - pushd "$python_project" - run_fix_docstrings || run_fix_docstrings - run_autoflake || run_autoflake - popd + if [[ "$subcommand" != "pre" ]] || [[ -n "$(git status --porcelain "$python_project")" ]]; then + pushd "$python_project" + run_fix_docstrings || run_fix_docstrings + run_autoflake || run_autoflake + popd + fi done -run_pre_commmit || run_pre_commmit -for python_project in "${python_projects[@]}"; do - pushd "$python_project" - poetry install - poetry run mypy $(get_python_dirs) - poetry run coverage run --parallel -m pytest - popd +if [[ "$subcommand" != "pre" ]] || [[ -n "$(git status --porcelain "spiffworkflow-backend")" ]]; then + # rune_pre_commit only applies to spiffworkflow-backend at the moment + run_pre_commmit || run_pre_commmit +fi + +for python_project in 
"${python_projects[@]}"; do + if [[ "$subcommand" != "pre" ]] || [[ -n "$(git status --porcelain "$python_project")" ]]; then + pushd "$python_project" + poetry install + poetry run mypy $(get_python_dirs) + poetry run coverage run --parallel -m pytest + popd + fi done diff --git a/bpmn-js-spiffworkflow/webpack.config.js b/bpmn-js-spiffworkflow/webpack.config.js index 0774e48e1..8fe08cf56 100644 --- a/bpmn-js-spiffworkflow/webpack.config.js +++ b/bpmn-js-spiffworkflow/webpack.config.js @@ -35,6 +35,7 @@ module.exports = { new CopyWebpackPlugin({ patterns: [ { from: 'assets/**', to: 'vendor/bpmn-js', context: 'node_modules/bpmn-js/dist/' }, + { from: '*.css', to: 'vendor/bpmn-js-color-picker', context: 'node_modules/bpmn-js-color-picker/colors' }, { from: 'assets/**', to: 'vendor/bpmn-js-properties-panel', diff --git a/docker-compose.yml b/docker-compose.yml index 1cf550248..b505499b5 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -10,9 +10,9 @@ services: environment: - MYSQL_DATABASE=spiffworkflow_backend_development - MYSQL_ROOT_PASSWORD=my-secret-pw - - MYSQL_TCP_PORT=7003 + - MYSQL_TCP_PORT=8003 ports: - - "7003" + - "8003" healthcheck: test: mysql --user=root --password=my-secret-pw -e 'select 1' spiffworkflow_backend_development interval: 10s @@ -30,12 +30,12 @@ services: - SPIFFWORKFLOW_BACKEND_ENV=development - FLASK_DEBUG=0 - FLASK_SESSION_SECRET_KEY=super_secret_key - - OPEN_ID_SERVER_URL=http://localhost:7000/openid - - SPIFFWORKFLOW_FRONTEND_URL=http://localhost:7001 - - SPIFFWORKFLOW_BACKEND_URL=http://localhost:7000 - - SPIFFWORKFLOW_BACKEND_PORT=7000 + - OPEN_ID_SERVER_URL=http://localhost:8000/openid + - SPIFFWORKFLOW_FRONTEND_URL=http://localhost:8001 + - SPIFFWORKFLOW_BACKEND_URL=http://localhost:8000 + - SPIFFWORKFLOW_BACKEND_PORT=8000 - SPIFFWORKFLOW_BACKEND_UPGRADE_DB=true - - SPIFFWORKFLOW_BACKEND_DATABASE_URI=mysql+mysqlconnector://root:my-secret-pw@spiffworkflow-db:7003/spiffworkflow_backend_development + - 
SPIFFWORKFLOW_BACKEND_DATABASE_URI=mysql+mysqlconnector://root:my-secret-pw@spiffworkflow-db:8003/spiffworkflow_backend_development - BPMN_SPEC_ABSOLUTE_DIR=/app/process_models - SPIFFWORKFLOW_BACKEND_LOAD_FIXTURE_DATA=false - SPIFFWORKFLOW_BACKEND_PERMISSIONS_FILE_NAME=example.yml @@ -43,12 +43,12 @@ services: - OPEN_ID_CLIENT_ID=spiffworkflow-backend - OPEN_ID_CLIENT_SECRET_KEY=my_open_id_secret_key ports: - - "7000:7000" + - "8000:8000" volumes: - ./process_models:/app/process_models - ./log:/app/log healthcheck: - test: curl localhost:7000/v1.0/status --fail + test: curl localhost:8000/v1.0/status --fail interval: 10s timeout: 5s retries: 20 @@ -58,9 +58,9 @@ services: image: ghcr.io/sartography/spiffworkflow-frontend environment: - APPLICATION_ROOT=/ - - PORT0=7001 + - PORT0=8001 ports: - - "7001:7001" + - "8001:8001" spiffworkflow-connector: container_name: spiffworkflow-connector @@ -69,10 +69,11 @@ services: - FLASK_ENV=${FLASK_ENV:-development} - FLASK_DEBUG=0 - FLASK_SESSION_SECRET_KEY=${FLASK_SESSION_SECRET_KEY:-super_secret_key} + - CONNECTOR_PROXY_PORT=8004 ports: - - "7004:7004" + - "8004:8004" healthcheck: - test: curl localhost:7004/liveness --fail + test: curl localhost:8004/liveness --fail interval: 10s timeout: 5s retries: 20 diff --git a/flask-bpmn/poetry.lock b/flask-bpmn/poetry.lock index 8b17963a7..e1ce1b3ad 100644 --- a/flask-bpmn/poetry.lock +++ b/flask-bpmn/poetry.lock @@ -813,22 +813,6 @@ category = "main" optional = false python-versions = ">=3.6" -[[package]] -name = "libcst" -version = "0.4.3" -description = "A concrete syntax tree with AST-like properties for Python 3.5, 3.6, 3.7, 3.8, 3.9, and 3.10 programs." 
-category = "dev" -optional = false -python-versions = ">=3.7" - -[package.dependencies] -pyyaml = ">=5.2" -typing-extensions = ">=3.7.4.2" -typing-inspect = ">=0.4.0" - -[package.extras] -dev = ["black (==22.3.0)", "coverage (>=4.5.4)", "fixit (==0.1.1)", "flake8 (>=3.7.8)", "hypothesis (>=4.36.0)", "hypothesmith (>=0.0.4)", "jinja2 (==3.0.3)", "jupyter (>=1.0.0)", "maturin (>=0.8.3,<0.9)", "nbsphinx (>=0.4.2)", "prompt-toolkit (>=2.0.9)", "pyre-check (==0.9.9)", "setuptools-rust (>=0.12.1)", "setuptools-scm (>=6.0.1)", "slotscheck (>=0.7.1)", "sphinx-rtd-theme (>=0.4.3)", "ufmt (==1.3)", "usort (==1.0.0rc1)"] - [[package]] name = "livereload" version = "2.6.3" @@ -905,18 +889,6 @@ category = "dev" optional = false python-versions = "*" -[[package]] -name = "monkeytype" -version = "22.2.0" -description = "Generating type annotations from sampled production types" -category = "dev" -optional = false -python-versions = ">=3.6" - -[package.dependencies] -libcst = ">=0.3.7" -mypy-extensions = "*" - [[package]] name = "mypy" version = "0.991" @@ -1504,7 +1476,7 @@ test = ["pytest"] [[package]] name = "SpiffWorkflow" version = "1.2.1" -description = "" +description = "A workflow framework and BPMN/DMN Processor" category = "main" optional = false python-versions = "*" @@ -1520,7 +1492,7 @@ lxml = "*" type = "git" url = "https://github.com/sartography/SpiffWorkflow" reference = "main" -resolved_reference = "025bc30f27366e06dd1286b7563e4b1cb04c1c46" +resolved_reference = "841bd63017bb1d92858456393f144b4e5b23c994" [[package]] name = "sqlalchemy" @@ -1627,18 +1599,6 @@ category = "main" optional = false python-versions = ">=3.7" -[[package]] -name = "typing-inspect" -version = "0.7.1" -description = "Runtime inspection utilities for typing module." 
-category = "dev" -optional = false -python-versions = "*" - -[package.dependencies] -mypy-extensions = ">=0.3.0" -typing-extensions = ">=3.7.4" - [[package]] name = "unidecode" version = "1.3.4" @@ -1770,7 +1730,7 @@ testing = ["func-timeout", "jaraco.itertools", "pytest (>=6)", "pytest-black (>= [metadata] lock-version = "1.1" python-versions = "^3.7" -content-hash = "6dfda037ebb3024834a45670108756a3057fff1b6fb5b916d222d3a162509b7d" +content-hash = "45cac5741fa47e44710f5aae6dfdb4636fc4d60df2d6aba467052fdd5199e791" [metadata.files] alabaster = [ @@ -2234,32 +2194,6 @@ lazy-object-proxy = [ {file = "lazy_object_proxy-1.7.1-cp39-cp39-win_amd64.whl", hash = "sha256:677ea950bef409b47e51e733283544ac3d660b709cfce7b187f5ace137960d61"}, {file = "lazy_object_proxy-1.7.1-pp37.pp38-none-any.whl", hash = "sha256:d66906d5785da8e0be7360912e99c9188b70f52c422f9fc18223347235691a84"}, ] -libcst = [ - {file = "libcst-0.4.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:bea98a8be2b1725784ae01e89519121eba7d81280dcbee40ae03ececd7277cf3"}, - {file = "libcst-0.4.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:3d9191c764645dddf94d49885e590433fa0ee6d347b07eec86566786e6d2ada5"}, - {file = "libcst-0.4.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c0f22e9787e44304e7cd9744e543602ab2c1bca8b922cb6237ea08d9a0be3fdd"}, - {file = "libcst-0.4.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ff147dd77b6ea72e4f2f0abfcd1be11a3108c28cb65e6da666c0b77142033f7c"}, - {file = "libcst-0.4.3-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d744d4a6301c75322f1d88365dccfe402a51e724583a2edc4cba474462cc9419"}, - {file = "libcst-0.4.3-cp310-cp310-win_amd64.whl", hash = "sha256:ed0f15545eddfdd6270069ce0b2d4c253298817bd676a1a6adddaa1d66c7e28b"}, - {file = "libcst-0.4.3-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:a6f57056a743853c01bbd21bfd96c2a1b4c317bbc66920f5f2c9999b3dca7233"}, - 
{file = "libcst-0.4.3-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1c3d33da8f9b088e118bfc6ecacdd627ac237baeb490f4d7a383af4df4ea4f82"}, - {file = "libcst-0.4.3-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:df5f51a837fc10cdbf5c61acb467f6c15d5f9ca1d94a84a6a29c4f20ce7b437e"}, - {file = "libcst-0.4.3-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f744f60057c8998b856d9baf28765c65574992f4a49830ca350010fc31f4eac4"}, - {file = "libcst-0.4.3-cp37-cp37m-win_amd64.whl", hash = "sha256:88ab371aab82f7241448e263ec42abced649a77cdd21df960268e6df70b3f3f7"}, - {file = "libcst-0.4.3-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:826ea5f10a84625db861ccf35946317f4f29e575261e44c0cd6c24c4dde5c2bb"}, - {file = "libcst-0.4.3-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:cab5b23796ce66303398bb7b2d27bcb17d2416dacd3d00229c961aed87d79a3b"}, - {file = "libcst-0.4.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:afc793c95af79e5adc5905713ccddff034d0de3e3da748424b722edf890227de"}, - {file = "libcst-0.4.3-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0c982387b8e23ad18efbd0287004924931a0b05c91ed5630453faf224bb0b185"}, - {file = "libcst-0.4.3-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:bc4c25aca45df5f86a6a1c8c219e8c7a90acdaef02b53eb01eafa563381cb0ce"}, - {file = "libcst-0.4.3-cp38-cp38-win_amd64.whl", hash = "sha256:1a395129ecf6c6ce429427f34100ccd99f35898a98187764a4559d9f92166cd0"}, - {file = "libcst-0.4.3-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:ca00819affafccb02b2582ec47706712b995c9887cad02bb8efe94a066830f37"}, - {file = "libcst-0.4.3-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:231a9ca446570f9b63d8c2c6dbf6c796fb939a5e4ef9dc0dd9304a21a6c0da16"}, - {file = "libcst-0.4.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:b08e7a56950479c856183ad6fdf0a21df028d6732e1d19822ec1593e32f700ca"}, - {file = "libcst-0.4.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3cb70e7e5118234e75d309fcf04931e20f282f16c80dda464fc1b88ef02e52e4"}, - {file = "libcst-0.4.3-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6c8c00b24ab39facff463b18b9abc8df7dd063ae0ce9fe2e78e199c9a8572e37"}, - {file = "libcst-0.4.3-cp39-cp39-win_amd64.whl", hash = "sha256:28f35b9a21b2f8982a8ed3f53b1fdbc5435252409d34d061a3229dc4b413b8c7"}, - {file = "libcst-0.4.3.tar.gz", hash = "sha256:f79ab61287505d97ed57ead14b78777f48cd6ec5339ca4978987e4c35957a465"}, -] livereload = [ {file = "livereload-2.6.3.tar.gz", hash = "sha256:776f2f865e59fde56490a56bcc6773b6917366bce0c267c60ee8aaf1a0959869"}, ] @@ -2389,10 +2323,6 @@ mccabe = [ {file = "mccabe-0.6.1-py2.py3-none-any.whl", hash = "sha256:ab8a6258860da4b6677da4bd2fe5dc2c659cff31b3ee4f7f5d64e79735b80d42"}, {file = "mccabe-0.6.1.tar.gz", hash = "sha256:dd8d182285a0fe56bace7f45b5e7d1a6ebcbf524e8f3bd87eb0f125271b8831f"}, ] -monkeytype = [ - {file = "MonkeyType-22.2.0-py3-none-any.whl", hash = "sha256:3d0815c7e98a18e9267990a452548247f6775fd636e65df5a7d77100ea7ad282"}, - {file = "MonkeyType-22.2.0.tar.gz", hash = "sha256:6b0c00b49dcc5095a2c08d28246cf005e05673fc51f64d203f9a6bca2036dfab"}, -] mypy = [ {file = "mypy-0.991-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:7d17e0a9707d0772f4a7b878f04b4fd11f6f5bcb9b3813975a9b13c9332153ab"}, {file = "mypy-0.991-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:0714258640194d75677e86c786e80ccf294972cc76885d3ebbb560f11db0003d"}, @@ -2808,11 +2738,6 @@ typing-extensions = [ {file = "typing_extensions-4.2.0-py3-none-any.whl", hash = "sha256:6657594ee297170d19f67d55c05852a874e7eb634f4f753dbd667855e07c1708"}, {file = "typing_extensions-4.2.0.tar.gz", hash = "sha256:f1c24655a0da0d1b67f07e17a5e6b2a105894e6824b92096378bb3668ef02376"}, ] -typing-inspect = [ - 
{file = "typing_inspect-0.7.1-py2-none-any.whl", hash = "sha256:b1f56c0783ef0f25fb064a01be6e5407e54cf4a4bf4f3ba3fe51e0bd6dcea9e5"}, - {file = "typing_inspect-0.7.1-py3-none-any.whl", hash = "sha256:3cd7d4563e997719a710a3bfe7ffb544c6b72069b6812a02e9b414a8fa3aaa6b"}, - {file = "typing_inspect-0.7.1.tar.gz", hash = "sha256:047d4097d9b17f46531bf6f014356111a1b6fb821a24fe7ac909853ca2a782aa"}, -] unidecode = [ {file = "Unidecode-1.3.4-py3-none-any.whl", hash = "sha256:afa04efcdd818a93237574791be9b2817d7077c25a068b00f8cff7baa4e59257"}, {file = "Unidecode-1.3.4.tar.gz", hash = "sha256:8e4352fb93d5a735c788110d2e7ac8e8031eb06ccbfe8d324ab71735015f9342"}, diff --git a/flask-bpmn/src/flask_bpmn/api/api_error.py b/flask-bpmn/src/flask_bpmn/api/api_error.py index eb390abe1..ed792e7e8 100644 --- a/flask-bpmn/src/flask_bpmn/api/api_error.py +++ b/flask-bpmn/src/flask_bpmn/api/api_error.py @@ -170,15 +170,21 @@ def set_user_sentry_context() -> None: def handle_exception(exception: Exception) -> flask.wrappers.Response: """Handles unexpected exceptions.""" set_user_sentry_context() - id = capture_exception(exception) - organization_slug = current_app.config.get("SENTRY_ORGANIZATION_SLUG") - project_slug = current_app.config.get("SENTRY_PROJECT_SLUG") sentry_link = None - if organization_slug and project_slug: - sentry_link = ( - f"https://sentry.io/{organization_slug}/{project_slug}/events/{id}" - ) + if not isinstance(exception, ApiError) or exception.error_code != "invalid_token": + id = capture_exception(exception) + + if isinstance(exception, ApiError): + current_app.logger.info( + f"Sending ApiError exception to sentry: {exception} with error code {exception.error_code}") + + organization_slug = current_app.config.get("SENTRY_ORGANIZATION_SLUG") + project_slug = current_app.config.get("SENTRY_PROJECT_SLUG") + if organization_slug and project_slug: + sentry_link = ( + f"https://sentry.io/{organization_slug}/{project_slug}/events/{id}" + ) # !!!NOTE!!!: do this after sentry stuff 
since calling logger.exception # seems to break the sentry sdk context where we no longer get back diff --git a/poetry.lock b/poetry.lock index e5c9c4c04..118134c7f 100644 --- a/poetry.lock +++ b/poetry.lock @@ -163,7 +163,7 @@ python-versions = "*" [[package]] name = "black" -version = "22.10.0" +version = "23.1a1" description = "The uncompromising code formatter." category = "dev" optional = false @@ -614,7 +614,7 @@ werkzeug = "*" type = "git" url = "https://github.com/sartography/flask-bpmn" reference = "main" -resolved_reference = "860f2387bebdaa9220e9fbf6f8fa7f74e805d0d4" +resolved_reference = "c79c1e0b6d34ec05d82cce888b5e57b33d24403b" [[package]] name = "flask-cors" @@ -1760,7 +1760,7 @@ lxml = "*" type = "git" url = "https://github.com/sartography/SpiffWorkflow" reference = "main" -resolved_reference = "bba7ddf5478af579b891ca63c50babbfccf6b7a4" +resolved_reference = "80640024a8030481645f0c34f34c57e88f7b4f0c" [[package]] name = "sqlalchemy" @@ -2182,27 +2182,18 @@ billiard = [ {file = "billiard-3.6.4.0.tar.gz", hash = "sha256:299de5a8da28a783d51b197d496bef4f1595dd023a93a4f59dde1886ae905547"}, ] black = [ - {file = "black-22.10.0-1fixedarch-cp310-cp310-macosx_11_0_x86_64.whl", hash = "sha256:5cc42ca67989e9c3cf859e84c2bf014f6633db63d1cbdf8fdb666dcd9e77e3fa"}, - {file = "black-22.10.0-1fixedarch-cp311-cp311-macosx_11_0_x86_64.whl", hash = "sha256:5d8f74030e67087b219b032aa33a919fae8806d49c867846bfacde57f43972ef"}, - {file = "black-22.10.0-1fixedarch-cp37-cp37m-macosx_10_16_x86_64.whl", hash = "sha256:197df8509263b0b8614e1df1756b1dd41be6738eed2ba9e9769f3880c2b9d7b6"}, - {file = "black-22.10.0-1fixedarch-cp38-cp38-macosx_10_16_x86_64.whl", hash = "sha256:2644b5d63633702bc2c5f3754b1b475378fbbfb481f62319388235d0cd104c2d"}, - {file = "black-22.10.0-1fixedarch-cp39-cp39-macosx_11_0_x86_64.whl", hash = "sha256:e41a86c6c650bcecc6633ee3180d80a025db041a8e2398dcc059b3afa8382cd4"}, - {file = "black-22.10.0-cp310-cp310-macosx_11_0_arm64.whl", hash = 
"sha256:2039230db3c6c639bd84efe3292ec7b06e9214a2992cd9beb293d639c6402edb"}, - {file = "black-22.10.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:14ff67aec0a47c424bc99b71005202045dc09270da44a27848d534600ac64fc7"}, - {file = "black-22.10.0-cp310-cp310-win_amd64.whl", hash = "sha256:819dc789f4498ecc91438a7de64427c73b45035e2e3680c92e18795a839ebb66"}, - {file = "black-22.10.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:5b9b29da4f564ba8787c119f37d174f2b69cdfdf9015b7d8c5c16121ddc054ae"}, - {file = "black-22.10.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b8b49776299fece66bffaafe357d929ca9451450f5466e997a7285ab0fe28e3b"}, - {file = "black-22.10.0-cp311-cp311-win_amd64.whl", hash = "sha256:21199526696b8f09c3997e2b4db8d0b108d801a348414264d2eb8eb2532e540d"}, - {file = "black-22.10.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1e464456d24e23d11fced2bc8c47ef66d471f845c7b7a42f3bd77bf3d1789650"}, - {file = "black-22.10.0-cp37-cp37m-win_amd64.whl", hash = "sha256:9311e99228ae10023300ecac05be5a296f60d2fd10fff31cf5c1fa4ca4b1988d"}, - {file = "black-22.10.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:fba8a281e570adafb79f7755ac8721b6cf1bbf691186a287e990c7929c7692ff"}, - {file = "black-22.10.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:915ace4ff03fdfff953962fa672d44be269deb2eaf88499a0f8805221bc68c87"}, - {file = "black-22.10.0-cp38-cp38-win_amd64.whl", hash = "sha256:444ebfb4e441254e87bad00c661fe32df9969b2bf224373a448d8aca2132b395"}, - {file = "black-22.10.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:974308c58d057a651d182208a484ce80a26dac0caef2895836a92dd6ebd725e0"}, - {file = "black-22.10.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:72ef3925f30e12a184889aac03d77d031056860ccae8a1e519f6cbb742736383"}, - {file = "black-22.10.0-cp39-cp39-win_amd64.whl", hash = 
"sha256:432247333090c8c5366e69627ccb363bc58514ae3e63f7fc75c54b1ea80fa7de"}, - {file = "black-22.10.0-py3-none-any.whl", hash = "sha256:c957b2b4ea88587b46cf49d1dc17681c1e672864fd7af32fc1e9664d572b3458"}, - {file = "black-22.10.0.tar.gz", hash = "sha256:f513588da599943e0cde4e32cc9879e825d58720d6557062d1098c5ad80080e1"}, + {file = "black-23.1a1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5fb7641d442ede92538bc70fa0201f884753a7d0f62f26c722b7b00301b95902"}, + {file = "black-23.1a1-cp310-cp310-win_amd64.whl", hash = "sha256:88288a645402106b8eb9f50d7340ae741e16240bb01c2eed8466549153daa96e"}, + {file = "black-23.1a1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4db1d8027ce7ae53f0ccf02b0be0b8808fefb291d6cb1543420f4165d96d364c"}, + {file = "black-23.1a1-cp311-cp311-win_amd64.whl", hash = "sha256:88ec25a64063945b4591b6378bead544c5d3260de1c93ad96f3ad2d76ddd76fd"}, + {file = "black-23.1a1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8dff6f0157e47fbbeada046fca144b6557d3be2fb2602d668881cd179f04a352"}, + {file = "black-23.1a1-cp37-cp37m-win_amd64.whl", hash = "sha256:ca658b69260a18bf7aa0b0a6562dbbd304a737487d1318998aaca5a75901fd2c"}, + {file = "black-23.1a1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:85dede655442f5e246e7abd667fe07e14916897ba52f3640b5489bf11f7dbf67"}, + {file = "black-23.1a1-cp38-cp38-win_amd64.whl", hash = "sha256:ddbf9da228726d46f45c29024263e160d41030a415097254817d65127012d1a2"}, + {file = "black-23.1a1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:63330069d8ec909cf4e2c4d43a7f00aeb03335430ef9fec6cd2328e6ebde8a77"}, + {file = "black-23.1a1-cp39-cp39-win_amd64.whl", hash = "sha256:793c9176beb2adf295f6b863d9a4dc953fe2ac359ca3da108d71d14cb2c09e52"}, + {file = "black-23.1a1-py3-none-any.whl", hash = "sha256:e88e4b633d64b9e7adc4a6b922f52bb204af9f90d7b1e3317e6490f2b598b1ea"}, + {file = "black-23.1a1.tar.gz", hash = 
"sha256:0b945a5a1e5a5321f884de0061d5a8585d947c9b608e37b6d26ceee4dfdf4b62"}, ] blinker = [ {file = "blinker-1.5-py2.py3-none-any.whl", hash = "sha256:1eb563df6fdbc39eeddc177d953203f99f097e9bf0e2b8f9f3cf18b6ca425e36"}, @@ -2857,7 +2848,18 @@ psycopg2 = [ {file = "psycopg2-2.9.5.tar.gz", hash = "sha256:a5246d2e683a972e2187a8714b5c2cf8156c064629f9a9b1a873c1730d9e245a"}, ] pyasn1 = [ + {file = "pyasn1-0.4.8-py2.4.egg", hash = "sha256:fec3e9d8e36808a28efb59b489e4528c10ad0f480e57dcc32b4de5c9d8c9fdf3"}, + {file = "pyasn1-0.4.8-py2.5.egg", hash = "sha256:0458773cfe65b153891ac249bcf1b5f8f320b7c2ce462151f8fa74de8934becf"}, + {file = "pyasn1-0.4.8-py2.6.egg", hash = "sha256:5c9414dcfede6e441f7e8f81b43b34e834731003427e5b09e4e00e3172a10f00"}, + {file = "pyasn1-0.4.8-py2.7.egg", hash = "sha256:6e7545f1a61025a4e58bb336952c5061697da694db1cae97b116e9c46abcf7c8"}, {file = "pyasn1-0.4.8-py2.py3-none-any.whl", hash = "sha256:39c7e2ec30515947ff4e87fb6f456dfc6e84857d34be479c9d4a4ba4bf46aa5d"}, + {file = "pyasn1-0.4.8-py3.1.egg", hash = "sha256:78fa6da68ed2727915c4767bb386ab32cdba863caa7dbe473eaae45f9959da86"}, + {file = "pyasn1-0.4.8-py3.2.egg", hash = "sha256:08c3c53b75eaa48d71cf8c710312316392ed40899cb34710d092e96745a358b7"}, + {file = "pyasn1-0.4.8-py3.3.egg", hash = "sha256:03840c999ba71680a131cfaee6fab142e1ed9bbd9c693e285cc6aca0d555e576"}, + {file = "pyasn1-0.4.8-py3.4.egg", hash = "sha256:7ab8a544af125fb704feadb008c99a88805126fb525280b2270bb25cc1d78a12"}, + {file = "pyasn1-0.4.8-py3.5.egg", hash = "sha256:e89bf84b5437b532b0803ba5c9a5e054d21fec423a89952a74f87fa2c9b7bce2"}, + {file = "pyasn1-0.4.8-py3.6.egg", hash = "sha256:014c0e9976956a08139dc0712ae195324a75e142284d5f87f1a87ee1b068a359"}, + {file = "pyasn1-0.4.8-py3.7.egg", hash = "sha256:99fcc3c8d804d1bc6d9a099921e39d827026409a58f2a720dcdb89374ea0c776"}, {file = "pyasn1-0.4.8.tar.gz", hash = "sha256:aef77c9fb94a3ac588e87841208bdec464471d9871bd5050a287cc9a475cd0ba"}, ] pycodestyle = [ diff --git a/spiffworkflow-backend/.gitignore 
b/spiffworkflow-backend/.gitignore index 58cb14347..a61561685 100644 --- a/spiffworkflow-backend/.gitignore +++ b/spiffworkflow-backend/.gitignore @@ -1,7 +1,7 @@ .mypy_cache/ /.idea/ /.coverage -/.coverage.* +.coverage.* /.nox/ /.python-version /.pytype/ diff --git a/spiffworkflow-backend/bin/build_and_run_with_docker_compose b/spiffworkflow-backend/bin/build_and_run_with_docker_compose index 4356d974f..2dfa896e6 100755 --- a/spiffworkflow-backend/bin/build_and_run_with_docker_compose +++ b/spiffworkflow-backend/bin/build_and_run_with_docker_compose @@ -9,7 +9,7 @@ set -o errtrace -o errexit -o nounset -o pipefail if [[ -z "${BPMN_SPEC_ABSOLUTE_DIR:-}" ]]; then script_dir="$( cd -- "$(dirname "$0")" >/dev/null 2>&1 ; pwd -P )" - export BPMN_SPEC_ABSOLUTE_DIR="$script_dir/../../sample-process-models" + export BPMN_SPEC_ABSOLUTE_DIR="$script_dir/../../../sample-process-models" fi if [[ -z "${SPIFFWORKFLOW_BACKEND_DOCKER_COMPOSE_PROFILE:-}" ]]; then diff --git a/spiffworkflow-backend/bin/delete_and_import_all_permissions.py b/spiffworkflow-backend/bin/delete_and_import_all_permissions.py index a55e36e7f..966ec5a11 100644 --- a/spiffworkflow-backend/bin/delete_and_import_all_permissions.py +++ b/spiffworkflow-backend/bin/delete_and_import_all_permissions.py @@ -7,7 +7,8 @@ def main() -> None: """Main.""" app = get_hacked_up_app_for_script() with app.app_context(): - AuthorizationService.delete_all_permissions_and_recreate() + AuthorizationService.delete_all_permissions() + AuthorizationService.import_permissions_from_yaml_file() if __name__ == "__main__": diff --git a/spiffworkflow-backend/bin/get_bpmn_json_for_process_instance b/spiffworkflow-backend/bin/get_bpmn_json_for_process_instance old mode 100755 new mode 100644 index 9b6b4c757..dbce01ecc --- a/spiffworkflow-backend/bin/get_bpmn_json_for_process_instance +++ b/spiffworkflow-backend/bin/get_bpmn_json_for_process_instance @@ -1,5 +1,4 @@ """Get the bpmn process json for a given process instance id and store it 
in /tmp.""" -#!/usr/bin/env python import os import sys @@ -18,15 +17,17 @@ def main(process_instance_id: str): id=process_instance_id ).first() + file_path = f"/tmp/{process_instance_id}_bpmn_json.json" if not process_instance: raise Exception( f"Could not find a process instance with id: {process_instance_id}" ) with open( - f"/tmp/{process_instance_id}_bpmn_json.json", "w", encoding="utf-8" + file_path, "w", encoding="utf-8" ) as f: f.write(process_instance.bpmn_json) + print(f"Saved to {file_path}") if len(sys.argv) < 2: diff --git a/spiffworkflow-backend/bin/get_logs_from_docker_compose b/spiffworkflow-backend/bin/get_logs_from_docker_compose index 78c7684e3..d2c06c6f3 100755 --- a/spiffworkflow-backend/bin/get_logs_from_docker_compose +++ b/spiffworkflow-backend/bin/get_logs_from_docker_compose @@ -7,4 +7,5 @@ function error_handler() { trap 'error_handler ${LINENO} $?' ERR set -o errtrace -o errexit -o nounset -o pipefail -docker compose logs "$@" +# "docker compose logs" is only getting the db logs so specify them both +docker compose logs db spiffworkflow-backend diff --git a/spiffworkflow-backend/bin/get_perms b/spiffworkflow-backend/bin/get_perms new file mode 100755 index 000000000..5e0dbd6de --- /dev/null +++ b/spiffworkflow-backend/bin/get_perms @@ -0,0 +1,11 @@ +#!/usr/bin/env bash + +function error_handler() { + >&2 echo "Exited with BAD EXIT CODE '${2}' in ${0} script at line: ${1}." + exit "$2" +} +trap 'error_handler ${LINENO} $?' 
ERR +set -o errtrace -o errexit -o nounset -o pipefail + +set -x +mysql -uroot spiffworkflow_backend_development -e 'select pa.id, g.identifier group_identifier, pt.uri, permission from permission_assignment pa join principal p on p.id = pa.principal_id join `group` g on g.id = p.group_id join permission_target pt on pt.id = pa.permission_target_id;' diff --git a/spiffworkflow-backend/bin/git_commit_bpmn_models_repo b/spiffworkflow-backend/bin/git_commit_bpmn_models_repo index 13e18da9c..0ba512021 100755 --- a/spiffworkflow-backend/bin/git_commit_bpmn_models_repo +++ b/spiffworkflow-backend/bin/git_commit_bpmn_models_repo @@ -11,26 +11,42 @@ set -o errtrace -o errexit -o nounset -o pipefail bpmn_models_absolute_dir="$1" git_commit_message="$2" -git_commit_username="$3" -git_commit_email="$4" +git_branch="$3" +git_commit_username="$4" +git_commit_email="$5" +git_commit_password="$6" -if [[ -z "${2:-}" ]]; then - >&2 echo "usage: $(basename "$0") [bpmn_models_absolute_dir] [git_commit_message]" +if [[ -z "${6:-}" ]]; then + >&2 echo "usage: $(basename "$0") [bpmn_models_absolute_dir] [git_commit_message] [git_branch] [git_commit_username] [git_commit_email]" exit 1 fi -cd "$bpmn_models_absolute_dir" -git add . +function failed_to_get_lock() { + >&2 echo "ERROR: Failed to get lock." + exit 1 +} + +function run() { + cd "$bpmn_models_absolute_dir" + git add . 
+ + # https://unix.stackexchange.com/a/155077/456630 + if [ -z "$(git status --porcelain)" ]; then + echo "No changes to commit" + else + PAT="${git_commit_username}:${git_commit_password}" + AUTH=$(echo -n "$PAT" | openssl base64 | tr -d '\n') -# https://unix.stackexchange.com/a/155077/456630 -if [ -z "$(git status --porcelain)" ]; then - echo "No changes to commit" -else - if [[ -n "$git_commit_username" ]]; then git config --local user.name "$git_commit_username" - fi - if [[ -n "$git_commit_email" ]]; then git config --local user.email "$git_commit_email" + git config --local http.extraHeader "Authorization: Basic $AUTH" + git commit -m "$git_commit_message" + git push --set-upstream origin "$git_branch" + git config --unset --local http.extraHeader fi - git commit -m "$git_commit_message" -fi +} + +exec {lock_fd}>/var/lock/mylockfile || failed_to_get_lock +flock --timeout 60 "$lock_fd" || failed_to_get_lock +run +flock -u "$lock_fd" diff --git a/spiffworkflow-backend/bin/import_tickets_for_command_line.py b/spiffworkflow-backend/bin/import_tickets_for_command_line.py index e193b5990..cc94ba545 100644 --- a/spiffworkflow-backend/bin/import_tickets_for_command_line.py +++ b/spiffworkflow-backend/bin/import_tickets_for_command_line.py @@ -27,7 +27,6 @@ def main(): """Main.""" app = get_hacked_up_app_for_script() with app.app_context(): - process_model_identifier_ticket = "ticket" db.session.query(ProcessInstanceModel).filter( ProcessInstanceModel.process_model_identifier diff --git a/spiffworkflow-backend/bin/keycloak_test_server.py b/spiffworkflow-backend/bin/keycloak_test_server.py index 59efd36c5..3e9334938 100644 --- a/spiffworkflow-backend/bin/keycloak_test_server.py +++ b/spiffworkflow-backend/bin/keycloak_test_server.py @@ -40,7 +40,8 @@ def hello_world(): return ( 'Hello, %s, See private ' 'Log out' - ) % oidc.user_getfield("preferred_username") + % oidc.user_getfield("preferred_username") + ) else: return 'Welcome anonymous, Log in' diff --git 
a/spiffworkflow-backend/bin/recreate_db b/spiffworkflow-backend/bin/recreate_db index 5eb248fe0..ec38c7b39 100755 --- a/spiffworkflow-backend/bin/recreate_db +++ b/spiffworkflow-backend/bin/recreate_db @@ -61,3 +61,7 @@ for task in $tasks; do done SPIFFWORKFLOW_BACKEND_ENV=testing FLASK_APP=src/spiffworkflow_backend poetry run flask db upgrade +if [[ -n "${SPIFFWORKFLOW_BACKEND_ENV:-}" ]] && ! grep -Eq '^(development|testing)$' <<< "$SPIFFWORKFLOW_BACKEND_ENV"; then + mysql -uroot -e "CREATE DATABASE IF NOT EXISTS spiffworkflow_backend_$SPIFFWORKFLOW_BACKEND_ENV" + FLASK_APP=src/spiffworkflow_backend poetry run flask db upgrade +fi diff --git a/spiffworkflow-backend/bin/spiffworkflow-realm.json b/spiffworkflow-backend/bin/spiffworkflow-realm.json index a30f53c14..e31942cf1 100644 --- a/spiffworkflow-backend/bin/spiffworkflow-realm.json +++ b/spiffworkflow-backend/bin/spiffworkflow-realm.json @@ -426,6 +426,7 @@ "emailVerified" : false, "firstName" : "", "lastName" : "", + "email" : "admin@spiffworkflow.org", "credentials" : [ { "id" : "ef435043-ef0c-407a-af5b-ced13182a408", "type" : "password", @@ -446,6 +447,7 @@ "enabled" : true, "totp" : false, "emailVerified" : false, + "email" : "alex@sartography.com", "credentials" : [ { "id" : "81a61a3b-228d-42b3-b39a-f62d8e7f57ca", "type" : "password", @@ -465,6 +467,7 @@ "enabled" : true, "totp" : false, "emailVerified" : false, + "email" : "amir@status.im", "credentials" : [ { "id" : "e589f3ad-bf7b-4756-89f7-7894c03c2831", "type" : "password", @@ -484,6 +487,9 @@ "enabled" : true, "totp" : false, "emailVerified" : false, + "firstName" : "", + "lastName" : "", + "email" : "ciadmin1@spiffworkflow.org", "credentials" : [ { "id" : "111b5ea1-c2ab-470a-a16b-2373bc94de7a", "type" : "password", @@ -499,28 +505,6 @@ }, "notBefore" : 0, "groups" : [ ] - }, { - "id" : "56457e8f-47c6-4f9f-a72b-473dea5edfeb", - "createdTimestamp" : 1657139955336, - "username" : "ciuser1", - "enabled" : true, - "totp" : false, - "emailVerified" : 
false, - "credentials" : [ { - "id" : "762f36e9-47af-44da-8520-cf09d752497a", - "type" : "password", - "createdDate" : 1657139966468, - "secretData" : "{\"value\":\"Dpn9QBJSxvl54b0Fu+OKrKRwmDJbk28FQ3xhlOdJPvZVJU/SpdrcsH7ktYAIkVLkRC5qILSZuNPQ3vDGzE2r1Q==\",\"salt\":\"yXd7N8XIQBkJ7swHDeRzXw==\",\"additionalParameters\":{}}", - "credentialData" : "{\"hashIterations\":27500,\"algorithm\":\"pbkdf2-sha256\",\"additionalParameters\":{}}" - } ], - "disableableCredentialTypes" : [ ], - "requiredActions" : [ ], - "realmRoles" : [ "default-roles-spiffworkflow" ], - "clientRoles" : { - "spiffworkflow-backend" : [ "uma_protection" ] - }, - "notBefore" : 0, - "groups" : [ ] }, { "id" : "d58b61cc-a77e-488f-a427-05f4e0572e20", "createdTimestamp" : 1669132945413, @@ -530,6 +514,7 @@ "emailVerified" : false, "firstName" : "", "lastName" : "", + "email" : "core@status.im", "credentials" : [ { "id" : "ee80092b-8ee6-4699-8492-566e088b48f5", "type" : "password", @@ -550,6 +535,7 @@ "enabled" : true, "totp" : false, "emailVerified" : false, + "email" : "dan@sartography.com", "credentials" : [ { "id" : "d517c520-f500-4542-80e5-7144daef1e32", "type" : "password", @@ -569,6 +555,7 @@ "enabled" : true, "totp" : false, "emailVerified" : false, + "email" : "daniel@sartography.com", "credentials" : [ { "id" : "f240495c-265b-42fc-99db-46928580d07d", "type" : "password", @@ -588,6 +575,7 @@ "enabled" : true, "totp" : false, "emailVerified" : false, + "email" : "elizabeth@sartography.com", "credentials" : [ { "id" : "ae951ec8-9fc9-4f1b-b340-bbbe463ae5c2", "type" : "password", @@ -609,6 +597,7 @@ "emailVerified" : false, "firstName" : "", "lastName" : "", + "email" : "fin@status.im", "credentials" : [ { "id" : "2379940c-98b4-481a-b629-0bd1a4e91acf", "type" : "password", @@ -631,6 +620,7 @@ "emailVerified" : false, "firstName" : "", "lastName" : "", + "email" : "fin1@status.im", "credentials" : [ { "id" : "96216746-ff72-454e-8288-232428d10b42", "type" : "password", @@ -651,6 +641,7 @@ "enabled" : 
true, "totp" : false, "emailVerified" : false, + "email" : "finance_user1@status.im", "credentials" : [ { "id" : "f14722ec-13a7-4d35-a4ec-0475d405ae58", "type" : "password", @@ -670,6 +661,7 @@ "enabled" : true, "totp" : false, "emailVerified" : false, + "email" : "harmeet@status.im", "credentials" : [ { "id" : "89c26090-9bd3-46ac-b038-883d02e3f125", "type" : "password", @@ -691,6 +683,7 @@ "emailVerified" : false, "firstName" : "", "lastName" : "", + "email" : "j@status.im", "credentials" : [ { "id" : "e71ec785-9133-4b7d-8015-1978379af0bb", "type" : "password", @@ -711,6 +704,7 @@ "enabled" : true, "totp" : false, "emailVerified" : false, + "email" : "jakub@status.im", "credentials" : [ { "id" : "ce141fa5-b8d5-4bbe-93e7-22e7119f97c2", "type" : "password", @@ -730,6 +724,7 @@ "enabled" : true, "totp" : false, "emailVerified" : false, + "email" : "jarrad@status.im", "credentials" : [ { "id" : "113e0343-1069-476d-83f9-21d98edb9cfa", "type" : "password", @@ -749,6 +744,7 @@ "enabled" : true, "totp" : false, "emailVerified" : false, + "email" : "jason@sartography.com", "credentials" : [ { "id" : "40abf32e-f0cc-4a17-8231-1a69a02c1b0b", "type" : "password", @@ -768,6 +764,7 @@ "enabled" : true, "totp" : false, "emailVerified" : false, + "email" : "jon@sartography.com", "credentials" : [ { "id" : "8b520e01-5b9b-44ab-9ee8-505bd0831a45", "type" : "password", @@ -787,6 +784,7 @@ "enabled" : true, "totp" : false, "emailVerified" : false, + "email" : "kb@sartography.com", "credentials" : [ { "id" : "2c0be363-038f-48f1-86d6-91fdd28657cf", "type" : "password", @@ -808,6 +806,7 @@ "emailVerified" : false, "firstName" : "", "lastName" : "", + "email" : "lead@status.im", "credentials" : [ { "id" : "96e836a4-1a84-45c5-a9ed-651b0c90195e", "type" : "password", @@ -830,6 +829,7 @@ "emailVerified" : false, "firstName" : "", "lastName" : "", + "email" : "lead1@status.im", "credentials" : [ { "id" : "4e17388b-6c44-44e1-b20a-a873c0feb9a8", "type" : "password", @@ -850,6 +850,7 @@ "enabled" 
: true, "totp" : false, "emailVerified" : false, + "email" : "manuchehr@status.im", "credentials" : [ { "id" : "07dabf55-b5d3-4f98-abba-3334086ecf5e", "type" : "password", @@ -869,6 +870,7 @@ "enabled" : true, "totp" : false, "emailVerified" : false, + "email" : "mike@sartography.com", "credentials" : [ { "id" : "1ed375fb-0f1a-4c2a-9243-2477242cf7bd", "type" : "password", @@ -887,7 +889,10 @@ "username" : "natalia", "enabled" : true, "totp" : false, - "emailVerified" : false, + "emailVerified" : true, + "firstName" : "", + "lastName" : "", + "email" : "natalia@sartography.com", "credentials" : [ { "id" : "b6aa9936-39cc-4931-bfeb-60e6753de5ba", "type" : "password", @@ -907,6 +912,7 @@ "enabled" : true, "totp" : false, "emailVerified" : false, + "email" : "sasha@status.im", "credentials" : [ { "id" : "4a170af4-6f0c-4e7b-b70c-e674edf619df", "type" : "password", @@ -926,6 +932,7 @@ "enabled" : true, "totp" : false, "emailVerified" : false, + "email" : "service-account@status.im", "serviceAccountClientId" : "spiffworkflow-backend", "credentials" : [ ], "disableableCredentialTypes" : [ ], @@ -943,6 +950,7 @@ "enabled" : true, "totp" : false, "emailVerified" : false, + "email" : "service-account-withauth@status.im", "serviceAccountClientId" : "withAuth", "credentials" : [ ], "disableableCredentialTypes" : [ ], @@ -2166,7 +2174,7 @@ "subType" : "authenticated", "subComponents" : { }, "config" : { - "allowed-protocol-mapper-types" : [ "oidc-usermodel-attribute-mapper", "oidc-sha256-pairwise-sub-mapper", "saml-user-property-mapper", "oidc-usermodel-property-mapper", "saml-role-list-mapper", "saml-user-attribute-mapper", "oidc-full-name-mapper", "oidc-address-mapper" ] + "allowed-protocol-mapper-types" : [ "saml-user-property-mapper", "oidc-full-name-mapper", "saml-user-attribute-mapper", "oidc-usermodel-property-mapper", "oidc-address-mapper", "oidc-sha256-pairwise-sub-mapper", "oidc-usermodel-attribute-mapper", "saml-role-list-mapper" ] } }, { "id" : 
"d68e938d-dde6-47d9-bdc8-8e8523eb08cd", @@ -2184,7 +2192,7 @@ "subType" : "anonymous", "subComponents" : { }, "config" : { - "allowed-protocol-mapper-types" : [ "oidc-address-mapper", "oidc-full-name-mapper", "oidc-usermodel-attribute-mapper", "saml-user-property-mapper", "saml-user-attribute-mapper", "oidc-sha256-pairwise-sub-mapper", "oidc-usermodel-property-mapper", "saml-role-list-mapper" ] + "allowed-protocol-mapper-types" : [ "oidc-sha256-pairwise-sub-mapper", "oidc-full-name-mapper", "saml-user-property-mapper", "oidc-usermodel-property-mapper", "oidc-usermodel-attribute-mapper", "saml-role-list-mapper", "oidc-address-mapper", "saml-user-attribute-mapper" ] } }, { "id" : "3854361d-3fe5-47fb-9417-a99592e3dc5c", @@ -2274,7 +2282,7 @@ "internationalizationEnabled" : false, "supportedLocales" : [ ], "authenticationFlows" : [ { - "id" : "b896c673-57ab-4f24-bbb1-334bdadbecd3", + "id" : "76ae522e-7ab3-48dc-af76-9cb8069368a2", "alias" : "Account verification options", "description" : "Method with which to verity the existing account", "providerId" : "basic-flow", @@ -2296,7 +2304,7 @@ "userSetupAllowed" : false } ] }, { - "id" : "4da99e29-371e-4f4b-a863-e5079f30a714", + "id" : "ddf80243-ec40-4c21-ae94-2967d841f84c", "alias" : "Authentication Options", "description" : "Authentication options.", "providerId" : "basic-flow", @@ -2325,7 +2333,7 @@ "userSetupAllowed" : false } ] }, { - "id" : "d398c928-e201-4e8b-ab09-289bb351cd2e", + "id" : "4f075680-46b7-49eb-b94c-d7425f105cb9", "alias" : "Browser - Conditional OTP", "description" : "Flow to determine if the OTP is required for the authentication", "providerId" : "basic-flow", @@ -2347,7 +2355,7 @@ "userSetupAllowed" : false } ] }, { - "id" : "663b7aa3-84f6-4347-8ed4-588c2464b75d", + "id" : "a0467c77-c3dc-4df6-acd2-c05ca13601ed", "alias" : "Direct Grant - Conditional OTP", "description" : "Flow to determine if the OTP is required for the authentication", "providerId" : "basic-flow", @@ -2369,7 +2377,7 @@ 
"userSetupAllowed" : false } ] }, { - "id" : "98013bc1-e4dd-41f7-9849-1f898143b944", + "id" : "07536fec-8d41-4c73-845f-ca85002022e0", "alias" : "First broker login - Conditional OTP", "description" : "Flow to determine if the OTP is required for the authentication", "providerId" : "basic-flow", @@ -2391,7 +2399,7 @@ "userSetupAllowed" : false } ] }, { - "id" : "b77e7545-9e39-4d72-93f8-1b38c954c2e2", + "id" : "f123f912-71fb-4596-97f9-c0628a59413d", "alias" : "Handle Existing Account", "description" : "Handle what to do if there is existing account with same email/username like authenticated identity provider", "providerId" : "basic-flow", @@ -2413,7 +2421,7 @@ "userSetupAllowed" : false } ] }, { - "id" : "2470e6f4-9a01-476a-9057-75d78e577182", + "id" : "03c26cc5-366b-462d-9297-b4016f8d7c57", "alias" : "Reset - Conditional OTP", "description" : "Flow to determine if the OTP should be reset or not. Set to REQUIRED to force.", "providerId" : "basic-flow", @@ -2435,7 +2443,7 @@ "userSetupAllowed" : false } ] }, { - "id" : "8e7dad0b-f4e1-4534-b618-b635b0a0e4f9", + "id" : "1b4f474e-aa64-45cc-90f1-63504585d89c", "alias" : "User creation or linking", "description" : "Flow for the existing/non-existing user alternatives", "providerId" : "basic-flow", @@ -2458,7 +2466,7 @@ "userSetupAllowed" : false } ] }, { - "id" : "97c83e43-cba8-4d92-b108-9181bca07a1e", + "id" : "38024dd6-daff-45de-8782-06b07b7bfa56", "alias" : "Verify Existing Account by Re-authentication", "description" : "Reauthentication of existing account", "providerId" : "basic-flow", @@ -2480,7 +2488,7 @@ "userSetupAllowed" : false } ] }, { - "id" : "fbabd64c-20de-4b8c-bfd2-be6822572278", + "id" : "b7e30fca-e4ac-4886-a2e7-642fe2a27ee7", "alias" : "browser", "description" : "browser based authentication", "providerId" : "basic-flow", @@ -2516,7 +2524,7 @@ "userSetupAllowed" : false } ] }, { - "id" : "0628a99f-b194-495d-8e54-cc4ca8684956", + "id" : "92e3571d-ac3e-4e79-a391-5315954e866f", "alias" : "clients", 
"description" : "Base authentication for clients", "providerId" : "client-flow", @@ -2552,7 +2560,7 @@ "userSetupAllowed" : false } ] }, { - "id" : "ce6bf7af-3bff-48ce-b214-7fed08503a2a", + "id" : "5093dd2d-fe5d-4f41-a54d-03cd648d9b7f", "alias" : "direct grant", "description" : "OpenID Connect Resource Owner Grant", "providerId" : "basic-flow", @@ -2581,7 +2589,7 @@ "userSetupAllowed" : false } ] }, { - "id" : "60ce729b-d055-4ae7-83cb-85dbcf8cfdaa", + "id" : "95d2f1ff-6907-47ce-a93c-db462fe04844", "alias" : "docker auth", "description" : "Used by Docker clients to authenticate against the IDP", "providerId" : "basic-flow", @@ -2596,7 +2604,7 @@ "userSetupAllowed" : false } ] }, { - "id" : "0bd3cf93-7f33-46b2-ad1f-85cdfb0a87f9", + "id" : "27405ee8-5730-419c-944c-a7c67edd91ce", "alias" : "first broker login", "description" : "Actions taken after first broker login with identity provider account, which is not yet linked to any Keycloak account", "providerId" : "basic-flow", @@ -2619,7 +2627,7 @@ "userSetupAllowed" : false } ] }, { - "id" : "3e52f178-9b9d-4a62-97d5-f9f3f872bcd9", + "id" : "fce6d926-3a99-40ee-b79e-cae84493dbd8", "alias" : "forms", "description" : "Username, password, otp and other auth forms.", "providerId" : "basic-flow", @@ -2641,7 +2649,7 @@ "userSetupAllowed" : false } ] }, { - "id" : "3f5fd6cc-2935-45d8-9bef-6857bba3657a", + "id" : "75d93596-b7fb-4a2c-a780-e6a038e66fe9", "alias" : "http challenge", "description" : "An authentication flow based on challenge-response HTTP Authentication Schemes", "providerId" : "basic-flow", @@ -2663,7 +2671,7 @@ "userSetupAllowed" : false } ] }, { - "id" : "2c2b32dd-57dc-45d7-9a24-b4a253cb6a03", + "id" : "04cdc1ac-c58d-4f8c-bc10-7d5e2bb99485", "alias" : "registration", "description" : "registration flow", "providerId" : "basic-flow", @@ -2679,7 +2687,7 @@ "userSetupAllowed" : false } ] }, { - "id" : "dbc28b13-dba7-42a0-a8ab-faa8762979c3", + "id" : "99593c1e-f2a5-4198-ad41-634694259110", "alias" : "registration 
form", "description" : "registration form", "providerId" : "form-flow", @@ -2715,7 +2723,7 @@ "userSetupAllowed" : false } ] }, { - "id" : "b4a901d5-e7b9-4eb6-9f8e-1d3305846828", + "id" : "7d53f026-b05e-4a9c-aba6-23b17826a4d4", "alias" : "reset credentials", "description" : "Reset credentials for a user if they forgot their password or something", "providerId" : "basic-flow", @@ -2751,7 +2759,7 @@ "userSetupAllowed" : false } ] }, { - "id" : "824fe757-cc5c-4e13-ab98-9a2132e10f5c", + "id" : "7ca17e64-f916-4d6c-91f0-815ec66f50e8", "alias" : "saml ecp", "description" : "SAML ECP Profile Authentication Flow", "providerId" : "basic-flow", @@ -2767,13 +2775,13 @@ } ] } ], "authenticatorConfig" : [ { - "id" : "817a93da-29df-447f-ab05-cd9557e66745", + "id" : "9b71d817-b999-479d-97f8-07e39dd9e9fa", "alias" : "create unique user config", "config" : { "require.password.update.after.registration" : "false" } }, { - "id" : "4a8a9659-fa0d-4da8-907b-3b6daec1c878", + "id" : "f9f13ba1-6a17-436b-a80b-6ccc042f9fc2", "alias" : "review profile config", "config" : { "update.profile.on.first.login" : "missing" diff --git a/spiffworkflow-backend/bin/start_keycloak b/spiffworkflow-backend/bin/start_keycloak index 32b502ca0..f76347da7 100755 --- a/spiffworkflow-backend/bin/start_keycloak +++ b/spiffworkflow-backend/bin/start_keycloak @@ -18,7 +18,19 @@ set -o errtrace -o errexit -o nounset -o pipefail if ! 
docker network inspect spiffworkflow > /dev/null 2>&1; then docker network create spiffworkflow fi -docker rm keycloak 2>/dev/null || echo 'no keycloak container found, safe to start new container' + +# https://stackoverflow.com/a/60579344/6090676 +container_name="keycloak" +if [[ -n "$(docker ps -qa -f name=$container_name)" ]]; then + echo ":: Found container - $container_name" + if [[ -n "$(docker ps -q -f name=$container_name)" ]]; then + echo ":: Stopping running container - $container_name" + docker stop $container_name + fi + echo ":: Removing stopped container - $container_name" + docker rm $container_name +fi + docker run \ -p 7002:8080 \ -d \ diff --git a/spiffworkflow-backend/conftest.py b/spiffworkflow-backend/conftest.py index c3af94332..b24a7ed1b 100644 --- a/spiffworkflow-backend/conftest.py +++ b/spiffworkflow-backend/conftest.py @@ -9,7 +9,7 @@ from flask_bpmn.models.db import db from flask_bpmn.models.db import SpiffworkflowBaseDBModel from tests.spiffworkflow_backend.helpers.base_test import BaseTest -from spiffworkflow_backend.models.active_task_user import ActiveTaskUserModel +from spiffworkflow_backend.models.human_task_user import HumanTaskUserModel from spiffworkflow_backend.models.process_instance import ProcessInstanceModel from spiffworkflow_backend.models.user import UserModel from spiffworkflow_backend.services.process_instance_processor import ( @@ -47,7 +47,7 @@ def app() -> Flask: @pytest.fixture() def with_db_and_bpmn_file_cleanup() -> None: """Process_group_resource.""" - db.session.query(ActiveTaskUserModel).delete() + db.session.query(HumanTaskUserModel).delete() for model in SpiffworkflowBaseDBModel._all_subclasses(): db.session.query(model).delete() diff --git a/spiffworkflow-backend/docker-compose.yml b/spiffworkflow-backend/docker-compose.yml index 1cbe9dcb7..410cbb7ab 100644 --- a/spiffworkflow-backend/docker-compose.yml +++ b/spiffworkflow-backend/docker-compose.yml @@ -68,7 +68,7 @@ services: - "7000:7000" network_mode: 
host volumes: - - ${BPMN_SPEC_ABSOLUTE_DIR:-./../sample-process-models}:/app/process_models + - ${BPMN_SPEC_ABSOLUTE_DIR:-../../sample-process-models}:/app/process_models - ./log:/app/log healthcheck: test: curl localhost:7000/v1.0/status --fail @@ -82,7 +82,7 @@ services: profiles: - debug volumes: - - ${BPMN_SPEC_ABSOLUTE_DIR:-./../sample-process-models}:/app/process_models + - ${BPMN_SPEC_ABSOLUTE_DIR:-../../sample-process-models}:/app/process_models - ./:/app command: /app/bin/boot_in_docker_debug_mode diff --git a/spiffworkflow-backend/migrations/versions/4d75421c0af0_.py b/spiffworkflow-backend/migrations/versions/907bcf0c3d75_.py similarity index 90% rename from spiffworkflow-backend/migrations/versions/4d75421c0af0_.py rename to spiffworkflow-backend/migrations/versions/907bcf0c3d75_.py index 34fa1e974..552afe485 100644 --- a/spiffworkflow-backend/migrations/versions/4d75421c0af0_.py +++ b/spiffworkflow-backend/migrations/versions/907bcf0c3d75_.py @@ -1,8 +1,8 @@ """empty message -Revision ID: 4d75421c0af0 +Revision ID: 907bcf0c3d75 Revises: -Create Date: 2022-12-06 17:42:56.417673 +Create Date: 2022-12-28 13:52:13.030028 """ from alembic import op @@ -10,7 +10,7 @@ import sqlalchemy as sa # revision identifiers, used by Alembic. 
-revision = '4d75421c0af0' +revision = '907bcf0c3d75' down_revision = None branch_labels = None depends_on = None @@ -72,14 +72,15 @@ def upgrade(): op.create_table('user', sa.Column('id', sa.Integer(), nullable=False), sa.Column('username', sa.String(length=255), nullable=False), - sa.Column('uid', sa.String(length=50), nullable=True), - sa.Column('service', sa.String(length=50), nullable=False), + sa.Column('service', sa.String(length=255), nullable=False), sa.Column('service_id', sa.String(length=255), nullable=False), - sa.Column('name', sa.String(length=255), nullable=True), + sa.Column('display_name', sa.String(length=255), nullable=True), sa.Column('email', sa.String(length=255), nullable=True), + sa.Column('updated_at_in_seconds', sa.Integer(), nullable=True), + sa.Column('created_at_in_seconds', sa.Integer(), nullable=True), sa.PrimaryKeyConstraint('id'), sa.UniqueConstraint('service', 'service_id', name='service_key'), - sa.UniqueConstraint('uid') + sa.UniqueConstraint('username') ) op.create_table('message_correlation_property', sa.Column('id', sa.Integer(), nullable=False), @@ -174,11 +175,20 @@ def upgrade(): sa.PrimaryKeyConstraint('id'), sa.UniqueConstraint('user_id', 'group_id', name='user_group_assignment_unique') ) - op.create_table('active_task', + op.create_table('user_group_assignment_waiting', + sa.Column('id', sa.Integer(), nullable=False), + sa.Column('username', sa.String(length=255), nullable=False), + sa.Column('group_id', sa.Integer(), nullable=False), + sa.ForeignKeyConstraint(['group_id'], ['group.id'], ), + sa.PrimaryKeyConstraint('id'), + sa.UniqueConstraint('username', 'group_id', name='user_group_assignment_staged_unique') + ) + op.create_table('human_task', sa.Column('id', sa.Integer(), nullable=False), sa.Column('process_instance_id', sa.Integer(), nullable=False), - sa.Column('actual_owner_id', sa.Integer(), nullable=True), sa.Column('lane_assignment_id', sa.Integer(), nullable=True), + sa.Column('completed_by_user_id', 
sa.Integer(), nullable=True), + sa.Column('actual_owner_id', sa.Integer(), nullable=True), sa.Column('form_file_name', sa.String(length=50), nullable=True), sa.Column('ui_form_file_name', sa.String(length=50), nullable=True), sa.Column('updated_at_in_seconds', sa.Integer(), nullable=True), @@ -189,12 +199,15 @@ def upgrade(): sa.Column('task_type', sa.String(length=50), nullable=True), sa.Column('task_status', sa.String(length=50), nullable=True), sa.Column('process_model_display_name', sa.String(length=255), nullable=True), + sa.Column('completed', sa.Boolean(), nullable=False), sa.ForeignKeyConstraint(['actual_owner_id'], ['user.id'], ), + sa.ForeignKeyConstraint(['completed_by_user_id'], ['user.id'], ), sa.ForeignKeyConstraint(['lane_assignment_id'], ['group.id'], ), sa.ForeignKeyConstraint(['process_instance_id'], ['process_instance.id'], ), sa.PrimaryKeyConstraint('id'), - sa.UniqueConstraint('task_id', 'process_instance_id', name='active_task_unique') + sa.UniqueConstraint('task_id', 'process_instance_id', name='human_task_unique') ) + op.create_index(op.f('ix_human_task_completed'), 'human_task', ['completed'], unique=False) op.create_table('message_correlation', sa.Column('id', sa.Integer(), nullable=False), sa.Column('process_instance_id', sa.Integer(), nullable=False), @@ -255,23 +268,20 @@ def upgrade(): sa.Column('spiff_step', sa.Integer(), nullable=False), sa.Column('task_json', sa.JSON(), nullable=False), sa.Column('timestamp', sa.DECIMAL(precision=17, scale=6), nullable=False), - sa.Column('completed_by_user_id', sa.Integer(), nullable=True), - sa.Column('lane_assignment_id', sa.Integer(), nullable=True), - sa.ForeignKeyConstraint(['lane_assignment_id'], ['group.id'], ), sa.ForeignKeyConstraint(['process_instance_id'], ['process_instance.id'], ), sa.PrimaryKeyConstraint('id') ) - op.create_table('active_task_user', + op.create_table('human_task_user', sa.Column('id', sa.Integer(), nullable=False), - sa.Column('active_task_id', sa.Integer(), 
nullable=False), + sa.Column('human_task_id', sa.Integer(), nullable=False), sa.Column('user_id', sa.Integer(), nullable=False), - sa.ForeignKeyConstraint(['active_task_id'], ['active_task.id'], ), + sa.ForeignKeyConstraint(['human_task_id'], ['human_task.id'], ), sa.ForeignKeyConstraint(['user_id'], ['user.id'], ), sa.PrimaryKeyConstraint('id'), - sa.UniqueConstraint('active_task_id', 'user_id', name='active_task_user_unique') + sa.UniqueConstraint('human_task_id', 'user_id', name='human_task_user_unique') ) - op.create_index(op.f('ix_active_task_user_active_task_id'), 'active_task_user', ['active_task_id'], unique=False) - op.create_index(op.f('ix_active_task_user_user_id'), 'active_task_user', ['user_id'], unique=False) + op.create_index(op.f('ix_human_task_user_human_task_id'), 'human_task_user', ['human_task_id'], unique=False) + op.create_index(op.f('ix_human_task_user_user_id'), 'human_task_user', ['user_id'], unique=False) op.create_table('message_correlation_message_instance', sa.Column('id', sa.Integer(), nullable=False), sa.Column('message_instance_id', sa.Integer(), nullable=False), @@ -291,9 +301,9 @@ def downgrade(): op.drop_index(op.f('ix_message_correlation_message_instance_message_instance_id'), table_name='message_correlation_message_instance') op.drop_index(op.f('ix_message_correlation_message_instance_message_correlation_id'), table_name='message_correlation_message_instance') op.drop_table('message_correlation_message_instance') - op.drop_index(op.f('ix_active_task_user_user_id'), table_name='active_task_user') - op.drop_index(op.f('ix_active_task_user_active_task_id'), table_name='active_task_user') - op.drop_table('active_task_user') + op.drop_index(op.f('ix_human_task_user_user_id'), table_name='human_task_user') + op.drop_index(op.f('ix_human_task_user_human_task_id'), table_name='human_task_user') + op.drop_table('human_task_user') op.drop_table('spiff_step_details') op.drop_index(op.f('ix_process_instance_metadata_key'), 
table_name='process_instance_metadata') op.drop_table('process_instance_metadata') @@ -304,7 +314,9 @@ def downgrade(): op.drop_index(op.f('ix_message_correlation_name'), table_name='message_correlation') op.drop_index(op.f('ix_message_correlation_message_correlation_property_id'), table_name='message_correlation') op.drop_table('message_correlation') - op.drop_table('active_task') + op.drop_index(op.f('ix_human_task_completed'), table_name='human_task') + op.drop_table('human_task') + op.drop_table('user_group_assignment_waiting') op.drop_table('user_group_assignment') op.drop_table('secret') op.drop_table('refresh_token') diff --git a/spiffworkflow-backend/poetry.lock b/spiffworkflow-backend/poetry.lock index a23004b40..707c5b3c3 100644 --- a/spiffworkflow-backend/poetry.lock +++ b/spiffworkflow-backend/poetry.lock @@ -654,7 +654,7 @@ werkzeug = "*" type = "git" url = "https://github.com/sartography/flask-bpmn" reference = "main" -resolved_reference = "860f2387bebdaa9220e9fbf6f8fa7f74e805d0d4" +resolved_reference = "c79c1e0b6d34ec05d82cce888b5e57b33d24403b" [[package]] name = "Flask-Cors" @@ -1851,7 +1851,7 @@ lxml = "*" type = "git" url = "https://github.com/sartography/SpiffWorkflow" reference = "main" -resolved_reference = "ffb1686757f944065580dd2db8def73d6c1f0134" +resolved_reference = "80640024a8030481645f0c34f34c57e88f7b4f0c" [[package]] name = "SQLAlchemy" diff --git a/spiffworkflow-backend/src/.coverage.jason-Gazelle.473795.719220 b/spiffworkflow-backend/src/.coverage.jason-Gazelle.473795.719220 new file mode 100644 index 000000000..3c5fc7087 Binary files /dev/null and b/spiffworkflow-backend/src/.coverage.jason-Gazelle.473795.719220 differ diff --git a/spiffworkflow-backend/src/.coverage.jason-Gazelle.475245.497833 b/spiffworkflow-backend/src/.coverage.jason-Gazelle.475245.497833 new file mode 100644 index 000000000..214df28dc Binary files /dev/null and b/spiffworkflow-backend/src/.coverage.jason-Gazelle.475245.497833 differ diff --git 
a/spiffworkflow-backend/src/.coverage.jason-Gazelle.476451.578823 b/spiffworkflow-backend/src/.coverage.jason-Gazelle.476451.578823 new file mode 100644 index 000000000..ef7f5c499 Binary files /dev/null and b/spiffworkflow-backend/src/.coverage.jason-Gazelle.476451.578823 differ diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/__init__.py b/spiffworkflow-backend/src/spiffworkflow_backend/__init__.py index 9599116a2..f1de793d4 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/__init__.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/__init__.py @@ -18,11 +18,11 @@ from werkzeug.exceptions import NotFound import spiffworkflow_backend.load_database_models # noqa: F401 from spiffworkflow_backend.config import setup_config +from spiffworkflow_backend.helpers.api_version import V1_API_PATH_PREFIX from spiffworkflow_backend.routes.admin_blueprint.admin_blueprint import admin_blueprint from spiffworkflow_backend.routes.openid_blueprint.openid_blueprint import ( openid_blueprint, ) -from spiffworkflow_backend.routes.process_api_blueprint import process_api_blueprint from spiffworkflow_backend.routes.user import verify_token from spiffworkflow_backend.routes.user_blueprint import user_blueprint from spiffworkflow_backend.services.authorization_service import AuthorizationService @@ -93,7 +93,8 @@ def create_app() -> flask.app.Flask: if os.environ.get("FLASK_SESSION_SECRET_KEY") is None: raise KeyError( - "Cannot find the secret_key from the environment. Please set FLASK_SESSION_SECRET_KEY" + "Cannot find the secret_key from the environment. 
Please set" + " FLASK_SESSION_SECRET_KEY" ) app.secret_key = os.environ.get("FLASK_SESSION_SECRET_KEY") @@ -103,7 +104,6 @@ def create_app() -> flask.app.Flask: migrate.init_app(app, db) app.register_blueprint(user_blueprint) - app.register_blueprint(process_api_blueprint) app.register_blueprint(api_error_blueprint) app.register_blueprint(admin_blueprint, url_prefix="/admin") app.register_blueprint(openid_blueprint, url_prefix="/openid") @@ -117,7 +117,7 @@ def create_app() -> flask.app.Flask: ] CORS(app, origins=origins_re, max_age=3600) - connexion_app.add_api("api.yml", base_path="/v1.0") + connexion_app.add_api("api.yml", base_path=V1_API_PATH_PREFIX) mail = Mail(app) app.config["MAIL_APP"] = mail diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/api.yml b/spiffworkflow-backend/src/spiffworkflow_backend/api.yml index 52890114c..a15b44468 100755 --- a/spiffworkflow-backend/src/spiffworkflow_backend/api.yml +++ b/spiffworkflow-backend/src/spiffworkflow_backend/api.yml @@ -8,10 +8,6 @@ servers: - url: http://localhost:5000/v1.0 # this is handled in flask now security: [] -# - jwt: ["secret"] -# - oAuth2AuthCode: -# - read_email -# - uid paths: /login: @@ -22,7 +18,6 @@ paths: schema: type: string get: - security: [] summary: redirect to open id authentication server operationId: spiffworkflow_backend.routes.user.login tags: @@ -48,7 +43,6 @@ paths: schema: type: string get: - security: [] operationId: spiffworkflow_backend.routes.user.login_return tags: - Authentication @@ -68,7 +62,6 @@ paths: schema: type: string get: - security: [] operationId: spiffworkflow_backend.routes.user.logout summary: Logout authenticated user tags: @@ -78,7 +71,6 @@ paths: description: Logout Authenticated User /logout_return: get: - security: [] operationId: spiffworkflow_backend.routes.user.logout_return summary: Logout authenticated user tags: @@ -89,7 +81,6 @@ paths: /login_api: get: - security: [] operationId: spiffworkflow_backend.routes.user.login_api summary: 
Authenticate user for API access tags: @@ -115,7 +106,6 @@ paths: schema: type: string get: - security: [] operationId: spiffworkflow_backend.routes.user.login_api_return tags: - Authentication @@ -125,8 +115,7 @@ paths: /status: get: - security: [] - operationId: spiffworkflow_backend.routes.process_api_blueprint.status + operationId: spiffworkflow_backend.routes.health_controller.status summary: Returns 200 if the server is Responding tags: - Liveness @@ -160,7 +149,7 @@ paths: schema: type: integer get: - operationId: spiffworkflow_backend.routes.process_api_blueprint.process_group_list + operationId: spiffworkflow_backend.routes.process_groups_controller.process_group_list summary: get list tags: - Process Groups @@ -174,7 +163,7 @@ paths: items: $ref: "#/components/schemas/ProcessModelCategory" post: - operationId: spiffworkflow_backend.routes.process_api_blueprint.process_group_add + operationId: spiffworkflow_backend.routes.process_groups_controller.process_group_create summary: Add process group tags: - Process Groups @@ -201,7 +190,7 @@ paths: type: string # process_group_show get: - operationId: spiffworkflow_backend.routes.process_api_blueprint.process_group_show + operationId: spiffworkflow_backend.routes.process_groups_controller.process_group_show summary: Returns a single process group tags: - Process Groups @@ -213,7 +202,7 @@ paths: schema: $ref: "#/components/schemas/ProcessModelCategory" delete: - operationId: spiffworkflow_backend.routes.process_api_blueprint.process_group_delete + operationId: spiffworkflow_backend.routes.process_groups_controller.process_group_delete summary: Deletes a single process group tags: - Process Groups @@ -221,7 +210,7 @@ paths: "200": description: The process group was deleted. 
put: - operationId: spiffworkflow_backend.routes.process_api_blueprint.process_group_update + operationId: spiffworkflow_backend.routes.process_groups_controller.process_group_update summary: Updates a single process group tags: - Process Groups @@ -253,7 +242,7 @@ paths: schema: type: string put: - operationId: spiffworkflow_backend.routes.process_api_blueprint.process_group_move + operationId: spiffworkflow_backend.routes.process_groups_controller.process_group_move summary: returns the new group tags: - Process Groups @@ -304,7 +293,7 @@ paths: schema: type: integer get: - operationId: spiffworkflow_backend.routes.process_api_blueprint.process_model_list + operationId: spiffworkflow_backend.routes.process_models_controller.process_model_list summary: Return a list of process models for a given process group tags: - Process Models @@ -327,7 +316,33 @@ paths: schema: type: string post: - operationId: spiffworkflow_backend.routes.process_api_blueprint.process_model_create + operationId: spiffworkflow_backend.routes.process_models_controller.process_model_create + summary: Creates a new process model with the given parameters. + tags: + - Process Models + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/ProcessModel" + responses: + "201": + description: Process model created successfully. + content: + application/json: + schema: + $ref: "#/components/schemas/ProcessModel" + + /process-models-natural-language/{modified_process_group_id}: + parameters: + - name: modified_process_group_id + in: path + required: true + description: modified id of an existing process group + schema: + type: string + post: + operationId: spiffworkflow_backend.routes.process_models_controller.process_model_create_with_natural_language summary: Creates a new process model with the given parameters. 
tags: - Process Models @@ -353,7 +368,7 @@ paths: schema: type: string post: - operationId: spiffworkflow_backend.routes.process_api_blueprint.add_file + operationId: spiffworkflow_backend.routes.process_models_controller.process_model_file_create summary: Add a new workflow spec file tags: - Process Model Files @@ -383,7 +398,7 @@ paths: schema: type: string get: - operationId: spiffworkflow_backend.routes.process_api_blueprint.process_model_show + operationId: spiffworkflow_backend.routes.process_models_controller.process_model_show summary: Returns a single process model tags: - Process Models @@ -395,7 +410,7 @@ paths: schema: $ref: "#/components/schemas/ProcessModel" put: - operationId: spiffworkflow_backend.routes.process_api_blueprint.process_model_update + operationId: spiffworkflow_backend.routes.process_models_controller.process_model_update summary: Modifies an existing process model with the given parameters. tags: - Process Models @@ -412,7 +427,7 @@ paths: schema: $ref: "#/components/schemas/ProcessModel" delete: - operationId: spiffworkflow_backend.routes.process_api_blueprint.process_model_delete + operationId: spiffworkflow_backend.routes.process_models_controller.process_model_delete summary: Removes an existing process model tags: - Process Models @@ -439,7 +454,7 @@ paths: schema: type: string put: - operationId: spiffworkflow_backend.routes.process_api_blueprint.process_model_move + operationId: spiffworkflow_backend.routes.process_models_controller.process_model_move summary: returns the new model tags: - Process Models @@ -466,7 +481,7 @@ paths: schema: type: string post: - operationId: spiffworkflow_backend.routes.process_api_blueprint.process_model_publish + operationId: spiffworkflow_backend.routes.process_models_controller.process_model_publish summary: Merge changes from this model to another branch. 
tags: - Process Models @@ -500,6 +515,11 @@ paths: post: operationId: spiffworkflow_backend.routes.process_api_blueprint.github_webhook_receive summary: receives push webhooks from github so we can keep our process model repo up to date + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/ProcessModelCategory" tags: - git responses: @@ -510,6 +530,119 @@ paths: schema: $ref: "#/components/schemas/OkTrue" + /process-instances/for-me: + parameters: + - name: process_model_identifier + in: query + required: false + description: The unique id of an existing process model. + schema: + type: string + - name: page + in: query + required: false + description: The page number to return. Defaults to page 1. + schema: + type: integer + - name: per_page + in: query + required: false + description: The page number to return. Defaults to page 1. + schema: + type: integer + - name: start_from + in: query + required: false + description: For filtering - beginning of start window - in seconds since epoch + schema: + type: integer + - name: start_to + in: query + required: false + description: For filtering - end of start window - in seconds since epoch + schema: + type: integer + - name: end_from + in: query + required: false + description: For filtering - beginning of end window - in seconds since epoch + schema: + type: integer + - name: end_to + in: query + required: false + description: For filtering - end of end window - in seconds since epoch + schema: + type: integer + - name: process_status + in: query + required: false + description: For filtering - not_started, user_input_required, waiting, complete, error, or suspended + schema: + type: string + - name: initiated_by_me + in: query + required: false + description: For filtering - show instances initiated by me + schema: + type: boolean + - name: with_tasks_completed_by_me + in: query + required: false + description: For filtering - show instances with tasks completed by me + schema: + 
type: boolean + - name: with_tasks_completed_by_my_group + in: query + required: false + description: For filtering - show instances with tasks completed by my group + schema: + type: boolean + - name: with_relation_to_me + in: query + required: false + description: For filtering - show instances that have something to do with me + schema: + type: boolean + - name: user_filter + in: query + required: false + description: For filtering - indicates the user has manually entered a query + schema: + type: boolean + - name: report_identifier + in: query + required: false + description: Specifies the identifier of a report to use, if any + schema: + type: string + - name: report_id + in: query + required: false + description: Specifies the identifier of a report to use, if any + schema: + type: integer + - name: user_group_identifier + in: query + required: false + description: The identifier of the group to get the process instances for + schema: + type: string + get: + operationId: spiffworkflow_backend.routes.process_instances_controller.process_instance_list_for_me + summary: Returns a list of process instances that are associated with me. + tags: + - Process Instances + responses: + "200": + description: Workflow. 
+ content: + application/json: + schema: + type: array + items: + $ref: "#/components/schemas/Workflow" + /process-instances: parameters: - name: process_model_identifier @@ -578,6 +711,12 @@ paths: description: For filtering - show instances with tasks completed by my group schema: type: boolean + - name: with_relation_to_me + in: query + required: false + description: For filtering - show instances that have something to do with me + schema: + type: boolean - name: user_filter in: query required: false @@ -596,9 +735,15 @@ paths: description: Specifies the identifier of a report to use, if any schema: type: integer + - name: user_group_identifier + in: query + required: false + description: The identifier of the group to get the process instances for + schema: + type: string get: - operationId: spiffworkflow_backend.routes.process_api_blueprint.process_instance_list - summary: Returns a list of process instances for a given process model + operationId: spiffworkflow_backend.routes.process_instances_controller.process_instance_list + summary: Returns a list of process instances. tags: - Process Instances responses: @@ -611,22 +756,16 @@ paths: items: $ref: "#/components/schemas/Workflow" - /process-models/{process_group_id}/{process_model_id}/script-unit-tests: + /process-models/{modified_process_model_identifier}/script-unit-tests: parameters: - - name: process_group_id - in: path - required: true - description: The unique id of an existing process group - schema: - type: string - - name: process_model_id + - name: modified_process_model_identifier in: path required: true description: The unique id of an existing process model. 
schema: type: string post: - operationId: spiffworkflow_backend.routes.process_api_blueprint.script_unit_test_create + operationId: spiffworkflow_backend.routes.script_unit_tests_controller.script_unit_test_create summary: Create script unit test based on given criteria tags: - Script Unit Test @@ -638,22 +777,16 @@ paths: schema: $ref: "#/components/schemas/Workflow" - /process-models/{process_group_id}/{process_model_id}/script-unit-tests/run: + /process-models/{modified_process_model_identifier}/script-unit-tests/run: parameters: - - name: process_group_id - in: path - required: true - description: The unique id of an existing process group - schema: - type: string - - name: process_model_id + - name: modified_process_model_identifier in: path required: true description: The unique id of an existing process model. schema: type: string post: - operationId: spiffworkflow_backend.routes.process_api_blueprint.script_unit_test_run + operationId: spiffworkflow_backend.routes.script_unit_tests_controller.script_unit_test_run summary: Run a given script unit test. tags: - Script Unit Test @@ -674,7 +807,7 @@ paths: schema: type: string post: - operationId: spiffworkflow_backend.routes.process_api_blueprint.process_instance_create + operationId: spiffworkflow_backend.routes.process_instances_controller.process_instance_create summary: Creates an process instance from a process model and returns the instance tags: - Process Instances @@ -686,6 +819,133 @@ paths: schema: $ref: "#/components/schemas/Workflow" + /process-instances/for-me/{modified_process_model_identifier}/{process_instance_id}/task-info: + parameters: + - name: modified_process_model_identifier + in: path + required: true + description: The unique id of an existing process model + schema: + type: string + - name: process_instance_id + in: path + required: true + description: The unique id of an existing process instance. 
+ schema: + type: integer + - name: process_identifier + in: query + required: false + description: The identifier of the process to use for the diagram. Useful for displaying the diagram for a call activity. + schema: + type: string + - name: all_tasks + in: query + required: false + description: If true, this wil return all tasks associated with the process instance and not just user tasks. + schema: + type: boolean + - name: spiff_step + in: query + required: false + description: If set will return the tasks as they were during a specific step of execution. + schema: + type: integer + get: + tags: + - Process Instances + operationId: spiffworkflow_backend.routes.process_instances_controller.process_instance_task_list_without_task_data_for_me + summary: returns the list of all user tasks associated with process instance without the task data + responses: + "200": + description: list of tasks + content: + application/json: + schema: + type: array + items: + $ref: "#/components/schemas/Task" + + /process-instances/{modified_process_model_identifier}/{process_instance_id}/task-info: + parameters: + - name: modified_process_model_identifier + in: path + required: true + description: The unique id of an existing process model + schema: + type: string + - name: process_instance_id + in: path + required: true + description: The unique id of an existing process instance. + schema: + type: integer + - name: process_identifier + in: query + required: false + description: The identifier of the process to use for the diagram. Useful for displaying the diagram for a call activity. + schema: + type: string + - name: all_tasks + in: query + required: false + description: If true, this wil return all tasks associated with the process instance and not just user tasks. + schema: + type: boolean + - name: spiff_step + in: query + required: false + description: If set will return the tasks as they were during a specific step of execution. 
+ schema: + type: integer + get: + tags: + - Process Instances + operationId: spiffworkflow_backend.routes.process_instances_controller.process_instance_task_list_without_task_data + summary: returns the list of all user tasks associated with process instance without the task data + responses: + "200": + description: list of tasks + content: + application/json: + schema: + type: array + items: + $ref: "#/components/schemas/Task" + + /process-instances/for-me/{modified_process_model_identifier}/{process_instance_id}: + parameters: + - name: modified_process_model_identifier + in: path + required: true + description: The unique id of an existing process model + schema: + type: string + - name: process_instance_id + in: path + required: true + description: The unique id of an existing process instance. + schema: + type: integer + - name: process_identifier + in: query + required: false + description: The identifier of the process to use for the diagram. Useful for displaying the diagram for a call activity. + schema: + type: string + get: + tags: + - Process Instances + operationId: spiffworkflow_backend.routes.process_instances_controller.process_instance_show_for_me + summary: Show information about a process instance that is associated with me + responses: + "200": + description: One Process Instance + content: + application/json: + schema: + $ref: "#/components/schemas/Workflow" + /process-instances/{modified_process_model_identifier}/{process_instance_id}: parameters: - name: modified_process_model_identifier @@ -700,10 +960,16 @@ paths: description: The unique id of an existing process instance. schema: type: integer + - name: process_identifier + in: query + required: false + description: The identifier of the process to use for the diagram. Useful for displaying the diagram for a call activity. 
+ schema: + type: string get: tags: - Process Instances - operationId: spiffworkflow_backend.routes.process_api_blueprint.process_instance_show + operationId: spiffworkflow_backend.routes.process_instances_controller.process_instance_show summary: Show information about a process instance responses: "200": @@ -713,7 +979,7 @@ paths: schema: $ref: "#/components/schemas/Workflow" delete: - operationId: spiffworkflow_backend.routes.process_api_blueprint.process_instance_delete + operationId: spiffworkflow_backend.routes.process_instances_controller.process_instance_delete summary: Deletes a single process instance tags: - Process Instances @@ -740,7 +1006,7 @@ paths: schema: type: boolean post: - operationId: spiffworkflow_backend.routes.process_api_blueprint.process_instance_run + operationId: spiffworkflow_backend.routes.process_instances_controller.process_instance_run summary: Run a process instance tags: - Process Instances @@ -752,7 +1018,7 @@ paths: schema: $ref: "#/components/schemas/Workflow" - /process-instances/{modified_process_model_identifier}/{process_instance_id}/terminate: + /process-instance-terminate/{modified_process_model_identifier}/{process_instance_id}: parameters: - name: process_instance_id in: path @@ -761,7 +1027,7 @@ paths: schema: type: integer post: - operationId: spiffworkflow_backend.routes.process_api_blueprint.process_instance_terminate + operationId: spiffworkflow_backend.routes.process_instances_controller.process_instance_terminate summary: Terminate a process instance tags: - Process Instances @@ -773,7 +1039,7 @@ paths: schema: $ref: "#/components/schemas/OkTrue" - /process-instances/{modified_process_model_identifier}/{process_instance_id}/suspend: + /process-instance-suspend/{modified_process_model_identifier}/{process_instance_id}: parameters: - name: process_instance_id in: path @@ -782,7 +1048,7 @@ paths: schema: type: integer post: - operationId: spiffworkflow_backend.routes.process_api_blueprint.process_instance_suspend + 
operationId: spiffworkflow_backend.routes.process_instances_controller.process_instance_suspend summary: Suspend a process instance tags: - Process Instances @@ -794,7 +1060,7 @@ paths: schema: $ref: "#/components/schemas/OkTrue" - /process-instances/{modified_process_model_identifier}/{process_instance_id}/resume: + /process-instance-resume/{modified_process_model_identifier}/{process_instance_id}: parameters: - name: process_instance_id in: path @@ -803,7 +1069,7 @@ paths: schema: type: integer post: - operationId: spiffworkflow_backend.routes.process_api_blueprint.process_instance_resume + operationId: spiffworkflow_backend.routes.process_instances_controller.process_instance_resume summary: Resume a process instance tags: - Process Instances @@ -815,6 +1081,39 @@ paths: schema: $ref: "#/components/schemas/OkTrue" + /process-instance-reset/{modified_process_model_identifier}/{process_instance_id}/{spiff_step}: + parameters: + - name: modified_process_model_identifier + in: path + required: true + description: The modified process model id + schema: + type: string + - name: process_instance_id + in: path + required: true + description: The unique id of an existing process instance. + schema: + type: integer + - name: spiff_step + in: query + required: false + description: Reset the process to this state + schema: + type: integer + post: + operationId: spiffworkflow_backend.routes.process_instances_controller.process_instance_reset + summary: Reset a process instance to an earlier step + tags: + - Process Instances + responses: + "200": + description: Empty ok true response on successful resume. 
+ content: + application/json: + schema: + $ref: "#/components/schemas/OkTrue" + /process-instances/reports: parameters: - name: page @@ -830,7 +1129,7 @@ paths: schema: type: integer get: - operationId: spiffworkflow_backend.routes.process_api_blueprint.process_instance_report_list + operationId: spiffworkflow_backend.routes.process_instances_controller.process_instance_report_list summary: Returns all process instance reports for process model tags: - Process Instances @@ -844,7 +1143,7 @@ paths: items: $ref: "#/components/schemas/Workflow" post: - operationId: spiffworkflow_backend.routes.process_api_blueprint.process_instance_report_create + operationId: spiffworkflow_backend.routes.process_instances_controller.process_instance_report_create summary: Returns all process instance reports for process model tags: - Process Instances @@ -858,7 +1157,7 @@ paths: /process-instances/reports/columns: get: - operationId: spiffworkflow_backend.routes.process_api_blueprint.process_instance_report_column_list + operationId: spiffworkflow_backend.routes.process_instances_controller.process_instance_report_column_list summary: Returns all available columns for a process instance report. 
tags: - Process Instances @@ -893,7 +1192,7 @@ paths: schema: type: integer get: - operationId: spiffworkflow_backend.routes.process_api_blueprint.process_instance_report_show + operationId: spiffworkflow_backend.routes.process_instances_controller.process_instance_report_show summary: Returns a report of process instances for a given process model tags: - Process Instances @@ -907,7 +1206,7 @@ paths: items: $ref: "#/components/schemas/Workflow" put: - operationId: spiffworkflow_backend.routes.process_api_blueprint.process_instance_report_update + operationId: spiffworkflow_backend.routes.process_instances_controller.process_instance_report_update summary: Updates a process instance report tags: - Process Instances @@ -919,7 +1218,7 @@ paths: schema: $ref: "#/components/schemas/OkTrue" delete: - operationId: spiffworkflow_backend.routes.process_api_blueprint.process_instance_report_delete + operationId: spiffworkflow_backend.routes.process_instances_controller.process_instance_report_delete summary: Delete a process instance report tags: - Process Instances @@ -946,7 +1245,7 @@ paths: schema: type: string get: - operationId: spiffworkflow_backend.routes.process_api_blueprint.get_file + operationId: spiffworkflow_backend.routes.process_models_controller.process_model_file_show summary: Returns metadata about the file tags: - Process Model Files @@ -958,7 +1257,7 @@ paths: schema: $ref: "#/components/schemas/File" put: - operationId: spiffworkflow_backend.routes.process_api_blueprint.process_model_file_update + operationId: spiffworkflow_backend.routes.process_models_controller.process_model_file_update summary: save the contents to the given file tags: - Process Model Files @@ -981,7 +1280,7 @@ paths: schema: $ref: "#/components/schemas/OkTrue" delete: - operationId: spiffworkflow_backend.routes.process_api_blueprint.process_model_file_delete + operationId: spiffworkflow_backend.routes.process_models_controller.process_model_file_delete summary: Removes an existing 
process model file tags: - Process Model Files @@ -1010,8 +1309,7 @@ paths: get: tags: - Tasks - # security: [] - operationId: spiffworkflow_backend.routes.process_api_blueprint.task_list_my_tasks + operationId: spiffworkflow_backend.routes.tasks_controller.task_list_my_tasks summary: returns the list of ready or waiting tasks for a user responses: "200": @@ -1040,7 +1338,7 @@ paths: get: tags: - Process Instances - operationId: spiffworkflow_backend.routes.process_api_blueprint.task_list_for_my_open_processes + operationId: spiffworkflow_backend.routes.tasks_controller.task_list_for_my_open_processes summary: returns the list of tasks for given user's open process instances responses: "200": @@ -1069,7 +1367,7 @@ paths: get: tags: - Process Instances - operationId: spiffworkflow_backend.routes.process_api_blueprint.task_list_for_me + operationId: spiffworkflow_backend.routes.tasks_controller.task_list_for_me summary: returns the list of tasks for given user's open process instances responses: "200": @@ -1083,6 +1381,12 @@ paths: /tasks/for-my-groups: parameters: + - name: user_group_identifier + in: query + required: false + description: The identifier of the group to get the tasks for + schema: + type: string - name: page in: query required: false @@ -1098,7 +1402,7 @@ paths: get: tags: - Process Instances - operationId: spiffworkflow_backend.routes.process_api_blueprint.task_list_for_my_groups + operationId: spiffworkflow_backend.routes.tasks_controller.task_list_for_my_groups summary: returns the list of tasks for given user's open process instances responses: "200": @@ -1110,6 +1414,22 @@ paths: items: $ref: "#/components/schemas/Task" + /user-groups/for-current-user: + get: + tags: + - Process Instances + operationId: spiffworkflow_backend.routes.process_api_blueprint.user_group_list_for_current_user + summary: Group identifiers for current logged in user + responses: + "200": + description: list of user groups + content: + application/json: + schema: + type: 
array + items: + $ref: "#/components/schemas/Task" + /task-data/{modified_process_model_identifier}/{process_instance_id}: parameters: - name: modified_process_model_identifier @@ -1139,8 +1459,8 @@ paths: get: tags: - Process Instances - operationId: spiffworkflow_backend.routes.process_api_blueprint.process_instance_task_list - summary: returns the list of all user tasks associated with process instance + operationId: spiffworkflow_backend.routes.process_instances_controller.process_instance_task_list_with_task_data + summary: returns the list of all user tasks associated with process instance with the task data responses: "200": description: list of tasks @@ -1172,7 +1492,7 @@ paths: schema: type: string put: - operationId: spiffworkflow_backend.routes.process_api_blueprint.update_task_data + operationId: spiffworkflow_backend.routes.process_api_blueprint.task_data_update summary: Update the task data for requested instance and task tags: - Process Instances @@ -1184,11 +1504,104 @@ paths: schema: $ref: "#/components/schemas/Workflow" + /process-data/{modified_process_model_identifier}/{process_instance_id}/{process_data_identifier}: + parameters: + - name: modified_process_model_identifier + in: path + required: true + description: The modified id of an existing process model + schema: + type: string + - name: process_instance_id + in: path + required: true + description: The unique id of an existing process instance. + schema: + type: integer + - name: process_data_identifier + in: path + required: true + description: The identifier of the process data. + schema: + type: string + get: + operationId: spiffworkflow_backend.routes.process_api_blueprint.process_data_show + summary: Fetch the process data value. + tags: + - Data Objects + responses: + "200": + description: Fetch succeeded. 
+ content: + application/json: + schema: + $ref: "#/components/schemas/Workflow" + + /send-event/{modified_process_model_identifier}/{process_instance_id}: + parameters: + - name: modified_process_model_identifier + in: path + required: true + description: The modified id of an existing process model + schema: + type: string + - name: process_instance_id + in: path + required: true + description: The unique id of the process instance + schema: + type: string + post: + operationId: spiffworkflow_backend.routes.process_api_blueprint.send_bpmn_event + summary: Send a BPMN event to the process + tags: + - Process Instances + responses: + "200": + description: Event Sent Successfully + content: + application/json: + schema: + $ref: "#/components/schemas/Workflow" + + /task-complete/{modified_process_model_identifier}/{process_instance_id}/{task_id}: + parameters: + - name: modified_process_model_identifier + in: path + required: true + description: The modified id of an existing process model + schema: + type: string + - name: process_instance_id + in: path + required: true + description: The unique id of the process instance + schema: + type: string + - name: task_id + in: path + required: true + description: The unique id of the task. 
+ schema: + type: string + post: + operationId: spiffworkflow_backend.routes.process_api_blueprint.manual_complete_task + summary: Mark a task complete without executing it + tags: + - Process Instances + responses: + "200": + description: Event Sent Successfully + content: + application/json: + schema: + $ref: "#/components/schemas/Workflow" + /service-tasks: get: tags: - Service Tasks - operationId: spiffworkflow_backend.routes.process_api_blueprint.service_task_list + operationId: spiffworkflow_backend.routes.service_tasks_controller.service_task_list summary: Gets all available service task connectors responses: "200": @@ -1202,7 +1615,7 @@ paths: get: tags: - Authentications - operationId: spiffworkflow_backend.routes.process_api_blueprint.authentication_list + operationId: spiffworkflow_backend.routes.service_tasks_controller.authentication_list summary: Gets all available authentications from connector proxy responses: "200": @@ -1239,11 +1652,9 @@ paths: schema: type: string get: - # disable security so we can get the token from query params instead - security: [] tags: - Authentications - operationId: spiffworkflow_backend.routes.process_api_blueprint.authentication_callback + operationId: spiffworkflow_backend.routes.service_tasks_controller.authentication_callback summary: Callback to backend responses: "200": @@ -1276,7 +1687,7 @@ paths: get: tags: - Tasks - operationId: spiffworkflow_backend.routes.process_api_blueprint.task_show + operationId: spiffworkflow_backend.routes.tasks_controller.task_show summary: Gets one task that a user wants to complete responses: "200": @@ -1288,7 +1699,7 @@ paths: put: tags: - Tasks - operationId: spiffworkflow_backend.routes.process_api_blueprint.task_submit + operationId: spiffworkflow_backend.routes.tasks_controller.task_submit summary: Update the form data for a tasks requestBody: content: @@ -1332,7 +1743,7 @@ paths: get: tags: - Messages - operationId: 
spiffworkflow_backend.routes.process_api_blueprint.message_instance_list + operationId: spiffworkflow_backend.routes.messages_controller.message_instance_list summary: Get a list of message instances responses: "200": @@ -1353,7 +1764,7 @@ paths: post: tags: - Messages - operationId: spiffworkflow_backend.routes.process_api_blueprint.message_start + operationId: spiffworkflow_backend.routes.messages_controller.message_start summary: Instantiate and run a given process model with a message start event matching given identifier requestBody: content: @@ -1397,7 +1808,7 @@ paths: get: tags: - Process Instances - operationId: spiffworkflow_backend.routes.process_api_blueprint.process_instance_log_list + operationId: spiffworkflow_backend.routes.process_instances_controller.process_instance_log_list summary: returns a list of logs associated with the process instance responses: "200": @@ -1422,7 +1833,7 @@ paths: schema: type: integer post: - operationId: spiffworkflow_backend.routes.process_api_blueprint.add_secret + operationId: spiffworkflow_backend.routes.secrets_controller.secret_create summary: Create a secret for a key and value tags: - Secrets @@ -1439,7 +1850,7 @@ paths: schema: type: number get: - operationId: spiffworkflow_backend.routes.process_api_blueprint.secret_list + operationId: spiffworkflow_backend.routes.secrets_controller.secret_list summary: Return list of all secrets tags: - Secrets @@ -1460,7 +1871,7 @@ paths: schema: type: string get: - operationId: spiffworkflow_backend.routes.process_api_blueprint.get_secret + operationId: spiffworkflow_backend.routes.secrets_controller.secret_show summary: Return a secret value for a key tags: - Secrets @@ -1472,7 +1883,7 @@ paths: schema: $ref: "#/components/schemas/Secret" delete: - operationId: spiffworkflow_backend.routes.process_api_blueprint.delete_secret + operationId: spiffworkflow_backend.routes.secrets_controller.secret_delete summary: Delete an existing secret tags: - Secrets @@ -1484,7 +1895,7 @@ 
paths: "404": description: Secret does not exist put: - operationId: spiffworkflow_backend.routes.process_api_blueprint.update_secret + operationId: spiffworkflow_backend.routes.secrets_controller.secret_update summary: Modify an existing secret tags: - Secrets @@ -1543,16 +1954,6 @@ components: scopes: read_email: read email x-tokenInfoFunc: spiffworkflow_backend.routes.user.get_scope - # oAuth2AuthCode: - # type: oauth2 - # description: authenticate with openid server - # flows: - # implicit: - # authorizationUrl: /v1.0/login_api - # scopes: - # uid: uid - # x-tokenInfoUrl: localhost:7000/v1.0/login_api_return - # x-tokenInfoFunc: spiffworkflow_backend.routes.user.get_scope schemas: OkTrue: diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/config/__init__.py b/spiffworkflow-backend/src/spiffworkflow_backend/config/__init__.py index 106b07357..fb5901f03 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/config/__init__.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/config/__init__.py @@ -17,21 +17,21 @@ def setup_database_uri(app: Flask) -> None: if app.config.get("SPIFFWORKFLOW_BACKEND_DATABASE_URI") is None: database_name = f"spiffworkflow_backend_{app.config['ENV_IDENTIFIER']}" if app.config.get("SPIFF_DATABASE_TYPE") == "sqlite": - app.config[ - "SQLALCHEMY_DATABASE_URI" - ] = f"sqlite:///{app.instance_path}/db_{app.config['ENV_IDENTIFIER']}.sqlite3" + app.config["SQLALCHEMY_DATABASE_URI"] = ( + f"sqlite:///{app.instance_path}/db_{app.config['ENV_IDENTIFIER']}.sqlite3" + ) elif app.config.get("SPIFF_DATABASE_TYPE") == "postgres": - app.config[ - "SQLALCHEMY_DATABASE_URI" - ] = f"postgresql://spiffworkflow_backend:spiffworkflow_backend@localhost:5432/{database_name}" + app.config["SQLALCHEMY_DATABASE_URI"] = ( + f"postgresql://spiffworkflow_backend:spiffworkflow_backend@localhost:5432/{database_name}" + ) else: # use pswd to trick flake8 with hardcoded passwords db_pswd = os.environ.get("DB_PASSWORD") if db_pswd is None: db_pswd = 
"" - app.config[ - "SQLALCHEMY_DATABASE_URI" - ] = f"mysql+mysqlconnector://root:{db_pswd}@localhost/{database_name}" + app.config["SQLALCHEMY_DATABASE_URI"] = ( + f"mysql+mysqlconnector://root:{db_pswd}@localhost/{database_name}" + ) else: app.config["SQLALCHEMY_DATABASE_URI"] = app.config.get( "SPIFFWORKFLOW_BACKEND_DATABASE_URI" @@ -42,6 +42,7 @@ def load_config_file(app: Flask, env_config_module: str) -> None: """Load_config_file.""" try: app.config.from_object(env_config_module) + print(f"loaded config: {env_config_module}") except ImportStringError as exception: if os.environ.get("TERRAFORM_DEPLOYED_ENVIRONMENT") != "true": raise ModuleNotFoundError( @@ -62,6 +63,7 @@ def setup_config(app: Flask) -> None: ) app.config["SQLALCHEMY_TRACK_MODIFICATIONS"] = False app.config.from_object("spiffworkflow_backend.config.default") + print("loaded config: default") env_config_prefix = "spiffworkflow_backend.config." if ( @@ -69,6 +71,7 @@ def setup_config(app: Flask) -> None: and os.environ.get("SPIFFWORKFLOW_BACKEND_ENV") is not None ): load_config_file(app, f"{env_config_prefix}terraform_deployed_environment") + print("loaded config: terraform_deployed_environment") env_config_module = env_config_prefix + app.config["ENV_IDENTIFIER"] load_config_file(app, env_config_module) @@ -87,6 +90,14 @@ def setup_config(app: Flask) -> None: "permissions", app.config["SPIFFWORKFLOW_BACKEND_PERMISSIONS_FILE_NAME"], ) + print( + "set permissions file name config:" + f" {app.config['SPIFFWORKFLOW_BACKEND_PERMISSIONS_FILE_NAME']}" + ) + print( + "set permissions file name full path:" + f" {app.config['PERMISSIONS_FILE_FULLPATH']}" + ) # unversioned (see .gitignore) config that can override everything and include secrets. 
# src/spiffworkflow_backend/config/secrets.py diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/config/default.py b/spiffworkflow-backend/src/spiffworkflow_backend/config/default.py index ed77cf87b..d0d6a4010 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/config/default.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/config/default.py @@ -27,8 +27,6 @@ CONNECTOR_PROXY_URL = environ.get( "CONNECTOR_PROXY_URL", default="http://localhost:7004" ) -GIT_COMMIT_ON_SAVE = environ.get("GIT_COMMIT_ON_SAVE", default="false") == "true" - # Open ID server OPEN_ID_SERVER_URL = environ.get( "OPEN_ID_SERVER_URL", default="http://localhost:7002/realms/spiffworkflow" @@ -63,7 +61,10 @@ SPIFFWORKFLOW_BACKEND_LOG_LEVEL = environ.get( # When a user clicks on the `Publish` button, this is the default branch this server merges into. # I.e., dev server could have `staging` here. Staging server might have `production` here. -GIT_MERGE_BRANCH = environ.get("GIT_MERGE_BRANCH", default="staging") +GIT_BRANCH_TO_PUBLISH_TO = environ.get("GIT_BRANCH_TO_PUBLISH_TO") +GIT_BRANCH = environ.get("GIT_BRANCH") +GIT_CLONE_URL_FOR_PUBLISHING = environ.get("GIT_CLONE_URL") +GIT_COMMIT_ON_SAVE = environ.get("GIT_COMMIT_ON_SAVE", default="false") == "true" # Datbase Configuration SPIFF_DATABASE_TYPE = environ.get( diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/config/dev.py b/spiffworkflow-backend/src/spiffworkflow_backend/config/dev.py index 182d08dec..cbbc269a8 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/config/dev.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/config/dev.py @@ -1,8 +1,9 @@ """Dev.""" from os import environ -GIT_MERGE_BRANCH = environ.get("GIT_MERGE_BRANCH", default="staging") +GIT_BRANCH_TO_PUBLISH_TO = environ.get("GIT_BRANCH_TO_PUBLISH_TO", default="staging") GIT_USERNAME = environ.get("GIT_USERNAME", default="sartography-automated-committer") GIT_USER_EMAIL = environ.get( "GIT_USER_EMAIL", 
default="sartography-automated-committer@users.noreply.github.com" ) +SPIFFWORKFLOW_BACKEND_PERMISSIONS_FILE_NAME = "dev.yml" diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/config/development.py b/spiffworkflow-backend/src/spiffworkflow_backend/config/development.py index c3c479460..39e10cb58 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/config/development.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/config/development.py @@ -12,3 +12,8 @@ SPIFFWORKFLOW_BACKEND_LOG_LEVEL = environ.get( RUN_BACKGROUND_SCHEDULER = ( environ.get("RUN_BACKGROUND_SCHEDULER", default="true") == "true" ) +GIT_CLONE_URL_FOR_PUBLISHING = environ.get( + "GIT_CLONE_URL", default="https://github.com/sartography/sample-process-models.git" +) +GIT_USERNAME = "sartography-automated-committer" +GIT_USER_EMAIL = f"{GIT_USERNAME}@users.noreply.github.com" diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/config/permissions/acceptance_tests.yml b/spiffworkflow-backend/src/spiffworkflow_backend/config/permissions/acceptance_tests.yml index a10b5685b..29d3c9c04 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/config/permissions/acceptance_tests.yml +++ b/spiffworkflow-backend/src/spiffworkflow_backend/config/permissions/acceptance_tests.yml @@ -1,13 +1,10 @@ groups: admin: - users: [ciadmin1] - - common-user: - users: [ciuser1] + users: [ciadmin1@spiffworkflow.org] permissions: admin: - groups: [admin, common-user] + groups: [admin] users: [] - allowed_permissions: [create, read, update, delete, list, instantiate] + allowed_permissions: [create, read, update, delete] uri: /* diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/config/permissions/dev.yml b/spiffworkflow-backend/src/spiffworkflow_backend/config/permissions/dev.yml new file mode 100644 index 000000000..a556c0139 --- /dev/null +++ b/spiffworkflow-backend/src/spiffworkflow_backend/config/permissions/dev.yml @@ -0,0 +1,151 @@ +default_group: everybody + +groups: + admin: 
+ users: + [ + admin@spiffworkflow.org, + jakub@status.im, + jarrad@status.im, + kb@sartography.com, + alex@sartography.com, + dan@sartography.com, + mike@sartography.com, + jason@sartography.com, + j@sartography.com, + elizabeth@sartography.com, + jon@sartography.com, + ] + + Finance Team: + users: + [ + jakub@status.im, + amir@status.im, + jarrad@status.im, + sasha@status.im, + fin@status.im, + fin1@status.im, + alex@sartography.com, + dan@sartography.com, + mike@sartography.com, + jason@sartography.com, + j@sartography.com, + elizabeth@sartography.com, + jon@sartography.com, + ] + + demo: + users: + [ + harmeet@status.im, + sasha@status.im, + manuchehr@status.im, + core@status.im, + fin@status.im, + fin1@status.im, + lead@status.im, + lead1@status.im, + ] + + test: + users: + [ + natalia@sartography.com, + ] + +permissions: + admin: + groups: [admin] + users: [] + allowed_permissions: [create, read, update, delete] + uri: /* + + # open system defaults for everybody + read-all-process-groups: + groups: [everybody] + users: [] + allowed_permissions: [read] + uri: /process-groups/* + read-all-process-models: + groups: [everybody] + users: [] + allowed_permissions: [read] + uri: /process-models/* + + # basic perms for everybody + read-all-process-instances-for-me: + groups: [everybody] + users: [] + allowed_permissions: [read] + uri: /process-instances/for-me/* + read-process-instance-reports: + groups: [everybody] + users: [] + allowed_permissions: [create, read, update, delete] + uri: /process-instances/reports/* + processes-read: + groups: [everybody] + users: [] + allowed_permissions: [read] + uri: /processes + service-tasks: + groups: [everybody] + users: [] + allowed_permissions: [read] + uri: /service-tasks + tasks-crud: + groups: [everybody] + users: [] + allowed_permissions: [create, read, update, delete] + uri: /tasks/* + user-groups-for-current-user: + groups: [everybody] + users: [] + allowed_permissions: [read] + uri: /user-groups/for-current-user + + + 
finance-admin: + groups: ["Finance Team"] + users: [] + allowed_permissions: [create, read, update, delete] + uri: /process-groups/manage-procurement:procurement:* + + manage-revenue-streams-instances: + groups: ["demo"] + users: [] + allowed_permissions: [create] + uri: /process-instances/manage-revenue-streams:product-revenue-streams:customer-contracts-trade-terms/* + manage-procurement-invoice-instances: + groups: ["demo"] + users: [] + allowed_permissions: [create] + uri: /process-instances/manage-procurement:procurement:core-contributor-invoice-management:* + manage-procurement-instances: + groups: ["demo"] + users: [] + allowed_permissions: [create] + uri: /process-instances/manage-procurement:vendor-lifecycle-management:* + + manage-revenue-streams-instances-for-me: + groups: ["demo"] + users: [] + allowed_permissions: [read] + uri: /process-instances/for-me/manage-revenue-streams:product-revenue-streams:customer-contracts-trade-terms/* + manage-procurement-invoice-instances-for-me: + groups: ["demo"] + users: [] + allowed_permissions: [read] + uri: /process-instances/for-me/manage-procurement:procurement:core-contributor-invoice-management:* + manage-procurement-instances-for-me: + groups: ["demo"] + users: [] + allowed_permissions: [read] + uri: /process-instances/for-me/manage-procurement:vendor-lifecycle-management:* + + create-test-instances: + groups: ["test"] + users: [] + allowed_permissions: [create, read] + uri: /process-instances/misc:test:* diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/config/permissions/development.yml b/spiffworkflow-backend/src/spiffworkflow_backend/config/permissions/development.yml index 419c925fa..ee40f839b 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/config/permissions/development.yml +++ b/spiffworkflow-backend/src/spiffworkflow_backend/config/permissions/development.yml @@ -10,57 +10,60 @@ groups: admin: users: [ - admin, - jakub, - kb, - alex, - dan, - mike, - jason, - j, - jarrad, - 
elizabeth, - jon, - natalia, + admin@spiffworkflow.org, + jakub@status.im, + jarrad@status.im, + kb@sartography.com, + alex@sartography.com, + dan@sartography.com, + mike@sartography.com, + jason@sartography.com, + j@sartography.com, + elizabeth@sartography.com, + jon@sartography.com, ] Finance Team: users: [ - jakub, - alex, - dan, - mike, - jason, - j, - amir, - jarrad, - elizabeth, - jon, - natalia, - sasha, - fin, - fin1, + jakub@status.im, + amir@status.im, + jarrad@status.im, + sasha@status.im, + fin@status.im, + fin1@status.im, + alex@sartography.com, + dan@sartography.com, + mike@sartography.com, + jason@sartography.com, + j@sartography.com, + elizabeth@sartography.com, + jon@sartography.com, ] demo: users: [ - core, - fin, - fin1, - harmeet, - sasha, - manuchehr, - lead, - lead1 + harmeet@status.im, + sasha@status.im, + manuchehr@status.im, + core@status.im, + fin@status.im, + fin1@status.im, + lead@status.im, + lead1@status.im, ] - core-contributor: + test: users: [ - core, - harmeet, + natalia@sartography.com, + ] + + admin-ro: + users: + [ + j@sartography.com, ] permissions: @@ -69,135 +72,102 @@ permissions: users: [] allowed_permissions: [create, read, update, delete] uri: /* - - tasks-crud: - groups: [everybody] - users: [] - allowed_permissions: [create, read, update, delete] - uri: /v1.0/tasks/* - service-tasks: - groups: [everybody] + admin-readonly: + groups: [admin-ro] users: [] allowed_permissions: [read] - uri: /v1.0/service-tasks + uri: /* + admin-process-instances-for-readonly: + groups: [admin-ro] + users: [] + allowed_permissions: [create, read, update, delete] + uri: /process-instances/* - - # read all for everybody + # open system defaults for everybody read-all-process-groups: groups: [everybody] users: [] allowed_permissions: [read] - uri: /v1.0/process-groups/* + uri: /process-groups/* read-all-process-models: groups: [everybody] users: [] allowed_permissions: [read] - uri: /v1.0/process-models/* - read-all-process-instance: + uri: 
/process-models/* + + # basic perms for everybody + read-all-process-instances-for-me: groups: [everybody] users: [] allowed_permissions: [read] - uri: /v1.0/process-instances/* + uri: /process-instances/for-me/* read-process-instance-reports: groups: [everybody] users: [] - allowed_permissions: [read] - uri: /v1.0/process-instances/reports/* + allowed_permissions: [create, read, update, delete] + uri: /process-instances/reports/* processes-read: groups: [everybody] users: [] allowed_permissions: [read] - uri: /v1.0/processes - - task-data-read: - groups: [demo] + uri: /processes + service-tasks: + groups: [everybody] users: [] allowed_permissions: [read] - uri: /v1.0/task-data/* + uri: /service-tasks + tasks-crud: + groups: [everybody] + users: [] + allowed_permissions: [create, read, update, delete] + uri: /tasks/* + user-groups-for-current-user: + groups: [everybody] + users: [] + allowed_permissions: [read] + uri: /user-groups/for-current-user - manage-procurement-admin: - groups: ["Project Lead"] - users: [] - allowed_permissions: [create, read, update, delete] - uri: /v1.0/process-groups/manage-procurement:* - manage-procurement-admin-slash: - groups: ["Project Lead"] - users: [] - allowed_permissions: [create, read, update, delete] - uri: /v1.0/process-groups/manage-procurement/* - manage-procurement-admin-models: - groups: ["Project Lead"] - users: [] - allowed_permissions: [create, read, update, delete] - uri: /v1.0/process-models/manage-procurement:* - manage-procurement-admin-models-slash: - groups: ["Project Lead"] - users: [] - allowed_permissions: [create, read, update, delete] - uri: /v1.0/process-models/manage-procurement/* - manage-procurement-admin-instances: - groups: ["Project Lead"] - users: [] - allowed_permissions: [create, read, update, delete] - uri: /v1.0/process-instances/manage-procurement:* - manage-procurement-admin-instances-slash: - groups: ["Project Lead"] - users: [] - allowed_permissions: [create, read, update, delete] - uri: 
/v1.0/process-instances/manage-procurement/* - finance-admin: groups: ["Finance Team"] users: [] allowed_permissions: [create, read, update, delete] - uri: /v1.0/process-groups/manage-procurement:procurement:* + uri: /process-groups/manage-procurement:procurement:* - manage-revenue-streams-instantiate: - groups: ["core-contributor", "demo"] - users: [] - allowed_permissions: [create] - uri: /v1.0/process-models/manage-revenue-streams:product-revenue-streams:customer-contracts-trade-terms/* manage-revenue-streams-instances: - groups: ["core-contributor", "demo"] - users: [] - allowed_permissions: [create, read] - uri: /v1.0/process-instances/manage-revenue-streams:product-revenue-streams:customer-contracts-trade-terms/* - - manage-procurement-invoice-instantiate: - groups: ["core-contributor", "demo"] + groups: ["demo"] users: [] allowed_permissions: [create] - uri: /v1.0/process-models/manage-procurement:procurement:core-contributor-invoice-management:* + uri: /process-instances/manage-revenue-streams:product-revenue-streams:customer-contracts-trade-terms/* manage-procurement-invoice-instances: - groups: ["core-contributor", "demo"] - users: [] - allowed_permissions: [create, read] - uri: /v1.0/process-instances/manage-procurement:procurement:core-contributor-invoice-management:* - - manage-procurement-instantiate: - groups: ["core-contributor", "demo"] + groups: ["demo"] users: [] allowed_permissions: [create] - uri: /v1.0/process-models/manage-procurement:vendor-lifecycle-management:* + uri: /process-instances/manage-procurement:procurement:core-contributor-invoice-management:* manage-procurement-instances: - groups: ["core-contributor", "demo"] - users: [] - allowed_permissions: [create, read] - uri: /v1.0/process-instances/manage-procurement:vendor-lifecycle-management:* - - core1-admin-models-instantiate: - groups: ["core-contributor", "Finance Team"] + groups: ["demo"] users: [] allowed_permissions: [create] - uri: 
/v1.0/process-models/misc:category_number_one:process-model-with-form/process-instances - core1-admin-instances: - groups: ["core-contributor", "Finance Team"] + uri: /process-instances/manage-procurement:vendor-lifecycle-management:* + + manage-revenue-streams-instances-for-me: + groups: ["demo"] + users: [] + allowed_permissions: [read] + uri: /process-instances/for-me/manage-revenue-streams:product-revenue-streams:customer-contracts-trade-terms/* + manage-procurement-invoice-instances-for-me: + groups: ["demo"] + users: [] + allowed_permissions: [read] + uri: /process-instances/for-me/manage-procurement:procurement:core-contributor-invoice-management:* + manage-procurement-instances-for-me: + groups: ["demo"] + users: [] + allowed_permissions: [read] + uri: /process-instances/for-me/manage-procurement:vendor-lifecycle-management:* + + create-test-instances: + groups: ["test"] users: [] allowed_permissions: [create, read] - uri: /v1.0/process-instances/misc:category_number_one:process-model-with-form:* - core1-admin-instances-slash: - groups: ["core-contributor", "Finance Team"] - users: [] - allowed_permissions: [create, read] - uri: /v1.0/process-instances/misc:category_number_one:process-model-with-form/* + uri: /process-instances/misc:test:* diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/config/permissions/example.yml b/spiffworkflow-backend/src/spiffworkflow_backend/config/permissions/example.yml index 79bfed81d..248a400b4 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/config/permissions/example.yml +++ b/spiffworkflow-backend/src/spiffworkflow_backend/config/permissions/example.yml @@ -2,14 +2,17 @@ default_group: everybody users: admin: + service: local_open_id email: admin@spiffworkflow.org password: admin preferred_username: Admin nelson: + service: local_open_id email: nelson@spiffworkflow.org password: nelson preferred_username: Nelson malala: + service: local_open_id email: malala@spiffworkflow.org password: malala 
preferred_username: Malala @@ -18,17 +21,17 @@ groups: admin: users: [ - admin, + admin@spiffworkflow.org, ] Education: users: [ - malala + malala@spiffworkflow.org ] President: users: [ - nelson + nelson@spiffworkflow.org ] permissions: @@ -44,45 +47,44 @@ permissions: groups: [everybody] users: [] allowed_permissions: [create, read, update, delete] - uri: /v1.0/tasks/* + uri: /tasks/* # Everyone can see everything (all groups, and processes are visible) read-all-process-groups: groups: [ everybody ] users: [ ] allowed_permissions: [ read ] - uri: /v1.0/process-groups/* + uri: /process-groups/* read-all-process-models: groups: [ everybody ] users: [ ] allowed_permissions: [ read ] - uri: /v1.0/process-models/* + uri: /process-models/* read-all-process-instance: groups: [ everybody ] users: [ ] allowed_permissions: [ read ] - uri: /v1.0/process-instances/* + uri: /process-instances/* read-process-instance-reports: groups: [ everybody ] users: [ ] allowed_permissions: [ read ] - uri: /v1.0/process-instances/reports/* + uri: /process-instances/reports/* processes-read: groups: [ everybody ] users: [ ] allowed_permissions: [ read ] - uri: /v1.0/processes - - # Members of the Education group can change they processes work. + uri: /processes + # Members of the Education group can change the processes under "education". education-admin: groups: ["Education", "President"] users: [] allowed_permissions: [create, read, update, delete] - uri: /v1.0/process-groups/education:* + uri: /process-groups/education:* # Anyone can start an education process. 
education-everybody: groups: [everybody] users: [] allowed_permissions: [create, read] - uri: /v1.0/process-instances/misc:category_number_one:process-model-with-form/* + uri: /process-instances/misc:category_number_one:process-model-with-form/* diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/config/permissions/qa1.yml b/spiffworkflow-backend/src/spiffworkflow_backend/config/permissions/qa1.yml new file mode 100644 index 000000000..049c991ed --- /dev/null +++ b/spiffworkflow-backend/src/spiffworkflow_backend/config/permissions/qa1.yml @@ -0,0 +1,12 @@ +default_group: everybody + +groups: + admin: + users: [admin@spiffworkflow.org] + +permissions: + admin: + groups: [admin] + users: [] + allowed_permissions: [create, read, update, delete] + uri: /* diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/config/permissions/staging.yml b/spiffworkflow-backend/src/spiffworkflow_backend/config/permissions/staging.yml new file mode 100644 index 000000000..9816ca939 --- /dev/null +++ b/spiffworkflow-backend/src/spiffworkflow_backend/config/permissions/staging.yml @@ -0,0 +1,148 @@ +default_group: everybody + +groups: + admin: + users: + [ + admin@spiffworkflow.org, + jakub@status.im, + jarrad@status.im, + kb@sartography.com, + alex@sartography.com, + dan@sartography.com, + mike@sartography.com, + jason@sartography.com, + j@sartography.com, + elizabeth@sartography.com, + jon@sartography.com, + ] + + Finance Team: + users: + [ + jakub@status.im, + amir@status.im, + jarrad@status.im, + sasha@status.im, + fin@status.im, + fin1@status.im, + alex@sartography.com, + dan@sartography.com, + mike@sartography.com, + jason@sartography.com, + j@sartography.com, + elizabeth@sartography.com, + jon@sartography.com, + ] + + demo: + users: + [ + harmeet@status.im, + sasha@status.im, + manuchehr@status.im, + core@status.im, + fin@status.im, + fin1@status.im, + lead@status.im, + lead1@status.im, + ] + test: + users: + [ + natalia@sartography.com, + ] + +permissions: + 
admin: + groups: [admin] + users: [] + allowed_permissions: [read] + uri: /* + admin-process-instances: + groups: [admin] + users: [] + allowed_permissions: [create, read, update, delete] + uri: /process-instances/* + + # open system defaults for everybody + read-all-process-groups: + groups: [everybody] + users: [] + allowed_permissions: [read] + uri: /process-groups/* + read-all-process-models: + groups: [everybody] + users: [] + allowed_permissions: [read] + uri: /process-models/* + + # basic perms for everybody + read-all-process-instances-for-me: + groups: [everybody] + users: [] + allowed_permissions: [read] + uri: /process-instances/for-me/* + read-process-instance-reports: + groups: [everybody] + users: [] + allowed_permissions: [create, read, update, delete] + uri: /process-instances/reports/* + processes-read: + groups: [everybody] + users: [] + allowed_permissions: [read] + uri: /processes + service-tasks: + groups: [everybody] + users: [] + allowed_permissions: [read] + uri: /service-tasks + tasks-crud: + groups: [everybody] + users: [] + allowed_permissions: [create, read, update, delete] + uri: /tasks/* + user-groups-for-current-user: + groups: [everybody] + users: [] + allowed_permissions: [read] + uri: /user-groups/for-current-user + + manage-revenue-streams-instances: + groups: ["demo"] + users: [] + allowed_permissions: [create] + uri: /process-instances/manage-revenue-streams:product-revenue-streams:customer-contracts-trade-terms/* + manage-procurement-invoice-instances: + groups: ["demo"] + users: [] + allowed_permissions: [create] + uri: /process-instances/manage-procurement:procurement:core-contributor-invoice-management:* + manage-procurement-instances: + groups: ["demo"] + users: [] + allowed_permissions: [create] + uri: /process-instances/manage-procurement:vendor-lifecycle-management:* + + manage-revenue-streams-instances-for-me: + groups: ["demo"] + users: [] + allowed_permissions: [read] + uri: 
/process-instances/for-me/manage-revenue-streams:product-revenue-streams:customer-contracts-trade-terms/* + manage-procurement-invoice-instances-for-me: + groups: ["demo"] + users: [] + allowed_permissions: [read] + uri: /process-instances/for-me/manage-procurement:procurement:core-contributor-invoice-management:* + manage-procurement-instances-for-me: + groups: ["demo"] + users: [] + allowed_permissions: [read] + uri: /process-instances/for-me/manage-procurement:vendor-lifecycle-management:* + + create-test-instances: + groups: ["test"] + users: [] + allowed_permissions: [create, read] + uri: /process-instances/misc:test:* diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/config/permissions/terraform_deployed_environment.yml b/spiffworkflow-backend/src/spiffworkflow_backend/config/permissions/terraform_deployed_environment.yml index 2e41e3b00..049c991ed 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/config/permissions/terraform_deployed_environment.yml +++ b/spiffworkflow-backend/src/spiffworkflow_backend/config/permissions/terraform_deployed_environment.yml @@ -2,60 +2,7 @@ default_group: everybody groups: admin: - users: - [ - admin, - jakub, - kb, - alex, - dan, - mike, - jason, - j, - jarrad, - elizabeth, - jon, - natalia, - ] - - Finance Team: - users: - [ - jakub, - alex, - dan, - mike, - jason, - j, - amir, - jarrad, - elizabeth, - jon, - natalia, - sasha, - fin, - fin1, - ] - - demo: - users: - [ - core, - fin, - fin1, - harmeet, - sasha, - manuchehr, - lead, - lead1 - ] - - core-contributor: - users: - [ - core, - harmeet, - ] + users: [admin@spiffworkflow.org] permissions: admin: @@ -63,120 +10,3 @@ permissions: users: [] allowed_permissions: [create, read, update, delete] uri: /* - - tasks-crud: - groups: [everybody] - users: [] - allowed_permissions: [create, read, update, delete] - uri: /v1.0/tasks/* - - service-tasks: - groups: [everybody] - users: [] - allowed_permissions: [read] - uri: /v1.0/service-tasks - - - # read all 
for everybody - read-all-process-groups: - groups: [everybody] - users: [] - allowed_permissions: [read] - uri: /v1.0/process-groups/* - read-all-process-models: - groups: [everybody] - users: [] - allowed_permissions: [read] - uri: /v1.0/process-models/* - read-all-process-instance: - groups: [everybody] - users: [] - allowed_permissions: [read] - uri: /v1.0/process-instances/* - read-process-instance-reports: - groups: [everybody] - users: [] - allowed_permissions: [read] - uri: /v1.0/process-instances/reports/* - processes-read: - groups: [everybody] - users: [] - allowed_permissions: [read] - uri: /v1.0/processes - - task-data-read: - groups: [demo] - users: [] - allowed_permissions: [read] - uri: /v1.0/task-data/* - - - manage-procurement-admin: - groups: ["Project Lead"] - users: [] - allowed_permissions: [create, read, update, delete] - uri: /v1.0/process-groups/manage-procurement:* - manage-procurement-admin-slash: - groups: ["Project Lead"] - users: [] - allowed_permissions: [create, read, update, delete] - uri: /v1.0/process-groups/manage-procurement/* - manage-procurement-admin-models: - groups: ["Project Lead"] - users: [] - allowed_permissions: [create, read, update, delete] - uri: /v1.0/process-models/manage-procurement:* - manage-procurement-admin-models-slash: - groups: ["Project Lead"] - users: [] - allowed_permissions: [create, read, update, delete] - uri: /v1.0/process-models/manage-procurement/* - manage-procurement-admin-instances: - groups: ["Project Lead"] - users: [] - allowed_permissions: [create, read, update, delete] - uri: /v1.0/process-instances/manage-procurement:* - manage-procurement-admin-instances-slash: - groups: ["Project Lead"] - users: [] - allowed_permissions: [create, read, update, delete] - uri: /v1.0/process-instances/manage-procurement/* - - finance-admin: - groups: ["Finance Team"] - users: [] - allowed_permissions: [create, read, update, delete] - uri: /v1.0/process-groups/manage-procurement:procurement:* - - 
manage-revenue-streams-instantiate: - groups: ["core-contributor", "demo"] - users: [] - allowed_permissions: [create] - uri: /v1.0/process-models/manage-revenue-streams:product-revenue-streams:customer-contracts-trade-terms/* - manage-revenue-streams-instances: - groups: ["core-contributor", "demo"] - users: [] - allowed_permissions: [create, read] - uri: /v1.0/process-instances/manage-revenue-streams:product-revenue-streams:customer-contracts-trade-terms/* - - manage-procurement-invoice-instantiate: - groups: ["core-contributor", "demo"] - users: [] - allowed_permissions: [create] - uri: /v1.0/process-models/manage-procurement:procurement:core-contributor-invoice-management:* - manage-procurement-invoice-instances: - groups: ["core-contributor", "demo"] - users: [] - allowed_permissions: [create, read] - uri: /v1.0/process-instances/manage-procurement:procurement:core-contributor-invoice-management:* - - manage-procurement-instantiate: - groups: ["core-contributor", "demo"] - users: [] - allowed_permissions: [create] - uri: /v1.0/process-models/manage-procurement:vendor-lifecycle-management:* - manage-procurement-instances: - groups: ["core-contributor", "demo"] - users: [] - allowed_permissions: [create, read] - uri: /v1.0/process-instances/manage-procurement:vendor-lifecycle-management:* diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/config/permissions/testing.yml b/spiffworkflow-backend/src/spiffworkflow_backend/config/permissions/testing.yml index c678205df..79a137104 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/config/permissions/testing.yml +++ b/spiffworkflow-backend/src/spiffworkflow_backend/config/permissions/testing.yml @@ -1,5 +1,12 @@ default_group: everybody +users: + testadmin1: + service: https://testing/openid/thing + email: testadmin1@spiffworkflow.org + password: admin + preferred_username: El administrador de la muerte + groups: admin: users: [testadmin1, testadmin2] @@ -14,7 +21,7 @@ permissions: admin: groups: 
[admin] users: [] - allowed_permissions: [create, read, update, delete, list, instantiate] + allowed_permissions: [create, read, update, delete] uri: /* read-all: @@ -27,29 +34,29 @@ permissions: groups: [everybody] users: [] allowed_permissions: [create, read, update, delete] - uri: /v1.0/tasks/* + uri: /tasks/* # TODO: all uris should really have the same structure finance-admin-group: groups: ["Finance Team"] users: [testuser4] allowed_permissions: [create, read, update, delete] - uri: /v1.0/process-groups/finance/* + uri: /process-groups/finance/* finance-admin-model: groups: ["Finance Team"] users: [testuser4] allowed_permissions: [create, read, update, delete] - uri: /v1.0/process-models/finance/* + uri: /process-models/finance/* finance-admin-model-lanes: groups: ["Finance Team"] users: [testuser4] allowed_permissions: [create, read, update, delete] - uri: /v1.0/process-models/finance:model_with_lanes/* + uri: /process-models/finance:model_with_lanes/* finance-admin-instance-run: groups: ["Finance Team"] users: [testuser4] allowed_permissions: [create, read, update, delete] - uri: /v1.0/process-instances/* + uri: /process-instances/* diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/config/qa1.py b/spiffworkflow-backend/src/spiffworkflow_backend/config/qa1.py new file mode 100644 index 000000000..2f8ad5fca --- /dev/null +++ b/spiffworkflow-backend/src/spiffworkflow_backend/config/qa1.py @@ -0,0 +1,11 @@ +"""Qa1.""" +from os import environ + +GIT_BRANCH_TO_PUBLISH_TO = environ.get("GIT_BRANCH_TO_PUBLISH_TO", default="qa2") +GIT_USERNAME = environ.get("GIT_USERNAME", default="sartography-automated-committer") +GIT_USER_EMAIL = environ.get( + "GIT_USER_EMAIL", default="sartography-automated-committer@users.noreply.github.com" +) +SPIFFWORKFLOW_BACKEND_PERMISSIONS_FILE_NAME = environ.get( + "SPIFFWORKFLOW_BACKEND_PERMISSIONS_FILE_NAME", default="qa1.yml" +) diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/config/staging.py 
b/spiffworkflow-backend/src/spiffworkflow_backend/config/staging.py new file mode 100644 index 000000000..807163315 --- /dev/null +++ b/spiffworkflow-backend/src/spiffworkflow_backend/config/staging.py @@ -0,0 +1,7 @@ +"""Staging.""" +from os import environ + +GIT_BRANCH = environ.get("GIT_BRANCH", default="staging") +GIT_BRANCH_TO_PUBLISH_TO = environ.get("GIT_BRANCH_TO_PUBLISH_TO", default="main") +GIT_COMMIT_ON_SAVE = False +SPIFFWORKFLOW_BACKEND_PERMISSIONS_FILE_NAME = "staging.yml" diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/config/terraform_deployed_environment.py b/spiffworkflow-backend/src/spiffworkflow_backend/config/terraform_deployed_environment.py index 4310d76a1..efd451834 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/config/terraform_deployed_environment.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/config/terraform_deployed_environment.py @@ -5,8 +5,8 @@ from os import environ environment_identifier_for_this_config_file_only = environ["SPIFFWORKFLOW_BACKEND_ENV"] GIT_COMMIT_ON_SAVE = True -GIT_USERNAME = environment_identifier_for_this_config_file_only -GIT_USER_EMAIL = f"{environment_identifier_for_this_config_file_only}@example.com" +GIT_USERNAME = "sartography-automated-committer" +GIT_USER_EMAIL = f"{GIT_USERNAME}@users.noreply.github.com" SPIFFWORKFLOW_BACKEND_PERMISSIONS_FILE_NAME = environ.get( "SPIFFWORKFLOW_BACKEND_PERMISSIONS_FILE_NAME", default="terraform_deployed_environment.yml", @@ -24,3 +24,6 @@ SPIFFWORKFLOW_BACKEND_URL = ( f"https://api.{environment_identifier_for_this_config_file_only}.spiffworkflow.org" ) CONNECTOR_PROXY_URL = f"https://connector-proxy.{environment_identifier_for_this_config_file_only}.spiffworkflow.org" +GIT_CLONE_URL_FOR_PUBLISHING = environ.get( + "GIT_CLONE_URL", default="https://github.com/sartography/sample-process-models.git" +) diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/config/testing.py 
b/spiffworkflow-backend/src/spiffworkflow_backend/config/testing.py index bbda9db9a..605c1bccc 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/config/testing.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/config/testing.py @@ -15,6 +15,7 @@ SPIFFWORKFLOW_BACKEND_PERMISSIONS_FILE_NAME = environ.get( SPIFFWORKFLOW_BACKEND_LOG_LEVEL = environ.get( "SPIFFWORKFLOW_BACKEND_LOG_LEVEL", default="debug" ) +GIT_COMMIT_ON_SAVE = False # NOTE: set this here since nox shoves tests and src code to # different places and this allows us to know exactly where we are at the start diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/helpers/api_version.py b/spiffworkflow-backend/src/spiffworkflow_backend/helpers/api_version.py new file mode 100644 index 000000000..607b6c16b --- /dev/null +++ b/spiffworkflow-backend/src/spiffworkflow_backend/helpers/api_version.py @@ -0,0 +1,2 @@ +"""Api_version.""" +V1_API_PATH_PREFIX = "/v1.0" diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/load_database_models.py b/spiffworkflow-backend/src/spiffworkflow_backend/load_database_models.py index 71adb57c6..bc79a8e39 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/load_database_models.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/load_database_models.py @@ -17,7 +17,7 @@ from spiffworkflow_backend.models.user_group_assignment import ( from spiffworkflow_backend.models.principal import PrincipalModel # noqa: F401 -from spiffworkflow_backend.models.active_task import ActiveTaskModel # noqa: F401 +from spiffworkflow_backend.models.human_task import HumanTaskModel # noqa: F401 from spiffworkflow_backend.models.spec_reference import ( SpecReferenceCache, ) # noqa: F401 diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/models/group.py b/spiffworkflow-backend/src/spiffworkflow_backend/models/group.py index 3b7edd6ce..980fc9302 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/models/group.py +++ 
b/spiffworkflow-backend/src/spiffworkflow_backend/models/group.py @@ -27,6 +27,9 @@ class GroupModel(FlaskBpmnGroupModel): identifier = db.Column(db.String(255)) user_group_assignments = relationship("UserGroupAssignmentModel", cascade="delete") + user_group_assignments_waiting = relationship( # type: ignore + "UserGroupAssignmentWaitingModel", cascade="delete" + ) users = relationship( # type: ignore "UserModel", viewonly=True, diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/models/active_task.py b/spiffworkflow-backend/src/spiffworkflow_backend/models/human_task.py similarity index 73% rename from spiffworkflow-backend/src/spiffworkflow_backend/models/active_task.py rename to spiffworkflow-backend/src/spiffworkflow_backend/models/human_task.py index ea9e10552..940a51fc0 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/models/active_task.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/models/human_task.py @@ -1,4 +1,4 @@ -"""Active_task.""" +"""Human_task.""" from __future__ import annotations from dataclasses import dataclass @@ -8,7 +8,6 @@ from flask_bpmn.models.db import db from flask_bpmn.models.db import SpiffworkflowBaseDBModel from sqlalchemy import ForeignKey from sqlalchemy.orm import relationship -from sqlalchemy.orm import RelationshipProperty from spiffworkflow_backend.models.group import GroupModel from spiffworkflow_backend.models.process_instance import ProcessInstanceModel @@ -17,29 +16,30 @@ from spiffworkflow_backend.models.user import UserModel if TYPE_CHECKING: - from spiffworkflow_backend.models.active_task_user import ( # noqa: F401 - ActiveTaskUserModel, + from spiffworkflow_backend.models.human_task_user import ( # noqa: F401 + HumanTaskUserModel, ) @dataclass -class ActiveTaskModel(SpiffworkflowBaseDBModel): - """ActiveTaskModel.""" +class HumanTaskModel(SpiffworkflowBaseDBModel): + """HumanTaskModel.""" - __tablename__ = "active_task" + __tablename__ = "human_task" __table_args__ = ( - 
db.UniqueConstraint( - "task_id", "process_instance_id", name="active_task_unique" - ), + db.UniqueConstraint("task_id", "process_instance_id", name="human_task_unique"), ) - actual_owner: RelationshipProperty[UserModel] = relationship(UserModel) id: int = db.Column(db.Integer, primary_key=True) process_instance_id: int = db.Column( ForeignKey(ProcessInstanceModel.id), nullable=False # type: ignore ) - actual_owner_id: int = db.Column(ForeignKey(UserModel.id)) lane_assignment_id: int | None = db.Column(ForeignKey(GroupModel.id)) + completed_by_user_id: int = db.Column(ForeignKey(UserModel.id), nullable=True) + + actual_owner_id: int = db.Column(ForeignKey(UserModel.id)) + # actual_owner: RelationshipProperty[UserModel] = relationship(UserModel) + form_file_name: str | None = db.Column(db.String(50)) ui_form_file_name: str | None = db.Column(db.String(50)) @@ -52,17 +52,18 @@ class ActiveTaskModel(SpiffworkflowBaseDBModel): task_type: str = db.Column(db.String(50)) task_status: str = db.Column(db.String(50)) process_model_display_name: str = db.Column(db.String(255)) + completed: bool = db.Column(db.Boolean, default=False, nullable=False, index=True) - active_task_users = relationship("ActiveTaskUserModel", cascade="delete") + human_task_users = relationship("HumanTaskUserModel", cascade="delete") potential_owners = relationship( # type: ignore "UserModel", viewonly=True, - secondary="active_task_user", - overlaps="active_task_user,users", + secondary="human_task_user", + overlaps="human_task_user,users", ) @classmethod - def to_task(cls, task: ActiveTaskModel) -> Task: + def to_task(cls, task: HumanTaskModel) -> Task: """To_task.""" new_task = Task( task.task_id, @@ -79,7 +80,7 @@ class ActiveTaskModel(SpiffworkflowBaseDBModel): if hasattr(task, "process_model_identifier"): new_task.process_model_identifier = task.process_model_identifier - # active tasks only have status when getting the list on the home page + # human tasks only have status when getting the list 
on the home page # and it comes from the process_instance. it should not be confused with task_status. if hasattr(task, "status"): new_task.process_instance_status = task.status diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/models/active_task_user.py b/spiffworkflow-backend/src/spiffworkflow_backend/models/human_task_user.py similarity index 55% rename from spiffworkflow-backend/src/spiffworkflow_backend/models/active_task_user.py rename to spiffworkflow-backend/src/spiffworkflow_backend/models/human_task_user.py index f194c38e4..7d98880fc 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/models/active_task_user.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/models/human_task_user.py @@ -1,4 +1,4 @@ -"""Active_task_user.""" +"""Human_task_user.""" from __future__ import annotations from dataclasses import dataclass @@ -7,26 +7,26 @@ from flask_bpmn.models.db import db from flask_bpmn.models.db import SpiffworkflowBaseDBModel from sqlalchemy import ForeignKey -from spiffworkflow_backend.models.active_task import ActiveTaskModel +from spiffworkflow_backend.models.human_task import HumanTaskModel from spiffworkflow_backend.models.user import UserModel @dataclass -class ActiveTaskUserModel(SpiffworkflowBaseDBModel): - """ActiveTaskUserModel.""" +class HumanTaskUserModel(SpiffworkflowBaseDBModel): + """HumanTaskUserModel.""" - __tablename__ = "active_task_user" + __tablename__ = "human_task_user" __table_args__ = ( db.UniqueConstraint( - "active_task_id", + "human_task_id", "user_id", - name="active_task_user_unique", + name="human_task_user_unique", ), ) id = db.Column(db.Integer, primary_key=True) - active_task_id = db.Column( - ForeignKey(ActiveTaskModel.id), nullable=False, index=True # type: ignore + human_task_id = db.Column( + ForeignKey(HumanTaskModel.id), nullable=False, index=True # type: ignore ) user_id = db.Column(ForeignKey(UserModel.id), nullable=False, index=True) diff --git 
a/spiffworkflow-backend/src/spiffworkflow_backend/models/message_instance.py b/spiffworkflow-backend/src/spiffworkflow_backend/models/message_instance.py index 2559a6352..b0cc2aa34 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/models/message_instance.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/models/message_instance.py @@ -86,5 +86,6 @@ def ensure_failure_cause_is_set_if_message_instance_failed( if isinstance(instance, MessageInstanceModel): if instance.status == "failed" and instance.failure_cause is None: raise ValueError( - f"{instance.__class__.__name__}: failure_cause must be set if status is failed" + f"{instance.__class__.__name__}: failure_cause must be set if" + " status is failed" ) diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/models/permission_assignment.py b/spiffworkflow-backend/src/spiffworkflow_backend/models/permission_assignment.py index 63295f74e..04dfb5fac 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/models/permission_assignment.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/models/permission_assignment.py @@ -32,14 +32,6 @@ class Permission(enum.Enum): update = "update" delete = "delete" - # maybe read to GET process_model/process-instances instead? 
- list = "list" - - # maybe use create instead on - # POST http://localhost:7000/v1.0/process-models/category_number_one/call-activity/process-instances/* - # POST http://localhost:7000/v1.0/process-models/category_number_one/call-activity/process-instances/332/run - instantiate = "instantiate" # this is something you do to a process model - class PermissionAssignmentModel(SpiffworkflowBaseDBModel): """PermissionAssignmentModel.""" diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/models/process_instance.py b/spiffworkflow-backend/src/spiffworkflow_backend/models/process_instance.py index e6a5f6849..aa1440b45 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/models/process_instance.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/models/process_instance.py @@ -26,34 +26,12 @@ class ProcessInstanceNotFoundError(Exception): """ProcessInstanceNotFoundError.""" -class NavigationItemSchema(Schema): - """NavigationItemSchema.""" +class ProcessInstanceTaskDataCannotBeUpdatedError(Exception): + """ProcessInstanceTaskDataCannotBeUpdatedError.""" - class Meta: - """Meta.""" - fields = [ - "spec_id", - "name", - "spec_type", - "task_id", - "description", - "backtracks", - "indent", - "lane", - "state", - "children", - ] - unknown = INCLUDE - - state = marshmallow.fields.String(required=False, allow_none=True) - description = marshmallow.fields.String(required=False, allow_none=True) - backtracks = marshmallow.fields.String(required=False, allow_none=True) - lane = marshmallow.fields.String(required=False, allow_none=True) - task_id = marshmallow.fields.String(required=False, allow_none=True) - children = marshmallow.fields.List( - marshmallow.fields.Nested(lambda: NavigationItemSchema()) - ) +class ProcessInstanceCannotBeDeletedError(Exception): + """ProcessInstanceCannotBeDeletedError.""" class ProcessInstanceStatus(SpiffEnum): @@ -82,7 +60,19 @@ class ProcessInstanceModel(SpiffworkflowBaseDBModel): process_initiator_id: int = 
db.Column(ForeignKey(UserModel.id), nullable=False) process_initiator = relationship("UserModel") - active_tasks = relationship("ActiveTaskModel", cascade="delete") # type: ignore + active_human_tasks = relationship( + "HumanTaskModel", + primaryjoin=( + "and_(HumanTaskModel.process_instance_id==ProcessInstanceModel.id," + " HumanTaskModel.completed == False)" + ), + ) # type: ignore + + human_tasks = relationship( + "HumanTaskModel", + cascade="delete", + overlaps="active_human_tasks", + ) # type: ignore message_instances = relationship("MessageInstanceModel", cascade="delete") # type: ignore message_correlations = relationship("MessageCorrelationModel", cascade="delete") # type: ignore @@ -93,7 +83,7 @@ class ProcessInstanceModel(SpiffworkflowBaseDBModel): created_at_in_seconds: int = db.Column(db.Integer) status: str = db.Column(db.String(50)) - bpmn_xml_file_contents: bytes | None = None + bpmn_xml_file_contents: str | None = None bpmn_version_control_type: str = db.Column(db.String(50)) bpmn_version_control_identifier: str = db.Column(db.String(255)) spiff_step: int = db.Column(db.Integer) @@ -101,9 +91,6 @@ class ProcessInstanceModel(SpiffworkflowBaseDBModel): @property def serialized(self) -> dict[str, Any]: """Return object data in serializeable format.""" - local_bpmn_xml_file_contents = "" - if self.bpmn_xml_file_contents: - local_bpmn_xml_file_contents = self.bpmn_xml_file_contents.decode("utf-8") return { "id": self.id, "process_model_identifier": self.process_model_identifier, @@ -112,7 +99,7 @@ class ProcessInstanceModel(SpiffworkflowBaseDBModel): "start_in_seconds": self.start_in_seconds, "end_in_seconds": self.end_in_seconds, "process_initiator_id": self.process_initiator_id, - "bpmn_xml_file_contents": local_bpmn_xml_file_contents, + "bpmn_xml_file_contents": self.bpmn_xml_file_contents, "bpmn_version_control_identifier": self.bpmn_version_control_identifier, "bpmn_version_control_type": self.bpmn_version_control_type, "spiff_step": 
self.spiff_step, @@ -134,6 +121,19 @@ class ProcessInstanceModel(SpiffworkflowBaseDBModel): """Validate_status.""" return self.validate_enum_field(key, value, ProcessInstanceStatus) + def can_submit_task(self) -> bool: + """Can_submit_task.""" + return not self.has_terminal_status() and self.status != "suspended" + + def has_terminal_status(self) -> bool: + """Has_terminal_status.""" + return self.status in self.terminal_statuses() + + @classmethod + def terminal_statuses(cls) -> list[str]: + """Terminal_statuses.""" + return ["complete", "error", "terminated"] + class ProcessInstanceModelSchema(Schema): """ProcessInstanceModelSchema.""" diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/models/process_instance_metadata.py b/spiffworkflow-backend/src/spiffworkflow_backend/models/process_instance_metadata.py index c9003594b..f2e4c2221 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/models/process_instance_metadata.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/models/process_instance_metadata.py @@ -1,4 +1,4 @@ -"""Spiff_step_details.""" +"""Process_instance_metadata.""" from dataclasses import dataclass from flask_bpmn.models.db import db diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/models/spec_reference.py b/spiffworkflow-backend/src/spiffworkflow_backend/models/spec_reference.py index 1e85f7229..50b73fbae 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/models/spec_reference.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/models/spec_reference.py @@ -8,6 +8,10 @@ from marshmallow import INCLUDE from sqlalchemy import UniqueConstraint +class SpecReferenceNotFoundError(Exception): + """SpecReferenceNotFoundError.""" + + @dataclass() class SpecReference: """File Reference Information. 
diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/models/spiff_logging.py b/spiffworkflow-backend/src/spiffworkflow_backend/models/spiff_logging.py index b0b908877..532a6c09c 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/models/spiff_logging.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/models/spiff_logging.py @@ -8,7 +8,7 @@ from flask_bpmn.models.db import SpiffworkflowBaseDBModel @dataclass class SpiffLoggingModel(SpiffworkflowBaseDBModel): - """LoggingModel.""" + """SpiffLoggingModel.""" __tablename__ = "spiff_logging" id: int = db.Column(db.Integer, primary_key=True) diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/models/spiff_step_details.py b/spiffworkflow-backend/src/spiffworkflow_backend/models/spiff_step_details.py index 91d70116a..11c3aeada 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/models/spiff_step_details.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/models/spiff_step_details.py @@ -1,13 +1,11 @@ """Spiff_step_details.""" from dataclasses import dataclass -from typing import Optional from flask_bpmn.models.db import db from flask_bpmn.models.db import SpiffworkflowBaseDBModel from sqlalchemy import ForeignKey from sqlalchemy.orm import deferred -from spiffworkflow_backend.models.group import GroupModel from spiffworkflow_backend.models.process_instance import ProcessInstanceModel @@ -20,10 +18,13 @@ class SpiffStepDetailsModel(SpiffworkflowBaseDBModel): process_instance_id: int = db.Column( ForeignKey(ProcessInstanceModel.id), nullable=False # type: ignore ) + # human_task_id: int = db.Column( + # ForeignKey(HumanTaskModel.id) # type: ignore + # ) spiff_step: int = db.Column(db.Integer, nullable=False) - task_json: str = deferred(db.Column(db.JSON, nullable=False)) # type: ignore + task_json: dict = deferred(db.Column(db.JSON, nullable=False)) # type: ignore timestamp: float = db.Column(db.DECIMAL(17, 6), nullable=False) - completed_by_user_id: int = db.Column(db.Integer, 
nullable=True) - lane_assignment_id: Optional[int] = db.Column( - ForeignKey(GroupModel.id), nullable=True - ) + # completed_by_user_id: int = db.Column(db.Integer, nullable=True) + # lane_assignment_id: Optional[int] = db.Column( + # ForeignKey(GroupModel.id), nullable=True + # ) diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/models/task.py b/spiffworkflow-backend/src/spiffworkflow_backend/models/task.py index 52bb11715..79814c1d5 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/models/task.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/models/task.py @@ -43,8 +43,8 @@ class Task: FIELD_TYPE_EMAIL = "email" # email: Email address FIELD_TYPE_URL = "url" # url: Website address - FIELD_PROP_AUTO_COMPLETE_MAX = ( - "autocomplete_num" # Not used directly, passed in from the front end. + FIELD_PROP_AUTO_COMPLETE_MAX = ( # Not used directly, passed in from the front end. + "autocomplete_num" ) # Required field @@ -77,8 +77,8 @@ class Task: # File specific field properties FIELD_PROP_DOC_CODE = "doc_code" # to associate a file upload field with a doc code - FIELD_PROP_FILE_DATA = ( - "file_data" # to associate a bit of data with a specific file upload file. + FIELD_PROP_FILE_DATA = ( # to associate a bit of data with a specific file upload file. 
+ "file_data" ) # Additional properties @@ -108,7 +108,7 @@ class Task: multi_instance_type: Union[MultiInstanceType, None] = None, multi_instance_count: str = "", multi_instance_index: str = "", - process_name: str = "", + process_identifier: str = "", properties: Union[dict, None] = None, process_instance_id: Union[int, None] = None, process_instance_status: Union[str, None] = None, @@ -118,6 +118,8 @@ class Task: form_schema: Union[str, None] = None, form_ui_schema: Union[str, None] = None, parent: Optional[str] = None, + event_definition: Union[dict[str, Any], None] = None, + call_activity_process_identifier: Optional[str] = None, ): """__init__.""" self.id = id @@ -129,6 +131,8 @@ class Task: self.documentation = documentation self.lane = lane self.parent = parent + self.event_definition = event_definition + self.call_activity_process_identifier = call_activity_process_identifier self.data = data if self.data is None: @@ -151,7 +155,7 @@ class Task: self.multi_instance_index = ( multi_instance_index # And the index of the currently repeating task. ) - self.process_name = process_name + self.process_identifier = process_identifier self.properties = properties # Arbitrary extension properties from BPMN editor. 
if self.properties is None: @@ -177,7 +181,7 @@ class Task: "multi_instance_type": multi_instance_type, "multi_instance_count": self.multi_instance_count, "multi_instance_index": self.multi_instance_index, - "process_name": self.process_name, + "process_identifier": self.process_identifier, "properties": self.properties, "process_instance_id": self.process_instance_id, "process_instance_status": self.process_instance_status, @@ -187,6 +191,8 @@ class Task: "form_schema": self.form_schema, "form_ui_schema": self.form_ui_schema, "parent": self.parent, + "event_definition": self.event_definition, + "call_activity_process_identifier": self.call_activity_process_identifier, } @classmethod @@ -282,18 +288,19 @@ class TaskSchema(Schema): "multi_instance_type", "multi_instance_count", "multi_instance_index", - "process_name", + "process_identifier", "properties", "process_instance_id", "form_schema", "form_ui_schema", + "event_definition", ] multi_instance_type = EnumField(MultiInstanceType) documentation = marshmallow.fields.String(required=False, allow_none=True) # form = marshmallow.fields.Nested(FormSchema, required=False, allow_none=True) title = marshmallow.fields.String(required=False, allow_none=True) - process_name = marshmallow.fields.String(required=False, allow_none=True) + process_identifier = marshmallow.fields.String(required=False, allow_none=True) lane = marshmallow.fields.String(required=False, allow_none=True) @marshmallow.post_load diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/models/user.py b/spiffworkflow-backend/src/spiffworkflow_backend/models/user.py index b8c83d0f7..a94364b07 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/models/user.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/models/user.py @@ -1,22 +1,15 @@ """User.""" from __future__ import annotations -from typing import Any - import jwt import marshmallow from flask import current_app -from flask_bpmn.api.api_error import ApiError from 
flask_bpmn.models.db import db from flask_bpmn.models.db import SpiffworkflowBaseDBModel from marshmallow import Schema from sqlalchemy.orm import relationship -from sqlalchemy.orm import validates from spiffworkflow_backend.models.group import GroupModel -from spiffworkflow_backend.services.authentication_service import ( - AuthenticationProviderTypes, -) class UserNotFoundError(Exception): @@ -28,15 +21,18 @@ class UserModel(SpiffworkflowBaseDBModel): __tablename__ = "user" __table_args__ = (db.UniqueConstraint("service", "service_id", name="service_key"),) - id = db.Column(db.Integer, primary_key=True) - # server and service id must be unique, not username. - username = db.Column(db.String(255), nullable=False, unique=False) - uid = db.Column(db.String(50), unique=True) - service = db.Column(db.String(50), nullable=False, unique=False) + username = db.Column( + db.String(255), nullable=False, unique=True + ) # should always be a unique value + service = db.Column( + db.String(255), nullable=False, unique=False + ) # not 'openid' -- google, aws service_id = db.Column(db.String(255), nullable=False, unique=False) - name = db.Column(db.String(255)) + display_name = db.Column(db.String(255)) email = db.Column(db.String(255)) + updated_at_in_seconds: int = db.Column(db.Integer) + created_at_in_seconds: int = db.Column(db.Integer) user_group_assignments = relationship("UserGroupAssignmentModel", cascade="delete") # type: ignore groups = relationship( # type: ignore @@ -47,21 +43,6 @@ class UserModel(SpiffworkflowBaseDBModel): ) principal = relationship("PrincipalModel", uselist=False) # type: ignore - @validates("service") - def validate_service(self, key: str, value: Any) -> str: - """Validate_service.""" - try: - ap_type = getattr(AuthenticationProviderTypes, value, None) - except Exception as e: - raise ValueError(f"invalid service type: {value}") from e - if ap_type is not None: - ap_value: str = ap_type.value - return ap_value - raise ApiError( - 
error_code="invalid_service", - message=f"Could not validate service with value: {value}", - ) - def encode_auth_token(self) -> str: """Generate the Auth Token. diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/models/user_group_assignment_waiting.py b/spiffworkflow-backend/src/spiffworkflow_backend/models/user_group_assignment_waiting.py new file mode 100644 index 000000000..ac2747c85 --- /dev/null +++ b/spiffworkflow-backend/src/spiffworkflow_backend/models/user_group_assignment_waiting.py @@ -0,0 +1,34 @@ +"""UserGroupAssignment.""" +from flask_bpmn.models.db import db +from flask_bpmn.models.db import SpiffworkflowBaseDBModel +from sqlalchemy import ForeignKey +from sqlalchemy.orm import relationship + +from spiffworkflow_backend.models.group import GroupModel + + +class UserGroupAssignmentWaitingModel(SpiffworkflowBaseDBModel): + """When a user is assigned to a group, but that username does not exist. + + We cache it here to be applied in the event the user does log in to the system. 
+ """ + + MATCH_ALL_USERS = "*" + __tablename__ = "user_group_assignment_waiting" + __table_args__ = ( + db.UniqueConstraint( + "username", "group_id", name="user_group_assignment_staged_unique" + ), + ) + + id = db.Column(db.Integer, primary_key=True) + username = db.Column(db.String(255), nullable=False) + group_id = db.Column(ForeignKey(GroupModel.id), nullable=False) + + group = relationship("GroupModel", overlaps="groups,user_group_assignments_waiting,users") # type: ignore + + def is_match_all(self) -> bool: + """Is_match_all.""" + if self.username == self.MATCH_ALL_USERS: + return True + return False diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/routes/admin_blueprint/admin_blueprint.py b/spiffworkflow-backend/src/spiffworkflow_backend/routes/admin_blueprint/admin_blueprint.py index f1223ae0d..5cb0ae89b 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/routes/admin_blueprint/admin_blueprint.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/routes/admin_blueprint/admin_blueprint.py @@ -141,7 +141,7 @@ def process_model_save(process_model_id: str, file_name: str) -> Union[str, Resp @admin_blueprint.route("/process-models//run", methods=["GET"]) def process_model_run(process_model_id: str) -> Union[str, Response]: """Process_model_run.""" - user = UserService.create_user("internal", "Mr. Test", username="Mr. Test") + user = UserService.create_user("Mr. Test", "internal", "Mr. 
Test") process_instance = ( ProcessInstanceService.create_process_instance_from_process_model_identifier( process_model_id, user diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/routes/health_controller.py b/spiffworkflow-backend/src/spiffworkflow_backend/routes/health_controller.py new file mode 100644 index 000000000..e98311101 --- /dev/null +++ b/spiffworkflow-backend/src/spiffworkflow_backend/routes/health_controller.py @@ -0,0 +1,13 @@ +"""APIs for dealing with process groups, process models, and process instances.""" +import json + +import flask.wrappers +from flask.wrappers import Response + +from spiffworkflow_backend.models.process_instance import ProcessInstanceModel + + +def status() -> flask.wrappers.Response: + """Status.""" + ProcessInstanceModel.query.filter().first() + return Response(json.dumps({"ok": True}), status=200, mimetype="application/json") diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/routes/messages_controller.py b/spiffworkflow-backend/src/spiffworkflow_backend/routes/messages_controller.py new file mode 100644 index 000000000..51290770f --- /dev/null +++ b/spiffworkflow-backend/src/spiffworkflow_backend/routes/messages_controller.py @@ -0,0 +1,176 @@ +"""APIs for dealing with process groups, process models, and process instances.""" +import json +from typing import Any +from typing import Dict +from typing import Optional + +import flask.wrappers +from flask import g +from flask import jsonify +from flask import make_response +from flask.wrappers import Response +from flask_bpmn.api.api_error import ApiError + +from spiffworkflow_backend.models.message_correlation import MessageCorrelationModel +from spiffworkflow_backend.models.message_instance import MessageInstanceModel +from spiffworkflow_backend.models.message_model import MessageModel +from spiffworkflow_backend.models.message_triggerable_process_model import ( + MessageTriggerableProcessModel, +) +from spiffworkflow_backend.models.process_instance 
import ProcessInstanceModel +from spiffworkflow_backend.models.process_instance import ProcessInstanceModelSchema +from spiffworkflow_backend.routes.process_api_blueprint import ( + _find_process_instance_by_id_or_raise, +) +from spiffworkflow_backend.services.message_service import MessageService + + +def message_instance_list( + process_instance_id: Optional[int] = None, + page: int = 1, + per_page: int = 100, +) -> flask.wrappers.Response: + """Message_instance_list.""" + # to make sure the process instance exists + message_instances_query = MessageInstanceModel.query + + if process_instance_id: + message_instances_query = message_instances_query.filter_by( + process_instance_id=process_instance_id + ) + + message_instances = ( + message_instances_query.order_by( + MessageInstanceModel.created_at_in_seconds.desc(), # type: ignore + MessageInstanceModel.id.desc(), # type: ignore + ) + .join(MessageModel, MessageModel.id == MessageInstanceModel.message_model_id) + .join(ProcessInstanceModel) + .add_columns( + MessageModel.identifier.label("message_identifier"), + ProcessInstanceModel.process_model_identifier, + ProcessInstanceModel.process_model_display_name, + ) + .paginate(page=page, per_page=per_page, error_out=False) + ) + + for message_instance in message_instances: + message_correlations: dict = {} + for ( + mcmi + ) in ( + message_instance.MessageInstanceModel.message_correlations_message_instances + ): + mc = MessageCorrelationModel.query.filter_by( + id=mcmi.message_correlation_id + ).all() + for m in mc: + if m.name not in message_correlations: + message_correlations[m.name] = {} + message_correlations[m.name][ + m.message_correlation_property.identifier + ] = m.value + message_instance.MessageInstanceModel.message_correlations = ( + message_correlations + ) + + response_json = { + "results": message_instances.items, + "pagination": { + "count": len(message_instances.items), + "total": message_instances.total, + "pages": message_instances.pages, + }, + } 
+ + return make_response(jsonify(response_json), 200) + + +# body: { +# payload: dict, +# process_instance_id: Optional[int], +# } +def message_start( + message_identifier: str, + body: Dict[str, Any], +) -> flask.wrappers.Response: + """Message_start.""" + message_model = MessageModel.query.filter_by(identifier=message_identifier).first() + if message_model is None: + raise ( + ApiError( + error_code="unknown_message", + message=f"Could not find message with identifier: {message_identifier}", + status_code=404, + ) + ) + + if "payload" not in body: + raise ( + ApiError( + error_code="missing_payload", + message="Body is missing payload.", + status_code=400, + ) + ) + + process_instance = None + if "process_instance_id" in body: + # to make sure we have a valid process_instance_id + process_instance = _find_process_instance_by_id_or_raise( + body["process_instance_id"] + ) + + message_instance = MessageInstanceModel.query.filter_by( + process_instance_id=process_instance.id, + message_model_id=message_model.id, + message_type="receive", + status="ready", + ).first() + if message_instance is None: + raise ( + ApiError( + error_code="cannot_find_waiting_message", + message=( + "Could not find waiting message for identifier" + f" {message_identifier} and process instance" + f" {process_instance.id}" + ), + status_code=400, + ) + ) + MessageService.process_message_receive( + message_instance, message_model.name, body["payload"] + ) + + else: + message_triggerable_process_model = ( + MessageTriggerableProcessModel.query.filter_by( + message_model_id=message_model.id + ).first() + ) + + if message_triggerable_process_model is None: + raise ( + ApiError( + error_code="cannot_start_message", + message=( + "Message with identifier cannot be started with message:" + f" {message_identifier}" + ), + status_code=400, + ) + ) + + process_instance = MessageService.process_message_triggerable_process_model( + message_triggerable_process_model, + message_model.name, + 
body["payload"], + g.user, + ) + + return Response( + json.dumps(ProcessInstanceModelSchema().dump(process_instance)), + status=200, + mimetype="application/json", + ) diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/routes/openid_blueprint/openid_blueprint.py b/spiffworkflow-backend/src/spiffworkflow_backend/routes/openid_blueprint/openid_blueprint.py index f812ab034..f25100eed 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/routes/openid_blueprint/openid_blueprint.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/routes/openid_blueprint/openid_blueprint.py @@ -111,6 +111,7 @@ def token() -> dict: "iat": time.time(), "exp": time.time() + 86400, # Expire after a day. "sub": user_name, + "email": user_details["email"], "preferred_username": user_details.get("preferred_username", user_name), }, client_secret, diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/routes/process_api_blueprint.py b/spiffworkflow-backend/src/spiffworkflow_backend/routes/process_api_blueprint.py index 7e583de9e..6dcd79f78 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/routes/process_api_blueprint.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/routes/process_api_blueprint.py @@ -1,136 +1,54 @@ """APIs for dealing with process groups, process models, and process instances.""" import json -import random -import re -import string -import uuid from typing import Any from typing import Dict -from typing import Optional -from typing import TypedDict -from typing import Union -import connexion # type: ignore import flask.wrappers -import jinja2 -import werkzeug from flask import Blueprint from flask import current_app from flask import g from flask import jsonify from flask import make_response -from flask import redirect from flask import request from flask.wrappers import Response from flask_bpmn.api.api_error import ApiError from flask_bpmn.models.db import db -from lxml import etree # type: ignore -from lxml.builder import 
ElementMaker # type: ignore -from SpiffWorkflow.task import Task as SpiffTask # type: ignore -from SpiffWorkflow.task import TaskState -from sqlalchemy import and_ -from sqlalchemy import asc -from sqlalchemy import desc -from sqlalchemy import func -from sqlalchemy.orm import aliased -from sqlalchemy.orm import selectinload from spiffworkflow_backend.exceptions.process_entity_not_found_error import ( ProcessEntityNotFoundError, ) -from spiffworkflow_backend.models.active_task import ActiveTaskModel -from spiffworkflow_backend.models.active_task_user import ActiveTaskUserModel -from spiffworkflow_backend.models.file import FileSchema -from spiffworkflow_backend.models.group import GroupModel -from spiffworkflow_backend.models.message_correlation import MessageCorrelationModel -from spiffworkflow_backend.models.message_instance import MessageInstanceModel -from spiffworkflow_backend.models.message_model import MessageModel -from spiffworkflow_backend.models.message_triggerable_process_model import ( - MessageTriggerableProcessModel, -) from spiffworkflow_backend.models.principal import PrincipalModel -from spiffworkflow_backend.models.process_group import ProcessGroup -from spiffworkflow_backend.models.process_group import ProcessGroupSchema -from spiffworkflow_backend.models.process_instance import ProcessInstanceApiSchema from spiffworkflow_backend.models.process_instance import ProcessInstanceModel from spiffworkflow_backend.models.process_instance import ProcessInstanceModelSchema -from spiffworkflow_backend.models.process_instance import ProcessInstanceStatus -from spiffworkflow_backend.models.process_instance_metadata import ( - ProcessInstanceMetadataModel, -) -from spiffworkflow_backend.models.process_instance_report import ( - ProcessInstanceReportModel, +from spiffworkflow_backend.models.process_instance import ( + ProcessInstanceTaskDataCannotBeUpdatedError, ) from spiffworkflow_backend.models.process_model import ProcessModelInfo -from 
spiffworkflow_backend.models.process_model import ProcessModelInfoSchema -from spiffworkflow_backend.models.secret_model import SecretModel -from spiffworkflow_backend.models.secret_model import SecretModelSchema from spiffworkflow_backend.models.spec_reference import SpecReferenceCache from spiffworkflow_backend.models.spec_reference import SpecReferenceSchema -from spiffworkflow_backend.models.spiff_logging import SpiffLoggingModel -from spiffworkflow_backend.models.spiff_step_details import SpiffStepDetailsModel -from spiffworkflow_backend.models.user import UserModel -from spiffworkflow_backend.models.user_group_assignment import UserGroupAssignmentModel -from spiffworkflow_backend.routes.user import verify_token from spiffworkflow_backend.services.authorization_service import AuthorizationService -from spiffworkflow_backend.services.error_handling_service import ErrorHandlingService from spiffworkflow_backend.services.git_service import GitService -from spiffworkflow_backend.services.message_service import MessageService from spiffworkflow_backend.services.process_instance_processor import ( ProcessInstanceProcessor, ) -from spiffworkflow_backend.services.process_instance_report_service import ( - ProcessInstanceReportFilter, -) -from spiffworkflow_backend.services.process_instance_report_service import ( - ProcessInstanceReportService, -) -from spiffworkflow_backend.services.process_instance_service import ( - ProcessInstanceService, -) from spiffworkflow_backend.services.process_model_service import ProcessModelService -from spiffworkflow_backend.services.script_unit_test_runner import ScriptUnitTestRunner -from spiffworkflow_backend.services.secret_service import SecretService -from spiffworkflow_backend.services.service_task_service import ServiceTaskService -from spiffworkflow_backend.services.spec_file_service import SpecFileService -from spiffworkflow_backend.services.user_service import UserService - - -class TaskDataSelectOption(TypedDict): - 
"""TaskDataSelectOption.""" - - value: str - label: str - - -class ReactJsonSchemaSelectOption(TypedDict): - """ReactJsonSchemaSelectOption.""" - - type: str - title: str - enum: list[str] process_api_blueprint = Blueprint("process_api", __name__) -def status() -> flask.wrappers.Response: - """Status.""" - ProcessInstanceModel.query.filter().first() - return Response(json.dumps({"ok": True}), status=200, mimetype="application/json") - - def permissions_check(body: Dict[str, Dict[str, list[str]]]) -> flask.wrappers.Response: """Permissions_check.""" if "requests_to_check" not in body: raise ( ApiError( error_code="could_not_requests_to_check", - message="The key 'requests_to_check' not found at root of request body.", + message=( + "The key 'requests_to_check' not found at root of request body." + ), status_code=400, ) ) - response_dict: dict[str, dict[str, bool]] = {} requests_to_check = body["requests_to_check"] @@ -153,274 +71,12 @@ def permissions_check(body: Dict[str, Dict[str, list[str]]]) -> flask.wrappers.R return make_response(jsonify({"results": response_dict}), 200) -def modify_process_model_id(process_model_id: str) -> str: - """Modify_process_model_id.""" - return process_model_id.replace("/", ":") - - -def un_modify_modified_process_model_id(modified_process_model_identifier: str) -> str: - """Un_modify_modified_process_model_id.""" - return modified_process_model_identifier.replace(":", "/") - - -def process_group_add(body: dict) -> flask.wrappers.Response: - """Add_process_group.""" - process_group = ProcessGroup(**body) - ProcessModelService.add_process_group(process_group) - return make_response(jsonify(process_group), 201) - - -def process_group_delete(modified_process_group_id: str) -> flask.wrappers.Response: - """Process_group_delete.""" - process_group_id = un_modify_modified_process_model_id(modified_process_group_id) - ProcessModelService().process_group_delete(process_group_id) - return Response(json.dumps({"ok": True}), status=200, 
mimetype="application/json") - - -def process_group_update( - modified_process_group_id: str, body: dict -) -> flask.wrappers.Response: - """Process Group Update.""" - body_include_list = ["display_name", "description"] - body_filtered = { - include_item: body[include_item] - for include_item in body_include_list - if include_item in body - } - - process_group_id = un_modify_modified_process_model_id(modified_process_group_id) - process_group = ProcessGroup(id=process_group_id, **body_filtered) - ProcessModelService.update_process_group(process_group) - return make_response(jsonify(process_group), 200) - - -def process_group_list( - process_group_identifier: Optional[str] = None, page: int = 1, per_page: int = 100 -) -> flask.wrappers.Response: - """Process_group_list.""" - if process_group_identifier is not None: - process_groups = ProcessModelService.get_process_groups( - process_group_identifier - ) - else: - process_groups = ProcessModelService.get_process_groups() - batch = ProcessModelService().get_batch( - items=process_groups, page=page, per_page=per_page - ) - pages = len(process_groups) // per_page - remainder = len(process_groups) % per_page - if remainder > 0: - pages += 1 - - response_json = { - "results": ProcessGroupSchema(many=True).dump(batch), - "pagination": { - "count": len(batch), - "total": len(process_groups), - "pages": pages, - }, - } - return Response(json.dumps(response_json), status=200, mimetype="application/json") - - -def process_group_show( - modified_process_group_id: str, -) -> Any: - """Process_group_show.""" - process_group_id = un_modify_modified_process_model_id(modified_process_group_id) - try: - process_group = ProcessModelService.get_process_group(process_group_id) - except ProcessEntityNotFoundError as exception: - raise ( - ApiError( - error_code="process_group_cannot_be_found", - message=f"Process group cannot be found: {process_group_id}", - status_code=400, - ) - ) from exception - - process_group.parent_groups = 
ProcessModelService.get_parent_group_array( - process_group.id - ) - return make_response(jsonify(process_group), 200) - - -def process_group_move( - modified_process_group_identifier: str, new_location: str -) -> flask.wrappers.Response: - """Process_group_move.""" - original_process_group_id = un_modify_modified_process_model_id( - modified_process_group_identifier - ) - new_process_group = ProcessModelService().process_group_move( - original_process_group_id, new_location - ) - return make_response(jsonify(new_process_group), 201) - - -def process_model_create( - modified_process_group_id: str, body: Dict[str, Union[str, bool, int]] -) -> flask.wrappers.Response: - """Process_model_create.""" - body_include_list = [ - "id", - "display_name", - "primary_file_name", - "primary_process_id", - "description", - "metadata_extraction_paths", - ] - body_filtered = { - include_item: body[include_item] - for include_item in body_include_list - if include_item in body - } - - if modified_process_group_id is None: - raise ApiError( - error_code="process_group_id_not_specified", - message="Process Model could not be created when process_group_id path param is unspecified", - status_code=400, - ) - - unmodified_process_group_id = un_modify_modified_process_model_id( - modified_process_group_id - ) - process_group = ProcessModelService.get_process_group(unmodified_process_group_id) - if process_group is None: - raise ApiError( - error_code="process_model_could_not_be_created", - message=f"Process Model could not be created from given body because Process Group could not be found: {body}", - status_code=400, - ) - - process_model_info = ProcessModelInfo(**body_filtered) # type: ignore - if process_model_info is None: - raise ApiError( - error_code="process_model_could_not_be_created", - message=f"Process Model could not be created from given body: {body}", - status_code=400, - ) - - ProcessModelService.add_process_model(process_model_info) - return Response( - 
json.dumps(ProcessModelInfoSchema().dump(process_model_info)), - status=201, - mimetype="application/json", - ) - - -def process_model_delete( - modified_process_model_identifier: str, -) -> flask.wrappers.Response: - """Process_model_delete.""" - process_model_identifier = modified_process_model_identifier.replace(":", "/") - ProcessModelService().process_model_delete(process_model_identifier) - return Response(json.dumps({"ok": True}), status=200, mimetype="application/json") - - -def process_model_update( - modified_process_model_identifier: str, body: Dict[str, Union[str, bool, int]] -) -> Any: - """Process_model_update.""" - process_model_identifier = modified_process_model_identifier.replace(":", "/") - body_include_list = [ - "display_name", - "primary_file_name", - "primary_process_id", - "description", - "metadata_extraction_paths", - ] - body_filtered = { - include_item: body[include_item] - for include_item in body_include_list - if include_item in body - } - - process_model = get_process_model(process_model_identifier) - ProcessModelService.update_process_model(process_model, body_filtered) - return ProcessModelInfoSchema().dump(process_model) - - -def process_model_show(modified_process_model_identifier: str) -> Any: - """Process_model_show.""" - process_model_identifier = modified_process_model_identifier.replace(":", "/") - process_model = get_process_model(process_model_identifier) - files = sorted(SpecFileService.get_files(process_model)) - process_model.files = files - for file in process_model.files: - file.references = SpecFileService.get_references_for_file(file, process_model) - - process_model.parent_groups = ProcessModelService.get_parent_group_array( - process_model.id - ) - return make_response(jsonify(process_model), 200) - - -def process_model_move( - modified_process_model_identifier: str, new_location: str -) -> flask.wrappers.Response: - """Process_model_move.""" - original_process_model_id = un_modify_modified_process_model_id( - 
modified_process_model_identifier - ) - new_process_model = ProcessModelService().process_model_move( - original_process_model_id, new_location - ) - return make_response(jsonify(new_process_model), 201) - - -def process_model_publish( - modified_process_model_identifier: str, branch_to_update: Optional[str] = None -) -> flask.wrappers.Response: - """Process_model_publish.""" - if branch_to_update is None: - branch_to_update = current_app.config["GIT_MERGE_BRANCH"] - process_model_identifier = un_modify_modified_process_model_id( - modified_process_model_identifier - ) - pr_url = GitService().publish(process_model_identifier, branch_to_update) - data = {"ok": True, "pr_url": pr_url} - return Response(json.dumps(data), status=200, mimetype="application/json") - - -def process_model_list( - process_group_identifier: Optional[str] = None, - recursive: Optional[bool] = False, - filter_runnable_by_user: Optional[bool] = False, - include_parent_groups: Optional[bool] = False, - page: int = 1, - per_page: int = 100, -) -> flask.wrappers.Response: - """Process model list!""" - process_models = ProcessModelService.get_process_models( - process_group_id=process_group_identifier, - recursive=recursive, - filter_runnable_by_user=filter_runnable_by_user, - ) - process_models_to_return = ProcessModelService().get_batch( - process_models, page=page, per_page=per_page - ) - - if include_parent_groups: - for process_model in process_models_to_return: - process_model.parent_groups = ProcessModelService.get_parent_group_array( - process_model.id - ) - - pages = len(process_models) // per_page - remainder = len(process_models) % per_page - if remainder > 0: - pages += 1 - response_json = { - "results": process_models_to_return, - "pagination": { - "count": len(process_models_to_return), - "total": len(process_models), - "pages": pages, - }, - } - return make_response(jsonify(response_json), 200) +def user_group_list_for_current_user() -> flask.wrappers.Response: + 
"""User_group_list_for_current_user.""" + groups = g.user.groups + # TODO: filter out the default group and have a way to know what is the default group + group_identifiers = [i.identifier for i in groups if i.identifier != "everybody"] + return make_response(jsonify(sorted(group_identifiers)), 200) def process_list() -> Any: @@ -433,377 +89,96 @@ def process_list() -> Any: return SpecReferenceSchema(many=True).dump(references) -def get_file(modified_process_model_identifier: str, file_name: str) -> Any: - """Get_file.""" - process_model_identifier = modified_process_model_identifier.replace(":", "/") - process_model = get_process_model(process_model_identifier) - files = SpecFileService.get_files(process_model, file_name) - if len(files) == 0: - raise ApiError( - error_code="unknown file", - message=f"No information exists for file {file_name}" - f" it does not exist in workflow {process_model_identifier}.", - status_code=404, - ) - - file = files[0] - file_contents = SpecFileService.get_data(process_model, file.name) - file.file_contents = file_contents - file.process_model_id = process_model.id - # file.process_group_id = process_model.process_group_id - return FileSchema().dump(file) - - -def process_model_file_update( - modified_process_model_identifier: str, file_name: str +def process_data_show( + process_instance_id: int, + process_data_identifier: str, + modified_process_model_identifier: str, ) -> flask.wrappers.Response: - """Process_model_file_update.""" - process_model_identifier = modified_process_model_identifier.replace(":", "/") - process_model = get_process_model(process_model_identifier) + """Process_data_show.""" + process_instance = _find_process_instance_by_id_or_raise(process_instance_id) + processor = ProcessInstanceProcessor(process_instance) + all_process_data = processor.get_data() + process_data_value = None + if process_data_identifier in all_process_data: + process_data_value = all_process_data[process_data_identifier] - request_file = 
get_file_from_request() - request_file_contents = request_file.stream.read() - if not request_file_contents: - raise ApiError( - error_code="file_contents_empty", - message="Given request file does not have any content", - status_code=400, - ) - - SpecFileService.update_file(process_model, file_name, request_file_contents) - - if current_app.config["GIT_COMMIT_ON_SAVE"]: - git_output = GitService.commit( - message=f"User: {g.user.username} clicked save for {process_model_identifier}/{file_name}" - ) - current_app.logger.info(f"git output: {git_output}") - else: - current_app.logger.info("Git commit on save is disabled") - - return Response(json.dumps({"ok": True}), status=200, mimetype="application/json") - - -def process_model_file_delete( - modified_process_model_identifier: str, file_name: str -) -> flask.wrappers.Response: - """Process_model_file_delete.""" - process_model_identifier = modified_process_model_identifier.replace(":", "/") - process_model = get_process_model(process_model_identifier) - try: - SpecFileService.delete_file(process_model, file_name) - except FileNotFoundError as exception: - raise ( - ApiError( - error_code="process_model_file_cannot_be_found", - message=f"Process model file cannot be found: {file_name}", - status_code=400, - ) - ) from exception - - return Response(json.dumps({"ok": True}), status=200, mimetype="application/json") - - -def add_file(modified_process_model_identifier: str) -> flask.wrappers.Response: - """Add_file.""" - process_model_identifier = modified_process_model_identifier.replace(":", "/") - process_model = get_process_model(process_model_identifier) - request_file = get_file_from_request() - if not request_file.filename: - raise ApiError( - error_code="could_not_get_filename", - message="Could not get filename from request", - status_code=400, - ) - - file = SpecFileService.add_file( - process_model, request_file.filename, request_file.stream.read() + return make_response( + jsonify( + { + 
"process_data_identifier": process_data_identifier, + "process_data_value": process_data_value, + } + ), + 200, ) - file_contents = SpecFileService.get_data(process_model, file.name) - file.file_contents = file_contents - file.process_model_id = process_model.id + + +# sample body: +# {"ref": "refs/heads/main", "repository": {"name": "sample-process-models", +# "full_name": "sartography/sample-process-models", "private": False .... }} +# test with: ngrok http 7000 +# where 7000 is the port the app is running on locally +def github_webhook_receive(body: Dict) -> Response: + """Github_webhook_receive.""" + auth_header = request.headers.get("X-Hub-Signature-256") + AuthorizationService.verify_sha256_token(auth_header) + result = GitService.handle_web_hook(body) return Response( - json.dumps(FileSchema().dump(file)), status=201, mimetype="application/json" + json.dumps({"git_pull": result}), status=200, mimetype="application/json" ) -def process_instance_create( +def task_data_update( + process_instance_id: str, modified_process_model_identifier: str, -) -> flask.wrappers.Response: - """Create_process_instance.""" - process_model_identifier = un_modify_modified_process_model_id( - modified_process_model_identifier - ) - process_instance = ( - ProcessInstanceService.create_process_instance_from_process_model_identifier( - process_model_identifier, g.user - ) - ) - return Response( - json.dumps(ProcessInstanceModelSchema().dump(process_instance)), - status=201, - mimetype="application/json", - ) - - -def process_instance_run( - modified_process_model_identifier: str, - process_instance_id: int, - do_engine_steps: bool = True, -) -> flask.wrappers.Response: - """Process_instance_run.""" - process_instance = ProcessInstanceService().get_process_instance( - process_instance_id - ) - processor = ProcessInstanceProcessor(process_instance) - - if do_engine_steps: - try: - processor.do_engine_steps() - except ApiError as e: - ErrorHandlingService().handle_error(processor, e) - 
raise e - except Exception as e: - ErrorHandlingService().handle_error(processor, e) - task = processor.bpmn_process_instance.last_task - raise ApiError.from_task( - error_code="unknown_exception", - message=f"An unknown error occurred. Original error: {e}", - status_code=400, - task=task, - ) from e - processor.save() - - if not current_app.config["RUN_BACKGROUND_SCHEDULER"]: - MessageService.process_message_instances() - - process_instance_api = ProcessInstanceService.processor_to_process_instance_api( - processor - ) - process_instance_data = processor.get_data() - process_instance_metadata = ProcessInstanceApiSchema().dump(process_instance_api) - process_instance_metadata["data"] = process_instance_data - return Response( - json.dumps(process_instance_metadata), status=200, mimetype="application/json" - ) - - -def process_instance_terminate( - process_instance_id: int, - modified_process_model_identifier: str, -) -> flask.wrappers.Response: - """Process_instance_run.""" - process_instance = ProcessInstanceService().get_process_instance( - process_instance_id - ) - processor = ProcessInstanceProcessor(process_instance) - processor.terminate() - return Response(json.dumps({"ok": True}), status=200, mimetype="application/json") - - -def process_instance_suspend( - process_instance_id: int, - modified_process_model_identifier: str, -) -> flask.wrappers.Response: - """Process_instance_suspend.""" - process_instance = ProcessInstanceService().get_process_instance( - process_instance_id - ) - processor = ProcessInstanceProcessor(process_instance) - processor.suspend() - return Response(json.dumps({"ok": True}), status=200, mimetype="application/json") - - -def process_instance_resume( - process_instance_id: int, - modified_process_model_identifier: str, -) -> flask.wrappers.Response: - """Process_instance_resume.""" - process_instance = ProcessInstanceService().get_process_instance( - process_instance_id - ) - processor = ProcessInstanceProcessor(process_instance) - 
processor.resume() - return Response(json.dumps({"ok": True}), status=200, mimetype="application/json") - - -def process_instance_log_list( - modified_process_model_identifier: str, - process_instance_id: int, - page: int = 1, - per_page: int = 100, - detailed: bool = False, -) -> flask.wrappers.Response: - """Process_instance_log_list.""" - # to make sure the process instance exists - process_instance = find_process_instance_by_id_or_raise(process_instance_id) - - log_query = SpiffLoggingModel.query.filter( - SpiffLoggingModel.process_instance_id == process_instance.id - ) - if not detailed: - log_query = log_query.filter(SpiffLoggingModel.message.in_(["State change to COMPLETED"])) # type: ignore - - logs = ( - log_query.order_by(SpiffLoggingModel.timestamp.desc()) # type: ignore - .join( - UserModel, UserModel.id == SpiffLoggingModel.current_user_id, isouter=True - ) # isouter since if we don't have a user, we still want the log - .add_columns( - UserModel.username, - ) - .paginate(page=page, per_page=per_page, error_out=False) - ) - - response_json = { - "results": logs.items, - "pagination": { - "count": len(logs.items), - "total": logs.total, - "pages": logs.pages, - }, - } - - return make_response(jsonify(response_json), 200) - - -def message_instance_list( - process_instance_id: Optional[int] = None, - page: int = 1, - per_page: int = 100, -) -> flask.wrappers.Response: - """Message_instance_list.""" - # to make sure the process instance exists - message_instances_query = MessageInstanceModel.query - - if process_instance_id: - message_instances_query = message_instances_query.filter_by( - process_instance_id=process_instance_id - ) - - message_instances = ( - message_instances_query.order_by( - MessageInstanceModel.created_at_in_seconds.desc(), # type: ignore - MessageInstanceModel.id.desc(), # type: ignore - ) - .join(MessageModel, MessageModel.id == MessageInstanceModel.message_model_id) - .join(ProcessInstanceModel) - .add_columns( - 
MessageModel.identifier.label("message_identifier"), - ProcessInstanceModel.process_model_identifier, - ProcessInstanceModel.process_model_display_name, - ) - .paginate(page=page, per_page=per_page, error_out=False) - ) - - for message_instance in message_instances: - message_correlations: dict = {} - for ( - mcmi - ) in ( - message_instance.MessageInstanceModel.message_correlations_message_instances - ): - mc = MessageCorrelationModel.query.filter_by( - id=mcmi.message_correlation_id - ).all() - for m in mc: - if m.name not in message_correlations: - message_correlations[m.name] = {} - message_correlations[m.name][ - m.message_correlation_property.identifier - ] = m.value - message_instance.MessageInstanceModel.message_correlations = ( - message_correlations - ) - - response_json = { - "results": message_instances.items, - "pagination": { - "count": len(message_instances.items), - "total": message_instances.total, - "pages": message_instances.pages, - }, - } - - return make_response(jsonify(response_json), 200) - - -# body: { -# payload: dict, -# process_instance_id: Optional[int], -# } -def message_start( - message_identifier: str, - body: Dict[str, Any], -) -> flask.wrappers.Response: - """Message_start.""" - message_model = MessageModel.query.filter_by(identifier=message_identifier).first() - if message_model is None: - raise ( - ApiError( - error_code="unknown_message", - message=f"Could not find message with identifier: {message_identifier}", - status_code=404, + task_id: str, + body: Dict, +) -> Response: + """Update task data.""" + process_instance = ProcessInstanceModel.query.filter( + ProcessInstanceModel.id == int(process_instance_id) + ).first() + if process_instance: + if process_instance.status != "suspended": + raise ProcessInstanceTaskDataCannotBeUpdatedError( + "The process instance needs to be suspended to update the task-data."
+ f" It is currently: {process_instance.status}" ) - ) - if "payload" not in body: - raise ( - ApiError( - error_code="missing_payload", - message="Body is missing payload.", - status_code=400, - ) - ) - - process_instance = None - if "process_instance_id" in body: - # to make sure we have a valid process_instance_id - process_instance = find_process_instance_by_id_or_raise( - body["process_instance_id"] - ) - - message_instance = MessageInstanceModel.query.filter_by( - process_instance_id=process_instance.id, - message_model_id=message_model.id, - message_type="receive", - status="ready", - ).first() - if message_instance is None: - raise ( - ApiError( - error_code="cannot_find_waiting_message", - message=f"Could not find waiting message for identifier {message_identifier} " - f"and process instance {process_instance.id}", - status_code=400, + process_instance_bpmn_json_dict = json.loads(process_instance.bpmn_json) + if "new_task_data" in body: + new_task_data_str: str = body["new_task_data"] + new_task_data_dict = json.loads(new_task_data_str) + if task_id in process_instance_bpmn_json_dict["tasks"]: + process_instance_bpmn_json_dict["tasks"][task_id][ + "data" + ] = new_task_data_dict + process_instance.bpmn_json = json.dumps(process_instance_bpmn_json_dict) + db.session.add(process_instance) + try: + db.session.commit() + except Exception as e: + db.session.rollback() + raise ApiError( + error_code="update_task_data_error", + message=f"Could not update the Instance. Original error is {e}", + ) from e + else: + raise ApiError( + error_code="update_task_data_error", + message=( + f"Could not find Task: {task_id} in Instance:" + f" {process_instance_id}." 
+ ), ) - ) - MessageService.process_message_receive( - message_instance, message_model.name, body["payload"] - ) - else: - message_triggerable_process_model = ( - MessageTriggerableProcessModel.query.filter_by( - message_model_id=message_model.id - ).first() + raise ApiError( + error_code="update_task_data_error", + message=( + f"Could not update task data for Instance: {process_instance_id}, and" + f" Task: {task_id}." + ), ) - - if message_triggerable_process_model is None: - raise ( - ApiError( - error_code="cannot_start_message", - message=f"Message with identifier cannot be start with message: {message_identifier}", - status_code=400, - ) - ) - - process_instance = MessageService.process_message_triggerable_process_model( - message_triggerable_process_model, - message_model.name, - body["payload"], - g.user, - ) - return Response( json.dumps(ProcessInstanceModelSchema().dump(process_instance)), status=200, @@ -811,940 +186,91 @@ def message_start( ) -def process_instance_list( - process_model_identifier: Optional[str] = None, - page: int = 1, - per_page: int = 100, - start_from: Optional[int] = None, - start_to: Optional[int] = None, - end_from: Optional[int] = None, - end_to: Optional[int] = None, - process_status: Optional[str] = None, - initiated_by_me: Optional[bool] = None, - with_tasks_completed_by_me: Optional[bool] = None, - with_tasks_completed_by_my_group: Optional[bool] = None, - user_filter: Optional[bool] = False, - report_identifier: Optional[str] = None, - report_id: Optional[int] = None, -) -> flask.wrappers.Response: - """Process_instance_list.""" - process_instance_report = ProcessInstanceReportService.report_with_identifier( - g.user, report_id, report_identifier - ) +def _get_required_parameter_or_raise(parameter: str, post_body: dict[str, Any]) -> Any: + """Get_required_parameter_or_raise.""" + return_value = None + if parameter in post_body: + return_value = post_body[parameter] - if user_filter: - report_filter = 
ProcessInstanceReportFilter( - process_model_identifier, - start_from, - start_to, - end_from, - end_to, - process_status.split(",") if process_status else None, - initiated_by_me, - with_tasks_completed_by_me, - with_tasks_completed_by_my_group, - ) - else: - report_filter = ( - ProcessInstanceReportService.filter_from_metadata_with_overrides( - process_instance_report, - process_model_identifier, - start_from, - start_to, - end_from, - end_to, - process_status, - initiated_by_me, - with_tasks_completed_by_me, - with_tasks_completed_by_my_group, - ) - ) - - process_instance_query = ProcessInstanceModel.query - # Always join that hot user table for good performance at serialization time. - process_instance_query = process_instance_query.options( - selectinload(ProcessInstanceModel.process_initiator) - ) - - if report_filter.process_model_identifier is not None: - process_model = get_process_model( - f"{report_filter.process_model_identifier}", - ) - - process_instance_query = process_instance_query.filter_by( - process_model_identifier=process_model.id - ) - - # this can never happen. obviously the class has the columns it defines. this is just to appease mypy. 
- if ( - ProcessInstanceModel.start_in_seconds is None - or ProcessInstanceModel.end_in_seconds is None - ): + if return_value is None or return_value == "": raise ( ApiError( - error_code="unexpected_condition", - message="Something went very wrong", - status_code=500, + error_code="missing_required_parameter", + message=f"Parameter is missing from json request body: {parameter}", + status_code=400, ) ) - if report_filter.start_from is not None: - process_instance_query = process_instance_query.filter( - ProcessInstanceModel.start_in_seconds >= report_filter.start_from - ) - if report_filter.start_to is not None: - process_instance_query = process_instance_query.filter( - ProcessInstanceModel.start_in_seconds <= report_filter.start_to - ) - if report_filter.end_from is not None: - process_instance_query = process_instance_query.filter( - ProcessInstanceModel.end_in_seconds >= report_filter.end_from - ) - if report_filter.end_to is not None: - process_instance_query = process_instance_query.filter( - ProcessInstanceModel.end_in_seconds <= report_filter.end_to - ) - if report_filter.process_status is not None: - process_instance_query = process_instance_query.filter( - ProcessInstanceModel.status.in_(report_filter.process_status) # type: ignore - ) + return return_value - if report_filter.initiated_by_me is True: - process_instance_query = process_instance_query.filter( - ProcessInstanceModel.status.in_(["complete", "error", "terminated"]) # type: ignore - ) - process_instance_query = process_instance_query.filter_by( - process_initiator=g.user - ) - # TODO: not sure if this is exactly what is wanted - if report_filter.with_tasks_completed_by_me is True: - process_instance_query = process_instance_query.filter( - ProcessInstanceModel.status.in_(["complete", "error", "terminated"]) # type: ignore - ) - # process_instance_query = process_instance_query.join(UserModel, UserModel.id == ProcessInstanceModel.process_initiator_id) - # process_instance_query = 
process_instance_query.add_columns(UserModel.username) - # search for process_instance.UserModel.username in this file for more details about why adding columns is annoying. - - process_instance_query = process_instance_query.filter( - ProcessInstanceModel.process_initiator_id != g.user.id - ) - process_instance_query = process_instance_query.join( - SpiffStepDetailsModel, - ProcessInstanceModel.id == SpiffStepDetailsModel.process_instance_id, - ) - process_instance_query = process_instance_query.join( - SpiffLoggingModel, - ProcessInstanceModel.id == SpiffLoggingModel.process_instance_id, - ) - process_instance_query = process_instance_query.filter( - SpiffLoggingModel.message.contains("COMPLETED") # type: ignore - ) - process_instance_query = process_instance_query.filter( - SpiffLoggingModel.spiff_step == SpiffStepDetailsModel.spiff_step - ) - process_instance_query = process_instance_query.filter( - SpiffStepDetailsModel.completed_by_user_id == g.user.id - ) - - if report_filter.with_tasks_completed_by_my_group is True: - process_instance_query = process_instance_query.filter( - ProcessInstanceModel.status.in_(["complete", "error", "terminated"]) # type: ignore - ) - process_instance_query = process_instance_query.join( - SpiffStepDetailsModel, - ProcessInstanceModel.id == SpiffStepDetailsModel.process_instance_id, - ) - process_instance_query = process_instance_query.join( - SpiffLoggingModel, - ProcessInstanceModel.id == SpiffLoggingModel.process_instance_id, - ) - process_instance_query = process_instance_query.filter( - SpiffLoggingModel.message.contains("COMPLETED") # type: ignore - ) - process_instance_query = process_instance_query.filter( - SpiffLoggingModel.spiff_step == SpiffStepDetailsModel.spiff_step - ) - process_instance_query = process_instance_query.join( - GroupModel, - GroupModel.id == SpiffStepDetailsModel.lane_assignment_id, - ) - process_instance_query = process_instance_query.join( - UserGroupAssignmentModel, - 
UserGroupAssignmentModel.group_id == GroupModel.id, - ) - process_instance_query = process_instance_query.filter( - UserGroupAssignmentModel.user_id == g.user.id - ) - - instance_metadata_aliases = {} - stock_columns = ProcessInstanceReportService.get_column_names_for_model( - ProcessInstanceModel - ) - for column in process_instance_report.report_metadata["columns"]: - if column["accessor"] in stock_columns: - continue - instance_metadata_alias = aliased(ProcessInstanceMetadataModel) - instance_metadata_aliases[column["accessor"]] = instance_metadata_alias - - filter_for_column = None - if "filter_by" in process_instance_report.report_metadata: - filter_for_column = next( - ( - f - for f in process_instance_report.report_metadata["filter_by"] - if f["field_name"] == column["accessor"] - ), - None, - ) - isouter = True - conditions = [ - ProcessInstanceModel.id == instance_metadata_alias.process_instance_id, - instance_metadata_alias.key == column["accessor"], - ] - if filter_for_column: - isouter = False - conditions.append( - instance_metadata_alias.value == filter_for_column["field_value"] - ) - process_instance_query = process_instance_query.join( - instance_metadata_alias, and_(*conditions), isouter=isouter - ).add_columns(func.max(instance_metadata_alias.value).label(column["accessor"])) - - order_by_query_array = [] - order_by_array = process_instance_report.report_metadata["order_by"] - if len(order_by_array) < 1: - order_by_array = ProcessInstanceReportModel.default_order_by() - for order_by_option in order_by_array: - attribute = re.sub("^-", "", order_by_option) - if attribute in stock_columns: - if order_by_option.startswith("-"): - order_by_query_array.append( - getattr(ProcessInstanceModel, attribute).desc() - ) - else: - order_by_query_array.append( - getattr(ProcessInstanceModel, attribute).asc() - ) - elif attribute in instance_metadata_aliases: - if order_by_option.startswith("-"): - order_by_query_array.append( - 
instance_metadata_aliases[attribute].value.desc() - ) - else: - order_by_query_array.append( - instance_metadata_aliases[attribute].value.asc() - ) - - process_instances = ( - process_instance_query.group_by(ProcessInstanceModel.id) - .add_columns(ProcessInstanceModel.id) - .order_by(*order_by_query_array) - .paginate(page=page, per_page=per_page, error_out=False) - ) - - results = ProcessInstanceReportService.add_metadata_columns_to_process_instance( - process_instances.items, process_instance_report.report_metadata["columns"] - ) - - response_json = { - "report": process_instance_report, - "results": results, - "filters": report_filter.to_dict(), - "pagination": { - "count": len(results), - "total": process_instances.total, - "pages": process_instances.pages, - }, - } - - return make_response(jsonify(response_json), 200) - - -def process_instance_report_column_list() -> flask.wrappers.Response: - """Process_instance_report_column_list.""" - table_columns = ProcessInstanceReportService.builtin_column_options() - columns_for_metadata = ( - db.session.query(ProcessInstanceMetadataModel.key) - .order_by(ProcessInstanceMetadataModel.key) - .distinct() # type: ignore - .all() - ) - columns_for_metadata_strings = [ - {"Header": i[0], "accessor": i[0], "filterable": True} - for i in columns_for_metadata - ] - return make_response(jsonify(table_columns + columns_for_metadata_strings), 200) - - -def process_instance_show( - modified_process_model_identifier: str, process_instance_id: int -) -> flask.wrappers.Response: - """Create_process_instance.""" - process_model_identifier = modified_process_model_identifier.replace(":", "/") - process_instance = find_process_instance_by_id_or_raise(process_instance_id) - current_version_control_revision = GitService.get_current_revision() - process_model = get_process_model(process_model_identifier) - - if process_model.primary_file_name: - if ( - process_instance.bpmn_version_control_identifier - == current_version_control_revision - 
): - bpmn_xml_file_contents = SpecFileService.get_data( - process_model, process_model.primary_file_name - ) - else: - bpmn_xml_file_contents = GitService.get_instance_file_contents_for_revision( - process_model, process_instance.bpmn_version_control_identifier - ) - process_instance.bpmn_xml_file_contents = bpmn_xml_file_contents - - return make_response(jsonify(process_instance), 200) - - -def process_instance_delete( - process_instance_id: int, modified_process_model_identifier: str -) -> flask.wrappers.Response: - """Create_process_instance.""" - process_instance = find_process_instance_by_id_or_raise(process_instance_id) - - # (Pdb) db.session.delete - # > - db.session.query(SpiffLoggingModel).filter_by( - process_instance_id=process_instance.id - ).delete() - db.session.query(SpiffStepDetailsModel).filter_by( - process_instance_id=process_instance.id - ).delete() - db.session.delete(process_instance) - db.session.commit() - return Response(json.dumps({"ok": True}), status=200, mimetype="application/json") - - -def process_instance_report_list( - page: int = 1, per_page: int = 100 -) -> flask.wrappers.Response: - """Process_instance_report_list.""" - process_instance_reports = ProcessInstanceReportModel.query.filter_by( - created_by_id=g.user.id, - ).all() - - return make_response(jsonify(process_instance_reports), 200) - - -def process_instance_report_create(body: Dict[str, Any]) -> flask.wrappers.Response: - """Process_instance_report_create.""" - process_instance_report = ProcessInstanceReportModel.create_report( - identifier=body["identifier"], - user=g.user, - report_metadata=body["report_metadata"], - ) - - return make_response(jsonify(process_instance_report), 201) - - -def process_instance_report_update( - report_id: int, - body: Dict[str, Any], -) -> flask.wrappers.Response: - """Process_instance_report_create.""" - process_instance_report = ProcessInstanceReportModel.query.filter_by( - id=report_id, - created_by_id=g.user.id, - ).first() - if 
process_instance_report is None: - raise ApiError( - error_code="unknown_process_instance_report", - message="Unknown process instance report", - status_code=404, - ) - - process_instance_report.report_metadata = body["report_metadata"] - db.session.commit() - - return make_response(jsonify(process_instance_report), 201) - - -def process_instance_report_delete( - report_id: int, -) -> flask.wrappers.Response: - """Process_instance_report_create.""" - process_instance_report = ProcessInstanceReportModel.query.filter_by( - id=report_id, - created_by_id=g.user.id, - ).first() - if process_instance_report is None: - raise ApiError( - error_code="unknown_process_instance_report", - message="Unknown process instance report", - status_code=404, - ) - - db.session.delete(process_instance_report) - db.session.commit() - - return Response(json.dumps({"ok": True}), status=200, mimetype="application/json") - - -def service_task_list() -> flask.wrappers.Response: - """Service_task_list.""" - available_connectors = ServiceTaskService.available_connectors() - return Response( - json.dumps(available_connectors), status=200, mimetype="application/json" - ) - - -def authentication_list() -> flask.wrappers.Response: - """Authentication_list.""" - available_authentications = ServiceTaskService.authentication_list() - response_json = { - "results": available_authentications, - "connector_proxy_base_url": current_app.config["CONNECTOR_PROXY_URL"], - "redirect_url": f"{current_app.config['SPIFFWORKFLOW_BACKEND_URL']}/v1.0/authentication_callback", - } - - return Response(json.dumps(response_json), status=200, mimetype="application/json") - - -def authentication_callback( - service: str, - auth_method: str, -) -> werkzeug.wrappers.Response: - """Authentication_callback.""" - verify_token(request.args.get("token"), force_run=True) - response = request.args["response"] - SecretService().update_secret( - f"{service}/{auth_method}", response, g.user.id, create_if_not_exists=True - ) - return 
redirect( - f"{current_app.config['SPIFFWORKFLOW_FRONTEND_URL']}/admin/configuration" - ) - - -def process_instance_report_show( - report_id: int, - page: int = 1, - per_page: int = 100, -) -> flask.wrappers.Response: - """Process_instance_report_show.""" - process_instances = ProcessInstanceModel.query.order_by( - ProcessInstanceModel.start_in_seconds.desc(), ProcessInstanceModel.id.desc() # type: ignore - ).paginate(page=page, per_page=per_page, error_out=False) - - process_instance_report = ProcessInstanceReportModel.query.filter_by( - id=report_id, - created_by_id=g.user.id, - ).first() - if process_instance_report is None: - raise ApiError( - error_code="unknown_process_instance_report", - message="Unknown process instance report", - status_code=404, - ) - - substitution_variables = request.args.to_dict() - result_dict = process_instance_report.generate_report( - process_instances.items, substitution_variables - ) - - # update this if we go back to a database query instead of filtering in memory - result_dict["pagination"] = { - "count": len(result_dict["results"]), - "total": len(result_dict["results"]), - "pages": 1, - } - - return Response(json.dumps(result_dict), status=200, mimetype="application/json") - - -# TODO: see comment for before_request -# @process_api_blueprint.route("/v1.0/tasks", methods=["GET"]) -def task_list_my_tasks(page: int = 1, per_page: int = 100) -> flask.wrappers.Response: - """Task_list_my_tasks.""" - principal = find_principal_or_raise() - active_tasks = ( - ActiveTaskModel.query.order_by(desc(ActiveTaskModel.id)) # type: ignore - .join(ProcessInstanceModel) - .join(ActiveTaskUserModel) - .filter_by(user_id=principal.user_id) - # just need this add_columns to add the process_model_identifier. Then add everything back that was removed. 
- .add_columns( - ProcessInstanceModel.process_model_identifier, - ProcessInstanceModel.process_model_display_name, - ProcessInstanceModel.status, - ActiveTaskModel.task_name, - ActiveTaskModel.task_title, - ActiveTaskModel.task_type, - ActiveTaskModel.task_status, - ActiveTaskModel.task_id, - ActiveTaskModel.id, - ActiveTaskModel.process_model_display_name, - ActiveTaskModel.process_instance_id, - ) - .paginate(page=page, per_page=per_page, error_out=False) - ) - tasks = [ActiveTaskModel.to_task(active_task) for active_task in active_tasks.items] - - response_json = { - "results": tasks, - "pagination": { - "count": len(active_tasks.items), - "total": active_tasks.total, - "pages": active_tasks.pages, - }, - } - - return make_response(jsonify(response_json), 200) - - -def task_list_for_my_open_processes( - page: int = 1, per_page: int = 100 -) -> flask.wrappers.Response: - """Task_list_for_my_open_processes.""" - return get_tasks(page=page, per_page=per_page) - - -def task_list_for_me(page: int = 1, per_page: int = 100) -> flask.wrappers.Response: - """Task_list_for_processes_started_by_others.""" - return get_tasks( - processes_started_by_user=False, - has_lane_assignment_id=False, - page=page, - per_page=per_page, - ) - - -def task_list_for_my_groups( - page: int = 1, per_page: int = 100 -) -> flask.wrappers.Response: - """Task_list_for_processes_started_by_others.""" - return get_tasks(processes_started_by_user=False, page=page, per_page=per_page) - - -def get_tasks( - processes_started_by_user: bool = True, - has_lane_assignment_id: bool = True, - page: int = 1, - per_page: int = 100, -) -> flask.wrappers.Response: - """Get_tasks.""" - user_id = g.user.id - - # use distinct to ensure we only get one row per active task otherwise - # we can get back multiple for the same active task row which throws off - # pagination later on - # https://stackoverflow.com/q/34582014/6090676 - active_tasks_query = ( - ActiveTaskModel.query.distinct() - .outerjoin(GroupModel, 
GroupModel.id == ActiveTaskModel.lane_assignment_id) - .join(ProcessInstanceModel) - .join(UserModel, UserModel.id == ProcessInstanceModel.process_initiator_id) - ) - - if processes_started_by_user: - active_tasks_query = active_tasks_query.filter( - ProcessInstanceModel.process_initiator_id == user_id - ).outerjoin( - ActiveTaskUserModel, - and_( - ActiveTaskUserModel.user_id == user_id, - ActiveTaskModel.id == ActiveTaskUserModel.active_task_id, - ), - ) - else: - active_tasks_query = active_tasks_query.filter( - ProcessInstanceModel.process_initiator_id != user_id - ).join( - ActiveTaskUserModel, - and_( - ActiveTaskUserModel.user_id == user_id, - ActiveTaskModel.id == ActiveTaskUserModel.active_task_id, - ), - ) - if has_lane_assignment_id: - active_tasks_query = active_tasks_query.filter( - ActiveTaskModel.lane_assignment_id.is_not(None) # type: ignore - ) - else: - active_tasks_query = active_tasks_query.filter(ActiveTaskModel.lane_assignment_id.is_(None)) # type: ignore - - active_tasks = active_tasks_query.add_columns( - ProcessInstanceModel.process_model_identifier, - ProcessInstanceModel.status.label("process_instance_status"), # type: ignore - ProcessInstanceModel.updated_at_in_seconds, - ProcessInstanceModel.created_at_in_seconds, - UserModel.username, - GroupModel.identifier.label("group_identifier"), - ActiveTaskModel.task_name, - ActiveTaskModel.task_title, - ActiveTaskModel.process_model_display_name, - ActiveTaskModel.process_instance_id, - ActiveTaskUserModel.user_id.label("current_user_is_potential_owner"), - ).paginate(page=page, per_page=per_page, error_out=False) - - response_json = { - "results": active_tasks.items, - "pagination": { - "count": len(active_tasks.items), - "total": active_tasks.total, - "pages": active_tasks.pages, - }, - } - return make_response(jsonify(response_json), 200) - - -def process_instance_task_list( +def send_bpmn_event( modified_process_model_identifier: str, - process_instance_id: int, - all_tasks: bool = False, - 
spiff_step: int = 0, -) -> flask.wrappers.Response: - """Process_instance_task_list.""" - process_instance = find_process_instance_by_id_or_raise(process_instance_id) - - if spiff_step > 0: - step_detail = ( - db.session.query(SpiffStepDetailsModel) - .filter( - SpiffStepDetailsModel.process_instance_id == process_instance.id, - SpiffStepDetailsModel.spiff_step == spiff_step, - ) - .first() - ) - if step_detail is not None and process_instance.bpmn_json is not None: - bpmn_json = json.loads(process_instance.bpmn_json) - bpmn_json["tasks"] = step_detail.task_json - process_instance.bpmn_json = json.dumps(bpmn_json) - - processor = ProcessInstanceProcessor(process_instance) - - spiff_tasks = None - if all_tasks: - spiff_tasks = processor.bpmn_process_instance.get_tasks(TaskState.ANY_MASK) - else: - spiff_tasks = processor.get_all_user_tasks() - - tasks = [] - for spiff_task in spiff_tasks: - task = ProcessInstanceService.spiff_task_to_api_task(spiff_task) - task.data = spiff_task.data - tasks.append(task) - - return make_response(jsonify(tasks), 200) - - -def task_show(process_instance_id: int, task_id: str) -> flask.wrappers.Response: - """Task_show.""" - process_instance = find_process_instance_by_id_or_raise(process_instance_id) - - if process_instance.status == ProcessInstanceStatus.suspended.value: - raise ApiError( - error_code="error_suspended", - message="The process instance is suspended", - status_code=400, - ) - - process_model = get_process_model( - process_instance.process_model_identifier, - ) - - form_schema_file_name = "" - form_ui_schema_file_name = "" - spiff_task = get_spiff_task_from_process_instance(task_id, process_instance) - extensions = spiff_task.task_spec.extensions - - if "properties" in extensions: - properties = extensions["properties"] - if "formJsonSchemaFilename" in properties: - form_schema_file_name = properties["formJsonSchemaFilename"] - if "formUiSchemaFilename" in properties: - form_ui_schema_file_name = 
properties["formUiSchemaFilename"] - task = ProcessInstanceService.spiff_task_to_api_task(spiff_task) - task.data = spiff_task.data - task.process_model_display_name = process_model.display_name - task.process_model_identifier = process_model.id - process_model_with_form = process_model - - if task.type == "User Task": - if not form_schema_file_name: - raise ( - ApiError( - error_code="missing_form_file", - message=f"Cannot find a form file for process_instance_id: {process_instance_id}, task_id: {task_id}", - status_code=400, - ) - ) - - form_contents = prepare_form_data( - form_schema_file_name, - task.data, - process_model_with_form, - ) - - try: - # form_contents is a str - form_dict = json.loads(form_contents) - except Exception as exception: - raise ( - ApiError( - error_code="error_loading_form", - message=f"Could not load form schema from: {form_schema_file_name}. Error was: {str(exception)}", - status_code=400, - ) - ) from exception - - if task.data: - _update_form_schema_with_task_data_as_needed(form_dict, task.data) - - if form_contents: - task.form_schema = form_dict - - if form_ui_schema_file_name: - ui_form_contents = prepare_form_data( - form_ui_schema_file_name, - task.data, - process_model_with_form, - ) - if ui_form_contents: - task.form_ui_schema = ui_form_contents - - if task.properties and task.data and "instructionsForEndUser" in task.properties: - if task.properties["instructionsForEndUser"]: - task.properties["instructionsForEndUser"] = render_jinja_template( - task.properties["instructionsForEndUser"], task.data - ) - return make_response(jsonify(task), 200) - - -def task_submit( - process_instance_id: int, - task_id: str, - body: Dict[str, Any], - terminate_loop: bool = False, -) -> flask.wrappers.Response: - """Task_submit_user_data.""" - principal = find_principal_or_raise() - process_instance = find_process_instance_by_id_or_raise(process_instance_id) - - processor = ProcessInstanceProcessor(process_instance) - spiff_task = 
get_spiff_task_from_process_instance( - task_id, process_instance, processor=processor - ) - AuthorizationService.assert_user_can_complete_spiff_task( - process_instance.id, spiff_task, principal.user - ) - - if spiff_task.state != TaskState.READY: - raise ( - ApiError( - error_code="invalid_state", - message="You may not update a task unless it is in the READY state.", - status_code=400, - ) - ) - - if terminate_loop and spiff_task.is_looping(): - spiff_task.terminate_loop() - - active_task = ActiveTaskModel.query.filter_by( - process_instance_id=process_instance_id, task_id=task_id + process_instance_id: str, + body: Dict, +) -> Response: + """Send a bpmn event to a workflow.""" + process_instance = ProcessInstanceModel.query.filter( + ProcessInstanceModel.id == int(process_instance_id) ).first() - if active_task is None: - raise ( - ApiError( - error_code="no_active_task", - message="Cannot find an active task with task id '{task_id}' for process instance {process_instance_id}.", - status_code=500, - ) - ) - - ProcessInstanceService.complete_form_task( - processor=processor, - spiff_task=spiff_task, - data=body, - user=g.user, - active_task=active_task, - ) - - # If we need to update all tasks, then get the next ready task and if it a multi-instance with the same - # task spec, complete that form as well. 
- # if update_all: - # last_index = spiff_task.task_info()["mi_index"] - # next_task = processor.next_task() - # while next_task and next_task.task_info()["mi_index"] > last_index: - # __update_task(processor, next_task, form_data, user) - # last_index = next_task.task_info()["mi_index"] - # next_task = processor.next_task() - - next_active_task_assigned_to_me = ( - ActiveTaskModel.query.filter_by(process_instance_id=process_instance_id) - .order_by(asc(ActiveTaskModel.id)) # type: ignore - .join(ActiveTaskUserModel) - .filter_by(user_id=principal.user_id) - .first() - ) - if next_active_task_assigned_to_me: - return make_response( - jsonify(ActiveTaskModel.to_task(next_active_task_assigned_to_me)), 200 - ) - - return Response(json.dumps({"ok": True}), status=202, mimetype="application/json") - - -def script_unit_test_create( - process_group_id: str, process_model_id: str, body: Dict[str, Union[str, bool, int]] -) -> flask.wrappers.Response: - """Script_unit_test_create.""" - bpmn_task_identifier = _get_required_parameter_or_raise( - "bpmn_task_identifier", body - ) - input_json = _get_required_parameter_or_raise("input_json", body) - expected_output_json = _get_required_parameter_or_raise( - "expected_output_json", body - ) - - process_model_identifier = f"{process_group_id}/{process_model_id}" - process_model = get_process_model(process_model_identifier) - file = SpecFileService.get_files(process_model, process_model.primary_file_name)[0] - if file is None: - raise ApiError( - error_code="cannot_find_file", - message=f"Could not find the primary bpmn file for process_model: {process_model.id}", - status_code=404, - ) - - # TODO: move this to an xml service or something - file_contents = SpecFileService.get_data(process_model, file.name) - bpmn_etree_element = etree.fromstring(file_contents) - - nsmap = bpmn_etree_element.nsmap - spiff_element_maker = ElementMaker( - namespace="http://spiffworkflow.org/bpmn/schema/1.0/core", nsmap=nsmap - ) - - 
script_task_elements = bpmn_etree_element.xpath( - f"//bpmn:scriptTask[@id='{bpmn_task_identifier}']", - namespaces={"bpmn": "http://www.omg.org/spec/BPMN/20100524/MODEL"}, - ) - if len(script_task_elements) == 0: - raise ApiError( - error_code="missing_script_task", - message=f"Cannot find a script task with id: {bpmn_task_identifier}", - status_code=404, - ) - script_task_element = script_task_elements[0] - - extension_elements = None - extension_elements_array = script_task_element.xpath( - "//bpmn:extensionElements", - namespaces={"bpmn": "http://www.omg.org/spec/BPMN/20100524/MODEL"}, - ) - if len(extension_elements_array) == 0: - bpmn_element_maker = ElementMaker( - namespace="http://www.omg.org/spec/BPMN/20100524/MODEL", nsmap=nsmap - ) - extension_elements = bpmn_element_maker("extensionElements") - script_task_element.append(extension_elements) + if process_instance: + processor = ProcessInstanceProcessor(process_instance) + processor.send_bpmn_event(body) else: - extension_elements = extension_elements_array[0] - - unit_test_elements = None - unit_test_elements_array = extension_elements.xpath( - "//spiffworkflow:unitTests", - namespaces={"spiffworkflow": "http://spiffworkflow.org/bpmn/schema/1.0/core"}, - ) - if len(unit_test_elements_array) == 0: - unit_test_elements = spiff_element_maker("unitTests") - extension_elements.append(unit_test_elements) - else: - unit_test_elements = unit_test_elements_array[0] - - fuzz = "".join( - random.choice(string.ascii_uppercase + string.digits) # noqa: S311 - for _ in range(7) - ) - unit_test_id = f"unit_test_{fuzz}" - - input_json_element = spiff_element_maker("inputJson", json.dumps(input_json)) - expected_output_json_element = spiff_element_maker( - "expectedOutputJson", json.dumps(expected_output_json) - ) - unit_test_element = spiff_element_maker("unitTest", id=unit_test_id) - unit_test_element.append(input_json_element) - unit_test_element.append(expected_output_json_element) - 
unit_test_elements.append(unit_test_element) - SpecFileService.update_file( - process_model, file.name, etree.tostring(bpmn_etree_element) - ) - - return Response(json.dumps({"ok": True}), status=202, mimetype="application/json") - - -def script_unit_test_run( - process_group_id: str, process_model_id: str, body: Dict[str, Union[str, bool, int]] -) -> flask.wrappers.Response: - """Script_unit_test_run.""" - # FIXME: We should probably clear this somewhere else but this works - current_app.config["THREAD_LOCAL_DATA"].process_instance_id = None - current_app.config["THREAD_LOCAL_DATA"].spiff_step = None - - python_script = _get_required_parameter_or_raise("python_script", body) - input_json = _get_required_parameter_or_raise("input_json", body) - expected_output_json = _get_required_parameter_or_raise( - "expected_output_json", body - ) - - result = ScriptUnitTestRunner.run_with_script_and_pre_post_contexts( - python_script, input_json, expected_output_json - ) - return make_response(jsonify(result), 200) - - -def get_file_from_request() -> Any: - """Get_file_from_request.""" - request_file = connexion.request.files.get("file") - if not request_file: raise ApiError( - error_code="no_file_given", - message="Given request does not contain a file", - status_code=400, + error_code="send_bpmn_event_error", + message=f"Could not send event to Instance: {process_instance_id}", ) - return request_file + return Response( + json.dumps(ProcessInstanceModelSchema().dump(process_instance)), + status=200, + mimetype="application/json", + ) -def get_process_model(process_model_id: str) -> ProcessModelInfo: - """Get_process_model.""" - process_model = None - try: - process_model = ProcessModelService.get_process_model(process_model_id) - except ProcessEntityNotFoundError as exception: - raise ( - ApiError( - error_code="process_model_cannot_be_found", - message=f"Process model cannot be found: {process_model_id}", - status_code=400, - ) - ) from exception - - return process_model - 
- -def find_principal_or_raise() -> PrincipalModel: - """Find_principal_or_raise.""" - principal = PrincipalModel.query.filter_by(user_id=g.user.id).first() - if principal is None: - raise ( - ApiError( - error_code="principal_not_found", - message=f"Principal not found from user id: {g.user.id}", - status_code=400, - ) +def manual_complete_task( + modified_process_model_identifier: str, + process_instance_id: str, + task_id: str, + body: Dict, +) -> Response: + """Mark a task complete without executing it.""" + execute = body.get("execute", True) + process_instance = ProcessInstanceModel.query.filter( + ProcessInstanceModel.id == int(process_instance_id) + ).first() + if process_instance: + processor = ProcessInstanceProcessor(process_instance) + processor.manual_complete_task(task_id, execute) + else: + raise ApiError( + error_code="complete_task", + message=( + f"Could not complete Task {task_id} in Instance {process_instance_id}" + ), ) - return principal # type: ignore + return Response( + json.dumps(ProcessInstanceModelSchema().dump(process_instance)), + status=200, + mimetype="application/json", + ) -def find_process_instance_by_id_or_raise( +def _commit_and_push_to_git(message: str) -> None: + """Commit_and_push_to_git.""" + if current_app.config["GIT_COMMIT_ON_SAVE"]: + git_output = GitService.commit(message=message) + current_app.logger.info(f"git output: {git_output}") + else: + current_app.logger.info("Git commit on save is disabled") + + +def _un_modify_modified_process_model_id(modified_process_model_identifier: str) -> str: + """Un_modify_modified_process_model_id.""" + return modified_process_model_identifier.replace(":", "/") + + +def _find_process_instance_by_id_or_raise( process_instance_id: int, ) -> ProcessInstanceModel: """Find_process_instance_by_id_or_raise.""" @@ -1771,245 +297,34 @@ def find_process_instance_by_id_or_raise( return process_instance # type: ignore -def get_value_from_array_with_index(array: list, index: int) -> Any: - 
"""Get_value_from_array_with_index.""" - if index < 0: - return None - - if index >= len(array): - return None - - return array[index] - - -def prepare_form_data( - form_file: str, task_data: Union[dict, None], process_model: ProcessModelInfo -) -> str: - """Prepare_form_data.""" - if task_data is None: - return "" - - file_contents = SpecFileService.get_data(process_model, form_file).decode("utf-8") - return render_jinja_template(file_contents, task_data) - - -def render_jinja_template(unprocessed_template: str, data: dict[str, Any]) -> str: - """Render_jinja_template.""" - jinja_environment = jinja2.Environment( - autoescape=True, lstrip_blocks=True, trim_blocks=True - ) - template = jinja_environment.from_string(unprocessed_template) - return template.render(**data) - - -def get_spiff_task_from_process_instance( - task_id: str, - process_instance: ProcessInstanceModel, - processor: Union[ProcessInstanceProcessor, None] = None, -) -> SpiffTask: - """Get_spiff_task_from_process_instance.""" - if processor is None: - processor = ProcessInstanceProcessor(process_instance) - task_uuid = uuid.UUID(task_id) - spiff_task = processor.bpmn_process_instance.get_task(task_uuid) - - if spiff_task is None: +# process_model_id uses forward slashes on all OSes +# this seems to return an object where process_model.id has backslashes on windows +def _get_process_model(process_model_id: str) -> ProcessModelInfo: + """Get_process_model.""" + process_model = None + try: + process_model = ProcessModelService.get_process_model(process_model_id) + except ProcessEntityNotFoundError as exception: raise ( ApiError( - error_code="empty_task", - message="Processor failed to obtain task.", - status_code=500, + error_code="process_model_cannot_be_found", + message=f"Process model cannot be found: {process_model_id}", + status_code=400, ) - ) - return spiff_task + ) from exception + + return process_model -# sample body: -# {'ref': 'refs/heads/main', 'repository': {'name': 
'sample-process-models', -# 'full_name': 'sartography/sample-process-models', 'private': False .... }} -# test with: ngrok http 7000 -# where 7000 is the port the app is running on locally -def github_webhook_receive(body: dict) -> Response: - """Github_webhook_receive.""" - print(f"body: {body}") - return Response(json.dumps({"ok": True}), status=200, mimetype="application/json") - - -# -# Methods for secrets CRUD - maybe move somewhere else: -# - - -def get_secret(key: str) -> Optional[str]: - """Get_secret.""" - return SecretService.get_secret(key) - - -def secret_list( - page: int = 1, - per_page: int = 100, -) -> Response: - """Secret_list.""" - secrets = ( - SecretModel.query.order_by(SecretModel.key) - .join(UserModel) - .add_columns( - UserModel.username, - ) - .paginate(page=page, per_page=per_page, error_out=False) - ) - response_json = { - "results": secrets.items, - "pagination": { - "count": len(secrets.items), - "total": secrets.total, - "pages": secrets.pages, - }, - } - return make_response(jsonify(response_json), 200) - - -def add_secret(body: Dict) -> Response: - """Add secret.""" - secret_model = SecretService().add_secret(body["key"], body["value"], g.user.id) - assert secret_model # noqa: S101 - return Response( - json.dumps(SecretModelSchema().dump(secret_model)), - status=201, - mimetype="application/json", - ) - - -def update_secret(key: str, body: dict) -> Response: - """Update secret.""" - SecretService().update_secret(key, body["value"], g.user.id) - return Response(json.dumps({"ok": True}), status=200, mimetype="application/json") - - -def delete_secret(key: str) -> Response: - """Delete secret.""" - current_user = UserService.current_user() - SecretService.delete_secret(key, current_user.id) - return Response(json.dumps({"ok": True}), status=200, mimetype="application/json") - - -def _get_required_parameter_or_raise(parameter: str, post_body: dict[str, Any]) -> Any: - """Get_required_parameter_or_raise.""" - return_value = None - if 
parameter in post_body: - return_value = post_body[parameter] - - if return_value is None or return_value == "": +def _find_principal_or_raise() -> PrincipalModel: + """Find_principal_or_raise.""" + principal = PrincipalModel.query.filter_by(user_id=g.user.id).first() + if principal is None: raise ( ApiError( - error_code="missing_required_parameter", - message=f"Parameter is missing from json request body: {parameter}", + error_code="principal_not_found", + message=f"Principal not found from user id: {g.user.id}", status_code=400, ) ) - - return return_value - - -# originally from: https://bitcoden.com/answers/python-nested-dictionary-update-value-where-any-nested-key-matches -def _update_form_schema_with_task_data_as_needed( - in_dict: dict, task_data: dict -) -> None: - """Update_nested.""" - for k, value in in_dict.items(): - if "anyOf" == k: - # value will look like the array on the right of "anyOf": ["options_from_task_data_var:awesome_options"] - if isinstance(value, list): - if len(value) == 1: - first_element_in_value_list = value[0] - if isinstance(first_element_in_value_list, str): - if first_element_in_value_list.startswith( - "options_from_task_data_var:" - ): - task_data_var = first_element_in_value_list.replace( - "options_from_task_data_var:", "" - ) - - if task_data_var not in task_data: - raise ( - ApiError( - error_code="missing_task_data_var", - message=f"Task data is missing variable: {task_data_var}", - status_code=500, - ) - ) - - select_options_from_task_data = task_data.get(task_data_var) - if isinstance(select_options_from_task_data, list): - if all( - "value" in d and "label" in d - for d in select_options_from_task_data - ): - - def map_function( - task_data_select_option: TaskDataSelectOption, - ) -> ReactJsonSchemaSelectOption: - """Map_function.""" - return { - "type": "string", - "enum": [task_data_select_option["value"]], - "title": task_data_select_option["label"], - } - - options_for_react_json_schema_form = list( - 
map(map_function, select_options_from_task_data) - ) - - in_dict[k] = options_for_react_json_schema_form - elif isinstance(value, dict): - _update_form_schema_with_task_data_as_needed(value, task_data) - elif isinstance(value, list): - for o in value: - if isinstance(o, dict): - _update_form_schema_with_task_data_as_needed(o, task_data) - - -def update_task_data( - process_instance_id: str, - modified_process_model_identifier: str, - task_id: str, - body: Dict, -) -> Response: - """Update task data.""" - process_instance = ProcessInstanceModel.query.filter( - ProcessInstanceModel.id == int(process_instance_id) - ).first() - if process_instance: - process_instance_bpmn_json_dict = json.loads(process_instance.bpmn_json) - if "new_task_data" in body: - new_task_data_str: str = body["new_task_data"] - new_task_data_dict = json.loads(new_task_data_str) - if task_id in process_instance_bpmn_json_dict["tasks"]: - process_instance_bpmn_json_dict["tasks"][task_id][ - "data" - ] = new_task_data_dict - process_instance.bpmn_json = json.dumps(process_instance_bpmn_json_dict) - db.session.add(process_instance) - try: - db.session.commit() - except Exception as e: - db.session.rollback() - raise ApiError( - error_code="update_task_data_error", - message=f"Could not update the Instance. 
"""APIs for dealing with process groups, process models, and process instances."""
import json
from typing import Any
from typing import Optional

import flask.wrappers
from flask import g
from flask import jsonify
from flask import make_response
from flask.wrappers import Response
from flask_bpmn.api.api_error import ApiError

from spiffworkflow_backend.exceptions.process_entity_not_found_error import (
    ProcessEntityNotFoundError,
)
from spiffworkflow_backend.models.process_group import ProcessGroup
from spiffworkflow_backend.models.process_group import ProcessGroupSchema
from spiffworkflow_backend.routes.process_api_blueprint import _commit_and_push_to_git
from spiffworkflow_backend.routes.process_api_blueprint import (
    _un_modify_modified_process_model_id,
)
from spiffworkflow_backend.services.process_model_service import ProcessModelService


def process_group_create(body: dict) -> flask.wrappers.Response:
    """Create a new process group from the posted attributes."""
    group = ProcessGroup(**body)
    ProcessModelService.add_process_group(group)
    _commit_and_push_to_git(f"User: {g.user.username} added process group {group.id}")
    return make_response(jsonify(group), 201)


def process_group_delete(modified_process_group_id: str) -> flask.wrappers.Response:
    """Delete the process group identified by the modified (colon-delimited) id."""
    group_id = _un_modify_modified_process_model_id(modified_process_group_id)
    ProcessModelService().process_group_delete(group_id)
    _commit_and_push_to_git(f"User: {g.user.username} deleted process group {group_id}")
    return Response(json.dumps({"ok": True}), status=200, mimetype="application/json")


def process_group_update(
    modified_process_group_id: str, body: dict
) -> flask.wrappers.Response:
    """Update an existing process group.

    Only display_name and description may be changed through this endpoint;
    any other posted attributes are ignored.
    """
    allowed_keys = ("display_name", "description")
    attributes = {key: body[key] for key in allowed_keys if key in body}

    group_id = _un_modify_modified_process_model_id(modified_process_group_id)
    group = ProcessGroup(id=group_id, **attributes)
    ProcessModelService.update_process_group(group)
    _commit_and_push_to_git(f"User: {g.user.username} updated process group {group_id}")
    return make_response(jsonify(group), 200)


def process_group_list(
    process_group_identifier: Optional[str] = None, page: int = 1, per_page: int = 100
) -> flask.wrappers.Response:
    """Return one page of process groups, optionally scoped to a parent group."""
    if process_group_identifier is None:
        process_groups = ProcessModelService.get_process_groups()
    else:
        process_groups = ProcessModelService.get_process_groups(
            process_group_identifier
        )
    batch = ProcessModelService().get_batch(
        items=process_groups, page=page, per_page=per_page
    )
    # ceiling division: a partial trailing page still counts as a page
    full_pages, remainder = divmod(len(process_groups), per_page)
    pages = full_pages + 1 if remainder else full_pages

    response_json = {
        "results": ProcessGroupSchema(many=True).dump(batch),
        "pagination": {
            "count": len(batch),
            "total": len(process_groups),
            "pages": pages,
        },
    }
    return Response(json.dumps(response_json), status=200, mimetype="application/json")


def process_group_show(
    modified_process_group_id: str,
) -> Any:
    """Return a single process group, including its chain of parent groups."""
    group_id = _un_modify_modified_process_model_id(modified_process_group_id)
    try:
        group = ProcessModelService.get_process_group(group_id)
    except ProcessEntityNotFoundError as exception:
        raise ApiError(
            error_code="process_group_cannot_be_found",
            message=f"Process group cannot be found: {group_id}",
            status_code=400,
        ) from exception

    group.parent_groups = ProcessModelService.get_parent_group_array(group.id)
    return make_response(jsonify(group), 200)


def process_group_move(
    modified_process_group_identifier: str, new_location: str
) -> flask.wrappers.Response:
    """Relocate a process group underneath a new parent location."""
    original_id = _un_modify_modified_process_model_id(
        modified_process_group_identifier
    )
    new_group = ProcessModelService().process_group_move(original_id, new_location)
    _commit_and_push_to_git(
        f"User: {g.user.username} moved process group {original_id} to {new_group.id}"
    )
    return make_response(jsonify(new_group), 200)
"""APIs for dealing with process groups, process models, and process instances."""
import json
from typing import Any
from typing import Dict
from typing import Optional

import flask.wrappers
from flask import current_app
from flask import g
from flask import jsonify
from flask import make_response
from flask import request
from flask.wrappers import Response
from flask_bpmn.api.api_error import ApiError
from flask_bpmn.models.db import db
from SpiffWorkflow.task import TaskState  # type: ignore
from sqlalchemy import and_
from sqlalchemy import or_

from spiffworkflow_backend.models.human_task import HumanTaskModel
from spiffworkflow_backend.models.human_task_user import HumanTaskUserModel
from spiffworkflow_backend.models.process_instance import ProcessInstanceApiSchema
from spiffworkflow_backend.models.process_instance import (
    ProcessInstanceCannotBeDeletedError,
)
from spiffworkflow_backend.models.process_instance import ProcessInstanceModel
from spiffworkflow_backend.models.process_instance import ProcessInstanceModelSchema
from spiffworkflow_backend.models.process_instance_metadata import (
    ProcessInstanceMetadataModel,
)
from spiffworkflow_backend.models.process_instance_report import (
    ProcessInstanceReportModel,
)
from spiffworkflow_backend.models.spec_reference import SpecReferenceCache
from spiffworkflow_backend.models.spec_reference import SpecReferenceNotFoundError
from spiffworkflow_backend.models.spiff_logging import SpiffLoggingModel
from spiffworkflow_backend.models.spiff_step_details import SpiffStepDetailsModel
from spiffworkflow_backend.models.user import UserModel
from spiffworkflow_backend.routes.process_api_blueprint import (
    _find_process_instance_by_id_or_raise,
)
from spiffworkflow_backend.routes.process_api_blueprint import _get_process_model
from spiffworkflow_backend.routes.process_api_blueprint import (
    _un_modify_modified_process_model_id,
)
from spiffworkflow_backend.services.error_handling_service import ErrorHandlingService
from spiffworkflow_backend.services.git_service import GitCommandError
from spiffworkflow_backend.services.git_service import GitService
from spiffworkflow_backend.services.message_service import MessageService
from spiffworkflow_backend.services.process_instance_processor import (
    ProcessInstanceProcessor,
)
from spiffworkflow_backend.services.process_instance_report_service import (
    ProcessInstanceReportFilter,
)
from spiffworkflow_backend.services.process_instance_report_service import (
    ProcessInstanceReportService,
)
from spiffworkflow_backend.services.process_instance_service import (
    ProcessInstanceService,
)
from spiffworkflow_backend.services.process_model_service import ProcessModelService
from spiffworkflow_backend.services.spec_file_service import SpecFileService


def process_instance_create(
    modified_process_model_identifier: str,
) -> flask.wrappers.Response:
    """Create a new process instance of the given model for the current user.

    Returns the serialized instance with status 201.
    """
    process_model_identifier = _un_modify_modified_process_model_id(
        modified_process_model_identifier
    )
    process_instance = (
        ProcessInstanceService.create_process_instance_from_process_model_identifier(
            process_model_identifier, g.user
        )
    )
    return Response(
        json.dumps(ProcessInstanceModelSchema().dump(process_instance)),
        status=201,
        mimetype="application/json",
    )


def process_instance_run(
    modified_process_model_identifier: str,
    process_instance_id: int,
    do_engine_steps: bool = True,
) -> flask.wrappers.Response:
    """Run a not-yet-started process instance.

    Raises ApiError (400) if the instance has already started. Engine errors
    are reported via ErrorHandlingService before being re-raised. Returns the
    api representation of the instance, augmented with its task data.
    """
    process_instance = ProcessInstanceService().get_process_instance(
        process_instance_id
    )
    if process_instance.status != "not_started":
        raise ApiError(
            error_code="process_instance_not_runnable",
            message=(
                f"Process Instance ({process_instance.id}) is currently running or has"
                " already run."
            ),
            status_code=400,
        )

    processor = ProcessInstanceProcessor(process_instance)

    if do_engine_steps:
        try:
            processor.do_engine_steps(save=True)
        except ApiError as e:
            # api errors are already user-facing: report them and re-raise as-is
            ErrorHandlingService().handle_error(processor, e)
            raise e
        except Exception as e:
            ErrorHandlingService().handle_error(processor, e)
            # attach the failing task so the client can locate the problem
            task = processor.bpmn_process_instance.last_task
            raise ApiError.from_task(
                error_code="unknown_exception",
                message=f"An unknown error occurred. Original error: {e}",
                status_code=400,
                task=task,
            ) from e

    # when no background scheduler is running, deliver queued messages inline
    if not current_app.config["RUN_BACKGROUND_SCHEDULER"]:
        MessageService.process_message_instances()

    process_instance_api = ProcessInstanceService.processor_to_process_instance_api(
        processor
    )
    process_instance_data = processor.get_data()
    process_instance_metadata = ProcessInstanceApiSchema().dump(process_instance_api)
    process_instance_metadata["data"] = process_instance_data
    return Response(
        json.dumps(process_instance_metadata), status=200, mimetype="application/json"
    )


def process_instance_terminate(
    process_instance_id: int,
    modified_process_model_identifier: str,
) -> flask.wrappers.Response:
    """Terminate a process instance."""
    # docstring was a copy-paste of process_instance_run; fixed
    process_instance = ProcessInstanceService().get_process_instance(
        process_instance_id
    )
    processor = ProcessInstanceProcessor(process_instance)
    processor.terminate()
    return Response(json.dumps({"ok": True}), status=200, mimetype="application/json")


def process_instance_suspend(
    process_instance_id: int,
    modified_process_model_identifier: str,
) -> flask.wrappers.Response:
    """Suspend a process instance."""
    process_instance = ProcessInstanceService().get_process_instance(
        process_instance_id
    )
    processor = ProcessInstanceProcessor(process_instance)
    processor.suspend()
    return Response(json.dumps({"ok": True}), status=200, mimetype="application/json")


def process_instance_resume(
    process_instance_id: int,
    modified_process_model_identifier: str,
) -> flask.wrappers.Response:
    """Resume a suspended process instance."""
    process_instance = ProcessInstanceService().get_process_instance(
        process_instance_id
    )
    processor = ProcessInstanceProcessor(process_instance)
    processor.resume()
    return Response(json.dumps({"ok": True}), status=200, mimetype="application/json")
def process_instance_log_list(
    modified_process_model_identifier: str,
    process_instance_id: int,
    page: int = 1,
    per_page: int = 100,
    detailed: bool = False,
) -> flask.wrappers.Response:
    """Return one page of spiff log entries for a process instance.

    When detailed is False, only "State change to COMPLETED" entries are shown.
    """
    # raises if the process instance does not exist
    process_instance = _find_process_instance_by_id_or_raise(process_instance_id)

    query = SpiffLoggingModel.query.filter(
        SpiffLoggingModel.process_instance_id == process_instance.id
    )
    if not detailed:
        query = query.filter(SpiffLoggingModel.message.in_(["State change to COMPLETED"]))  # type: ignore

    log_page = (
        query.order_by(SpiffLoggingModel.timestamp.desc())  # type: ignore
        .join(
            UserModel, UserModel.id == SpiffLoggingModel.current_user_id, isouter=True
        )  # isouter since if we don't have a user, we still want the log
        .add_columns(
            UserModel.username,
        )
        .paginate(page=page, per_page=per_page, error_out=False)
    )

    return make_response(
        jsonify(
            {
                "results": log_page.items,
                "pagination": {
                    "count": len(log_page.items),
                    "total": log_page.total,
                    "pages": log_page.pages,
                },
            }
        ),
        200,
    )


def process_instance_list_for_me(
    process_model_identifier: Optional[str] = None,
    page: int = 1,
    per_page: int = 100,
    start_from: Optional[int] = None,
    start_to: Optional[int] = None,
    end_from: Optional[int] = None,
    end_to: Optional[int] = None,
    process_status: Optional[str] = None,
    user_filter: Optional[bool] = False,
    report_identifier: Optional[str] = None,
    report_id: Optional[int] = None,
    user_group_identifier: Optional[str] = None,
) -> flask.wrappers.Response:
    """Like process_instance_list, but restricted to instances related to the caller."""
    return process_instance_list(
        process_model_identifier=process_model_identifier,
        page=page,
        per_page=per_page,
        start_from=start_from,
        start_to=start_to,
        end_from=end_from,
        end_to=end_to,
        process_status=process_status,
        user_filter=user_filter,
        report_identifier=report_identifier,
        report_id=report_id,
        user_group_identifier=user_group_identifier,
        with_relation_to_me=True,
    )


def process_instance_list(
    process_model_identifier: Optional[str] = None,
    page: int = 1,
    per_page: int = 100,
    start_from: Optional[int] = None,
    start_to: Optional[int] = None,
    end_from: Optional[int] = None,
    end_to: Optional[int] = None,
    process_status: Optional[str] = None,
    with_relation_to_me: Optional[bool] = None,
    user_filter: Optional[bool] = False,
    report_identifier: Optional[str] = None,
    report_id: Optional[int] = None,
    user_group_identifier: Optional[str] = None,
) -> flask.wrappers.Response:
    """Return one page of process instances matching the report and filters.

    With user_filter=True the query-string filters are used verbatim; otherwise
    they act as overrides on top of the report's stored metadata filters.
    """
    report = ProcessInstanceReportService.report_with_identifier(
        g.user, report_id, report_identifier
    )

    if user_filter:
        filters = ProcessInstanceReportFilter(
            process_model_identifier=process_model_identifier,
            user_group_identifier=user_group_identifier,
            start_from=start_from,
            start_to=start_to,
            end_from=end_from,
            end_to=end_to,
            with_relation_to_me=with_relation_to_me,
            process_status=process_status.split(",") if process_status else None,
        )
    else:
        filters = ProcessInstanceReportService.filter_from_metadata_with_overrides(
            process_instance_report=report,
            process_model_identifier=process_model_identifier,
            user_group_identifier=user_group_identifier,
            start_from=start_from,
            start_to=start_to,
            end_from=end_from,
            end_to=end_to,
            process_status=process_status,
            with_relation_to_me=with_relation_to_me,
        )

    response_json = ProcessInstanceReportService.run_process_instance_report(
        report_filter=filters,
        process_instance_report=report,
        page=page,
        per_page=per_page,
        user=g.user,
    )

    return make_response(jsonify(response_json), 200)
def process_instance_report_column_list() -> flask.wrappers.Response:
    """Return available report columns: built-ins plus every metadata key in use."""
    table_columns = ProcessInstanceReportService.builtin_column_options()
    columns_for_metadata = (
        db.session.query(ProcessInstanceMetadataModel.key)
        .order_by(ProcessInstanceMetadataModel.key)
        .distinct()  # type: ignore
        .all()
    )
    columns_for_metadata_strings = [
        {"Header": i[0], "accessor": i[0], "filterable": True}
        for i in columns_for_metadata
    ]
    return make_response(jsonify(table_columns + columns_for_metadata_strings), 200)


def process_instance_show_for_me(
    modified_process_model_identifier: str,
    process_instance_id: int,
    process_identifier: Optional[str] = None,
) -> flask.wrappers.Response:
    """Show a process instance, but only if it is related to the current user."""
    process_instance = _find_process_instance_for_me_or_raise(process_instance_id)
    return _get_process_instance(
        process_instance=process_instance,
        modified_process_model_identifier=modified_process_model_identifier,
        process_identifier=process_identifier,
    )


def process_instance_show(
    modified_process_model_identifier: str,
    process_instance_id: int,
    process_identifier: Optional[str] = None,
) -> flask.wrappers.Response:
    """Show a process instance, including its bpmn diagram xml."""
    # docstring was a copy-paste of process_instance_create; fixed
    process_instance = _find_process_instance_by_id_or_raise(process_instance_id)
    return _get_process_instance(
        process_instance=process_instance,
        modified_process_model_identifier=modified_process_model_identifier,
        process_identifier=process_identifier,
    )


def process_instance_delete(
    process_instance_id: int, modified_process_model_identifier: str
) -> flask.wrappers.Response:
    """Delete a process instance once it has reached a terminal status.

    Raises ProcessInstanceCannotBeDeletedError for non-terminal instances.
    """
    process_instance = _find_process_instance_by_id_or_raise(process_instance_id)

    if not process_instance.has_terminal_status():
        raise ProcessInstanceCannotBeDeletedError(
            f"Process instance ({process_instance.id}) cannot be deleted since it does"
            f" not have a terminal status. Current status is {process_instance.status}."
        )

    # remove dependent log and step-detail rows first so the instance row can go
    db.session.query(SpiffLoggingModel).filter_by(
        process_instance_id=process_instance.id
    ).delete()
    db.session.query(SpiffStepDetailsModel).filter_by(
        process_instance_id=process_instance.id
    ).delete()
    db.session.delete(process_instance)
    db.session.commit()
    return Response(json.dumps({"ok": True}), status=200, mimetype="application/json")


def process_instance_report_list(
    page: int = 1, per_page: int = 100
) -> flask.wrappers.Response:
    """List the current user's process instance reports.

    NOTE(review): page/per_page are accepted but not applied — the full result
    set is always returned; confirm whether pagination should be wired up.
    """
    process_instance_reports = ProcessInstanceReportModel.query.filter_by(
        created_by_id=g.user.id,
    ).all()

    return make_response(jsonify(process_instance_reports), 200)


def process_instance_report_create(body: Dict[str, Any]) -> flask.wrappers.Response:
    """Create a process instance report owned by the current user."""
    process_instance_report = ProcessInstanceReportModel.create_report(
        identifier=body["identifier"],
        user=g.user,
        report_metadata=body["report_metadata"],
    )

    return make_response(jsonify(process_instance_report), 201)


def process_instance_report_update(
    report_id: int,
    body: Dict[str, Any],
) -> flask.wrappers.Response:
    """Replace the metadata of one of the current user's reports.

    Raises ApiError (404) if the report does not exist or belongs to someone else.
    NOTE(review): responds 201 for an update — 200 would be conventional; confirm
    no client depends on 201 before changing.
    """
    process_instance_report = ProcessInstanceReportModel.query.filter_by(
        id=report_id,
        created_by_id=g.user.id,
    ).first()
    if process_instance_report is None:
        raise ApiError(
            error_code="unknown_process_instance_report",
            message="Unknown process instance report",
            status_code=404,
        )

    process_instance_report.report_metadata = body["report_metadata"]
    db.session.commit()

    return make_response(jsonify(process_instance_report), 201)


def process_instance_report_delete(
    report_id: int,
) -> flask.wrappers.Response:
    """Delete one of the current user's reports.

    Raises ApiError (404) if the report does not exist or belongs to someone else.
    """
    process_instance_report = ProcessInstanceReportModel.query.filter_by(
        id=report_id,
        created_by_id=g.user.id,
    ).first()
    if process_instance_report is None:
        raise ApiError(
            error_code="unknown_process_instance_report",
            message="Unknown process instance report",
            status_code=404,
        )

    db.session.delete(process_instance_report)
    db.session.commit()

    return Response(json.dumps({"ok": True}), status=200, mimetype="application/json")
def process_instance_report_show(
    report_id: int,
    page: int = 1,
    per_page: int = 100,
) -> flask.wrappers.Response:
    """Run one of the current user's reports over a page of process instances."""
    process_instances = ProcessInstanceModel.query.order_by(
        ProcessInstanceModel.start_in_seconds.desc(), ProcessInstanceModel.id.desc()  # type: ignore
    ).paginate(page=page, per_page=per_page, error_out=False)

    process_instance_report = ProcessInstanceReportModel.query.filter_by(
        id=report_id,
        created_by_id=g.user.id,
    ).first()
    if process_instance_report is None:
        raise ApiError(
            error_code="unknown_process_instance_report",
            message="Unknown process instance report",
            status_code=404,
        )

    substitution_variables = request.args.to_dict()
    result_dict = process_instance_report.generate_report(
        process_instances.items, substitution_variables
    )

    # update this if we go back to a database query instead of filtering in memory
    result_count = len(result_dict["results"])
    result_dict["pagination"] = {
        "count": result_count,
        "total": result_count,
        "pages": 1,
    }

    return Response(json.dumps(result_dict), status=200, mimetype="application/json")


def process_instance_task_list_without_task_data_for_me(
    modified_process_model_identifier: str,
    process_instance_id: int,
    all_tasks: bool = False,
    spiff_step: int = 0,
) -> flask.wrappers.Response:
    """Task list (no task data) for an instance related to the current user."""
    process_instance = _find_process_instance_for_me_or_raise(process_instance_id)
    return process_instance_task_list(
        modified_process_model_identifier,
        process_instance,
        all_tasks,
        spiff_step,
        get_task_data=False,
    )


def process_instance_task_list_without_task_data(
    modified_process_model_identifier: str,
    process_instance_id: int,
    all_tasks: bool = False,
    spiff_step: int = 0,
) -> flask.wrappers.Response:
    """Task list (no task data) for any instance."""
    process_instance = _find_process_instance_by_id_or_raise(process_instance_id)
    return process_instance_task_list(
        modified_process_model_identifier,
        process_instance,
        all_tasks,
        spiff_step,
        get_task_data=False,
    )


def process_instance_task_list_with_task_data(
    modified_process_model_identifier: str,
    process_instance_id: int,
    all_tasks: bool = False,
    spiff_step: int = 0,
) -> flask.wrappers.Response:
    """Task list including each task's data."""
    process_instance = _find_process_instance_by_id_or_raise(process_instance_id)
    return process_instance_task_list(
        modified_process_model_identifier,
        process_instance,
        all_tasks,
        spiff_step,
        get_task_data=True,
    )


def process_instance_task_list(
    _modified_process_model_identifier: str,
    process_instance: ProcessInstanceModel,
    all_tasks: bool = False,
    spiff_step: int = 0,
    get_task_data: bool = False,
) -> flask.wrappers.Response:
    """Serialize the instance's tasks, optionally rewound to a given spiff step.

    When spiff_step > 0 and a matching step detail exists, the instance's
    in-memory bpmn json is rewound to that step before the tasks are read.
    """
    if spiff_step > 0:
        step_detail = (
            db.session.query(SpiffStepDetailsModel)
            .filter(
                SpiffStepDetailsModel.process_instance_id == process_instance.id,
                SpiffStepDetailsModel.spiff_step == spiff_step,
            )
            .first()
        )
        if step_detail is not None and process_instance.bpmn_json is not None:
            rewound = json.loads(process_instance.bpmn_json)
            rewound["tasks"] = step_detail.task_json["tasks"]
            rewound["subprocesses"] = step_detail.task_json["subprocesses"]
            process_instance.bpmn_json = json.dumps(rewound)

    processor = ProcessInstanceProcessor(process_instance)

    if all_tasks:
        spiff_tasks = processor.bpmn_process_instance.get_tasks(TaskState.ANY_MASK)
    else:
        spiff_tasks = processor.get_all_user_tasks()

    tasks = []
    for spiff_task in spiff_tasks:
        api_task = ProcessInstanceService.spiff_task_to_api_task(processor, spiff_task)
        if get_task_data:
            api_task.data = spiff_task.data
        tasks.append(api_task)

    return make_response(jsonify(tasks), 200)
ProcessInstanceProcessor(process_instance) + + spiff_tasks = None + if all_tasks: + spiff_tasks = processor.bpmn_process_instance.get_tasks(TaskState.ANY_MASK) + else: + spiff_tasks = processor.get_all_user_tasks() + + tasks = [] + for spiff_task in spiff_tasks: + task = ProcessInstanceService.spiff_task_to_api_task(processor, spiff_task) + if get_task_data: + task.data = spiff_task.data + tasks.append(task) + + return make_response(jsonify(tasks), 200) + + +def process_instance_reset( + process_instance_id: int, + modified_process_model_identifier: str, + spiff_step: int = 0, +) -> flask.wrappers.Response: + """Process_instance_reset.""" + process_instance = ProcessInstanceService().get_process_instance( + process_instance_id + ) + step_detail = ( + db.session.query(SpiffStepDetailsModel) + .filter( + SpiffStepDetailsModel.process_instance_id == process_instance.id, + SpiffStepDetailsModel.spiff_step == spiff_step, + ) + .first() + ) + if step_detail is not None and process_instance.bpmn_json is not None: + bpmn_json = json.loads(process_instance.bpmn_json) + bpmn_json["tasks"] = step_detail.task_json["tasks"] + bpmn_json["subprocesses"] = step_detail.task_json["subprocesses"] + process_instance.bpmn_json = json.dumps(bpmn_json) + + db.session.add(process_instance) + try: + db.session.commit() + except Exception as e: + db.session.rollback() + raise ApiError( + error_code="reset_process_instance_error", + message=f"Could not update the Instance. 
Original error is {e}", + ) from e + + return Response( + json.dumps(ProcessInstanceModelSchema().dump(process_instance)), + status=200, + mimetype="application/json", + ) + + +def _get_process_instance( + modified_process_model_identifier: str, + process_instance: ProcessInstanceModel, + process_identifier: Optional[str] = None, +) -> flask.wrappers.Response: + """_get_process_instance.""" + process_model_identifier = modified_process_model_identifier.replace(":", "/") + try: + current_version_control_revision = GitService.get_current_revision() + except GitCommandError: + current_version_control_revision = "" + + process_model_with_diagram = None + name_of_file_with_diagram = None + if process_identifier: + spec_reference = SpecReferenceCache.query.filter_by( + identifier=process_identifier, type="process" + ).first() + if spec_reference is None: + raise SpecReferenceNotFoundError( + "Could not find given process identifier in the cache:" + f" {process_identifier}" + ) + + process_model_with_diagram = ProcessModelService.get_process_model( + spec_reference.process_model_id + ) + name_of_file_with_diagram = spec_reference.file_name + else: + process_model_with_diagram = _get_process_model(process_model_identifier) + if process_model_with_diagram.primary_file_name: + name_of_file_with_diagram = process_model_with_diagram.primary_file_name + + if process_model_with_diagram and name_of_file_with_diagram: + if ( + process_instance.bpmn_version_control_identifier + == current_version_control_revision + ): + bpmn_xml_file_contents = SpecFileService.get_data( + process_model_with_diagram, name_of_file_with_diagram + ).decode("utf-8") + else: + bpmn_xml_file_contents = GitService.get_instance_file_contents_for_revision( + process_model_with_diagram, + process_instance.bpmn_version_control_identifier, + file_name=name_of_file_with_diagram, + ) + process_instance.bpmn_xml_file_contents = bpmn_xml_file_contents + + return make_response(jsonify(process_instance), 200) + + 
+def _find_process_instance_for_me_or_raise( + process_instance_id: int, +) -> ProcessInstanceModel: + """_find_process_instance_for_me_or_raise.""" + process_instance: ProcessInstanceModel = ( + ProcessInstanceModel.query.filter_by(id=process_instance_id) + .outerjoin(HumanTaskModel) + .outerjoin( + HumanTaskUserModel, + and_( + HumanTaskModel.id == HumanTaskUserModel.human_task_id, + HumanTaskUserModel.user_id == g.user.id, + ), + ) + .filter( + or_( + HumanTaskUserModel.id.is_not(None), + ProcessInstanceModel.process_initiator_id == g.user.id, + ) + ) + .first() + ) + + if process_instance is None: + raise ( + ApiError( + error_code="process_instance_cannot_be_found", + message=( + f"Process instance with id {process_instance_id} cannot be found" + " that is associated with you." + ), + status_code=400, + ) + ) + + return process_instance diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/routes/process_models_controller.py b/spiffworkflow-backend/src/spiffworkflow_backend/routes/process_models_controller.py new file mode 100644 index 000000000..0f877ce73 --- /dev/null +++ b/spiffworkflow-backend/src/spiffworkflow_backend/routes/process_models_controller.py @@ -0,0 +1,481 @@ +"""APIs for dealing with process groups, process models, and process instances.""" +import json +import os +import re +from typing import Any +from typing import Dict +from typing import Optional +from typing import Union + +import connexion # type: ignore +import flask.wrappers +from flask import current_app +from flask import g +from flask import jsonify +from flask import make_response +from flask.wrappers import Response +from flask_bpmn.api.api_error import ApiError + +from spiffworkflow_backend.models.file import FileSchema +from spiffworkflow_backend.models.process_group import ProcessGroup +from spiffworkflow_backend.models.process_instance_report import ( + ProcessInstanceReportModel, +) +from spiffworkflow_backend.models.process_model import ProcessModelInfo +from 
spiffworkflow_backend.models.process_model import ProcessModelInfoSchema +from spiffworkflow_backend.routes.process_api_blueprint import _commit_and_push_to_git +from spiffworkflow_backend.routes.process_api_blueprint import _get_process_model +from spiffworkflow_backend.routes.process_api_blueprint import ( + _un_modify_modified_process_model_id, +) +from spiffworkflow_backend.services.git_service import GitService +from spiffworkflow_backend.services.git_service import MissingGitConfigsError +from spiffworkflow_backend.services.process_instance_report_service import ( + ProcessInstanceReportService, +) +from spiffworkflow_backend.services.process_model_service import ProcessModelService +from spiffworkflow_backend.services.spec_file_service import SpecFileService + + +def process_model_create( + modified_process_group_id: str, body: Dict[str, Union[str, bool, int]] +) -> flask.wrappers.Response: + """Process_model_create.""" + body_include_list = [ + "id", + "display_name", + "primary_file_name", + "primary_process_id", + "description", + "metadata_extraction_paths", + ] + body_filtered = { + include_item: body[include_item] + for include_item in body_include_list + if include_item in body + } + + _get_process_group_from_modified_identifier(modified_process_group_id) + + process_model_info = ProcessModelInfo(**body_filtered) # type: ignore + if process_model_info is None: + raise ApiError( + error_code="process_model_could_not_be_created", + message=f"Process Model could not be created from given body: {body}", + status_code=400, + ) + + ProcessModelService.add_process_model(process_model_info) + _commit_and_push_to_git( + f"User: {g.user.username} created process model {process_model_info.id}" + ) + return Response( + json.dumps(ProcessModelInfoSchema().dump(process_model_info)), + status=201, + mimetype="application/json", + ) + + +def process_model_delete( + modified_process_model_identifier: str, +) -> flask.wrappers.Response: + """Process_model_delete.""" + 
process_model_identifier = modified_process_model_identifier.replace(":", "/") + ProcessModelService().process_model_delete(process_model_identifier) + _commit_and_push_to_git( + f"User: {g.user.username} deleted process model {process_model_identifier}" + ) + return Response(json.dumps({"ok": True}), status=200, mimetype="application/json") + + +def process_model_update( + modified_process_model_identifier: str, body: Dict[str, Union[str, bool, int]] +) -> Any: + """Process_model_update.""" + process_model_identifier = modified_process_model_identifier.replace(":", "/") + body_include_list = [ + "display_name", + "primary_file_name", + "primary_process_id", + "description", + "metadata_extraction_paths", + ] + body_filtered = { + include_item: body[include_item] + for include_item in body_include_list + if include_item in body + } + + process_model = _get_process_model(process_model_identifier) + ProcessModelService.update_process_model(process_model, body_filtered) + _commit_and_push_to_git( + f"User: {g.user.username} updated process model {process_model_identifier}" + ) + return ProcessModelInfoSchema().dump(process_model) + + +def process_model_show(modified_process_model_identifier: str) -> Any: + """Process_model_show.""" + process_model_identifier = modified_process_model_identifier.replace(":", "/") + process_model = _get_process_model(process_model_identifier) + files = sorted( + SpecFileService.get_files(process_model), + key=lambda f: "" if f.name == process_model.primary_file_name else f.sort_index, + ) + process_model.files = files + for file in process_model.files: + file.references = SpecFileService.get_references_for_file(file, process_model) + + process_model.parent_groups = ProcessModelService.get_parent_group_array( + process_model.id + ) + return make_response(jsonify(process_model), 200) + + +def process_model_move( + modified_process_model_identifier: str, new_location: str +) -> flask.wrappers.Response: + """Process_model_move.""" + 
original_process_model_id = _un_modify_modified_process_model_id( + modified_process_model_identifier + ) + new_process_model = ProcessModelService().process_model_move( + original_process_model_id, new_location + ) + _commit_and_push_to_git( + f"User: {g.user.username} moved process model {original_process_model_id} to" + f" {new_process_model.id}" + ) + return make_response(jsonify(new_process_model), 200) + + +def process_model_publish( + modified_process_model_identifier: str, branch_to_update: Optional[str] = None +) -> flask.wrappers.Response: + """Process_model_publish.""" + if branch_to_update is None: + branch_to_update = current_app.config["GIT_BRANCH_TO_PUBLISH_TO"] + if branch_to_update is None: + raise MissingGitConfigsError( + "Missing config for GIT_BRANCH_TO_PUBLISH_TO. " + "This is required for publishing process models" + ) + process_model_identifier = _un_modify_modified_process_model_id( + modified_process_model_identifier + ) + pr_url = GitService().publish(process_model_identifier, branch_to_update) + data = {"ok": True, "pr_url": pr_url} + return Response(json.dumps(data), status=200, mimetype="application/json") + + +def process_model_list( + process_group_identifier: Optional[str] = None, + recursive: Optional[bool] = False, + filter_runnable_by_user: Optional[bool] = False, + page: int = 1, + per_page: int = 100, +) -> flask.wrappers.Response: + """Process model list!""" + process_models = ProcessModelService.get_process_models( + process_group_id=process_group_identifier, + recursive=recursive, + filter_runnable_by_user=filter_runnable_by_user, + ) + batch = ProcessModelService().get_batch( + process_models, page=page, per_page=per_page + ) + pages = len(process_models) // per_page + remainder = len(process_models) % per_page + if remainder > 0: + pages += 1 + response_json = { + "results": ProcessModelInfoSchema(many=True).dump(batch), + "pagination": { + "count": len(batch), + "total": len(process_models), + "pages": pages, + }, + } + 
return Response(json.dumps(response_json), status=200, mimetype="application/json") + + +def process_model_file_update( + modified_process_model_identifier: str, file_name: str +) -> flask.wrappers.Response: + """Process_model_file_update.""" + process_model_identifier = modified_process_model_identifier.replace(":", "/") + process_model = _get_process_model(process_model_identifier) + + request_file = _get_file_from_request() + request_file_contents = request_file.stream.read() + if not request_file_contents: + raise ApiError( + error_code="file_contents_empty", + message="Given request file does not have any content", + status_code=400, + ) + + SpecFileService.update_file(process_model, file_name, request_file_contents) + _commit_and_push_to_git( + f"User: {g.user.username} clicked save for" + f" {process_model_identifier}/{file_name}" + ) + + return Response(json.dumps({"ok": True}), status=200, mimetype="application/json") + + +def process_model_file_delete( + modified_process_model_identifier: str, file_name: str +) -> flask.wrappers.Response: + """Process_model_file_delete.""" + process_model_identifier = modified_process_model_identifier.replace(":", "/") + process_model = _get_process_model(process_model_identifier) + try: + SpecFileService.delete_file(process_model, file_name) + except FileNotFoundError as exception: + raise ( + ApiError( + error_code="process_model_file_cannot_be_found", + message=f"Process model file cannot be found: {file_name}", + status_code=400, + ) + ) from exception + + _commit_and_push_to_git( + f"User: {g.user.username} deleted process model file" + f" {process_model_identifier}/{file_name}" + ) + return Response(json.dumps({"ok": True}), status=200, mimetype="application/json") + + +def process_model_file_create( + modified_process_model_identifier: str, +) -> flask.wrappers.Response: + """Process_model_file_create.""" + process_model_identifier = modified_process_model_identifier.replace(":", "/") + process_model = 
_get_process_model(process_model_identifier) + request_file = _get_file_from_request() + if not request_file.filename: + raise ApiError( + error_code="could_not_get_filename", + message="Could not get filename from request", + status_code=400, + ) + + file = SpecFileService.add_file( + process_model, request_file.filename, request_file.stream.read() + ) + file_contents = SpecFileService.get_data(process_model, file.name) + file.file_contents = file_contents + file.process_model_id = process_model.id + _commit_and_push_to_git( + f"User: {g.user.username} added process model file" + f" {process_model_identifier}/{file.name}" + ) + return Response( + json.dumps(FileSchema().dump(file)), status=201, mimetype="application/json" + ) + + +def process_model_file_show( + modified_process_model_identifier: str, file_name: str +) -> Any: + """Process_model_file_show.""" + process_model_identifier = modified_process_model_identifier.replace(":", "/") + process_model = _get_process_model(process_model_identifier) + files = SpecFileService.get_files(process_model, file_name) + if len(files) == 0: + raise ApiError( + error_code="unknown file", + message=( + f"No information exists for file {file_name}" + f" it does not exist in workflow {process_model_identifier}." + ), + status_code=404, + ) + + file = files[0] + file_contents = SpecFileService.get_data(process_model, file.name) + file.file_contents = file_contents + file.process_model_id = process_model.id + return FileSchema().dump(file) + + +# { +# "natural_language_text": "Create a bug tracker process model \ +# with a bug-details form that collects summary, description, and priority" +# } +def process_model_create_with_natural_language( + modified_process_group_id: str, body: Dict[str, str] +) -> flask.wrappers.Response: + """Process_model_create_with_natural_language.""" + pattern = re.compile( + r"Create a (?P.*?) process model with a (?P.*?) 
form that" + r" collects (?P.*)" + ) + match = pattern.match(body["natural_language_text"]) + if match is None: + raise ApiError( + error_code="natural_language_text_not_yet_supported", + message=( + "Natural language text is not yet supported. Please use the form:" + f" {pattern.pattern}" + ), + status_code=400, + ) + process_model_display_name = match.group("pm_name") + process_model_identifier = re.sub(r"[ _]", "-", process_model_display_name) + process_model_identifier = re.sub(r"-{2,}", "-", process_model_identifier).lower() + + form_name = match.group("form_name") + form_identifier = re.sub(r"[ _]", "-", form_name) + form_identifier = re.sub(r"-{2,}", "-", form_identifier).lower() + + column_names = match.group("columns") + columns = re.sub(r"(, (and )?)", ",", column_names).split(",") + + process_group = _get_process_group_from_modified_identifier( + modified_process_group_id + ) + qualified_process_model_identifier = ( + f"{process_group.id}/{process_model_identifier}" + ) + + metadata_extraction_paths = [] + for column in columns: + metadata_extraction_paths.append({"key": column, "path": column}) + + process_model_attributes = { + "id": qualified_process_model_identifier, + "display_name": process_model_display_name, + "description": None, + "metadata_extraction_paths": metadata_extraction_paths, + } + + process_model_info = ProcessModelInfo(**process_model_attributes) # type: ignore + if process_model_info is None: + raise ApiError( + error_code="process_model_could_not_be_created", + message=f"Process Model could not be created from given body: {body}", + status_code=400, + ) + + bpmn_template_file = os.path.join( + current_app.root_path, "templates", "basic_with_user_task_template.bpmn" + ) + if not os.path.exists(bpmn_template_file): + raise ApiError( + error_code="bpmn_template_file_does_not_exist", + message="Could not find the bpmn template file to create process model.", + status_code=500, + ) + + 
ProcessModelService.add_process_model(process_model_info) + bpmn_process_identifier = f"{process_model_identifier}_process" + bpmn_template_contents = "" + with open(bpmn_template_file, encoding="utf-8") as f: + bpmn_template_contents = f.read() + + bpmn_template_contents = bpmn_template_contents.replace( + "natural_language_process_id_template", bpmn_process_identifier + ) + bpmn_template_contents = bpmn_template_contents.replace( + "form-identifier-id-template", form_identifier + ) + + form_uischema_json: dict = {"ui:order": columns} + + form_properties: dict = {} + for column in columns: + form_properties[column] = { + "type": "string", + "title": column, + } + form_schema_json = { + "title": form_identifier, + "description": "", + "properties": form_properties, + "required": [], + } + + SpecFileService.add_file( + process_model_info, + f"{process_model_identifier}.bpmn", + str.encode(bpmn_template_contents), + ) + SpecFileService.add_file( + process_model_info, + f"{form_identifier}-schema.json", + str.encode(json.dumps(form_schema_json)), + ) + SpecFileService.add_file( + process_model_info, + f"{form_identifier}-uischema.json", + str.encode(json.dumps(form_uischema_json)), + ) + + _commit_and_push_to_git( + f"User: {g.user.username} created process model via natural language:" + f" {process_model_info.id}" + ) + + default_report_metadata = ProcessInstanceReportService.system_metadata_map( + "default" + ) + for column in columns: + default_report_metadata["columns"].append( + {"Header": column, "accessor": column, "filterable": True} + ) + ProcessInstanceReportModel.create_report( + identifier=process_model_identifier, + user=g.user, + report_metadata=default_report_metadata, + ) + + return Response( + json.dumps(ProcessModelInfoSchema().dump(process_model_info)), + status=201, + mimetype="application/json", + ) + + +def _get_file_from_request() -> Any: + """Get_file_from_request.""" + request_file = connexion.request.files.get("file") + if not request_file: + 
raise ApiError( + error_code="no_file_given", + message="Given request does not contain a file", + status_code=400, + ) + return request_file + + +def _get_process_group_from_modified_identifier( + modified_process_group_id: str, +) -> ProcessGroup: + """_get_process_group_from_modified_identifier.""" + if modified_process_group_id is None: + raise ApiError( + error_code="process_group_id_not_specified", + message=( + "Process Model could not be created when process_group_id path param is" + " unspecified" + ), + status_code=400, + ) + + unmodified_process_group_id = _un_modify_modified_process_model_id( + modified_process_group_id + ) + process_group = ProcessModelService.get_process_group(unmodified_process_group_id) + if process_group is None: + raise ApiError( + error_code="process_model_could_not_be_created", + message=( + "Process Model could not be created from given body because Process" + f" Group could not be found: {unmodified_process_group_id}" + ), + status_code=400, + ) + return process_group diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/routes/script_unit_tests_controller.py b/spiffworkflow-backend/src/spiffworkflow_backend/routes/script_unit_tests_controller.py new file mode 100644 index 000000000..e97b26ae6 --- /dev/null +++ b/spiffworkflow-backend/src/spiffworkflow_backend/routes/script_unit_tests_controller.py @@ -0,0 +1,134 @@ +"""APIs for dealing with process groups, process models, and process instances.""" +import json +import random +import string +from typing import Dict +from typing import Union + +import flask.wrappers +from flask import current_app +from flask import jsonify +from flask import make_response +from flask.wrappers import Response +from flask_bpmn.api.api_error import ApiError +from lxml import etree # type: ignore +from lxml.builder import ElementMaker # type: ignore + +from spiffworkflow_backend.routes.process_api_blueprint import _get_process_model +from spiffworkflow_backend.routes.process_api_blueprint 
import ( + _get_required_parameter_or_raise, +) +from spiffworkflow_backend.services.script_unit_test_runner import ScriptUnitTestRunner +from spiffworkflow_backend.services.spec_file_service import SpecFileService + + +def script_unit_test_create( + modified_process_model_identifier: str, body: Dict[str, Union[str, bool, int]] +) -> flask.wrappers.Response: + """Script_unit_test_create.""" + bpmn_task_identifier = _get_required_parameter_or_raise( + "bpmn_task_identifier", body + ) + input_json = _get_required_parameter_or_raise("input_json", body) + expected_output_json = _get_required_parameter_or_raise( + "expected_output_json", body + ) + + process_model_identifier = modified_process_model_identifier.replace(":", "/") + process_model = _get_process_model(process_model_identifier) + file = SpecFileService.get_files(process_model, process_model.primary_file_name)[0] + if file is None: + raise ApiError( + error_code="cannot_find_file", + message=( + "Could not find the primary bpmn file for process_model:" + f" {process_model.id}" + ), + status_code=404, + ) + + # TODO: move this to an xml service or something + file_contents = SpecFileService.get_data(process_model, file.name) + bpmn_etree_element = etree.fromstring(file_contents) + + nsmap = bpmn_etree_element.nsmap + spiff_element_maker = ElementMaker( + namespace="http://spiffworkflow.org/bpmn/schema/1.0/core", nsmap=nsmap + ) + + script_task_elements = bpmn_etree_element.xpath( + f"//bpmn:scriptTask[@id='{bpmn_task_identifier}']", + namespaces={"bpmn": "http://www.omg.org/spec/BPMN/20100524/MODEL"}, + ) + if len(script_task_elements) == 0: + raise ApiError( + error_code="missing_script_task", + message=f"Cannot find a script task with id: {bpmn_task_identifier}", + status_code=404, + ) + script_task_element = script_task_elements[0] + + extension_elements = None + extension_elements_array = script_task_element.xpath( + ".//bpmn:extensionElements", + namespaces={"bpmn": 
"http://www.omg.org/spec/BPMN/20100524/MODEL"}, + ) + if len(extension_elements_array) == 0: + bpmn_element_maker = ElementMaker( + namespace="http://www.omg.org/spec/BPMN/20100524/MODEL", nsmap=nsmap + ) + extension_elements = bpmn_element_maker("extensionElements") + script_task_element.append(extension_elements) + else: + extension_elements = extension_elements_array[0] + + unit_test_elements = None + unit_test_elements_array = extension_elements.xpath( + "//spiffworkflow:unitTests", + namespaces={"spiffworkflow": "http://spiffworkflow.org/bpmn/schema/1.0/core"}, + ) + if len(unit_test_elements_array) == 0: + unit_test_elements = spiff_element_maker("unitTests") + extension_elements.append(unit_test_elements) + else: + unit_test_elements = unit_test_elements_array[0] + + fuzz = "".join( + random.choice(string.ascii_uppercase + string.digits) # noqa: S311 + for _ in range(7) + ) + unit_test_id = f"unit_test_{fuzz}" + + input_json_element = spiff_element_maker("inputJson", json.dumps(input_json)) + expected_output_json_element = spiff_element_maker( + "expectedOutputJson", json.dumps(expected_output_json) + ) + unit_test_element = spiff_element_maker("unitTest", id=unit_test_id) + unit_test_element.append(input_json_element) + unit_test_element.append(expected_output_json_element) + unit_test_elements.append(unit_test_element) + SpecFileService.update_file( + process_model, file.name, etree.tostring(bpmn_etree_element) + ) + + return Response(json.dumps({"ok": True}), status=202, mimetype="application/json") + + +def script_unit_test_run( + modified_process_model_identifier: str, body: Dict[str, Union[str, bool, int]] +) -> flask.wrappers.Response: + """Script_unit_test_run.""" + # FIXME: We should probably clear this somewhere else but this works + current_app.config["THREAD_LOCAL_DATA"].process_instance_id = None + current_app.config["THREAD_LOCAL_DATA"].spiff_step = None + + python_script = _get_required_parameter_or_raise("python_script", body) + input_json = 
_get_required_parameter_or_raise("input_json", body) + expected_output_json = _get_required_parameter_or_raise( + "expected_output_json", body + ) + + result = ScriptUnitTestRunner.run_with_script_and_pre_post_contexts( + python_script, input_json, expected_output_json + ) + return make_response(jsonify(result), 200) diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/routes/secrets_controller.py b/spiffworkflow-backend/src/spiffworkflow_backend/routes/secrets_controller.py new file mode 100644 index 000000000..fdf4c7fae --- /dev/null +++ b/spiffworkflow-backend/src/spiffworkflow_backend/routes/secrets_controller.py @@ -0,0 +1,67 @@ +"""APIs for dealing with process groups, process models, and process instances.""" +import json +from typing import Dict +from typing import Optional + +from flask import g +from flask import jsonify +from flask import make_response +from flask.wrappers import Response + +from spiffworkflow_backend.models.secret_model import SecretModel +from spiffworkflow_backend.models.secret_model import SecretModelSchema +from spiffworkflow_backend.models.user import UserModel +from spiffworkflow_backend.services.secret_service import SecretService +from spiffworkflow_backend.services.user_service import UserService + + +def secret_show(key: str) -> Optional[str]: + """Secret_show.""" + return SecretService.get_secret(key) + + +def secret_list( + page: int = 1, + per_page: int = 100, +) -> Response: + """Secret_list.""" + secrets = ( + SecretModel.query.order_by(SecretModel.key) + .join(UserModel) + .add_columns( + UserModel.username, + ) + .paginate(page=page, per_page=per_page, error_out=False) + ) + response_json = { + "results": secrets.items, + "pagination": { + "count": len(secrets.items), + "total": secrets.total, + "pages": secrets.pages, + }, + } + return make_response(jsonify(response_json), 200) + + +def secret_create(body: Dict) -> Response: + """Add secret.""" + secret_model = SecretService().add_secret(body["key"], 
body["value"], g.user.id) + return Response( + json.dumps(SecretModelSchema().dump(secret_model)), + status=201, + mimetype="application/json", + ) + + +def secret_update(key: str, body: dict) -> Response: + """Update secret.""" + SecretService().update_secret(key, body["value"], g.user.id) + return Response(json.dumps({"ok": True}), status=200, mimetype="application/json") + + +def secret_delete(key: str) -> Response: + """Delete secret.""" + current_user = UserService.current_user() + SecretService.delete_secret(key, current_user.id) + return Response(json.dumps({"ok": True}), status=200, mimetype="application/json") diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/routes/service_tasks_controller.py b/spiffworkflow-backend/src/spiffworkflow_backend/routes/service_tasks_controller.py new file mode 100644 index 000000000..a1708ce8d --- /dev/null +++ b/spiffworkflow-backend/src/spiffworkflow_backend/routes/service_tasks_controller.py @@ -0,0 +1,49 @@ +"""APIs for dealing with process groups, process models, and process instances.""" +import json + +import flask.wrappers +import werkzeug +from flask import current_app +from flask import g +from flask import redirect +from flask import request +from flask.wrappers import Response + +from spiffworkflow_backend.routes.user import verify_token +from spiffworkflow_backend.services.secret_service import SecretService +from spiffworkflow_backend.services.service_task_service import ServiceTaskService + + +def service_task_list() -> flask.wrappers.Response: + """Service_task_list.""" + available_connectors = ServiceTaskService.available_connectors() + return Response( + json.dumps(available_connectors), status=200, mimetype="application/json" + ) + + +def authentication_list() -> flask.wrappers.Response: + """Authentication_list.""" + available_authentications = ServiceTaskService.authentication_list() + response_json = { + "results": available_authentications, + "connector_proxy_base_url": 
current_app.config["CONNECTOR_PROXY_URL"], + "redirect_url": f"{current_app.config['SPIFFWORKFLOW_BACKEND_URL']}/v1.0/authentication_callback", + } + + return Response(json.dumps(response_json), status=200, mimetype="application/json") + + +def authentication_callback( + service: str, + auth_method: str, +) -> werkzeug.wrappers.Response: + """Authentication_callback.""" + verify_token(request.args.get("token"), force_run=True) + response = request.args["response"] + SecretService().update_secret( + f"{service}/{auth_method}", response, g.user.id, create_if_not_exists=True + ) + return redirect( + f"{current_app.config['SPIFFWORKFLOW_FRONTEND_URL']}/admin/configuration" + ) diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/routes/tasks_controller.py b/spiffworkflow-backend/src/spiffworkflow_backend/routes/tasks_controller.py new file mode 100644 index 000000000..2c7ceeeaa --- /dev/null +++ b/spiffworkflow-backend/src/spiffworkflow_backend/routes/tasks_controller.py @@ -0,0 +1,563 @@ +"""APIs for dealing with process groups, process models, and process instances.""" +import json +import os +import uuid +from typing import Any +from typing import Dict +from typing import Optional +from typing import TypedDict +from typing import Union + +import flask.wrappers +import jinja2 +from flask import g +from flask import jsonify +from flask import make_response +from flask.wrappers import Response +from flask_bpmn.api.api_error import ApiError +from flask_bpmn.models.db import db +from SpiffWorkflow.task import Task as SpiffTask # type: ignore +from SpiffWorkflow.task import TaskState +from sqlalchemy import and_ +from sqlalchemy import asc +from sqlalchemy import desc +from sqlalchemy import func +from sqlalchemy.orm import aliased + +from spiffworkflow_backend.models.group import GroupModel +from spiffworkflow_backend.models.human_task import HumanTaskModel +from spiffworkflow_backend.models.human_task_user import HumanTaskUserModel +from 
spiffworkflow_backend.models.process_instance import ProcessInstanceModel +from spiffworkflow_backend.models.process_instance import ProcessInstanceStatus +from spiffworkflow_backend.models.process_model import ProcessModelInfo +from spiffworkflow_backend.models.user import UserModel +from spiffworkflow_backend.routes.process_api_blueprint import ( + _find_principal_or_raise, +) +from spiffworkflow_backend.routes.process_api_blueprint import ( + _find_process_instance_by_id_or_raise, +) +from spiffworkflow_backend.routes.process_api_blueprint import _get_process_model +from spiffworkflow_backend.services.authorization_service import AuthorizationService +from spiffworkflow_backend.services.file_system_service import FileSystemService +from spiffworkflow_backend.services.process_instance_processor import ( + ProcessInstanceProcessor, +) +from spiffworkflow_backend.services.process_instance_service import ( + ProcessInstanceService, +) +from spiffworkflow_backend.services.process_model_service import ProcessModelService +from spiffworkflow_backend.services.spec_file_service import SpecFileService + + +class TaskDataSelectOption(TypedDict): + """TaskDataSelectOption.""" + + value: str + label: str + + +class ReactJsonSchemaSelectOption(TypedDict): + """ReactJsonSchemaSelectOption.""" + + type: str + title: str + enum: list[str] + + +# TODO: see comment for before_request +# @process_api_blueprint.route("/v1.0/tasks", methods=["GET"]) +def task_list_my_tasks(page: int = 1, per_page: int = 100) -> flask.wrappers.Response: + """Task_list_my_tasks.""" + principal = _find_principal_or_raise() + human_tasks = ( + HumanTaskModel.query.order_by(desc(HumanTaskModel.id)) # type: ignore + .join(ProcessInstanceModel) + .join(HumanTaskUserModel) + .filter_by(user_id=principal.user_id) + .filter(HumanTaskModel.completed == False) # noqa: E712 + # just need this add_columns to add the process_model_identifier. Then add everything back that was removed. 
+ .add_columns( + ProcessInstanceModel.process_model_identifier, + ProcessInstanceModel.process_model_display_name, + ProcessInstanceModel.status, + HumanTaskModel.task_name, + HumanTaskModel.task_title, + HumanTaskModel.task_type, + HumanTaskModel.task_status, + HumanTaskModel.task_id, + HumanTaskModel.id, + HumanTaskModel.process_model_display_name, + HumanTaskModel.process_instance_id, + ) + .paginate(page=page, per_page=per_page, error_out=False) + ) + tasks = [HumanTaskModel.to_task(human_task) for human_task in human_tasks.items] + + response_json = { + "results": tasks, + "pagination": { + "count": len(human_tasks.items), + "total": human_tasks.total, + "pages": human_tasks.pages, + }, + } + + return make_response(jsonify(response_json), 200) + + +def task_list_for_my_open_processes( + page: int = 1, per_page: int = 100 +) -> flask.wrappers.Response: + """Task_list_for_my_open_processes.""" + return _get_tasks(page=page, per_page=per_page) + + +def task_list_for_me(page: int = 1, per_page: int = 100) -> flask.wrappers.Response: + """Task_list_for_me.""" + return _get_tasks( + processes_started_by_user=False, + has_lane_assignment_id=False, + page=page, + per_page=per_page, + ) + + +def task_list_for_my_groups( + user_group_identifier: Optional[str] = None, page: int = 1, per_page: int = 100 +) -> flask.wrappers.Response: + """Task_list_for_my_groups.""" + return _get_tasks( + user_group_identifier=user_group_identifier, + processes_started_by_user=False, + page=page, + per_page=per_page, + ) + + +def task_show(process_instance_id: int, task_id: str) -> flask.wrappers.Response: + """Task_show.""" + process_instance = _find_process_instance_by_id_or_raise(process_instance_id) + + if process_instance.status == ProcessInstanceStatus.suspended.value: + raise ApiError( + error_code="error_suspended", + message="The process instance is suspended", + status_code=400, + ) + + process_model = _get_process_model( + process_instance.process_model_identifier, + ) + + 
human_task = HumanTaskModel.query.filter_by( + process_instance_id=process_instance_id, task_id=task_id + ).first() + if human_task is None: + raise ( + ApiError( + error_code="no_human_task", + message=( + f"Cannot find a task to complete for task id '{task_id}' and" + f" process instance {process_instance_id}." + ), + status_code=500, + ) + ) + + form_schema_file_name = "" + form_ui_schema_file_name = "" + spiff_task = _get_spiff_task_from_process_instance(task_id, process_instance) + extensions = spiff_task.task_spec.extensions + + if "properties" in extensions: + properties = extensions["properties"] + if "formJsonSchemaFilename" in properties: + form_schema_file_name = properties["formJsonSchemaFilename"] + if "formUiSchemaFilename" in properties: + form_ui_schema_file_name = properties["formUiSchemaFilename"] + processor = ProcessInstanceProcessor(process_instance) + task = ProcessInstanceService.spiff_task_to_api_task(processor, spiff_task) + task.data = spiff_task.data + task.process_model_display_name = process_model.display_name + task.process_model_identifier = process_model.id + + process_model_with_form = process_model + refs = SpecFileService.get_references_for_process(process_model_with_form) + all_processes = [i.identifier for i in refs] + if task.process_identifier not in all_processes: + bpmn_file_full_path = ( + ProcessInstanceProcessor.bpmn_file_full_path_from_bpmn_process_identifier( + task.process_identifier + ) + ) + relative_path = os.path.relpath( + bpmn_file_full_path, start=FileSystemService.root_path() + ) + process_model_relative_path = os.path.dirname(relative_path) + process_model_with_form = ( + ProcessModelService.get_process_model_from_relative_path( + process_model_relative_path + ) + ) + + if task.type == "User Task": + if not form_schema_file_name: + raise ( + ApiError( + error_code="missing_form_file", + message=( + "Cannot find a form file for process_instance_id:" + f" {process_instance_id}, task_id: {task_id}" + ), + 
status_code=400, + ) + ) + + form_contents = _prepare_form_data( + form_schema_file_name, + task.data, + process_model_with_form, + ) + + try: + # form_contents is a str + form_dict = json.loads(form_contents) + except Exception as exception: + raise ( + ApiError( + error_code="error_loading_form", + message=( + f"Could not load form schema from: {form_schema_file_name}." + f" Error was: {str(exception)}" + ), + status_code=400, + ) + ) from exception + + if task.data: + _update_form_schema_with_task_data_as_needed(form_dict, task.data) + + if form_contents: + task.form_schema = form_dict + + if form_ui_schema_file_name: + ui_form_contents = _prepare_form_data( + form_ui_schema_file_name, + task.data, + process_model_with_form, + ) + if ui_form_contents: + task.form_ui_schema = ui_form_contents + + if task.properties and task.data and "instructionsForEndUser" in task.properties: + if task.properties["instructionsForEndUser"]: + task.properties["instructionsForEndUser"] = _render_jinja_template( + task.properties["instructionsForEndUser"], task.data + ) + return make_response(jsonify(task), 200) + + +def process_data_show( + process_instance_id: int, + process_data_identifier: str, + modified_process_model_identifier: str, +) -> flask.wrappers.Response: + """Process_data_show.""" + process_instance = _find_process_instance_by_id_or_raise(process_instance_id) + processor = ProcessInstanceProcessor(process_instance) + all_process_data = processor.get_data() + process_data_value = None + if process_data_identifier in all_process_data: + process_data_value = all_process_data[process_data_identifier] + + return make_response( + jsonify( + { + "process_data_identifier": process_data_identifier, + "process_data_value": process_data_value, + } + ), + 200, + ) + + +def task_submit( + process_instance_id: int, + task_id: str, + body: Dict[str, Any], + terminate_loop: bool = False, +) -> flask.wrappers.Response: + """Task_submit_user_data.""" + principal = 
_find_principal_or_raise() + process_instance = _find_process_instance_by_id_or_raise(process_instance_id) + if not process_instance.can_submit_task(): + raise ApiError( + error_code="process_instance_not_runnable", + message=( + f"Process Instance ({process_instance.id}) has status " + f"{process_instance.status} which does not allow tasks to be submitted." + ), + status_code=400, + ) + + processor = ProcessInstanceProcessor(process_instance) + spiff_task = _get_spiff_task_from_process_instance( + task_id, process_instance, processor=processor + ) + AuthorizationService.assert_user_can_complete_spiff_task( + process_instance.id, spiff_task, principal.user + ) + + if spiff_task.state != TaskState.READY: + raise ( + ApiError( + error_code="invalid_state", + message="You may not update a task unless it is in the READY state.", + status_code=400, + ) + ) + + if terminate_loop and spiff_task.is_looping(): + spiff_task.terminate_loop() + + human_task = HumanTaskModel.query.filter_by( + process_instance_id=process_instance_id, task_id=task_id, completed=False + ).first() + if human_task is None: + raise ( + ApiError( + error_code="no_human_task", + message=( + f"Cannot find a task to complete for task id '{task_id}' and" + f" process instance {process_instance_id}." + ), + status_code=500, + ) + ) + + ProcessInstanceService.complete_form_task( + processor=processor, + spiff_task=spiff_task, + data=body, + user=g.user, + human_task=human_task, + ) + + # If we need to update all tasks, then get the next ready task and if it a multi-instance with the same + # task spec, complete that form as well. 
+ # if update_all: + # last_index = spiff_task.task_info()["mi_index"] + # next_task = processor.next_task() + # while next_task and next_task.task_info()["mi_index"] > last_index: + # __update_task(processor, next_task, form_data, user) + # last_index = next_task.task_info()["mi_index"] + # next_task = processor.next_task() + + next_human_task_assigned_to_me = ( + HumanTaskModel.query.filter_by( + process_instance_id=process_instance_id, completed=False + ) + .order_by(asc(HumanTaskModel.id)) # type: ignore + .join(HumanTaskUserModel) + .filter_by(user_id=principal.user_id) + .first() + ) + if next_human_task_assigned_to_me: + return make_response( + jsonify(HumanTaskModel.to_task(next_human_task_assigned_to_me)), 200 + ) + + return Response(json.dumps({"ok": True}), status=202, mimetype="application/json") + + +def _get_tasks( + processes_started_by_user: bool = True, + has_lane_assignment_id: bool = True, + page: int = 1, + per_page: int = 100, + user_group_identifier: Optional[str] = None, +) -> flask.wrappers.Response: + """Get_tasks.""" + user_id = g.user.id + + # use distinct to ensure we only get one row per human task otherwise + # we can get back multiple for the same human task row which throws off + # pagination later on + # https://stackoverflow.com/q/34582014/6090676 + human_tasks_query = ( + db.session.query(HumanTaskModel) + .group_by(HumanTaskModel.id) # type: ignore + .outerjoin(GroupModel, GroupModel.id == HumanTaskModel.lane_assignment_id) + .join(ProcessInstanceModel) + .join(UserModel, UserModel.id == ProcessInstanceModel.process_initiator_id) + .filter(HumanTaskModel.completed == False) # noqa: E712 + ) + + assigned_user = aliased(UserModel) + if processes_started_by_user: + human_tasks_query = ( + human_tasks_query.filter( + ProcessInstanceModel.process_initiator_id == user_id + ) + .outerjoin( + HumanTaskUserModel, + HumanTaskModel.id == HumanTaskUserModel.human_task_id, + ) + .outerjoin(assigned_user, assigned_user.id == 
HumanTaskUserModel.user_id) + ) + else: + human_tasks_query = human_tasks_query.filter( + ProcessInstanceModel.process_initiator_id != user_id + ).join( + HumanTaskUserModel, + and_( + HumanTaskUserModel.user_id == user_id, + HumanTaskModel.id == HumanTaskUserModel.human_task_id, + ), + ) + if has_lane_assignment_id: + if user_group_identifier: + human_tasks_query = human_tasks_query.filter( + GroupModel.identifier == user_group_identifier + ) + else: + human_tasks_query = human_tasks_query.filter( + HumanTaskModel.lane_assignment_id.is_not(None) # type: ignore + ) + else: + human_tasks_query = human_tasks_query.filter(HumanTaskModel.lane_assignment_id.is_(None)) # type: ignore + + human_tasks = ( + human_tasks_query.add_columns( + ProcessInstanceModel.process_model_identifier, + ProcessInstanceModel.status.label("process_instance_status"), # type: ignore + ProcessInstanceModel.updated_at_in_seconds, + ProcessInstanceModel.created_at_in_seconds, + UserModel.username.label("process_initiator_username"), + GroupModel.identifier.label("assigned_user_group_identifier"), + HumanTaskModel.task_name, + HumanTaskModel.task_title, + HumanTaskModel.process_model_display_name, + HumanTaskModel.process_instance_id, + func.group_concat(assigned_user.username.distinct()).label( + "potential_owner_usernames" + ), + ) + .order_by(desc(HumanTaskModel.id)) # type: ignore + .paginate(page=page, per_page=per_page, error_out=False) + ) + + response_json = { + "results": human_tasks.items, + "pagination": { + "count": len(human_tasks.items), + "total": human_tasks.total, + "pages": human_tasks.pages, + }, + } + + return make_response(jsonify(response_json), 200) + + +def _prepare_form_data( + form_file: str, task_data: Union[dict, None], process_model: ProcessModelInfo +) -> str: + """Prepare_form_data.""" + if task_data is None: + return "" + + file_contents = SpecFileService.get_data(process_model, form_file).decode("utf-8") + return _render_jinja_template(file_contents, task_data) + 
+ +def _render_jinja_template(unprocessed_template: str, data: dict[str, Any]) -> str: + """Render_jinja_template.""" + jinja_environment = jinja2.Environment( + autoescape=True, lstrip_blocks=True, trim_blocks=True + ) + template = jinja_environment.from_string(unprocessed_template) + return template.render(**data) + + +def _get_spiff_task_from_process_instance( + task_id: str, + process_instance: ProcessInstanceModel, + processor: Union[ProcessInstanceProcessor, None] = None, +) -> SpiffTask: + """Get_spiff_task_from_process_instance.""" + if processor is None: + processor = ProcessInstanceProcessor(process_instance) + task_uuid = uuid.UUID(task_id) + spiff_task = processor.bpmn_process_instance.get_task(task_uuid) + + if spiff_task is None: + raise ( + ApiError( + error_code="empty_task", + message="Processor failed to obtain task.", + status_code=500, + ) + ) + return spiff_task + + +# originally from: https://bitcoden.com/answers/python-nested-dictionary-update-value-where-any-nested-key-matches +def _update_form_schema_with_task_data_as_needed( + in_dict: dict, task_data: dict +) -> None: + """Update_nested.""" + for k, value in in_dict.items(): + if "anyOf" == k: + # value will look like the array on the right of "anyOf": ["options_from_task_data_var:awesome_options"] + if isinstance(value, list): + if len(value) == 1: + first_element_in_value_list = value[0] + if isinstance(first_element_in_value_list, str): + if first_element_in_value_list.startswith( + "options_from_task_data_var:" + ): + task_data_var = first_element_in_value_list.replace( + "options_from_task_data_var:", "" + ) + + if task_data_var not in task_data: + raise ( + ApiError( + error_code="missing_task_data_var", + message=( + "Task data is missing variable:" + f" {task_data_var}" + ), + status_code=500, + ) + ) + + select_options_from_task_data = task_data.get(task_data_var) + if isinstance(select_options_from_task_data, list): + if all( + "value" in d and "label" in d + for d in 
select_options_from_task_data + ): + + def map_function( + task_data_select_option: TaskDataSelectOption, + ) -> ReactJsonSchemaSelectOption: + """Map_function.""" + return { + "type": "string", + "enum": [task_data_select_option["value"]], + "title": task_data_select_option["label"], + } + + options_for_react_json_schema_form = list( + map(map_function, select_options_from_task_data) + ) + + in_dict[k] = options_for_react_json_schema_form + elif isinstance(value, dict): + _update_form_schema_with_task_data_as_needed(value, task_data) + elif isinstance(value, list): + for o in value: + if isinstance(o, dict): + _update_form_schema_with_task_data_as_needed(o, task_data) diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/routes/user.py b/spiffworkflow-backend/src/spiffworkflow_backend/routes/user.py index 2bbbc1374..1ac6207c0 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/routes/user.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/routes/user.py @@ -16,8 +16,9 @@ from flask_bpmn.api.api_error import ApiError from werkzeug.wrappers import Response from spiffworkflow_backend.models.user import UserModel +from spiffworkflow_backend.services.authentication_service import AuthenticationService from spiffworkflow_backend.services.authentication_service import ( - AuthenticationService, + MissingAccessTokenError, ) from spiffworkflow_backend.services.authorization_service import AuthorizationService from spiffworkflow_backend.services.user_service import UserService @@ -66,16 +67,19 @@ def verify_token( user_model = get_user_from_decoded_internal_token(decoded_token) except Exception as e: current_app.logger.error( - f"Exception in verify_token getting user from decoded internal token. {e}" + "Exception in verify_token getting user from decoded" + f" internal token. 
{e}" ) elif "iss" in decoded_token.keys(): try: if AuthenticationService.validate_id_token(token): user_info = decoded_token - except ApiError as ae: # API Error is only thrown in the token is outdated. + except ( + ApiError + ) as ae: # API Error is only thrown in the token is outdated. # Try to refresh the token user = UserService.get_user_by_service_and_service_id( - "open_id", decoded_token["sub"] + decoded_token["iss"], decoded_token["sub"] ) if user: refresh_token = AuthenticationService.get_refresh_token(user.id) @@ -104,10 +108,12 @@ def verify_token( ) from e if ( - user_info is not None and "error" not in user_info + user_info is not None + and "error" not in user_info + and "iss" in user_info ): # not sure what to test yet user_model = ( - UserModel.query.filter(UserModel.service == "open_id") + UserModel.query.filter(UserModel.service == user_info["iss"]) .filter(UserModel.service_id == user_info["sub"]) .first() ) @@ -268,10 +274,10 @@ def login_api_return(code: str, state: str, session_state: str) -> str: code, "/v1.0/login_api_return" ) access_token: str = auth_token_object["access_token"] - assert access_token # noqa: S101 + if access_token is None: + raise MissingAccessTokenError("Cannot find the access token for the request") + return access_token - # return redirect("localhost:7000/v1.0/ui") - # return {'uid': 'user_1'} def logout(id_token: str, redirect_url: Optional[str]) -> Response: @@ -292,7 +298,6 @@ def get_decoded_token(token: str) -> Optional[Dict]: try: decoded_token = jwt.decode(token, options={"verify_signature": False}) except Exception as e: - print(f"Exception in get_token_type: {e}") raise ApiError( error_code="invalid_token", message="Cannot decode token." 
) from e @@ -340,9 +345,5 @@ def get_user_from_decoded_internal_token(decoded_token: dict) -> Optional[UserMo ) if user: return user - user = UserModel( - username=service_id, - service=service, - service_id=service_id, - ) + user = UserService.create_user(service_id, service, service_id) return user diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/routes/user_blueprint.py b/spiffworkflow-backend/src/spiffworkflow_backend/routes/user_blueprint.py index 29bbddcd1..fd5c1ae90 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/routes/user_blueprint.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/routes/user_blueprint.py @@ -26,6 +26,7 @@ user_blueprint = Blueprint("main", __name__) # user = UserService.create_user('internal', username) # return Response(json.dumps({"id": user.id}), status=201, mimetype=APPLICATION_JSON) + # def _create_user(username): # user = UserModel.query.filter_by(username=username).first() # if user is not None: diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/scripts/add_user_to_group.py b/spiffworkflow-backend/src/spiffworkflow_backend/scripts/add_user_to_group.py deleted file mode 100644 index d3c777118..000000000 --- a/spiffworkflow-backend/src/spiffworkflow_backend/scripts/add_user_to_group.py +++ /dev/null @@ -1,43 +0,0 @@ -"""Get_env.""" -from typing import Any - -from spiffworkflow_backend.models.group import GroupModel -from spiffworkflow_backend.models.group import GroupNotFoundError -from spiffworkflow_backend.models.script_attributes_context import ( - ScriptAttributesContext, -) -from spiffworkflow_backend.models.user import UserModel -from spiffworkflow_backend.models.user import UserNotFoundError -from spiffworkflow_backend.scripts.script import Script -from spiffworkflow_backend.services.user_service import UserService - - -class AddUserToGroup(Script): - """AddUserToGroup.""" - - def get_description(self) -> str: - """Get_description.""" - return """Add a given user to a given group.""" 
- - def run( - self, - script_attributes_context: ScriptAttributesContext, - *args: Any, - **kwargs: Any, - ) -> Any: - """Run.""" - username = args[0] - group_identifier = args[1] - user = UserModel.query.filter_by(username=username).first() - if user is None: - raise UserNotFoundError( - f"Script 'add_user_to_group' could not find a user with username: {username}" - ) - - group = GroupModel.query.filter_by(identifier=group_identifier).first() - if group is None: - raise GroupNotFoundError( - f"Script 'add_user_to_group' could not find group with identifier '{group_identifier}'." - ) - - UserService.add_user_to_group(user, group) diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/scripts/delete_process_instances_with_criteria.py b/spiffworkflow-backend/src/spiffworkflow_backend/scripts/delete_process_instances_with_criteria.py new file mode 100644 index 000000000..5b4225253 --- /dev/null +++ b/spiffworkflow-backend/src/spiffworkflow_backend/scripts/delete_process_instances_with_criteria.py @@ -0,0 +1,63 @@ +"""Delete_process_instances_with_criteria.""" +from time import time +from typing import Any + +from flask_bpmn.models.db import db +from sqlalchemy import or_ + +from spiffworkflow_backend.models.process_instance import ProcessInstanceModel +from spiffworkflow_backend.models.script_attributes_context import ( + ScriptAttributesContext, +) +from spiffworkflow_backend.models.spiff_step_details import SpiffStepDetailsModel +from spiffworkflow_backend.scripts.script import Script + + +class DeleteProcessInstancesWithCriteria(Script): + """DeleteProcessInstancesWithCriteria.""" + + def get_description(self) -> str: + """Get_description.""" + return "Delete process instances that match the provided criteria," + + def run( + self, + script_attributes_context: ScriptAttributesContext, + *args: Any, + **kwargs: Any, + ) -> Any: + """Run.""" + criteria_list = args[0] + + delete_criteria = [] + delete_time = time() + + for criteria in criteria_list: + 
delete_criteria.append( + (ProcessInstanceModel.process_model_identifier == criteria["name"]) + & ProcessInstanceModel.status.in_(criteria["status"]) # type: ignore + & ( + ProcessInstanceModel.updated_at_in_seconds + < (delete_time - criteria["last_updated_delta"]) + ) + ) + + results = ( + ProcessInstanceModel.query.filter(or_(*delete_criteria)).limit(100).all() + ) + rows_affected = len(results) + + if rows_affected > 0: + ids_to_delete = list(map(lambda r: r.id, results)) # type: ignore + + step_details = SpiffStepDetailsModel.query.filter( + SpiffStepDetailsModel.process_instance_id.in_(ids_to_delete) # type: ignore + ).all() + + for deletion in step_details: + db.session.delete(deletion) + for deletion in results: + db.session.delete(deletion) + db.session.commit() + + return rows_affected diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/scripts/fact_service.py b/spiffworkflow-backend/src/spiffworkflow_backend/scripts/fact_service.py index ee86a84a7..c739d15aa 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/scripts/fact_service.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/scripts/fact_service.py @@ -10,6 +10,11 @@ from spiffworkflow_backend.scripts.script import Script class FactService(Script): """FactService.""" + @staticmethod + def requires_privileged_permissions() -> bool: + """We have deemed this function safe to run without elevated permissions.""" + return False + def get_description(self) -> str: """Get_description.""" return """Just your basic class that can pull in data from a few api endpoints and @@ -30,7 +35,10 @@ class FactService(Script): if fact == "cat": details = "The cat in the hat" # self.get_cat() elif fact == "norris": - details = "Chuck Norris doesn’t read books. He stares them down until he gets the information he wants." + details = ( + "Chuck Norris doesn’t read books. He stares them down until he gets the" + " information he wants." + ) elif fact == "buzzword": details = "Move the Needle." 
# self.get_buzzword() else: diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/scripts/get_all_permissions.py b/spiffworkflow-backend/src/spiffworkflow_backend/scripts/get_all_permissions.py new file mode 100644 index 000000000..e2ab07637 --- /dev/null +++ b/spiffworkflow-backend/src/spiffworkflow_backend/scripts/get_all_permissions.py @@ -0,0 +1,71 @@ +"""Get_env.""" +from collections import OrderedDict +from typing import Any + +from spiffworkflow_backend.models.group import GroupModel +from spiffworkflow_backend.models.permission_assignment import PermissionAssignmentModel +from spiffworkflow_backend.models.permission_target import PermissionTargetModel +from spiffworkflow_backend.models.principal import PrincipalModel +from spiffworkflow_backend.models.script_attributes_context import ( + ScriptAttributesContext, +) +from spiffworkflow_backend.scripts.script import Script + + +class GetAllPermissions(Script): + """GetAllPermissions.""" + + def get_description(self) -> str: + """Get_description.""" + return """Get all permissions currently in the system.""" + + def run( + self, + script_attributes_context: ScriptAttributesContext, + *args: Any, + **kwargs: Any, + ) -> Any: + """Run.""" + permission_assignments = ( + PermissionAssignmentModel.query.join( + PrincipalModel, + PrincipalModel.id == PermissionAssignmentModel.principal_id, + ) + .join(GroupModel, GroupModel.id == PrincipalModel.group_id) + .join( + PermissionTargetModel, + PermissionTargetModel.id + == PermissionAssignmentModel.permission_target_id, + ) + .add_columns( + PermissionAssignmentModel.permission, + PermissionTargetModel.uri, + GroupModel.identifier.label("group_identifier"), + ) + ) + + permissions: OrderedDict[tuple[str, str], list[str]] = OrderedDict() + for pa in permission_assignments: + permissions.setdefault((pa.group_identifier, pa.uri), []).append( + pa.permission + ) + + def replace_suffix(string: str, old: str, new: str) -> str: + """Replace_suffix.""" + if 
string.endswith(old): + return string[: -len(old)] + new + return string + + # sort list of strings based on a specific order + def sort_by_order(string_list: list, order: list) -> list: + """Sort_by_order.""" + return sorted(string_list, key=lambda x: order.index(x)) + + return [ + { + "group_identifier": k[0], + "uri": replace_suffix(k[1], "%", "*"), + "permissions": sort_by_order(v, ["create", "read", "update", "delete"]), + } + for k, v in permissions.items() + ] diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/scripts/get_current_user.py b/spiffworkflow-backend/src/spiffworkflow_backend/scripts/get_current_user.py index a1a1b47e9..66d21a4ca 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/scripts/get_current_user.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/scripts/get_current_user.py @@ -12,6 +12,11 @@ from spiffworkflow_backend.scripts.script import Script class GetCurrentUser(Script): """GetCurrentUser.""" + @staticmethod + def requires_privileged_permissions() -> bool: + """We have deemed this function safe to run without elevated permissions.""" + return False + def get_description(self) -> str: """Get_description.""" return """Return the current user.""" diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/scripts/get_env.py b/spiffworkflow-backend/src/spiffworkflow_backend/scripts/get_env.py index cd586ae00..7a6b0f44c 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/scripts/get_env.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/scripts/get_env.py @@ -10,6 +10,11 @@ from spiffworkflow_backend.scripts.script import Script class GetEnv(Script): """GetEnv.""" + @staticmethod + def requires_privileged_permissions() -> bool: + """We have deemed this function safe to run without elevated permissions.""" + return False + def get_description(self) -> str: """Get_description.""" return """Returns the current environment - ie testing, staging, production.""" diff --git 
a/spiffworkflow-backend/src/spiffworkflow_backend/scripts/get_frontend_url.py b/spiffworkflow-backend/src/spiffworkflow_backend/scripts/get_frontend_url.py index 9490df95a..b128214ab 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/scripts/get_frontend_url.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/scripts/get_frontend_url.py @@ -12,6 +12,11 @@ from spiffworkflow_backend.scripts.script import Script class GetFrontendUrl(Script): """GetFrontendUrl.""" + @staticmethod + def requires_privileged_permissions() -> bool: + """We have deemed this function safe to run without elevated permissions.""" + return False + def get_description(self) -> str: """Get_description.""" return """Return the url to the frontend.""" diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/scripts/get_group_members.py b/spiffworkflow-backend/src/spiffworkflow_backend/scripts/get_group_members.py index 243a8c524..0f20fbb3c 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/scripts/get_group_members.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/scripts/get_group_members.py @@ -12,6 +12,11 @@ from spiffworkflow_backend.scripts.script import Script class GetGroupMembers(Script): """GetGroupMembers.""" + @staticmethod + def requires_privileged_permissions() -> bool: + """We have deemed this function safe to run without elevated permissions.""" + return False + def get_description(self) -> str: """Get_description.""" return """Return the list of usernames of the users in the given group.""" @@ -27,7 +32,8 @@ class GetGroupMembers(Script): group = GroupModel.query.filter_by(identifier=group_identifier).first() if group is None: raise GroupNotFoundError( - f"Script 'get_group_members' could not find group with identifier '{group_identifier}'." + "Script 'get_group_members' could not find group with identifier" + f" '{group_identifier}'." 
) usernames = [u.username for u in group.users] diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/scripts/get_localtime.py b/spiffworkflow-backend/src/spiffworkflow_backend/scripts/get_localtime.py index 689b86d8c..7c688e56f 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/scripts/get_localtime.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/scripts/get_localtime.py @@ -14,6 +14,11 @@ from spiffworkflow_backend.scripts.script import Script class GetLocaltime(Script): """GetLocaltime.""" + @staticmethod + def requires_privileged_permissions() -> bool: + """We have deemed this function safe to run without elevated permissions.""" + return False + def get_description(self) -> str: """Get_description.""" return """Converts a Datetime object into a Datetime object for a specific timezone. diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/scripts/get_process_info.py b/spiffworkflow-backend/src/spiffworkflow_backend/scripts/get_process_info.py index 45c70d6ba..99eb4ce26 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/scripts/get_process_info.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/scripts/get_process_info.py @@ -10,6 +10,11 @@ from spiffworkflow_backend.scripts.script import Script class GetProcessInfo(Script): """GetProcessInfo.""" + @staticmethod + def requires_privileged_permissions() -> bool: + """We have deemed this function safe to run without elevated permissions.""" + return False + def get_description(self) -> str: """Get_description.""" return """Returns a dictionary of information about the currently running process.""" @@ -23,5 +28,7 @@ class GetProcessInfo(Script): """Run.""" return { "process_instance_id": script_attributes_context.process_instance_id, - "process_model_identifier": script_attributes_context.process_model_identifier, + "process_model_identifier": ( + script_attributes_context.process_model_identifier + ), } diff --git 
a/spiffworkflow-backend/src/spiffworkflow_backend/scripts/refresh_permissions.py b/spiffworkflow-backend/src/spiffworkflow_backend/scripts/refresh_permissions.py new file mode 100644 index 000000000..4981af93d --- /dev/null +++ b/spiffworkflow-backend/src/spiffworkflow_backend/scripts/refresh_permissions.py @@ -0,0 +1,39 @@ +"""Get_env.""" +from typing import Any + +from spiffworkflow_backend.models.script_attributes_context import ( + ScriptAttributesContext, +) +from spiffworkflow_backend.scripts.script import Script +from spiffworkflow_backend.services.authorization_service import AuthorizationService + + +class RefreshPermissions(Script): + """RefreshPermissions.""" + + def get_description(self) -> str: + """Get_description.""" + return """Add permissions using a dict. + group_info: [ + { + 'name': group_identifier, + 'users': array_of_users, + 'permissions': [ + { + 'actions': array_of_actions - create, read, etc, + 'uri': target_uri + } + ] + } + ] + """ + + def run( + self, + script_attributes_context: ScriptAttributesContext, + *args: Any, + **kwargs: Any, + ) -> Any: + """Run.""" + group_info = args[0] + AuthorizationService.refresh_permissions(group_info) diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/scripts/script.py b/spiffworkflow-backend/src/spiffworkflow_backend/scripts/script.py index b744694a2..7ca798466 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/scripts/script.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/scripts/script.py @@ -10,9 +10,12 @@ from typing import Callable from flask_bpmn.api.api_error import ApiError +from spiffworkflow_backend.models.process_instance import ProcessInstanceModel +from spiffworkflow_backend.models.process_instance import ProcessInstanceNotFoundError from spiffworkflow_backend.models.script_attributes_context import ( ScriptAttributesContext, ) +from spiffworkflow_backend.services.authorization_service import AuthorizationService # Generally speaking, having some global in a 
flask app is TERRIBLE. # This is here, because after loading the application this will never change under @@ -20,6 +23,10 @@ from spiffworkflow_backend.models.script_attributes_context import ( SCRIPT_SUB_CLASSES = None +class ScriptUnauthorizedForUserError(Exception): + """ScriptUnauthorizedForUserError.""" + + class Script: """Provides an abstract class that defines how scripts should work, this must be extended in all Script Tasks.""" @@ -43,6 +50,15 @@ class Script: + "does not properly implement the run function.", ) + @staticmethod + def requires_privileged_permissions() -> bool: + """It seems safer to default to True and make safe functions opt in for any user to run them. + + To give access to script for a given user, add a 'create' permission with following target-uri: + '/can-run-privileged-script/{script_name}' + """ + return True + @staticmethod def generate_augmented_list( script_attributes_context: ScriptAttributesContext, @@ -71,18 +87,52 @@ class Script: that we created. 
""" instance = subclass() - return lambda *ar, **kw: subclass.run( - instance, - script_attributes_context, - *ar, - **kw, - ) + + def check_script_permission() -> None: + """Check_script_permission.""" + if subclass.requires_privileged_permissions(): + script_function_name = get_script_function_name(subclass) + uri = f"/can-run-privileged-script/{script_function_name}" + process_instance = ProcessInstanceModel.query.filter_by( + id=script_attributes_context.process_instance_id + ).first() + if process_instance is None: + raise ProcessInstanceNotFoundError( + "Could not find a process instance with id" + f" '{script_attributes_context.process_instance_id}' when" + f" running script '{script_function_name}'" + ) + user = process_instance.process_initiator + has_permission = AuthorizationService.user_has_permission( + user=user, permission="create", target_uri=uri + ) + if not has_permission: + raise ScriptUnauthorizedForUserError( + f"User {user.username} does not have access to run" + f" privileged script '{script_function_name}'" + ) + + def run_script_if_allowed(*ar: Any, **kw: Any) -> Any: + """Run_script_if_allowed.""" + check_script_permission() + return subclass.run( + instance, + script_attributes_context, + *ar, + **kw, + ) + + return run_script_if_allowed + + def get_script_function_name(subclass: type[Script]) -> str: + """Get_script_function_name.""" + return subclass.__module__.split(".")[-1] execlist = {} subclasses = Script.get_all_subclasses() for x in range(len(subclasses)): subclass = subclasses[x] - execlist[subclass.__module__.split(".")[-1]] = make_closure( + execlist[get_script_function_name(subclass)] = make_closure( subclass, script_attributes_context=script_attributes_context ) return execlist @@ -101,7 +151,7 @@ class Script: """_get_all_subclasses.""" # hackish mess to make sure we have all the modules loaded for the scripts pkg_dir = os.path.dirname(__file__) - for (_module_loader, name, _ispkg) in pkgutil.iter_modules([pkg_dir]): + for 
_module_loader, name, _ispkg in pkgutil.iter_modules([pkg_dir]): importlib.import_module("." + name, __package__) """Returns a list of all classes that extend this class.""" diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/services/acceptance_test_fixtures.py b/spiffworkflow-backend/src/spiffworkflow_backend/services/acceptance_test_fixtures.py index 81488910e..6bbcad331 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/services/acceptance_test_fixtures.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/services/acceptance_test_fixtures.py @@ -29,7 +29,6 @@ def load_acceptance_test_fixtures() -> list[ProcessInstanceModel]: # suspended - 6 hours ago process_instances = [] for i in range(len(statuses)): - process_instance = ProcessInstanceService.create_process_instance_from_process_model_identifier( test_process_model_id, user ) diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/services/authentication_service.py b/spiffworkflow-backend/src/spiffworkflow_backend/services/authentication_service.py index f4bd357b1..fd2bdb898 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/services/authentication_service.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/services/authentication_service.py @@ -16,6 +16,10 @@ from werkzeug.wrappers import Response from spiffworkflow_backend.models.refresh_token import RefreshTokenModel +class MissingAccessTokenError(Exception): + """MissingAccessTokenError.""" + + class AuthenticationProviderTypes(enum.Enum): """AuthenticationServiceProviders.""" @@ -89,7 +93,7 @@ class AuthenticationService: + f"?state={state}&" + "response_type=code&" + f"client_id={self.client_id()}&" - + "scope=openid&" + + "scope=openid profile email&" + f"redirect_uri={return_redirect_url}" ) return login_redirect_url diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/services/authorization_service.py b/spiffworkflow-backend/src/spiffworkflow_backend/services/authorization_service.py index 
511d138eb..69d19cb7f 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/services/authorization_service.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/services/authorization_service.py @@ -1,7 +1,14 @@ """Authorization_service.""" import inspect import re +from dataclasses import dataclass +from hashlib import sha256 +from hmac import compare_digest +from hmac import HMAC +from typing import Any from typing import Optional +from typing import Set +from typing import TypedDict from typing import Union import jwt @@ -16,8 +23,9 @@ from SpiffWorkflow.task import Task as SpiffTask # type: ignore from sqlalchemy import or_ from sqlalchemy import text -from spiffworkflow_backend.models.active_task import ActiveTaskModel +from spiffworkflow_backend.helpers.api_version import V1_API_PATH_PREFIX from spiffworkflow_backend.models.group import GroupModel +from spiffworkflow_backend.models.human_task import HumanTaskModel from spiffworkflow_backend.models.permission_assignment import PermissionAssignmentModel from spiffworkflow_backend.models.permission_target import PermissionTargetModel from spiffworkflow_backend.models.principal import MissingPrincipalError @@ -34,23 +42,79 @@ class PermissionsFileNotSetError(Exception): """PermissionsFileNotSetError.""" -class ActiveTaskNotFoundError(Exception): - """ActiveTaskNotFoundError.""" +class HumanTaskNotFoundError(Exception): + """HumanTaskNotFoundError.""" class UserDoesNotHaveAccessToTaskError(Exception): """UserDoesNotHaveAccessToTaskError.""" +class InvalidPermissionError(Exception): + """InvalidPermissionError.""" + + +@dataclass +class PermissionToAssign: + """PermissionToAssign.""" + + permission: str + target_uri: str + + +# the relevant permissions are the only API methods that are currently available for each path prefix. +# if we add further API methods, we'll need to evaluate whether they should be added here. 
+PATH_SEGMENTS_FOR_PERMISSION_ALL = [ + {"path": "/logs", "relevant_permissions": ["read"]}, + { + "path": "/process-instances", + "relevant_permissions": ["create", "read", "delete"], + }, + {"path": "/process-instance-suspend", "relevant_permissions": ["create"]}, + {"path": "/process-instance-terminate", "relevant_permissions": ["create"]}, + {"path": "/task-data", "relevant_permissions": ["read", "update"]}, + {"path": "/process-data", "relevant_permissions": ["read"]}, +] + + +class DesiredPermissionDict(TypedDict): + """DesiredPermissionDict.""" + + group_identifiers: Set[str] + permission_assignments: list[PermissionAssignmentModel] + + class AuthorizationService: """Determine whether a user has permission to perform their request.""" + # https://stackoverflow.com/a/71320673/6090676 + @classmethod + def verify_sha256_token(cls, auth_header: Optional[str]) -> None: + """Verify_sha256_token.""" + if auth_header is None: + raise ApiError( + error_code="unauthorized", + message="", + status_code=403, + ) + + received_sign = auth_header.split("sha256=")[-1].strip() + secret = current_app.config["GITHUB_WEBHOOK_SECRET"].encode() + expected_sign = HMAC(key=secret, msg=request.data, digestmod=sha256).hexdigest() + if not compare_digest(received_sign, expected_sign): + raise ApiError( + error_code="unauthorized", + message="", + status_code=403, + ) + @classmethod def has_permission( cls, principals: list[PrincipalModel], permission: str, target_uri: str ) -> bool: """Has_permission.""" principal_ids = [p.id for p in principals] + target_uri_normalized = target_uri.removeprefix(V1_API_PATH_PREFIX) permission_assignments = ( PermissionAssignmentModel.query.filter( @@ -60,10 +124,13 @@ class AuthorizationService: .join(PermissionTargetModel) .filter( or_( - text(f"'{target_uri}' LIKE permission_target.uri"), + text(f"'{target_uri_normalized}' LIKE permission_target.uri"), # to check for exact matches as well # see 
test_user_can_access_base_path_when_given_wildcard_permission unit test - text(f"'{target_uri}' = replace(permission_target.uri, '/%', '')"), + text( + f"'{target_uri_normalized}' =" + " replace(replace(permission_target.uri, '/%', ''), ':%', '')" + ), ) ) .all() @@ -103,17 +170,15 @@ class AuthorizationService: return cls.has_permission(principals, permission, target_uri) @classmethod - def delete_all_permissions_and_recreate(cls) -> None: - """Delete_all_permissions_and_recreate.""" + def delete_all_permissions(cls) -> None: + """Delete_all_permissions_and_recreate. EXCEPT For permissions for the current user?""" for model in [PermissionAssignmentModel, PermissionTargetModel]: db.session.query(model).delete() # cascading to principals doesn't seem to work when attempting to delete all so do it like this instead for group in GroupModel.query.all(): db.session.delete(group) - db.session.commit() - cls.import_permissions_from_yaml_file() @classmethod def associate_user_with_group(cls, user: UserModel, group: GroupModel) -> None: @@ -131,12 +196,13 @@ class AuthorizationService: @classmethod def import_permissions_from_yaml_file( cls, raise_if_missing_user: bool = False - ) -> None: + ) -> DesiredPermissionDict: """Import_permissions_from_yaml_file.""" if current_app.config["SPIFFWORKFLOW_BACKEND_PERMISSIONS_FILE_NAME"] is None: raise ( PermissionsFileNotSetError( - "SPIFFWORKFLOW_BACKEND_PERMISSIONS_FILE_NAME needs to be set in order to import permissions" + "SPIFFWORKFLOW_BACKEND_PERMISSIONS_FILE_NAME needs to be set in" + " order to import permissions" ) ) @@ -145,13 +211,16 @@ class AuthorizationService: permission_configs = yaml.safe_load(file) default_group = None + unique_user_group_identifiers: Set[str] = set() if "default_group" in permission_configs: default_group_identifier = permission_configs["default_group"] default_group = GroupService.find_or_create_group(default_group_identifier) + unique_user_group_identifiers.add(default_group_identifier) if 
"groups" in permission_configs: for group_identifier, group_config in permission_configs["groups"].items(): group = GroupService.find_or_create_group(group_identifier) + unique_user_group_identifiers.add(group_identifier) for username in group_config["users"]: user = UserModel.query.filter_by(username=username).first() if user is None: @@ -164,26 +233,25 @@ class AuthorizationService: continue cls.associate_user_with_group(user, group) + permission_assignments = [] if "permissions" in permission_configs: for _permission_identifier, permission_config in permission_configs[ "permissions" ].items(): uri = permission_config["uri"] - uri_with_percent = re.sub(r"\*", "%", uri) - permission_target = PermissionTargetModel.query.filter_by( - uri=uri_with_percent - ).first() - if permission_target is None: - permission_target = PermissionTargetModel(uri=uri_with_percent) - db.session.add(permission_target) - db.session.commit() + permission_target = cls.find_or_create_permission_target(uri) for allowed_permission in permission_config["allowed_permissions"]: if "groups" in permission_config: for group_identifier in permission_config["groups"]: group = GroupService.find_or_create_group(group_identifier) - cls.create_permission_for_principal( - group.principal, permission_target, allowed_permission + unique_user_group_identifiers.add(group_identifier) + permission_assignments.append( + cls.create_permission_for_principal( + group.principal, + permission_target, + allowed_permission, + ) ) if "users" in permission_config: for username in permission_config["users"]: @@ -194,14 +262,35 @@ class AuthorizationService: .filter(UserModel.username == username) .first() ) - cls.create_permission_for_principal( - principal, permission_target, allowed_permission + permission_assignments.append( + cls.create_permission_for_principal( + principal, permission_target, allowed_permission + ) ) if default_group is not None: for user in UserModel.query.all(): cls.associate_user_with_group(user, 
default_group) + return { + "group_identifiers": unique_user_group_identifiers, + "permission_assignments": permission_assignments, + } + + @classmethod + def find_or_create_permission_target(cls, uri: str) -> PermissionTargetModel: + """Find_or_create_permission_target.""" + uri_with_percent = re.sub(r"\*", "%", uri) + target_uri_normalized = uri_with_percent.removeprefix(V1_API_PATH_PREFIX) + permission_target: Optional[PermissionTargetModel] = ( + PermissionTargetModel.query.filter_by(uri=target_uri_normalized).first() + ) + if permission_target is None: + permission_target = PermissionTargetModel(uri=target_uri_normalized) + db.session.add(permission_target) + db.session.commit() + return permission_target + @classmethod def create_permission_for_principal( cls, @@ -210,13 +299,13 @@ class AuthorizationService: permission: str, ) -> PermissionAssignmentModel: """Create_permission_for_principal.""" - permission_assignment: Optional[ - PermissionAssignmentModel - ] = PermissionAssignmentModel.query.filter_by( - principal_id=principal.id, - permission_target_id=permission_target.id, - permission=permission, - ).first() + permission_assignment: Optional[PermissionAssignmentModel] = ( + PermissionAssignmentModel.query.filter_by( + principal_id=principal.id, + permission_target_id=permission_target.id, + permission=permission, + ).first() + ) if permission_assignment is None: permission_assignment = PermissionAssignmentModel( principal_id=principal.id, @@ -316,7 +405,10 @@ class AuthorizationService: raise ApiError( error_code="unauthorized", - message=f"User {g.user.username} is not authorized to perform requested action: {permission_string} - {request.path}", + message=( + f"User {g.user.username} is not authorized to perform requested action:" + f" {permission_string} - {request.path}" + ), status_code=403, ) @@ -395,7 +487,10 @@ class AuthorizationService: except jwt.InvalidTokenError as exception: raise ApiError( "token_invalid", - "The Authentication token you 
provided is invalid. You need a new token. ", + ( + "The Authentication token you provided is invalid. You need a new" + " token. " + ), ) from exception @staticmethod @@ -405,53 +500,69 @@ class AuthorizationService: user: UserModel, ) -> bool: """Assert_user_can_complete_spiff_task.""" - active_task = ActiveTaskModel.query.filter_by( + human_task = HumanTaskModel.query.filter_by( task_name=spiff_task.task_spec.name, process_instance_id=process_instance_id, ).first() - if active_task is None: - raise ActiveTaskNotFoundError( - f"Could find an active task with task name '{spiff_task.task_spec.name}'" + if human_task is None: + raise HumanTaskNotFoundError( + f"Could not find a human task with task name '{spiff_task.task_spec.name}'" f" for process instance '{process_instance_id}'" ) - if user not in active_task.potential_owners: + if user not in human_task.potential_owners: raise UserDoesNotHaveAccessToTaskError( - f"User {user.username} does not have access to update task'{spiff_task.task_spec.name}'" - f" for process instance '{process_instance_id}'" + f"User {user.username} does not have access to update" + f" task'{spiff_task.task_spec.name}' for process instance" + f" '{process_instance_id}'" ) return True @classmethod def create_user_from_sign_in(cls, user_info: dict) -> UserModel: """Create_user_from_sign_in.""" + """Name, family_name, given_name, middle_name, nickname, preferred_username,""" + """Profile, picture, website, gender, birthdate, zoneinfo, locale, and updated_at. """ + """Email.""" is_new_user = False user_model = ( - UserModel.query.filter(UserModel.service == "open_id") + UserModel.query.filter(UserModel.service == user_info["iss"]) .filter(UserModel.service_id == user_info["sub"]) .first() ) + email = display_name = username = "" + if "email" in user_info: + username = user_info["email"] + email = user_info["email"] + else: # we fall back to the sub, which may be very ugly.
+ username = user_info["sub"] + "@" + user_info["iss"] + + if "preferred_username" in user_info: + display_name = user_info["preferred_username"] + elif "nickname" in user_info: + display_name = user_info["nickname"] + elif "name" in user_info: + display_name = user_info["name"] if user_model is None: current_app.logger.debug("create_user in login_return") is_new_user = True - name = username = email = "" - if "name" in user_info: - name = user_info["name"] - if "username" in user_info: - username = user_info["username"] - elif "preferred_username" in user_info: - username = user_info["preferred_username"] - if "email" in user_info: - email = user_info["email"] user_model = UserService().create_user( - service="open_id", - service_id=user_info["sub"], - name=name, username=username, + service=user_info["iss"], + service_id=user_info["sub"], email=email, + display_name=display_name, ) + else: + # Update with the latest information + user_model.username = username + user_model.email = email + user_model.display_name = display_name + user_model.service = user_info["iss"] + user_model.service_id = user_info["sub"] + # this may eventually get too slow. # when it does, be careful about backgrounding, because # the user will immediately need permissions to use the site. 
@@ -461,11 +572,229 @@ class AuthorizationService: cls.import_permissions_from_yaml_file() if is_new_user: - UserService.add_user_to_active_tasks_if_appropriate(user_model) + UserService.add_user_to_human_tasks_if_appropriate(user_model) # this cannot be None so ignore mypy return user_model # type: ignore + @classmethod + def get_permissions_to_assign( + cls, + permission_set: str, + process_related_path_segment: str, + target_uris: list[str], + ) -> list[PermissionToAssign]: + """Get_permissions_to_assign.""" + permissions = permission_set.split(",") + if permission_set == "all": + permissions = ["create", "read", "update", "delete"] + + permissions_to_assign: list[PermissionToAssign] = [] + + # we were thinking that if you can start an instance, you ought to be able to view your own instances. + if permission_set == "start": + target_uri = f"/process-instances/{process_related_path_segment}" + permissions_to_assign.append( + PermissionToAssign(permission="create", target_uri=target_uri) + ) + target_uri = f"/process-instances/for-me/{process_related_path_segment}" + permissions_to_assign.append( + PermissionToAssign(permission="read", target_uri=target_uri) + ) + + else: + if permission_set == "all": + for path_segment_dict in PATH_SEGMENTS_FOR_PERMISSION_ALL: + target_uri = ( + f"{path_segment_dict['path']}/{process_related_path_segment}" + ) + relevant_permissions = path_segment_dict["relevant_permissions"] + for permission in relevant_permissions: + permissions_to_assign.append( + PermissionToAssign( + permission=permission, target_uri=target_uri + ) + ) + + for target_uri in target_uris: + for permission in permissions: + permissions_to_assign.append( + PermissionToAssign(permission=permission, target_uri=target_uri) + ) + + return permissions_to_assign + + @classmethod + def explode_permissions( + cls, permission_set: str, target: str + ) -> list[PermissionToAssign]: + """Explodes given permissions to and returns list of PermissionToAssign objects. 
+ + These can be used to then iterate through and inserted into the database. + Target Macros: + ALL + * gives access to ALL api endpoints - useful to give admin-like permissions + PG:[process_group_identifier] + * affects given process-group and all sub process-groups and process-models + PM:[process_model_identifier] + * affects given process-model + BASIC + * Basic access to complete tasks and use the site + + Permission Macros: + all + * create, read, update, delete + start + * create process-instances (aka instantiate or start a process-model) + * only works with PG and PM target macros + """ + permissions_to_assign: list[PermissionToAssign] = [] + permissions = permission_set.split(",") + if permission_set == "all": + permissions = ["create", "read", "update", "delete"] + + if target.startswith("PG:"): + process_group_identifier = ( + target.removeprefix("PG:").replace("/", ":").removeprefix(":") + ) + process_related_path_segment = f"{process_group_identifier}:*" + if process_group_identifier == "ALL": + process_related_path_segment = "*" + target_uris = [ + f"/process-groups/{process_related_path_segment}", + f"/process-models/{process_related_path_segment}", + ] + permissions_to_assign = ( + permissions_to_assign + + cls.get_permissions_to_assign( + permission_set, process_related_path_segment, target_uris + ) + ) + + elif target.startswith("PM:"): + process_model_identifier = ( + target.removeprefix("PM:").replace("/", ":").removeprefix(":") + ) + process_related_path_segment = f"{process_model_identifier}/*" + + if process_model_identifier == "ALL": + process_related_path_segment = "*" + + target_uris = [f"/process-models/{process_related_path_segment}"] + permissions_to_assign = ( + permissions_to_assign + + cls.get_permissions_to_assign( + permission_set, process_related_path_segment, target_uris + ) + ) + + elif permission_set == "start": + raise InvalidPermissionError( + "Permission 'start' is only available for macros PM and PG." 
+ ) + + elif target.startswith("BASIC"): + permissions_to_assign.append( + PermissionToAssign( + permission="read", target_uri="/process-instances/for-me" + ) + ) + permissions_to_assign.append( + PermissionToAssign(permission="read", target_uri="/processes") + ) + permissions_to_assign.append( + PermissionToAssign(permission="read", target_uri="/service-tasks") + ) + permissions_to_assign.append( + PermissionToAssign( + permission="read", target_uri="/user-groups/for-current-user" + ) + ) + + for permission in ["create", "read", "update", "delete"]: + permissions_to_assign.append( + PermissionToAssign( + permission=permission, target_uri="/process-instances/reports/*" + ) + ) + permissions_to_assign.append( + PermissionToAssign(permission=permission, target_uri="/tasks/*") + ) + elif target == "ALL": + for permission in permissions: + permissions_to_assign.append( + PermissionToAssign(permission=permission, target_uri="/*") + ) + elif target.startswith("/"): + for permission in permissions: + permissions_to_assign.append( + PermissionToAssign(permission=permission, target_uri=target) + ) + else: + raise InvalidPermissionError( + f"Target uri '{target}' with permission set '{permission_set}' is" + " invalid. The target uri must either be a macro of PG, PM, BASIC, or" + " ALL or an api uri." 
+ ) + + return permissions_to_assign + + @classmethod + def add_permission_from_uri_or_macro( + cls, group_identifier: str, permission: str, target: str + ) -> list[PermissionAssignmentModel]: + """Add_permission_from_uri_or_macro.""" + group = GroupService.find_or_create_group(group_identifier) + permissions_to_assign = cls.explode_permissions(permission, target) + permission_assignments = [] + for permission_to_assign in permissions_to_assign: + permission_target = cls.find_or_create_permission_target( + permission_to_assign.target_uri + ) + permission_assignments.append( + cls.create_permission_for_principal( + group.principal, permission_target, permission_to_assign.permission + ) + ) + return permission_assignments + + @classmethod + def refresh_permissions(cls, group_info: list[dict[str, Any]]) -> None: + """Adds new permission assignments and deletes old ones.""" + initial_permission_assignments = PermissionAssignmentModel.query.all() + result = cls.import_permissions_from_yaml_file() + desired_permission_assignments = result["permission_assignments"] + desired_group_identifiers = result["group_identifiers"] + + for group in group_info: + group_identifier = group["name"] + for username in group["users"]: + GroupService.add_user_to_group_or_add_to_waiting( + username, group_identifier + ) + desired_group_identifiers.add(group_identifier) + for permission in group["permissions"]: + for crud_op in permission["actions"]: + desired_permission_assignments.extend( + cls.add_permission_from_uri_or_macro( + group_identifier=group_identifier, + target=permission["uri"], + permission=crud_op, + ) + ) + desired_group_identifiers.add(group_identifier) + + for ipa in initial_permission_assignments: + if ipa not in desired_permission_assignments: + db.session.delete(ipa) + + groups_to_delete = GroupModel.query.filter( + GroupModel.identifier.not_in(desired_group_identifiers) + ).all() + for gtd in groups_to_delete: + db.session.delete(gtd) + db.session.commit() + class 
KeycloakAuthorization: """Interface with Keycloak server.""" diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/services/file_system_service.py b/spiffworkflow-backend/src/spiffworkflow_backend/services/file_system_service.py index a2a9181d4..cb8b44c6d 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/services/file_system_service.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/services/file_system_service.py @@ -40,10 +40,9 @@ class FileSystemService: @staticmethod def root_path() -> str: """Root_path.""" - # fixme: allow absolute files dir_name = current_app.config["BPMN_SPEC_ABSOLUTE_DIR"] - app_root = current_app.root_path - return os.path.abspath(os.path.join(app_root, "..", dir_name)) + # ensure this is a string - thanks mypy... + return os.path.abspath(os.path.join(dir_name, "")) @staticmethod def id_string_to_relative_path(id_string: str) -> str: diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/services/git_service.py b/spiffworkflow-backend/src/spiffworkflow_backend/services/git_service.py index 4a9037362..43c18edc6 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/services/git_service.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/services/git_service.py @@ -1,54 +1,89 @@ """Git_service.""" import os import shutil +import subprocess # noqa we need the subprocess module to safely run the git commands import uuid from typing import Optional +from typing import Union from flask import current_app from flask import g +from spiffworkflow_backend.config import ConfigurationError from spiffworkflow_backend.models.process_model import ProcessModelInfo from spiffworkflow_backend.services.file_system_service import FileSystemService +class MissingGitConfigsError(Exception): + """MissingGitConfigsError.""" + + +class InvalidGitWebhookBodyError(Exception): + """InvalidGitWebhookBodyError.""" + + +class GitCloneUrlMismatchError(Exception): + """GitCloneUrlMismatchError.""" + + +class 
GitCommandError(Exception): + """GitCommandError.""" + + +# TODO: check for the existence of git and configs on bootup if publishing is enabled class GitService: """GitService.""" - @staticmethod - def get_current_revision() -> str: + @classmethod + def get_current_revision(cls) -> str: """Get_current_revision.""" bpmn_spec_absolute_dir = current_app.config["BPMN_SPEC_ABSOLUTE_DIR"] # The value includes a carriage return character at the end, so we don't grab the last character - current_git_revision = os.popen( # noqa: S605 - f"cd {bpmn_spec_absolute_dir} && git rev-parse --short HEAD" - ).read()[ - :-1 - ] # noqa: S605 - return current_git_revision + with FileSystemService.cd(bpmn_spec_absolute_dir): + return cls.run_shell_command_to_get_stdout( + ["git", "rev-parse", "--short", "HEAD"] + ) - @staticmethod + @classmethod def get_instance_file_contents_for_revision( - process_model: ProcessModelInfo, revision: str - ) -> bytes: + cls, + process_model: ProcessModelInfo, + revision: str, + file_name: Optional[str] = None, + ) -> str: """Get_instance_file_contents_for_revision.""" bpmn_spec_absolute_dir = current_app.config["BPMN_SPEC_ABSOLUTE_DIR"] process_model_relative_path = FileSystemService.process_model_relative_path( process_model ) - shell_cd_command = f"cd {bpmn_spec_absolute_dir}" - shell_git_command = f"git show {revision}:{process_model_relative_path}/{process_model.primary_file_name}" - shell_command = f"{shell_cd_command} && {shell_git_command}" - # git show 78ae5eb:category_number_one/script-task/script-task.bpmn - file_contents: str = os.popen(shell_command).read()[:-1] # noqa: S605 - assert file_contents # noqa: S101 - return file_contents.encode("utf-8") + file_name_to_use = file_name + if file_name_to_use is None: + file_name_to_use = process_model.primary_file_name + with FileSystemService.cd(bpmn_spec_absolute_dir): + shell_command = [ + "git", + "show", + f"{revision}:{process_model_relative_path}/{file_name_to_use}", + ] + return
cls.run_shell_command_to_get_stdout(shell_command) - @staticmethod - def commit(message: str, repo_path: Optional[str] = None) -> str: + @classmethod + def commit( + cls, + message: str, + repo_path: Optional[str] = None, + branch_name: Optional[str] = None, + ) -> str: """Commit.""" + cls.check_for_basic_configs() + branch_name_to_use = branch_name + if branch_name_to_use is None: + branch_name_to_use = current_app.config["GIT_BRANCH"] repo_path_to_use = repo_path if repo_path is None: repo_path_to_use = current_app.config["BPMN_SPEC_ABSOLUTE_DIR"] + if repo_path_to_use is None: + raise ConfigurationError("BPMN_SPEC_ABSOLUTE_DIR config must be set") git_username = "" git_email = "" @@ -58,13 +93,121 @@ class GitService: shell_command_path = os.path.join( current_app.root_path, "..", "..", "bin", "git_commit_bpmn_models_repo" ) - shell_command = f"{shell_command_path} '{repo_path_to_use}' '{message}' '{git_username}' '{git_email}'" - output = os.popen(shell_command).read() # noqa: S605 - return output + shell_command = [ + shell_command_path, + repo_path_to_use, + message, + branch_name_to_use, + git_username, + git_email, + current_app.config["GIT_USER_PASSWORD"], + ] + return cls.run_shell_command_to_get_stdout(shell_command) + + @classmethod + def check_for_basic_configs(cls) -> None: + """Check_for_basic_configs.""" + if current_app.config["GIT_BRANCH"] is None: + raise MissingGitConfigsError( + "Missing config for GIT_BRANCH. " + "This is required for publishing process models" + ) + + @classmethod + def check_for_publish_configs(cls) -> None: + """Check_for_configs.""" + cls.check_for_basic_configs() + if current_app.config["GIT_BRANCH_TO_PUBLISH_TO"] is None: + raise MissingGitConfigsError( + "Missing config for GIT_BRANCH_TO_PUBLISH_TO. " + "This is required for publishing process models" + ) + if current_app.config["GIT_CLONE_URL_FOR_PUBLISHING"] is None: + raise MissingGitConfigsError( + "Missing config for GIT_CLONE_URL_FOR_PUBLISHING. 
" + "This is required for publishing process models" + ) + + @classmethod + def run_shell_command_as_boolean(cls, command: list[str]) -> bool: + """Run_shell_command_as_boolean.""" + # we know result will be a bool here + result: bool = cls.run_shell_command(command, return_success_state=True) # type: ignore + return result + + @classmethod + def run_shell_command_to_get_stdout(cls, command: list[str]) -> str: + """Run_shell_command_to_get_stdout.""" + # we know result will be a CompletedProcess here + result: subprocess.CompletedProcess[bytes] = cls.run_shell_command( + command, return_success_state=False + ) # type: ignore + return result.stdout.decode("utf-8").strip() + + @classmethod + def run_shell_command( + cls, command: list[str], return_success_state: bool = False + ) -> Union[subprocess.CompletedProcess[bytes], bool]: + """Run_shell_command.""" + # this is fine since we pass the commands directly + result = subprocess.run(command, check=False, capture_output=True) # noqa + if return_success_state: + return result.returncode == 0 + + if result.returncode != 0: + stdout = result.stdout.decode("utf-8") + stderr = result.stderr.decode("utf-8") + raise GitCommandError( + f"Failed to execute git command: {command} " + f"Stdout: {stdout} " + f"Stderr: {stderr} " + ) + + return result + + # only supports github right now + @classmethod + def handle_web_hook(cls, webhook: dict) -> bool: + """Handle_web_hook.""" + cls.check_for_publish_configs() + + if "repository" not in webhook or "clone_url" not in webhook["repository"]: + raise InvalidGitWebhookBodyError( + "Cannot find required keys of 'repository:clone_url' from webhook" + f" body: {webhook}" + ) + + clone_url = webhook["repository"]["clone_url"] + if clone_url != current_app.config["GIT_CLONE_URL_FOR_PUBLISHING"]: + raise GitCloneUrlMismatchError( + "Configured clone url does not match clone url from webhook:" + f" {clone_url}" + ) + + if "ref" not in webhook: + raise InvalidGitWebhookBodyError( + f"Could 
not find the 'ref' arg in the webhook boy: {webhook}" + ) + + if current_app.config["GIT_BRANCH"] is None: + raise MissingGitConfigsError( + "Missing config for GIT_BRANCH. This is required for updating the" + " repository as a result of the webhook" + ) + + ref = webhook["ref"] + git_branch = current_app.config["GIT_BRANCH"] + if ref != f"refs/heads/{git_branch}": + return False + + with FileSystemService.cd(current_app.config["BPMN_SPEC_ABSOLUTE_DIR"]): + cls.run_shell_command(["git", "pull"]) + return True @classmethod def publish(cls, process_model_id: str, branch_to_update: str) -> str: """Publish.""" + cls.check_for_publish_configs() source_process_model_root = FileSystemService.root_path() source_process_model_path = os.path.join( source_process_model_root, process_model_id @@ -76,21 +219,29 @@ class GitService: # we are adding a guid to this so the flake8 issue has been mitigated destination_process_root = f"/tmp/{clone_dir}" # noqa - cmd = ( - f"git clone https://{current_app.config['GIT_USERNAME']}:{current_app.config['GIT_USER_PASSWORD']}" - f"@github.com/sartography/sample-process-models.git {destination_process_root}" + git_clone_url = current_app.config["GIT_CLONE_URL_FOR_PUBLISHING"].replace( + "https://", + f"https://{current_app.config['GIT_USERNAME']}:{current_app.config['GIT_USER_PASSWORD']}@", ) - os.system(cmd) # noqa: S605 + cmd = ["git", "clone", git_clone_url, destination_process_root] + + cls.run_shell_command(cmd) with FileSystemService.cd(destination_process_root): # create publish branch from branch_to_update - os.system(f"git checkout {branch_to_update}") # noqa: S605 - publish_branch = f"publish-{process_model_id}" - command = f"git show-ref --verify refs/remotes/origin/{publish_branch}" - output = os.popen(command).read() # noqa: S605 - if output: - os.system(f"git checkout {publish_branch}") # noqa: S605 + cls.run_shell_command(["git", "checkout", branch_to_update]) + branch_to_pull_request = f"publish-{process_model_id}" + + # check 
if branch exists and checkout appropriately + command = [ + "git", + "show-ref", + "--verify", + f"refs/remotes/origin/{branch_to_pull_request}", + ] + if cls.run_shell_command_as_boolean(command): + cls.run_shell_command(["git", "checkout", branch_to_pull_request]) else: - os.system(f"git checkout -b {publish_branch}") # noqa: S605 + cls.run_shell_command(["git", "checkout", "-b", branch_to_pull_request]) # copy files from process model into the new publish branch destination_process_model_path = os.path.join( @@ -100,15 +251,19 @@ class GitService: shutil.rmtree(destination_process_model_path) shutil.copytree(source_process_model_path, destination_process_model_path) - # add and commit files to publish_branch, then push - commit_message = f"Request to publish changes to {process_model_id}, from {g.user.username}" - cls.commit(commit_message, destination_process_root) - os.system("git push") # noqa + # add and commit files to branch_to_pull_request, then push + commit_message = ( + f"Request to publish changes to {process_model_id}, " + f"from {g.user.username} on {current_app.config['ENV_IDENTIFIER']}" + ) + cls.commit(commit_message, destination_process_root, branch_to_pull_request) # build url for github page to open PR - output = os.popen("git config --get remote.origin.url").read() # noqa - remote_url = output.strip().replace(".git", "") - pr_url = f"{remote_url}/compare/{publish_branch}?expand=1" + git_remote = cls.run_shell_command_to_get_stdout( + ["git", "config", "--get", "remote.origin.url"] + ) + remote_url = git_remote.strip().replace(".git", "") + pr_url = f"{remote_url}/compare/{branch_to_update}...{branch_to_pull_request}?expand=1" # try to clean up if os.path.exists(destination_process_root): diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/services/group_service.py b/spiffworkflow-backend/src/spiffworkflow_backend/services/group_service.py index aa560009e..911d41ac4 100644 --- 
a/spiffworkflow-backend/src/spiffworkflow_backend/services/group_service.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/services/group_service.py @@ -4,6 +4,7 @@ from typing import Optional from flask_bpmn.models.db import db from spiffworkflow_backend.models.group import GroupModel +from spiffworkflow_backend.models.user import UserModel from spiffworkflow_backend.services.user_service import UserService @@ -22,3 +23,15 @@ class GroupService: db.session.commit() UserService.create_principal(group.id, id_column_name="group_id") return group + + @classmethod + def add_user_to_group_or_add_to_waiting( + cls, username: str, group_identifier: str + ) -> None: + """Add_user_to_group_or_add_to_waiting.""" + group = cls.find_or_create_group(group_identifier) + user = UserModel.query.filter_by(username=username).first() + if user: + UserService.add_user_to_group(user, group) + else: + UserService.add_waiting_group_assignment(username, group) diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/services/logging_service.py b/spiffworkflow-backend/src/spiffworkflow_backend/services/logging_service.py index dd34cb3fd..599d5228d 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/services/logging_service.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/services/logging_service.py @@ -122,7 +122,8 @@ def setup_logger(app: Flask) -> None: if upper_log_level_string not in log_levels: raise InvalidLogLevelError( - f"Log level given is invalid: '{upper_log_level_string}'. Valid options are {log_levels}" + f"Log level given is invalid: '{upper_log_level_string}'. 
Valid options are" + f" {log_levels}" ) log_level = getattr(logging, upper_log_level_string) @@ -176,7 +177,8 @@ def setup_logger(app: Flask) -> None: spiff_logger = logging.getLogger("spiff") spiff_logger.setLevel(spiff_log_level) spiff_formatter = logging.Formatter( - "%(asctime)s | %(levelname)s | %(message)s | %(action)s | %(task_type)s | %(process)s | %(processName)s | %(process_instance_id)s" + "%(asctime)s | %(levelname)s | %(message)s | %(action)s | %(task_type)s |" + " %(process)s | %(processName)s | %(process_instance_id)s" ) # if you add a handler to spiff, it will be used/inherited by spiff.metrics diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/services/message_service.py b/spiffworkflow-backend/src/spiffworkflow_backend/services/message_service.py index cfb42c836..b3d1e831f 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/services/message_service.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/services/message_service.py @@ -145,8 +145,11 @@ class MessageService: if process_instance_receive is None: raise MessageServiceError( ( - f"Process instance cannot be found for queued message: {message_instance_receive.id}." 
- f"Tried with id {message_instance_receive.process_instance_id}", + ( + "Process instance cannot be found for queued message:" + f" {message_instance_receive.id}.Tried with id" + f" {message_instance_receive.process_instance_id}" + ), ) ) @@ -182,7 +185,6 @@ class MessageService: ) for message_instance_receive in message_instances_receive: - # sqlalchemy supports select / where statements like active record apparantly # https://docs.sqlalchemy.org/en/14/core/tutorial.html#conjunctions message_correlation_select = ( diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/services/process_instance_processor.py b/spiffworkflow-backend/src/spiffworkflow_backend/services/process_instance_processor.py index ffe69fd72..4a4f99a47 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/services/process_instance_processor.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/services/process_instance_processor.py @@ -17,6 +17,7 @@ from typing import Optional from typing import Tuple from typing import TypedDict from typing import Union +from uuid import UUID import dateparser import pytz @@ -43,6 +44,9 @@ from SpiffWorkflow.spiff.serializer.task_spec_converters import ( CallActivityTaskConverter, ) from SpiffWorkflow.spiff.serializer.task_spec_converters import EndEventConverter +from SpiffWorkflow.spiff.serializer.task_spec_converters import ( + EventBasedGatewayConverter, +) from SpiffWorkflow.spiff.serializer.task_spec_converters import ( IntermediateCatchEventConverter, ) @@ -65,11 +69,11 @@ from SpiffWorkflow.task import Task as SpiffTask # type: ignore from SpiffWorkflow.task import TaskState from SpiffWorkflow.util.deep_merge import DeepMerge # type: ignore -from spiffworkflow_backend.models.active_task import ActiveTaskModel -from spiffworkflow_backend.models.active_task_user import ActiveTaskUserModel from spiffworkflow_backend.models.file import File from spiffworkflow_backend.models.file import FileType from spiffworkflow_backend.models.group import 
GroupModel +from spiffworkflow_backend.models.human_task import HumanTaskModel +from spiffworkflow_backend.models.human_task_user import HumanTaskUserModel from spiffworkflow_backend.models.message_correlation import MessageCorrelationModel from spiffworkflow_backend.models.message_correlation_message_instance import ( MessageCorrelationMessageInstanceModel, @@ -151,6 +155,9 @@ class CustomBpmnScriptEngine(PythonScriptEngine): # type: ignore "time": time, "decimal": decimal, "_strptime": _strptime, + "enumerate": enumerate, + "list": list, + "map": map, } # This will overwrite the standard builtins @@ -209,14 +216,14 @@ class CustomBpmnScriptEngine(PythonScriptEngine): # type: ignore except Exception as exception: if task is None: raise ProcessInstanceProcessorError( - "Error evaluating expression: " - "'%s', exception: %s" % (expression, str(exception)), + "Error evaluating expression: '%s', exception: %s" + % (expression, str(exception)), ) from exception else: raise WorkflowTaskExecException( task, - "Error evaluating expression " - "'%s', %s" % (expression, str(exception)), + "Error evaluating expression '%s', %s" + % (expression, str(exception)), ) from exception def execute( @@ -263,6 +270,7 @@ class ProcessInstanceProcessor: EndEventConverter, IntermediateCatchEventConverter, IntermediateThrowEventConverter, + EventBasedGatewayConverter, ManualTaskConverter, NoneTaskConverter, ReceiveTaskConverter, @@ -276,6 +284,7 @@ class ProcessInstanceProcessor: ] ) _serializer = BpmnWorkflowSerializer(wf_spec_converter, version=SERIALIZER_VERSION) + _event_serializer = EventBasedGatewayConverter() PROCESS_INSTANCE_ID_KEY = "process_instance_id" VALIDATION_PROCESS_KEY = "validate_only" @@ -292,9 +301,7 @@ class ProcessInstanceProcessor: tld.spiff_step = process_instance_model.spiff_step # we want this to be the fully qualified path to the process model including all group subcomponents - current_app.config[ - "THREAD_LOCAL_DATA" - ].process_model_identifier = ( + 
current_app.config["THREAD_LOCAL_DATA"].process_model_identifier = ( f"{process_instance_model.process_model_identifier}" ) @@ -375,8 +382,10 @@ class ProcessInstanceProcessor: except MissingSpecError as ke: raise ApiError( error_code="unexpected_process_instance_structure", - message="Failed to deserialize process_instance" - " '%s' due to a mis-placed or missing task '%s'" + message=( + "Failed to deserialize process_instance" + " '%s' due to a mis-placed or missing task '%s'" + ) % (self.process_model_identifier, str(ke)), ) from ke @@ -392,7 +401,10 @@ class ProcessInstanceProcessor: raise ( ApiError( "process_model_not_found", - f"The given process model was not found: {process_model_identifier}.", + ( + "The given process model was not found:" + f" {process_model_identifier}." + ), ) ) spec_files = SpecFileService.get_files(process_model_info) @@ -522,8 +534,11 @@ class ProcessInstanceProcessor: potential_owner_ids.append(lane_owner_user.id) self.raise_if_no_potential_owners( potential_owner_ids, - f"No users found in task data lane owner list for lane: {task_lane}. " - f"The user list used: {task.data['lane_owners'][task_lane]}", + ( + "No users found in task data lane owner list for lane:" + f" {task_lane}. 
The user list used:" + f" {task.data['lane_owners'][task_lane]}" + ), ) else: group_model = GroupModel.query.filter_by(identifier=task_lane).first() @@ -551,14 +566,14 @@ class ProcessInstanceProcessor: """SaveSpiffStepDetails.""" bpmn_json = self.serialize() wf_json = json.loads(bpmn_json) - task_json = wf_json["tasks"] + task_json = {"tasks": wf_json["tasks"], "subprocesses": wf_json["subprocesses"]} return { "process_instance_id": self.process_instance_model.id, "spiff_step": self.process_instance_model.spiff_step or 1, "task_json": task_json, "timestamp": round(time.time()), - "completed_by_user_id": self.current_user().id, + # "completed_by_user_id": self.current_user().id, } def spiff_step_details(self) -> SpiffStepDetailsModel: @@ -569,17 +584,10 @@ class ProcessInstanceProcessor: spiff_step=details_mapping["spiff_step"], task_json=details_mapping["task_json"], timestamp=details_mapping["timestamp"], - completed_by_user_id=details_mapping["completed_by_user_id"], + # completed_by_user_id=details_mapping["completed_by_user_id"], ) return details_model - def save_spiff_step_details(self, active_task: ActiveTaskModel) -> None: - """SaveSpiffStepDetails.""" - details_model = self.spiff_step_details() - details_model.lane_assignment_id = active_task.lane_assignment_id - db.session.add(details_model) - db.session.commit() - def extract_metadata(self, process_model_info: ProcessModelInfo) -> None: """Extract_metadata.""" metadata_extraction_paths = process_model_info.metadata_extraction_paths @@ -615,7 +623,7 @@ class ProcessInstanceProcessor: db.session.add(pim) db.session.commit() - def save(self) -> None: + def _save(self) -> None: """Saves the current state of this processor to the database.""" self.process_instance_model.bpmn_json = self.serialize() @@ -637,7 +645,10 @@ class ProcessInstanceProcessor: db.session.add(self.process_instance_model) db.session.commit() - active_tasks = ActiveTaskModel.query.filter_by( + def save(self) -> None: + """Saves the 
current state and moves on to the next state.""" + self._save() + human_tasks = HumanTaskModel.query.filter_by( process_instance_id=self.process_instance_model.id ).all() ready_or_waiting_tasks = self.get_all_ready_or_waiting_tasks() @@ -668,14 +679,14 @@ class ProcessInstanceProcessor: if "formUiSchemaFilename" in properties: ui_form_file_name = properties["formUiSchemaFilename"] - active_task = None - for at in active_tasks: + human_task = None + for at in human_tasks: if at.task_id == str(ready_or_waiting_task.id): - active_task = at - active_tasks.remove(at) + human_task = at + human_tasks.remove(at) - if active_task is None: - active_task = ActiveTaskModel( + if human_task is None: + human_task = HumanTaskModel( process_instance_id=self.process_instance_model.id, process_model_display_name=process_model_display_name, form_file_name=form_file_name, @@ -687,23 +698,65 @@ class ProcessInstanceProcessor: task_status=ready_or_waiting_task.get_state_name(), lane_assignment_id=potential_owner_hash["lane_assignment_id"], ) - db.session.add(active_task) + db.session.add(human_task) db.session.commit() for potential_owner_id in potential_owner_hash[ "potential_owner_ids" ]: - active_task_user = ActiveTaskUserModel( - user_id=potential_owner_id, active_task_id=active_task.id + human_task_user = HumanTaskUserModel( + user_id=potential_owner_id, human_task_id=human_task.id ) - db.session.add(active_task_user) + db.session.add(human_task_user) db.session.commit() - if len(active_tasks) > 0: - for at in active_tasks: - db.session.delete(at) + if len(human_tasks) > 0: + for at in human_tasks: + at.completed = True + db.session.add(at) db.session.commit() + def serialize_task_spec(self, task_spec: SpiffTask) -> Any: + """Get a serialized version of a task spec.""" + # The task spec is NOT actually a SpiffTask, it is the task spec attached to a SpiffTask + # Not sure why mypy accepts this but whatever. 
+ return self._serializer.spec_converter.convert(task_spec) + + def send_bpmn_event(self, event_data: dict[str, Any]) -> None: + """Send an event to the workflow.""" + payload = event_data.pop("payload", None) + event_definition = self._event_serializer.restore(event_data) + if payload is not None: + event_definition.payload = payload + current_app.logger.info( + f"Event of type {event_definition.event_type} sent to process instance" + f" {self.process_instance_model.id}" + ) + self.bpmn_process_instance.catch(event_definition) + self.do_engine_steps(save=True) + + def manual_complete_task(self, task_id: str, execute: bool) -> None: + """Mark the task complete optionally executing it.""" + spiff_task = self.bpmn_process_instance.get_task(UUID(task_id)) + if execute: + current_app.logger.info( + f"Manually executing Task {spiff_task.task_spec.name} of process" + f" instance {self.process_instance_model.id}" + ) + spiff_task.complete() + else: + current_app.logger.info( + f"Skipping Task {spiff_task.task_spec.name} of process instance" + f" {self.process_instance_model.id}" + ) + spiff_task._set_state(TaskState.COMPLETED) + for child in spiff_task.children: + child.task_spec._update(child) + self.bpmn_process_instance.last_task = spiff_task + self._save() + # Saving the workflow seems to reset the status + self.suspend() + @staticmethod def get_parser() -> MyCustomParser: """Get_parser.""" @@ -738,14 +791,13 @@ class ProcessInstanceProcessor: """Bpmn_file_full_path_from_bpmn_process_identifier.""" if bpmn_process_identifier is None: raise ValueError( - "bpmn_file_full_path_from_bpmn_process_identifier: bpmn_process_identifier is unexpectedly None" + "bpmn_file_full_path_from_bpmn_process_identifier:" + " bpmn_process_identifier is unexpectedly None" ) - spec_reference = ( - SpecReferenceCache.query.filter_by(identifier=bpmn_process_identifier) - .filter_by(type="process") - .first() - ) + spec_reference = SpecReferenceCache.query.filter_by( + 
identifier=bpmn_process_identifier, type="process" + ).first() bpmn_file_full_path = None if spec_reference is None: bpmn_file_full_path = ( @@ -762,7 +814,10 @@ class ProcessInstanceProcessor: raise ( ApiError( error_code="could_not_find_bpmn_process_identifier", - message="Could not find the the given bpmn process identifier from any sources: %s" + message=( + "Could not find the the given bpmn process identifier from any" + " sources: %s" + ) % bpmn_process_identifier, ) ) @@ -786,7 +841,6 @@ class ProcessInstanceProcessor: new_bpmn_files = set() for bpmn_process_identifier in processor_dependencies_new: - # ignore identifiers that spiff already knows about if bpmn_process_identifier in bpmn_process_identifiers_in_parser: continue @@ -829,7 +883,10 @@ class ProcessInstanceProcessor: raise ( ApiError( error_code="no_primary_bpmn_error", - message="There is no primary BPMN process id defined for process_model %s" + message=( + "There is no primary BPMN process id defined for" + " process_model %s" + ) % process_model_info.id, ) ) @@ -890,7 +947,10 @@ class ProcessInstanceProcessor: if not bpmn_message.correlations: raise ApiError( "message_correlations_missing", - f"Could not find any message correlations bpmn_message: {bpmn_message.name}", + ( + "Could not find any message correlations bpmn_message:" + f" {bpmn_message.name}" + ), ) message_correlations = [] @@ -910,12 +970,16 @@ class ProcessInstanceProcessor: if message_correlation_property is None: raise ApiError( "message_correlations_missing_from_process", - "Could not find a known message correlation with identifier:" - f"{message_correlation_property_identifier}", + ( + "Could not find a known message correlation with" + f" identifier:{message_correlation_property_identifier}" + ), ) message_correlations.append( { - "message_correlation_property": message_correlation_property, + "message_correlation_property": ( + message_correlation_property + ), "name": message_correlation_key, "value": 
message_correlation_property_value, } @@ -972,7 +1036,10 @@ class ProcessInstanceProcessor: if message_model is None: raise ApiError( "invalid_message_name", - f"Invalid message name: {waiting_task.task_spec.event_definition.name}.", + ( + "Invalid message name:" + f" {waiting_task.task_spec.event_definition.name}." + ), ) # Ensure we are only creating one message instance for each waiting message @@ -1179,11 +1246,20 @@ class ProcessInstanceProcessor: ) return user_tasks # type: ignore - def complete_task(self, task: SpiffTask, active_task: ActiveTaskModel) -> None: + def complete_task( + self, task: SpiffTask, human_task: HumanTaskModel, user: UserModel + ) -> None: """Complete_task.""" self.increment_spiff_step() self.bpmn_process_instance.complete_task_from_id(task.id) - self.save_spiff_step_details(active_task) + human_task.completed_by_user_id = user.id + human_task.completed = True + db.session.add(human_task) + details_model = self.spiff_step_details() + db.session.add(details_model) + + # this is the thing that actually commits the db transaction (on behalf of the other updates above as well) + self.save() def get_data(self) -> dict[str, Any]: """Get_data.""" diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/services/process_instance_report_service.py b/spiffworkflow-backend/src/spiffworkflow_backend/services/process_instance_report_service.py index 84d5d6752..cd20b9b57 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/services/process_instance_report_service.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/services/process_instance_report_service.py @@ -1,14 +1,31 @@ """Process_instance_report_service.""" +import re from dataclasses import dataclass +from typing import Any from typing import Optional import sqlalchemy +from flask_bpmn.api.api_error import ApiError from flask_bpmn.models.db import db +from sqlalchemy import and_ +from sqlalchemy import func +from sqlalchemy import or_ +from sqlalchemy.orm import aliased +from 
sqlalchemy.orm import selectinload +from spiffworkflow_backend.models.group import GroupModel +from spiffworkflow_backend.models.human_task import HumanTaskModel +from spiffworkflow_backend.models.human_task_user import HumanTaskUserModel +from spiffworkflow_backend.models.process_instance import ProcessInstanceModel +from spiffworkflow_backend.models.process_instance_metadata import ( + ProcessInstanceMetadataModel, +) from spiffworkflow_backend.models.process_instance_report import ( ProcessInstanceReportModel, ) from spiffworkflow_backend.models.user import UserModel +from spiffworkflow_backend.models.user_group_assignment import UserGroupAssignmentModel +from spiffworkflow_backend.services.process_model_service import ProcessModelService @dataclass @@ -16,14 +33,17 @@ class ProcessInstanceReportFilter: """ProcessInstanceReportFilter.""" process_model_identifier: Optional[str] = None + user_group_identifier: Optional[str] = None start_from: Optional[int] = None start_to: Optional[int] = None end_from: Optional[int] = None end_to: Optional[int] = None process_status: Optional[list[str]] = None initiated_by_me: Optional[bool] = None + has_terminal_status: Optional[bool] = None with_tasks_completed_by_me: Optional[bool] = None - with_tasks_completed_by_my_group: Optional[bool] = None + with_tasks_assigned_to_my_group: Optional[bool] = None + with_relation_to_me: Optional[bool] = None def to_dict(self) -> dict[str, str]: """To_dict.""" @@ -31,6 +51,8 @@ class ProcessInstanceReportFilter: if self.process_model_identifier is not None: d["process_model_identifier"] = self.process_model_identifier + if self.user_group_identifier is not None: + d["user_group_identifier"] = self.user_group_identifier if self.start_from is not None: d["start_from"] = str(self.start_from) if self.start_to is not None: @@ -43,14 +65,18 @@ class ProcessInstanceReportFilter: d["process_status"] = ",".join(self.process_status) if self.initiated_by_me is not None: d["initiated_by_me"] = 
str(self.initiated_by_me).lower() + if self.has_terminal_status is not None: + d["has_terminal_status"] = str(self.has_terminal_status).lower() if self.with_tasks_completed_by_me is not None: d["with_tasks_completed_by_me"] = str( self.with_tasks_completed_by_me ).lower() - if self.with_tasks_completed_by_my_group is not None: - d["with_tasks_completed_by_my_group"] = str( - self.with_tasks_completed_by_my_group + if self.with_tasks_assigned_to_my_group is not None: + d["with_tasks_assigned_to_my_group"] = str( + self.with_tasks_assigned_to_my_group ).lower() + if self.with_relation_to_me is not None: + d["with_relation_to_me"] = str(self.with_relation_to_me).lower() return d @@ -58,6 +84,55 @@ class ProcessInstanceReportFilter: class ProcessInstanceReportService: """ProcessInstanceReportService.""" + @classmethod + def system_metadata_map(cls, metadata_key: str) -> dict[str, Any]: + """System_metadata_map.""" + # TODO replace with system reports that are loaded on launch (or similar) + temp_system_metadata_map = { + "default": { + "columns": cls.builtin_column_options(), + "filter_by": [], + "order_by": ["-start_in_seconds", "-id"], + }, + "system_report_completed_instances_initiated_by_me": { + "columns": [ + {"Header": "id", "accessor": "id"}, + { + "Header": "process_model_display_name", + "accessor": "process_model_display_name", + }, + {"Header": "start_in_seconds", "accessor": "start_in_seconds"}, + {"Header": "end_in_seconds", "accessor": "end_in_seconds"}, + {"Header": "status", "accessor": "status"}, + ], + "filter_by": [ + {"field_name": "initiated_by_me", "field_value": True}, + {"field_name": "has_terminal_status", "field_value": True}, + ], + "order_by": ["-start_in_seconds", "-id"], + }, + "system_report_completed_instances_with_tasks_completed_by_me": { + "columns": cls.builtin_column_options(), + "filter_by": [ + {"field_name": "with_tasks_completed_by_me", "field_value": True}, + {"field_name": "has_terminal_status", "field_value": True}, + ], + 
"order_by": ["-start_in_seconds", "-id"], + }, + "system_report_completed_instances_with_tasks_completed_by_my_groups": { + "columns": cls.builtin_column_options(), + "filter_by": [ + { + "field_name": "with_tasks_assigned_to_my_group", + "field_value": True, + }, + {"field_name": "has_terminal_status", "field_value": True}, + ], + "order_by": ["-start_in_seconds", "-id"], + }, + } + return temp_system_metadata_map[metadata_key] + @classmethod def report_with_identifier( cls, @@ -82,50 +157,10 @@ class ProcessInstanceReportService: if process_instance_report is not None: return process_instance_report # type: ignore - # TODO replace with system reports that are loaded on launch (or similar) - temp_system_metadata_map = { - "default": { - "columns": cls.builtin_column_options(), - "filter_by": [], - "order_by": ["-start_in_seconds", "-id"], - }, - "system_report_instances_initiated_by_me": { - "columns": [ - {"Header": "id", "accessor": "id"}, - { - "Header": "process_model_display_name", - "accessor": "process_model_display_name", - }, - {"Header": "start_in_seconds", "accessor": "start_in_seconds"}, - {"Header": "end_in_seconds", "accessor": "end_in_seconds"}, - {"Header": "status", "accessor": "status"}, - ], - "filter_by": [{"field_name": "initiated_by_me", "field_value": True}], - "order_by": ["-start_in_seconds", "-id"], - }, - "system_report_instances_with_tasks_completed_by_me": { - "columns": cls.builtin_column_options(), - "filter_by": [ - {"field_name": "with_tasks_completed_by_me", "field_value": True} - ], - "order_by": ["-start_in_seconds", "-id"], - }, - "system_report_instances_with_tasks_completed_by_my_groups": { - "columns": cls.builtin_column_options(), - "filter_by": [ - { - "field_name": "with_tasks_completed_by_my_group", - "field_value": True, - } - ], - "order_by": ["-start_in_seconds", "-id"], - }, - } - process_instance_report = ProcessInstanceReportModel( identifier=report_identifier, created_by_id=user.id, - 
report_metadata=temp_system_metadata_map[report_identifier], + report_metadata=cls.system_metadata_map(report_identifier), ) return process_instance_report # type: ignore @@ -164,27 +199,31 @@ class ProcessInstanceReportService: return filters[key].split(",") if key in filters else None process_model_identifier = filters.get("process_model_identifier") + user_group_identifier = filters.get("user_group_identifier") start_from = int_value("start_from") start_to = int_value("start_to") end_from = int_value("end_from") end_to = int_value("end_to") process_status = list_value("process_status") initiated_by_me = bool_value("initiated_by_me") + has_terminal_status = bool_value("has_terminal_status") with_tasks_completed_by_me = bool_value("with_tasks_completed_by_me") - with_tasks_completed_by_my_group = bool_value( - "with_tasks_completed_by_my_group" - ) + with_tasks_assigned_to_my_group = bool_value("with_tasks_assigned_to_my_group") + with_relation_to_me = bool_value("with_relation_to_me") report_filter = ProcessInstanceReportFilter( process_model_identifier, + user_group_identifier, start_from, start_to, end_from, end_to, process_status, initiated_by_me, + has_terminal_status, with_tasks_completed_by_me, - with_tasks_completed_by_my_group, + with_tasks_assigned_to_my_group, + with_relation_to_me, ) return report_filter @@ -194,20 +233,25 @@ class ProcessInstanceReportService: cls, process_instance_report: ProcessInstanceReportModel, process_model_identifier: Optional[str] = None, + user_group_identifier: Optional[str] = None, start_from: Optional[int] = None, start_to: Optional[int] = None, end_from: Optional[int] = None, end_to: Optional[int] = None, process_status: Optional[str] = None, initiated_by_me: Optional[bool] = None, + has_terminal_status: Optional[bool] = None, with_tasks_completed_by_me: Optional[bool] = None, - with_tasks_completed_by_my_group: Optional[bool] = None, + with_tasks_assigned_to_my_group: Optional[bool] = None, + with_relation_to_me: 
Optional[bool] = None, ) -> ProcessInstanceReportFilter: """Filter_from_metadata_with_overrides.""" report_filter = cls.filter_from_metadata(process_instance_report) if process_model_identifier is not None: report_filter.process_model_identifier = process_model_identifier + if user_group_identifier is not None: + report_filter.user_group_identifier = user_group_identifier if start_from is not None: report_filter.start_from = start_from if start_to is not None: @@ -220,12 +264,16 @@ class ProcessInstanceReportService: report_filter.process_status = process_status.split(",") if initiated_by_me is not None: report_filter.initiated_by_me = initiated_by_me + if has_terminal_status is not None: + report_filter.has_terminal_status = has_terminal_status if with_tasks_completed_by_me is not None: report_filter.with_tasks_completed_by_me = with_tasks_completed_by_me - if with_tasks_completed_by_my_group is not None: - report_filter.with_tasks_completed_by_my_group = ( - with_tasks_completed_by_my_group + if with_tasks_assigned_to_my_group is not None: + report_filter.with_tasks_assigned_to_my_group = ( + with_tasks_assigned_to_my_group ) + if with_relation_to_me is not None: + report_filter.with_relation_to_me = with_relation_to_me return report_filter @@ -241,9 +289,9 @@ class ProcessInstanceReportService: process_instance_dict = process_instance["ProcessInstanceModel"].serialized for metadata_column in metadata_columns: if metadata_column["accessor"] not in process_instance_dict: - process_instance_dict[ - metadata_column["accessor"] - ] = process_instance[metadata_column["accessor"]] + process_instance_dict[metadata_column["accessor"]] = ( + process_instance[metadata_column["accessor"]] + ) results.append(process_instance_dict) return results @@ -268,3 +316,207 @@ class ProcessInstanceReportService: {"Header": "Username", "accessor": "username", "filterable": False}, {"Header": "Status", "accessor": "status", "filterable": False}, ] + + @classmethod + def 
run_process_instance_report( + cls, + report_filter: ProcessInstanceReportFilter, + process_instance_report: ProcessInstanceReportModel, + user: UserModel, + page: int = 1, + per_page: int = 100, + ) -> dict: + """Run_process_instance_report.""" + process_instance_query = ProcessInstanceModel.query + # Always join that hot user table for good performance at serialization time. + process_instance_query = process_instance_query.options( + selectinload(ProcessInstanceModel.process_initiator) + ) + + if report_filter.process_model_identifier is not None: + process_model = ProcessModelService.get_process_model( + f"{report_filter.process_model_identifier}", + ) + + process_instance_query = process_instance_query.filter_by( + process_model_identifier=process_model.id + ) + + # this can never happen. obviously the class has the columns it defines. this is just to appease mypy. + if ( + ProcessInstanceModel.start_in_seconds is None + or ProcessInstanceModel.end_in_seconds is None + ): + raise ( + ApiError( + error_code="unexpected_condition", + message="Something went very wrong", + status_code=500, + ) + ) + + if report_filter.start_from is not None: + process_instance_query = process_instance_query.filter( + ProcessInstanceModel.start_in_seconds >= report_filter.start_from + ) + if report_filter.start_to is not None: + process_instance_query = process_instance_query.filter( + ProcessInstanceModel.start_in_seconds <= report_filter.start_to + ) + if report_filter.end_from is not None: + process_instance_query = process_instance_query.filter( + ProcessInstanceModel.end_in_seconds >= report_filter.end_from + ) + if report_filter.end_to is not None: + process_instance_query = process_instance_query.filter( + ProcessInstanceModel.end_in_seconds <= report_filter.end_to + ) + if report_filter.process_status is not None: + process_instance_query = process_instance_query.filter( + ProcessInstanceModel.status.in_(report_filter.process_status) # type: ignore + ) + + if 
report_filter.initiated_by_me is True: + process_instance_query = process_instance_query.filter_by( + process_initiator=user + ) + + if report_filter.has_terminal_status is True: + process_instance_query = process_instance_query.filter( + ProcessInstanceModel.status.in_(ProcessInstanceModel.terminal_statuses()) # type: ignore + ) + + if ( + not report_filter.with_tasks_completed_by_me + and not report_filter.with_tasks_assigned_to_my_group + and report_filter.with_relation_to_me is True + ): + process_instance_query = process_instance_query.outerjoin( + HumanTaskModel + ).outerjoin( + HumanTaskUserModel, + and_( + HumanTaskModel.id == HumanTaskUserModel.human_task_id, + HumanTaskUserModel.user_id == user.id, + ), + ) + process_instance_query = process_instance_query.filter( + or_( + HumanTaskUserModel.id.is_not(None), + ProcessInstanceModel.process_initiator_id == user.id, + ) + ) + + if report_filter.with_tasks_completed_by_me is True: + process_instance_query = process_instance_query.filter( + ProcessInstanceModel.process_initiator_id != user.id + ) + process_instance_query = process_instance_query.join( + HumanTaskModel, + and_( + HumanTaskModel.process_instance_id == ProcessInstanceModel.id, + HumanTaskModel.completed_by_user_id == user.id, + ), + ) + + if report_filter.with_tasks_assigned_to_my_group is True: + group_model_join_conditions = [ + GroupModel.id == HumanTaskModel.lane_assignment_id + ] + if report_filter.user_group_identifier: + group_model_join_conditions.append( + GroupModel.identifier == report_filter.user_group_identifier + ) + process_instance_query = process_instance_query.join(HumanTaskModel) + process_instance_query = process_instance_query.join( + GroupModel, and_(*group_model_join_conditions) + ) + process_instance_query = process_instance_query.join( + UserGroupAssignmentModel, + UserGroupAssignmentModel.group_id == GroupModel.id, + ) + process_instance_query = process_instance_query.filter( + UserGroupAssignmentModel.user_id == user.id 
+ ) + + instance_metadata_aliases = {} + stock_columns = ProcessInstanceReportService.get_column_names_for_model( + ProcessInstanceModel + ) + for column in process_instance_report.report_metadata["columns"]: + if column["accessor"] in stock_columns: + continue + instance_metadata_alias = aliased(ProcessInstanceMetadataModel) + instance_metadata_aliases[column["accessor"]] = instance_metadata_alias + + filter_for_column = None + if "filter_by" in process_instance_report.report_metadata: + filter_for_column = next( + ( + f + for f in process_instance_report.report_metadata["filter_by"] + if f["field_name"] == column["accessor"] + ), + None, + ) + isouter = True + conditions = [ + ProcessInstanceModel.id == instance_metadata_alias.process_instance_id, + instance_metadata_alias.key == column["accessor"], + ] + if filter_for_column: + isouter = False + conditions.append( + instance_metadata_alias.value == filter_for_column["field_value"] + ) + process_instance_query = process_instance_query.join( + instance_metadata_alias, and_(*conditions), isouter=isouter + ).add_columns( + func.max(instance_metadata_alias.value).label(column["accessor"]) + ) + + order_by_query_array = [] + order_by_array = process_instance_report.report_metadata["order_by"] + if len(order_by_array) < 1: + order_by_array = ProcessInstanceReportModel.default_order_by() + for order_by_option in order_by_array: + attribute = re.sub("^-", "", order_by_option) + if attribute in stock_columns: + if order_by_option.startswith("-"): + order_by_query_array.append( + getattr(ProcessInstanceModel, attribute).desc() + ) + else: + order_by_query_array.append( + getattr(ProcessInstanceModel, attribute).asc() + ) + elif attribute in instance_metadata_aliases: + if order_by_option.startswith("-"): + order_by_query_array.append( + func.max(instance_metadata_aliases[attribute].value).desc() + ) + else: + order_by_query_array.append( + func.max(instance_metadata_aliases[attribute].value).asc() + ) + # return 
process_instance_query + process_instances = ( + process_instance_query.group_by(ProcessInstanceModel.id) + .add_columns(ProcessInstanceModel.id) + .order_by(*order_by_query_array) + .paginate(page=page, per_page=per_page, error_out=False) + ) + results = ProcessInstanceReportService.add_metadata_columns_to_process_instance( + process_instances.items, process_instance_report.report_metadata["columns"] + ) + response_json = { + "report": process_instance_report, + "results": results, + "filters": report_filter.to_dict(), + "pagination": { + "count": len(results), + "total": process_instances.total, + "pages": process_instances.pages, + }, + } + return response_json diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/services/process_instance_service.py b/spiffworkflow-backend/src/spiffworkflow_backend/services/process_instance_service.py index 46bd252b9..c6e3db42e 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/services/process_instance_service.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/services/process_instance_service.py @@ -8,7 +8,7 @@ from flask_bpmn.api.api_error import ApiError from flask_bpmn.models.db import db from SpiffWorkflow.task import Task as SpiffTask # type: ignore -from spiffworkflow_backend.models.active_task import ActiveTaskModel +from spiffworkflow_backend.models.human_task import HumanTaskModel from spiffworkflow_backend.models.process_instance import ProcessInstanceApi from spiffworkflow_backend.models.process_instance import ProcessInstanceModel from spiffworkflow_backend.models.process_instance import ProcessInstanceStatus @@ -17,6 +17,7 @@ from spiffworkflow_backend.models.task import MultiInstanceType from spiffworkflow_backend.models.task import Task from spiffworkflow_backend.models.user import UserModel from spiffworkflow_backend.services.authorization_service import AuthorizationService +from spiffworkflow_backend.services.git_service import GitCommandError from 
spiffworkflow_backend.services.git_service import GitService from spiffworkflow_backend.services.process_instance_processor import ( ProcessInstanceProcessor, @@ -36,7 +37,10 @@ class ProcessInstanceService: user: UserModel, ) -> ProcessInstanceModel: """Get_process_instance_from_spec.""" - current_git_revision = GitService.get_current_revision() + try: + current_git_revision = GitService.get_current_revision() + except GitCommandError: + current_git_revision = "" process_instance_model = ProcessInstanceModel( status=ProcessInstanceStatus.not_started.value, process_initiator=user, @@ -81,7 +85,8 @@ class ProcessInstanceService: db.session.add(process_instance) db.session.commit() error_message = ( - f"Error running waiting task for process_instance {process_instance.id}" + "Error running waiting task for process_instance" + f" {process_instance.id}" + f"({process_instance.process_model_identifier}). {str(e)}" ) current_app.logger.error(error_message) @@ -121,7 +126,7 @@ class ProcessInstanceService: if next_task_trying_again is not None: process_instance_api.next_task = ( ProcessInstanceService.spiff_task_to_api_task( - next_task_trying_again, add_docs_and_forms=True + processor, next_task_trying_again, add_docs_and_forms=True ) ) @@ -174,7 +179,10 @@ class ProcessInstanceService: else: raise ApiError.from_task( error_code="task_lane_user_error", - message="Spiff Task %s lane user dict must have a key called 'value' with the user's uid in it." + message=( + "Spiff Task %s lane user dict must have a key called" + " 'value' with the user's uid in it." + ) % spiff_task.task_spec.name, task=spiff_task, ) @@ -196,7 +204,7 @@ class ProcessInstanceService: spiff_task: SpiffTask, data: dict[str, Any], user: UserModel, - active_task: ActiveTaskModel, + human_task: HumanTaskModel, ) -> None: """All the things that need to happen when we complete a form. 
@@ -210,7 +218,7 @@ class ProcessInstanceService: dot_dct = ProcessInstanceService.create_dot_dict(data) spiff_task.update_data(dot_dct) # ProcessInstanceService.post_process_form(spiff_task) # some properties may update the data store. - processor.complete_task(spiff_task, active_task) + processor.complete_task(spiff_task, human_task, user=user) processor.do_engine_steps(save=True) @staticmethod @@ -277,7 +285,9 @@ class ProcessInstanceService: @staticmethod def spiff_task_to_api_task( - spiff_task: SpiffTask, add_docs_and_forms: bool = False + processor: ProcessInstanceProcessor, + spiff_task: SpiffTask, + add_docs_and_forms: bool = False, ) -> Task: """Spiff_task_to_api_task.""" task_type = spiff_task.task_spec.spec_type @@ -302,10 +312,17 @@ class ProcessInstanceService: else: lane = None + if hasattr(spiff_task.task_spec, "spec"): + call_activity_process_identifier = spiff_task.task_spec.spec + else: + call_activity_process_identifier = None + parent_id = None if spiff_task.parent: parent_id = spiff_task.parent.id + serialized_task_spec = processor.serialize_task_spec(spiff_task.task_spec) + task = Task( spiff_task.id, spiff_task.task_spec.name, @@ -316,9 +333,11 @@ class ProcessInstanceService: multi_instance_type=mi_type, multi_instance_count=info["mi_count"], multi_instance_index=info["mi_index"], - process_name=spiff_task.task_spec._wf_spec.description, + process_identifier=spiff_task.task_spec._wf_spec.name, properties=props, parent=parent_id, + event_definition=serialized_task_spec.get("event_definition"), + call_activity_process_identifier=call_activity_process_identifier, ) return task diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/services/process_model_service.py b/spiffworkflow-backend/src/spiffworkflow_backend/services/process_model_service.py index d4fa5647b..f9f346314 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/services/process_model_service.py +++ 
b/spiffworkflow-backend/src/spiffworkflow_backend/services/process_model_service.py @@ -146,7 +146,10 @@ class ProcessModelService(FileSystemService): if len(instances) > 0: raise ApiError( error_code="existing_instances", - message=f"We cannot delete the model `{process_model_id}`, there are existing instances that depend on it.", + message=( + f"We cannot delete the model `{process_model_id}`, there are" + " existing instances that depend on it." + ), ) process_model = self.get_process_model(process_model_id) path = self.workflow_path(process_model) @@ -172,7 +175,6 @@ class ProcessModelService(FileSystemService): cls, relative_path: str ) -> ProcessModelInfo: """Get_process_model_from_relative_path.""" - process_group_identifier, _ = os.path.split(relative_path) path = os.path.join(FileSystemService.root_path(), relative_path) return cls.__scan_process_model(path) @@ -224,11 +226,11 @@ class ProcessModelService(FileSystemService): user = UserService.current_user() new_process_model_list = [] for process_model in process_models: - uri = f"/v1.0/process-models/{process_model.id.replace('/', ':')}/process-instances" - result = AuthorizationService.user_has_permission( + uri = f"/v1.0/process-instances/{process_model.id.replace('/', ':')}" + has_permission = AuthorizationService.user_has_permission( user=user, permission="create", target_uri=uri ) - if result: + if has_permission: new_process_model_list.append(process_model) return new_process_model_list @@ -340,8 +342,11 @@ class ProcessModelService(FileSystemService): if len(problem_models) > 0: raise ApiError( error_code="existing_instances", - message=f"We cannot delete the group `{process_group_id}`, " - f"there are models with existing instances inside the group. {problem_models}", + message=( + f"We cannot delete the group `{process_group_id}`, there are" + " models with existing instances inside the group." 
+ f" {problem_models}" + ), ) shutil.rmtree(path) self.cleanup_process_group_display_order() @@ -393,7 +398,10 @@ class ProcessModelService(FileSystemService): if process_group is None: raise ApiError( error_code="process_group_could_not_be_loaded_from_disk", - message=f"We could not load the process_group from disk from: {dir_path}", + message=( + "We could not load the process_group from disk from:" + f" {dir_path}" + ), ) else: process_group_id = dir_path.replace(FileSystemService.root_path(), "") @@ -430,6 +438,9 @@ class ProcessModelService(FileSystemService): # process_group.process_groups.sort() return process_group + # path might have backslashes on windows, not sure + # not sure if os.path.join converts forward slashes in the relative_path argument to backslashes: + # path = os.path.join(FileSystemService.root_path(), relative_path) @classmethod def __scan_process_model( cls, @@ -446,12 +457,19 @@ class ProcessModelService(FileSystemService): data.pop("process_group_id") # we don't save `id` in the json file, so we add it back in here. 
relative_path = os.path.relpath(path, FileSystemService.root_path()) + + # even on windows, use forward slashes for ids + relative_path = relative_path.replace("\\", "/") + data["id"] = relative_path process_model_info = ProcessModelInfo(**data) if process_model_info is None: raise ApiError( error_code="process_model_could_not_be_loaded_from_disk", - message=f"We could not load the process_model from disk with data: {data}", + message=( + "We could not load the process_model from disk with data:" + f" {data}" + ), ) else: if name is None: diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/services/script_unit_test_runner.py b/spiffworkflow-backend/src/spiffworkflow_backend/services/script_unit_test_runner.py index 9112e20f0..ed331672c 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/services/script_unit_test_runner.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/services/script_unit_test_runner.py @@ -112,7 +112,10 @@ class ScriptUnitTestRunner: except json.decoder.JSONDecodeError as ex: return ScriptUnitTestResult( result=False, - error=f"Failed to parse expectedOutputJson: {unit_test['expectedOutputJson']}: {str(ex)}", + error=( + "Failed to parse expectedOutputJson:" + f" {unit_test['expectedOutputJson']}: {str(ex)}" + ), ) script = task.task_spec.script diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/services/secret_service.py b/spiffworkflow-backend/src/spiffworkflow_backend/services/secret_service.py index e4dee4913..aa9e6d147 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/services/secret_service.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/services/secret_service.py @@ -44,8 +44,10 @@ class SecretService: except Exception as e: raise ApiError( error_code="create_secret_error", - message=f"There was an error creating a secret with key: {key} and value ending with: {value[:-4]}. 
" - f"Original error is {e}", + message=( + f"There was an error creating a secret with key: {key} and value" + f" ending with: {value[:-4]}. Original error is {e}" + ), ) from e return secret_model @@ -89,7 +91,9 @@ class SecretService: else: raise ApiError( error_code="update_secret_error", - message=f"Cannot update secret with key: {key}. Resource does not exist.", + message=( + f"Cannot update secret with key: {key}. Resource does not exist." + ), status_code=404, ) @@ -104,11 +108,16 @@ class SecretService: except Exception as e: raise ApiError( error_code="delete_secret_error", - message=f"Could not delete secret with key: {key}. Original error is: {e}", + message=( + f"Could not delete secret with key: {key}. Original error" + f" is: {e}" + ), ) from e else: raise ApiError( error_code="delete_secret_error", - message=f"Cannot delete secret with key: {key}. Resource does not exist.", + message=( + f"Cannot delete secret with key: {key}. Resource does not exist." + ), status_code=404, ) diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/services/service_task_service.py b/spiffworkflow-backend/src/spiffworkflow_backend/services/service_task_service.py index 15e25a759..6fec8b796 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/services/service_task_service.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/services/service_task_service.py @@ -31,7 +31,6 @@ class ServiceTaskDelegate: if value.startswith(secret_prefix): key = value.removeprefix(secret_prefix) secret = SecretService().get_secret(key) - assert secret # noqa: S101 return secret.value file_prefix = "file:" diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/services/spec_file_service.py b/spiffworkflow-backend/src/spiffworkflow_backend/services/spec_file_service.py index c69f41c30..4fdfbd6d1 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/services/spec_file_service.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/services/spec_file_service.py 
@@ -171,13 +171,18 @@ class SpecFileService(FileSystemService): ref.is_primary = True if ref.is_primary: - ProcessModelService.update_process_model( - process_model_info, - { - "primary_process_id": ref.identifier, - "primary_file_name": file_name, - }, - ) + update_hash = {} + if not process_model_info.primary_file_name: + update_hash["primary_process_id"] = ref.identifier + update_hash["primary_file_name"] = file_name + elif file_name == process_model_info.primary_file_name: + update_hash["primary_process_id"] = ref.identifier + + if len(update_hash) > 0: + ProcessModelService.update_process_model( + process_model_info, + update_hash, + ) SpecFileService.update_caches(ref) return file @@ -187,7 +192,8 @@ class SpecFileService(FileSystemService): full_file_path = SpecFileService.full_file_path(process_model_info, file_name) if not os.path.exists(full_file_path): raise ProcessModelFileNotFoundError( - f"No file found with name {file_name} in {process_model_info.display_name}" + f"No file found with name {file_name} in" + f" {process_model_info.display_name}" ) with open(full_file_path, "rb") as f_handle: spec_file_data = f_handle.read() @@ -309,8 +315,9 @@ class SpecFileService(FileSystemService): ).first() if message_model is None: raise ValidationException( - f"Could not find message model with identifier '{message_model_identifier}'" - f"Required by a Start Event in : {ref.file_name}" + "Could not find message model with identifier" + f" '{message_model_identifier}'Required by a Start Event in :" + f" {ref.file_name}" ) message_triggerable_process_model = ( MessageTriggerableProcessModel.query.filter_by( @@ -330,7 +337,8 @@ class SpecFileService(FileSystemService): != ref.process_model_id ): raise ValidationException( - f"Message model is already used to start process model {ref.process_model_id}" + "Message model is already used to start process model" + f" {ref.process_model_id}" ) @staticmethod @@ -348,8 +356,9 @@ class SpecFileService(FileSystemService): 
).first() if message_model is None: raise ValidationException( - f"Could not find message model with identifier '{message_model_identifier}'" - f"specified by correlation property: {cpre}" + "Could not find message model with identifier" + f" '{message_model_identifier}'specified by correlation" + f" property: {cpre}" ) # fixme: I think we are currently ignoring the correction properties. message_correlation_property = ( diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/services/user_service.py b/spiffworkflow-backend/src/spiffworkflow_backend/services/user_service.py index 0e8e65c2c..20412e549 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/services/user_service.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/services/user_service.py @@ -7,12 +7,15 @@ from flask import g from flask_bpmn.api.api_error import ApiError from flask_bpmn.models.db import db -from spiffworkflow_backend.models.active_task import ActiveTaskModel -from spiffworkflow_backend.models.active_task_user import ActiveTaskUserModel from spiffworkflow_backend.models.group import GroupModel +from spiffworkflow_backend.models.human_task import HumanTaskModel +from spiffworkflow_backend.models.human_task_user import HumanTaskUserModel from spiffworkflow_backend.models.principal import PrincipalModel from spiffworkflow_backend.models.user import UserModel from spiffworkflow_backend.models.user_group_assignment import UserGroupAssignmentModel +from spiffworkflow_backend.models.user_group_assignment_waiting import ( + UserGroupAssignmentWaitingModel, +) class UserService: @@ -21,11 +24,11 @@ class UserService: @classmethod def create_user( cls, + username: str, service: str, service_id: str, - name: Optional[str] = "", - username: Optional[str] = "", email: Optional[str] = "", + display_name: Optional[str] = "", ) -> UserModel: """Create_user.""" user_model: Optional[UserModel] = ( @@ -41,8 +44,8 @@ class UserService: username=username, service=service, 
service_id=service_id, - name=name, email=email, + display_name=display_name, ) db.session.add(user_model) @@ -55,6 +58,7 @@ class UserService: message=f"Could not add user {username}", ) from e cls.create_principal(user_model.id) + UserService().apply_waiting_group_assignments(user_model) return user_model else: @@ -69,45 +73,12 @@ class UserService: ) ) - @classmethod - def find_or_create_user( - cls, - service: str, - service_id: str, - name: Optional[str] = None, - username: Optional[str] = None, - email: Optional[str] = None, - ) -> UserModel: - """Find_or_create_user.""" - user_model: UserModel - try: - user_model = cls.create_user( - service=service, - service_id=service_id, - name=name, - username=username, - email=email, - ) - except ApiError: - user_model = ( - UserModel.query.filter(UserModel.service == service) - .filter(UserModel.service_id == service_id) - .first() - ) - return user_model - # Returns true if the current user is logged in. @staticmethod def has_user() -> bool: """Has_user.""" return "token" in g and bool(g.token) and "user" in g and bool(g.user) - # Returns true if the given user uid is different from the current user's uid. - @staticmethod - def is_different_user(uid: str) -> bool: - """Is_different_user.""" - return UserService.has_user() and uid is not None and uid is not g.user.uid - @staticmethod def current_user() -> Any: """Current_user.""" @@ -117,20 +88,6 @@ class UserService: ) return g.user - @staticmethod - def in_list(uids: list[str]) -> bool: - """Returns true if the current user's id is in the given list of ids. - - False if there is no user, or the user is not in the list. - """ - if ( - UserService.has_user() - ): # If someone is logged in, lock tasks that don't belong to them. 
- user = UserService.current_user() - if user.uid in uids: - return True - return False - @staticmethod def get_principal_by_user_id(user_id: int) -> PrincipalModel: """Get_principal_by_user_id.""" @@ -173,8 +130,57 @@ class UserService: @classmethod def add_user_to_group(cls, user: UserModel, group: GroupModel) -> None: """Add_user_to_group.""" - ugam = UserGroupAssignmentModel(user_id=user.id, group_id=group.id) - db.session.add(ugam) + exists = ( + UserGroupAssignmentModel() + .query.filter_by(user_id=user.id) + .filter_by(group_id=group.id) + .count() + ) + if not exists: + ugam = UserGroupAssignmentModel(user_id=user.id, group_id=group.id) + db.session.add(ugam) + db.session.commit() + + @classmethod + def add_waiting_group_assignment(cls, username: str, group: GroupModel) -> None: + """Add_waiting_group_assignment.""" + wugam = ( + UserGroupAssignmentWaitingModel() + .query.filter_by(username=username) + .filter_by(group_id=group.id) + .first() + ) + if not wugam: + wugam = UserGroupAssignmentWaitingModel( + username=username, group_id=group.id + ) + db.session.add(wugam) + db.session.commit() + if wugam.is_match_all(): + for user in UserModel.query.all(): + cls.add_user_to_group(user, group) + + @classmethod + def apply_waiting_group_assignments(cls, user: UserModel) -> None: + """Apply_waiting_group_assignments.""" + waiting = ( + UserGroupAssignmentWaitingModel() + .query.filter(UserGroupAssignmentWaitingModel.username == user.username) + .all() + ) + for assignment in waiting: + cls.add_user_to_group(user, assignment.group) + db.session.delete(assignment) + wildcard = ( + UserGroupAssignmentWaitingModel() + .query.filter( + UserGroupAssignmentWaitingModel.username + == UserGroupAssignmentWaitingModel.MATCH_ALL_USERS + ) + .all() + ) + for assignment in wildcard: + cls.add_user_to_group(user, assignment.group) db.session.commit() @staticmethod @@ -192,15 +198,15 @@ class UserService: return None @classmethod - def 
add_user_to_active_tasks_if_appropriate(cls, user: UserModel) -> None: - """Add_user_to_active_tasks_if_appropriate.""" + def add_user_to_human_tasks_if_appropriate(cls, user: UserModel) -> None: + """Add_user_to_human_tasks_if_appropriate.""" group_ids = [g.id for g in user.groups] - active_tasks = ActiveTaskModel.query.filter( - ActiveTaskModel.lane_assignment_id.in_(group_ids) # type: ignore + human_tasks = HumanTaskModel.query.filter( + HumanTaskModel.lane_assignment_id.in_(group_ids) # type: ignore ).all() - for active_task in active_tasks: - active_task_user = ActiveTaskUserModel( - user_id=user.id, active_task_id=active_task.id + for human_task in human_tasks: + human_task_user = HumanTaskUserModel( + user_id=user.id, human_task_id=human_task.id ) - db.session.add(active_task_user) + db.session.add(human_task_user) db.session.commit() diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/templates/basic_with_user_task_template.bpmn b/spiffworkflow-backend/src/spiffworkflow_backend/templates/basic_with_user_task_template.bpmn new file mode 100644 index 000000000..2e33d429b --- /dev/null +++ b/spiffworkflow-backend/src/spiffworkflow_backend/templates/basic_with_user_task_template.bpmn @@ -0,0 +1,45 @@ + + + + + Flow_0gixxkm + + + + + + + + + + Flow_0gixxkm + Flow_1oi9nsn + + + Flow_1oi9nsn + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/templates/form-identifier-id-template-schema.json b/spiffworkflow-backend/src/spiffworkflow_backend/templates/form-identifier-id-template-schema.json new file mode 100644 index 000000000..ae61e4963 --- /dev/null +++ b/spiffworkflow-backend/src/spiffworkflow_backend/templates/form-identifier-id-template-schema.json @@ -0,0 +1,6 @@ +{ + "title": "{FORM_IDENTIFIER}", + "description": "", + "properties": {}, + "required": [] +} diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/templates/form-identifier-id-template-uischema.json 
b/spiffworkflow-backend/src/spiffworkflow_backend/templates/form-identifier-id-template-uischema.json new file mode 100644 index 000000000..654ce121f --- /dev/null +++ b/spiffworkflow-backend/src/spiffworkflow_backend/templates/form-identifier-id-template-uischema.json @@ -0,0 +1,3 @@ +{ + "ui:order": [] +} diff --git a/spiffworkflow-backend/tests/data/data_object_test/data_object.bpmn b/spiffworkflow-backend/tests/data/data_object_test/data_object.bpmn new file mode 100644 index 000000000..c112339e2 --- /dev/null +++ b/spiffworkflow-backend/tests/data/data_object_test/data_object.bpmn @@ -0,0 +1,75 @@ + + + + + Flow_0hnphp9 + + + + Flow_0hnphp9 + Flow_0amajxh + + DataObjectReference_10g8dit + + the_data_object_var = 'hey' + + + + Flow_1ifqo6o + + + + Flow_0amajxh + Flow_1ifqo6o + + + DataObjectReference_10g8dit + Property_0a8w16m + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/spiffworkflow-backend/tests/data/error/script_error_with_task_data.bpmn b/spiffworkflow-backend/tests/data/error/script_error_with_task_data.bpmn new file mode 100644 index 000000000..cd5f58aa4 --- /dev/null +++ b/spiffworkflow-backend/tests/data/error/script_error_with_task_data.bpmn @@ -0,0 +1,86 @@ + + + + + Flow_10jwwqy + + + + Flow_1axnzv6 + + + + + + { + "current_user": { + "id": "2", + "username": "ciadmin1" + }, + "num": 0 +} + { + "Mike": "Awesome", + "i": 2, + "current_user": { + "id": "2", + "username": "ciadmin1" + }, + "num": 0, + "my_var": "whatwhat", + "person": "Kevin" +} + + + {} + {} + + + {"current_user": {"id": "1", "username": "kb"}} + {"Mike": "Awesome", "current_user": {"id": "1", "username": "kb"}, "heyhey": "https://demo.spiffworkflow.org", "i": 2, "members": [], "my_var": "whatwhat", "person": "Kevin"} + + + + Flow_10jwwqy + Flow_1utkzvj + my_var = 'THE VAR' + + + + + Flow_1utkzvj + Flow_1axnzv6 + hey + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git 
a/spiffworkflow-backend/tests/data/process_navigation/process_navigation.bpmn b/spiffworkflow-backend/tests/data/process_navigation/process_navigation.bpmn new file mode 100644 index 000000000..9f2f26bf4 --- /dev/null +++ b/spiffworkflow-backend/tests/data/process_navigation/process_navigation.bpmn @@ -0,0 +1,137 @@ + + + + + + + + Flow_1l15rbh + + + + Flow_1l15rbh + Flow_0d35i06 + Flow_0tzaigt + Flow_1vld4r2 + + + + Flow_0d35i06 + Flow_1w3n49n + + + + Flow_0tzaigt + Flow_1q47ol8 + + + + + + + Flow_1q47ol8 + + + + + Flow_1w3n49n + + + + Flow_1vld4r2 + Flow_13ai5vv + + timedelta(hours=1) + + + + + Click the button. + + Flow_13ai5vv + Flow_1vwnf3n + + + Flow_1vwnf3n + + + + + result + + + + + result + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/spiffworkflow-backend/tests/data/script_refresh_permissions/refresh_permisions.bpmn b/spiffworkflow-backend/tests/data/script_refresh_permissions/refresh_permisions.bpmn new file mode 100644 index 000000000..630cd1221 --- /dev/null +++ b/spiffworkflow-backend/tests/data/script_refresh_permissions/refresh_permisions.bpmn @@ -0,0 +1,39 @@ + + + + + Flow_01cweoc + + + + Flow_1xle2yo + + + + Flow_01cweoc + Flow_1xle2yo + refresh_permissions([]) + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/spiffworkflow-backend/tests/spiffworkflow_backend/helpers/base_test.py b/spiffworkflow-backend/tests/spiffworkflow_backend/helpers/base_test.py index 48982fc60..47cf2d876 100644 --- a/spiffworkflow-backend/tests/spiffworkflow_backend/helpers/base_test.py +++ b/spiffworkflow-backend/tests/spiffworkflow_backend/helpers/base_test.py @@ -41,7 +41,7 @@ class BaseTest: if isinstance(user, UserModel): return user - user = UserService.create_user("internal", username, username=username) + user = UserService.create_user(username, "internal", username) if isinstance(user, UserModel): return user @@ -133,7 +133,6 @@ class BaseTest: 
) -> TestResponse: """Create_process_model.""" if process_model_id is not None: - # make sure we have a group process_group_id, _ = os.path.split(process_model_id) modified_process_group_id = process_group_id.replace("/", ":") @@ -141,7 +140,6 @@ class BaseTest: os.path.join(FileSystemService.root_path(), process_group_id) ) if ProcessModelService.is_group(process_group_path): - if exception_notification_addresses is None: exception_notification_addresses = [] @@ -171,7 +169,8 @@ class BaseTest: raise Exception("You must create the group first") else: raise Exception( - "You must include the process_model_id, which must be a path to the model" + "You must include the process_model_id, which must be a path to the" + " model" ) def get_test_data_file_contents( @@ -243,7 +242,7 @@ class BaseTest: return file @staticmethod - def create_process_instance_from_process_model_id( + def create_process_instance_from_process_model_id_with_api( client: FlaskClient, test_process_model_id: str, headers: Dict[str, str], @@ -324,13 +323,9 @@ class BaseTest: permission_names: Optional[list[str]] = None, ) -> UserModel: """Add_permissions_to_user.""" - permission_target = PermissionTargetModel.query.filter_by( - uri=target_uri - ).first() - if permission_target is None: - permission_target = PermissionTargetModel(uri=target_uri) - db.session.add(permission_target) - db.session.commit() + permission_target = AuthorizationService.find_or_create_permission_target( + target_uri + ) if permission_names is None: permission_names = [member.name for member in Permission] diff --git a/spiffworkflow-backend/tests/spiffworkflow_backend/integration/test_logging_service.py b/spiffworkflow-backend/tests/spiffworkflow_backend/integration/test_logging_service.py index f9dd44522..d27bbdc7c 100644 --- a/spiffworkflow-backend/tests/spiffworkflow_backend/integration/test_logging_service.py +++ b/spiffworkflow-backend/tests/spiffworkflow_backend/integration/test_logging_service.py @@ -45,7 +45,7 @@ class 
TestLoggingService(BaseTest): user=with_super_admin_user, ) headers = self.logged_in_headers(with_super_admin_user) - response = self.create_process_instance_from_process_model_id( + response = self.create_process_instance_from_process_model_id_with_api( client, process_model_identifier, headers ) assert response.json is not None diff --git a/spiffworkflow-backend/tests/spiffworkflow_backend/integration/test_nested_groups.py b/spiffworkflow-backend/tests/spiffworkflow_backend/integration/test_nested_groups.py index 3983f9be8..90b5af88d 100644 --- a/spiffworkflow-backend/tests/spiffworkflow_backend/integration/test_nested_groups.py +++ b/spiffworkflow-backend/tests/spiffworkflow_backend/integration/test_nested_groups.py @@ -38,7 +38,7 @@ class TestNestedGroups(BaseTest): bpmn_file_name=bpmn_file_name, bpmn_file_location=bpmn_file_location, ) - response = self.create_process_instance_from_process_model_id( + response = self.create_process_instance_from_process_model_id_with_api( client, process_model_identifier, self.logged_in_headers(with_super_admin_user), @@ -99,7 +99,7 @@ class TestNestedGroups(BaseTest): bpmn_file_name=bpmn_file_name, bpmn_file_location=bpmn_file_location, ) - response = self.create_process_instance_from_process_model_id( + response = self.create_process_instance_from_process_model_id_with_api( client, process_model_identifier, self.logged_in_headers(with_super_admin_user), diff --git a/spiffworkflow-backend/tests/spiffworkflow_backend/integration/test_openid_blueprint.py b/spiffworkflow-backend/tests/spiffworkflow_backend/integration/test_openid_blueprint.py index 20a0bb67b..ce1655cb9 100644 --- a/spiffworkflow-backend/tests/spiffworkflow_backend/integration/test_openid_blueprint.py +++ b/spiffworkflow-backend/tests/spiffworkflow_backend/integration/test_openid_blueprint.py @@ -1,4 +1,7 @@ """Test_authentication.""" +import base64 + +import jwt from flask import Flask from flask.testing import FlaskClient from 
tests.spiffworkflow_backend.helpers.base_test import BaseTest @@ -44,13 +47,16 @@ class TestFlaskOpenId(BaseTest): client: FlaskClient, with_db_and_bpmn_file_cleanup: None, ) -> None: + """Test_get_token.""" + code = "testadmin1:1234123412341234" + """It should be possible to get a token.""" - code = ( - "c3BpZmZ3b3JrZmxvdy1iYWNrZW5kOkpYZVFFeG0wSmhRUEx1bWdIdElJcWY1MmJEYWxIejBx" - ) + backend_basic_auth_string = code + backend_basic_auth_bytes = bytes(backend_basic_auth_string, encoding="ascii") + backend_basic_auth = base64.b64encode(backend_basic_auth_bytes) headers = { "Content-Type": "application/x-www-form-urlencoded", - "Authorization": f"Basic {code}", + "Authorization": f"Basic {backend_basic_auth.decode('utf-8')}", } data = { "grant_type": "authorization_code", @@ -59,3 +65,13 @@ class TestFlaskOpenId(BaseTest): } response = client.post("/openid/token", data=data, headers=headers) assert response + assert response.is_json + assert "access_token" in response.json + assert "id_token" in response.json + assert "refresh_token" in response.json + + decoded_token = jwt.decode( + response.json["id_token"], options={"verify_signature": False} + ) + assert "iss" in decoded_token + assert "email" in decoded_token diff --git a/spiffworkflow-backend/tests/spiffworkflow_backend/integration/test_process_api.py b/spiffworkflow-backend/tests/spiffworkflow_backend/integration/test_process_api.py index e7edae605..ef34fe060 100644 --- a/spiffworkflow-backend/tests/spiffworkflow_backend/integration/test_process_api.py +++ b/spiffworkflow-backend/tests/spiffworkflow_backend/integration/test_process_api.py @@ -4,6 +4,7 @@ import json import os import time from typing import Any +from typing import Dict import pytest from flask.app import Flask @@ -15,8 +16,8 @@ from tests.spiffworkflow_backend.helpers.test_data import load_test_spec from spiffworkflow_backend.exceptions.process_entity_not_found_error import ( ProcessEntityNotFoundError, ) -from 
spiffworkflow_backend.models.active_task import ActiveTaskModel from spiffworkflow_backend.models.group import GroupModel +from spiffworkflow_backend.models.human_task import HumanTaskModel from spiffworkflow_backend.models.process_group import ProcessGroup from spiffworkflow_backend.models.process_instance import ProcessInstanceModel from spiffworkflow_backend.models.process_instance import ProcessInstanceStatus @@ -162,6 +163,83 @@ class TestProcessApi(BaseTest): assert process_model.primary_file_name == bpmn_file_name assert process_model.primary_process_id == "sample" + def test_process_model_create_with_natural_language( + self, + app: Flask, + client: FlaskClient, + with_db_and_bpmn_file_cleanup: None, + with_super_admin_user: UserModel, + ) -> None: + """Test_process_model_create_with_natural_language.""" + process_group_id = "test_process_group" + process_group_description = "Test Process Group" + process_model_id = "sample" + process_model_identifier = f"{process_group_id}/{process_model_id}" + self.create_process_group( + client, with_super_admin_user, process_group_id, process_group_description + ) + + text = "Create a Bug Tracker process model " + text += ( + "with a Bug Details form that collects summary, description, and priority" + ) + body = {"natural_language_text": text} + self.create_process_model_with_api( + client, + process_model_id=process_model_identifier, + user=with_super_admin_user, + ) + response = client.post( + f"/v1.0/process-models-natural-language/{process_group_id}", + content_type="application/json", + data=json.dumps(body), + headers=self.logged_in_headers(with_super_admin_user), + ) + assert response.status_code == 201 + assert response.json is not None + assert response.json["id"] == f"{process_group_id}/bug-tracker" + assert response.json["display_name"] == "Bug Tracker" + assert response.json["metadata_extraction_paths"] == [ + {"key": "summary", "path": "summary"}, + {"key": "description", "path": "description"}, + {"key": 
"priority", "path": "priority"}, + ] + + process_model = ProcessModelService.get_process_model(response.json["id"]) + process_model_path = os.path.join( + FileSystemService.root_path(), + FileSystemService.id_string_to_relative_path(process_model.id), + ) + + process_model_diagram = os.path.join(process_model_path, "bug-tracker.bpmn") + assert os.path.exists(process_model_diagram) + form_schema_json = os.path.join(process_model_path, "bug-details-schema.json") + assert os.path.exists(form_schema_json) + form_uischema_json = os.path.join( + process_model_path, "bug-details-uischema.json" + ) + assert os.path.exists(form_uischema_json) + + process_instance_report = ProcessInstanceReportModel.query.filter_by( + identifier="bug-tracker" + ).first() + assert process_instance_report is not None + report_column_accessors = [ + i["accessor"] for i in process_instance_report.report_metadata["columns"] + ] + expected_column_accessors = [ + "id", + "process_model_display_name", + "start_in_seconds", + "end_in_seconds", + "username", + "status", + "summary", + "description", + "priority", + ] + assert report_column_accessors == expected_column_accessors + def test_primary_process_id_updates_via_xml( self, app: Flask, @@ -249,10 +327,6 @@ class TestProcessApi(BaseTest): assert response.json is not None assert response.json["ok"] is True - # assert we no longer have a model - with pytest.raises(ProcessEntityNotFoundError): - ProcessModelService.get_process_model(process_model_identifier) - def test_process_model_delete_with_instances( self, app: Flask, @@ -284,7 +358,7 @@ class TestProcessApi(BaseTest): ) headers = self.logged_in_headers(with_super_admin_user) # create an instance from a model - response = self.create_process_instance_from_process_model_id( + response = self.create_process_instance_from_process_model_id_with_api( client, process_model_identifier, headers ) @@ -304,7 +378,8 @@ class TestProcessApi(BaseTest): assert data["error_code"] == "existing_instances" 
assert ( data["message"] - == f"We cannot delete the model `{process_model_identifier}`, there are existing instances that depend on it." + == f"We cannot delete the model `{process_model_identifier}`, there are" + " existing instances that depend on it." ) def test_process_model_update( @@ -1072,7 +1147,7 @@ class TestProcessApi(BaseTest): """Test_process_instance_create.""" test_process_model_id = "runs_without_input/sample" headers = self.logged_in_headers(with_super_admin_user) - response = self.create_process_instance_from_process_model_id( + response = self.create_process_instance_from_process_model_id_with_api( client, test_process_model_id, headers ) assert response.json is not None @@ -1102,7 +1177,7 @@ class TestProcessApi(BaseTest): ) headers = self.logged_in_headers(with_super_admin_user) - response = self.create_process_instance_from_process_model_id( + response = self.create_process_instance_from_process_model_id_with_api( client, process_model_identifier, headers ) assert response.json is not None @@ -1144,7 +1219,7 @@ class TestProcessApi(BaseTest): self.modify_process_identifier_for_path_param(process_model_identifier) ) headers = self.logged_in_headers(with_super_admin_user) - create_response = self.create_process_instance_from_process_model_id( + create_response = self.create_process_instance_from_process_model_id_with_api( client, process_model_identifier, headers ) assert create_response.json is not None @@ -1167,6 +1242,60 @@ class TestProcessApi(BaseTest): xml_file_contents = f_open.read() assert show_response.json["bpmn_xml_file_contents"] == xml_file_contents + def test_process_instance_show_with_specified_process_identifier( + self, + app: Flask, + client: FlaskClient, + with_db_and_bpmn_file_cleanup: None, + with_super_admin_user: UserModel, + ) -> None: + """Test_process_instance_show_with_specified_process_identifier.""" + process_model_id = "call_activity_nested" + process_model_identifier = self.create_group_and_model_with_bpmn( + 
client=client, + user=with_super_admin_user, + process_group_id="test_group_two", + process_model_id=process_model_id, + bpmn_file_location="call_activity_nested", + ) + spec_reference = SpecReferenceCache.query.filter_by( + identifier="Level2b" + ).first() + assert spec_reference + modified_process_model_identifier = ( + self.modify_process_identifier_for_path_param(process_model_identifier) + ) + headers = self.logged_in_headers(with_super_admin_user) + create_response = self.create_process_instance_from_process_model_id_with_api( + client, process_model_identifier, headers + ) + assert create_response.json is not None + assert create_response.status_code == 201 + process_instance_id = create_response.json["id"] + client.post( + f"/v1.0/process-instances/{modified_process_model_identifier}/{process_instance_id}/run", + headers=self.logged_in_headers(with_super_admin_user), + ) + show_response = client.get( + f"/v1.0/process-instances/{modified_process_model_identifier}/{process_instance_id}?process_identifier={spec_reference.identifier}", + headers=self.logged_in_headers(with_super_admin_user), + ) + assert show_response.json is not None + assert show_response.status_code == 200 + file_system_root = FileSystemService.root_path() + process_instance_file_path = ( + f"{file_system_root}/{process_model_identifier}/{process_model_id}.bpmn" + ) + with open(process_instance_file_path) as f_open: + xml_file_contents = f_open.read() + assert show_response.json["bpmn_xml_file_contents"] != xml_file_contents + spec_reference_file_path = os.path.join( + file_system_root, spec_reference.relative_path + ) + with open(spec_reference_file_path) as f_open: + xml_file_contents = f_open.read() + assert show_response.json["bpmn_xml_file_contents"] == xml_file_contents + def test_message_start_when_starting_process_instance( self, app: Flask, @@ -1245,7 +1374,7 @@ class TestProcessApi(BaseTest): "andThis": "another_item_non_key", } } - response = 
self.create_process_instance_from_process_model_id( + response = self.create_process_instance_from_process_model_id_with_api( client, process_model_identifier, self.logged_in_headers(with_super_admin_user), @@ -1305,7 +1434,7 @@ class TestProcessApi(BaseTest): bpmn_file_location=bpmn_file_location, ) - response = self.create_process_instance_from_process_model_id( + response = self.create_process_instance_from_process_model_id_with_api( client, process_model_identifier, self.logged_in_headers(with_super_admin_user), @@ -1321,7 +1450,7 @@ class TestProcessApi(BaseTest): assert response.json is not None response = client.post( - f"/v1.0/process-instances/{self.modify_process_identifier_for_path_param(process_model_identifier)}/{process_instance_id}/terminate", + f"/v1.0/process-instance-terminate/{self.modify_process_identifier_for_path_param(process_model_identifier)}/{process_instance_id}", headers=self.logged_in_headers(with_super_admin_user), ) assert response.status_code == 200 @@ -1342,20 +1471,18 @@ class TestProcessApi(BaseTest): ) -> None: """Test_process_instance_delete.""" process_group_id = "my_process_group" - process_model_id = "user_task" - bpmn_file_name = "user_task.bpmn" - bpmn_file_location = "user_task" + process_model_id = "sample" + bpmn_file_location = "sample" process_model_identifier = self.create_group_and_model_with_bpmn( client, with_super_admin_user, process_group_id=process_group_id, process_model_id=process_model_id, - bpmn_file_name=bpmn_file_name, bpmn_file_location=bpmn_file_location, ) headers = self.logged_in_headers(with_super_admin_user) - response = self.create_process_instance_from_process_model_id( + response = self.create_process_instance_from_process_model_id_with_api( client, process_model_identifier, headers ) assert response.json is not None @@ -1366,11 +1493,13 @@ class TestProcessApi(BaseTest): headers=self.logged_in_headers(with_super_admin_user), ) assert response.json is not None + assert response.status_code == 200 
delete_response = client.delete( f"/v1.0/process-instances/{self.modify_process_identifier_for_path_param(process_model_identifier)}/{process_instance_id}", headers=self.logged_in_headers(with_super_admin_user), ) + assert delete_response.json["ok"] is True assert delete_response.status_code == 200 def test_task_show( @@ -1394,7 +1523,7 @@ class TestProcessApi(BaseTest): ) headers = self.logged_in_headers(with_super_admin_user) - response = self.create_process_instance_from_process_model_id( + response = self.create_process_instance_from_process_model_id_with_api( client, process_model_identifier, headers ) assert response.json is not None @@ -1408,15 +1537,15 @@ class TestProcessApi(BaseTest): assert response.json is not None assert response.json["next_task"] is not None - active_tasks = ( - db.session.query(ActiveTaskModel) - .filter(ActiveTaskModel.process_instance_id == process_instance_id) + human_tasks = ( + db.session.query(HumanTaskModel) + .filter(HumanTaskModel.process_instance_id == process_instance_id) .all() ) - assert len(active_tasks) == 1 - active_task = active_tasks[0] + assert len(human_tasks) == 1 + human_task = human_tasks[0] response = client.get( - f"/v1.0/tasks/{process_instance_id}/{active_task.task_id}", + f"/v1.0/tasks/{process_instance_id}/{human_task.task_id}", headers=self.logged_in_headers(with_super_admin_user), ) assert response.json is not None @@ -1445,7 +1574,7 @@ class TestProcessApi(BaseTest): ) headers = self.logged_in_headers(with_super_admin_user) - self.create_process_instance_from_process_model_id( + self.create_process_instance_from_process_model_id_with_api( client, process_model_identifier, headers ) @@ -1492,19 +1621,19 @@ class TestProcessApi(BaseTest): bpmn_file_location=bpmn_file_location, ) headers = self.logged_in_headers(with_super_admin_user) - self.create_process_instance_from_process_model_id( + self.create_process_instance_from_process_model_id_with_api( client, process_model_identifier, headers ) - 
self.create_process_instance_from_process_model_id( + self.create_process_instance_from_process_model_id_with_api( client, process_model_identifier, headers ) - self.create_process_instance_from_process_model_id( + self.create_process_instance_from_process_model_id_with_api( client, process_model_identifier, headers ) - self.create_process_instance_from_process_model_id( + self.create_process_instance_from_process_model_id_with_api( client, process_model_identifier, headers ) - self.create_process_instance_from_process_model_id( + self.create_process_instance_from_process_model_id_with_api( client, process_model_identifier, headers ) @@ -1818,7 +1947,7 @@ class TestProcessApi(BaseTest): ) -> Any: """Setup_testing_instance.""" headers = self.logged_in_headers(with_super_admin_user) - response = self.create_process_instance_from_process_model_id( + response = self.create_process_instance_from_process_model_id_with_api( client, process_model_id, headers ) process_instance = response.json @@ -1965,7 +2094,6 @@ class TestProcessApi(BaseTest): mail = app.config["MAIL_APP"] with mail.record_messages() as outbox: - response = client.post( f"/v1.0/process-instances/{self.modify_process_identifier_for_path_param(process_model_identifier)}/{process_instance_id}/run", headers=self.logged_in_headers(with_super_admin_user), @@ -1987,6 +2115,36 @@ class TestProcessApi(BaseTest): assert process is not None assert process.status == "error" + def test_task_data_is_set_even_if_process_instance_errors( + self, + app: Flask, + client: FlaskClient, + with_db_and_bpmn_file_cleanup: None, + with_super_admin_user: UserModel, + ) -> None: + """Test_task_data_is_set_even_if_process_instance_errors.""" + process_model = load_test_spec( + process_model_id="group/error_with_task_data", + bpmn_file_name="script_error_with_task_data.bpmn", + process_model_source_directory="error", + ) + process_instance = self.create_process_instance_from_process_model( + process_model=process_model, 
user=with_super_admin_user + ) + + response = client.post( + f"/v1.0/process-instances/{self.modify_process_identifier_for_path_param(process_model.id)}/{process_instance.id}/run", + headers=self.logged_in_headers(with_super_admin_user), + ) + assert response.status_code == 400 + assert process_instance.status == "error" + processor = ProcessInstanceProcessor(process_instance) + spiff_task = processor.get_task_by_bpmn_identifier( + "script_task_one", processor.bpmn_process_instance + ) + assert spiff_task is not None + assert spiff_task.data != {} + def test_process_model_file_create( self, app: Flask, @@ -2141,7 +2299,7 @@ class TestProcessApi(BaseTest): # process_group_id="finance", # ) - response = self.create_process_instance_from_process_model_id( + response = self.create_process_instance_from_process_model_id_with_api( client, # process_model.process_group_id, process_model_identifier, @@ -2350,7 +2508,7 @@ class TestProcessApi(BaseTest): ) headers = self.logged_in_headers(with_super_admin_user) - response = self.create_process_instance_from_process_model_id( + response = self.create_process_instance_from_process_model_id_with_api( client, process_model_identifier, headers ) assert response.json is not None @@ -2367,7 +2525,7 @@ class TestProcessApi(BaseTest): assert process_instance.status == "user_input_required" client.post( - f"/v1.0/process-instances/{self.modify_process_identifier_for_path_param(process_model_identifier)}/{process_instance_id}/suspend", + f"/v1.0/process-instance-suspend/{self.modify_process_identifier_for_path_param(process_model_identifier)}/{process_instance_id}", headers=self.logged_in_headers(with_super_admin_user), ) process_instance = ProcessInstanceService().get_process_instance( @@ -2375,15 +2533,25 @@ class TestProcessApi(BaseTest): ) assert process_instance.status == "suspended" - # TODO: Why can I run a suspended process instance? 
response = client.post( f"/v1.0/process-instances/{self.modify_process_identifier_for_path_param(process_model_identifier)}/{process_instance_id}/run", headers=self.logged_in_headers(with_super_admin_user), ) + process_instance = ProcessInstanceService().get_process_instance( + process_instance_id + ) + assert process_instance.status == "suspended" + assert response.status_code == 400 - # task = response.json['next_task'] - - print("test_process_instance_suspend") + response = client.post( + f"/v1.0/process-instance-resume/{self.modify_process_identifier_for_path_param(process_model_identifier)}/{process_instance_id}", + headers=self.logged_in_headers(with_super_admin_user), + ) + assert response.status_code == 200 + process_instance = ProcessInstanceService().get_process_instance( + process_instance_id + ) + assert process_instance.status == "waiting" def test_script_unit_test_run( self, @@ -2443,6 +2611,148 @@ class TestProcessApi(BaseTest): print("test_script_unit_test_run") + def test_send_event( + self, + app: Flask, + client: FlaskClient, + with_db_and_bpmn_file_cleanup: None, + with_super_admin_user: UserModel, + ) -> None: + """Test_script_unit_test_run.""" + process_group_id = "test_group" + process_model_id = "process_navigation" + bpmn_file_name = "process_navigation.bpmn" + bpmn_file_location = "process_navigation" + process_model_identifier = self.create_group_and_model_with_bpmn( + client=client, + user=with_super_admin_user, + process_group_id=process_group_id, + process_model_id=process_model_id, + bpmn_file_name=bpmn_file_name, + bpmn_file_location=bpmn_file_location, + ) + + bpmn_file_data_bytes = self.get_test_data_file_contents( + bpmn_file_name, bpmn_file_location + ) + self.create_spec_file( + client=client, + process_model_id=process_model_identifier, + process_model_location=process_model_identifier, + file_name=bpmn_file_name, + file_data=bpmn_file_data_bytes, + user=with_super_admin_user, + ) + + headers = 
self.logged_in_headers(with_super_admin_user) + response = self.create_process_instance_from_process_model_id_with_api( + client, process_model_identifier, headers + ) + process_instance_id = response.json["id"] + + client.post( + f"/v1.0/process-instances/{self.modify_process_identifier_for_path_param(process_model_identifier)}/{process_instance_id}/run", + headers=self.logged_in_headers(with_super_admin_user), + ) + + # This is exactly the same the test above, but some reason I to a totally irrelevant type. + data: Dict = { + "correlation_properties": [], + "expression": None, + "external": True, + "internal": False, + "payload": {"message": "message 1"}, + "name": "Message 1", + "typename": "MessageEventDefinition", + } + response = client.post( + f"/v1.0/send-event/{self.modify_process_identifier_for_path_param(process_model_identifier)}/{process_instance_id}", + headers=self.logged_in_headers(with_super_admin_user), + content_type="application/json", + data=json.dumps(data), + ) + assert response.json["status"] == "complete" + + response = client.get( + f"/v1.0/task-data/{self.modify_process_identifier_for_path_param(process_model_identifier)}/{process_instance_id}?all_tasks=true", + headers=self.logged_in_headers(with_super_admin_user), + ) + assert response.status_code == 200 + end = next(task for task in response.json if task["name"] == "End") + assert end["data"]["result"] == {"message": "message 1"} + + def test_manual_complete_task( + self, + app: Flask, + client: FlaskClient, + with_db_and_bpmn_file_cleanup: None, + with_super_admin_user: UserModel, + ) -> None: + """Test_script_unit_test_run.""" + process_group_id = "test_group" + process_model_id = "process_navigation" + bpmn_file_name = "process_navigation.bpmn" + bpmn_file_location = "process_navigation" + process_model_identifier = self.create_group_and_model_with_bpmn( + client=client, + user=with_super_admin_user, + process_group_id=process_group_id, + process_model_id=process_model_id, + 
bpmn_file_name=bpmn_file_name, + bpmn_file_location=bpmn_file_location, + ) + + bpmn_file_data_bytes = self.get_test_data_file_contents( + bpmn_file_name, bpmn_file_location + ) + self.create_spec_file( + client=client, + process_model_id=process_model_identifier, + process_model_location=process_model_identifier, + file_name=bpmn_file_name, + file_data=bpmn_file_data_bytes, + user=with_super_admin_user, + ) + + headers = self.logged_in_headers(with_super_admin_user) + response = self.create_process_instance_from_process_model_id_with_api( + client, process_model_identifier, headers + ) + process_instance_id = response.json["id"] + + client.post( + f"/v1.0/process-instances/{self.modify_process_identifier_for_path_param(process_model_identifier)}/{process_instance_id}/run", + headers=self.logged_in_headers(with_super_admin_user), + ) + + data = { + "dateTime": "timedelta(hours=1)", + "external": True, + "internal": True, + "label": "Event_0e4owa3", + "typename": "TimerEventDefinition", + } + response = client.post( + f"/v1.0/send-event/{self.modify_process_identifier_for_path_param(process_model_identifier)}/{process_instance_id}", + headers=self.logged_in_headers(with_super_admin_user), + content_type="application/json", + data=json.dumps(data), + ) + + response = client.get( + f"/v1.0/task-data/{self.modify_process_identifier_for_path_param(process_model_identifier)}/{process_instance_id}", + headers=self.logged_in_headers(with_super_admin_user), + ) + assert len(response.json) == 1 + task = response.json[0] + + response = client.post( + f"/v1.0/task-complete/{self.modify_process_identifier_for_path_param(process_model_identifier)}/{process_instance_id}/{task['id']}", + headers=self.logged_in_headers(with_super_admin_user), + content_type="application/json", + ) + assert response.json["status"] == "suspended" + def setup_initial_groups_for_move_tests( self, client: FlaskClient, with_super_admin_user: UserModel ) -> None: @@ -2496,7 +2806,7 @@ class 
TestProcessApi(BaseTest): f"/v1.0/process-models/{modified_original_process_model_id}/move?new_location={new_location}", headers=self.logged_in_headers(with_super_admin_user), ) - assert response.status_code == 201 + assert response.status_code == 200 assert response.json["id"] == new_process_model_path # make sure the original model does not exist @@ -2541,7 +2851,7 @@ class TestProcessApi(BaseTest): f"/v1.0/process-groups/{modified_original_process_group_id}/move?new_location={new_location}", headers=self.logged_in_headers(with_super_admin_user), ) - assert response.status_code == 201 + assert response.status_code == 200 assert response.json["id"] == new_sub_path # make sure the original subgroup does not exist @@ -2555,122 +2865,127 @@ class TestProcessApi(BaseTest): new_process_group = ProcessModelService.get_process_group(new_sub_path) assert new_process_group.id == new_sub_path - def test_process_model_publish( - self, - app: Flask, - client: FlaskClient, - with_db_and_bpmn_file_cleanup: None, - with_super_admin_user: UserModel, - ) -> None: - """Test_process_model_publish.""" - bpmn_root = FileSystemService.root_path() - shell_command = f"git init {bpmn_root}" - output = os.popen(shell_command).read() # noqa: S605 - assert output == f"Initialized empty Git repository in {bpmn_root}/.git/\n" - os.chdir(bpmn_root) - output = os.popen("git status").read() # noqa: S605 - assert "On branch main" in output - assert "No commits yet" in output - assert ( - 'nothing to commit (create/copy files and use "git add" to track)' in output - ) - - process_group_id = "test_group" - self.create_process_group( - client, with_super_admin_user, process_group_id, process_group_id - ) - - sub_process_group_id = "test_group/test_sub_group" - process_model_id = "hello_world" - bpmn_file_name = "hello_world.bpmn" - bpmn_file_location = "hello_world" - process_model_identifier = self.create_group_and_model_with_bpmn( - client=client, - user=with_super_admin_user, - 
process_group_id=sub_process_group_id, - process_model_id=process_model_id, - bpmn_file_name=bpmn_file_name, - bpmn_file_location=bpmn_file_location, - ) - process_model_absolute_dir = os.path.join(bpmn_root, process_model_identifier) - - output = os.popen("git status").read() # noqa: S605 - test_string = 'Untracked files:\n (use "git add ..." to include in what will be committed)\n\ttest_group' - assert test_string in output - - os.system("git add .") - output = os.popen("git commit -m 'Initial Commit'").read() - assert "Initial Commit" in output - assert "4 files changed" in output - assert "test_group/process_group.json" in output - assert "test_group/test_sub_group/hello_world/hello_world.bpmn" in output - assert "test_group/test_sub_group/hello_world/process_model.json" in output - assert "test_group/test_sub_group/process_group.json" in output - - output = os.popen("git status").read() # noqa: S605 - assert "On branch main" in output - assert "nothing to commit" in output - assert "working tree clean" in output - - output = os.popen("git branch --list").read() # noqa: S605 - assert output == "* main\n" - os.system("git branch staging") - output = os.popen("git branch --list").read() # noqa: S605 - assert output == "* main\n staging\n" - - os.system("git checkout staging") - - output = os.popen("git status").read() # noqa: S605 - assert "On branch staging" in output - assert "nothing to commit" in output - assert "working tree clean" in output - - # process_model = ProcessModelService.get_process_model(process_model_identifier) - - listing = os.listdir(process_model_absolute_dir) - assert len(listing) == 2 - assert "hello_world.bpmn" in listing - assert "process_model.json" in listing - - os.system("git checkout main") - - output = os.popen("git status").read() # noqa: S605 - assert "On branch main" in output - assert "nothing to commit" in output - assert "working tree clean" in output - - file_data = b"abc123" - new_file_path = 
os.path.join(process_model_absolute_dir, "new_file.txt") - with open(new_file_path, "wb") as f_open: - f_open.write(file_data) - - output = os.popen("git status").read() # noqa: S605 - assert "On branch main" in output - assert "Untracked files:" in output - assert "test_group/test_sub_group/hello_world/new_file.txt" in output - - os.system( - "git add test_group/test_sub_group/hello_world/new_file.txt" - ) # noqa: S605 - output = os.popen("git commit -m 'add new_file.txt'").read() # noqa: S605 - - assert "add new_file.txt" in output - assert "1 file changed, 1 insertion(+)" in output - assert "test_group/test_sub_group/hello_world/new_file.txt" in output - - listing = os.listdir(process_model_absolute_dir) - assert len(listing) == 3 - assert "hello_world.bpmn" in listing - assert "process_model.json" in listing - assert "new_file.txt" in listing - - # modified_process_model_id = process_model_identifier.replace("/", ":") - # response = client.post( - # f"/v1.0/process-models/{modified_process_model_id}/publish?branch_to_update=staging", - # headers=self.logged_in_headers(with_super_admin_user), - # ) - - print("test_process_model_publish") + # this doesn't work in CI + # assert "Initial Commit" in output + # def test_process_model_publish( + # self, + # app: Flask, + # client: FlaskClient, + # with_db_and_bpmn_file_cleanup: None, + # with_super_admin_user: UserModel, + # ) -> None: + # """Test_process_model_publish.""" + # bpmn_root = FileSystemService.root_path() + # shell_command = ["git", "init", "--initial-branch=main", bpmn_root] + # output = GitService.run_shell_command_to_get_stdout(shell_command) + # assert output == f"Initialized empty Git repository in {bpmn_root}/.git/\n" + # with FileSystemService.cd(bpmn_root): + # output = GitService.run_shell_command_to_get_stdout(["git", "status"]) + # assert "On branch main" in output + # assert "No commits yet" in output + # assert ( + # 'nothing to commit (create/copy files and use "git add" to track)' + # in 
output + # ) + # + # process_group_id = "test_group" + # self.create_process_group( + # client, with_super_admin_user, process_group_id, process_group_id + # ) + # + # sub_process_group_id = "test_group/test_sub_group" + # process_model_id = "hello_world" + # bpmn_file_name = "hello_world.bpmn" + # bpmn_file_location = "hello_world" + # process_model_identifier = self.create_group_and_model_with_bpmn( + # client=client, + # user=with_super_admin_user, + # process_group_id=sub_process_group_id, + # process_model_id=process_model_id, + # bpmn_file_name=bpmn_file_name, + # bpmn_file_location=bpmn_file_location, + # ) + # process_model_absolute_dir = os.path.join( + # bpmn_root, process_model_identifier + # ) + # + # output = GitService.run_shell_command_to_get_stdout(["git", "status"]) + # test_string = 'Untracked files:\n (use "git add ..." to include in what will be committed)\n\ttest_group' + # assert test_string in output + # + # os.system("git add .") + # output = os.popen("git commit -m 'Initial Commit'").read() + # assert "Initial Commit" in output + # assert "4 files changed" in output + # assert "test_group/process_group.json" in output + # assert "test_group/test_sub_group/hello_world/hello_world.bpmn" in output + # assert "test_group/test_sub_group/hello_world/process_model.json" in output + # assert "test_group/test_sub_group/process_group.json" in output + # + # output = GitService.run_shell_command_to_get_stdout(["git", "status"]) + # assert "On branch main" in output + # assert "nothing to commit" in output + # assert "working tree clean" in output + # + # output = os.popen("git branch --list").read() # noqa: S605 + # assert output == "* main\n" + # os.system("git branch staging") + # output = os.popen("git branch --list").read() # noqa: S605 + # assert output == "* main\n staging\n" + # + # os.system("git checkout staging") + # + # output = GitService.run_shell_command_to_get_stdout(["git", "status"]) + # assert "On branch staging" in output + # assert 
"nothing to commit" in output + # assert "working tree clean" in output + # + # # process_model = ProcessModelService.get_process_model(process_model_identifier) + # + # listing = os.listdir(process_model_absolute_dir) + # assert len(listing) == 2 + # assert "hello_world.bpmn" in listing + # assert "process_model.json" in listing + # + # os.system("git checkout main") + # + # output = GitService.run_shell_command_to_get_stdout(["git", "status"]) + # assert "On branch main" in output + # assert "nothing to commit" in output + # assert "working tree clean" in output + # + # file_data = b"abc123" + # new_file_path = os.path.join(process_model_absolute_dir, "new_file.txt") + # with open(new_file_path, "wb") as f_open: + # f_open.write(file_data) + # + # output = GitService.run_shell_command_to_get_stdout(["git", "status"]) + # assert "On branch main" in output + # assert "Untracked files:" in output + # assert "test_group/test_sub_group/hello_world/new_file.txt" in output + # + # os.system( + # "git add test_group/test_sub_group/hello_world/new_file.txt" + # ) # noqa: S605 + # output = os.popen("git commit -m 'add new_file.txt'").read() # noqa: S605 + # + # assert "add new_file.txt" in output + # assert "1 file changed, 1 insertion(+)" in output + # assert "test_group/test_sub_group/hello_world/new_file.txt" in output + # + # listing = os.listdir(process_model_absolute_dir) + # assert len(listing) == 3 + # assert "hello_world.bpmn" in listing + # assert "process_model.json" in listing + # assert "new_file.txt" in listing + # + # # modified_process_model_id = process_model_identifier.replace("/", ":") + # # response = client.post( + # # f"/v1.0/process-models/{modified_process_model_id}/publish?branch_to_update=staging", + # # headers=self.logged_in_headers(with_super_admin_user), + # # ) + # + # print("test_process_model_publish") def test_can_get_process_instance_list_with_report_metadata( self, @@ -2681,7 +2996,9 @@ class TestProcessApi(BaseTest): ) -> None: 
"""Test_can_get_process_instance_list_with_report_metadata.""" process_model = load_test_spec( - process_model_id="save_process_instance_metadata/save_process_instance_metadata", + process_model_id=( + "save_process_instance_metadata/save_process_instance_metadata" + ), bpmn_file_name="save_process_instance_metadata.bpmn", process_model_source_directory="save_process_instance_metadata", ) @@ -2738,7 +3055,9 @@ class TestProcessApi(BaseTest): ) -> None: """Test_can_get_process_instance_list_with_report_metadata.""" process_model = load_test_spec( - process_model_id="save_process_instance_metadata/save_process_instance_metadata", + process_model_id=( + "save_process_instance_metadata/save_process_instance_metadata" + ), bpmn_file_name="save_process_instance_metadata.bpmn", process_model_source_directory="save_process_instance_metadata", ) @@ -2859,3 +3178,31 @@ class TestProcessApi(BaseTest): assert len(response.json["results"]) == 2 assert response.json["results"][1]["id"] == process_instance_one.id assert response.json["results"][0]["id"] == process_instance_two.id + + def test_process_data_show( + self, + app: Flask, + client: FlaskClient, + with_db_and_bpmn_file_cleanup: None, + with_super_admin_user: UserModel, + ) -> None: + """Test_process_data_show.""" + process_model = load_test_spec( + "test_group/data_object_test", + process_model_source_directory="data_object_test", + ) + process_instance_one = self.create_process_instance_from_process_model( + process_model + ) + processor = ProcessInstanceProcessor(process_instance_one) + processor.do_engine_steps(save=True) + assert process_instance_one.status == "user_input_required" + + response = client.get( + f"/v1.0/process-data/{self.modify_process_identifier_for_path_param(process_model.id)}/{process_instance_one.id}/the_data_object_var", + headers=self.logged_in_headers(with_super_admin_user), + ) + + assert response.status_code == 200 + assert response.json is not None + assert 
response.json["process_data_value"] == "hey" diff --git a/spiffworkflow-backend/tests/spiffworkflow_backend/scripts/test_get_all_permissions.py b/spiffworkflow-backend/tests/spiffworkflow_backend/scripts/test_get_all_permissions.py new file mode 100644 index 000000000..cbf625168 --- /dev/null +++ b/spiffworkflow-backend/tests/spiffworkflow_backend/scripts/test_get_all_permissions.py @@ -0,0 +1,60 @@ +"""Test_get_localtime.""" +from flask.app import Flask +from flask.testing import FlaskClient +from tests.spiffworkflow_backend.helpers.base_test import BaseTest + +from spiffworkflow_backend.models.script_attributes_context import ( + ScriptAttributesContext, +) +from spiffworkflow_backend.models.user import UserModel +from spiffworkflow_backend.scripts.get_all_permissions import GetAllPermissions +from spiffworkflow_backend.services.authorization_service import AuthorizationService + + +class TestGetAllPermissions(BaseTest): + """TestGetAllPermissions.""" + + def test_can_get_all_permissions( + self, + app: Flask, + client: FlaskClient, + with_db_and_bpmn_file_cleanup: None, + with_super_admin_user: UserModel, + ) -> None: + """Test_can_get_all_permissions.""" + self.find_or_create_user("test_user") + + # now that we have everything, try to clear it out... 
+ script_attributes_context = ScriptAttributesContext( + task=None, + environment_identifier="testing", + process_instance_id=1, + process_model_identifier="my_test_user", + ) + AuthorizationService.add_permission_from_uri_or_macro( + permission="start", target="PG:hey:group", group_identifier="my_test_group" + ) + AuthorizationService.add_permission_from_uri_or_macro( + permission="all", target="/tasks", group_identifier="my_test_group" + ) + + expected_permissions = [ + { + "group_identifier": "my_test_group", + "uri": "/process-instances/hey:group:*", + "permissions": ["create"], + }, + { + "group_identifier": "my_test_group", + "uri": "/process-instances/for-me/hey:group:*", + "permissions": ["read"], + }, + { + "group_identifier": "my_test_group", + "uri": "/tasks", + "permissions": ["create", "read", "update", "delete"], + }, + ] + + permissions = GetAllPermissions().run(script_attributes_context) + assert permissions == expected_permissions diff --git a/spiffworkflow-backend/tests/spiffworkflow_backend/scripts/test_get_localtime.py b/spiffworkflow-backend/tests/spiffworkflow_backend/scripts/test_get_localtime.py index f1834ab3a..90e4158da 100644 --- a/spiffworkflow-backend/tests/spiffworkflow_backend/scripts/test_get_localtime.py +++ b/spiffworkflow-backend/tests/spiffworkflow_backend/scripts/test_get_localtime.py @@ -68,9 +68,9 @@ class TestGetLocaltime(BaseTest): processor = ProcessInstanceProcessor(process_instance) processor.do_engine_steps(save=True) - active_task = process_instance.active_tasks[0] + human_task = process_instance.active_human_tasks[0] spiff_task = processor.__class__.get_task_by_bpmn_identifier( - active_task.task_name, processor.bpmn_process_instance + human_task.task_name, processor.bpmn_process_instance ) ProcessInstanceService.complete_form_task( @@ -78,12 +78,12 @@ class TestGetLocaltime(BaseTest): spiff_task, {"timezone": "US/Pacific"}, initiator_user, - active_task, + human_task, ) - active_task = process_instance.active_tasks[0] 
+ human_task = process_instance.active_human_tasks[0] spiff_task = processor.__class__.get_task_by_bpmn_identifier( - active_task.task_name, processor.bpmn_process_instance + human_task.task_name, processor.bpmn_process_instance ) assert spiff_task diff --git a/spiffworkflow-backend/tests/spiffworkflow_backend/scripts/test_refresh_permissions.py b/spiffworkflow-backend/tests/spiffworkflow_backend/scripts/test_refresh_permissions.py new file mode 100644 index 000000000..67cf55c85 --- /dev/null +++ b/spiffworkflow-backend/tests/spiffworkflow_backend/scripts/test_refresh_permissions.py @@ -0,0 +1,50 @@ +"""Test_get_localtime.""" +import pytest +from flask.app import Flask +from flask.testing import FlaskClient +from flask_bpmn.api.api_error import ApiError +from tests.spiffworkflow_backend.helpers.base_test import BaseTest +from tests.spiffworkflow_backend.helpers.test_data import load_test_spec + +from spiffworkflow_backend.services.process_instance_processor import ( + ProcessInstanceProcessor, +) + + +class TestRefreshPermissions(BaseTest): + """TestRefreshPermissions.""" + + def test_refresh_permissions_requires_elevated_permission( + self, + app: Flask, + client: FlaskClient, + with_db_and_bpmn_file_cleanup: None, + ) -> None: + """Test_refresh_permissions_requires_elevated_permission.""" + basic_user = self.find_or_create_user("basic_user") + privileged_user = self.find_or_create_user("privileged_user") + self.add_permissions_to_user( + privileged_user, + target_uri="/can-run-privileged-script/refresh_permissions", + permission_names=["create"], + ) + process_model = load_test_spec( + process_model_id="refresh_permissions", + process_model_source_directory="script_refresh_permissions", + ) + process_instance = self.create_process_instance_from_process_model( + process_model=process_model, user=basic_user + ) + + processor = ProcessInstanceProcessor(process_instance) + + with pytest.raises(ApiError) as exception: + processor.do_engine_steps(save=True) + assert 
"ScriptUnauthorizedForUserError" in str(exception) + + process_instance = self.create_process_instance_from_process_model( + process_model=process_model, user=privileged_user + ) + processor = ProcessInstanceProcessor(process_instance) + processor.do_engine_steps(save=True) + assert process_instance.status == "complete" diff --git a/spiffworkflow-backend/tests/spiffworkflow_backend/scripts/test_save_process_instance_metadata.py b/spiffworkflow-backend/tests/spiffworkflow_backend/scripts/test_save_process_instance_metadata.py index 96eb62970..738896cd7 100644 --- a/spiffworkflow-backend/tests/spiffworkflow_backend/scripts/test_save_process_instance_metadata.py +++ b/spiffworkflow-backend/tests/spiffworkflow_backend/scripts/test_save_process_instance_metadata.py @@ -24,17 +24,18 @@ class TestSaveProcessInstanceMetadata(BaseTest): with_super_admin_user: UserModel, ) -> None: """Test_can_save_process_instance_metadata.""" - initiator_user = self.find_or_create_user("initiator_user") self.create_process_group( client, with_super_admin_user, "test_group", "test_group" ) process_model = load_test_spec( - process_model_id="save_process_instance_metadata/save_process_instance_metadata", + process_model_id=( + "save_process_instance_metadata/save_process_instance_metadata" + ), bpmn_file_name="save_process_instance_metadata.bpmn", process_model_source_directory="save_process_instance_metadata", ) process_instance = self.create_process_instance_from_process_model( - process_model=process_model, user=initiator_user + process_model=process_model, user=with_super_admin_user ) processor = ProcessInstanceProcessor(process_instance) processor.do_engine_steps(save=True) diff --git a/spiffworkflow-backend/tests/spiffworkflow_backend/unit/test_authorization_service.py b/spiffworkflow-backend/tests/spiffworkflow_backend/unit/test_authorization_service.py index 00622a1f7..83ed7fd8e 100644 --- a/spiffworkflow-backend/tests/spiffworkflow_backend/unit/test_authorization_service.py +++ 
b/spiffworkflow-backend/tests/spiffworkflow_backend/unit/test_authorization_service.py @@ -4,9 +4,12 @@ from flask import Flask from flask.testing import FlaskClient from tests.spiffworkflow_backend.helpers.base_test import BaseTest +from spiffworkflow_backend.models.group import GroupModel from spiffworkflow_backend.models.user import UserModel from spiffworkflow_backend.models.user import UserNotFoundError from spiffworkflow_backend.services.authorization_service import AuthorizationService +from spiffworkflow_backend.services.authorization_service import InvalidPermissionError +from spiffworkflow_backend.services.group_service import GroupService from spiffworkflow_backend.services.process_instance_processor import ( ProcessInstanceProcessor, ) @@ -14,6 +17,7 @@ from spiffworkflow_backend.services.process_instance_service import ( ProcessInstanceService, ) from spiffworkflow_backend.services.process_model_service import ProcessModelService +from spiffworkflow_backend.services.user_service import UserService class TestAuthorizationService(BaseTest): @@ -90,14 +94,14 @@ class TestAuthorizationService(BaseTest): users["testuser2"], "read", "/v1.0/process-groups/" ) - def test_user_can_be_added_to_active_task_on_first_login( + def test_user_can_be_added_to_human_task_on_first_login( self, app: Flask, client: FlaskClient, with_db_and_bpmn_file_cleanup: None, with_super_admin_user: UserModel, ) -> None: - """Test_user_can_be_added_to_active_task_on_first_login.""" + """Test_user_can_be_added_to_human_task_on_first_login.""" initiator_user = self.find_or_create_user("initiator_user") assert initiator_user.principal is not None # to ensure there is a user that can be assigned to the task @@ -121,21 +125,294 @@ class TestAuthorizationService(BaseTest): ) processor = ProcessInstanceProcessor(process_instance) processor.do_engine_steps(save=True) - active_task = process_instance.active_tasks[0] + human_task = process_instance.active_human_tasks[0] spiff_task = 
processor.__class__.get_task_by_bpmn_identifier( - active_task.task_name, processor.bpmn_process_instance + human_task.task_name, processor.bpmn_process_instance ) ProcessInstanceService.complete_form_task( - processor, spiff_task, {}, initiator_user, active_task + processor, spiff_task, {}, initiator_user, human_task ) - active_task = process_instance.active_tasks[0] + human_task = process_instance.active_human_tasks[0] spiff_task = processor.__class__.get_task_by_bpmn_identifier( - active_task.task_name, processor.bpmn_process_instance + human_task.task_name, processor.bpmn_process_instance ) finance_user = AuthorizationService.create_user_from_sign_in( - {"username": "testuser2", "sub": "open_id"} + { + "username": "testuser2", + "sub": "testuser2", + "iss": "https://test.stuff", + "email": "testuser2", + } ) ProcessInstanceService.complete_form_task( - processor, spiff_task, {}, finance_user, active_task + processor, spiff_task, {}, finance_user, human_task ) + + def test_explode_permissions_all_on_process_group( + self, + app: Flask, + client: FlaskClient, + with_db_and_bpmn_file_cleanup: None, + ) -> None: + """Test_explode_permissions_all_on_process_group.""" + expected_permissions = [ + ("/logs/some-process-group:some-process-model:*", "read"), + ("/process-data/some-process-group:some-process-model:*", "read"), + ("/process-groups/some-process-group:some-process-model:*", "create"), + ("/process-groups/some-process-group:some-process-model:*", "delete"), + ("/process-groups/some-process-group:some-process-model:*", "read"), + ("/process-groups/some-process-group:some-process-model:*", "update"), + ( + "/process-instance-suspend/some-process-group:some-process-model:*", + "create", + ), + ( + "/process-instance-terminate/some-process-group:some-process-model:*", + "create", + ), + ("/process-instances/some-process-group:some-process-model:*", "create"), + ("/process-instances/some-process-group:some-process-model:*", "delete"), + 
("/process-instances/some-process-group:some-process-model:*", "read"), + ("/process-models/some-process-group:some-process-model:*", "create"), + ("/process-models/some-process-group:some-process-model:*", "delete"), + ("/process-models/some-process-group:some-process-model:*", "read"), + ("/process-models/some-process-group:some-process-model:*", "update"), + ("/task-data/some-process-group:some-process-model:*", "read"), + ("/task-data/some-process-group:some-process-model:*", "update"), + ] + permissions_to_assign = AuthorizationService.explode_permissions( + "all", "PG:/some-process-group/some-process-model" + ) + permissions_to_assign_tuples = sorted( + [(p.target_uri, p.permission) for p in permissions_to_assign] + ) + assert permissions_to_assign_tuples == expected_permissions + + def test_explode_permissions_start_on_process_group( + self, + app: Flask, + client: FlaskClient, + with_db_and_bpmn_file_cleanup: None, + ) -> None: + """Test_explode_permissions_start_on_process_group.""" + expected_permissions = [ + ( + "/process-instances/for-me/some-process-group:some-process-model:*", + "read", + ), + ("/process-instances/some-process-group:some-process-model:*", "create"), + ] + permissions_to_assign = AuthorizationService.explode_permissions( + "start", "PG:/some-process-group/some-process-model" + ) + permissions_to_assign_tuples = sorted( + [(p.target_uri, p.permission) for p in permissions_to_assign] + ) + assert permissions_to_assign_tuples == expected_permissions + + def test_explode_permissions_all_on_process_model( + self, + app: Flask, + client: FlaskClient, + with_db_and_bpmn_file_cleanup: None, + ) -> None: + """Test_explode_permissions_all_on_process_model.""" + expected_permissions = [ + ("/logs/some-process-group:some-process-model/*", "read"), + ("/process-data/some-process-group:some-process-model/*", "read"), + ( + "/process-instance-suspend/some-process-group:some-process-model/*", + "create", + ), + ( + 
"/process-instance-terminate/some-process-group:some-process-model/*", + "create", + ), + ("/process-instances/some-process-group:some-process-model/*", "create"), + ("/process-instances/some-process-group:some-process-model/*", "delete"), + ("/process-instances/some-process-group:some-process-model/*", "read"), + ("/process-models/some-process-group:some-process-model/*", "create"), + ("/process-models/some-process-group:some-process-model/*", "delete"), + ("/process-models/some-process-group:some-process-model/*", "read"), + ("/process-models/some-process-group:some-process-model/*", "update"), + ("/task-data/some-process-group:some-process-model/*", "read"), + ("/task-data/some-process-group:some-process-model/*", "update"), + ] + permissions_to_assign = AuthorizationService.explode_permissions( + "all", "PM:/some-process-group/some-process-model" + ) + permissions_to_assign_tuples = sorted( + [(p.target_uri, p.permission) for p in permissions_to_assign] + ) + assert permissions_to_assign_tuples == expected_permissions + + def test_explode_permissions_start_on_process_model( + self, + app: Flask, + client: FlaskClient, + with_db_and_bpmn_file_cleanup: None, + ) -> None: + """Test_explode_permissions_start_on_process_model.""" + expected_permissions = [ + ( + "/process-instances/for-me/some-process-group:some-process-model/*", + "read", + ), + ("/process-instances/some-process-group:some-process-model/*", "create"), + ] + permissions_to_assign = AuthorizationService.explode_permissions( + "start", "PM:/some-process-group/some-process-model" + ) + permissions_to_assign_tuples = sorted( + [(p.target_uri, p.permission) for p in permissions_to_assign] + ) + assert permissions_to_assign_tuples == expected_permissions + + def test_explode_permissions_basic( + self, + app: Flask, + client: FlaskClient, + with_db_and_bpmn_file_cleanup: None, + ) -> None: + """Test_explode_permissions_basic.""" + expected_permissions = [ + ("/process-instances/for-me", "read"), + 
("/process-instances/reports/*", "create"), + ("/process-instances/reports/*", "delete"), + ("/process-instances/reports/*", "read"), + ("/process-instances/reports/*", "update"), + ("/processes", "read"), + ("/service-tasks", "read"), + ("/tasks/*", "create"), + ("/tasks/*", "delete"), + ("/tasks/*", "read"), + ("/tasks/*", "update"), + ("/user-groups/for-current-user", "read"), + ] + permissions_to_assign = AuthorizationService.explode_permissions("all", "BASIC") + permissions_to_assign_tuples = sorted( + [(p.target_uri, p.permission) for p in permissions_to_assign] + ) + assert permissions_to_assign_tuples == expected_permissions + + def test_explode_permissions_all( + self, + app: Flask, + client: FlaskClient, + with_db_and_bpmn_file_cleanup: None, + ) -> None: + """Test_explode_permissions_all.""" + expected_permissions = [ + ("/*", "create"), + ("/*", "delete"), + ("/*", "read"), + ("/*", "update"), + ] + permissions_to_assign = AuthorizationService.explode_permissions("all", "ALL") + permissions_to_assign_tuples = sorted( + [(p.target_uri, p.permission) for p in permissions_to_assign] + ) + assert permissions_to_assign_tuples == expected_permissions + + def test_explode_permissions_with_target_uri( + self, + app: Flask, + client: FlaskClient, + with_db_and_bpmn_file_cleanup: None, + ) -> None: + """Test_explode_permissions_with_target_uri.""" + expected_permissions = [ + ("/hey/model", "create"), + ("/hey/model", "delete"), + ("/hey/model", "read"), + ("/hey/model", "update"), + ] + permissions_to_assign = AuthorizationService.explode_permissions( + "all", "/hey/model" + ) + permissions_to_assign_tuples = sorted( + [(p.target_uri, p.permission) for p in permissions_to_assign] + ) + assert permissions_to_assign_tuples == expected_permissions + + def test_granting_access_to_group_gives_access_to_group_and_subgroups( + self, + app: Flask, + client: FlaskClient, + with_db_and_bpmn_file_cleanup: None, + ) -> None: + 
"""Test_granting_access_to_group_gives_access_to_group_and_subgroups.""" + user = self.find_or_create_user(username="user_one") + user_group = GroupService.find_or_create_group("group_one") + UserService.add_user_to_group(user, user_group) + AuthorizationService.add_permission_from_uri_or_macro( + user_group.identifier, "read", "PG:hey" + ) + self.assert_user_has_permission(user, "read", "/v1.0/process-groups/hey") + self.assert_user_has_permission(user, "read", "/v1.0/process-groups/hey:yo") + + def test_explode_permissions_with_invalid_target_uri( + self, + app: Flask, + client: FlaskClient, + with_db_and_bpmn_file_cleanup: None, + ) -> None: + """Test_explode_permissions_with_invalid_target_uri.""" + with pytest.raises(InvalidPermissionError): + AuthorizationService.explode_permissions("all", "BAD_MACRO") + + def test_explode_permissions_with_start_to_incorrect_target( + self, + app: Flask, + client: FlaskClient, + with_db_and_bpmn_file_cleanup: None, + ) -> None: + """Test_explode_permissions_with_start_to_incorrect_target.""" + with pytest.raises(InvalidPermissionError): + AuthorizationService.explode_permissions("start", "/hey/model") + + def test_can_refresh_permissions( + self, + app: Flask, + client: FlaskClient, + with_db_and_bpmn_file_cleanup: None, + ) -> None: + """Test_can_refresh_permissions.""" + user = self.find_or_create_user(username="user_one") + admin_user = self.find_or_create_user(username="testadmin1") + + # this group is not mentioned so it will get deleted + GroupService.find_or_create_group("group_two") + assert GroupModel.query.filter_by(identifier="group_two").first() is not None + + group_info = [ + { + "users": ["user_one"], + "name": "group_one", + "permissions": [{"actions": ["create", "read"], "uri": "PG:hey"}], + } + ] + AuthorizationService.refresh_permissions(group_info) + assert GroupModel.query.filter_by(identifier="group_two").first() is None + assert GroupModel.query.filter_by(identifier="group_one").first() is not None + 
self.assert_user_has_permission(admin_user, "create", "/anything-they-want") + self.assert_user_has_permission(user, "read", "/v1.0/process-groups/hey") + self.assert_user_has_permission(user, "read", "/v1.0/process-groups/hey:yo") + self.assert_user_has_permission(user, "create", "/v1.0/process-groups/hey:yo") + + group_info = [ + { + "users": ["user_one"], + "name": "group_one", + "permissions": [{"actions": ["read"], "uri": "PG:hey"}], + } + ] + AuthorizationService.refresh_permissions(group_info) + assert GroupModel.query.filter_by(identifier="group_one").first() is not None + self.assert_user_has_permission(user, "read", "/v1.0/process-groups/hey") + self.assert_user_has_permission(user, "read", "/v1.0/process-groups/hey:yo") + self.assert_user_has_permission( + user, "create", "/v1.0/process-groups/hey:yo", expected_result=False + ) + self.assert_user_has_permission(admin_user, "create", "/anything-they-want") diff --git a/spiffworkflow-backend/tests/spiffworkflow_backend/unit/test_dot_notation.py b/spiffworkflow-backend/tests/spiffworkflow_backend/unit/test_dot_notation.py index 80b052544..59a0fee8d 100644 --- a/spiffworkflow-backend/tests/spiffworkflow_backend/unit/test_dot_notation.py +++ b/spiffworkflow-backend/tests/spiffworkflow_backend/unit/test_dot_notation.py @@ -37,7 +37,7 @@ class TestDotNotation(BaseTest): ) headers = self.logged_in_headers(with_super_admin_user) - response = self.create_process_instance_from_process_model_id( + response = self.create_process_instance_from_process_model_id_with_api( client, process_model_identifier, headers ) process_instance_id = response.json["id"] @@ -47,7 +47,7 @@ class TestDotNotation(BaseTest): processor = ProcessInstanceProcessor(process_instance) processor.do_engine_steps(save=True) - active_task = process_instance.active_tasks[0] + human_task = process_instance.human_tasks[0] user_task = processor.get_ready_user_tasks()[0] form_data = { @@ -58,7 +58,7 @@ class TestDotNotation(BaseTest): "invoice.dueDate": 
"09/30/2022", } ProcessInstanceService.complete_form_task( - processor, user_task, form_data, with_super_admin_user, active_task + processor, user_task, form_data, with_super_admin_user, human_task ) expected = { diff --git a/spiffworkflow-backend/tests/spiffworkflow_backend/unit/test_git_service.py b/spiffworkflow-backend/tests/spiffworkflow_backend/unit/test_git_service.py new file mode 100644 index 000000000..ed1e24e1e --- /dev/null +++ b/spiffworkflow-backend/tests/spiffworkflow_backend/unit/test_git_service.py @@ -0,0 +1,22 @@ +"""Process Model.""" +from flask.app import Flask +from flask.testing import FlaskClient +from tests.spiffworkflow_backend.helpers.base_test import BaseTest + +from spiffworkflow_backend.services.git_service import GitService + + +class TestGitService(BaseTest): + """TestGitService.""" + + def test_strips_output_of_stdout_from_command( + self, + app: Flask, + client: FlaskClient, + with_db_and_bpmn_file_cleanup: None, + ) -> None: + """Test_strips_output_of_stdout_from_command.""" + output = GitService.run_shell_command_to_get_stdout( + ["echo", " This output should not end in space or newline \n"] + ) + assert output == "This output should not end in space or newline" diff --git a/spiffworkflow-backend/tests/spiffworkflow_backend/unit/test_permissions.py b/spiffworkflow-backend/tests/spiffworkflow_backend/unit/test_permissions.py index b66f32370..a96989697 100644 --- a/spiffworkflow-backend/tests/spiffworkflow_backend/unit/test_permissions.py +++ b/spiffworkflow-backend/tests/spiffworkflow_backend/unit/test_permissions.py @@ -16,6 +16,7 @@ from spiffworkflow_backend.services.user_service import UserService # we think we can get the list of roles for a user. # spiff needs a way to determine what each role allows. 
+ # user role allows list and read of all process groups/models # super-admin role allows create, update, and delete of all process groups/models # * super-admins users maybe conventionally get the user role as well diff --git a/spiffworkflow-backend/tests/spiffworkflow_backend/unit/test_process_instance_processor.py b/spiffworkflow-backend/tests/spiffworkflow_backend/unit/test_process_instance_processor.py index 3e0107957..b4a650dc6 100644 --- a/spiffworkflow-backend/tests/spiffworkflow_backend/unit/test_process_instance_processor.py +++ b/spiffworkflow-backend/tests/spiffworkflow_backend/unit/test_process_instance_processor.py @@ -31,10 +31,14 @@ class TestProcessInstanceProcessor(BaseTest): with_db_and_bpmn_file_cleanup: None, ) -> None: """Test_script_engine_takes_data_and_returns_expected_results.""" + app.config["THREAD_LOCAL_DATA"].process_model_identifier = "hey" + app.config["THREAD_LOCAL_DATA"].process_instance_id = 0 script_engine = ProcessInstanceProcessor._script_engine result = script_engine._evaluate("a", {"a": 1}) assert result == 1 + app.config["THREAD_LOCAL_DATA"].process_model_identifier = None + app.config["THREAD_LOCAL_DATA"].process_instance_id = None def test_script_engine_can_use_custom_scripts( self, @@ -42,21 +46,26 @@ class TestProcessInstanceProcessor(BaseTest): with_db_and_bpmn_file_cleanup: None, ) -> None: """Test_script_engine_takes_data_and_returns_expected_results.""" + app.config["THREAD_LOCAL_DATA"].process_model_identifier = "hey" + app.config["THREAD_LOCAL_DATA"].process_instance_id = 0 script_engine = ProcessInstanceProcessor._script_engine result = script_engine._evaluate("fact_service(type='norris')", {}) assert ( result - == "Chuck Norris doesn’t read books. He stares them down until he gets the information he wants." + == "Chuck Norris doesn’t read books. He stares them down until he gets the" + " information he wants." 
) + app.config["THREAD_LOCAL_DATA"].process_model_identifier = None + app.config["THREAD_LOCAL_DATA"].process_instance_id = None - def test_sets_permission_correctly_on_active_task( + def test_sets_permission_correctly_on_human_task( self, app: Flask, client: FlaskClient, with_db_and_bpmn_file_cleanup: None, with_super_admin_user: UserModel, ) -> None: - """Test_sets_permission_correctly_on_active_task.""" + """Test_sets_permission_correctly_on_human_task.""" self.create_process_group( client, with_super_admin_user, "test_group", "test_group" ) @@ -80,63 +89,63 @@ class TestProcessInstanceProcessor(BaseTest): processor = ProcessInstanceProcessor(process_instance) processor.do_engine_steps(save=True) - assert len(process_instance.active_tasks) == 1 - active_task = process_instance.active_tasks[0] - assert active_task.lane_assignment_id is None - assert len(active_task.potential_owners) == 1 - assert active_task.potential_owners[0] == initiator_user + assert len(process_instance.active_human_tasks) == 1 + human_task = process_instance.active_human_tasks[0] + assert human_task.lane_assignment_id is None + assert len(human_task.potential_owners) == 1 + assert human_task.potential_owners[0] == initiator_user spiff_task = processor.__class__.get_task_by_bpmn_identifier( - active_task.task_name, processor.bpmn_process_instance + human_task.task_name, processor.bpmn_process_instance ) with pytest.raises(UserDoesNotHaveAccessToTaskError): ProcessInstanceService.complete_form_task( - processor, spiff_task, {}, finance_user, active_task + processor, spiff_task, {}, finance_user, human_task ) ProcessInstanceService.complete_form_task( - processor, spiff_task, {}, initiator_user, active_task + processor, spiff_task, {}, initiator_user, human_task ) - assert len(process_instance.active_tasks) == 1 - active_task = process_instance.active_tasks[0] - assert active_task.lane_assignment_id == finance_group.id - assert len(active_task.potential_owners) == 1 - assert 
active_task.potential_owners[0] == finance_user + assert len(process_instance.active_human_tasks) == 1 + human_task = process_instance.active_human_tasks[0] + assert human_task.lane_assignment_id == finance_group.id + assert len(human_task.potential_owners) == 1 + assert human_task.potential_owners[0] == finance_user spiff_task = processor.__class__.get_task_by_bpmn_identifier( - active_task.task_name, processor.bpmn_process_instance + human_task.task_name, processor.bpmn_process_instance ) with pytest.raises(UserDoesNotHaveAccessToTaskError): ProcessInstanceService.complete_form_task( - processor, spiff_task, {}, initiator_user, active_task + processor, spiff_task, {}, initiator_user, human_task ) ProcessInstanceService.complete_form_task( - processor, spiff_task, {}, finance_user, active_task + processor, spiff_task, {}, finance_user, human_task ) - assert len(process_instance.active_tasks) == 1 - active_task = process_instance.active_tasks[0] - assert active_task.lane_assignment_id is None - assert len(active_task.potential_owners) == 1 - assert active_task.potential_owners[0] == initiator_user + assert len(process_instance.active_human_tasks) == 1 + human_task = process_instance.active_human_tasks[0] + assert human_task.lane_assignment_id is None + assert len(human_task.potential_owners) == 1 + assert human_task.potential_owners[0] == initiator_user spiff_task = processor.__class__.get_task_by_bpmn_identifier( - active_task.task_name, processor.bpmn_process_instance + human_task.task_name, processor.bpmn_process_instance ) ProcessInstanceService.complete_form_task( - processor, spiff_task, {}, initiator_user, active_task + processor, spiff_task, {}, initiator_user, human_task ) assert process_instance.status == ProcessInstanceStatus.complete.value - def test_sets_permission_correctly_on_active_task_when_using_dict( + def test_sets_permission_correctly_on_human_task_when_using_dict( self, app: Flask, client: FlaskClient, with_db_and_bpmn_file_cleanup: None, 
with_super_admin_user: UserModel, ) -> None: - """Test_sets_permission_correctly_on_active_task_when_using_dict.""" + """Test_sets_permission_correctly_on_human_task_when_using_dict.""" self.create_process_group( client, with_super_admin_user, "test_group", "test_group" ) @@ -163,94 +172,97 @@ class TestProcessInstanceProcessor(BaseTest): processor.do_engine_steps(save=True) processor.save() - assert len(process_instance.active_tasks) == 1 - active_task = process_instance.active_tasks[0] - assert active_task.lane_assignment_id is None - assert len(active_task.potential_owners) == 1 - assert active_task.potential_owners[0] == initiator_user + assert len(process_instance.active_human_tasks) == 1 + human_task = process_instance.active_human_tasks[0] + assert human_task.lane_assignment_id is None + assert len(human_task.potential_owners) == 1 + assert human_task.potential_owners[0] == initiator_user spiff_task = processor.__class__.get_task_by_bpmn_identifier( - active_task.task_name, processor.bpmn_process_instance + human_task.task_name, processor.bpmn_process_instance ) with pytest.raises(UserDoesNotHaveAccessToTaskError): ProcessInstanceService.complete_form_task( - processor, spiff_task, {}, finance_user_three, active_task + processor, spiff_task, {}, finance_user_three, human_task ) ProcessInstanceService.complete_form_task( - processor, spiff_task, {}, initiator_user, active_task + processor, spiff_task, {}, initiator_user, human_task ) + assert human_task.completed_by_user_id == initiator_user.id - assert len(process_instance.active_tasks) == 1 - active_task = process_instance.active_tasks[0] - assert active_task.lane_assignment_id is None - assert len(active_task.potential_owners) == 2 - assert active_task.potential_owners == [finance_user_three, finance_user_four] + assert len(process_instance.active_human_tasks) == 1 + human_task = process_instance.active_human_tasks[0] + assert human_task.lane_assignment_id is None + assert len(human_task.potential_owners) 
== 2 + assert human_task.potential_owners == [finance_user_three, finance_user_four] spiff_task = processor.__class__.get_task_by_bpmn_identifier( - active_task.task_name, processor.bpmn_process_instance + human_task.task_name, processor.bpmn_process_instance ) with pytest.raises(UserDoesNotHaveAccessToTaskError): ProcessInstanceService.complete_form_task( - processor, spiff_task, {}, initiator_user, active_task + processor, spiff_task, {}, initiator_user, human_task ) g.user = finance_user_three ProcessInstanceService.complete_form_task( - processor, spiff_task, {}, finance_user_three, active_task + processor, spiff_task, {}, finance_user_three, human_task ) - assert len(process_instance.active_tasks) == 1 - active_task = process_instance.active_tasks[0] - assert active_task.lane_assignment_id is None - assert len(active_task.potential_owners) == 1 - assert active_task.potential_owners[0] == finance_user_four + assert human_task.completed_by_user_id == finance_user_three.id + assert len(process_instance.active_human_tasks) == 1 + human_task = process_instance.active_human_tasks[0] + assert human_task.lane_assignment_id is None + assert len(human_task.potential_owners) == 1 + assert human_task.potential_owners[0] == finance_user_four spiff_task = processor.__class__.get_task_by_bpmn_identifier( - active_task.task_name, processor.bpmn_process_instance + human_task.task_name, processor.bpmn_process_instance ) with pytest.raises(UserDoesNotHaveAccessToTaskError): ProcessInstanceService.complete_form_task( - processor, spiff_task, {}, initiator_user, active_task + processor, spiff_task, {}, initiator_user, human_task ) ProcessInstanceService.complete_form_task( - processor, spiff_task, {}, finance_user_four, active_task + processor, spiff_task, {}, finance_user_four, human_task ) - assert len(process_instance.active_tasks) == 1 - active_task = process_instance.active_tasks[0] - assert active_task.lane_assignment_id is None - assert len(active_task.potential_owners) == 
1 - assert active_task.potential_owners[0] == initiator_user + assert human_task.completed_by_user_id == finance_user_four.id + assert len(process_instance.active_human_tasks) == 1 + human_task = process_instance.active_human_tasks[0] + assert human_task.lane_assignment_id is None + assert len(human_task.potential_owners) == 1 + assert human_task.potential_owners[0] == initiator_user spiff_task = processor.__class__.get_task_by_bpmn_identifier( - active_task.task_name, processor.bpmn_process_instance + human_task.task_name, processor.bpmn_process_instance ) ProcessInstanceService.complete_form_task( - processor, spiff_task, {}, initiator_user, active_task + processor, spiff_task, {}, initiator_user, human_task ) - assert len(process_instance.active_tasks) == 1 - active_task = process_instance.active_tasks[0] + assert len(process_instance.active_human_tasks) == 1 + human_task = process_instance.active_human_tasks[0] spiff_task = processor.__class__.get_task_by_bpmn_identifier( - active_task.task_name, processor.bpmn_process_instance + human_task.task_name, processor.bpmn_process_instance ) with pytest.raises(UserDoesNotHaveAccessToTaskError): ProcessInstanceService.complete_form_task( - processor, spiff_task, {}, initiator_user, active_task + processor, spiff_task, {}, initiator_user, human_task ) ProcessInstanceService.complete_form_task( - processor, spiff_task, {}, testadmin1, active_task + processor, spiff_task, {}, testadmin1, human_task ) assert process_instance.status == ProcessInstanceStatus.complete.value - def test_does_not_recreate_active_tasks_on_multiple_saves( + def test_does_not_recreate_human_tasks_on_multiple_saves( self, app: Flask, client: FlaskClient, with_db_and_bpmn_file_cleanup: None, with_super_admin_user: UserModel, ) -> None: - """Test_sets_permission_correctly_on_active_task_when_using_dict.""" + """Test_does_not_recreate_human_tasks_on_multiple_saves.""" self.create_process_group( client, with_super_admin_user, "test_group", "test_group" 
) @@ -273,11 +285,11 @@ class TestProcessInstanceProcessor(BaseTest): ) processor = ProcessInstanceProcessor(process_instance) processor.do_engine_steps(save=True) - assert len(process_instance.active_tasks) == 1 - initial_active_task_id = process_instance.active_tasks[0].id + assert len(process_instance.active_human_tasks) == 1 + initial_human_task_id = process_instance.active_human_tasks[0].id - # save again to ensure we go attempt to process the active tasks again + # save again to ensure we go attempt to process the human tasks again processor.save() - assert len(process_instance.active_tasks) == 1 - assert initial_active_task_id == process_instance.active_tasks[0].id + assert len(process_instance.active_human_tasks) == 1 + assert initial_human_task_id == process_instance.active_human_tasks[0].id diff --git a/spiffworkflow-backend/tests/spiffworkflow_backend/unit/test_process_instance_report_service.py b/spiffworkflow-backend/tests/spiffworkflow_backend/unit/test_process_instance_report_service.py index 98412faa3..b40412ff8 100644 --- a/spiffworkflow-backend/tests/spiffworkflow_backend/unit/test_process_instance_report_service.py +++ b/spiffworkflow-backend/tests/spiffworkflow_backend/unit/test_process_instance_report_service.py @@ -3,8 +3,12 @@ from typing import Optional from flask import Flask from flask.testing import FlaskClient +from flask_bpmn.models.db import db from tests.spiffworkflow_backend.helpers.base_test import BaseTest +from tests.spiffworkflow_backend.helpers.test_data import load_test_spec +from spiffworkflow_backend.models.group import GroupModel +from spiffworkflow_backend.models.human_task import HumanTaskModel from spiffworkflow_backend.models.process_instance_report import ( ProcessInstanceReportModel, ) @@ -15,6 +19,7 @@ from spiffworkflow_backend.services.process_instance_report_service import ( from spiffworkflow_backend.services.process_instance_report_service import ( ProcessInstanceReportService, ) +from 
spiffworkflow_backend.services.user_service import UserService class TestProcessInstanceReportFilter(BaseTest): @@ -122,13 +127,13 @@ class TestProcessInstanceReportService(BaseTest): report_metadata=report_metadata, ) return ProcessInstanceReportService.filter_from_metadata_with_overrides( - report, - process_model_identifier, - start_from, - start_to, - end_from, - end_to, - process_status, + process_instance_report=report, + process_model_identifier=process_model_identifier, + start_from=start_from, + start_to=start_to, + end_from=end_from, + end_to=end_to, + process_status=process_status, ) def _filter_by_dict_from_metadata(self, report_metadata: dict) -> dict[str, str]: @@ -743,3 +748,387 @@ class TestProcessInstanceReportService(BaseTest): assert report_filter.end_from is None assert report_filter.end_to is None assert report_filter.process_status == ["sue"] + + def test_can_filter_by_completed_instances_initiated_by_me( + self, + app: Flask, + client: FlaskClient, + with_db_and_bpmn_file_cleanup: None, + ) -> None: + """Test_can_filter_by_completed_instances_initiated_by_me.""" + process_model_id = "runs_without_input/sample" + bpmn_file_location = "sample" + process_model = load_test_spec( + process_model_id, + process_model_source_directory=bpmn_file_location, + ) + user_one = self.find_or_create_user(username="user_one") + user_two = self.find_or_create_user(username="user_two") + + # Several processes to ensure they do not return in the result + self.create_process_instance_from_process_model( + process_model=process_model, status="complete", user=user_one + ) + self.create_process_instance_from_process_model( + process_model=process_model, status="complete", user=user_one + ) + self.create_process_instance_from_process_model( + process_model=process_model, status="waiting", user=user_one + ) + self.create_process_instance_from_process_model( + process_model=process_model, status="complete", user=user_two + ) + 
self.create_process_instance_from_process_model( + process_model=process_model, status="complete", user=user_two + ) + + process_instance_report = ProcessInstanceReportService.report_with_identifier( + user=user_one, + report_identifier="system_report_completed_instances_initiated_by_me", + ) + report_filter = ( + ProcessInstanceReportService.filter_from_metadata_with_overrides( + process_instance_report=process_instance_report, + process_model_identifier=process_model.id, + ) + ) + response_json = ProcessInstanceReportService.run_process_instance_report( + report_filter=report_filter, + process_instance_report=process_instance_report, + user=user_one, + ) + + assert len(response_json["results"]) == 2 + assert response_json["results"][0]["process_initiator_id"] == user_one.id + assert response_json["results"][1]["process_initiator_id"] == user_one.id + assert response_json["results"][0]["status"] == "complete" + assert response_json["results"][1]["status"] == "complete" + + def test_can_filter_by_completed_instances_with_tasks_completed_by_me( + self, + app: Flask, + client: FlaskClient, + with_db_and_bpmn_file_cleanup: None, + ) -> None: + """Test_can_filter_by_completed_instances_with_tasks_completed_by_me.""" + process_model_id = "runs_without_input/sample" + bpmn_file_location = "sample" + process_model = load_test_spec( + process_model_id, + process_model_source_directory=bpmn_file_location, + ) + user_one = self.find_or_create_user(username="user_one") + user_two = self.find_or_create_user(username="user_two") + + # Several processes to ensure they do not return in the result + process_instance_created_by_user_one_one = ( + self.create_process_instance_from_process_model( + process_model=process_model, status="complete", user=user_one + ) + ) + self.create_process_instance_from_process_model( + process_model=process_model, status="complete", user=user_one + ) + process_instance_created_by_user_one_three = ( + self.create_process_instance_from_process_model( + 
process_model=process_model, status="waiting", user=user_one + ) + ) + process_instance_created_by_user_two_one = ( + self.create_process_instance_from_process_model( + process_model=process_model, status="complete", user=user_two + ) + ) + self.create_process_instance_from_process_model( + process_model=process_model, status="complete", user=user_two + ) + self.create_process_instance_from_process_model( + process_model=process_model, status="waiting", user=user_two + ) + + human_task_for_user_one_one = HumanTaskModel( + process_instance_id=process_instance_created_by_user_one_one.id, + completed_by_user_id=user_one.id, + ) + human_task_for_user_one_two = HumanTaskModel( + process_instance_id=process_instance_created_by_user_two_one.id, + completed_by_user_id=user_one.id, + ) + human_task_for_user_one_three = HumanTaskModel( + process_instance_id=process_instance_created_by_user_one_three.id, + completed_by_user_id=user_one.id, + ) + human_task_for_user_two_one = HumanTaskModel( + process_instance_id=process_instance_created_by_user_one_one.id, + completed_by_user_id=user_two.id, + ) + human_task_for_user_two_two = HumanTaskModel( + process_instance_id=process_instance_created_by_user_two_one.id, + completed_by_user_id=user_two.id, + ) + human_task_for_user_two_three = HumanTaskModel( + process_instance_id=process_instance_created_by_user_one_three.id, + completed_by_user_id=user_two.id, + ) + db.session.add(human_task_for_user_one_one) + db.session.add(human_task_for_user_one_two) + db.session.add(human_task_for_user_one_three) + db.session.add(human_task_for_user_two_one) + db.session.add(human_task_for_user_two_two) + db.session.add(human_task_for_user_two_three) + db.session.commit() + + process_instance_report = ProcessInstanceReportService.report_with_identifier( + user=user_one, + report_identifier=( + "system_report_completed_instances_with_tasks_completed_by_me" + ), + ) + report_filter = ( + 
ProcessInstanceReportService.filter_from_metadata_with_overrides( + process_instance_report=process_instance_report, + process_model_identifier=process_model.id, + ) + ) + response_json = ProcessInstanceReportService.run_process_instance_report( + report_filter=report_filter, + process_instance_report=process_instance_report, + user=user_one, + ) + + assert len(response_json["results"]) == 1 + assert response_json["results"][0]["process_initiator_id"] == user_two.id + assert ( + response_json["results"][0]["id"] + == process_instance_created_by_user_two_one.id + ) + assert response_json["results"][0]["status"] == "complete" + + def test_can_filter_by_completed_instances_with_tasks_completed_by_my_groups( + self, + app: Flask, + client: FlaskClient, + with_db_and_bpmn_file_cleanup: None, + ) -> None: + """Test_can_filter_by_completed_instances_with_tasks_completed_by_my_groups.""" + process_model_id = "runs_without_input/sample" + bpmn_file_location = "sample" + process_model = load_test_spec( + process_model_id, + process_model_source_directory=bpmn_file_location, + ) + user_group_one = GroupModel(identifier="group_one") + user_group_two = GroupModel(identifier="group_two") + db.session.add(user_group_one) + db.session.add(user_group_two) + db.session.commit() + + user_one = self.find_or_create_user(username="user_one") + user_two = self.find_or_create_user(username="user_two") + user_three = self.find_or_create_user(username="user_three") + UserService.add_user_to_group(user_one, user_group_one) + UserService.add_user_to_group(user_two, user_group_one) + UserService.add_user_to_group(user_three, user_group_two) + + # Several processes to ensure they do not return in the result + process_instance_created_by_user_one_one = ( + self.create_process_instance_from_process_model( + process_model=process_model, status="complete", user=user_one + ) + ) + self.create_process_instance_from_process_model( + process_model=process_model, status="complete", user=user_one + ) + 
process_instance_created_by_user_one_three = ( + self.create_process_instance_from_process_model( + process_model=process_model, status="waiting", user=user_one + ) + ) + process_instance_created_by_user_two_one = ( + self.create_process_instance_from_process_model( + process_model=process_model, status="complete", user=user_two + ) + ) + self.create_process_instance_from_process_model( + process_model=process_model, status="complete", user=user_two + ) + self.create_process_instance_from_process_model( + process_model=process_model, status="waiting", user=user_two + ) + + human_task_for_user_group_one_one = HumanTaskModel( + process_instance_id=process_instance_created_by_user_one_one.id, + lane_assignment_id=user_group_one.id, + ) + human_task_for_user_group_one_two = HumanTaskModel( + process_instance_id=process_instance_created_by_user_one_three.id, + lane_assignment_id=user_group_one.id, + ) + human_task_for_user_group_one_three = HumanTaskModel( + process_instance_id=process_instance_created_by_user_two_one.id, + lane_assignment_id=user_group_one.id, + ) + human_task_for_user_group_two_one = HumanTaskModel( + process_instance_id=process_instance_created_by_user_two_one.id, + lane_assignment_id=user_group_two.id, + ) + human_task_for_user_group_two_two = HumanTaskModel( + process_instance_id=process_instance_created_by_user_one_one.id, + lane_assignment_id=user_group_two.id, + ) + db.session.add(human_task_for_user_group_one_one) + db.session.add(human_task_for_user_group_one_two) + db.session.add(human_task_for_user_group_one_three) + db.session.add(human_task_for_user_group_two_one) + db.session.add(human_task_for_user_group_two_two) + db.session.commit() + + process_instance_report = ProcessInstanceReportService.report_with_identifier( + user=user_one, + report_identifier=( + "system_report_completed_instances_with_tasks_completed_by_my_groups" + ), + ) + report_filter = ( + ProcessInstanceReportService.filter_from_metadata_with_overrides( + 
process_instance_report=process_instance_report, + process_model_identifier=process_model.id, + ) + ) + response_json = ProcessInstanceReportService.run_process_instance_report( + report_filter=report_filter, + process_instance_report=process_instance_report, + user=user_one, + ) + + assert len(response_json["results"]) == 2 + assert response_json["results"][0]["process_initiator_id"] == user_two.id + assert ( + response_json["results"][0]["id"] + == process_instance_created_by_user_two_one.id + ) + assert response_json["results"][0]["status"] == "complete" + assert response_json["results"][1]["process_initiator_id"] == user_one.id + assert ( + response_json["results"][1]["id"] + == process_instance_created_by_user_one_one.id + ) + assert response_json["results"][1]["status"] == "complete" + + def test_can_filter_by_with_relation_to_me( + self, + app: Flask, + client: FlaskClient, + with_db_and_bpmn_file_cleanup: None, + ) -> None: + """Test_can_filter_by_with_relation_to_me.""" + process_model_id = "runs_without_input/sample" + bpmn_file_location = "sample" + process_model = load_test_spec( + process_model_id, + process_model_source_directory=bpmn_file_location, + ) + user_group_one = GroupModel(identifier="group_one") + user_group_two = GroupModel(identifier="group_two") + db.session.add(user_group_one) + db.session.add(user_group_two) + db.session.commit() + + user_one = self.find_or_create_user(username="user_one") + user_two = self.find_or_create_user(username="user_two") + user_three = self.find_or_create_user(username="user_three") + UserService.add_user_to_group(user_one, user_group_one) + UserService.add_user_to_group(user_two, user_group_one) + UserService.add_user_to_group(user_three, user_group_two) + + # Several processes to ensure they do not return in the result + process_instance_created_by_user_one_one = ( + self.create_process_instance_from_process_model( + process_model=process_model, status="complete", user=user_one + ) + ) + 
process_instance_created_by_user_one_two = ( + self.create_process_instance_from_process_model( + process_model=process_model, status="complete", user=user_one + ) + ) + process_instance_created_by_user_one_three = ( + self.create_process_instance_from_process_model( + process_model=process_model, status="waiting", user=user_one + ) + ) + process_instance_created_by_user_two_one = ( + self.create_process_instance_from_process_model( + process_model=process_model, status="complete", user=user_two + ) + ) + self.create_process_instance_from_process_model( + process_model=process_model, status="complete", user=user_two + ) + self.create_process_instance_from_process_model( + process_model=process_model, status="waiting", user=user_two + ) + + human_task_for_user_group_one_one = HumanTaskModel( + process_instance_id=process_instance_created_by_user_one_one.id, + lane_assignment_id=user_group_one.id, + ) + human_task_for_user_group_one_two = HumanTaskModel( + process_instance_id=process_instance_created_by_user_one_three.id, + lane_assignment_id=user_group_one.id, + ) + human_task_for_user_group_one_three = HumanTaskModel( + process_instance_id=process_instance_created_by_user_two_one.id, + lane_assignment_id=user_group_one.id, + ) + human_task_for_user_group_two_one = HumanTaskModel( + process_instance_id=process_instance_created_by_user_two_one.id, + lane_assignment_id=user_group_two.id, + ) + human_task_for_user_group_two_two = HumanTaskModel( + process_instance_id=process_instance_created_by_user_one_one.id, + lane_assignment_id=user_group_two.id, + ) + db.session.add(human_task_for_user_group_one_one) + db.session.add(human_task_for_user_group_one_two) + db.session.add(human_task_for_user_group_one_three) + db.session.add(human_task_for_user_group_two_one) + db.session.add(human_task_for_user_group_two_two) + db.session.commit() + + UserService.add_user_to_human_tasks_if_appropriate(user_one) + + process_instance_report = 
ProcessInstanceReportService.report_with_identifier( + user=user_one + ) + report_filter = ( + ProcessInstanceReportService.filter_from_metadata_with_overrides( + process_instance_report=process_instance_report, + process_model_identifier=process_model.id, + with_relation_to_me=True, + ) + ) + response_json = ProcessInstanceReportService.run_process_instance_report( + report_filter=report_filter, + process_instance_report=process_instance_report, + user=user_one, + ) + + assert len(response_json["results"]) == 4 + process_instance_ids_in_results = [r["id"] for r in response_json["results"]] + assert ( + process_instance_created_by_user_one_one.id + in process_instance_ids_in_results + ) + assert ( + process_instance_created_by_user_one_two.id + in process_instance_ids_in_results + ) + assert ( + process_instance_created_by_user_one_three.id + in process_instance_ids_in_results + ) + assert ( + process_instance_created_by_user_two_one.id + in process_instance_ids_in_results + ) diff --git a/spiffworkflow-backend/tests/spiffworkflow_backend/unit/test_user_service.py b/spiffworkflow-backend/tests/spiffworkflow_backend/unit/test_user_service.py new file mode 100644 index 000000000..959975d5b --- /dev/null +++ b/spiffworkflow-backend/tests/spiffworkflow_backend/unit/test_user_service.py @@ -0,0 +1,54 @@ +"""Process Model.""" +from flask.app import Flask +from flask.testing import FlaskClient +from tests.spiffworkflow_backend.helpers.base_test import BaseTest + +from spiffworkflow_backend.models.user_group_assignment_waiting import ( + UserGroupAssignmentWaitingModel, +) +from spiffworkflow_backend.services.group_service import GroupService +from spiffworkflow_backend.services.user_service import UserService + + +class TestUserService(BaseTest): + """TestUserService.""" + + def test_assigning_a_group_to_a_user_before_the_user_is_created( + self, + app: Flask, + client: FlaskClient, + with_db_and_bpmn_file_cleanup: None, + ) -> None: + 
"""Test_waiting_group_assignments.""" + a_test_group = GroupService.find_or_create_group("aTestGroup") + UserService.add_waiting_group_assignment("initiator_user", a_test_group) + initiator_user = self.find_or_create_user("initiator_user") + assert initiator_user.groups[0] == a_test_group + + def test_assigning_a_group_to_all_users_updates_new_users( + self, + app: Flask, + client: FlaskClient, + with_db_and_bpmn_file_cleanup: None, + ) -> None: + """Test_waiting_group_assignments.""" + everybody_group = GroupService.find_or_create_group("everybodyGroup") + UserService.add_waiting_group_assignment( + UserGroupAssignmentWaitingModel.MATCH_ALL_USERS, everybody_group + ) + initiator_user = self.find_or_create_user("initiator_user") + assert initiator_user.groups[0] == everybody_group + + def test_assigning_a_group_to_all_users_updates_existing_users( + self, + app: Flask, + client: FlaskClient, + with_db_and_bpmn_file_cleanup: None, + ) -> None: + """Test_waiting_group_assignments.""" + initiator_user = self.find_or_create_user("initiator_user") + everybody_group = GroupService.find_or_create_group("everybodyGroup") + UserService.add_waiting_group_assignment( + UserGroupAssignmentWaitingModel.MATCH_ALL_USERS, everybody_group + ) + assert initiator_user.groups[0] == everybody_group diff --git a/spiffworkflow-frontend/.gitignore b/spiffworkflow-frontend/.gitignore index 8ff3e35ce..c0316f7ea 100644 --- a/spiffworkflow-frontend/.gitignore +++ b/spiffworkflow-frontend/.gitignore @@ -8,6 +8,9 @@ # testing /coverage +# in case we accidentally run backend tests in frontend. 
:D +/.coverage.* + # production /build diff --git a/spiffworkflow-frontend/bin/collect_cypress_stats b/spiffworkflow-frontend/bin/collect_cypress_stats new file mode 100755 index 000000000..150efc800 --- /dev/null +++ b/spiffworkflow-frontend/bin/collect_cypress_stats @@ -0,0 +1,48 @@ +#!/usr/bin/env bash + +function error_handler() { + >&2 echo "Exited with BAD EXIT CODE '${2}' in ${0} script at line: ${1}." + exit "$2" +} +trap 'error_handler ${LINENO} $?' ERR +set -o errtrace -o errexit -o nounset -o pipefail + +# see also: npx cypress run --env grep="can filter",grepFilterSpecs=true +# https://github.com/cypress-io/cypress/tree/develop/npm/grep#pre-filter-specs-grepfilterspecs + +iterations="${1:-10}" + +test_case_matches="$(rg '^ it\(')" + +stats_file="/var/tmp/cypress_stats.txt" + +function run_all_test_cases() { + local stat_index="$1" + + pushd "$NO_TERM_LIMITS_PROJECTS_DIR/github/sartography/sample-process-models" + gitc + popd + + while read -r test_case_line; do + test_case_file="$(awk -F: '{print $1}' <<< "$test_case_line")" + test_case_name_side="$(awk -F: '{print $2}' <<< "$test_case_line")" + test_case_name=$(hot_sed -E "s/^\s+it\('(.+)'.*/\1/" <<< "$test_case_name_side") + echo "running test case: $test_case_file::$test_case_name" + if ./node_modules/.bin/cypress run --e2e --browser chrome --spec "$test_case_file" --env grep="$test_case_name"; then + echo "$stat_index:::$test_case_file:::$test_case_name: PASS" >> "$stats_file" + else + echo "$stat_index:::$test_case_file:::$test_case_name: FAIL" >> "$stats_file" + fi + done <<< "$test_case_matches" +} + +# clear the stats file +echo > "$stats_file" + +for ((global_stat_index=1;global_stat_index<=$iterations;global_stat_index++)); do +# for global_stat_index in {1..$iterations}; do + run_all_test_cases "$global_stat_index" +done + +# prints summary of most-failing test cases +grep FAIL "$stats_file" | awk -F ':::' '{for (i=2; i { + const filesToDelete = [] + on('after:spec', (_spec, results) => { + 
if (results.stats.failures === 0 && results.video) { + filesToDelete.push(results.video) + } + }) + on('after:run', async () => { + if (filesToDelete.length) { + console.log( + 'after:run hook: Deleting %d video(s) from successful specs', + filesToDelete.length + ) + await Promise.all(filesToDelete.map((videoFile) => rm(videoFile))) + } + }) +} module.exports = defineConfig({ projectId: 'crax1q', + + // since it's slow + videoCompression: useVideoCompression, + + videoUploadOnPasses: false, chromeWebSecurity: false, e2e: { baseUrl: 'http://localhost:7001', - setupNodeEvents(_on, config) { + setupNodeEvents(on, config) { + deleteVideosOnSuccess(on) require('@cypress/grep/src/plugin')(config); return config; }, diff --git a/spiffworkflow-frontend/cypress/e2e/process_groups.cy.js b/spiffworkflow-frontend/cypress/e2e/process_groups.cy.js index bef0e5603..e10c48575 100644 --- a/spiffworkflow-frontend/cypress/e2e/process_groups.cy.js +++ b/spiffworkflow-frontend/cypress/e2e/process_groups.cy.js @@ -30,7 +30,10 @@ describe('process-groups', () => { .find('.cds--btn--danger') .click(); cy.url().should('include', `process-groups`); - cy.contains(groupId).should('not.exist'); + cy.contains(newGroupDisplayName).should('not.exist'); + + // meaning the process group list page is loaded, so we can sign out safely without worrying about ajax requests failing + cy.get('.tile-process-group-content-container').should('exist'); }); // process groups no longer has pagination post-tiles diff --git a/spiffworkflow-frontend/cypress/e2e/process_instances.cy.js b/spiffworkflow-frontend/cypress/e2e/process_instances.cy.js index 4d33d13f1..e582dcbba 100644 --- a/spiffworkflow-frontend/cypress/e2e/process_instances.cy.js +++ b/spiffworkflow-frontend/cypress/e2e/process_instances.cy.js @@ -68,8 +68,7 @@ describe('process-instances', () => { cy.login(); cy.navigateToProcessModel( 'Acceptance Tests Group One', - 'Acceptance Tests Model 1', - 'acceptance-tests-model-1' + 'Acceptance Tests Model 
1' ); }); afterEach(() => { @@ -80,6 +79,7 @@ describe('process-instances', () => { const originalDmnOutputForKevin = 'Very wonderful'; const newDmnOutputForKevin = 'The new wonderful'; const dmnOutputForDan = 'pretty wonderful'; + const acceptanceTestOneDisplayName = 'Acceptance Tests Model 1'; const originalPythonScript = 'person = "Kevin"'; const newPythonScript = 'person = "Dan"'; @@ -95,13 +95,13 @@ describe('process-instances', () => { cy.getBySel(`edit-file-${dmnFile.replace('.', '-')}`).click(); updateDmnText(originalDmnOutputForKevin, newDmnOutputForKevin); - cy.contains('acceptance-tests-model-1').click(); + cy.contains(acceptanceTestOneDisplayName).click(); cy.runPrimaryBpmnFile(); cy.getBySel('files-accordion').click(); cy.getBySel(`edit-file-${dmnFile.replace('.', '-')}`).click(); updateDmnText(newDmnOutputForKevin, originalDmnOutputForKevin); - cy.contains('acceptance-tests-model-1').click(); + cy.contains(acceptanceTestOneDisplayName).click(); cy.runPrimaryBpmnFile(); // Change bpmn @@ -109,13 +109,13 @@ describe('process-instances', () => { cy.getBySel(`edit-file-${bpmnFile.replace('.', '-')}`).click(); cy.contains(`Process Model File: ${bpmnFile}`); updateBpmnPythonScript(newPythonScript); - cy.contains('acceptance-tests-model-1').click(); + cy.contains(acceptanceTestOneDisplayName).click(); cy.runPrimaryBpmnFile(); cy.getBySel('files-accordion').click(); cy.getBySel(`edit-file-${bpmnFile.replace('.', '-')}`).click(); updateBpmnPythonScript(originalPythonScript); - cy.contains('acceptance-tests-model-1').click(); + cy.contains(acceptanceTestOneDisplayName).click(); cy.runPrimaryBpmnFile(); }); @@ -160,6 +160,7 @@ describe('process-instances', () => { cy.getBySel('process-instance-list-link').click(); cy.getBySel('process-instance-show-link').first().click(); cy.getBySel('process-instance-log-list-link').click(); + cy.getBySel('process-instance-log-detailed').click(); cy.contains('process_model_one'); cy.contains('State change to COMPLETED'); 
cy.basicPaginationTest(); @@ -167,6 +168,8 @@ describe('process-instances', () => { it('can filter', () => { cy.getBySel('process-instance-list-link').click(); + cy.getBySel('process-instance-list-all').click(); + cy.contains('All Process Instances'); cy.assertAtLeastOneItemInPaginatedResults(); const statusSelect = '#process-instance-status-select'; @@ -174,6 +177,7 @@ describe('process-instances', () => { if (!['all', 'waiting'].includes(processStatus)) { cy.get(statusSelect).click(); cy.get(statusSelect).contains(processStatus).click(); + cy.get(statusSelect).click(); cy.getBySel('filter-button').click(); // FIXME: wait a little bit for the useEffects to be able to fully set processInstanceFilters cy.wait(1000); diff --git a/spiffworkflow-frontend/cypress/e2e/process_models.cy.js b/spiffworkflow-frontend/cypress/e2e/process_models.cy.js index 4fd1b4810..cdb334ef5 100644 --- a/spiffworkflow-frontend/cypress/e2e/process_models.cy.js +++ b/spiffworkflow-frontend/cypress/e2e/process_models.cy.js @@ -1,4 +1,5 @@ import { modifyProcessIdentifierForPathParam } from '../../src/helpers'; +import { miscDisplayName } from '../support/helpers'; describe('process-models', () => { beforeEach(() => { @@ -16,7 +17,7 @@ describe('process-models', () => { const modelDisplayName = `Test Model 2 ${id}`; const modelId = `test-model-2-${id}`; const newModelDisplayName = `${modelDisplayName} edited`; - cy.contains('99-Shared Resources').click(); + cy.contains(miscDisplayName).click(); cy.wait(500); cy.contains(groupDisplayName).click(); cy.createModel(groupId, modelId, modelDisplayName); @@ -34,7 +35,7 @@ describe('process-models', () => { cy.contains(`Process Model: ${newModelDisplayName}`); // go back to process model show by clicking on the breadcrumb - cy.contains(modelId).click(); + cy.contains(modelDisplayName).click(); cy.getBySel('delete-process-model-button').click(); cy.contains('Are you sure'); @@ -46,9 +47,10 @@ describe('process-models', () => { 
`process-groups/${modifyProcessIdentifierForPathParam(groupId)}` ); cy.contains(modelId).should('not.exist'); + cy.contains(modelDisplayName).should('not.exist'); }); - it('can create new bpmn, dmn, and json files', () => { + it('can create new bpmn and dmn and json files', () => { const uuid = () => Cypress._.random(0, 1e6); const id = uuid(); const directParentGroupId = 'acceptance-tests-group-one'; @@ -61,11 +63,11 @@ describe('process-models', () => { const dmnFileName = `dmn_test_file_${id}`; const jsonFileName = `json_test_file_${id}`; - cy.contains('99-Shared Resources').click(); + cy.contains(miscDisplayName).click(); cy.wait(500); cy.contains(groupDisplayName).click(); cy.createModel(groupId, modelId, modelDisplayName); - cy.contains(directParentGroupId).click(); + cy.contains(groupDisplayName).click(); cy.contains(modelDisplayName).click(); cy.url().should( 'include', @@ -90,7 +92,7 @@ describe('process-models', () => { cy.get('input[name=file_name]').type(bpmnFileName); cy.contains('Save Changes').click(); cy.contains(`Process Model File: ${bpmnFileName}`); - cy.contains(modelId).click(); + cy.contains(modelDisplayName).click(); cy.contains(`Process Model: ${modelDisplayName}`); // cy.getBySel('files-accordion').click(); cy.contains(`${bpmnFileName}.bpmn`).should('exist'); @@ -108,7 +110,7 @@ describe('process-models', () => { cy.get('input[name=file_name]').type(dmnFileName); cy.contains('Save Changes').click(); cy.contains(`Process Model File: ${dmnFileName}`); - cy.contains(modelId).click(); + cy.contains(modelDisplayName).click(); cy.contains(`Process Model: ${modelDisplayName}`); // cy.getBySel('files-accordion').click(); cy.contains(`${dmnFileName}.dmn`).should('exist'); @@ -124,7 +126,7 @@ describe('process-models', () => { cy.contains(`Process Model File: ${jsonFileName}`); // wait for json to load before clicking away to avoid network errors cy.wait(500); - cy.contains(modelId).click(); + cy.contains(modelDisplayName).click(); 
cy.contains(`Process Model: ${modelDisplayName}`); // cy.getBySel('files-accordion').click(); cy.contains(`${jsonFileName}.json`).should('exist'); @@ -140,6 +142,9 @@ describe('process-models', () => { ); cy.contains(modelId).should('not.exist'); cy.contains(modelDisplayName).should('not.exist'); + + // we go back to the parent process group after deleting the model + cy.get('.tile-process-group-content-container').should('exist'); }); it('can upload and run a bpmn file', () => { @@ -151,12 +156,12 @@ describe('process-models', () => { const modelDisplayName = `Test Model 2 ${id}`; const modelId = `test-model-2-${id}`; cy.contains('Add a process group'); - cy.contains('99-Shared Resources').click(); + cy.contains(miscDisplayName).click(); cy.wait(500); cy.contains(groupDisplayName).click(); cy.createModel(groupId, modelId, modelDisplayName); - cy.contains(`${directParentGroupId}`).click(); + cy.contains(`${groupDisplayName}`).click(); cy.contains('Add a process model'); cy.contains(modelDisplayName).click(); cy.url().should( @@ -186,7 +191,7 @@ describe('process-models', () => { .click(); // in breadcrumb - cy.contains(modelId).click(); + cy.contains(modelDisplayName).click(); cy.getBySel('delete-process-model-button').click(); cy.contains('Are you sure'); @@ -203,7 +208,7 @@ describe('process-models', () => { // process models no longer has pagination post-tiles // it.only('can paginate items', () => { - // cy.contains('99-Shared Resources').click(); + // cy.contains(miscDisplayName).click(); // cy.wait(500); // cy.contains('Acceptance Tests Group One').click(); // cy.basicPaginationTest(); diff --git a/spiffworkflow-frontend/cypress/e2e/tasks.cy.js b/spiffworkflow-frontend/cypress/e2e/tasks.cy.js index e58566b8c..922c42091 100644 --- a/spiffworkflow-frontend/cypress/e2e/tasks.cy.js +++ b/spiffworkflow-frontend/cypress/e2e/tasks.cy.js @@ -13,11 +13,10 @@ const checkTaskHasClass = (taskName, className) => { 
cy.get(`g[data-element-id=${taskName}]`).should('have.class', className); }; -const kickOffModelWithForm = (modelId, formName) => { +const kickOffModelWithForm = () => { cy.navigateToProcessModel( 'Acceptance Tests Group One', - 'Acceptance Tests Model 2', - 'acceptance-tests-model-2' + 'Acceptance Tests Model 2' ); cy.runPrimaryBpmnFile(true); }; @@ -32,12 +31,11 @@ describe('tasks', () => { it('can complete and navigate a form', () => { const groupDisplayName = 'Acceptance Tests Group One'; - const modelId = `acceptance-tests-model-2`; const modelDisplayName = `Acceptance Tests Model 2`; const completedTaskClassName = 'completed-task-highlight'; const activeTaskClassName = 'active-task-highlight'; - cy.navigateToProcessModel(groupDisplayName, modelDisplayName, modelId); + cy.navigateToProcessModel(groupDisplayName, modelDisplayName); cy.runPrimaryBpmnFile(true); submitInputIntoFormField( @@ -71,7 +69,7 @@ describe('tasks', () => { ); cy.contains('Task: get_user_generated_number_four'); - cy.navigateToProcessModel(groupDisplayName, modelDisplayName, modelId); + cy.navigateToProcessModel(groupDisplayName, modelDisplayName); cy.getBySel('process-instance-list-link').click(); cy.assertAtLeastOneItemInPaginatedResults(); @@ -94,7 +92,7 @@ describe('tasks', () => { cy.contains('Tasks').should('exist'); // FIXME: this will probably need a better way to link to the proper form that we want - cy.contains('Complete Task').click(); + cy.contains('Go').click(); submitInputIntoFormField( 'get_user_generated_number_four', @@ -103,7 +101,7 @@ describe('tasks', () => { ); cy.url().should('include', '/tasks'); - cy.navigateToProcessModel(groupDisplayName, modelDisplayName, modelId); + cy.navigateToProcessModel(groupDisplayName, modelDisplayName); cy.getBySel('process-instance-list-link').click(); cy.assertAtLeastOneItemInPaginatedResults(); @@ -122,6 +120,6 @@ describe('tasks', () => { kickOffModelWithForm(); cy.navigateToHome(); - cy.basicPaginationTest(); + 
cy.basicPaginationTest('process-instance-show-link'); }); }); diff --git a/spiffworkflow-frontend/cypress/support/commands.js b/spiffworkflow-frontend/cypress/support/commands.js index f0034168c..838978600 100644 --- a/spiffworkflow-frontend/cypress/support/commands.js +++ b/spiffworkflow-frontend/cypress/support/commands.js @@ -1,5 +1,6 @@ import { string } from 'prop-types'; import { modifyProcessIdentifierForPathParam } from '../../src/helpers'; +import { miscDisplayName } from './helpers'; // *********************************************** // This example commands.js shows you how to @@ -86,25 +87,27 @@ Cypress.Commands.add('createModel', (groupId, modelId, modelDisplayName) => { Cypress.Commands.add( 'runPrimaryBpmnFile', (expectAutoRedirectToHumanTask = false) => { - cy.contains('Run').click(); + cy.contains('Start').click(); if (expectAutoRedirectToHumanTask) { // the url changes immediately, so also make sure we get some content from the next page, "Task:", or else when we try to interact with the page, it'll re-render and we'll get an error with cypress. 
cy.url().should('include', `/tasks/`); cy.contains('Task: '); } else { - cy.contains(/Process Instance.*kicked off/); + cy.contains(/Process Instance.*[kK]icked [oO]ff/); cy.reload(true); - cy.contains(/Process Instance.*kicked off/).should('not.exist'); + cy.contains('Process Model:').should('exist'); + cy.contains(/Process Instance.*[kK]icked [oO]ff/).should('not.exist'); + cy.contains('[data-qa=process-model-show-permissions-loaded]', 'true'); } } ); Cypress.Commands.add( 'navigateToProcessModel', - (groupDisplayName, modelDisplayName, modelIdentifier) => { + (groupDisplayName, modelDisplayName) => { cy.navigateToAdmin(); - cy.contains('99-Shared Resources').click(); - cy.contains(`Process Group: 99-Shared Resources`, { timeout: 10000 }); + cy.contains(miscDisplayName).click(); + cy.contains(`Process Group: ${miscDisplayName}`, { timeout: 10000 }); cy.contains(groupDisplayName).click(); cy.contains(`Process Group: ${groupDisplayName}`); // https://stackoverflow.com/q/51254946/6090676 @@ -113,17 +116,33 @@ Cypress.Commands.add( } ); -Cypress.Commands.add('basicPaginationTest', () => { - cy.getBySel('pagination-options').scrollIntoView(); - cy.get('.cds--select__item-count').find('.cds--select-input').select('2'); +Cypress.Commands.add( + 'basicPaginationTest', + (dataQaTagToUseToEnsureTableHasLoaded = 'paginated-entity-id') => { + cy.getBySel('pagination-options').scrollIntoView(); + cy.get('.cds--select__item-count').find('.cds--select-input').select('2'); - // NOTE: this is a em dash instead of en dash - cy.contains(/\b1–2 of \d+/); - cy.get('.cds--pagination__button--forward').click(); - cy.contains(/\b3–4 of \d+/); - cy.get('.cds--pagination__button--backward').click(); - cy.contains(/\b1–2 of \d+/); -}); + // NOTE: this is a em dash instead of en dash + cy.contains(/\b1–2 of \d+/); + + // ok, trying to ensure that we have everything loaded before we leave this + // function and try to sign out. 
Just showing results 1-2 of blah is not good enough, + // since the ajax request may not have finished yet. + // to be sure it's finished, grab the log id from page 1. remember it. + // then use the magical contains command that waits for the element to exist AND + // for that element to contain the text we're looking for. + cy.getBySel(dataQaTagToUseToEnsureTableHasLoaded) + .first() + .then(($element) => { + const oldId = $element.text().trim(); + cy.get('.cds--pagination__button--forward').click(); + cy.contains(/\b3–4 of \d+/); + cy.get('.cds--pagination__button--backward').click(); + cy.contains(/\b1–2 of \d+/); + cy.contains(`[data-qa=${dataQaTagToUseToEnsureTableHasLoaded}]`, oldId); + }); + } +); Cypress.Commands.add('assertAtLeastOneItemInPaginatedResults', () => { cy.contains(/\b[1-9]\d*–[1-9]\d* of [1-9]\d*/); diff --git a/spiffworkflow-frontend/cypress/support/helpers.js b/spiffworkflow-frontend/cypress/support/helpers.js new file mode 100644 index 000000000..b3ae449ed --- /dev/null +++ b/spiffworkflow-frontend/cypress/support/helpers.js @@ -0,0 +1 @@ +export const miscDisplayName = 'Shared Resources'; diff --git a/spiffworkflow-frontend/package-lock.json b/spiffworkflow-frontend/package-lock.json index ba2339983..4ccea1922 100644 --- a/spiffworkflow-frontend/package-lock.json +++ b/spiffworkflow-frontend/package-lock.json @@ -68,7 +68,7 @@ "@cypress/grep": "^3.1.0", "@typescript-eslint/eslint-plugin": "^5.30.5", "@typescript-eslint/parser": "^5.30.6", - "cypress": "^10.8.0", + "cypress": "^12", "eslint": "^8.19.0", "eslint_d": "^12.2.0", "eslint-config-airbnb": "^19.0.4", @@ -9850,9 +9850,9 @@ "integrity": "sha512-NJGVKPS81XejHcLhaLJS7plab0fK3slPh11mESeeDq2W4ZI5kUKK/LRRdVDvjJseojbPB7ZwjnyOybg3Igea/A==" }, "node_modules/cypress": { - "version": "10.11.0", - "resolved": "https://registry.npmjs.org/cypress/-/cypress-10.11.0.tgz", - "integrity": "sha512-lsaE7dprw5DoXM00skni6W5ElVVLGAdRUUdZjX2dYsGjbY/QnpzWZ95Zom1mkGg0hAaO/QVTZoFVS7Jgr/GUPA==", + "version": 
"12.1.0", + "resolved": "https://registry.npmjs.org/cypress/-/cypress-12.1.0.tgz", + "integrity": "sha512-7fz8N84uhN1+ePNDsfQvoWEl4P3/VGKKmAg+bJQFY4onhA37Ys+6oBkGbNdwGeC7n2QqibNVPhk8x3YuQLwzfw==", "dev": true, "hasInstallScript": true, "dependencies": { @@ -9903,7 +9903,7 @@ "cypress": "bin/cypress" }, "engines": { - "node": ">=12.0.0" + "node": "^14.0.0 || ^16.0.0 || >=18.0.0" } }, "node_modules/cypress/node_modules/@types/node": { @@ -38586,9 +38586,9 @@ "integrity": "sha512-NJGVKPS81XejHcLhaLJS7plab0fK3slPh11mESeeDq2W4ZI5kUKK/LRRdVDvjJseojbPB7ZwjnyOybg3Igea/A==" }, "cypress": { - "version": "10.11.0", - "resolved": "https://registry.npmjs.org/cypress/-/cypress-10.11.0.tgz", - "integrity": "sha512-lsaE7dprw5DoXM00skni6W5ElVVLGAdRUUdZjX2dYsGjbY/QnpzWZ95Zom1mkGg0hAaO/QVTZoFVS7Jgr/GUPA==", + "version": "12.1.0", + "resolved": "https://registry.npmjs.org/cypress/-/cypress-12.1.0.tgz", + "integrity": "sha512-7fz8N84uhN1+ePNDsfQvoWEl4P3/VGKKmAg+bJQFY4onhA37Ys+6oBkGbNdwGeC7n2QqibNVPhk8x3YuQLwzfw==", "dev": true, "requires": { "@cypress/request": "^2.88.10", diff --git a/spiffworkflow-frontend/package.json b/spiffworkflow-frontend/package.json index b896bdcec..6a84cea9e 100644 --- a/spiffworkflow-frontend/package.json +++ b/spiffworkflow-frontend/package.json @@ -104,7 +104,7 @@ "@cypress/grep": "^3.1.0", "@typescript-eslint/eslint-plugin": "^5.30.5", "@typescript-eslint/parser": "^5.30.6", - "cypress": "^10.8.0", + "cypress": "^12", "eslint": "^8.19.0", "eslint_d": "^12.2.0", "eslint-config-airbnb": "^19.0.4", diff --git a/spiffworkflow-frontend/public/index.html b/spiffworkflow-frontend/public/index.html index ae3a23076..1a7cafa95 100644 --- a/spiffworkflow-frontend/public/index.html +++ b/spiffworkflow-frontend/public/index.html @@ -7,7 +7,7 @@ - spiffworkflow-frontend + SpiffWorkflow diff --git a/spiffworkflow-frontend/src/App.tsx b/spiffworkflow-frontend/src/App.tsx index deb38410d..ecf9fc54c 100644 --- a/spiffworkflow-frontend/src/App.tsx +++ 
b/spiffworkflow-frontend/src/App.tsx @@ -13,42 +13,24 @@ import AdminRoutes from './routes/AdminRoutes'; import { ErrorForDisplay } from './interfaces'; import { AbilityContext } from './contexts/Can'; +import UserService from './services/UserService'; +import ErrorDisplay from './components/ErrorDisplay'; export default function App() { - const [errorMessage, setErrorMessage] = useState( - null - ); + const [errorObject, setErrorObject] = useState(null); const errorContextValueArray = useMemo( - () => [errorMessage, setErrorMessage], - [errorMessage] + () => [errorObject, setErrorObject], + [errorObject] ); - const ability = defineAbility(() => {}); - - let errorTag = null; - if (errorMessage) { - let sentryLinkTag = null; - if (errorMessage.sentry_link) { - sentryLinkTag = ( - - { - ': Find details about this error here (it may take a moment to become available): ' - } - - {errorMessage.sentry_link} - - - ); - } - errorTag = ( - - ); + if (!UserService.isLoggedIn()) { + UserService.doLogin(); + return null; } + const ability = defineAbility(() => {}); + return (
{/* @ts-ignore */} @@ -57,7 +39,7 @@ export default function App() { - {errorTag} + } /> diff --git a/spiffworkflow-frontend/src/classes/ProcessInstanceClass.tsx b/spiffworkflow-frontend/src/classes/ProcessInstanceClass.tsx new file mode 100644 index 000000000..d44569cd4 --- /dev/null +++ b/spiffworkflow-frontend/src/classes/ProcessInstanceClass.tsx @@ -0,0 +1,5 @@ +export default class ProcessInstanceClass { + static terminalStatuses() { + return ['complete', 'error', 'terminated']; + } +} diff --git a/spiffworkflow-frontend/src/components/ErrorDisplay.tsx b/spiffworkflow-frontend/src/components/ErrorDisplay.tsx new file mode 100644 index 000000000..cdbed75a0 --- /dev/null +++ b/spiffworkflow-frontend/src/components/ErrorDisplay.tsx @@ -0,0 +1,55 @@ +import { useContext } from 'react'; +import ErrorContext from '../contexts/ErrorContext'; +import { Notification } from './Notification'; + +export default function ErrorDisplay() { + const [errorObject, setErrorObject] = (useContext as any)(ErrorContext); + + let errorTag = null; + if (errorObject) { + let sentryLinkTag = null; + if (errorObject.sentry_link) { + sentryLinkTag = ( + + { + ': Find details about this error here (it may take a moment to become available): ' + } + + {errorObject.sentry_link} + + + ); + } + + let message =
{errorObject.message}
; + let title = 'Error:'; + if ('task_name' in errorObject && errorObject.task_name) { + title = 'Error in python script:'; + message = ( + <> +
+
+ Task: {errorObject.task_name} ({errorObject.task_id}) +
+
File name: {errorObject.file_name}
+
Line number in script task: {errorObject.line_number}
+
+
{errorObject.message}
+ + ); + } + + errorTag = ( + setErrorObject(null)} + type="error" + > + {message} + {sentryLinkTag} + + ); + } + + return errorTag; +} diff --git a/spiffworkflow-frontend/src/components/MyCompletedInstances.tsx b/spiffworkflow-frontend/src/components/MyCompletedInstances.tsx index 2d0fe26a7..47042e910 100644 --- a/spiffworkflow-frontend/src/components/MyCompletedInstances.tsx +++ b/spiffworkflow-frontend/src/components/MyCompletedInstances.tsx @@ -8,7 +8,7 @@ export default function MyCompletedInstances() { filtersEnabled={false} paginationQueryParamPrefix={paginationQueryParamPrefix} perPageOptions={[2, 5, 25]} - reportIdentifier="system_report_instances_initiated_by_me" + reportIdentifier="system_report_completed_instances_initiated_by_me" showReports={false} /> ); diff --git a/spiffworkflow-frontend/src/components/NavigationBar.tsx b/spiffworkflow-frontend/src/components/NavigationBar.tsx index 47e0de998..e482ae526 100644 --- a/spiffworkflow-frontend/src/components/NavigationBar.tsx +++ b/spiffworkflow-frontend/src/components/NavigationBar.tsx @@ -24,6 +24,7 @@ import UserService from '../services/UserService'; import { useUriListForPermissions } from '../hooks/UriListForPermissions'; import { PermissionsToCheck } from '../interfaces'; import { usePermissionFetcher } from '../hooks/PermissionService'; +import { UnauthenticatedError } from '../services/HttpService'; // for ref: https://react-bootstrap.github.io/components/navbar/ export default function NavigationBar() { @@ -39,6 +40,11 @@ export default function NavigationBar() { const [activeKey, setActiveKey] = useState(''); const { targetUris } = useUriListForPermissions(); + + // App.jsx forces login (which redirects to keycloak) so we should never get here if we're not logged in. 
+ if (!UserService.isLoggedIn()) { + throw new UnauthenticatedError('You must be authenticated to do this.'); + } const permissionRequestData: PermissionsToCheck = { [targetUris.authenticationListPath]: ['GET'], [targetUris.messageInstanceListPath]: ['GET'], @@ -75,7 +81,7 @@ export default function NavigationBar() { return ( <> - {UserService.getUsername()} + {UserService.getPreferredUsername()} { + if (!UserService.isLoggedIn()) { + return null; + } return ( <> diff --git a/spiffworkflow-frontend/src/components/Notification.tsx b/spiffworkflow-frontend/src/components/Notification.tsx index d7f14e026..38191de7b 100644 --- a/spiffworkflow-frontend/src/components/Notification.tsx +++ b/spiffworkflow-frontend/src/components/Notification.tsx @@ -1,6 +1,10 @@ import React from 'react'; -// @ts-ignore -import { Close, CheckmarkFilled } from '@carbon/icons-react'; +import { + Close, + Checkmark, + Error, + // @ts-ignore +} from '@carbon/icons-react'; // @ts-ignore import { Button } from '@carbon/react'; @@ -17,9 +21,9 @@ export function Notification({ onClose, type = 'success', }: OwnProps) { - let iconClassName = 'green-icon'; + let iconComponent = ; if (type === 'error') { - iconClassName = 'red-icon'; + iconComponent = ; } return (
- + {iconComponent}
{title}
{children}
diff --git a/spiffworkflow-frontend/src/components/ProcessInstanceListDeleteReport.tsx b/spiffworkflow-frontend/src/components/ProcessInstanceListDeleteReport.tsx new file mode 100644 index 000000000..ca04d516c --- /dev/null +++ b/spiffworkflow-frontend/src/components/ProcessInstanceListDeleteReport.tsx @@ -0,0 +1,29 @@ +import { ProcessInstanceReport } from '../interfaces'; +import HttpService from '../services/HttpService'; +import ButtonWithConfirmation from './ButtonWithConfirmation'; + +type OwnProps = { + onSuccess: (..._args: any[]) => any; + processInstanceReportSelection: ProcessInstanceReport; +}; + +export default function ProcessInstanceListDeleteReport({ + onSuccess, + processInstanceReportSelection, +}: OwnProps) { + const deleteProcessInstanceReport = () => { + HttpService.makeCallToBackend({ + path: `/process-instances/reports/${processInstanceReportSelection.id}`, + successCallback: onSuccess, + httpMethod: 'DELETE', + }); + }; + + return ( + + ); +} diff --git a/spiffworkflow-frontend/src/components/ProcessInstanceListTable.tsx b/spiffworkflow-frontend/src/components/ProcessInstanceListTable.tsx index 98b76df39..2fc812659 100644 --- a/spiffworkflow-frontend/src/components/ProcessInstanceListTable.tsx +++ b/spiffworkflow-frontend/src/components/ProcessInstanceListTable.tsx @@ -40,6 +40,7 @@ import { getProcessModelFullIdentifierFromSearchParams, modifyProcessIdentifierForPathParam, refreshAtInterval, + setErrorMessageSafely, } from '../helpers'; import PaginationForTable from './PaginationForTable'; @@ -62,6 +63,7 @@ import { } from '../interfaces'; import ProcessModelSearch from './ProcessModelSearch'; import ProcessInstanceReportSearch from './ProcessInstanceReportSearch'; +import ProcessInstanceListDeleteReport from './ProcessInstanceListDeleteReport'; import ProcessInstanceListSaveAsReport from './ProcessInstanceListSaveAsReport'; import { FormatProcessModelDisplayName } from './MiniComponents'; import { Notification } from './Notification'; @@ 
-79,6 +81,8 @@ type OwnProps = { textToShowIfEmpty?: string; paginationClassName?: string; autoReload?: boolean; + additionalParams?: string; + variant?: string; }; interface dateParameters { @@ -90,12 +94,18 @@ export default function ProcessInstanceListTable({ processModelFullIdentifier, paginationQueryParamPrefix, perPageOptions, + additionalParams, showReports = true, reportIdentifier, textToShowIfEmpty, paginationClassName, autoReload = false, + variant = 'for-me', }: OwnProps) { + let apiPath = '/process-instances/for-me'; + if (variant === 'all') { + apiPath = '/process-instances'; + } const params = useParams(); const [searchParams] = useSearchParams(); const navigate = useNavigate(); @@ -122,7 +132,12 @@ export default function ProcessInstanceListTable({ const [endFromTimeInvalid, setEndFromTimeInvalid] = useState(false); const [endToTimeInvalid, setEndToTimeInvalid] = useState(false); - const setErrorMessage = (useContext as any)(ErrorContext)[1]; + const [errorObject, setErrorObject] = (useContext as any)(ErrorContext); + + const processInstancePathPrefix = + variant === 'all' + ? 
'/admin/process-instances/all' + : '/admin/process-instances/for-me'; const [processStatusAllOptions, setProcessStatusAllOptions] = useState( [] @@ -253,8 +268,12 @@ export default function ProcessInstanceListTable({ } ); + if (additionalParams) { + queryParamString += `&${additionalParams}`; + } + HttpService.makeCallToBackend({ - path: `/process-instances?${queryParamString}`, + path: `${apiPath}?${queryParamString}`, successCallback: setProcessInstancesFromResult, }); } @@ -300,8 +319,13 @@ export default function ProcessInstanceListTable({ checkFiltersAndRun(); if (autoReload) { - refreshAtInterval(REFRESH_INTERVAL, REFRESH_TIMEOUT, checkFiltersAndRun); + return refreshAtInterval( + REFRESH_INTERVAL, + REFRESH_TIMEOUT, + checkFiltersAndRun + ); } + return undefined; }, [ autoReload, searchParams, @@ -315,6 +339,8 @@ export default function ProcessInstanceListTable({ processModelFullIdentifier, perPageOptions, reportIdentifier, + additionalParams, + apiPath, ]); // This sets the filter data using the saved reports returned from the initial instance_list query. @@ -404,8 +430,11 @@ export default function ProcessInstanceListTable({ } }; - // TODO: after factoring this out page hangs when invalid date ranges and applying the filter - const calculateStartAndEndSeconds = () => { + // jasquat/burnettk - 2022-12-28 do not check the validity of the dates when rendering components to avoid the page being + // re-rendered while the user is still typing. NOTE that we also prevented rerendering + // with the use of the setErrorMessageSafely function. we are not sure why the context not + // changing still causes things to rerender when we call its setter without our extra check. 
+ const calculateStartAndEndSeconds = (validate: boolean = true) => { const startFromSeconds = convertDateAndTimeStringsToSeconds( startFromDate, startFromTime || '00:00:00' @@ -423,29 +452,25 @@ export default function ProcessInstanceListTable({ endToTime || '00:00:00' ); let valid = true; - if (isTrueComparison(startFromSeconds, '>', startToSeconds)) { - setErrorMessage({ - message: '"Start date from" cannot be after "start date to"', - }); - valid = false; - } - if (isTrueComparison(endFromSeconds, '>', endToSeconds)) { - setErrorMessage({ - message: '"End date from" cannot be after "end date to"', - }); - valid = false; - } - if (isTrueComparison(startFromSeconds, '>', endFromSeconds)) { - setErrorMessage({ - message: '"Start date from" cannot be after "end date from"', - }); - valid = false; - } - if (isTrueComparison(startToSeconds, '>', endToSeconds)) { - setErrorMessage({ - message: '"Start date to" cannot be after "end date to"', - }); - valid = false; + + if (validate) { + let message = ''; + if (isTrueComparison(startFromSeconds, '>', startToSeconds)) { + message = '"Start date from" cannot be after "start date to"'; + } + if (isTrueComparison(endFromSeconds, '>', endToSeconds)) { + message = '"End date from" cannot be after "end date to"'; + } + if (isTrueComparison(startFromSeconds, '>', endFromSeconds)) { + message = '"Start date from" cannot be after "end date from"'; + } + if (isTrueComparison(startToSeconds, '>', endToSeconds)) { + message = '"Start date to" cannot be after "end date to"'; + } + if (message !== '') { + valid = false; + setErrorMessageSafely(message, errorObject, setErrorObject); + } } return { @@ -502,9 +527,9 @@ export default function ProcessInstanceListTable({ queryParamString += `&report_id=${processInstanceReportSelection.id}`; } - setErrorMessage(null); + setErrorObject(null); setProcessInstanceReportJustSaved(null); - navigate(`/admin/process-instances?${queryParamString}`); + 
navigate(`${processInstancePathPrefix}?${queryParamString}`); }; const dateComponent = ( @@ -601,9 +626,9 @@ export default function ProcessInstanceListTable({ queryParamString = `?report_id=${selectedReport.id}`; } - setErrorMessage(null); + setErrorObject(null); setProcessInstanceReportJustSaved(mode || null); - navigate(`/admin/process-instances${queryParamString}`); + navigate(`${processInstancePathPrefix}${queryParamString}`); }; const reportColumns = () => { @@ -633,7 +658,7 @@ export default function ProcessInstanceListTable({ startToSeconds, endFromSeconds, endToSeconds, - } = calculateStartAndEndSeconds(); + } = calculateStartAndEndSeconds(false); if (!valid || !reportMetadata) { return null; @@ -657,6 +682,19 @@ export default function ProcessInstanceListTable({ ); }; + const onDeleteReportSuccess = () => { + processInstanceReportDidChange({ selectedItem: null }); + }; + + const deleteReportComponent = () => { + return processInstanceReportSelection ? ( + + ) : null; + }; + const removeColumn = (reportColumn: ReportColumn) => { if (reportMetadata) { const reportMetadataCopy = { ...reportMetadata }; @@ -736,7 +774,6 @@ export default function ProcessInstanceListTable({ setReportMetadata(reportMetadataCopy); setReportColumnToOperateOn(null); setShowReportColumnForm(false); - setShowReportColumnForm(false); } }; @@ -757,9 +794,12 @@ export default function ProcessInstanceListTable({ }; const updateReportColumn = (event: any) => { - const reportColumnForEditing = reportColumnToReportColumnForEditing( - event.selectedItem - ); + let reportColumnForEditing = null; + if (event.selectedItem) { + reportColumnForEditing = reportColumnToReportColumnForEditing( + event.selectedItem + ); + } setReportColumnToOperateOn(reportColumnForEditing); }; @@ -789,7 +829,29 @@ export default function ProcessInstanceListTable({ if (reportColumnFormMode === '') { return null; } - const formElements = [ + const formElements = []; + if (reportColumnFormMode === 'new') { + 
formElements.push( + { + if (reportColumn) { + return reportColumn.accessor; + } + return null; + }} + shouldFilterItem={shouldFilterReportColumn} + placeholder="Choose a column to show" + titleText="Column" + selectedItem={reportColumnToOperateOn} + /> + ); + } + formElements.push([ , - ]; + ]); if (reportColumnToOperateOn && reportColumnToOperateOn.filterable) { formElements.push( ); } - if (reportColumnFormMode === 'new') { - formElements.push( - { - if (reportColumn) { - return reportColumn.accessor; - } - return null; - }} - shouldFilterItem={shouldFilterReportColumn} - placeholder="Choose a report column" - titleText="Report Column" - /> - ); - } + formElements.push( +
+ ); const modalHeading = reportColumnFormMode === 'new' ? 'Add Column' @@ -888,7 +932,7 @@ export default function ProcessInstanceListTable({ kind="ghost" size="sm" className={`button-tag-icon ${tagTypeClass}`} - title={`Edit ${reportColumnForEditing.accessor}`} + title={`Edit ${reportColumnForEditing.accessor} column`} onClick={() => { setReportColumnToOperateOn(reportColumnForEditing); setShowReportColumnForm(true); @@ -916,7 +960,7 @@ export default function ProcessInstanceListTable({ + + {canViewXml && ( + + )} + ); } diff --git a/spiffworkflow-frontend/src/components/TaskListTable.tsx b/spiffworkflow-frontend/src/components/TaskListTable.tsx new file mode 100644 index 000000000..2e53bcea6 --- /dev/null +++ b/spiffworkflow-frontend/src/components/TaskListTable.tsx @@ -0,0 +1,233 @@ +import { useEffect, useState } from 'react'; +// @ts-ignore +import { Button, Table } from '@carbon/react'; +import { Link, useSearchParams } from 'react-router-dom'; +import UserService from '../services/UserService'; +import PaginationForTable from './PaginationForTable'; +import { + convertSecondsToFormattedDateTime, + getPageInfoFromSearchParams, + modifyProcessIdentifierForPathParam, + refreshAtInterval, +} from '../helpers'; +import HttpService from '../services/HttpService'; +import { PaginationObject, ProcessInstanceTask } from '../interfaces'; +import TableCellWithTimeAgoInWords from './TableCellWithTimeAgoInWords'; + +const PER_PAGE_FOR_TASKS_ON_HOME_PAGE = 5; +const REFRESH_INTERVAL = 5; +const REFRESH_TIMEOUT = 600; + +type OwnProps = { + apiPath: string; + tableTitle: string; + tableDescription: string; + additionalParams?: string; + paginationQueryParamPrefix?: string; + paginationClassName?: string; + autoReload?: boolean; + showStartedBy?: boolean; + showWaitingOn?: boolean; + textToShowIfEmpty?: string; +}; + +export default function TaskListTable({ + apiPath, + tableTitle, + tableDescription, + additionalParams, + paginationQueryParamPrefix, + paginationClassName, 
+ textToShowIfEmpty, + autoReload = false, + showStartedBy = true, + showWaitingOn = true, +}: OwnProps) { + const [searchParams] = useSearchParams(); + const [tasks, setTasks] = useState(null); + const [pagination, setPagination] = useState(null); + + const preferredUsername = UserService.getPreferredUsername(); + const userEmail = UserService.getUserEmail(); + + useEffect(() => { + const getTasks = () => { + const { page, perPage } = getPageInfoFromSearchParams( + searchParams, + PER_PAGE_FOR_TASKS_ON_HOME_PAGE, + undefined, + paginationQueryParamPrefix + ); + const setTasksFromResult = (result: any) => { + setTasks(result.results); + setPagination(result.pagination); + }; + let params = `?per_page=${perPage}&page=${page}`; + if (additionalParams) { + params += `&${additionalParams}`; + } + HttpService.makeCallToBackend({ + path: `${apiPath}${params}`, + successCallback: setTasksFromResult, + }); + }; + getTasks(); + if (autoReload) { + return refreshAtInterval(REFRESH_INTERVAL, REFRESH_TIMEOUT, getTasks); + } + return undefined; + }, [ + searchParams, + additionalParams, + apiPath, + paginationQueryParamPrefix, + autoReload, + ]); + + const getWaitingForTableCellComponent = ( + processInstanceTask: ProcessInstanceTask + ) => { + let fullUsernameString = ''; + let shortUsernameString = ''; + if (processInstanceTask.assigned_user_group_identifier) { + fullUsernameString = processInstanceTask.assigned_user_group_identifier; + shortUsernameString = processInstanceTask.assigned_user_group_identifier; + } + if (processInstanceTask.potential_owner_usernames) { + fullUsernameString = processInstanceTask.potential_owner_usernames; + const usernames = + processInstanceTask.potential_owner_usernames.split(','); + const firstTwoUsernames = usernames.slice(0, 2); + if (usernames.length > 2) { + firstTwoUsernames.push('...'); + } + shortUsernameString = firstTwoUsernames.join(','); + } + return {shortUsernameString}; + }; + + const buildTable = () => { + if (!tasks) { + 
return null; + } + const rows = tasks.map((row: ProcessInstanceTask) => { + const taskUrl = `/tasks/${row.process_instance_id}/${row.task_id}`; + const modifiedProcessModelIdentifier = + modifyProcessIdentifierForPathParam(row.process_model_identifier); + + const regex = new RegExp(`\\b(${preferredUsername}|${userEmail})\\b`); + let hasAccessToCompleteTask = false; + if (row.potential_owner_usernames.match(regex)) { + hasAccessToCompleteTask = true; + } + return ( + + + + {row.process_instance_id} + + + + + {row.process_model_display_name} + + + + {row.task_title} + + {showStartedBy ? {row.process_initiator_username} : ''} + {showWaitingOn ? {getWaitingForTableCellComponent(row)} : ''} + + {convertSecondsToFormattedDateTime(row.created_at_in_seconds) || + '-'} + + + + + + + ); + }); + let tableHeaders = ['Id', 'Process', 'Task']; + if (showStartedBy) { + tableHeaders.push('Started By'); + } + if (showWaitingOn) { + tableHeaders.push('Waiting For'); + } + tableHeaders = tableHeaders.concat([ + 'Date Started', + 'Last Updated', + 'Actions', + ]); + return ( + + + + {tableHeaders.map((tableHeader: string) => { + return ; + })} + + + {rows} +
{tableHeader}
+ ); + }; + + const tasksComponent = () => { + if (pagination && pagination.total < 1) { + return ( +

+ {textToShowIfEmpty} +

+ ); + } + const { page, perPage } = getPageInfoFromSearchParams( + searchParams, + PER_PAGE_FOR_TASKS_ON_HOME_PAGE, + undefined, + paginationQueryParamPrefix + ); + return ( + + ); + }; + + if (tasks) { + return ( + <> +

{tableTitle}

+

{tableDescription}

+ {tasksComponent()} + + ); + } + return null; +} diff --git a/spiffworkflow-frontend/src/components/TasksForMyOpenProcesses.tsx b/spiffworkflow-frontend/src/components/TasksForMyOpenProcesses.tsx index deb2030ea..be1d9042a 100644 --- a/spiffworkflow-frontend/src/components/TasksForMyOpenProcesses.tsx +++ b/spiffworkflow-frontend/src/components/TasksForMyOpenProcesses.tsx @@ -1,156 +1,18 @@ -import { useEffect, useState } from 'react'; -// @ts-ignore -import { Button, Table } from '@carbon/react'; -import { Link, useSearchParams } from 'react-router-dom'; -import PaginationForTable from './PaginationForTable'; -import { - convertSecondsToFormattedDateTime, - getPageInfoFromSearchParams, - modifyProcessIdentifierForPathParam, - refreshAtInterval, -} from '../helpers'; -import HttpService from '../services/HttpService'; -import { PaginationObject } from '../interfaces'; -import TableCellWithTimeAgoInWords from './TableCellWithTimeAgoInWords'; +import TaskListTable from './TaskListTable'; -const PER_PAGE_FOR_TASKS_ON_HOME_PAGE = 5; const paginationQueryParamPrefix = 'tasks_for_my_open_processes'; -const REFRESH_INTERVAL = 5; -const REFRESH_TIMEOUT = 600; export default function MyOpenProcesses() { - const [searchParams] = useSearchParams(); - const [tasks, setTasks] = useState([]); - const [pagination, setPagination] = useState(null); - - useEffect(() => { - const getTasks = () => { - const { page, perPage } = getPageInfoFromSearchParams( - searchParams, - PER_PAGE_FOR_TASKS_ON_HOME_PAGE, - undefined, - paginationQueryParamPrefix - ); - const setTasksFromResult = (result: any) => { - setTasks(result.results); - setPagination(result.pagination); - }; - HttpService.makeCallToBackend({ - path: `/tasks/for-my-open-processes?per_page=${perPage}&page=${page}`, - successCallback: setTasksFromResult, - }); - }; - getTasks(); - refreshAtInterval(REFRESH_INTERVAL, REFRESH_TIMEOUT, getTasks); - }, [searchParams]); - - const buildTable = () => { - const rows = tasks.map((row) => 
{ - const rowToUse = row as any; - const taskUrl = `/tasks/${rowToUse.process_instance_id}/${rowToUse.task_id}`; - const modifiedProcessModelIdentifier = - modifyProcessIdentifierForPathParam(rowToUse.process_model_identifier); - return ( - - - - {rowToUse.process_instance_id} - - - - - {rowToUse.process_model_display_name} - - - - {rowToUse.task_title} - - {rowToUse.group_identifier || '-'} - - {convertSecondsToFormattedDateTime( - rowToUse.created_at_in_seconds - ) || '-'} - - - - - - - ); - }); - return ( - - - - - - - - - - - - - {rows} -
IdProcessTaskWaiting ForDate StartedLast UpdatedActions
- ); - }; - - const tasksComponent = () => { - if (pagination && pagination.total < 1) { - return ( -

- There are no tasks for processes you started at this time. -

- ); - } - const { page, perPage } = getPageInfoFromSearchParams( - searchParams, - PER_PAGE_FOR_TASKS_ON_HOME_PAGE, - undefined, - paginationQueryParamPrefix - ); - return ( - - ); - }; - return ( - <> -

My open instances

-

- These tasks are for processes you started which are not complete. You - may not have an action to take at this time. See below for tasks waiting - on you. -

- {tasksComponent()} - + ); } diff --git a/spiffworkflow-frontend/src/components/TasksWaitingForMe.tsx b/spiffworkflow-frontend/src/components/TasksWaitingForMe.tsx index 7d06b7a30..1939e4ba3 100644 --- a/spiffworkflow-frontend/src/components/TasksWaitingForMe.tsx +++ b/spiffworkflow-frontend/src/components/TasksWaitingForMe.tsx @@ -1,149 +1,16 @@ -import { useEffect, useState } from 'react'; -// @ts-ignore -import { Button, Table } from '@carbon/react'; -import { Link, useSearchParams } from 'react-router-dom'; -import PaginationForTable from './PaginationForTable'; -import { - convertSecondsToFormattedDateTime, - getPageInfoFromSearchParams, - modifyProcessIdentifierForPathParam, -} from '../helpers'; -import HttpService from '../services/HttpService'; -import { PaginationObject } from '../interfaces'; -import TableCellWithTimeAgoInWords from './TableCellWithTimeAgoInWords'; - -const PER_PAGE_FOR_TASKS_ON_HOME_PAGE = 5; +import TaskListTable from './TaskListTable'; export default function TasksWaitingForMe() { - const [searchParams] = useSearchParams(); - const [tasks, setTasks] = useState([]); - const [pagination, setPagination] = useState(null); - - useEffect(() => { - const { page, perPage } = getPageInfoFromSearchParams( - searchParams, - PER_PAGE_FOR_TASKS_ON_HOME_PAGE, - undefined, - 'tasks_waiting_for_me' - ); - const setTasksFromResult = (result: any) => { - setTasks(result.results); - setPagination(result.pagination); - }; - HttpService.makeCallToBackend({ - path: `/tasks/for-me?per_page=${perPage}&page=${page}`, - successCallback: setTasksFromResult, - }); - }, [searchParams]); - - const buildTable = () => { - const rows = tasks.map((row) => { - const rowToUse = row as any; - const taskUrl = `/tasks/${rowToUse.process_instance_id}/${rowToUse.task_id}`; - const modifiedProcessModelIdentifier = - modifyProcessIdentifierForPathParam(rowToUse.process_model_identifier); - return ( - - - - {rowToUse.process_instance_id} - - - - - 
{rowToUse.process_model_display_name} - - - - {rowToUse.task_title} - - {rowToUse.username} - {rowToUse.group_identifier || '-'} - - {convertSecondsToFormattedDateTime( - rowToUse.created_at_in_seconds - ) || '-'} - - - - - - - ); - }); - return ( - - - - - - - - - - - - - - {rows} -
IdProcessTaskStarted ByWaiting ForDate StartedLast UpdatedActions
- ); - }; - - const tasksComponent = () => { - if (pagination && pagination.total < 1) { - return ( -

- You have no task assignments at this time. -

- ); - } - const { page, perPage } = getPageInfoFromSearchParams( - searchParams, - PER_PAGE_FOR_TASKS_ON_HOME_PAGE, - undefined, - 'tasks_waiting_for_me' - ); - return ( - - ); - }; - return ( - <> -

Tasks waiting for me

-

- These processes are waiting on you to complete the next task. All are - processes created by others that are now actionable by you. -

- {tasksComponent()} - + ); } diff --git a/spiffworkflow-frontend/src/components/TasksWaitingForMyGroups.tsx b/spiffworkflow-frontend/src/components/TasksWaitingForMyGroups.tsx index 565cd4a55..dab0372b8 100644 --- a/spiffworkflow-frontend/src/components/TasksWaitingForMyGroups.tsx +++ b/spiffworkflow-frontend/src/components/TasksWaitingForMyGroups.tsx @@ -1,156 +1,41 @@ import { useEffect, useState } from 'react'; -// @ts-ignore -import { Button, Table } from '@carbon/react'; -import { Link, useSearchParams } from 'react-router-dom'; -import PaginationForTable from './PaginationForTable'; -import { - convertSecondsToFormattedDateTime, - getPageInfoFromSearchParams, - modifyProcessIdentifierForPathParam, - refreshAtInterval, -} from '../helpers'; import HttpService from '../services/HttpService'; -import { PaginationObject } from '../interfaces'; -import TableCellWithTimeAgoInWords from './TableCellWithTimeAgoInWords'; - -const PER_PAGE_FOR_TASKS_ON_HOME_PAGE = 5; -const paginationQueryParamPrefix = 'tasks_waiting_for_my_groups'; -const REFRESH_INTERVAL = 5; -const REFRESH_TIMEOUT = 600; +import TaskListTable from './TaskListTable'; export default function TasksWaitingForMyGroups() { - const [searchParams] = useSearchParams(); - const [tasks, setTasks] = useState([]); - const [pagination, setPagination] = useState(null); + const [userGroups, setUserGroups] = useState(null); useEffect(() => { - const getTasks = () => { - const { page, perPage } = getPageInfoFromSearchParams( - searchParams, - PER_PAGE_FOR_TASKS_ON_HOME_PAGE, - undefined, - paginationQueryParamPrefix - ); - const setTasksFromResult = (result: any) => { - setTasks(result.results); - setPagination(result.pagination); - }; - HttpService.makeCallToBackend({ - path: `/tasks/for-my-groups?per_page=${perPage}&page=${page}`, - successCallback: setTasksFromResult, - }); - }; - getTasks(); - refreshAtInterval(REFRESH_INTERVAL, REFRESH_TIMEOUT, getTasks); - }, [searchParams]); + HttpService.makeCallToBackend({ 
+ path: `/user-groups/for-current-user`, + successCallback: setUserGroups, + }); + }, [setUserGroups]); - const buildTable = () => { - const rows = tasks.map((row) => { - const rowToUse = row as any; - const taskUrl = `/tasks/${rowToUse.process_instance_id}/${rowToUse.task_id}`; - const modifiedProcessModelIdentifier = - modifyProcessIdentifierForPathParam(rowToUse.process_model_identifier); + const tableComponents = () => { + if (!userGroups) { + return null; + } + + return userGroups.map((userGroup: string) => { return ( - - - - {rowToUse.process_instance_id} - - - - - {rowToUse.process_model_display_name} - - - - {rowToUse.task_title} - - {rowToUse.username} - {rowToUse.group_identifier || '-'} - - {convertSecondsToFormattedDateTime( - rowToUse.created_at_in_seconds - ) || '-'} - - - - - - + ); }); - return ( - - - - - - - - - - - - - - {rows} -
IdProcessTaskStarted ByWaiting ForDate StartedLast UpdatedActions
- ); }; - const tasksComponent = () => { - if (pagination && pagination.total < 1) { - return ( -

- Your groups have no task assignments at this time. -

- ); - } - const { page, perPage } = getPageInfoFromSearchParams( - searchParams, - PER_PAGE_FOR_TASKS_ON_HOME_PAGE, - undefined, - paginationQueryParamPrefix - ); - return ( - - ); - }; - - return ( - <> -

Tasks waiting for my groups

-

- This is a list of tasks for groups you belong to that can be completed - by any member of the group. -

- {tasksComponent()} - - ); + if (userGroups) { + return <>{tableComponents()}; + } + return null; } diff --git a/spiffworkflow-frontend/src/config.tsx b/spiffworkflow-frontend/src/config.tsx index b0816a39d..abaadd5ef 100644 --- a/spiffworkflow-frontend/src/config.tsx +++ b/spiffworkflow-frontend/src/config.tsx @@ -1,11 +1,23 @@ -const host = window.location.hostname; -let hostAndPort = `api.${host}`; +const { port, hostname } = window.location; +let hostAndPort = `api.${hostname}`; let protocol = 'https'; -if (/^\d+\./.test(host) || host === 'localhost') { - hostAndPort = `${host}:7000`; + +if (/^\d+\./.test(hostname) || hostname === 'localhost') { + let serverPort = 7000; + if (!Number.isNaN(Number(port))) { + serverPort = Number(port) - 1; + } + hostAndPort = `${hostname}:${serverPort}`; protocol = 'http'; } -export const BACKEND_BASE_URL = `${protocol}://${hostAndPort}/v1.0`; + +let url = `${protocol}://${hostAndPort}/v1.0`; +// Allow overriding the backend base url with an environment variable at build time. 
+if (process.env.REACT_APP_BACKEND_BASE_URL) { + url = process.env.REACT_APP_BACKEND_BASE_URL; +} + +export const BACKEND_BASE_URL = url; export const PROCESS_STATUSES = [ 'not_started', diff --git a/spiffworkflow-frontend/src/helpers.test.tsx b/spiffworkflow-frontend/src/helpers.test.tsx index 5a0352b82..660f65f67 100644 --- a/spiffworkflow-frontend/src/helpers.test.tsx +++ b/spiffworkflow-frontend/src/helpers.test.tsx @@ -1,4 +1,8 @@ -import { convertSecondsToFormattedDateString, slugifyString } from './helpers'; +import { + convertSecondsToFormattedDateString, + slugifyString, + underscorizeString, +} from './helpers'; test('it can slugify a string', () => { expect(slugifyString('hello---world_ and then Some such-')).toEqual( @@ -6,6 +10,12 @@ test('it can slugify a string', () => { ); }); +test('it can underscorize a string', () => { + expect(underscorizeString('hello---world_ and then Some such-')).toEqual( + 'hello_world_and_then_some_such' + ); +}); + test('it can keep the correct date when converting seconds to date', () => { const dateString = convertSecondsToFormattedDateString(1666325400); expect(dateString).toEqual('2022-10-21'); diff --git a/spiffworkflow-frontend/src/helpers.tsx b/spiffworkflow-frontend/src/helpers.tsx index 6781ada97..d91f05439 100644 --- a/spiffworkflow-frontend/src/helpers.tsx +++ b/spiffworkflow-frontend/src/helpers.tsx @@ -8,6 +8,7 @@ import { DEFAULT_PER_PAGE, DEFAULT_PAGE, } from './components/PaginationForTable'; +import { ErrorForDisplay } from './interfaces'; // https://www.30secondsofcode.org/js/s/slugify export const slugifyString = (str: any) => { @@ -20,6 +21,10 @@ export const slugifyString = (str: any) => { .replace(/-+$/g, ''); }; +export const underscorizeString = (inputString: string) => { + return slugifyString(inputString).replace(/-/g, '_'); +}; + export const capitalizeFirstLetter = (string: any) => { return string.charAt(0).toUpperCase() + string.slice(1); }; @@ -208,5 +213,43 @@ export const refreshAtInterval 
= ( () => clearInterval(intervalRef), timeout * 1000 ); - return [intervalRef, timeoutRef]; + return () => { + clearInterval(intervalRef); + clearTimeout(timeoutRef); + }; +}; + +const getChildProcesses = (bpmnElement: any) => { + let elements: string[] = []; + bpmnElement.children.forEach((c: any) => { + if (c.type === 'bpmn:Participant') { + if (c.businessObject.processRef) { + elements.push(c.businessObject.processRef.id); + } + elements = [...elements, ...getChildProcesses(c)]; + } else if (c.type === 'bpmn:SubProcess') { + elements.push(c.id); + } + }); + return elements; +}; + +export const getBpmnProcessIdentifiers = (rootBpmnElement: any) => { + const childProcesses = getChildProcesses(rootBpmnElement); + childProcesses.push(rootBpmnElement.businessObject.id); + return childProcesses; +}; + +// Setting the error message state to the same string is still considered a change +// and re-renders the page so check the message first to avoid that. +export const setErrorMessageSafely = ( + newErrorMessageString: string, + oldErrorMessage: ErrorForDisplay, + errorMessageSetter: any +) => { + if (oldErrorMessage && oldErrorMessage.message === newErrorMessageString) { + return null; + } + errorMessageSetter({ message: newErrorMessageString }); + return null; }; diff --git a/spiffworkflow-frontend/src/hooks/UriListForPermissions.tsx b/spiffworkflow-frontend/src/hooks/UriListForPermissions.tsx index f84465c82..f8e5f07f8 100644 --- a/spiffworkflow-frontend/src/hooks/UriListForPermissions.tsx +++ b/spiffworkflow-frontend/src/hooks/UriListForPermissions.tsx @@ -9,12 +9,20 @@ export const useUriListForPermissions = () => { messageInstanceListPath: '/v1.0/messages', processGroupListPath: '/v1.0/process-groups', processGroupShowPath: `/v1.0/process-groups/${params.process_group_id}`, - processInstanceCreatePath: `/v1.0/process-instances/${params.process_model_id}`, processInstanceActionPath: `/v1.0/process-instances/${params.process_model_id}/${params.process_instance_id}`, 
+ processInstanceCreatePath: `/v1.0/process-instances/${params.process_model_id}`, processInstanceListPath: '/v1.0/process-instances', processInstanceLogListPath: `/v1.0/logs/${params.process_model_id}/${params.process_instance_id}`, processInstanceReportListPath: '/v1.0/process-instances/reports', - processInstanceTaskListPath: `/v1.0/task-data/${params.process_model_id}/${params.process_instance_id}`, + processInstanceResumePath: `/v1.0/process-instance-resume/${params.process_model_id}/${params.process_instance_id}`, + processInstanceSuspendPath: `/v1.0/process-instance-suspend/${params.process_model_id}/${params.process_instance_id}`, + processInstanceResetPath: `/v1.0/process-instance-reset/${params.process_model_id}/${params.process_instance_id}`, + processInstanceTaskListDataPath: `/v1.0/task-data/${params.process_model_id}/${params.process_instance_id}`, + processInstanceSendEventPath: `/v1.0/send-event/${params.process_model_id}/${params.process_instance_id}`, + processInstanceCompleteTaskPath: `/v1.0/complete-task/${params.process_model_id}/${params.process_instance_id}`, + processInstanceTaskListPath: `/v1.0/process-instances/${params.process_model_id}/${params.process_instance_id}/task-info`, + processInstanceTaskListForMePath: `/v1.0/process-instances/for-me/${params.process_model_id}/${params.process_instance_id}/task-info`, + processInstanceTerminatePath: `/v1.0/process-instance-terminate/${params.process_model_id}/${params.process_instance_id}`, processModelCreatePath: `/v1.0/process-models/${params.process_group_id}`, processModelFileCreatePath: `/v1.0/process-models/${params.process_model_id}/files`, processModelFileShowPath: `/v1.0/process-models/${params.process_model_id}/files/${params.file_name}`, diff --git a/spiffworkflow-frontend/src/index.css b/spiffworkflow-frontend/src/index.css index 248a23d7d..08e8341cf 100644 --- a/spiffworkflow-frontend/src/index.css +++ b/spiffworkflow-frontend/src/index.css @@ -355,8 +355,8 @@ svg.notification-icon 
{ word-break: normal; } -.combo-box-in-modal { - height: 300px; +.vertical-spacer-to-allow-combo-box-to-expand-in-modal { + height: 250px; } .cds--btn.narrow-button { diff --git a/spiffworkflow-frontend/src/interfaces.ts b/spiffworkflow-frontend/src/interfaces.ts index 079e4cdc7..bb3680133 100644 --- a/spiffworkflow-frontend/src/interfaces.ts +++ b/spiffworkflow-frontend/src/interfaces.ts @@ -5,12 +5,37 @@ export interface Secret { creator_user_id: string; } +export interface ProcessData { + process_data_identifier: string; + process_data_value: any; +} + export interface RecentProcessModel { processGroupIdentifier?: string; processModelIdentifier: string; processModelDisplayName: string; } +export interface ProcessInstanceTask { + id: number; + task_id: string; + process_instance_id: number; + process_model_display_name: string; + process_model_identifier: string; + task_title: string; + lane_assignment_id: string; + process_instance_status: string; + state: string; + process_identifier: string; + name: string; + process_initiator_username: string; + assigned_user_group_identifier: string; + created_at_in_seconds: number; + updated_at_in_seconds: number; + current_user_is_potential_owner: number; + potential_owner_usernames: string; +} + export interface ProcessReference { name: string; // The process or decision Display name. 
identifier: string; // The unique id of the process @@ -39,6 +64,11 @@ export interface ProcessInstance { id: number; process_model_identifier: string; process_model_display_name: string; + status: string; + start_in_seconds: number | null; + end_in_seconds: number | null; + bpmn_xml_file_contents?: string; + spiff_step?: number; } export interface MessageCorrelationProperties { @@ -135,6 +165,10 @@ export type HotCrumbItem = HotCrumbItemArray | HotCrumbItemObject; export interface ErrorForDisplay { message: string; sentry_link?: string; + task_name?: string; + task_id?: string; + line_number?: number; + file_name?: string; } export interface AuthenticationParam { diff --git a/spiffworkflow-frontend/src/routes/AdminRoutes.tsx b/spiffworkflow-frontend/src/routes/AdminRoutes.tsx index da6cae356..2d61439bf 100644 --- a/spiffworkflow-frontend/src/routes/AdminRoutes.tsx +++ b/spiffworkflow-frontend/src/routes/AdminRoutes.tsx @@ -22,14 +22,15 @@ import ProcessInstanceLogList from './ProcessInstanceLogList'; import MessageInstanceList from './MessageInstanceList'; import Configuration from './Configuration'; import JsonSchemaFormBuilder from './JsonSchemaFormBuilder'; +import ProcessModelNewExperimental from './ProcessModelNewExperimental'; export default function AdminRoutes() { const location = useLocation(); - const setErrorMessage = (useContext as any)(ErrorContext)[1]; + const setErrorObject = (useContext as any)(ErrorContext)[1]; useEffect(() => { - setErrorMessage(null); - }, [location, setErrorMessage]); + setErrorObject(null); + }, [location, setErrorObject]); if (UserService.hasRole(['admin'])) { return ( @@ -50,6 +51,10 @@ export default function AdminRoutes() { path="process-models/:process_group_id/new" element={} /> + } + /> } @@ -62,21 +67,25 @@ export default function AdminRoutes() { path="process-models/:process_model_id/files/:file_name" element={} /> - } - /> } /> + } + /> + } + /> } + element={} /> } + element={} /> } /> - } /> + } + /> + } + /> + } + 
/> } /> } /> { if (authenticationList) { diff --git a/spiffworkflow-frontend/src/routes/CompletedInstances.tsx b/spiffworkflow-frontend/src/routes/CompletedInstances.tsx index f97bb5d59..5c7ce445b 100644 --- a/spiffworkflow-frontend/src/routes/CompletedInstances.tsx +++ b/spiffworkflow-frontend/src/routes/CompletedInstances.tsx @@ -1,6 +1,45 @@ +import { useEffect, useState } from 'react'; import ProcessInstanceListTable from '../components/ProcessInstanceListTable'; +import HttpService from '../services/HttpService'; export default function CompletedInstances() { + const [userGroups, setUserGroups] = useState(null); + + useEffect(() => { + HttpService.makeCallToBackend({ + path: `/user-groups/for-current-user`, + successCallback: setUserGroups, + }); + }, [setUserGroups]); + + const groupTableComponents = () => { + if (!userGroups) { + return null; + } + + return userGroups.map((userGroup: string) => { + return ( + <> +

With tasks completed by group: {userGroup}

+

+ This is a list of instances with tasks that were completed by the{' '} + {userGroup} group. +

+ + + ); + }); + }; + return ( <>

My completed instances

@@ -11,13 +50,13 @@ export default function CompletedInstances() { filtersEnabled={false} paginationQueryParamPrefix="my_completed_instances" perPageOptions={[2, 5, 25]} - reportIdentifier="system_report_instances_initiated_by_me" + reportIdentifier="system_report_completed_instances_initiated_by_me" showReports={false} textToShowIfEmpty="You have no completed instances at this time." paginationClassName="with-large-bottom-margin" autoReload /> -

Tasks completed by me

+

With tasks completed by me

This is a list of instances where you have completed tasks.

@@ -25,24 +64,12 @@ export default function CompletedInstances() { filtersEnabled={false} paginationQueryParamPrefix="my_completed_tasks" perPageOptions={[2, 5, 25]} - reportIdentifier="system_report_instances_with_tasks_completed_by_me" + reportIdentifier="system_report_completed_instances_with_tasks_completed_by_me" showReports={false} - textToShowIfEmpty="You have no completed tasks at this time." + textToShowIfEmpty="You have no completed instances at this time." paginationClassName="with-large-bottom-margin" /> -

Tasks completed by my groups

-

- This is a list of instances with tasks that were completed by groups you - belong to. -

- + {groupTableComponents()} ); } diff --git a/spiffworkflow-frontend/src/routes/Configuration.tsx b/spiffworkflow-frontend/src/routes/Configuration.tsx index b2e30416d..bd9e59c50 100644 --- a/spiffworkflow-frontend/src/routes/Configuration.tsx +++ b/spiffworkflow-frontend/src/routes/Configuration.tsx @@ -14,7 +14,7 @@ import { usePermissionFetcher } from '../hooks/PermissionService'; export default function Configuration() { const location = useLocation(); - const setErrorMessage = (useContext as any)(ErrorContext)[1]; + const setErrorObject = (useContext as any)(ErrorContext)[1]; const [selectedTabIndex, setSelectedTabIndex] = useState(0); const navigate = useNavigate(); @@ -26,13 +26,13 @@ export default function Configuration() { const { ability } = usePermissionFetcher(permissionRequestData); useEffect(() => { - setErrorMessage(null); + setErrorObject(null); let newSelectedTabIndex = 0; if (location.pathname.match(/^\/admin\/configuration\/authentications\b/)) { newSelectedTabIndex = 1; } setSelectedTabIndex(newSelectedTabIndex); - }, [location, setErrorMessage]); + }, [location, setErrorObject]); return ( <> diff --git a/spiffworkflow-frontend/src/routes/HomePageRoutes.tsx b/spiffworkflow-frontend/src/routes/HomePageRoutes.tsx index 872a7a69c..0475d4c75 100644 --- a/spiffworkflow-frontend/src/routes/HomePageRoutes.tsx +++ b/spiffworkflow-frontend/src/routes/HomePageRoutes.tsx @@ -11,12 +11,12 @@ import CreateNewInstance from './CreateNewInstance'; export default function HomePageRoutes() { const location = useLocation(); - const setErrorMessage = (useContext as any)(ErrorContext)[1]; + const setErrorObject = (useContext as any)(ErrorContext)[1]; const [selectedTabIndex, setSelectedTabIndex] = useState(0); const navigate = useNavigate(); useEffect(() => { - setErrorMessage(null); + setErrorObject(null); let newSelectedTabIndex = 0; if (location.pathname.match(/^\/tasks\/completed-instances\b/)) { newSelectedTabIndex = 1; @@ -24,7 +24,7 @@ export default 
function HomePageRoutes() { newSelectedTabIndex = 2; } setSelectedTabIndex(newSelectedTabIndex); - }, [location, setErrorMessage]); + }, [location, setErrorObject]); const renderTabs = () => { if (location.pathname.match(/^\/tasks\/\d+\/\b/)) { diff --git a/spiffworkflow-frontend/src/routes/JsonSchemaFormBuilder.tsx b/spiffworkflow-frontend/src/routes/JsonSchemaFormBuilder.tsx index 6d1011014..d4a9c2b44 100644 --- a/spiffworkflow-frontend/src/routes/JsonSchemaFormBuilder.tsx +++ b/spiffworkflow-frontend/src/routes/JsonSchemaFormBuilder.tsx @@ -3,7 +3,11 @@ import { useEffect, useState } from 'react'; import { Button, Select, SelectItem, TextInput } from '@carbon/react'; import { useParams } from 'react-router-dom'; import { FormField } from '../interfaces'; -import { modifyProcessIdentifierForPathParam, slugifyString } from '../helpers'; +import { + modifyProcessIdentifierForPathParam, + slugifyString, + underscorizeString, +} from '../helpers'; import HttpService from '../services/HttpService'; export default function JsonSchemaFormBuilder() { @@ -75,7 +79,7 @@ export default function JsonSchemaFormBuilder() { formFieldIdHasBeenUpdatedByUser ); if (!formFieldIdHasBeenUpdatedByUser) { - setFormFieldId(slugifyString(newFormFieldTitle)); + setFormFieldId(underscorizeString(newFormFieldTitle)); } setFormFieldTitle(newFormFieldTitle); }; diff --git a/spiffworkflow-frontend/src/routes/MyTasks.tsx b/spiffworkflow-frontend/src/routes/MyTasks.tsx index 4c1cbc9bf..3daaaef6a 100644 --- a/spiffworkflow-frontend/src/routes/MyTasks.tsx +++ b/spiffworkflow-frontend/src/routes/MyTasks.tsx @@ -2,6 +2,7 @@ import { useEffect, useState } from 'react'; // @ts-ignore import { Button, Table } from '@carbon/react'; import { Link, useSearchParams } from 'react-router-dom'; +import { Notification } from '../components/Notification'; import PaginationForTable from '../components/PaginationForTable'; import { getPageInfoFromSearchParams, @@ -51,20 +52,19 @@ export default function MyTasks() 
{ const processInstanceRunResultTag = () => { if (processInstance) { return ( -
-

- Process Instance {processInstance.id} kicked off ( - - view - - ). -

-
+ setProcessInstance(null)} + > + + view + + ); } return null; diff --git a/spiffworkflow-frontend/src/routes/ProcessInstanceList.tsx b/spiffworkflow-frontend/src/routes/ProcessInstanceList.tsx index b6c08b213..a18f48c80 100644 --- a/spiffworkflow-frontend/src/routes/ProcessInstanceList.tsx +++ b/spiffworkflow-frontend/src/routes/ProcessInstanceList.tsx @@ -1,15 +1,33 @@ -import { useSearchParams } from 'react-router-dom'; +import { useNavigate, useSearchParams } from 'react-router-dom'; import 'react-datepicker/dist/react-datepicker.css'; import 'react-bootstrap-typeahead/css/Typeahead.css'; import 'react-bootstrap-typeahead/css/Typeahead.bs5.css'; +// @ts-ignore +import { Tabs, TabList, Tab } from '@carbon/react'; +import { Can } from '@casl/react'; import ProcessBreadcrumb from '../components/ProcessBreadcrumb'; import ProcessInstanceListTable from '../components/ProcessInstanceListTable'; import { getProcessModelFullIdentifierFromSearchParams } from '../helpers'; +import { useUriListForPermissions } from '../hooks/UriListForPermissions'; +import { PermissionsToCheck } from '../interfaces'; +import { usePermissionFetcher } from '../hooks/PermissionService'; -export default function ProcessInstanceList() { +type OwnProps = { + variant: string; +}; + +export default function ProcessInstanceList({ variant }: OwnProps) { const [searchParams] = useSearchParams(); + const navigate = useNavigate(); + + const { targetUris } = useUriListForPermissions(); + const permissionRequestData: PermissionsToCheck = { + [targetUris.processInstanceListPath]: ['GET'], + }; + const { ability } = usePermissionFetcher(permissionRequestData); + const processInstanceBreadcrumbElement = () => { const processModelFullIdentifier = getProcessModelFullIdentifierFromSearchParams(searchParams); @@ -21,10 +39,11 @@ export default function ProcessInstanceList() { @@ -32,13 +51,46 @@ export default function ProcessInstanceList() { }; const processInstanceTitleElement = () => { - return

Process Instances

; + if (variant === 'all') { + return

All Process Instances

; + } + return

My Process Instances

; }; + + let selectedTabIndex = 0; + if (variant === 'all') { + selectedTabIndex = 1; + } return ( <> + + + { + navigate('/admin/process-instances/for-me'); + }} + > + For Me + + + { + navigate('/admin/process-instances/all'); + }} + > + All + + + + +
{processInstanceBreadcrumbElement()} {processInstanceTitleElement()} - + ); } diff --git a/spiffworkflow-frontend/src/routes/ProcessInstanceLogList.tsx b/spiffworkflow-frontend/src/routes/ProcessInstanceLogList.tsx index 37ef5519c..b4a4f683a 100644 --- a/spiffworkflow-frontend/src/routes/ProcessInstanceLogList.tsx +++ b/spiffworkflow-frontend/src/routes/ProcessInstanceLogList.tsx @@ -45,7 +45,7 @@ export default function ProcessInstanceLogList() { const rowToUse = row as any; return ( - {rowToUse.id} + {rowToUse.id} {rowToUse.message} {rowToUse.bpmn_task_name} {isDetailedView && ( @@ -114,6 +114,7 @@ export default function ProcessInstanceLogList() { { searchParams.set('detailed', 'false'); setSearchParams(searchParams); @@ -123,6 +124,7 @@ export default function ProcessInstanceLogList() { { searchParams.set('detailed', 'true'); setSearchParams(searchParams); diff --git a/spiffworkflow-frontend/src/routes/ProcessInstanceReportList.tsx b/spiffworkflow-frontend/src/routes/ProcessInstanceReportList.tsx index 906fb3142..b753d3074 100644 --- a/spiffworkflow-frontend/src/routes/ProcessInstanceReportList.tsx +++ b/spiffworkflow-frontend/src/routes/ProcessInstanceReportList.tsx @@ -31,9 +31,7 @@ export default function ProcessInstanceReportList() { return ( - + {rowToUse.identifier} diff --git a/spiffworkflow-frontend/src/routes/ProcessInstanceShow.tsx b/spiffworkflow-frontend/src/routes/ProcessInstanceShow.tsx index 9a0495d1d..678ebdf2a 100644 --- a/spiffworkflow-frontend/src/routes/ProcessInstanceShow.tsx +++ b/spiffworkflow-frontend/src/routes/ProcessInstanceShow.tsx @@ -1,6 +1,11 @@ import { useContext, useEffect, useState } from 'react'; import Editor from '@monaco-editor/react'; -import { useParams, useNavigate, Link } from 'react-router-dom'; +import { + useParams, + useNavigate, + Link, + useSearchParams, +} from 'react-router-dom'; import { TrashCan, StopOutline, @@ -20,6 +25,7 @@ import { ButtonSet, Tag, Modal, + Dropdown, Stack, // @ts-ignore } from 
'@carbon/react'; @@ -34,21 +40,40 @@ import { import ButtonWithConfirmation from '../components/ButtonWithConfirmation'; import ErrorContext from '../contexts/ErrorContext'; import { useUriListForPermissions } from '../hooks/UriListForPermissions'; -import { PermissionsToCheck } from '../interfaces'; +import { + PermissionsToCheck, + ProcessData, + ProcessInstance, + ProcessInstanceTask, +} from '../interfaces'; import { usePermissionFetcher } from '../hooks/PermissionService'; +import ProcessInstanceClass from '../classes/ProcessInstanceClass'; -export default function ProcessInstanceShow() { +type OwnProps = { + variant: string; +}; + +export default function ProcessInstanceShow({ variant }: OwnProps) { const navigate = useNavigate(); const params = useParams(); + const [searchParams] = useSearchParams(); - const [processInstance, setProcessInstance] = useState(null); - const [tasks, setTasks] = useState | null>(null); + const [processInstance, setProcessInstance] = + useState(null); + const [tasks, setTasks] = useState(null); const [tasksCallHadError, setTasksCallHadError] = useState(false); const [taskToDisplay, setTaskToDisplay] = useState(null); const [taskDataToDisplay, setTaskDataToDisplay] = useState(''); + const [processDataToDisplay, setProcessDataToDisplay] = + useState(null); const [editingTaskData, setEditingTaskData] = useState(false); + const [selectingEvent, setSelectingEvent] = useState(false); + const [eventToSend, setEventToSend] = useState({}); + const [eventPayload, setEventPayload] = useState('{}'); + const [eventTextEditorEnabled, setEventTextEditorEnabled] = + useState(false); - const setErrorMessage = (useContext as any)(ErrorContext)[1]; + const setErrorObject = (useContext as any)(ErrorContext)[1]; const unModifiedProcessModelId = unModifyProcessIdentifierForPathParam( `${params.process_model_id}` @@ -56,14 +81,24 @@ export default function ProcessInstanceShow() { const modifiedProcessModelId = params.process_model_id; const { targetUris 
} = useUriListForPermissions(); + const taskListPath = + variant === 'all' + ? targetUris.processInstanceTaskListPath + : targetUris.processInstanceTaskListForMePath; + const permissionRequestData: PermissionsToCheck = { + [`${targetUris.processInstanceResumePath}`]: ['POST'], + [`${targetUris.processInstanceSuspendPath}`]: ['POST'], + [`${targetUris.processInstanceTerminatePath}`]: ['POST'], + [targetUris.processInstanceResetPath]: ['POST'], [targetUris.messageInstanceListPath]: ['GET'], - [targetUris.processInstanceTaskListPath]: ['GET'], [targetUris.processInstanceActionPath]: ['DELETE'], [targetUris.processInstanceLogListPath]: ['GET'], - [`${targetUris.processInstanceActionPath}/suspend`]: ['PUT'], - [`${targetUris.processInstanceActionPath}/terminate`]: ['PUT'], - [`${targetUris.processInstanceActionPath}/resume`]: ['PUT'], + [targetUris.processInstanceTaskListDataPath]: ['GET', 'PUT'], + [targetUris.processInstanceSendEventPath]: ['POST'], + [targetUris.processInstanceCompleteTaskPath]: ['POST'], + [targetUris.processModelShowPath]: ['PUT'], + [taskListPath]: ['GET'], }; const { ability, permissionsLoaded } = usePermissionFetcher( permissionRequestData @@ -80,17 +115,32 @@ export default function ProcessInstanceShow() { const processTaskFailure = () => { setTasksCallHadError(true); }; + let queryParams = ''; + const processIdentifier = searchParams.get('process_identifier'); + if (processIdentifier) { + queryParams = `?process_identifier=${processIdentifier}`; + } + let apiPath = '/process-instances/for-me'; + if (variant === 'all') { + apiPath = '/process-instances'; + } HttpService.makeCallToBackend({ - path: `/process-instances/${modifiedProcessModelId}/${params.process_instance_id}`, + path: `${apiPath}/${modifiedProcessModelId}/${params.process_instance_id}${queryParams}`, successCallback: setProcessInstance, }); let taskParams = '?all_tasks=true'; if (typeof params.spiff_step !== 'undefined') { taskParams = 
`${taskParams}&spiff_step=${params.spiff_step}`; } - if (ability.can('GET', targetUris.processInstanceTaskListPath)) { + let taskPath = ''; + if (ability.can('GET', targetUris.processInstanceTaskListDataPath)) { + taskPath = `${targetUris.processInstanceTaskListDataPath}${taskParams}`; + } else if (ability.can('GET', taskListPath)) { + taskPath = `${taskListPath}${taskParams}`; + } + if (taskPath) { HttpService.makeCallToBackend({ - path: `${targetUris.processInstanceTaskListPath}${taskParams}`, + path: taskPath, successCallback: setTasks, failureCallback: processTaskFailure, }); @@ -98,7 +148,16 @@ export default function ProcessInstanceShow() { setTasksCallHadError(true); } } - }, [params, modifiedProcessModelId, permissionsLoaded, ability, targetUris]); + }, [ + params, + modifiedProcessModelId, + permissionsLoaded, + ability, + targetUris, + searchParams, + taskListPath, + variant, + ]); const deleteProcessInstance = () => { HttpService.makeCallToBackend({ @@ -115,7 +174,7 @@ export default function ProcessInstanceShow() { const terminateProcessInstance = () => { HttpService.makeCallToBackend({ - path: `${targetUris.processInstanceActionPath}/terminate`, + path: `${targetUris.processInstanceTerminatePath}`, successCallback: refreshPage, httpMethod: 'POST', }); @@ -123,7 +182,7 @@ export default function ProcessInstanceShow() { const suspendProcessInstance = () => { HttpService.makeCallToBackend({ - path: `${targetUris.processInstanceActionPath}/suspend`, + path: `${targetUris.processInstanceSuspendPath}`, successCallback: refreshPage, httpMethod: 'POST', }); @@ -131,7 +190,7 @@ export default function ProcessInstanceShow() { const resumeProcessInstance = () => { HttpService.makeCallToBackend({ - path: `${targetUris.processInstanceActionPath}/resume`, + path: `${targetUris.processInstanceResumePath}`, successCallback: refreshPage, httpMethod: 'POST', }); @@ -140,75 +199,83 @@ export default function ProcessInstanceShow() { const getTaskIds = () => { const 
taskIds = { completed: [], readyOrWaiting: [] }; if (tasks) { - tasks.forEach(function getUserTasksElement(task: any) { + tasks.forEach(function getUserTasksElement(task: ProcessInstanceTask) { if (task.state === 'COMPLETED') { - (taskIds.completed as any).push(task.name); + (taskIds.completed as any).push(task); } if (task.state === 'READY' || task.state === 'WAITING') { - (taskIds.readyOrWaiting as any).push(task.name); + (taskIds.readyOrWaiting as any).push(task); } }); } return taskIds; }; - const currentSpiffStep = (processInstanceToUse: any) => { - if (typeof params.spiff_step === 'undefined') { - return processInstanceToUse.spiff_step; + const currentSpiffStep = () => { + if (processInstance && typeof params.spiff_step === 'undefined') { + return processInstance.spiff_step || 0; } return Number(params.spiff_step); }; - const showingFirstSpiffStep = (processInstanceToUse: any) => { - return currentSpiffStep(processInstanceToUse) === 1; + const showingFirstSpiffStep = () => { + return currentSpiffStep() === 1; }; - const showingLastSpiffStep = (processInstanceToUse: any) => { - return ( - currentSpiffStep(processInstanceToUse) === processInstanceToUse.spiff_step - ); + const showingLastSpiffStep = () => { + return processInstance && currentSpiffStep() === processInstance.spiff_step; }; - const spiffStepLink = ( - processInstanceToUse: any, - label: any, - distance: number - ) => { + const spiffStepLink = (label: any, distance: number) => { + const processIdentifier = searchParams.get('process_identifier'); + let queryParams = ''; + if (processIdentifier) { + queryParams = `?process_identifier=${processIdentifier}`; + } return ( {label} ); }; - const previousStepLink = (processInstanceToUse: any) => { - if (showingFirstSpiffStep(processInstanceToUse)) { + const previousStepLink = () => { + if (showingFirstSpiffStep()) { return null; } - return spiffStepLink(processInstanceToUse, , -1); + return spiffStepLink(, -1); }; - const nextStepLink = 
(processInstanceToUse: any) => { - if (showingLastSpiffStep(processInstanceToUse)) { + const nextStepLink = () => { + if (showingLastSpiffStep()) { return null; } - return spiffStepLink(processInstanceToUse, , 1); + return spiffStepLink(, 1); }; - const getInfoTag = (processInstanceToUse: any) => { + const resetProcessInstance = () => { + HttpService.makeCallToBackend({ + path: `${targetUris.processInstanceResetPath}/${currentSpiffStep()}`, + successCallback: refreshPage, + httpMethod: 'POST', + }); + }; + + const getInfoTag = () => { + if (!processInstance) { + return null; + } const currentEndDate = convertSecondsToFormattedDateTime( - processInstanceToUse.end_in_seconds + processInstance.end_in_seconds || 0 ); let currentEndDateTag; if (currentEndDate) { @@ -219,7 +286,7 @@ export default function ProcessInstanceShow() { {convertSecondsToFormattedDateTime( - processInstanceToUse.end_in_seconds + processInstance.end_in_seconds || 0 ) || 'N/A'} @@ -227,13 +294,13 @@ export default function ProcessInstanceShow() { } let statusIcon = ; - if (processInstanceToUse.status === 'suspended') { + if (processInstance.status === 'suspended') { statusIcon = ; - } else if (processInstanceToUse.status === 'complete') { + } else if (processInstance.status === 'complete') { statusIcon = ; - } else if (processInstanceToUse.status === 'terminated') { + } else if (processInstance.status === 'terminated') { statusIcon = ; - } else if (processInstanceToUse.status === 'error') { + } else if (processInstance.status === 'error') { statusIcon = ; } @@ -245,7 +312,7 @@ export default function ProcessInstanceShow() { {convertSecondsToFormattedDateTime( - processInstanceToUse.start_in_seconds + processInstance.start_in_seconds || 0 )} @@ -256,7 +323,7 @@ export default function ProcessInstanceShow() { - {processInstanceToUse.status} {statusIcon} + {processInstance.status} {statusIcon} @@ -299,11 +366,10 @@ export default function ProcessInstanceShow() { ); }; - const terminateButton = 
(processInstanceToUse: any) => { + const terminateButton = () => { if ( - ['complete', 'terminated', 'error'].indexOf( - processInstanceToUse.status - ) === -1 + processInstance && + !ProcessInstanceClass.terminalStatuses().includes(processInstance.status) ) { return ( @@ -320,11 +386,12 @@ export default function ProcessInstanceShow() { return
; }; - const suspendButton = (processInstanceToUse: any) => { + const suspendButton = () => { if ( - ['complete', 'terminated', 'error', 'suspended'].indexOf( - processInstanceToUse.status - ) === -1 + processInstance && + !ProcessInstanceClass.terminalStatuses() + .concat(['suspended']) + .includes(processInstance.status) ) { return ( + ); + buttons.push( + + ); + } else if (selectingEvent) { + buttons.push( + + ); + buttons.push( + + ); + } else { + if (canEditTaskData(task)) { buttons.push( - ); - buttons.push( - - ); - } else { - buttons.push( - ); } + if (canCompleteTask(task)) { + buttons.push( + + ); + buttons.push( + + ); + } + if (canSendEvent(task)) { + buttons.push( + + ); + } + if (canResetProcess(task)) { + buttons.push( + + ); + } } return buttons; @@ -518,8 +768,42 @@ export default function ProcessInstanceShow() { ); }; - const taskDataDisplayArea = () => { + const eventSelector = (candidateEvents: any) => { + const editor = ( + setEventPayload(value || '{}')} + options={{ readOnly: !eventTextEditorEnabled }} + /> + ); + return selectingEvent ? ( + + item.name || item.label || item.typename} + onChange={(value: any) => { + setEventToSend(value.selectedItem); + setEventTextEditorEnabled( + value.selectedItem.typename === 'MessageEventDefinition' + ); + }} + /> + {editor} + + ) : ( + taskDataContainer() + ); + }; + + const taskUpdateDisplayArea = () => { const taskToUse: any = { ...taskToDisplay, data: taskDataToDisplay }; + const candidateEvents: any = getEvents(taskToUse); if (taskToDisplay) { return ( {taskToUse.name} ({taskToUse.type}): {taskToUse.state} - {taskDataButtons(taskToUse)} + {taskDisplayButtons(taskToUse)} - {taskDataContainer()} + {selectingEvent + ? 
eventSelector(candidateEvents) + : taskDataContainer()} ); } return null; }; - const stepsElement = (processInstanceToUse: any) => { + const stepsElement = () => { + if (!processInstance) { + return null; + } return ( - {previousStepLink(processInstanceToUse)} - Step {currentSpiffStep(processInstanceToUse)} of{' '} - {processInstanceToUse.spiff_step} - {nextStepLink(processInstanceToUse)} + {previousStepLink()} + Step {currentSpiffStep()} of {processInstance.spiff_step} + {nextStepLink()} ); }; - const buttonIcons = (processInstanceToUse: any) => { + const buttonIcons = () => { + if (!processInstance) { + return null; + } const elements = []; - if ( - ability.can('POST', `${targetUris.processInstanceActionPath}/terminate`) - ) { - elements.push(terminateButton(processInstanceToUse)); + if (ability.can('POST', `${targetUris.processInstanceTerminatePath}`)) { + elements.push(terminateButton()); + } + if (ability.can('POST', `${targetUris.processInstanceSuspendPath}`)) { + elements.push(suspendButton()); + } + if (ability.can('POST', `${targetUris.processInstanceResumePath}`)) { + elements.push(resumeButton()); } if ( - ability.can('POST', `${targetUris.processInstanceActionPath}/suspend`) + ability.can('DELETE', targetUris.processInstanceActionPath) && + ProcessInstanceClass.terminalStatuses().includes(processInstance.status) ) { - elements.push(suspendButton(processInstanceToUse)); - } - if (ability.can('POST', `${targetUris.processInstanceActionPath}/resume`)) { - elements.push(resumeButton(processInstanceToUse)); - } - if (ability.can('DELETE', targetUris.processInstanceActionPath)) { elements.push( @@ -586,7 +876,6 @@ export default function ProcessInstanceShow() { }; if (processInstance && (tasks || tasksCallHadError)) { - const processInstanceToUse = processInstance as any; const taskIds = getTaskIds(); const processModelId = unModifyProcessIdentifierForPathParam( params.process_model_id ? 
params.process_model_id : '' @@ -602,28 +891,29 @@ export default function ProcessInstanceShow() { entityType: 'process-model-id', linkLastItem: true, }, - [`Process Instance Id: ${processInstanceToUse.id}`], + [`Process Instance Id: ${processInstance.id}`], ]} />

- Process Instance Id: {processInstanceToUse.id} + Process Instance Id: {processInstance.id}

- {buttonIcons(processInstanceToUse)} + {buttonIcons()}


- {getInfoTag(processInstanceToUse)} + {getInfoTag()}
- {taskDataDisplayArea()} - {stepsElement(processInstanceToUse)} + {taskUpdateDisplayArea()} + {processDataDisplayArea()} + {stepsElement()}
diff --git a/spiffworkflow-frontend/src/routes/ProcessModelEditDiagram.tsx b/spiffworkflow-frontend/src/routes/ProcessModelEditDiagram.tsx index 1a5c751f7..cc3ac8789 100644 --- a/spiffworkflow-frontend/src/routes/ProcessModelEditDiagram.tsx +++ b/spiffworkflow-frontend/src/routes/ProcessModelEditDiagram.tsx @@ -5,12 +5,21 @@ import { useParams, useSearchParams, } from 'react-router-dom'; -// @ts-ignore -import { Button, Modal, Stack, Content } from '@carbon/react'; +import { + Button, + Modal, + Content, + Tabs, + TabList, + Tab, + TabPanels, + TabPanel, + // @ts-ignore +} from '@carbon/react'; import Row from 'react-bootstrap/Row'; import Col from 'react-bootstrap/Col'; -import Editor from '@monaco-editor/react'; +import Editor, { DiffEditor } from '@monaco-editor/react'; import MDEditor from '@uiw/react-md-editor'; import ReactDiagramEditor from '../components/ReactDiagramEditor'; @@ -25,6 +34,7 @@ import { ProcessReference, } from '../interfaces'; import ProcessSearch from '../components/ProcessSearch'; +import { Notification } from '../components/Notification'; export default function ProcessModelEditDiagram() { const [showFileNameEditor, setShowFileNameEditor] = useState(false); @@ -46,6 +56,8 @@ export default function ProcessModelEditDiagram() { const [processSearchEventBus, setProcessSearchEventBus] = useState(null); const [processSearchElement, setProcessSearchElement] = useState(null); const [processes, setProcesses] = useState([]); + const [displaySaveFileMessage, setDisplaySaveFileMessage] = + useState(false); const handleShowMarkdownEditor = () => setShowMarkdownEditor(true); @@ -69,10 +81,10 @@ export default function ProcessModelEditDiagram() { interface ScriptUnitTestResult { result: boolean; - context: object; - error: string; - line_number: number; - offset: number; + context?: object; + error?: string; + line_number?: number; + offset?: number; } const [currentScriptUnitTest, setCurrentScriptUnitTest] = @@ -86,7 +98,7 @@ export default function 
ProcessModelEditDiagram() { const navigate = useNavigate(); const [searchParams] = useSearchParams(); - const setErrorMessage = (useContext as any)(ErrorContext)[1]; + const setErrorObject = (useContext as any)(ErrorContext)[1]; const [processModelFile, setProcessModelFile] = useState( null ); @@ -147,6 +159,7 @@ export default function ProcessModelEditDiagram() { }; const navigateToProcessModelFile = (_result: any) => { + setDisplaySaveFileMessage(true); if (!params.file_name) { const fileNameWithExtension = `${newFileName}.${searchParams.get( 'file_type' @@ -158,7 +171,8 @@ export default function ProcessModelEditDiagram() { }; const saveDiagram = (bpmnXML: any, fileName = params.file_name) => { - setErrorMessage(null); + setDisplaySaveFileMessage(false); + setErrorObject(null); setBpmnXmlForDiagramRendering(bpmnXML); let url = `/process-models/${modifiedProcessModelId}/files`; @@ -184,7 +198,7 @@ export default function ProcessModelEditDiagram() { HttpService.makeCallToBackend({ path: url, successCallback: navigateToProcessModelFile, - failureCallback: setErrorMessage, + failureCallback: setErrorObject, httpMethod, postBody: formData, }); @@ -397,6 +411,13 @@ export default function ProcessModelEditDiagram() { }; }; + const jsonEditorOptions = () => { + return Object.assign(generalEditorOptions(), { + minimap: { enabled: false }, + folding: true, + }); + }; + const setPreviousScriptUnitTest = () => { resetUnitTextResult(); const newScriptIndex = currentScriptUnitTestIndex - 1; @@ -457,6 +478,21 @@ export default function ProcessModelEditDiagram() { const runCurrentUnitTest = () => { if (currentScriptUnitTest && scriptElement) { + let inputJson = ''; + let expectedJson = ''; + try { + inputJson = JSON.parse(currentScriptUnitTest.inputJson.value); + expectedJson = JSON.parse( + currentScriptUnitTest.expectedOutputJson.value + ); + } catch (e) { + setScriptUnitTestResult({ + result: false, + error: 'The JSON provided contains a formatting error.', + }); + return; + 
} + resetUnitTextResult(); HttpService.makeCallToBackend({ path: `/process-models/${modifiedProcessModelId}/script-unit-tests/run`, @@ -465,37 +501,56 @@ export default function ProcessModelEditDiagram() { postBody: { bpmn_task_identifier: (scriptElement as any).id, python_script: scriptText, - input_json: JSON.parse(currentScriptUnitTest.inputJson.value), - expected_output_json: JSON.parse( - currentScriptUnitTest.expectedOutputJson.value - ), + input_json: inputJson, + expected_output_json: expectedJson, }, }); } }; const unitTestFailureElement = () => { - if ( - scriptUnitTestResult && - scriptUnitTestResult.result === false && - !scriptUnitTestResult.line_number - ) { - let errorStringElement = null; - if (scriptUnitTestResult.error) { - errorStringElement = ( - - Received error when running script:{' '} - {JSON.stringify(scriptUnitTestResult.error)} - - ); - } - let errorContextElement = null; + if (scriptUnitTestResult && scriptUnitTestResult.result === false) { + let errorObject = ''; if (scriptUnitTestResult.context) { + errorObject = 'Unexpected result. Please see the comparison below.'; + } else if (scriptUnitTestResult.line_number) { + errorObject = `Error encountered running the script. Please check the code around line ${scriptUnitTestResult.line_number}`; + } else { + errorObject = `Error encountered running the script. ${JSON.stringify( + scriptUnitTestResult.error + )}`; + } + let errorStringElement = {errorObject}; + + let errorContextElement = null; + + if (scriptUnitTestResult.context) { + errorStringElement = ( + Unexpected result. Please see the comparison below. 
+ ); + let outputJson = '{}'; + if (currentScriptUnitTest) { + outputJson = JSON.stringify( + JSON.parse(currentScriptUnitTest.expectedOutputJson.value), + null, + ' ' + ); + } + const contextJson = JSON.stringify( + scriptUnitTestResult.context, + null, + ' ' + ); errorContextElement = ( - - Received unexpected output:{' '} - {JSON.stringify(scriptUnitTestResult.context)} - + ); } return ( @@ -539,19 +594,35 @@ export default function ProcessModelEditDiagram() { ); } + let inputJson = currentScriptUnitTest.inputJson.value; + let outputJson = currentScriptUnitTest.expectedOutputJson.value; + try { + inputJson = JSON.stringify( + JSON.parse(currentScriptUnitTest.inputJson.value), + null, + ' ' + ); + outputJson = JSON.stringify( + JSON.parse(currentScriptUnitTest.expectedOutputJson.value), + null, + ' ' + ); + } catch (e) { + // Attemping to format the json failed -- it's invalid. + } + return (
-
); } return null; }; - const scriptEditor = () => { + return ( + + ); + }; + const scriptEditorAndTests = () => { let scriptName = ''; if (scriptElement) { scriptName = (scriptElement as any).di.bpmnElement.name; } - return ( - - {scriptUnitTestEditorElement()} + + + Script Editor + Unit Tests + + + {scriptEditor()} + {scriptUnitTestEditorElement()} + + ); }; @@ -819,6 +899,7 @@ export default function ProcessModelEditDiagram() { processModelId={params.process_model_id || ''} saveDiagram={saveDiagram} onDeleteFile={onDeleteFile} + isPrimaryFile={params.file_name === processModel?.primary_file_name} onSetPrimaryFile={onSetPrimaryFileCallback} diagramXML={bpmnXmlForDiagramRendering} fileName={params.file_name} @@ -836,6 +917,20 @@ export default function ProcessModelEditDiagram() { ); }; + const saveFileMessage = () => { + if (displaySaveFileMessage) { + return ( + setDisplaySaveFileMessage(false)} + > + Changes to the file were saved. + + ); + } + return null; + }; + // if a file name is not given then this is a new model and the ReactDiagramEditor component will handle it if ((bpmnXmlForDiagramRendering || !params.file_name) && processModel) { const processModelFileName = processModelFile ? processModelFile.name : ''; @@ -856,9 +951,10 @@ export default function ProcessModelEditDiagram() { Process Model File{processModelFile ? ': ' : ''} {processModelFileName} + {saveFileMessage()} {appropriateEditor()} {newFileNameBox()} - {scriptEditor()} + {scriptEditorAndTests()} {markdownEditor()} {processModelSelector()}
diff --git a/spiffworkflow-frontend/src/routes/ProcessModelNewExperimental.tsx b/spiffworkflow-frontend/src/routes/ProcessModelNewExperimental.tsx new file mode 100644 index 000000000..af8be8226 --- /dev/null +++ b/spiffworkflow-frontend/src/routes/ProcessModelNewExperimental.tsx @@ -0,0 +1,73 @@ +import { useState } from 'react'; +import { useNavigate, useParams } from 'react-router-dom'; +// @ts-ignore +import { TextArea, Button, Form } from '@carbon/react'; +import ProcessBreadcrumb from '../components/ProcessBreadcrumb'; +import { ProcessModel } from '../interfaces'; +import { modifyProcessIdentifierForPathParam } from '../helpers'; +import HttpService from '../services/HttpService'; + +export default function ProcessModelNewExperimental() { + const params = useParams(); + const navigate = useNavigate(); + const [processModelDescriptiveText, setProcessModelDescriptiveText] = + useState(''); + + const helperText = + 'Create a bug tracker process model with a bug-details form that collects summary, description, and priority'; + + const navigateToProcessModel = (result: ProcessModel) => { + if ('id' in result) { + const modifiedProcessModelPathFromResult = + modifyProcessIdentifierForPathParam(result.id); + navigate(`/admin/process-models/${modifiedProcessModelPathFromResult}`); + } + }; + + const handleFormSubmission = (event: any) => { + event.preventDefault(); + HttpService.makeCallToBackend({ + path: `/process-models-natural-language/${params.process_group_id}`, + successCallback: navigateToProcessModel, + httpMethod: 'POST', + postBody: { natural_language_text: processModelDescriptiveText }, + }); + }; + + const ohYeeeeaah = () => { + setProcessModelDescriptiveText(helperText); + }; + + return ( + <> + + {/* eslint-disable-next-line jsx-a11y/no-noninteractive-element-interactions */} +

+ Add Process Model +

+
+