using the new spiff branch and tests are now passing w/ burnettk

jasquat 2023-05-11 12:41:52 -04:00
parent 4bff2bca95
commit 233d8a9832
14 changed files with 50 additions and 120 deletions

View File

@ -3088,8 +3088,8 @@ lxml = "*"
[package.source]
type = "git"
url = "https://github.com/sartography/SpiffWorkflow"
-reference = "main"
-resolved_reference = "a844b34f9327a572d8f26110a01af21e22c548d1"
+reference = "improvement/task-spec-attributes"
+resolved_reference = "544614aa91201571d4c4132988aeca03a9cfa01e"
[[package]]
name = "sqlalchemy"
@ -3702,4 +3702,4 @@ testing = ["big-O", "flake8 (<5)", "jaraco.functools", "jaraco.itertools", "more
[metadata]
lock-version = "2.0"
python-versions = ">=3.9,<3.12"
-content-hash = "71d35a0ac0d7c680abd5cdcd8d28d08e8f582bc7ae9fc0fce2e0001bd3730930"
+content-hash = "01580056dff4ab91b362301e85c59b0ecc2e0a007d7adbf6a74d6fc84c78c3f6"

View File

@ -27,7 +27,7 @@ flask-marshmallow = "*"
flask-migrate = "*"
flask-restful = "*"
werkzeug = "*"
-SpiffWorkflow = {git = "https://github.com/sartography/SpiffWorkflow", rev = "main"}
+SpiffWorkflow = {git = "https://github.com/sartography/SpiffWorkflow", rev = "improvement/task-spec-attributes"}
# SpiffWorkflow = {git = "https://github.com/sartography/SpiffWorkflow", rev = "6cad2981712bb61eca23af1adfafce02d3277cb9"}
# SpiffWorkflow = {develop = true, path = "../../spiffworkflow/" }
sentry-sdk = "^1.10"

View File

@ -15,9 +15,9 @@ from flask import jsonify
from flask import make_response
from sentry_sdk import capture_exception
from sentry_sdk import set_tag
+from SpiffWorkflow.bpmn.exceptions import WorkflowTaskException # type: ignore
from SpiffWorkflow.exceptions import SpiffWorkflowException # type: ignore
from SpiffWorkflow.exceptions import WorkflowException
-from SpiffWorkflow.exceptions import WorkflowTaskException
from SpiffWorkflow.specs.base import TaskSpec # type: ignore
from SpiffWorkflow.task import Task # type: ignore

View File

@ -20,8 +20,8 @@ from flask import make_response
from flask import stream_with_context
from flask.wrappers import Response
from jinja2 import TemplateSyntaxError
+from SpiffWorkflow.bpmn.exceptions import WorkflowTaskException # type: ignore
from SpiffWorkflow.bpmn.workflow import BpmnWorkflow # type: ignore
-from SpiffWorkflow.exceptions import WorkflowTaskException # type: ignore
from SpiffWorkflow.task import Task as SpiffTask # type: ignore
from SpiffWorkflow.task import TaskState
from sqlalchemy import and_
@ -471,15 +471,7 @@ def _interstitial_stream(process_instance: ProcessInstanceModel) -> Generator[st
def get_ready_engine_step_count(bpmn_process_instance: BpmnWorkflow) -> int:
-return len(
-list(
-[
-t
-for t in bpmn_process_instance.get_tasks(TaskState.READY)
-if bpmn_process_instance._is_engine_task(t.task_spec)
-]
-)
-)
+return len(list([t for t in bpmn_process_instance.get_tasks(TaskState.READY) if not t.task_spec.manual]))
def _dequeued_interstitial_stream(process_instance_id: int) -> Generator[Optional[str], Optional[str], None]:
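Note: task specs expose a boolean manual flag, so the private BpmnWorkflow._is_engine_task helper is no longer needed here; a ready engine step is simply a READY task whose spec is not manual. A minimal standalone sketch of that filter (the function name is illustrative, not the backend's exact code):

from SpiffWorkflow.bpmn.workflow import BpmnWorkflow  # type: ignore
from SpiffWorkflow.task import TaskState  # type: ignore

def count_ready_engine_steps(bpmn_process_instance: BpmnWorkflow) -> int:
    # Engine steps are READY tasks whose spec does not require human interaction.
    ready_tasks = bpmn_process_instance.get_tasks(TaskState.READY)
    return sum(1 for t in ready_tasks if not t.task_spec.manual)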

View File

@ -28,6 +28,7 @@ from flask import current_app
from lxml import etree # type: ignore
from lxml.etree import XMLSyntaxError # type: ignore
from RestrictedPython import safe_globals # type: ignore
+from SpiffWorkflow.bpmn.exceptions import WorkflowTaskException # type: ignore
from SpiffWorkflow.bpmn.parser.ValidationException import ValidationException # type: ignore
from SpiffWorkflow.bpmn.PythonScriptEngine import PythonScriptEngine # type: ignore
from SpiffWorkflow.bpmn.PythonScriptEngineEnvironment import BasePythonScriptEngineEnvironment # type: ignore
@ -39,12 +40,9 @@ from SpiffWorkflow.bpmn.serializer.task_spec import ( # type: ignore
)
from SpiffWorkflow.bpmn.serializer.workflow import BpmnWorkflowSerializer # type: ignore
from SpiffWorkflow.bpmn.specs.BpmnProcessSpec import BpmnProcessSpec # type: ignore
-from SpiffWorkflow.bpmn.specs.events.EndEvent import EndEvent # type: ignore
-from SpiffWorkflow.bpmn.specs.events.StartEvent import StartEvent # type: ignore
from SpiffWorkflow.bpmn.specs.SubWorkflowTask import SubWorkflowTask # type: ignore
from SpiffWorkflow.bpmn.workflow import BpmnWorkflow # type: ignore
from SpiffWorkflow.exceptions import WorkflowException # type: ignore
-from SpiffWorkflow.exceptions import WorkflowTaskException
from SpiffWorkflow.serializer.exceptions import MissingSpecError # type: ignore
from SpiffWorkflow.spiff.parser.process import SpiffBpmnParser # type: ignore
from SpiffWorkflow.spiff.serializer.config import SPIFF_SPEC_CONFIG # type: ignore
@ -68,10 +66,6 @@ from spiffworkflow_backend.models.group import GroupModel
from spiffworkflow_backend.models.human_task import HumanTaskModel
from spiffworkflow_backend.models.human_task_user import HumanTaskUserModel
from spiffworkflow_backend.models.json_data import JsonDataModel
-from spiffworkflow_backend.models.message_instance import MessageInstanceModel
-from spiffworkflow_backend.models.message_instance_correlation import (
-MessageInstanceCorrelationRuleModel,
-)
from spiffworkflow_backend.models.process_instance import ProcessInstanceModel
from spiffworkflow_backend.models.process_instance import ProcessInstanceStatus
from spiffworkflow_backend.models.process_instance_event import ProcessInstanceEventType
@ -901,9 +895,8 @@ class ProcessInstanceProcessor:
bpmn_process_definition.bpmn_identifier,
bpmn_process_definition=bpmn_process_definition,
)
for task_bpmn_identifier, task_bpmn_properties in task_specs.items():
-task_bpmn_name = task_bpmn_properties["description"]
+task_bpmn_name = task_bpmn_properties["bpmn_name"]
task_definition = TaskDefinitionModel(
bpmn_process_definition=bpmn_process_definition,
bpmn_identifier=task_bpmn_identifier,
@ -1036,7 +1029,7 @@ class ProcessInstanceProcessor:
for ready_or_waiting_task in ready_or_waiting_tasks:
# filter out non-usertasks
task_spec = ready_or_waiting_task.task_spec
-if not self.bpmn_process_instance._is_engine_task(task_spec):
+if task_spec.manual:
potential_owner_hash = self.get_potential_owner_ids_from_task(ready_or_waiting_task)
extensions = task_spec.extensions
@ -1073,8 +1066,8 @@ class ProcessInstanceProcessor:
ui_form_file_name=ui_form_file_name,
task_model_id=task_model.id,
task_id=task_guid,
-task_name=ready_or_waiting_task.task_spec.name,
-task_title=ready_or_waiting_task.task_spec.description,
+task_name=ready_or_waiting_task.task_spec.bpmn_id,
+task_title=ready_or_waiting_task.task_spec.bpmn_name,
task_type=ready_or_waiting_task.task_spec.__class__.__name__,
task_status=ready_or_waiting_task.get_state_name(),
lane_assignment_id=potential_owner_hash["lane_assignment_id"],
@ -1134,7 +1127,7 @@ class ProcessInstanceProcessor:
# We have to get to the actual start event
for spiff_task in self.bpmn_process_instance.get_tasks(workflow=subprocess):
spiff_task.run()
-if isinstance(spiff_task.task_spec, StartEvent):
+if spiff_task.task_spec.__class__.__name__ == "StartEvent":
break
else:
spiff_task.run()
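Note: comparing task_spec.__class__.__name__ against "StartEvent" or "EndEvent" lets the processor drop its direct StartEvent/EndEvent imports and stops it depending on the exact module path of those spec classes. A small illustrative helper (the name is made up for this sketch):

from SpiffWorkflow.task import Task as SpiffTask  # type: ignore

def spec_class_name(spiff_task: SpiffTask) -> str:
    # Identify the spec by class name instead of importing the class itself.
    return spiff_task.task_spec.__class__.__name__

# usage: if spec_class_name(spiff_task) == "StartEvent": ...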
@ -1146,7 +1139,7 @@ class ProcessInstanceProcessor:
spiff_task.workflow.last_task = spiff_task
end_in_seconds = time.time()
-if isinstance(spiff_task.task_spec, EndEvent):
+if spiff_task.task_spec.__class__.__name__ == "EndEvent":
for task in self.bpmn_process_instance.get_tasks(TaskState.DEFINITE_MASK, workflow=spiff_task.workflow):
task.complete()
@ -1387,56 +1380,6 @@ class ProcessInstanceProcessor:
# current_app.logger.debug(f"the_status: {the_status} for instance {self.process_instance_model.id}")
return the_status
-def process_bpmn_messages(self) -> None:
-"""Process_bpmn_messages."""
-bpmn_messages = self.bpmn_process_instance.get_bpmn_messages()
-for bpmn_message in bpmn_messages:
-message_instance = MessageInstanceModel(
-process_instance_id=self.process_instance_model.id,
-user_id=self.process_instance_model.process_initiator_id, # TODO: use the correct swimlane user when that is set up
-message_type="send",
-name=bpmn_message.name,
-payload=bpmn_message.payload,
-correlation_keys=self.bpmn_process_instance.correlations,
-)
-db.session.add(message_instance)
-db.session.commit()
-def queue_waiting_receive_messages(self) -> None:
-"""Queue_waiting_receive_messages."""
-waiting_events = self.bpmn_process_instance.waiting_events()
-waiting_message_events = filter(lambda e: e["event_type"] == "Message", waiting_events)
-for event in waiting_message_events:
-# Ensure we are only creating one message instance for each waiting message
-if (
-MessageInstanceModel.query.filter_by(
-process_instance_id=self.process_instance_model.id,
-message_type="receive",
-name=event["name"],
-).count()
-> 0
-):
-continue
-# Create a new Message Instance
-message_instance = MessageInstanceModel(
-process_instance_id=self.process_instance_model.id,
-user_id=self.process_instance_model.process_initiator_id,
-message_type="receive",
-name=event["name"],
-correlation_keys=self.bpmn_process_instance.correlations,
-)
-for correlation_property in event["value"]:
-message_correlation = MessageInstanceCorrelationRuleModel(
-message_instance_id=message_instance.id,
-name=correlation_property.name,
-retrieval_expression=correlation_property.retrieval_expression,
-)
-message_instance.correlation_rules.append(message_correlation)
-db.session.add(message_instance)
-db.session.commit()
def element_unit_specs_loader(self, process_id: str, element_id: str) -> Optional[Dict[str, Any]]:
full_process_model_hash = self.process_instance_model.bpmn_process_definition.full_process_model_hash
if full_process_model_hash is None:
@ -1463,7 +1406,7 @@ class ProcessInstanceProcessor:
tasks = self.bpmn_process_instance.get_tasks(TaskState.DEFINITE_MASK)
loaded_specs = set(self.bpmn_process_instance.subprocess_specs.keys())
for task in tasks:
-if task.task_spec.spec_type != "Call Activity":
+if task.task_spec.description != "Call Activity":
continue
spec_to_check = task.task_spec.spec
@ -1587,10 +1530,13 @@ class ProcessInstanceProcessor:
endtasks = []
if self.bpmn_process_instance.is_completed():
-for task in SpiffTask.Iterator(self.bpmn_process_instance.task_tree, TaskState.ANY_MASK):
+for spiff_task in SpiffTask.Iterator(self.bpmn_process_instance.task_tree, TaskState.ANY_MASK):
# Assure that we find the end event for this process_instance, and not for any sub-process_instances.
-if isinstance(task.task_spec, EndEvent) and task.workflow == self.bpmn_process_instance:
-endtasks.append(task)
+if (
+spiff_task.task_spec.__class__.__name__ == "EndEvent"
+and spiff_task.workflow == self.bpmn_process_instance
+):
+endtasks.append(spiff_task)
if len(endtasks) > 0:
return endtasks[-1]
@ -1645,7 +1591,7 @@ class ProcessInstanceProcessor:
user_tasks.reverse()
user_tasks = list(
filter(
-lambda task: not self.bpmn_process_instance._is_engine_task(task.task_spec),
+lambda task: task.task_spec.manual,
user_tasks,
)
)
@ -1741,16 +1687,11 @@ class ProcessInstanceProcessor:
def get_all_user_tasks(self) -> List[SpiffTask]:
"""Get_all_user_tasks."""
all_tasks = self.bpmn_process_instance.get_tasks(TaskState.ANY_MASK)
-return [t for t in all_tasks if not self.bpmn_process_instance._is_engine_task(t.task_spec)]
+return [t for t in all_tasks if t.task_spec.manual]
def get_all_completed_tasks(self) -> list[SpiffTask]:
all_tasks = self.bpmn_process_instance.get_tasks(TaskState.ANY_MASK)
-return [
-t
-for t in all_tasks
-if not self.bpmn_process_instance._is_engine_task(t.task_spec)
-and t.state in [TaskState.COMPLETED, TaskState.CANCELLED]
-]
+return [t for t in all_tasks if t.task_spec.manual and t.state in [TaskState.COMPLETED, TaskState.CANCELLED]]
def get_all_waiting_tasks(self) -> list[SpiffTask]:
"""Get_all_ready_or_waiting_tasks."""

View File

@ -98,8 +98,8 @@ class ProcessInstanceService:
#
# example: {'event_type': 'Duration Timer', 'name': None, 'value': '2023-04-27T20:15:10.626656+00:00'}
#
-event_type = waiting_event.get("event_type")
-if event_type == "Duration Timer":
+spiff_event_type = waiting_event.get("event_type")
+if spiff_event_type == "DurationTimerEventDefinition":
event_value = waiting_event.get("value")
if event_value is not None:
event_datetime = TimerEventDefinition.get_datetime(event_value)
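Note: waiting_events() now reports the event definition class name (for example "DurationTimerEventDefinition") as event_type. A minimal sketch of the skip check, consistent with the tests near the end of this commit; the real method uses TimerEventDefinition.get_datetime as shown above, while this standalone version parses the ISO timestamp directly:

from datetime import datetime

def waiting_event_can_be_skipped(waiting_event: dict, now_in_utc: datetime) -> bool:
    # A duration timer that only fires in the future does not block the instance yet.
    if waiting_event.get("event_type") == "DurationTimerEventDefinition":
        event_value = waiting_event.get("value")
        if event_value is not None:
            return datetime.fromisoformat(event_value) > now_in_utc
    return False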
@ -456,7 +456,7 @@ class ProcessInstanceService:
calling_subprocess_task_id: Optional[str] = None,
) -> Task:
"""Spiff_task_to_api_task."""
-task_type = spiff_task.task_spec.spec_type
+task_type = spiff_task.task_spec.description
props = {}
if hasattr(spiff_task.task_spec, "extensions"):
@ -500,8 +500,8 @@ class ProcessInstanceService:
task = Task(
spiff_task.id,
-spiff_task.task_spec.name,
-spiff_task.task_spec.description,
+spiff_task.task_spec.bpmn_id,
+spiff_task.task_spec.bpmn_name,
task_type,
spiff_task.get_state_name(),
can_complete=can_complete,

View File

@ -4,7 +4,7 @@ from typing import Optional
from typing import Tuple
from flask import g
-from SpiffWorkflow.exceptions import WorkflowTaskException # type: ignore
+from SpiffWorkflow.bpmn.exceptions import WorkflowTaskException # type: ignore
from spiffworkflow_backend.models.db import db
from spiffworkflow_backend.models.process_instance import ProcessInstanceModel

View File

@ -6,7 +6,7 @@ from dataclasses import dataclass
from typing import Any
from typing import Optional
-from SpiffWorkflow.exceptions import WorkflowTaskException # type: ignore
+from SpiffWorkflow.bpmn.exceptions import WorkflowTaskException # type: ignore
from SpiffWorkflow.task import Task as SpiffTask # type: ignore
from spiffworkflow_backend.services.process_instance_processor import (

View File

@ -131,12 +131,12 @@ class SpecFileService(FileSystemService):
has_lanes = sub_parser.has_lanes()
is_executable = sub_parser.process_executable
start_messages = sub_parser.start_messages()
-is_primary = sub_parser.get_id() == process_model_info.primary_process_id
+is_primary = sub_parser.bpmn_id == process_model_info.primary_process_id
called_element_ids = sub_parser.called_element_ids()
references.append(
SpecReference(
-identifier=sub_parser.get_id(),
+identifier=sub_parser.bpmn_id,
display_name=sub_parser.get_name(),
process_model_id=process_model_info.id,
type=parser_type,

View File

@ -9,10 +9,10 @@ from typing import Dict
from typing import Optional
from uuid import UUID
+from SpiffWorkflow.bpmn.exceptions import WorkflowTaskException # type: ignore
from SpiffWorkflow.bpmn.serializer.workflow import BpmnWorkflowSerializer # type: ignore
from SpiffWorkflow.bpmn.workflow import BpmnWorkflow # type: ignore
from SpiffWorkflow.exceptions import SpiffWorkflowException # type: ignore
-from SpiffWorkflow.exceptions import WorkflowTaskException
from SpiffWorkflow.task import Task as SpiffTask # type: ignore
from SpiffWorkflow.task import TaskState
@ -99,13 +99,7 @@ class ExecutionStrategy:
self.delegate.save(bpmn_process_instance)
def get_ready_engine_steps(self, bpmn_process_instance: BpmnWorkflow) -> list[SpiffTask]:
-tasks = list(
-[
-t
-for t in bpmn_process_instance.get_tasks(TaskState.READY)
-if bpmn_process_instance._is_engine_task(t.task_spec)
-]
-)
+tasks = list([t for t in bpmn_process_instance.get_tasks(TaskState.READY) if not t.task_spec.manual])
if len(tasks) > 0:
self.subprocess_spec_loader()
@ -297,7 +291,7 @@ class RunUntilServiceTaskExecutionStrategy(ExecutionStrategy):
engine_steps = self.get_ready_engine_steps(bpmn_process_instance)
while engine_steps:
for spiff_task in engine_steps:
-if spiff_task.task_spec.spec_type == "Service Task":
+if spiff_task.task_spec.description == "Service Task":
return
self.delegate.will_complete_task(spiff_task)
spiff_task.run()
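Note: with spec_type gone, task_spec.description now carries the human-readable spec type (for example "Service Task" or "Call Activity"), while the BPMN element's display name moved to bpmn_name. A tiny sketch of a type check written against that convention (the helper name is illustrative):

from SpiffWorkflow.task import Task as SpiffTask  # type: ignore

def is_service_task(spiff_task: SpiffTask) -> bool:
    # On this SpiffWorkflow branch, description holds the spec type string.
    return spiff_task.task_spec.description == "Service Task"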
@ -427,7 +421,6 @@ class WorkflowExecutionService:
self.process_instance_saver()
def process_bpmn_messages(self) -> None:
-"""Process_bpmn_messages."""
bpmn_messages = self.bpmn_process_instance.get_bpmn_messages()
for bpmn_message in bpmn_messages:
message_instance = MessageInstanceModel(
@ -452,9 +445,8 @@ class WorkflowExecutionService:
db.session.commit()
def queue_waiting_receive_messages(self) -> None:
-"""Queue_waiting_receive_messages."""
waiting_events = self.bpmn_process_instance.waiting_events()
-waiting_message_events = filter(lambda e: e["event_type"] == "Message", waiting_events)
+waiting_message_events = filter(lambda e: e["event_type"] == "MessageEventDefinition", waiting_events)
for event in waiting_message_events:
# Ensure we are only creating one message instance for each waiting message
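Note: the same renaming applies to message events, so the filter above matches "MessageEventDefinition" rather than "Message". A standalone sketch of that filtering step (not the service's exact code):

from SpiffWorkflow.bpmn.workflow import BpmnWorkflow  # type: ignore

def waiting_message_events(bpmn_process_instance: BpmnWorkflow) -> list[dict]:
    # Keep only waiting events whose event definition is a message.
    events = bpmn_process_instance.waiting_events()
    return [e for e in events if e["event_type"] == "MessageEventDefinition"]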

View File

@ -1,6 +1,6 @@
<?xml version="1.0" encoding="UTF-8"?>
<bpmn:definitions xmlns:bpmn="http://www.omg.org/spec/BPMN/20100524/MODEL" xmlns:bpmndi="http://www.omg.org/spec/BPMN/20100524/DI" xmlns:dc="http://www.omg.org/spec/DD/20100524/DC" xmlns:spiffworkflow="http://spiffworkflow.org/bpmn/schema/1.0/core" xmlns:di="http://www.omg.org/spec/DD/20100524/DI" id="Definitions_96f6665" targetNamespace="http://bpmn.io/schema/bpmn" exporter="Camunda Modeler" exporterVersion="3.0.0-dev">
-<bpmn:process id="top_level_process" name="Manual Task" isExecutable="true">
+<bpmn:process id="top_level_process" name="Top Level Process" isExecutable="true">
<bpmn:startEvent id="StartEvent_1">
<bpmn:outgoing>Flow_0stlaxe</bpmn:outgoing>
</bpmn:startEvent>

View File

@ -1778,7 +1778,6 @@ class TestProcessApi(BaseTest):
# The second script task should produce rendered jinja text
# The Manual Task should then return a message as well.
assert len(results) == 2
-# import pdb; pdb.set_trace()
assert json_results[0]["task"]["state"] == "READY"
assert json_results[0]["task"]["title"] == "Script Task #2"
assert json_results[0]["task"]["properties"]["instructionsForEndUser"] == "I am Script Task 2"
@ -2740,7 +2739,7 @@ class TestProcessApi(BaseTest):
)
assert response.status_code == 200
assert response.json is not None
-assert response.json["type"] == "End Event"
+assert response.json["type"] == "Default End Event"
assert response.json["state"] == "COMPLETED"
response = client.get(

View File

@ -226,14 +226,18 @@ class TestProcessInstanceService(BaseTest):
def test_does_skip_duration_timer_events_for_the_future(self) -> None:
assert ProcessInstanceService.waiting_event_can_be_skipped(
-{"event_type": "Duration Timer", "name": None, "value": "2023-04-27T20:15:10.626656+00:00"},
+{"event_type": "DurationTimerEventDefinition", "name": None, "value": "2023-04-27T20:15:10.626656+00:00"},
datetime.fromisoformat("2023-04-26T20:15:10.626656+00:00"),
)
def test_does_not_skip_duration_timer_events_for_the_past(self) -> None:
assert not (
ProcessInstanceService.waiting_event_can_be_skipped(
-{"event_type": "Duration Timer", "name": None, "value": "2023-04-27T20:15:10.626656+00:00"},
+{
+"event_type": "DurationTimerEventDefinition",
+"name": None,
+"value": "2023-04-27T20:15:10.626656+00:00",
+},
datetime.fromisoformat("2023-04-28T20:15:10.626656+00:00"),
)
)
@ -241,7 +245,11 @@ class TestProcessInstanceService(BaseTest):
def test_does_not_skip_duration_timer_events_for_now(self) -> None:
assert not (
ProcessInstanceService.waiting_event_can_be_skipped(
-{"event_type": "Duration Timer", "name": None, "value": "2023-04-27T20:15:10.626656+00:00"},
+{
+"event_type": "DurationTimerEventDefinition",
+"name": None,
+"value": "2023-04-27T20:15:10.626656+00:00",
+},
datetime.fromisoformat("2023-04-27T20:15:10.626656+00:00"),
)
)

View File

@ -181,5 +181,3 @@ class TestTaskService(BaseTest):
assert signal_event["event"]["name"] == "eat_spam"
assert signal_event["event"]["typename"] == "SignalEventDefinition"
assert signal_event["label"] == "Eat Spam"
-print(events)