diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/config/default.py b/spiffworkflow-backend/src/spiffworkflow_backend/config/default.py
index dec4c444..a111a08e 100644
--- a/spiffworkflow-backend/src/spiffworkflow_backend/config/default.py
+++ b/spiffworkflow-backend/src/spiffworkflow_backend/config/default.py
@@ -140,7 +140,7 @@ SPIFFWORKFLOW_BACKEND_ENGINE_STEP_DEFAULT_STRATEGY_BACKGROUND = environ.get(
)
SPIFFWORKFLOW_BACKEND_ENGINE_STEP_DEFAULT_STRATEGY_WEB = environ.get(
- "SPIFFWORKFLOW_BACKEND_ENGINE_STEP_DEFAULT_STRATEGY_WEB", default="greedy"
+ "SPIFFWORKFLOW_BACKEND_ENGINE_STEP_DEFAULT_STRATEGY_WEB", default="run_until_user_message"
)
# this is only used in CI. use SPIFFWORKFLOW_BACKEND_DATABASE_URI instead for real configuration
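
For reference, a minimal sketch of how the new default resolves at startup, assuming
nothing beyond the os.environ lookup shown above; the "greedy" value mentioned in the
comment is the previous default, shown here only for illustration:

    from os import environ

    # With no environment override, the web engine-step strategy now falls back to
    # "run_until_user_message" instead of "greedy".
    strategy_web = environ.get(
        "SPIFFWORKFLOW_BACKEND_ENGINE_STEP_DEFAULT_STRATEGY_WEB",
        default="run_until_user_message",
    )

    # Deployments that want the old behavior can still export
    # SPIFFWORKFLOW_BACKEND_ENGINE_STEP_DEFAULT_STRATEGY_WEB=greedy before startup.
    print(strategy_web)
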
diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/routes/tasks_controller.py b/spiffworkflow-backend/src/spiffworkflow_backend/routes/tasks_controller.py
index 2a2953a5..70c24d14 100644
--- a/spiffworkflow-backend/src/spiffworkflow_backend/routes/tasks_controller.py
+++ b/spiffworkflow-backend/src/spiffworkflow_backend/routes/tasks_controller.py
@@ -85,7 +85,7 @@ class ReactJsonSchemaSelectOption(TypedDict):
def task_list_my_tasks(
- process_instance_id: Optional[int] = None, page: int = 1, per_page: int = 100
+ process_instance_id: Optional[int] = None, page: int = 1, per_page: int = 100
) -> flask.wrappers.Response:
"""Task_list_my_tasks."""
principal = _find_principal_or_raise()
@@ -166,7 +166,7 @@ def task_list_for_me(page: int = 1, per_page: int = 100) -> flask.wrappers.Respo
def task_list_for_my_groups(
- user_group_identifier: Optional[str] = None, page: int = 1, per_page: int = 100
+ user_group_identifier: Optional[str] = None, page: int = 1, per_page: int = 100
) -> flask.wrappers.Response:
"""Task_list_for_my_groups."""
return _get_tasks(
@@ -178,9 +178,9 @@ def task_list_for_my_groups(
def task_data_show(
- modified_process_model_identifier: str,
- process_instance_id: int,
- task_guid: str,
+ modified_process_model_identifier: str,
+ process_instance_id: int,
+ task_guid: str,
) -> flask.wrappers.Response:
task_model = _get_task_model_from_guid_or_raise(task_guid, process_instance_id)
task_model.data = task_model.json_data()
@@ -188,10 +188,10 @@ def task_data_show(
def task_data_update(
- process_instance_id: str,
- modified_process_model_identifier: str,
- task_guid: str,
- body: Dict,
+ process_instance_id: str,
+ modified_process_model_identifier: str,
+ task_guid: str,
+ body: Dict,
) -> Response:
"""Update task data."""
process_instance = ProcessInstanceModel.query.filter(ProcessInstanceModel.id == int(process_instance_id)).first()
@@ -241,10 +241,10 @@ def task_data_update(
def manual_complete_task(
- modified_process_model_identifier: str,
- process_instance_id: str,
- task_guid: str,
- body: Dict,
+ modified_process_model_identifier: str,
+ process_instance_id: str,
+ task_guid: str,
+ body: Dict,
) -> Response:
"""Mark a task complete without executing it."""
execute = body.get("execute", True)
@@ -353,12 +353,13 @@ def task_show(process_instance_id: int, task_guid: str = "next") -> flask.wrappe
_render_instructions_for_end_user(spiff_task, task)
return make_response(jsonify(task), 200)
+
def _render_instructions_for_end_user(spiff_task: SpiffTask, task: Task):
"""Assure any instructions for end user are processed for jinja syntax."""
if task.properties and "instructionsForEndUser" in task.properties:
if task.properties["instructionsForEndUser"]:
try:
- instructions = _render_jinja_template(
+ instructions = _render_jinja_template(
task.properties["instructionsForEndUser"], spiff_task
)
task.properties["instructionsForEndUser"] = instructions
@@ -370,9 +371,9 @@ def _render_instructions_for_end_user(spiff_task: SpiffTask, task: Task):
def process_data_show(
- process_instance_id: int,
- process_data_identifier: str,
- modified_process_model_identifier: str,
+ process_instance_id: int,
+ process_data_identifier: str,
+ modified_process_model_identifier: str,
) -> flask.wrappers.Response:
"""Process_data_show."""
process_instance = _find_process_instance_by_id_or_raise(process_instance_id)
@@ -392,36 +393,42 @@ def process_data_show(
200,
)
-def interstitial(process_instance_id: int):
+
+def _interstitial_stream(process_instance_id: int):
process_instance = _find_process_instance_by_id_or_raise(process_instance_id)
processor = ProcessInstanceProcessor(process_instance)
- reported_ids = [] # bit of an issue with end tasks showing as getting completed twice.
- def get_data():
- spiff_task = processor.next_task()
- last_task = None
- while last_task != spiff_task:
- task = ProcessInstanceService.spiff_task_to_api_task(processor, processor.next_task())
- instructions = _render_instructions_for_end_user(spiff_task, task)
- if instructions and spiff_task.id not in reported_ids:
- reported_ids.append(spiff_task.id)
- yield f'data: {current_app.json.dumps(task)} \n\n'
- last_task = spiff_task
- processor.do_engine_steps(execution_strategy_name="run_until_user_message")
- processor.do_engine_steps(execution_strategy_name="one_at_a_time")
- spiff_task = processor.next_task()
- # Note, this has to be done in case someone leaves the page,
- # which can otherwise cancel this function and leave completed tasks un-registered.
- processor.save() # Fixme - maybe find a way not to do this on every method?
- return
+    reported_ids = []  # work around end tasks otherwise being reported as completed twice.
# return Response(get_data(), mimetype='text/event-stream')
- return Response(stream_with_context(get_data()), mimetype='text/event-stream')
+
+ spiff_task = processor.next_task()
+ last_task = None
+ while last_task != spiff_task:
+ task = ProcessInstanceService.spiff_task_to_api_task(processor, processor.next_task())
+ instructions = _render_instructions_for_end_user(spiff_task, task)
+ if instructions and spiff_task.id not in reported_ids:
+ reported_ids.append(spiff_task.id)
+ yield f'data: {current_app.json.dumps(task)} \n\n'
+ last_task = spiff_task
+ processor.do_engine_steps(execution_strategy_name="run_until_user_message")
+ processor.do_engine_steps(execution_strategy_name="one_at_a_time")
+ spiff_task = processor.next_task()
+        # Note: this has to be done in case someone leaves the page, which would
+        # otherwise cancel this generator and leave completed tasks unregistered.
+        processor.save()  # FIXME: maybe find a way not to do this on every iteration?
+
+
+def interstitial(process_instance_id: int) -> Response:
+ """A Server Side Events Stream for watching the execution of engine tasks in a
+ process instance. """
+ return Response(stream_with_context(_interstitial_stream(process_instance_id)), mimetype='text/event-stream')
+
def _task_submit_shared(
- process_instance_id: int,
- task_guid: str,
- body: Dict[str, Any],
- save_as_draft: bool = False,
+ process_instance_id: int,
+ task_guid: str,
+ body: Dict[str, Any],
+ save_as_draft: bool = False,
) -> flask.wrappers.Response:
principal = _find_principal_or_raise()
process_instance = _find_process_instance_by_id_or_raise(process_instance_id)
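
One apparent benefit of pulling the former get_data closure out into the module-level
_interstitial_stream generator above is that the stream can be driven without an HTTP
round trip, for example from a test. A rough sketch of that usage, which assumes it
runs inside a Flask app context; the helper name and the instance id are illustrative
and not part of this change:

    from spiffworkflow_backend.routes.tasks_controller import _interstitial_stream

    def collect_interstitial_frames(process_instance_id: int) -> list[str]:
        """Collect the SSE frames the generator yields for one process instance."""
        frames = []
        for frame in _interstitial_stream(process_instance_id):
            # Each yielded frame is already formatted as "data: {...json...}\n\n";
            # the generator only reports tasks that have end-user instructions.
            frames.append(frame)
        return frames

    # frames = collect_interstitial_frames(42)  # placeholder process instance id
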
@@ -508,11 +515,12 @@ def _task_submit_shared(
"process_instance_id": process_instance_id
}), status=202, mimetype="application/json")
+
def task_submit(
- process_instance_id: int,
- task_guid: str,
- body: Dict[str, Any],
- save_as_draft: bool = False,
+ process_instance_id: int,
+ task_guid: str,
+ body: Dict[str, Any],
+ save_as_draft: bool = False,
) -> flask.wrappers.Response:
"""Task_submit_user_data."""
with sentry_sdk.start_span(op="controller_action", description="tasks_controller.task_submit"):
@@ -520,11 +528,11 @@ def task_submit(
def _get_tasks(
- processes_started_by_user: bool = True,
- has_lane_assignment_id: bool = True,
- page: int = 1,
- per_page: int = 100,
- user_group_identifier: Optional[str] = None,
+ processes_started_by_user: bool = True,
+ has_lane_assignment_id: bool = True,
+ page: int = 1,
+ per_page: int = 100,
+ user_group_identifier: Optional[str] = None,
) -> flask.wrappers.Response:
"""Get_tasks."""
user_id = g.user.id
@@ -671,9 +679,9 @@ def _render_jinja_template(unprocessed_template: str, spiff_task: SpiffTask) ->
def _get_spiff_task_from_process_instance(
- task_guid: str,
- process_instance: ProcessInstanceModel,
- processor: Union[ProcessInstanceProcessor, None] = None,
+ task_guid: str,
+ process_instance: ProcessInstanceModel,
+ processor: Union[ProcessInstanceProcessor, None] = None,
) -> SpiffTask:
"""Get_spiff_task_from_process_instance."""
if processor is None:
@@ -729,9 +737,8 @@ def _update_form_schema_with_task_data_as_needed(in_dict: dict, task: Task, spif
select_options_from_task_data = task.data.get(task_data_var)
if isinstance(select_options_from_task_data, list):
if all("value" in d and "label" in d for d in select_options_from_task_data):
-
def map_function(
- task_data_select_option: TaskDataSelectOption,
+ task_data_select_option: TaskDataSelectOption,
) -> ReactJsonSchemaSelectOption:
"""Map_function."""
return {
@@ -769,9 +776,9 @@ def _get_potential_owner_usernames(assigned_user: AliasedClass) -> Any:
def _find_human_task_or_raise(
- process_instance_id: int,
- task_guid: str,
- only_tasks_that_can_be_completed: bool = False,
+ process_instance_id: int,
+ task_guid: str,
+ only_tasks_that_can_be_completed: bool = False,
) -> HumanTaskModel:
if only_tasks_that_can_be_completed:
human_task_query = HumanTaskModel.query.filter_by(
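
On the consumer side, the interstitial endpoint added above returns a standard
text/event-stream response, so any SSE-capable client can follow task progress as the
engine runs. A hedged sketch using the requests library; the host, port, URL path, and
task keys below are assumptions for illustration and are not taken from this diff:

    import json

    import requests

    # Assumed URL shape; adjust host, port, and API prefix for a real deployment.
    url = "http://localhost:7000/v1.0/tasks/42/interstitial"

    with requests.get(url, stream=True) as response:
        for line in response.iter_lines(decode_unicode=True):
            # SSE frames arrive as "data: {...}" lines separated by blank lines.
            if line and line.startswith("data:"):
                task = json.loads(line[len("data:"):].strip())
                print(task.get("title"))
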
diff --git a/spiffworkflow-backend/tests/data/interstitial/interstitial.bpmn b/spiffworkflow-backend/tests/data/interstitial/interstitial.bpmn
new file mode 100644
index 00000000..ec386e56
--- /dev/null
+++ b/spiffworkflow-backend/tests/data/interstitial/interstitial.bpmn
@@ -0,0 +1,123 @@