some attempts to not change the process instance status w/ burnettk

parent 20ee46899d
commit 8e0630947d
@@ -172,7 +172,7 @@ class ProcessInstanceModel(SpiffworkflowBaseDBModel):

     @classmethod
     def active_statuses(cls) -> list[str]:
-        return ["user_input_required", "waiting"]
+        return ["not_started", "user_input_required", "waiting"]


 class ProcessInstanceModelSchema(Schema):
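Adding "not_started" to active_statuses() means a freshly created instance, whose tasks the engine has not yet run, counts as active; the interstitial stream changed later in this commit only short-circuits for statuses outside this list. A minimal sketch (not the project's code, with plain strings standing in for the model) of how that gate behaves:

    ACTIVE_STATUSES = ["not_started", "user_input_required", "waiting"]

    def should_stream(status: str) -> bool:
        # Only instances the engine may still move forward get a live stream;
        # everything else gets a single summary frame (see the later hunk).
        return status in ACTIVE_STATUSES

    assert should_stream("not_started")   # newly created instances now qualify
    assert not should_stream("complete")  # finished instances short-circuit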
@@ -45,7 +45,7 @@ from spiffworkflow_backend.models.process_instance import (
 )
 from spiffworkflow_backend.models.process_instance_event import ProcessInstanceEventType
 from spiffworkflow_backend.models.process_model import ProcessModelInfo
-from spiffworkflow_backend.models.task import TaskModel  # noqa: F401
+from spiffworkflow_backend.models.task import Task, TaskModel  # noqa: F401
 from spiffworkflow_backend.models.user import UserModel
 from spiffworkflow_backend.routes.process_api_blueprint import (
     _find_principal_or_raise,
@@ -400,6 +400,11 @@ def _interstitial_stream(process_instance: ProcessInstanceModel) -> Generator[st
         extensions = TaskService.get_extensions_from_task_model(task_model)
         return _render_instructions_for_end_user(task_model, extensions)

+    def render_data(return_type: str, entity: Union[ApiError, Task]) -> str:
+        return_hash: dict = {"type": type}
+        return_hash[return_type] = entity
+        return f"data: {current_app.json.dumps(return_hash)} \n\n"
+
     tasks = get_reportable_tasks()
     while True:
         for spiff_task in tasks:
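render_data wraps each server-sent-event payload in an envelope keyed by kind, which is what lets the frontend branch on retValue.type later in this commit. As committed, `return_hash: dict = {"type": type}` stores the builtin `type` rather than the return_type string the frontend compares against; a hedged sketch of the apparently intended shape, using only the standard library:

    import json

    def render_data(return_type, entity):
        # Assumption: "type" was meant to carry the return_type string.
        return_hash = {"type": return_type, return_type: entity}
        # SSE frames are "data: <json>" terminated by a blank line.
        return f"data: {json.dumps(return_hash)} \n\n"

    print(render_data("error", {"error_code": "engine_steps_error"}))
    # data: {"type": "error", "error": {"error_code": "engine_steps_error"}}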
@@ -411,12 +416,12 @@ def _interstitial_stream(process_instance: ProcessInstanceModel) -> Generator[st
                     message=f"Failed to complete an automated task. Error was: {str(e)}",
                     status_code=400,
                 )
-                yield f"data: {current_app.json.dumps(api_error)} \n\n"
+                yield render_data('error', api_error)
                 raise e
             if instructions and spiff_task.id not in reported_ids:
                 task = ProcessInstanceService.spiff_task_to_api_task(processor, spiff_task)
                 task.properties = {"instructionsForEndUser": instructions}
-                yield f"data: {current_app.json.dumps(task)} \n\n"
+                yield render_data('task', task)
                 reported_ids.append(spiff_task.id)
                 if spiff_task.state == TaskState.READY:
                     try:
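This substitution (repeated in the hunks that follow) changes only the envelope, not the data: previously the bare ApiError or Task was dumped, now it arrives wrapped with a "type". A hedged before/after comparison, with a plain dict standing in for the ApiError model:

    import json

    api_error = {"error_code": "engine_steps_error",
                 "message": "Failed to complete an automated task."}

    old_frame = f"data: {json.dumps(api_error)} \n\n"
    new_frame = f"data: {json.dumps({'type': 'error', 'error': api_error})} \n\n"
    # The old frame forced the client to sniff for 'error_code';
    # the new one can be dispatched on payload['type'].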
@@ -427,7 +432,7 @@ def _interstitial_stream(process_instance: ProcessInstanceModel) -> Generator[st
                         api_error = ApiError.from_workflow_exception(
                             "engine_steps_error", "Failed complete an automated task.", exp=wfe
                         )
-                        yield f"data: {current_app.json.dumps(api_error)} \n\n"
+                        yield render_data('error', api_error)
                         return
                     except Exception as e:
                         api_error = ApiError(
@@ -435,7 +440,7 @@ def _interstitial_stream(process_instance: ProcessInstanceModel) -> Generator[st
                             message=f"Failed to complete an automated task. Error was: {str(e)}",
                             status_code=400,
                         )
-                        yield f"data: {current_app.json.dumps(api_error)} \n\n"
+                        yield render_data('error', api_error)
                         return
             processor.refresh_waiting_tasks()
         ready_engine_task_count = get_ready_engine_step_count(processor.bpmn_process_instance)
@@ -454,10 +459,10 @@ def _interstitial_stream(process_instance: ProcessInstanceModel) -> Generator[st
                 message=f"Failed to complete an automated task. Error was: {str(e)}",
                 status_code=400,
             )
-            yield f"data: {current_app.json.dumps(api_error)} \n\n"
+            yield render_data('error', api_error)
             raise e
         task.properties = {"instructionsForEndUser": instructions}
-        yield f"data: {current_app.json.dumps(task)} \n\n"
+        yield render_data('task', task)


 def get_ready_engine_step_count(bpmn_process_instance: BpmnWorkflow) -> int:
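The stream keeps looping while get_ready_engine_step_count reports runnable engine steps. A rough sketch of that idea with stand-in task objects; the real helper inspects the SpiffWorkflow BpmnWorkflow, and whether it excludes manual/human tasks exactly this way is an assumption here:

    from dataclasses import dataclass

    @dataclass
    class StubTask:
        ready: bool
        manual: bool  # human tasks are not engine steps

    def ready_engine_step_count(tasks) -> int:
        return sum(1 for t in tasks if t.ready and not t.manual)

    assert ready_engine_step_count([StubTask(True, False), StubTask(True, True)]) == 1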
@@ -472,8 +477,12 @@ def get_ready_engine_step_count(bpmn_process_instance: BpmnWorkflow) -> int:
     )


-def _dequeued_interstitial_stream(process_instance_id: int) -> Generator[str, Optional[str], None]:
+def _dequeued_interstitial_stream(process_instance_id: int) -> Generator[Optional[str], Optional[str], None]:
     process_instance = _find_process_instance_by_id_or_raise(process_instance_id)
+    if process_instance.status not in process_instance.__class__.active_statuses():
+        yield f"data: {current_app.json.dumps(process_instance)} \n\n"
+        return
+
     with ProcessInstanceQueueService.dequeued(process_instance):
         yield from _interstitial_stream(process_instance)

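With the early return added here, _dequeued_interstitial_stream never enters ProcessInstanceQueueService.dequeued for an instance whose status is not in active_statuses(); it emits one frame describing the instance and stops. A minimal sketch of that behavior with a plain dict in place of the model:

    import json

    ACTIVE_STATUSES = ["not_started", "user_input_required", "waiting"]

    def dequeued_interstitial_stream(process_instance: dict):
        if process_instance["status"] not in ACTIVE_STATUSES:
            yield f"data: {json.dumps(process_instance)} \n\n"
            return
        # ...otherwise dequeue and delegate to the real interstitial stream...
        yield "data: {} \n\n"  # placeholder frame

    frames = list(dequeued_interstitial_stream({"id": 1, "status": "complete"}))
    assert len(frames) == 1  # a finished instance yields a single summary frame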
@@ -351,3 +351,15 @@ export interface ProcessModelCaller {
 }

 export interface UserGroup {}
+
+type InterstitialPageResponseType =
+  | 'task_update'
+  | 'error'
+  | 'unrunnable_instance';
+
+export interface InterstitialPageResponse {
+  type: InterstitialPageResponseType;
+  error?: any;
+  task?: ProcessInstanceTask;
+  process_instance?: ProcessInstance;
+}
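InterstitialPageResponse describes the envelope render_data produces, though the declared union names ('task_update', 'unrunnable_instance') do not all match the strings the backend emits and the handler in the next hunk checks ('task', 'error'). A hedged Python sketch of a consumer that parses one SSE line and dispatches on the emitted names, using only the standard library:

    import json

    def handle_sse_line(line: str) -> None:
        if not line.startswith("data: "):
            return
        payload = json.loads(line[len("data: "):])
        if payload.get("type") == "error":
            print("error:", payload["error"])
        elif payload.get("type") == "task":
            print("task:", payload["task"])

    handle_sse_line('data: {"type": "task", "task": {"title": "Review request"}} ')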
@@ -31,12 +31,17 @@ export default function ProcessInterstitial() {
       {
         headers: getBasicHeaders(),
         onmessage(ev) {
+          console.log('ev', ev);
           const retValue = JSON.parse(ev.data);
-          if ('error_code' in retValue) {
-            addError(retValue);
-          } else {
-            setData((prevData) => [retValue, ...prevData]);
+          if (retValue.type === 'error') {
+            addError(retValue.error);
+          } else if (retValue.type === 'task') {
+            setData((prevData) => [retValue.task, ...prevData]);
             setLastTask(retValue);
+            // } else if (retValue.type === 'unrunnable_instance') {
+            // //   setData((prevData) => [retValue.task, ...prevData]);
+            // //   setLastTask(retValue);
+            //   setState('CLOSED');
           }
         },
         onclose() {