some attempts to not change the process instance status w/ burnettk
commit 8e0630947d
parent 20ee46899d
@@ -172,7 +172,7 @@ class ProcessInstanceModel(SpiffworkflowBaseDBModel):
     @classmethod
     def active_statuses(cls) -> list[str]:
-        return ["user_input_required", "waiting"]
+        return ["not_started", "user_input_required", "waiting"]
 
 
 class ProcessInstanceModelSchema(Schema):
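Adding "not_started" to active_statuses() means a freshly created instance that has not yet run is still treated as runnable by the interstitial stream, which is the point of this commit: avoid flipping the instance status just to make the stream work. A minimal sketch of the guard that consumes this list, using an illustrative stand-in class rather than the real SQLAlchemy model:

    # Illustrative stand-in for ProcessInstanceModel, not the real model.
    class ProcessInstance:
        def __init__(self, status: str):
            self.status = status

        @classmethod
        def active_statuses(cls) -> list[str]:
            return ["not_started", "user_input_required", "waiting"]


    def should_stream(instance: ProcessInstance) -> bool:
        # Mirrors the check added to _dequeued_interstitial_stream further down.
        return instance.status in instance.__class__.active_statuses()


    assert should_stream(ProcessInstance("not_started"))   # now streams
    assert not should_stream(ProcessInstance("complete"))  # still bails out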
@@ -45,7 +45,7 @@ from spiffworkflow_backend.models.process_instance import (
 )
 from spiffworkflow_backend.models.process_instance_event import ProcessInstanceEventType
 from spiffworkflow_backend.models.process_model import ProcessModelInfo
-from spiffworkflow_backend.models.task import TaskModel  # noqa: F401
+from spiffworkflow_backend.models.task import Task, TaskModel  # noqa: F401
 from spiffworkflow_backend.models.user import UserModel
 from spiffworkflow_backend.routes.process_api_blueprint import (
     _find_principal_or_raise,
@@ -400,6 +400,11 @@ def _interstitial_stream(process_instance: ProcessInstanceModel) -> Generator[st
         extensions = TaskService.get_extensions_from_task_model(task_model)
         return _render_instructions_for_end_user(task_model, extensions)
 
+    def render_data(return_type: str, entity: Union[ApiError, Task]) -> str:
+        return_hash: dict = {"type": return_type}
+        return_hash[return_type] = entity
+        return f"data: {current_app.json.dumps(return_hash)} \n\n"
+
     tasks = get_reportable_tasks()
     while True:
         for spiff_task in tasks:
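The new render_data helper centralizes the server-sent-event framing that was previously repeated at each yield site: one JSON object per event, keyed by the kind of payload and wrapped in a data: ... frame terminated by a blank line. A rough standalone illustration, with plain json.dumps standing in for Flask's current_app.json.dumps:

    import json

    def render_data(return_type: str, entity: dict) -> str:
        # Produces {"type": "error", "error": {...}} or {"type": "task", "task": {...}}
        return_hash: dict = {"type": return_type}
        return_hash[return_type] = entity
        return f"data: {json.dumps(return_hash)} \n\n"


    frame = render_data("error", {"error_code": "engine_steps_error", "status_code": 400})
    assert frame.startswith("data: ") and frame.endswith(" \n\n")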
@@ -411,12 +416,12 @@ def _interstitial_stream(process_instance: ProcessInstanceModel) -> Generator[st
                     message=f"Failed to complete an automated task. Error was: {str(e)}",
                     status_code=400,
                 )
-                yield f"data: {current_app.json.dumps(api_error)} \n\n"
+                yield render_data('error', api_error)
                 raise e
             if instructions and spiff_task.id not in reported_ids:
                 task = ProcessInstanceService.spiff_task_to_api_task(processor, spiff_task)
                 task.properties = {"instructionsForEndUser": instructions}
-                yield f"data: {current_app.json.dumps(task)} \n\n"
+                yield render_data('task', task)
                 reported_ids.append(spiff_task.id)
             if spiff_task.state == TaskState.READY:
                 try:
@@ -427,7 +432,7 @@ def _interstitial_stream(process_instance: ProcessInstanceModel) -> Generator[st
                     api_error = ApiError.from_workflow_exception(
                         "engine_steps_error", "Failed complete an automated task.", exp=wfe
                     )
-                    yield f"data: {current_app.json.dumps(api_error)} \n\n"
+                    yield render_data('error', api_error)
                     return
                 except Exception as e:
                     api_error = ApiError(
@@ -435,7 +440,7 @@ def _interstitial_stream(process_instance: ProcessInstanceModel) -> Generator[st
                         message=f"Failed to complete an automated task. Error was: {str(e)}",
                         status_code=400,
                     )
-                    yield f"data: {current_app.json.dumps(api_error)} \n\n"
+                    yield render_data('error', api_error)
                     return
         processor.refresh_waiting_tasks()
         ready_engine_task_count = get_ready_engine_step_count(processor.bpmn_process_instance)
@@ -454,10 +459,10 @@ def _interstitial_stream(process_instance: ProcessInstanceModel) -> Generator[st
                 message=f"Failed to complete an automated task. Error was: {str(e)}",
                 status_code=400,
             )
-            yield f"data: {current_app.json.dumps(api_error)} \n\n"
+            yield render_data('error', api_error)
             raise e
         task.properties = {"instructionsForEndUser": instructions}
-        yield f"data: {current_app.json.dumps(task)} \n\n"
+        yield render_data('task', task)
 
 
 def get_ready_engine_step_count(bpmn_process_instance: BpmnWorkflow) -> int:
@@ -472,8 +477,12 @@ def get_ready_engine_step_count(bpmn_process_instance: BpmnWorkflow) -> int:
     )
 
 
-def _dequeued_interstitial_stream(process_instance_id: int) -> Generator[str, Optional[str], None]:
+def _dequeued_interstitial_stream(process_instance_id: int) -> Generator[Optional[str], Optional[str], None]:
     process_instance = _find_process_instance_by_id_or_raise(process_instance_id)
+    if process_instance.status not in process_instance.__class__.active_statuses():
+        yield f"data: {current_app.json.dumps(process_instance)} \n\n"
+        return
+
     with ProcessInstanceQueueService.dequeued(process_instance):
         yield from _interstitial_stream(process_instance)
 
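The guard added to _dequeued_interstitial_stream short-circuits streaming for instances that are not in an active status: it yields a single frame describing the instance and returns without acquiring the queue. A hedged sketch of that generator shape (the function name, argument names, and sample values are placeholders):

    from typing import Generator, Optional

    def stream_or_bail(
        instance_json: str, status: str, active_statuses: list[str]
    ) -> Generator[Optional[str], Optional[str], None]:
        if status not in active_statuses:
            # Emit one frame for the non-active instance, then stop streaming.
            yield f"data: {instance_json} \n\n"
            return
        # The real code would dequeue the instance here and
        # yield from _interstitial_stream(process_instance).


    frames = list(stream_or_bail('{"status": "complete"}', "complete", ["not_started", "waiting"]))
    assert len(frames) == 1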
@@ -351,3 +351,15 @@ export interface ProcessModelCaller {
 }
 
 export interface UserGroup {}
+
+type InterstitialPageResponseType =
+  | 'task_update'
+  | 'error'
+  | 'unrunnable_instance';
+
+export interface InterstitialPageResponse {
+  type: InterstitialPageResponseType;
+  error?: any;
+  task?: ProcessInstanceTask;
+  process_instance?: ProcessInstance;
+}
@@ -31,12 +31,17 @@ export default function ProcessInterstitial() {
       {
         headers: getBasicHeaders(),
         onmessage(ev) {
+          console.log('ev', ev);
           const retValue = JSON.parse(ev.data);
-          if ('error_code' in retValue) {
-            addError(retValue);
-          } else {
-            setData((prevData) => [retValue, ...prevData]);
+          if (retValue.type === 'error') {
+            addError(retValue.error);
+          } else if (retValue.type === 'task') {
+            setData((prevData) => [retValue.task, ...prevData]);
             setLastTask(retValue);
+            // } else if (retValue.type === 'unrunnable_instance') {
+            //   // setData((prevData) => [retValue.task, ...prevData]);
+            //   // setLastTask(retValue);
+            //   setState('CLOSED');
           }
         },
         onclose() {
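On the frontend, onmessage now dispatches on the type field of the parsed event instead of sniffing for error_code. The same dispatch written as a small Python consumer of one SSE frame, for illustration only (handle_frame is a hypothetical helper; the field names follow the hunks above):

    import json

    def handle_frame(frame: str) -> str:
        payload = json.loads(frame.removeprefix("data:").strip())
        if payload.get("type") == "error":
            return f"error: {payload['error']}"  # frontend calls addError(retValue.error)
        if payload.get("type") == "task":
            return f"task: {payload['task']}"    # frontend prepends retValue.task to its data list
        return "ignored"


    print(handle_frame('data: {"type": "task", "task": {"id": "abc"}} \n\n'))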