Merge pull request #223 from sartography/feature/scroll_to_top_better_errors
* Scroll to the top on each update of a process result.
Commit a4dab51a0e
@@ -18,13 +18,13 @@ def setup_database_uri(app: Flask) -> None:
     if app.config.get("SPIFFWORKFLOW_BACKEND_DATABASE_URI") is None:
         database_name = f"spiffworkflow_backend_{app.config['ENV_IDENTIFIER']}"
         if app.config.get("SPIFFWORKFLOW_BACKEND_DATABASE_TYPE") == "sqlite":
-            app.config["SQLALCHEMY_DATABASE_URI"] = (
-                f"sqlite:///{app.instance_path}/db_{app.config['ENV_IDENTIFIER']}.sqlite3"
-            )
+            app.config[
+                "SQLALCHEMY_DATABASE_URI"
+            ] = f"sqlite:///{app.instance_path}/db_{app.config['ENV_IDENTIFIER']}.sqlite3"
         elif app.config.get("SPIFFWORKFLOW_BACKEND_DATABASE_TYPE") == "postgres":
-            app.config["SQLALCHEMY_DATABASE_URI"] = (
-                f"postgresql://spiffworkflow_backend:spiffworkflow_backend@localhost:5432/{database_name}"
-            )
+            app.config[
+                "SQLALCHEMY_DATABASE_URI"
+            ] = f"postgresql://spiffworkflow_backend:spiffworkflow_backend@localhost:5432/{database_name}"
         else:
             # use pswd to trick flake8 with hardcoded passwords
             db_pswd = app.config.get("SPIFFWORKFLOW_BACKEND_DATABASE_PASSWORD")
@@ -127,9 +127,9 @@ class ProcessInstanceModel(SpiffworkflowBaseDBModel):
     def serialized_with_metadata(self) -> dict[str, Any]:
         process_instance_attributes = self.serialized
         process_instance_attributes["process_metadata"] = self.process_metadata
-        process_instance_attributes["process_model_with_diagram_identifier"] = (
-            self.process_model_with_diagram_identifier
-        )
+        process_instance_attributes[
+            "process_model_with_diagram_identifier"
+        ] = self.process_model_with_diagram_identifier
         return process_instance_attributes

     @property
@@ -110,6 +110,7 @@ class Task:
         event_definition: Union[dict[str, Any], None] = None,
         call_activity_process_identifier: Optional[str] = None,
         calling_subprocess_task_id: Optional[str] = None,
+        error_message: Optional[str] = None,
     ):
         """__init__."""
         self.id = id
@@ -147,6 +148,7 @@ class Task:
         self.properties = properties  # Arbitrary extension properties from BPMN editor.
         if self.properties is None:
             self.properties = {}
+        self.error_message = error_message

     @property
     def serialized(self) -> dict[str, Any]:
@@ -183,6 +185,7 @@ class Task:
             "event_definition": self.event_definition,
             "call_activity_process_identifier": self.call_activity_process_identifier,
             "calling_subprocess_task_id": self.calling_subprocess_task_id,
+            "error_message": self.error_message,
         }

     @classmethod
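Taken together, the three Task hunks above thread an optional error_message from the constructor into the serialized payload the API returns. A minimal standalone sketch of that flow, using a simplified stand-in class rather than the project's real Task (which takes many more constructor arguments):

from typing import Any, Optional


class MiniTask:
    """Simplified stand-in for Task, reduced to the fields relevant to this change."""

    def __init__(self, id: str, name: str, error_message: Optional[str] = None):
        self.id = id
        self.name = name
        self.error_message = error_message

    @property
    def serialized(self) -> dict[str, Any]:
        # The key is always present; it is simply None when nothing went wrong.
        return {"id": self.id, "name": self.name, "error_message": self.error_message}


print(MiniTask("abc-123", "script_task", error_message="division by zero").serialized)
# {'id': 'abc-123', 'name': 'script_task', 'error_message': 'division by zero'}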
@@ -405,14 +405,21 @@ def _interstitial_stream(process_instance_id: int) -> Generator[str, Optional[st
             reported_ids.append(spiff_task.id)
             yield f"data: {current_app.json.dumps(task)} \n\n"
         last_task = spiff_task
-        processor.do_engine_steps(execution_strategy_name="run_until_user_message")
-        processor.do_engine_steps(execution_strategy_name="one_at_a_time")
-        spiff_task = processor.next_task()
+        try:
+            processor.do_engine_steps(execution_strategy_name="run_until_user_message")
+            processor.do_engine_steps(execution_strategy_name="one_at_a_time")
+        except WorkflowTaskException as wfe:
+            api_error = ApiError.from_workflow_exception(
+                "engine_steps_error", "Failed complete an automated task.", exp=wfe
+            )
+            yield f"data: {current_app.json.dumps(api_error)} \n\n"
         # Note, this has to be done in case someone leaves the page,
         # which can otherwise cancel this function and leave completed tasks un-registered.
         processor.save()  # Fixme - maybe find a way not to do this on every loop?
-    if len(reported_ids) == 0:
+        spiff_task = processor.next_task()

     # Always provide some response, in the event no instructions were provided.
+    if len(reported_ids) == 0:
         task = ProcessInstanceService.spiff_task_to_api_task(processor, processor.next_task())
         yield f"data: {current_app.json.dumps(task)} \n\n"
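The try/except added above keeps the event stream alive when the engine raises, so the browser receives a JSON error payload (carrying an error_code) instead of a dropped connection. A rough, self-contained sketch of that pattern, with a made-up FakeEngineError and plain json.dumps standing in for WorkflowTaskException, ApiError, and the Flask JSON provider:

import json
from typing import Generator


class FakeEngineError(Exception):
    """Stand-in for WorkflowTaskException, for illustration only."""


def run_engine_step(step: int) -> dict:
    # Pretend the engine fails on step 2 (e.g. a script task raises).
    if step == 2:
        raise FakeEngineError("Failed complete an automated task.")
    return {"title": f"Task {step}", "state": "COMPLETED"}


def interstitial_stream(total_steps: int) -> Generator[str, None, None]:
    # Each yielded string is one server-sent event ("data: ...\n\n").
    for step in range(total_steps):
        try:
            task = run_engine_step(step)
        except FakeEngineError as err:
            api_error = {"error_code": "engine_steps_error", "message": str(err)}
            yield f"data: {json.dumps(api_error)} \n\n"
            continue
        yield f"data: {json.dumps(task)} \n\n"


if __name__ == "__main__":
    for event in interstitial_stream(4):
        print(event, end="")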
@@ -423,9 +423,9 @@ class ProcessInstanceProcessor:
         tld.process_instance_id = process_instance_model.id

         # we want this to be the fully qualified path to the process model including all group subcomponents
-        current_app.config["THREAD_LOCAL_DATA"].process_model_identifier = (
-            f"{process_instance_model.process_model_identifier}"
-        )
+        current_app.config[
+            "THREAD_LOCAL_DATA"
+        ].process_model_identifier = f"{process_instance_model.process_model_identifier}"

         self.process_instance_model = process_instance_model
         self.process_model_service = ProcessModelService()
@@ -585,9 +585,9 @@ class ProcessInstanceProcessor:
                 bpmn_subprocess_definition.bpmn_identifier
             ] = bpmn_process_definition_dict
             spiff_bpmn_process_dict["subprocess_specs"][bpmn_subprocess_definition.bpmn_identifier]["task_specs"] = {}
-            bpmn_subprocess_definition_bpmn_identifiers[bpmn_subprocess_definition.id] = (
-                bpmn_subprocess_definition.bpmn_identifier
-            )
+            bpmn_subprocess_definition_bpmn_identifiers[
+                bpmn_subprocess_definition.id
+            ] = bpmn_subprocess_definition.bpmn_identifier

         task_definitions = TaskDefinitionModel.query.filter(
             TaskDefinitionModel.bpmn_process_definition_id.in_(  # type: ignore
@@ -1741,8 +1741,8 @@ class ProcessInstanceProcessor:
     def next_task(self) -> SpiffTask:
         """Returns the next task that should be completed even if there are parallel tasks and multiple options are available.

-        If the process_instance is complete
-        it will return the final end task.
+        If the process_instance is complete it will return the final end task.
+        If the process_instance is in an error state it will return the task that is erroring.
         """
         # If the whole blessed mess is done, return the end_event task in the tree
         # This was failing in the case of a call activity where we have an intermediate EndEvent
@@ -1769,8 +1769,12 @@ class ProcessInstanceProcessor:
         waiting_tasks = self.bpmn_process_instance.get_tasks(TaskState.WAITING)
         if len(waiting_tasks) > 0:
             return waiting_tasks[0]
-        else:
-            return  # We have not tasks to return.
+
+        # If there are no ready tasks, and not waiting tasks, return the latest error.
+        error_task = None
+        for task in SpiffTask.Iterator(self.bpmn_process_instance.task_tree, TaskState.ERROR):
+            error_task = task
+        return error_task

         # Get a list of all completed user tasks (Non engine tasks)
         completed_user_tasks = self.completed_user_tasks()
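The fall-through added to next_task() walks the task tree and keeps the last task it sees in the ERROR state. A rough sketch of that selection logic, with made-up dataclasses standing in for SpiffTask and TaskState:

from dataclasses import dataclass
from enum import Enum, auto
from typing import Iterable, Optional


class FakeTaskState(Enum):
    READY = auto()
    WAITING = auto()
    ERROR = auto()
    COMPLETED = auto()


@dataclass
class FakeTask:
    name: str
    state: FakeTaskState


def latest_error_task(tasks: Iterable[FakeTask]) -> Optional[FakeTask]:
    # Keep overwriting so the last ERROR task in traversal order wins.
    error_task = None
    for task in tasks:
        if task.state == FakeTaskState.ERROR:
            error_task = task
    return error_task


print(latest_error_task([
    FakeTask("Start", FakeTaskState.COMPLETED),
    FakeTask("Script Task", FakeTaskState.ERROR),
]))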
@@ -462,6 +462,12 @@ class ProcessInstanceService:

         serialized_task_spec = processor.serialize_task_spec(spiff_task.task_spec)

+        # Grab the last error message.
+        error_message = None
+        for event in processor.process_instance_model.process_instance_events:
+            for detail in event.error_details:
+                error_message = detail.message
+
         task = Task(
             spiff_task.id,
             spiff_task.task_spec.name,
@@ -479,6 +485,7 @@ class ProcessInstanceService:
             event_definition=serialized_task_spec.get("event_definition"),
             call_activity_process_identifier=call_activity_process_identifier,
             calling_subprocess_task_id=calling_subprocess_task_id,
+            error_message=error_message,
         )

         return task
Binary file not shown (new image added, 1.9 KiB).
@@ -109,6 +109,7 @@ export default function ErrorDisplay() {

   if (errorObject) {
     const title = 'Error:';
+    window.scrollTo(0, 0); // Scroll back to the top of the page

     errorTag = (
       <Notification title={title} onClose={() => removeError()} type="error">
@@ -1190,7 +1190,7 @@ export default function ProcessInstanceListTable({
            return null;
          }}
          placeholder="Start typing username"
-         titleText="Process Initiator"
+         titleText="Started By"
          selectedItem={processInitiatorSelection}
        />
      );
@@ -1199,7 +1199,7 @@ export default function ProcessInstanceListTable({
        <TextInput
          id="process-instance-initiator-search"
          placeholder="Enter username"
-         labelText="Process Initiator"
+         labelText="Started By"
          invalid={processInitiatorNotFoundErrorText !== ''}
          invalidText={processInitiatorNotFoundErrorText}
          onChange={(event: any) => {
@@ -15,7 +15,7 @@ export default function ProcessModelSearch({
   processModels,
   selectedItem,
   onChange,
-  titleText = 'Process model',
+  titleText = 'Process',
 }: OwnProps) {
   const getParentGroupsDisplayName = (processModel: ProcessModel) => {
     if (processModel.parent_groups) {
@@ -81,6 +81,7 @@ export interface ProcessInstanceTask {

   potential_owner_usernames?: string;
   assigned_user_group_identifier?: string;
+  error_message?: string;
 }

 export interface ProcessReference {
@@ -10,6 +10,7 @@ import { getBasicHeaders } from '../services/HttpService';
 import InstructionsForEndUser from '../components/InstructionsForEndUser';
 import ProcessBreadcrumb from '../components/ProcessBreadcrumb';
 import { ProcessInstanceTask } from '../interfaces';
+import useAPIError from '../hooks/UseApiError';

 export default function ProcessInterstitial() {
   const [data, setData] = useState<any[]>([]);
@@ -20,6 +21,7 @@ export default function ProcessInterstitial() {
   const userTasks = useMemo(() => {
     return ['User Task', 'Manual Task'];
   }, []);
+  const { addError } = useAPIError();

   useEffect(() => {
     fetchEventSource(
@@ -27,9 +29,13 @@ export default function ProcessInterstitial() {
       {
         headers: getBasicHeaders(),
         onmessage(ev) {
-          const task = JSON.parse(ev.data);
-          setData((prevData) => [...prevData, task]);
-          setLastTask(task);
+          const retValue = JSON.parse(ev.data);
+          if ('error_code' in retValue) {
+            addError(retValue);
+          } else {
+            setData((prevData) => [...prevData, retValue]);
+            setLastTask(retValue);
+          }
         },
         onclose() {
           setState('CLOSED');
@@ -85,6 +91,8 @@ export default function ProcessInterstitial() {
         return <img src="/interstitial/waiting.png" alt="Waiting ...." />;
       case 'COMPLETED':
         return <img src="/interstitial/completed.png" alt="Completed" />;
+      case 'ERROR':
+        return <img src="/interstitial/errored.png" alt="Errored" />;
       default:
         return getStatus();
     }
@@ -104,6 +112,10 @@ export default function ProcessInterstitial() {
     if (shouldRedirect(myTask)) {
       return <div>Redirecting you to the next task now ...</div>;
     }
+    if (myTask.error_message) {
+      return <div>{myTask.error_message}</div>;
+    }
+
     return (
       <div>
         <InstructionsForEndUser task={myTask} />
@@ -147,7 +159,7 @@ export default function ProcessInterstitial() {
             <Column md={2} lg={4} sm={2}>
               Task: <em>{d.title}</em>
             </Column>
-            <Column md={6} lg={8} sm={4}>
+            <Column md={6} lg={6} sm={4}>
               {userMessage(d)}
             </Column>
           </Grid>
@@ -117,10 +117,10 @@ export default function TaskShow() {
   const processResult = (result: ProcessInstanceTask) => {
     setTask(result);
     setDisabled(false);
-
     if (!result.can_complete) {
       navigateToInterstitial(result);
     }
+    window.scrollTo(0, 0); // Scroll back to the top of the page

     /* Disable call to load previous tasks -- do not display menu.
     const url = `/v1.0/process-instances/for-me/${modifyProcessIdentifierForPathParam(