diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/api.yml b/spiffworkflow-backend/src/spiffworkflow_backend/api.yml index 06f482bce..373f18319 100755 --- a/spiffworkflow-backend/src/spiffworkflow_backend/api.yml +++ b/spiffworkflow-backend/src/spiffworkflow_backend/api.yml @@ -1832,10 +1832,10 @@ paths: description: The unique id of an existing process instance. schema: type: integer - - name: terminate_loop + - name: save_as_draft in: query required: false - description: Terminate the loop on a looping task + description: Save the data to the task but do not complete it. schema: type: boolean get: diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/routes/tasks_controller.py b/spiffworkflow-backend/src/spiffworkflow_backend/routes/tasks_controller.py index 7145dcce3..7c8973aa4 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/routes/tasks_controller.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/routes/tasks_controller.py @@ -180,13 +180,7 @@ def task_data_show( process_instance_id: int, task_guid: str, ) -> flask.wrappers.Response: - task_model = TaskModel.query.filter_by(guid=task_guid, process_instance_id=process_instance_id).first() - if task_model is None: - raise ApiError( - error_code="task_not_found", - message=f"Cannot find a task with guid '{task_guid}' for process instance '{process_instance_id}'", - status_code=400, - ) + task_model = _get_task_model_from_guid_or_raise(task_guid, process_instance_id) task_model.data = task_model.json_data() return make_response(jsonify(task_model), 200) @@ -216,13 +210,11 @@ def task_data_update( if "new_task_data" in body: new_task_data_str: str = body["new_task_data"] new_task_data_dict = json.loads(new_task_data_str) - json_data_dict = TaskService.update_task_data_on_task_model( + json_data_dict = TaskService.update_task_data_on_task_model_and_return_dict_if_updated( task_model, new_task_data_dict, "json_data_hash" ) if json_data_dict is not None: TaskService.insert_or_update_json_data_records({json_data_dict["hash"]: json_data_dict}) - # json_data = JsonDataModel(**json_data_dict) - # db.session.add(json_data) ProcessInstanceProcessor.add_event_to_process_instance( process_instance, ProcessInstanceEventType.task_data_edited.value, task_guid=task_guid ) @@ -389,11 +381,11 @@ def process_data_show( ) -def task_submit_shared( +def _task_submit_shared( process_instance_id: int, task_guid: str, body: Dict[str, Any], - terminate_loop: bool = False, + save_as_draft: bool = False, ) -> flask.wrappers.Response: principal = _find_principal_or_raise() process_instance = _find_process_instance_by_id_or_raise(process_instance_id) @@ -420,25 +412,10 @@ def task_submit_shared( ) ) - if terminate_loop and spiff_task.is_looping(): - spiff_task.terminate_loop() - - human_task = _find_human_task_or_raise( - process_instance_id=process_instance_id, - task_guid=task_guid, - only_tasks_that_can_be_completed=True, - ) - - with sentry_sdk.start_span(op="task", description="complete_form_task"): - with ProcessInstanceQueueService.dequeued(process_instance): - ProcessInstanceService.complete_form_task( - processor=processor, - spiff_task=spiff_task, - data=body, - user=g.user, - human_task=human_task, - ) - + # multi-instance code from crconnect - we may or may not need it + # if terminate_loop and spiff_task.is_looping(): + # spiff_task.terminate_loop() + # # If we need to update all tasks, then get the next ready task and if it a multi-instance with the same # task spec, complete that form as well.
# if update_all: @@ -449,15 +426,41 @@ def task_submit_shared( # last_index = next_task.task_info()["mi_index"] # next_task = processor.next_task() - next_human_task_assigned_to_me = ( - HumanTaskModel.query.filter_by(process_instance_id=process_instance_id, completed=False) - .order_by(asc(HumanTaskModel.id)) # type: ignore - .join(HumanTaskUserModel) - .filter_by(user_id=principal.user_id) - .first() - ) - if next_human_task_assigned_to_me: - return make_response(jsonify(HumanTaskModel.to_task(next_human_task_assigned_to_me)), 200) + if save_as_draft: + task_model = _get_task_model_from_guid_or_raise(task_guid, process_instance_id) + json_data_dict = TaskService.update_task_data_on_task_model_and_return_dict_if_updated( + task_model, body, "json_data_hash" + ) + if json_data_dict is not None: + TaskService.insert_or_update_json_data_dict(json_data_dict) + db.session.add(task_model) + db.session.commit() + else: + human_task = _find_human_task_or_raise( + process_instance_id=process_instance_id, + task_guid=task_guid, + only_tasks_that_can_be_completed=True, + ) + + with sentry_sdk.start_span(op="task", description="complete_form_task"): + with ProcessInstanceQueueService.dequeued(process_instance): + ProcessInstanceService.complete_form_task( + processor=processor, + spiff_task=spiff_task, + data=body, + user=g.user, + human_task=human_task, + ) + + next_human_task_assigned_to_me = ( + HumanTaskModel.query.filter_by(process_instance_id=process_instance_id, completed=False) + .order_by(asc(HumanTaskModel.id)) # type: ignore + .join(HumanTaskUserModel) + .filter_by(user_id=principal.user_id) + .first() + ) + if next_human_task_assigned_to_me: + return make_response(jsonify(HumanTaskModel.to_task(next_human_task_assigned_to_me)), 200) return Response(json.dumps({"ok": True}), status=202, mimetype="application/json") @@ -466,11 +469,11 @@ def task_submit( process_instance_id: int, task_guid: str, body: Dict[str, Any], - terminate_loop: bool = False, + save_as_draft: bool = False, ) -> flask.wrappers.Response: """Task_submit_user_data.""" with sentry_sdk.start_span(op="controller_action", description="tasks_controller.task_submit"): - return task_submit_shared(process_instance_id, task_guid, body, terminate_loop) + return _task_submit_shared(process_instance_id, task_guid, body, save_as_draft) def _get_tasks( @@ -764,3 +767,16 @@ def _munge_form_ui_schema_based_on_hidden_fields_in_task_data(task: Task) -> Non relevant_depth_of_ui_schema = relevant_depth_of_ui_schema[hidden_field_part] if len(hidden_field_parts) == ii + 1: relevant_depth_of_ui_schema["ui:widget"] = "hidden" + + +def _get_task_model_from_guid_or_raise(task_guid: str, process_instance_id: int) -> TaskModel: + task_model: Optional[TaskModel] = TaskModel.query.filter_by( + guid=task_guid, process_instance_id=process_instance_id + ).first() + if task_model is None: + raise ApiError( + error_code="task_not_found", + message=f"Cannot find a task with guid '{task_guid}' for process instance '{process_instance_id}'", + status_code=400, + ) + return task_model diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/services/process_instance_processor.py b/spiffworkflow-backend/src/spiffworkflow_backend/services/process_instance_processor.py index d2579357d..5505f635f 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/services/process_instance_processor.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/services/process_instance_processor.py @@ -93,6 +93,7 @@ from 
spiffworkflow_backend.services.process_instance_queue_service import Proces from spiffworkflow_backend.services.process_model_service import ProcessModelService from spiffworkflow_backend.services.service_task_service import ServiceTaskDelegate from spiffworkflow_backend.services.spec_file_service import SpecFileService +from spiffworkflow_backend.services.task_service import JsonDataDict from spiffworkflow_backend.services.task_service import TaskService from spiffworkflow_backend.services.user_service import UserService from spiffworkflow_backend.services.workflow_execution_service import ( @@ -1790,12 +1791,9 @@ class ProcessInstanceProcessor: db.session.add(human_task) json_data_dict_list = TaskService.update_task_model(task_model, spiff_task, self._serializer) - for json_data_dict in json_data_dict_list: - if json_data_dict is not None: - json_data = db.session.query(JsonDataModel.id).filter_by(hash=json_data_dict["hash"]).first() - if json_data is None: - json_data = JsonDataModel(**json_data_dict) - db.session.add(json_data) + json_data_dict_mapping: dict[str, JsonDataDict] = {} + TaskService.update_json_data_dicts_using_list(json_data_dict_list, json_data_dict_mapping) + TaskService.insert_or_update_json_data_records(json_data_dict_mapping) self.add_event_to_process_instance( self.process_instance_model, diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/services/task_service.py b/spiffworkflow-backend/src/spiffworkflow_backend/services/task_service.py index e9839fa74..b89c0bfbe 100644 --- a/spiffworkflow-backend/src/spiffworkflow_backend/services/task_service.py +++ b/spiffworkflow-backend/src/spiffworkflow_backend/services/task_service.py @@ -130,7 +130,7 @@ class TaskService: self.task_models[task_model.guid] = task_model if bpmn_process_json_data is not None: json_data_dict_list.append(bpmn_process_json_data) - self._update_json_data_dicts_using_list(json_data_dict_list, self.json_data_dicts) + self.update_json_data_dicts_using_list(json_data_dict_list, self.json_data_dicts) if task_model.state == "COMPLETED" or task_failed: event_type = ProcessInstanceEventType.task_completed.value @@ -207,8 +207,12 @@ class TaskService: python_env_data_dict = cls._get_python_env_data_dict_from_spiff_task(spiff_task, serializer) task_model.properties_json = new_properties_json task_model.state = TaskStateNames[new_properties_json["state"]] - json_data_dict = cls.update_task_data_on_task_model(task_model, spiff_task_data, "json_data_hash") - python_env_dict = cls.update_task_data_on_task_model(task_model, python_env_data_dict, "python_env_data_hash") + json_data_dict = cls.update_task_data_on_task_model_and_return_dict_if_updated( + task_model, spiff_task_data, "json_data_hash" + ) + python_env_dict = cls.update_task_data_on_task_model_and_return_dict_if_updated( + task_model, python_env_data_dict, "python_env_data_hash" + ) return [json_data_dict, python_env_dict] @classmethod @@ -446,7 +450,11 @@ class TaskService: return json_data_dict @classmethod - def update_task_data_on_task_model( + def insert_or_update_json_data_dict(cls, json_data_dict: JsonDataDict) -> None: + TaskService.insert_or_update_json_data_records({json_data_dict["hash"]: json_data_dict}) + + @classmethod + def update_task_data_on_task_model_and_return_dict_if_updated( cls, task_model: TaskModel, task_data_dict: dict, task_model_data_column: str ) -> Optional[JsonDataDict]: task_data_json = json.dumps(task_data_dict, sort_keys=True) @@ -501,11 +509,11 @@ class TaskService: python_env_data_hash: Optional[str] = 
None, ) -> None: if json_data_hash is None: - cls.update_task_data_on_task_model(task_model, {}, "json_data_hash") + cls.update_task_data_on_task_model_and_return_dict_if_updated(task_model, {}, "json_data_hash") else: task_model.json_data_hash = json_data_hash if python_env_data_hash is None: - cls.update_task_data_on_task_model(task_model, {}, "python_env_data") + cls.update_task_data_on_task_model_and_return_dict_if_updated(task_model, {}, "python_env_data") else: task_model.python_env_data_hash = python_env_data_hash @@ -556,7 +564,7 @@ class TaskService: return converted_data @classmethod - def _update_json_data_dicts_using_list( + def update_json_data_dicts_using_list( cls, json_data_dict_list: list[Optional[JsonDataDict]], json_data_dicts: dict[str, JsonDataDict] ) -> None: for json_data_dict in json_data_dict_list: diff --git a/spiffworkflow-frontend/src/routes/TaskShow.tsx b/spiffworkflow-frontend/src/routes/TaskShow.tsx index 058ee0b52..5362cf323 100644 --- a/spiffworkflow-frontend/src/routes/TaskShow.tsx +++ b/spiffworkflow-frontend/src/routes/TaskShow.tsx @@ -1,4 +1,4 @@ -import { useEffect, useState } from 'react'; +import React, { useEffect, useState } from 'react'; import { useNavigate, useParams } from 'react-router-dom'; import validator from '@rjsf/validator-ajv8'; @@ -9,6 +9,7 @@ import { Grid, Column, Button, + ButtonSet, // @ts-ignore } from '@carbon/react'; @@ -21,6 +22,13 @@ import { modifyProcessIdentifierForPathParam } from '../helpers'; import { ProcessInstanceTask } from '../interfaces'; import ProcessBreadcrumb from '../components/ProcessBreadcrumb'; +class UnexpectedHumanTaskType extends Error { + constructor(message: string) { + super(message); + this.name = 'UnexpectedHumanTaskType'; + } +} + export default function TaskShow() { const [task, setTask] = useState(null); const [userTasks] = useState(null); @@ -30,6 +38,9 @@ export default function TaskShow() { const { addError, removeError } = useAPIError(); + // eslint-disable-next-line sonarjs/no-duplicate-string + const supportedHumanTaskTypes = ['User Task', 'Manual Task']; + useEffect(() => { const processResult = (result: ProcessInstanceTask) => { setTask(result); @@ -76,16 +87,22 @@ export default function TaskShow() { } }; - const handleFormSubmit = (event: any) => { + const handleFormSubmit = (formObject: any, event: any) => { if (disabled) { return; } + const submitButtonId = event.nativeEvent.submitter.id; + let queryParams = ''; + console.log('submitButtonId', submitButtonId); + if (submitButtonId === 'save-as-draft-button') { + queryParams = '?save_as_draft=true'; + } setDisabled(true); removeError(); - const dataToSubmit = event.formData; + const dataToSubmit = formObject.formData; delete dataToSubmit.isManualTask; HttpService.makeCallToBackend({ - path: `/tasks/${params.process_instance_id}/${params.task_id}`, + path: `/tasks/${params.process_instance_id}/${params.task_id}${queryParams}`, successCallback: processSubmitResult, failureCallback: (error: any) => { addError(error); @@ -226,16 +243,33 @@ export default function TaskShow() { } if (task.state === 'READY') { - let buttonText = 'Submit'; + let submitButtonText = 'Submit'; + let saveAsDraftButton = null; if (task.type === 'Manual Task') { - buttonText = 'Continue'; + submitButtonText = 'Continue'; + } else if (task.type === 'User Task') { + saveAsDraftButton = ( + + ); + } else { + throw new UnexpectedHumanTaskType( + `Invalid task type given: ${task.type}. 
Only supported types: ${supportedHumanTaskTypes}` ); } reactFragmentToHideSubmitButton = ( - <Button type="submit" disabled={disabled}>
- {buttonText} - </Button>
+ <ButtonSet> + <Button type="submit" disabled={disabled}> + {submitButtonText} + </Button> + {saveAsDraftButton} + </ButtonSet> ); }
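
A few usage notes on the behavior introduced above. The new `save_as_draft` query parameter reuses the existing task-submit endpoint: with the flag set, the submitted form data is written onto the task model and the human task stays open; without it, the task is completed as before, and the response is either the next human task assigned to the submitting user or a plain acknowledgement. A rough sketch of calling the endpoint directly — the `/v1.0` prefix, the PUT verb, and the token handling here are assumptions drawn from how the frontend talks to this API, not part of this diff:

```python
import requests

BASE_URL = "http://localhost:7000/v1.0"  # assumed backend location and API prefix
PROCESS_INSTANCE_ID = 42                 # hypothetical values for illustration
TASK_GUID = "3f3ad04b-0000-0000-0000-000000000000"
HEADERS = {"Authorization": "Bearer <access_token>"}

# Draft save: the data is stored on the task, the task stays READY.
draft = requests.put(
    f"{BASE_URL}/tasks/{PROCESS_INSTANCE_ID}/{TASK_GUID}",
    params={"save_as_draft": "true"},
    json={"favorite_color": "blue"},
    headers=HEADERS,
)
print(draft.status_code)  # expect 202 with {"ok": true}

# Real submit: completes the human task; returns the next task assigned
# to the current user (200) or {"ok": true} (202) if there is none.
submitted = requests.put(
    f"{BASE_URL}/tasks/{PROCESS_INSTANCE_ID}/{TASK_GUID}",
    json={"favorite_color": "blue"},
    headers=HEADERS,
)
print(submitted.status_code, submitted.json())
```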
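The rename from `update_task_data_on_task_model` to `update_task_data_on_task_model_and_return_dict_if_updated` makes the contract explicit: the method serializes the task data (the visible `json.dumps(..., sort_keys=True)`), stores the resulting hash on the given column, and hands back a `JsonDataDict` only when that hash actually changed, so callers can skip the upsert otherwise. A standalone sketch of the idea — the sha256 digest and the plain-dict "record" are assumptions for illustration, not the service's actual internals:

```python
import hashlib
import json
from typing import Optional


def update_data_hash(record: dict, new_data: dict, hash_column: str) -> Optional[dict]:
    """Return a {'hash', 'data'} dict only when the stored hash changes."""
    serialized = json.dumps(new_data, sort_keys=True)
    new_hash = hashlib.sha256(serialized.encode("utf-8")).hexdigest()
    if record.get(hash_column) == new_hash:
        return None  # unchanged -- nothing for the caller to insert or update
    record[hash_column] = new_hash
    return {"hash": new_hash, "data": new_data}


task_row = {"json_data_hash": None}
first = update_data_hash(task_row, {"answer": 42}, "json_data_hash")
repeat = update_data_hash(task_row, {"answer": 42}, "json_data_hash")
assert first is not None and repeat is None
```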
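In `ProcessInstanceProcessor`, the inline `JsonDataModel` upsert loop is replaced by the now-public `TaskService.update_json_data_dicts_using_list`, which collects pending JSON-data writes into a dict keyed by hash so identical payloads collapse into a single record before `insert_or_update_json_data_records` runs. The helper's body is not shown in this diff; the sketch below assumes the straightforward filter-and-key-by-hash behavior its call sites imply, with a stand-in `JsonDataDict`:

```python
from typing import Optional, TypedDict


class JsonDataDict(TypedDict):  # stand-in; assumed to carry at least "hash" and "data"
    hash: str
    data: dict


def update_json_data_dicts_using_list(
    json_data_dict_list: list[Optional[JsonDataDict]],
    json_data_dicts: dict[str, JsonDataDict],
) -> None:
    # None entries mean "nothing changed for that column"; everything else is
    # keyed by hash so duplicate payloads produce a single insert/update.
    for json_data_dict in json_data_dict_list:
        if json_data_dict is not None:
            json_data_dicts[json_data_dict["hash"]] = json_data_dict


updates: list[Optional[JsonDataDict]] = [
    {"hash": "abc123", "data": {"x": 1}},
    None,
    {"hash": "abc123", "data": {"x": 1}},
]
mapping: dict[str, JsonDataDict] = {}
update_json_data_dicts_using_list(updates, mapping)
assert list(mapping) == ["abc123"]  # one record to hand to the bulk upsert
```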
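`insert_or_update_json_data_dict` is a thin wrapper that feeds a single record into `insert_or_update_json_data_records`, which persists each hash at most once; the inline loop removed from `ProcessInstanceProcessor` above shows the same select-then-insert shape. A self-contained sketch of that pattern against an in-memory database — the model and the explicit `session` argument below are stand-ins for illustration; the real helper works on `db.session` and may use a dialect-specific bulk upsert:

```python
from sqlalchemy import JSON, Column, Integer, String, create_engine
from sqlalchemy.orm import Session, declarative_base

Base = declarative_base()


class JsonDataModel(Base):  # stand-in for the real spiffworkflow_backend model
    __tablename__ = "json_data"
    id = Column(Integer, primary_key=True)
    hash = Column(String(255), unique=True, index=True)
    data = Column(JSON)


def insert_or_update_json_data_records(session: Session, records: dict) -> None:
    # Equivalent of the loop removed from ProcessInstanceProcessor:
    # only insert a row when no row with that hash exists yet.
    for json_data_dict in records.values():
        existing = session.query(JsonDataModel.id).filter_by(hash=json_data_dict["hash"]).first()
        if existing is None:
            session.add(JsonDataModel(**json_data_dict))


engine = create_engine("sqlite:///:memory:")
Base.metadata.create_all(engine)
with Session(engine) as session:
    records = {"abc123": {"hash": "abc123", "data": {"x": 1}}}
    insert_or_update_json_data_records(session, records)
    insert_or_update_json_data_records(session, records)  # second call is a no-op
    session.commit()
    assert session.query(JsonDataModel).count() == 1
```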