added api to get task data and do not return from task data list anymore w/ burnettk
parent d2eb2d85d8
commit 6dc42aa273
@@ -1559,6 +1559,39 @@ paths:
                 items:
                   $ref: "#/components/schemas/Task"
 
+  /task-data/{modified_process_model_identifier}/{process_instance_id}/{spiff_step}:
+    parameters:
+      - name: modified_process_model_identifier
+        in: path
+        required: true
+        description: The modified id of an existing process model
+        schema:
+          type: string
+      - name: process_instance_id
+        in: path
+        required: true
+        description: The unique id of an existing process instance.
+        schema:
+          type: integer
+      - name: spiff_step
+        in: path
+        required: true
+        description: If set will return the tasks as they were during a specific step of execution.
+        schema:
+          type: integer
+    get:
+      operationId: spiffworkflow_backend.routes.tasks_controller.task_data_show
+      summary: Get task data for a single task in a spiff step.
+      tags:
+        - Process Instances
+      responses:
+        "200":
+          description: list of tasks
+          content:
+            application/json:
+              schema:
+                $ref: "#/components/schemas/Task"
+
   /task-data/{modified_process_model_identifier}/{process_instance_id}/{task_id}:
     parameters:
       - name: modified_process_model_identifier
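Reviewer note: a minimal sketch of how a client could exercise the new GET endpoint defined above once this spec change is deployed. The base URL, the /v1.0 prefix, the bearer-token header, and all identifiers below are assumptions for illustration, not part of this commit.

import requests  # assumption: any HTTP client works; requests is used for brevity

BASE_URL = "http://localhost:7000/v1.0"  # assumed local backend URL and API prefix
HEADERS = {"Authorization": "Bearer <access_token>"}  # assumed auth scheme

# A "modified" process model identifier is assumed here to be the model path with
# "/" replaced by ":". All three values below are hypothetical.
modified_process_model_identifier = "misc:example:my-process-model"
process_instance_id = 42
spiff_step = 3

response = requests.get(
    f"{BASE_URL}/task-data/{modified_process_model_identifier}"
    f"/{process_instance_id}/{spiff_step}",
    headers=HEADERS,
    timeout=10,
)
response.raise_for_status()
task = response.json()  # a single serialized Task whose "data" holds that step's task data
print(task["data"])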
@@ -1579,6 +1612,12 @@ paths:
         description: The unique id of the task.
         schema:
           type: string
+      - name: spiff_step
+        in: query
+        required: false
+        description: If set will return the tasks as they were during a specific step of execution.
+        schema:
+          type: integer
     put:
       operationId: spiffworkflow_backend.routes.process_api_blueprint.task_data_update
       summary: Update the task data for requested instance and task
@@ -576,31 +576,15 @@ def process_instance_task_list(
 
     steps_by_id = {step_detail.task_id: step_detail for step_detail in step_details}
 
-    # FIXME: never evaluate task data in this call and instead create a new api getter
-    # that will return the task data for a given step only. We think processing this
-    # data is what is causing long load times on the processInstanceShowPage.
     subprocess_state_overrides = {}
     for step_detail in step_details:
         if step_detail.task_id in tasks:
-            task_data = (
-                step_detail.task_json["task_data"] | step_detail.task_json["python_env"]
-            )
-            if task_data is None:
-                task_data = {}
-            tasks[step_detail.task_id]["data"] = task_data
             tasks[step_detail.task_id]["state"] = Task.task_state_name_to_int(
                 step_detail.task_state
             )
         else:
             for subprocess_id, subprocess_info in subprocesses.items():
                 if step_detail.task_id in subprocess_info["tasks"]:
-                    task_data = (
-                        step_detail.task_json["task_data"]
-                        | step_detail.task_json["python_env"]
-                    )
-                    if task_data is None:
-                        task_data = {}
-                    subprocess_info["tasks"][step_detail.task_id]["data"] = task_data
                     subprocess_info["tasks"][step_detail.task_id]["state"] = (
                         Task.task_state_name_to_int(step_detail.task_state)
                     )
@@ -657,8 +641,6 @@ def process_instance_task_list(
             calling_subprocess_task_id=calling_subprocess_task_id,
             task_spiff_step=task_spiff_step,
         )
-        if get_task_data:
-            task.data = spiff_task.data
         tasks.append(task)
 
     return make_response(jsonify(tasks), 200)
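Reviewer note: the per-task work the two hunks above remove from process_instance_task_list is a plain dict union of the stored task data and python env; the same expression now runs only in the new task_data_show endpoint below, for a single step. A standalone sketch of the operator with made-up values:

# Python 3.9+ dict union: on key collisions the right-hand operand wins,
# so python_env values override task_data values, as in the controller code.
task_json = {
    "task_data": {"invoice_id": 123, "status": "submitted"},    # hypothetical values
    "python_env": {"status": "approved", "approver": "alice"},  # hypothetical values
}

task_data = task_json["task_data"] | task_json["python_env"]
assert task_data == {"invoice_id": 123, "status": "approved", "approver": "alice"}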
@@ -36,6 +36,7 @@ from spiffworkflow_backend.models.human_task_user import HumanTaskUserModel
 from spiffworkflow_backend.models.process_instance import ProcessInstanceModel
 from spiffworkflow_backend.models.process_instance import ProcessInstanceStatus
 from spiffworkflow_backend.models.process_model import ProcessModelInfo
+from spiffworkflow_backend.models.spiff_step_details import SpiffStepDetailsModel
 from spiffworkflow_backend.models.task import Task
 from spiffworkflow_backend.models.user import UserModel
 from spiffworkflow_backend.routes.process_api_blueprint import (
@@ -171,6 +172,127 @@ def task_list_for_my_groups(
     )
 
 
+def task_data_show(
+    modified_process_model_identifier: str,
+    process_instance_id: int,
+    spiff_step: int = 0,
+) -> flask.wrappers.Response:
+    process_instance = _find_process_instance_by_id_or_raise(process_instance_id)
+    step_detail = (
+        db.session.query(SpiffStepDetailsModel)
+        .filter(
+            SpiffStepDetailsModel.process_instance_id == process_instance.id,
+            SpiffStepDetailsModel.spiff_step == spiff_step,
+        )
+        .first()
+    )
+
+    if step_detail is None:
+        raise ApiError(
+            error_code="spiff_step_for_proces_instance_not_found",
+            message=(
+                "The given spiff step for the given process instance could not be"
+                " found."
+            ),
+            status_code=400,
+        )
+
+    # step_details = step_detail_query.all()
+    # bpmn_json = json.loads(process_instance.bpmn_json or "{}")
+    # tasks = bpmn_json["tasks"]
+    # subprocesses = bpmn_json["subprocesses"]
+    #
+    # steps_by_id = {step_detail.task_id: step_detail for step_detail in step_details}
+    #
+    # # FIXME: never evaluate task data in this call and instead create a new api getter
+    # # that will return the task data for a given step only. We think processing this
+    # # data is what is causing long load times on the processInstanceShowPage.
+    # subprocess_state_overrides = {}
+    # if step_detail.task_id in tasks:
+    #     task_data = (
+    #         step_detail.task_json["task_data"] | step_detail.task_json["python_env"]
+    #     )
+    #     if task_data is None:
+    #         task_data = {}
+    #     tasks[step_detail.task_id]["data"] = task_data
+    #     tasks[step_detail.task_id]["state"] = Task.task_state_name_to_int(
+    #         step_detail.task_state
+    #     )
+    # else:
+    #     for subprocess_id, subprocess_info in subprocesses.items():
+    #         if step_detail.task_id in subprocess_info["tasks"]:
+    #             task_data = (
+    #                 step_detail.task_json["task_data"]
+    #                 | step_detail.task_json["python_env"]
+    #             )
+    #             if task_data is None:
+    #                 task_data = {}
+    #             subprocess_info["tasks"][step_detail.task_id]["data"] = task_data
+    #             subprocess_info["tasks"][step_detail.task_id]["state"] = (
+    #                 Task.task_state_name_to_int(step_detail.task_state)
+    #             )
+    #             subprocess_state_overrides[subprocess_id] = TaskState.WAITING
+    #
+    # for subprocess_info in subprocesses.values():
+    #     for spiff_task_id in subprocess_info["tasks"]:
+    #         if spiff_task_id not in steps_by_id:
+    #             subprocess_info["tasks"][spiff_task_id]["data"] = {}
+    #             subprocess_info["tasks"][spiff_task_id]["state"] = (
+    #                 subprocess_state_overrides.get(spiff_task_id, TaskState.FUTURE)
+    #             )
+    # for spiff_task_id in tasks:
+    #     if spiff_task_id not in steps_by_id:
+    #         tasks[spiff_task_id]["data"] = {}
+    #         tasks[spiff_task_id]["state"] = subprocess_state_overrides.get(
+    #             spiff_task_id, TaskState.FUTURE
+    #         )
+    #
+    # process_instance.bpmn_json = json.dumps(bpmn_json)
+    #
+    # processor = ProcessInstanceProcessor(process_instance)
+    # spiff_task = processor.__class__.get_task_by_bpmn_identifier(
+    #     step_details[-1].bpmn_task_identifier, processor.bpmn_process_instance
+    # )
+    # if spiff_task is not None and spiff_task.state != TaskState.READY:
+    #     spiff_task.complete()
+    #
+    # spiff_tasks = None
+    # if all_tasks:
+    #     spiff_tasks = processor.bpmn_process_instance.get_tasks(TaskState.ANY_MASK)
+    # else:
+    #     spiff_tasks = processor.get_all_user_tasks()
+    #
+    # (
+    #     subprocesses_by_child_task_ids,
+    #     task_typename_by_task_id,
+    # ) = processor.get_subprocesses_by_child_task_ids()
+    # processor.get_highest_level_calling_subprocesses_by_child_task_ids(
+    #     subprocesses_by_child_task_ids, task_typename_by_task_id
+    # )
+    #
+    # tasks = []
+    # for spiff_task in spiff_tasks:
+    #     task_spiff_step: Optional[int] = None
+    #     if str(spiff_task.id) in steps_by_id:
+    #         task_spiff_step = steps_by_id[str(spiff_task.id)].spiff_step
+    #     calling_subprocess_task_id = subprocesses_by_child_task_ids.get(
+    #         str(spiff_task.id), None
+    #     )
+    processor = ProcessInstanceProcessor(process_instance)
+    spiff_task = processor.__class__.get_task_by_bpmn_identifier(
+        step_detail.bpmn_task_identifier, processor.bpmn_process_instance
+    )
+    task_data = step_detail.task_json["task_data"] | step_detail.task_json["python_env"]
+    task = ProcessInstanceService.spiff_task_to_api_task(
+        processor,
+        spiff_task,
+        task_spiff_step=spiff_step,
+    )
+    task.data = task_data
+
+    return make_response(jsonify(task), 200)
+
+
 def _munge_form_ui_schema_based_on_hidden_fields_in_task_data(task: Task) -> None:
     if task.form_ui_schema is None:
         task.form_ui_schema = {}
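Reviewer note: task_data_show above responds 400 with error_code "spiff_step_for_proces_instance_not_found" when no SpiffStepDetailsModel row matches the requested step. A sketch of how a caller might handle that; the client setup, helper name, and identifiers are the same assumptions as in the earlier sketch:

import requests  # assumption: illustrative client only


def fetch_task_data_for_step(base_url, headers, model_id, instance_id, spiff_step):
    """Return the task data dict for one spiff step, or None if the step is unknown."""
    url = f"{base_url}/task-data/{model_id}/{instance_id}/{spiff_step}"
    response = requests.get(url, headers=headers, timeout=10)
    if response.status_code == 400:
        # the controller raises ApiError("spiff_step_for_proces_instance_not_found")
        # when the process instance has no step detail row for this spiff step
        return None
    response.raise_for_status()
    return response.json().get("data")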
@@ -65,7 +65,8 @@ export default function ProcessInstanceShow({ variant }: OwnProps) {
     useState<ProcessInstance | null>(null);
   const [tasks, setTasks] = useState<ProcessInstanceTask[] | null>(null);
   const [tasksCallHadError, setTasksCallHadError] = useState<boolean>(false);
-  const [taskToDisplay, setTaskToDisplay] = useState<object | null>(null);
+  const [taskToDisplay, setTaskToDisplay] =
+    useState<ProcessInstanceTask | null>(null);
   const [taskDataToDisplay, setTaskDataToDisplay] = useState<string>('');
   const [processDataToDisplay, setProcessDataToDisplay] =
     useState<ProcessData | null>(null);
@@ -557,11 +558,23 @@ export default function ProcessInstanceShow({ variant }: OwnProps) {
     return <div />;
   };
 
-  const initializeTaskDataToDisplay = (task: any) => {
-    if (task == null) {
+  const processTaskResult = (result: ProcessInstanceTask) => {
+    if (result == null) {
       setTaskDataToDisplay('');
     } else {
-      setTaskDataToDisplay(JSON.stringify(task.data, null, 2));
+      setTaskDataToDisplay(JSON.stringify(result.data, null, 2));
+    }
+  };
+
+  const initializeTaskDataToDisplay = (task: ProcessInstanceTask | null) => {
+    if (task == null || task.state !== 'COMPLETED') {
+      setTaskDataToDisplay('');
+    } else {
+      HttpService.makeCallToBackend({
+        path: `/task-data/${params.process_model_id}/${params.process_instance_id}/${task.task_spiff_step}`,
+        httpMethod: 'GET',
+        successCallback: processTaskResult,
+      });
     }
   };
 
@@ -742,8 +755,13 @@ export default function ProcessInstanceShow({ variant }: OwnProps) {
   const saveTaskDataResult = (_: any) => {
     setEditingTaskData(false);
     const dataObject = taskDataStringToObject(taskDataToDisplay);
-    const taskToDisplayCopy = { ...taskToDisplay, data: dataObject }; // spread operator
-    setTaskToDisplay(taskToDisplayCopy);
+    if (taskToDisplay) {
+      const taskToDisplayCopy: ProcessInstanceTask = {
+        ...taskToDisplay,
+        data: dataObject,
+      }; // spread operator
+      setTaskToDisplay(taskToDisplayCopy);
+    }
     refreshPage();
   };
 
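Reviewer note: the frontend hunks above defer the task-data fetch until a completed task is selected, instead of shipping data for every task with the list. The same guard-then-fetch flow, sketched in Python for clarity; the task dict shape (state, task_spiff_step) mirrors the ProcessInstanceTask type, and the helper name and client setup are assumptions carried over from the earlier sketches:

import json

import requests  # assumption: illustrative client only


def task_data_for_selected_task(task, base_url, headers, model_id, instance_id):
    """Mirror initializeTaskDataToDisplay: fetch step data only for completed tasks."""
    if task is None or task.get("state") != "COMPLETED":
        return ""  # the UI clears the data pane in this case
    url = f"{base_url}/task-data/{model_id}/{instance_id}/{task['task_spiff_step']}"
    response = requests.get(url, headers=headers, timeout=10)
    response.raise_for_status()
    # what the UI renders in the task-data editor (JSON.stringify(result.data, null, 2))
    return json.dumps(response.json().get("data"), indent=2)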