From c4bb1e188dbdf45045e68d32e22e7f219526cd52 Mon Sep 17 00:00:00 2001
From: Jon Herron
Date: Thu, 12 Jan 2023 18:16:06 -0500
Subject: [PATCH] POC checking cumulative task data len

---
 .../services/process_instance_processor.py         | 15 +++++++++++++++
 1 file changed, 15 insertions(+)

diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/services/process_instance_processor.py b/spiffworkflow-backend/src/spiffworkflow_backend/services/process_instance_processor.py
index 3631f0b79..5dddcbbd8 100644
--- a/spiffworkflow-backend/src/spiffworkflow_backend/services/process_instance_processor.py
+++ b/spiffworkflow-backend/src/spiffworkflow_backend/services/process_instance_processor.py
@@ -564,10 +564,25 @@ class ProcessInstanceProcessor:
             "lane_assignment_id": lane_assignment_id,
         }
 
+    def check_task_data_size(self, tasks_dict: dict) -> None:
+        """Raise an ApiError if the combined task data exceeds the size limit."""
+        # Only non-empty task data contributes; serialized JSON length approximates payload size.
+        task_data = [v["data"] for v in tasks_dict.values() if v["data"]]
+        task_data_len = len(json.dumps(task_data))
+        task_data_limit = 1024  # POC limit, in characters of serialized JSON
+
+        if task_data_len > task_data_limit:
+            raise ApiError(
+                error_code="task_data_size_exceeded",
+                message=f"Maximum task data size of {task_data_limit} exceeded.",
+            )
+
     def spiff_step_details_mapping(self) -> dict:
         """SaveSpiffStepDetails."""
         bpmn_json = self.serialize()
         wf_json = json.loads(bpmn_json)
+        # Guard against runaway cumulative task data before persisting step details.
+        self.check_task_data_size(wf_json["tasks"])
         task_json = {"tasks": wf_json["tasks"], "subprocesses": wf_json["subprocesses"]}
         return {