From c4bb1e188dbdf45045e68d32e22e7f219526cd52 Mon Sep 17 00:00:00 2001
From: Jon Herron
Date: Thu, 12 Jan 2023 18:16:06 -0500
Subject: [PATCH 1/5] POC checking cumulative task data len

---
 .../services/process_instance_processor.py | 15 +++++++++++++++
 1 file changed, 15 insertions(+)

diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/services/process_instance_processor.py b/spiffworkflow-backend/src/spiffworkflow_backend/services/process_instance_processor.py
index 3631f0b79..5dddcbbd8 100644
--- a/spiffworkflow-backend/src/spiffworkflow_backend/services/process_instance_processor.py
+++ b/spiffworkflow-backend/src/spiffworkflow_backend/services/process_instance_processor.py
@@ -564,10 +564,25 @@ class ProcessInstanceProcessor:
             "lane_assignment_id": lane_assignment_id,
         }
 
+    def check_task_data_size(self, tasks_dict: dict) -> None:
+        """CheckTaskDataSize."""
+        task_data = [v["data"] for v in tasks_dict.values() if len(v["data"]) > 0]
+        task_data_len = len(json.dumps(task_data))
+        task_data_limit = 1024
+
+        if task_data_len > task_data_limit:
+            raise (
+                ApiError(
+                    error_code="task_data_size_exceeded",
+                    message=f"Maximum task data size of {task_data_limit} exceeded."
+                )
+            )
+
     def spiff_step_details_mapping(self) -> dict:
         """SaveSpiffStepDetails."""
         bpmn_json = self.serialize()
         wf_json = json.loads(bpmn_json)
+        self.check_task_data_size(wf_json["tasks"])
         task_json = {"tasks": wf_json["tasks"], "subprocesses": wf_json["subprocesses"]}
 
         return {

From ab2d0cdf6d34f7497bad8a2c3bac05ad054429d9 Mon Sep 17 00:00:00 2001
From: Jon Herron
Date: Fri, 13 Jan 2023 09:05:35 -0500
Subject: [PATCH 2/5] Better impl

---
 .../services/process_instance_processor.py | 40 ++++++++++++-------
 1 file changed, 25 insertions(+), 15 deletions(-)

diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/services/process_instance_processor.py b/spiffworkflow-backend/src/spiffworkflow_backend/services/process_instance_processor.py
index 5dddcbbd8..5de0d4ac7 100644
--- a/spiffworkflow-backend/src/spiffworkflow_backend/services/process_instance_processor.py
+++ b/spiffworkflow-backend/src/spiffworkflow_backend/services/process_instance_processor.py
@@ -564,25 +564,10 @@ class ProcessInstanceProcessor:
             "lane_assignment_id": lane_assignment_id,
         }
 
-    def check_task_data_size(self, tasks_dict: dict) -> None:
-        """CheckTaskDataSize."""
-        task_data = [v["data"] for v in tasks_dict.values() if len(v["data"]) > 0]
-        task_data_len = len(json.dumps(task_data))
-        task_data_limit = 1024
-
-        if task_data_len > task_data_limit:
-            raise (
-                ApiError(
-                    error_code="task_data_size_exceeded",
-                    message=f"Maximum task data size of {task_data_limit} exceeded."
-                )
-            )
-
     def spiff_step_details_mapping(self) -> dict:
         """SaveSpiffStepDetails."""
         bpmn_json = self.serialize()
         wf_json = json.loads(bpmn_json)
-        self.check_task_data_size(wf_json["tasks"])
         task_json = {"tasks": wf_json["tasks"], "subprocesses": wf_json["subprocesses"]}
 
         return {
@@ -1283,8 +1268,33 @@ class ProcessInstanceProcessor:
         except WorkflowTaskExecException as we:
             raise ApiError.from_workflow_exception("task_error", str(we), we) from we
 
+    def user_defined_task_data(self, task_data: dict) -> dict:
+        """UserDefinedTaskData."""
+        return {k: v for k, v in task_data.items() if k != "current_user"}
+
+    def check_task_data_size(self) -> None:
+        """CheckTaskDataSize."""
+        tasks_to_check = self.bpmn_process_instance.get_tasks(TaskState.FINISHED_MASK)
+        task_data = [self.user_defined_task_data(task.data) for task in tasks_to_check]
+        task_data_to_check = list(filter(len, task_data))
+
+        if len(task_data_to_check) == 0:
+            return
+
+        task_data_len = len(json.dumps(task_data_to_check))
+        task_data_limit = 1024
+
+        if task_data_len > task_data_limit:
+            raise (
+                ApiError(
+                    error_code="task_data_size_exceeded",
+                    message=f"Maximum task data size of {task_data_limit} exceeded."
+                )
+            )
+
     def serialize(self) -> str:
         """Serialize."""
+        self.check_task_data_size()
         return self._serializer.serialize_json(self.bpmn_process_instance)  # type: ignore
 
     def next_user_tasks(self) -> list[SpiffTask]:

From e0b1d6877b15da1a35f899be4edfbf3ce3fbc893 Mon Sep 17 00:00:00 2001
From: Jon Herron
Date: Fri, 13 Jan 2023 09:12:41 -0500
Subject: [PATCH 3/5] Bump the limit to 1mb

---
 .../services/process_instance_processor.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/services/process_instance_processor.py b/spiffworkflow-backend/src/spiffworkflow_backend/services/process_instance_processor.py
index 5de0d4ac7..f8e1f3d37 100644
--- a/spiffworkflow-backend/src/spiffworkflow_backend/services/process_instance_processor.py
+++ b/spiffworkflow-backend/src/spiffworkflow_backend/services/process_instance_processor.py
@@ -1282,7 +1282,7 @@ class ProcessInstanceProcessor:
             return
 
         task_data_len = len(json.dumps(task_data_to_check))
-        task_data_limit = 1024
+        task_data_limit = 1024**2
 
         if task_data_len > task_data_limit:
             raise (

From 8cd275a54f28ce6cbc6f0ddfa77fd95c21e88c58 Mon Sep 17 00:00:00 2001
From: Jon Herron
Date: Fri, 13 Jan 2023 09:45:05 -0500
Subject: [PATCH 4/5] Getting ./bin/pyl to pass

---
 .../services/process_instance_processor.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/services/process_instance_processor.py b/spiffworkflow-backend/src/spiffworkflow_backend/services/process_instance_processor.py
index f8e1f3d37..135fbf144 100644
--- a/spiffworkflow-backend/src/spiffworkflow_backend/services/process_instance_processor.py
+++ b/spiffworkflow-backend/src/spiffworkflow_backend/services/process_instance_processor.py
@@ -1288,7 +1288,7 @@ class ProcessInstanceProcessor:
             raise (
                 ApiError(
                     error_code="task_data_size_exceeded",
-                    message=f"Maximum task data size of {task_data_limit} exceeded."
+                    message=f"Maximum task data size of {task_data_limit} exceeded.",
                 )
             )
 

From 65fa9e483424e55ebe4d983ea3a62014b33d7e0e Mon Sep 17 00:00:00 2001
From: Jon Herron
Date: Fri, 13 Jan 2023 11:11:12 -0500
Subject: [PATCH 5/5] Fixed failing test

---
 .../services/process_instance_processor.py | 7 ++++---
 1 file changed, 4 insertions(+), 3 deletions(-)

diff --git a/spiffworkflow-backend/src/spiffworkflow_backend/services/process_instance_processor.py b/spiffworkflow-backend/src/spiffworkflow_backend/services/process_instance_processor.py
index 135fbf144..bf8c42b50 100644
--- a/spiffworkflow-backend/src/spiffworkflow_backend/services/process_instance_processor.py
+++ b/spiffworkflow-backend/src/spiffworkflow_backend/services/process_instance_processor.py
@@ -1278,10 +1278,11 @@ class ProcessInstanceProcessor:
         task_data = [self.user_defined_task_data(task.data) for task in tasks_to_check]
         task_data_to_check = list(filter(len, task_data))
 
-        if len(task_data_to_check) == 0:
-            return
+        try:
+            task_data_len = len(json.dumps(task_data_to_check))
+        except Exception:
+            task_data_len = 0
 
-        task_data_len = len(json.dumps(task_data_to_check))
         task_data_limit = 1024**2
 
         if task_data_len > task_data_limit: