handle dup key error when saving draft data by updating the record on conflict w/ burnettk (#518)

Co-authored-by: jasquat <jasquat@users.noreply.github.com>
This commit is contained in:
jasquat 2023-09-28 15:01:57 -04:00 committed by GitHub
parent a211f3fd49
commit 10fa556525
4 changed files with 80 additions and 36 deletions

View File

@@ -81,6 +81,13 @@ class JsonDataModel(SpiffworkflowBaseDBModel):
@classmethod
def create_and_insert_json_data_from_dict(cls, data: dict) -> str:
    """Upsert the json_data row for *data* and return its content hash.

    Delegates hash computation to json_data_dict_from_dict so the
    sha256-of-sorted-json logic lives in exactly one place, then persists
    the resulting dict via insert_or_update_json_data_dict.
    """
    json_data_dict = cls.json_data_dict_from_dict(data)
    cls.insert_or_update_json_data_dict(json_data_dict)
    return json_data_dict["hash"]
@classmethod
def json_data_dict_from_dict(cls, data: dict) -> JsonDataDict:
    """Pair *data* with the sha256 hex digest of its canonical JSON form.

    The JSON is serialized with sort_keys=True so semantically equal
    dicts always hash to the same value.
    """
    serialized = json.dumps(data, sort_keys=True)
    digest: str = sha256(serialized.encode("utf8")).hexdigest()
    result: JsonDataDict = {"hash": digest, "data": data}
    return result

View File

@@ -1,9 +1,14 @@
from __future__ import annotations
from dataclasses import dataclass
from typing import TypedDict
from flask import current_app
from sqlalchemy import ForeignKey
from sqlalchemy import UniqueConstraint
from sqlalchemy.dialects.mysql import insert as mysql_insert
from sqlalchemy.dialects.postgresql import insert as postgres_insert
from sqlalchemy.dialects.sqlite import insert as sqlite_insert
from spiffworkflow_backend.models.db import SpiffworkflowBaseDBModel
from spiffworkflow_backend.models.db import db
@@ -11,6 +16,12 @@ from spiffworkflow_backend.models.json_data import JsonDataModel # noqa: F401
from spiffworkflow_backend.models.process_instance import ProcessInstanceModel
class TaskDraftDataDict(TypedDict):
    """Keys required to upsert one task_draft_data row."""
    # id of the owning process instance
    process_instance_id: int
    # colon-joined bpmn process id path ending in the task definition id,
    # locating the task within the process instance
    task_definition_id_path: str
    # sha256 hash referencing a json_data row, or None when no draft form data exists
    saved_form_data_hash: str | None
@dataclass
class TaskDraftDataModel(SpiffworkflowBaseDBModel):
__tablename__ = "task_draft_data"
@@ -36,3 +47,23 @@ class TaskDraftDataModel(SpiffworkflowBaseDBModel):
if self.saved_form_data_hash is not None:
return JsonDataModel.find_data_dict_by_hash(self.saved_form_data_hash)
return None
@classmethod
def insert_or_update_task_draft_data_dict(cls, task_draft_data_dict: TaskDraftDataDict) -> None:
    """Upsert one task_draft_data row with a single dialect-aware statement.

    MySQL uses ON DUPLICATE KEY UPDATE; sqlite and postgres use
    ON CONFLICT DO UPDATE keyed on the (process_instance_id,
    task_definition_id_path) unique constraint. Only saved_form_data_hash
    is refreshed on conflict. The statement is executed on db.session but
    not committed here — the caller commits.
    """
    # Hoist the config lookup: the original read this key on every branch.
    database_type = current_app.config["SPIFFWORKFLOW_BACKEND_DATABASE_TYPE"]
    if database_type == "mysql":
        insert_stmt = mysql_insert(TaskDraftDataModel).values([task_draft_data_dict])
        on_duplicate_key_stmt = insert_stmt.on_duplicate_key_update(
            saved_form_data_hash=insert_stmt.inserted.saved_form_data_hash
        )
    else:
        # sqlite and postgres share the ON CONFLICT DO UPDATE api; only the
        # dialect-specific insert() constructor differs.
        dialect_insert = sqlite_insert if database_type == "sqlite" else postgres_insert
        insert_stmt = dialect_insert(TaskDraftDataModel).values([task_draft_data_dict])
        on_duplicate_key_stmt = insert_stmt.on_conflict_do_update(
            index_elements=["process_instance_id", "task_definition_id_path"],
            set_={"saved_form_data_hash": task_draft_data_dict["saved_form_data_hash"]},
        )
    db.session.execute(on_duplicate_key_stmt)

View File

@@ -32,6 +32,7 @@ from spiffworkflow_backend.models.db import db
from spiffworkflow_backend.models.group import GroupModel
from spiffworkflow_backend.models.human_task import HumanTaskModel
from spiffworkflow_backend.models.human_task_user import HumanTaskUserModel
from spiffworkflow_backend.models.json_data import JsonDataDict # noqa: F401
from spiffworkflow_backend.models.json_data import JsonDataModel # noqa: F401
from spiffworkflow_backend.models.process_instance import ProcessInstanceModel
from spiffworkflow_backend.models.process_instance import ProcessInstanceModelSchema
@@ -41,6 +42,8 @@ from spiffworkflow_backend.models.process_instance_event import ProcessInstanceE
from spiffworkflow_backend.models.process_model import ProcessModelInfo
from spiffworkflow_backend.models.task import Task
from spiffworkflow_backend.models.task import TaskModel
from spiffworkflow_backend.models.task_draft_data import TaskDraftDataDict
from spiffworkflow_backend.models.task_draft_data import TaskDraftDataModel
from spiffworkflow_backend.models.user import UserModel
from spiffworkflow_backend.routes.process_api_blueprint import _find_principal_or_raise
from spiffworkflow_backend.routes.process_api_blueprint import _find_process_instance_by_id_or_raise
@@ -700,33 +703,38 @@ def task_save_draft(
return make_response(jsonify({"ok": True}), 200)
task_model = _get_task_model_from_guid_or_raise(task_guid, process_instance_id)
task_draft_data = TaskService.task_draft_data_from_task_model(task_model, create_if_not_exists=True)
full_bpmn_process_id_path = TaskService.full_bpmn_process_path(task_model.bpmn_process, "id")
task_definition_id_path = f"{':'.join(map(str,full_bpmn_process_id_path))}:{task_model.task_definition_id}"
task_draft_data_dict: TaskDraftDataDict = {
"process_instance_id": process_instance.id,
"task_definition_id_path": task_definition_id_path,
"saved_form_data_hash": None,
}
if task_draft_data is not None:
json_data_dict = TaskService.update_task_data_on_task_model_and_return_dict_if_updated(
task_draft_data, body, "saved_form_data_hash"
)
if json_data_dict is not None:
JsonDataModel.insert_or_update_json_data_dict(json_data_dict)
db.session.add(task_draft_data)
try:
db.session.commit()
except OperationalError as exception:
db.session.rollback()
if "Deadlock" in str(exception):
task_draft_data = TaskService.task_draft_data_from_task_model(task_model)
# if we do not find a task_draft_data record, that means it was deleted when the form was submitted
# and we therefore have no need to save draft data
if task_draft_data is not None:
json_data_dict = TaskService.update_task_data_on_task_model_and_return_dict_if_updated(
task_draft_data, body, "saved_form_data_hash"
)
if json_data_dict is not None:
JsonDataModel.insert_or_update_json_data_dict(json_data_dict)
db.session.add(task_draft_data)
db.session.commit()
else:
raise exception
json_data_dict = JsonDataModel.json_data_dict_from_dict(body)
JsonDataModel.insert_or_update_json_data_dict(json_data_dict)
task_draft_data_dict["saved_form_data_hash"] = json_data_dict["hash"]
TaskDraftDataModel.insert_or_update_task_draft_data_dict(task_draft_data_dict)
try:
db.session.commit()
except OperationalError as exception:
db.session.rollback()
if "Deadlock" in str(exception):
task_draft_data = TaskService.task_draft_data_from_task_model(task_model)
# if we do not find a task_draft_data record, that means it was deleted when the form was submitted
# and we therefore have no need to save draft data
if task_draft_data is not None:
# using this method here since it will check the db if the json_data_hash
# has changed and then we can update the task_data_draft record if it has
new_json_data_dict = TaskService.update_task_data_on_task_model_and_return_dict_if_updated(
task_draft_data, body, "saved_form_data_hash"
)
if new_json_data_dict is not None:
JsonDataModel.insert_or_update_json_data_dict(new_json_data_dict)
db.session.add(task_draft_data)
db.session.commit()
else:
raise exception
return Response(
json.dumps(

View File

@@ -512,13 +512,11 @@ class TaskService:
def update_task_data_on_task_model_and_return_dict_if_updated(
    cls, task_model: TaskModel, task_data_dict: dict, task_model_data_column: str
) -> JsonDataDict | None:
    """Repoint *task_model*'s hash column at *task_data_dict* if it changed.

    Builds the JsonDataDict (hash + data) for *task_data_dict* via
    JsonDataModel.json_data_dict_from_dict. When the hash stored in
    *task_model_data_column* differs, the column is updated and the dict
    is returned so the caller can persist it with
    JsonDataModel.insert_or_update_json_data_dict. Returns None when the
    column already holds the matching hash (nothing to update).
    """
    json_data_dict = JsonDataModel.json_data_dict_from_dict(task_data_dict)
    if getattr(task_model, task_model_data_column) != json_data_dict["hash"]:
        setattr(task_model, task_model_data_column, json_data_dict["hash"])
        return json_data_dict
    return None
@classmethod
def bpmn_process_and_descendants(cls, bpmn_processes: list[BpmnProcessModel]) -> list[BpmnProcessModel]: