Feature/drop id column on json data (#587)

* added migration to drop the id column from json_data and make hash the primary key

* removed id column from task_draft_data as well

---------

Co-authored-by: jasquat <jasquat@users.noreply.github.com>
jasquat 2023-10-27 11:02:52 -04:00 committed by GitHub
parent f8112641cc
commit c2ce27e961
7 changed files with 66 additions and 17 deletions
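
For context: json_data rows are content-addressed, with hash holding a sha256 of the row's data, so the surrogate id column carried no information; once hash is the primary key, a write can be a plain upsert keyed on content. A minimal sketch of that pattern, assuming MySQL and this commit's table layout (the function and table handle are illustrative, not the project's actual service code):

    import json
    from hashlib import sha256

    from sqlalchemy.dialects.mysql import insert as mysql_insert


    def upsert_json_data(connection, json_data_table, data: dict) -> str:
        # canonical serialization: equal dicts always produce the same hash
        serialized = json.dumps(data, sort_keys=True)
        data_hash = sha256(serialized.encode("utf8")).hexdigest()
        statement = mysql_insert(json_data_table).values(hash=data_hash, data=data)
        # hash is now the primary key, so a duplicate key means identical content
        # already exists; re-writing the same data is a harmless no-op
        statement = statement.on_duplicate_key_update(data=statement.inserted.data)
        connection.execute(statement)
        return data_hash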


@@ -32,6 +32,7 @@ on:
     branches:
       - main
       - spiffdemo
+      - feature/drop-id-column-on-json-data
 jobs:
   create_frontend_docker_image:


@@ -44,14 +44,12 @@ if [[ "${1:-}" == "clean" ]]; then
   if [[ "${SPIFFWORKFLOW_BACKEND_DATABASE_TYPE:-mysql}" == "sqlite" ]]; then
     rm -f ./src/instance/*.sqlite3
-  else
+  elif [[ "${SPIFFWORKFLOW_BACKEND_DATABASE_TYPE:-mysql}" == "mysql" ]]; then
     mysql -h "$database_host" -uroot -e "DROP DATABASE IF EXISTS spiffworkflow_backend_local_development"
     mysql -h "$database_host" -uroot -e "DROP DATABASE IF EXISTS spiffworkflow_backend_unit_testing"
-  fi
-  # TODO: check to see if the db already exists and we can connect to it. also actually clean it up.
-  # start postgres in background with one db
-  if [[ "${SPIFFWORKFLOW_BACKEND_DATABASE_TYPE:-}" == "postgres" ]]; then
+  elif [[ "${SPIFFWORKFLOW_BACKEND_DATABASE_TYPE:-}" == "postgres" ]]; then
+    # TODO: check to see if the db already exists and we can connect to it. also actually clean it up.
+    # start postgres in background with one db
     container_name="postgres-spiff"
     container_regex="^postgres-spiff$"
     if [[ -n "$(docker ps -qa -f name=$container_regex)" ]]; then


@@ -0,0 +1,44 @@
+"""empty message
+
+Revision ID: d8901960326e
+Revises: 78f5c2c65bf3
+Create Date: 2023-10-24 14:15:20.401370
+
+"""
+from alembic import op
+import sqlalchemy as sa
+from sqlalchemy.dialects import mysql
+
+# revision identifiers, used by Alembic.
+revision = 'd8901960326e'
+down_revision = '78f5c2c65bf3'
+branch_labels = None
+depends_on = None
+
+
+def upgrade():
+    # ### commands auto generated by Alembic - please adjust! ###
+    with op.batch_alter_table('json_data', schema=None) as batch_op:
+        batch_op.drop_column('id')
+        batch_op.create_primary_key('hash_pk', ['hash'])
+
+    with op.batch_alter_table('task_draft_data', schema=None) as batch_op:
+        batch_op.drop_column('id')
+        batch_op.create_primary_key('process_instance_task_definition_pk', ['process_instance_id', 'task_definition_id_path'])
+    # ### end Alembic commands ###
+
+
+def downgrade():
+    # ### commands auto generated by Alembic - please adjust! ###
+    with op.batch_alter_table('json_data', schema=None) as batch_op:
+        batch_op.drop_constraint('hash_pk', 'primary')
+        # NOTE: it does not actually add the autoincrement to the column which means if we ever need to put
+        # back the id column, we may have to create a new migration
+        batch_op.add_column(sa.Column('id', mysql.INTEGER(), autoincrement=True, nullable=False))
+        batch_op.create_primary_key('id_pk', ['id'])
+
+    with op.batch_alter_table('task_draft_data', schema=None) as batch_op:
+        batch_op.drop_constraint('process_instance_task_definition_pk', 'primary')
+        batch_op.add_column(sa.Column('id', mysql.INTEGER(), autoincrement=True, nullable=False))
+        batch_op.create_primary_key('id_pk', ['id'])
+    # ### end Alembic commands ###
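
As the NOTE in downgrade() says, batch_op.add_column does not actually restore AUTO_INCREMENT on MySQL. If the id column ever had to come back for real, the fix would likely be a follow-up migration that alters the column once it is a primary key again; a sketch under that assumption, untested:

    from alembic import op
    from sqlalchemy.dialects import mysql


    def upgrade():
        # hypothetical corrective migration: re-add AUTO_INCREMENT to the
        # restored id column (MySQL requires the column to be a key first)
        op.alter_column(
            "json_data",
            "id",
            existing_type=mysql.INTEGER(),
            autoincrement=True,
            nullable=False,
        )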


@@ -43,10 +43,10 @@ class JsonDataDict(TypedDict):
 # grep -R '_data_hash: ' src/spiffworkflow_backend/models/
 class JsonDataModel(SpiffworkflowBaseDBModel):
     __tablename__ = "json_data"
-    id: int = db.Column(db.Integer, primary_key=True)
+    # id: int = db.Column(db.Integer, primary_key=True)
 
     # this is a sha256 hash of spec and serializer_version
-    hash: str = db.Column(db.String(255), nullable=False, unique=True)
+    hash: str = db.Column(db.String(255), nullable=False, unique=True, primary_key=True)
     data: dict = db.Column(db.JSON, nullable=False)
 
     @classmethod
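
The classmethod the diff cuts off here is presumably json_data_dict_from_dict, which the service code further down relies on. A sketch of what such a helper plausibly looks like given the hash column's semantics (an assumption about the implementation, not a copy of it):

    import json
    from hashlib import sha256


    def json_data_dict_from_dict(data: dict) -> dict:
        # hypothetical standalone version of JsonDataModel.json_data_dict_from_dict:
        # sort_keys gives a canonical serialization so equal dicts hash identically
        data_json = json.dumps(data, sort_keys=True)
        data_hash = sha256(data_json.encode("utf8")).hexdigest()
        return {"hash": data_hash, "data": data}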


@@ -5,6 +5,7 @@ from typing import TypedDict
 from flask import current_app
 from sqlalchemy import ForeignKey
+from sqlalchemy import PrimaryKeyConstraint
 from sqlalchemy import UniqueConstraint
 from sqlalchemy.dialects.mysql import insert as mysql_insert
 from sqlalchemy.dialects.postgresql import insert as postgres_insert
@@ -31,9 +32,13 @@ class TaskDraftDataModel(SpiffworkflowBaseDBModel):
             "task_definition_id_path",
             name="process_instance_task_definition_unique",
         ),
+        PrimaryKeyConstraint(
+            "process_instance_id",
+            "task_definition_id_path",
+            name="process_instance_task_definition_pk",
+        ),
     )
 
-    id: int = db.Column(db.Integer, primary_key=True)
     process_instance_id: int = db.Column(
         ForeignKey(ProcessInstanceModel.id), nullable=False, index=True  # type: ignore
     )
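
With the composite primary key replacing id, the mysql_insert/postgres_insert imports above suggest draft saves can be upserts keyed on (process_instance_id, task_definition_id_path). A hedged sketch of the MySQL flavor, using the TaskDraftDataModel defined above (the helper name and session handling are illustrative):

    from sqlalchemy.dialects.mysql import insert as mysql_insert


    def upsert_task_draft_data(
        db_session, process_instance_id: int, task_definition_id_path: str, saved_form_data_hash: str
    ) -> None:
        # a duplicate (process_instance_id, task_definition_id_path) now collides
        # on the primary key, turning the insert into an update of the draft's hash
        statement = mysql_insert(TaskDraftDataModel).values(
            process_instance_id=process_instance_id,
            task_definition_id_path=task_definition_id_path,
            saved_form_data_hash=saved_form_data_hash,
        )
        statement = statement.on_duplicate_key_update(
            saved_form_data_hash=statement.inserted.saved_form_data_hash
        )
        db_session.execute(statement)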


@@ -267,7 +267,7 @@ def task_data_update(
     if "new_task_data" in body:
         new_task_data_str: str = body["new_task_data"]
         new_task_data_dict = json.loads(new_task_data_str)
-        json_data_dict = TaskService.update_task_data_on_task_model_and_return_dict_if_updated(
+        json_data_dict = TaskService.update_json_data_on_db_model_and_return_dict_if_updated(
             task_model, new_task_data_dict, "json_data_hash"
         )
         if json_data_dict is not None:
@@ -769,7 +769,7 @@ def task_save_draft(
     if task_draft_data is not None:
         # using this method here since it will check the db if the json_data_hash
         # has changed and then we can update the task_data_draft record if it has
-        new_json_data_dict = TaskService.update_task_data_on_task_model_and_return_dict_if_updated(
+        new_json_data_dict = TaskService.update_json_data_on_db_model_and_return_dict_if_updated(
             task_draft_data, body, "saved_form_data_hash"
         )
         if new_json_data_dict is not None:


@@ -13,6 +13,7 @@ from SpiffWorkflow.util.task import TaskState  # type: ignore
 from spiffworkflow_backend.models.bpmn_process import BpmnProcessModel
 from spiffworkflow_backend.models.bpmn_process import BpmnProcessNotFoundError
 from spiffworkflow_backend.models.bpmn_process_definition import BpmnProcessDefinitionModel
+from spiffworkflow_backend.models.db import SpiffworkflowBaseDBModel
 from spiffworkflow_backend.models.db import db
 from spiffworkflow_backend.models.human_task import HumanTaskModel
 from spiffworkflow_backend.models.json_data import JsonDataDict
@@ -277,10 +278,10 @@ class TaskService:
         python_env_data_dict = self.__class__._get_python_env_data_dict_from_spiff_task(spiff_task, self.serializer)
         task_model.properties_json = new_properties_json
         task_model.state = TaskState.get_name(new_properties_json["state"])
-        json_data_dict = self.__class__.update_task_data_on_task_model_and_return_dict_if_updated(
+        json_data_dict = self.__class__.update_json_data_on_db_model_and_return_dict_if_updated(
            task_model, spiff_task_data, "json_data_hash"
         )
-        python_env_dict = self.__class__.update_task_data_on_task_model_and_return_dict_if_updated(
+        python_env_dict = self.__class__.update_json_data_on_db_model_and_return_dict_if_updated(
             task_model, python_env_data_dict, "python_env_data_hash"
         )
         if json_data_dict is not None:
@@ -516,12 +517,12 @@ class TaskService:
         return json_data_dict
 
     @classmethod
-    def update_task_data_on_task_model_and_return_dict_if_updated(
-        cls, task_model: TaskModel, task_data_dict: dict, task_model_data_column: str
+    def update_json_data_on_db_model_and_return_dict_if_updated(
+        cls, db_model: SpiffworkflowBaseDBModel, task_data_dict: dict, task_model_data_column: str
     ) -> JsonDataDict | None:
         json_data_dict = JsonDataModel.json_data_dict_from_dict(task_data_dict)
-        if getattr(task_model, task_model_data_column) != json_data_dict["hash"]:
-            setattr(task_model, task_model_data_column, json_data_dict["hash"])
+        if getattr(db_model, task_model_data_column) != json_data_dict["hash"]:
+            setattr(db_model, task_model_data_column, json_data_dict["hash"])
             return json_data_dict
         return None
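
Widening the helper from TaskModel to any SpiffworkflowBaseDBModel is what lets task_save_draft above push a TaskDraftDataModel through the same code path as a TaskModel. A hedged usage sketch; the json_data persistence helper named here is an assumption, not necessarily the project's API:

    def save_draft_form_data(task_draft_data, form_data: dict) -> None:
        # same helper, non-TaskModel argument: it only returns a dict when the
        # content hash actually changed, so unchanged drafts write nothing
        json_data_dict = TaskService.update_json_data_on_db_model_and_return_dict_if_updated(
            task_draft_data, form_data, "saved_form_data_hash"
        )
        if json_data_dict is not None:
            JsonDataModel.insert_or_update_json_data_dict(json_data_dict)  # assumed helper
            db.session.add(task_draft_data)
            db.session.commit()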