Feature/drop id column on json data (#587)

* added migration to drop the id column from json_data and make hash the primary key
* removed id column from task_draft_data as well

Co-authored-by: jasquat <jasquat@users.noreply.github.com>

parent f8112641cc
commit c2ce27e961
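Before the diffs, a hedged before/after sketch of the net json_data schema change, written as plain SQLAlchemy Table objects. The column types mirror the model diff further down; the snippet itself is illustrative and not code from this commit.

# Illustrative before/after of the json_data schema; not code from this commit.
from sqlalchemy import JSON, Column, Integer, MetaData, String, Table

metadata = MetaData()

# Before: surrogate integer primary key; hash is merely unique.
json_data_before = Table(
    "json_data_before",
    metadata,
    Column("id", Integer, primary_key=True, autoincrement=True),
    Column("hash", String(255), nullable=False, unique=True),
    Column("data", JSON, nullable=False),
)

# After: the content hash itself is the primary key; the id column is gone.
json_data_after = Table(
    "json_data_after",
    metadata,
    Column("hash", String(255), primary_key=True),
    Column("data", JSON, nullable=False),
)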
@@ -32,6 +32,7 @@ on:
     branches:
       - main
       - spiffdemo
+      - feature/drop-id-column-on-json-data

 jobs:
   create_frontend_docker_image:
@@ -44,14 +44,12 @@ if [[ "${1:-}" == "clean" ]]; then
   if [[ "${SPIFFWORKFLOW_BACKEND_DATABASE_TYPE:-mysql}" == "sqlite" ]]; then
     rm -f ./src/instance/*.sqlite3
-  else
+  elif [[ "${SPIFFWORKFLOW_BACKEND_DATABASE_TYPE:-mysql}" == "mysql" ]]; then
     mysql -h "$database_host" -uroot -e "DROP DATABASE IF EXISTS spiffworkflow_backend_local_development"
     mysql -h "$database_host" -uroot -e "DROP DATABASE IF EXISTS spiffworkflow_backend_unit_testing"
-  fi
-
-  # TODO: check to see if the db already exists and we can connect to it. also actually clean it up.
-  # start postgres in background with one db
+  elif [[ "${SPIFFWORKFLOW_BACKEND_DATABASE_TYPE:-}" == "postgres" ]]; then
+    # TODO: check to see if the db already exists and we can connect to it. also actually clean it up.
+    # start postgres in background with one db

-  if [[ "${SPIFFWORKFLOW_BACKEND_DATABASE_TYPE:-}" == "postgres" ]]; then
     container_name="postgres-spiff"
     container_regex="^postgres-spiff$"
     if [[ -n "$(docker ps -qa -f name=$container_regex)" ]]; then
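The cleanup above branches on SPIFFWORKFLOW_BACKEND_DATABASE_TYPE (defaulting to mysql). For context, a hypothetical sketch of how that variable could map to a connection URI; the exact URIs the backend constructs are assumptions here, shown only to make the sqlite/mysql/postgres branches concrete.

# Hypothetical mapping from SPIFFWORKFLOW_BACKEND_DATABASE_TYPE to a connection
# URI; the real backend's URI construction may differ.
import os

def database_uri(db_name: str = "spiffworkflow_backend_local_development") -> str:
    db_type = os.environ.get("SPIFFWORKFLOW_BACKEND_DATABASE_TYPE", "mysql")
    if db_type == "sqlite":
        return f"sqlite:///src/instance/{db_name}.sqlite3"
    if db_type == "mysql":
        return f"mysql+mysqldb://root@127.0.0.1/{db_name}"
    if db_type == "postgres":
        return f"postgresql://spiffworkflow_backend:password@127.0.0.1:5432/{db_name}"
    raise ValueError(f"unsupported SPIFFWORKFLOW_BACKEND_DATABASE_TYPE: {db_type}")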
@@ -0,0 +1,44 @@
+"""empty message
+
+Revision ID: d8901960326e
+Revises: 78f5c2c65bf3
+Create Date: 2023-10-24 14:15:20.401370
+
+"""
+from alembic import op
+import sqlalchemy as sa
+from sqlalchemy.dialects import mysql
+
+# revision identifiers, used by Alembic.
+revision = 'd8901960326e'
+down_revision = '78f5c2c65bf3'
+branch_labels = None
+depends_on = None
+
+
+def upgrade():
+    # ### commands auto generated by Alembic - please adjust! ###
+    with op.batch_alter_table('json_data', schema=None) as batch_op:
+        batch_op.drop_column('id')
+        batch_op.create_primary_key('hash_pk', ['hash'])
+
+    with op.batch_alter_table('task_draft_data', schema=None) as batch_op:
+        batch_op.drop_column('id')
+        batch_op.create_primary_key('process_instance_task_definition_pk', ['process_instance_id', 'task_definition_id_path'])
+    # ### end Alembic commands ###
+
+
+def downgrade():
+    # ### commands auto generated by Alembic - please adjust! ###
+    with op.batch_alter_table('json_data', schema=None) as batch_op:
+        batch_op.drop_constraint('hash_pk', 'primary')
+        # NOTE: it does not actually add the autoincrement to the column which means if we ever need to put
+        # back the id column, we may have to create a new migration
+        batch_op.add_column(sa.Column('id', mysql.INTEGER(), autoincrement=True, nullable=False))
+        batch_op.create_primary_key('id_pk', ['id'])
+
+    with op.batch_alter_table('task_draft_data', schema=None) as batch_op:
+        batch_op.drop_constraint('process_instance_task_definition_pk', 'primary')
+        batch_op.add_column(sa.Column('id', mysql.INTEGER(), autoincrement=True, nullable=False))
+        batch_op.create_primary_key('id_pk', ['id'])
+    # ### end Alembic commands ###
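A hedged way to sanity-check this migration after running it: inspect the resulting primary keys with SQLAlchemy. The engine URL below is a placeholder, not a path from this commit.

# Verify the new primary keys after the upgrade; the engine URL is a
# placeholder -- point it at whichever database the migration ran against.
from sqlalchemy import create_engine, inspect

engine = create_engine("sqlite:///src/instance/db_local_development.sqlite3")
inspector = inspect(engine)

# Expect ['hash'] for json_data...
print(inspector.get_pk_constraint("json_data")["constrained_columns"])
# ...and the composite key for task_draft_data.
print(inspector.get_pk_constraint("task_draft_data")["constrained_columns"])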
@@ -43,10 +43,10 @@ class JsonDataDict(TypedDict):
 # grep -R '_data_hash: ' src/spiffworkflow_backend/models/
 class JsonDataModel(SpiffworkflowBaseDBModel):
     __tablename__ = "json_data"
-    id: int = db.Column(db.Integer, primary_key=True)
+    # id: int = db.Column(db.Integer, primary_key=True)

     # this is a sha256 hash of spec and serializer_version
-    hash: str = db.Column(db.String(255), nullable=False, unique=True)
+    hash: str = db.Column(db.String(255), nullable=False, unique=True, primary_key=True)
     data: dict = db.Column(db.JSON, nullable=False)

     @classmethod
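With hash as the primary key, json_data becomes content-addressed: identical payloads collapse into one row. A minimal sketch of the idea; the exact hashing scheme (sha256 over sorted-key JSON) is an assumption modeled on the json_data_dict_from_dict helper used elsewhere in this commit, not a copy of it.

# Sketch of deriving a content hash for a row; assumes sha256 over
# canonical (sorted-key) JSON, which may differ in detail from the real helper.
import hashlib
import json

def json_data_dict_from_dict(data: dict) -> dict:
    serialized = json.dumps(data, sort_keys=True)
    return {"hash": hashlib.sha256(serialized.encode("utf8")).hexdigest(), "data": data}

# Two structurally equal payloads map to the same primary key, so duplicate
# task data collapses into a single json_data row.
assert json_data_dict_from_dict({"a": 1, "b": 2})["hash"] == json_data_dict_from_dict({"b": 2, "a": 1})["hash"]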
@@ -5,6 +5,7 @@ from typing import TypedDict

 from flask import current_app
 from sqlalchemy import ForeignKey
+from sqlalchemy import PrimaryKeyConstraint
 from sqlalchemy import UniqueConstraint
 from sqlalchemy.dialects.mysql import insert as mysql_insert
 from sqlalchemy.dialects.postgresql import insert as postgres_insert
@@ -31,9 +32,13 @@ class TaskDraftDataModel(SpiffworkflowBaseDBModel):
             "task_definition_id_path",
             name="process_instance_task_definition_unique",
         ),
+        PrimaryKeyConstraint(
+            "process_instance_id",
+            "task_definition_id_path",
+            name="process_instance_task_definition_pk",
+        ),
     )

-    id: int = db.Column(db.Integer, primary_key=True)
     process_instance_id: int = db.Column(
         ForeignKey(ProcessInstanceModel.id), nullable=False, index=True  # type: ignore
     )
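For reference, the same composite-primary-key pattern in a standalone declarative model; table, class, and constraint names here are illustrative, not the project's.

# Standalone illustration of a composite primary key via PrimaryKeyConstraint;
# the names used here are examples only.
from sqlalchemy import Column, Integer, PrimaryKeyConstraint, String
from sqlalchemy.orm import declarative_base

Base = declarative_base()

class DraftDataExample(Base):
    __tablename__ = "draft_data_example"
    __table_args__ = (
        PrimaryKeyConstraint("process_instance_id", "task_definition_id_path", name="example_pk"),
    )
    process_instance_id = Column(Integer, nullable=False)
    task_definition_id_path = Column(String(255), nullable=False)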
@@ -267,7 +267,7 @@ def task_data_update(
     if "new_task_data" in body:
         new_task_data_str: str = body["new_task_data"]
         new_task_data_dict = json.loads(new_task_data_str)
-        json_data_dict = TaskService.update_task_data_on_task_model_and_return_dict_if_updated(
+        json_data_dict = TaskService.update_json_data_on_db_model_and_return_dict_if_updated(
             task_model, new_task_data_dict, "json_data_hash"
         )
         if json_data_dict is not None:
@@ -769,7 +769,7 @@ def task_save_draft(
     if task_draft_data is not None:
         # using this method here since it will check the db if the json_data_hash
         # has changed and then we can update the task_data_draft record if it has
-        new_json_data_dict = TaskService.update_task_data_on_task_model_and_return_dict_if_updated(
+        new_json_data_dict = TaskService.update_json_data_on_db_model_and_return_dict_if_updated(
             task_draft_data, body, "saved_form_data_hash"
         )
         if new_json_data_dict is not None:
@@ -13,6 +13,7 @@ from SpiffWorkflow.util.task import TaskState  # type: ignore
 from spiffworkflow_backend.models.bpmn_process import BpmnProcessModel
 from spiffworkflow_backend.models.bpmn_process import BpmnProcessNotFoundError
 from spiffworkflow_backend.models.bpmn_process_definition import BpmnProcessDefinitionModel
+from spiffworkflow_backend.models.db import SpiffworkflowBaseDBModel
 from spiffworkflow_backend.models.db import db
 from spiffworkflow_backend.models.human_task import HumanTaskModel
 from spiffworkflow_backend.models.json_data import JsonDataDict
@@ -277,10 +278,10 @@ class TaskService:
         python_env_data_dict = self.__class__._get_python_env_data_dict_from_spiff_task(spiff_task, self.serializer)
         task_model.properties_json = new_properties_json
         task_model.state = TaskState.get_name(new_properties_json["state"])
-        json_data_dict = self.__class__.update_task_data_on_task_model_and_return_dict_if_updated(
+        json_data_dict = self.__class__.update_json_data_on_db_model_and_return_dict_if_updated(
             task_model, spiff_task_data, "json_data_hash"
         )
-        python_env_dict = self.__class__.update_task_data_on_task_model_and_return_dict_if_updated(
+        python_env_dict = self.__class__.update_json_data_on_db_model_and_return_dict_if_updated(
             task_model, python_env_data_dict, "python_env_data_hash"
         )
         if json_data_dict is not None:
@@ -516,12 +517,12 @@ class TaskService:
         return json_data_dict

     @classmethod
-    def update_task_data_on_task_model_and_return_dict_if_updated(
-        cls, task_model: TaskModel, task_data_dict: dict, task_model_data_column: str
+    def update_json_data_on_db_model_and_return_dict_if_updated(
+        cls, db_model: SpiffworkflowBaseDBModel, task_data_dict: dict, task_model_data_column: str
     ) -> JsonDataDict | None:
         json_data_dict = JsonDataModel.json_data_dict_from_dict(task_data_dict)
-        if getattr(task_model, task_model_data_column) != json_data_dict["hash"]:
-            setattr(task_model, task_model_data_column, json_data_dict["hash"])
+        if getattr(db_model, task_model_data_column) != json_data_dict["hash"]:
+            setattr(db_model, task_model_data_column, json_data_dict["hash"])
             return json_data_dict
         return None
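The rename captures the real change: the helper now accepts any SpiffworkflowBaseDBModel rather than only TaskModel, so one code path serves json_data_hash, python_env_data_hash, and saved_form_data_hash alike. Below is a self-contained analog of that logic; the hashing details and the FakeTask class are assumptions for illustration only.

# Self-contained analog of the widened helper: it works on any object exposing
# the named hash column, not just TaskModel. Names below are illustrative.
import hashlib
import json
from typing import Any

def update_json_data_on_db_model_and_return_dict_if_updated(
    db_model: Any, task_data_dict: dict, hash_column: str
) -> dict | None:
    serialized = json.dumps(task_data_dict, sort_keys=True)
    new_hash = hashlib.sha256(serialized.encode("utf8")).hexdigest()
    if getattr(db_model, hash_column) != new_hash:
        # Point the model's hash column at the new content-addressed row and
        # hand back the row to upsert; None means nothing changed.
        setattr(db_model, hash_column, new_hash)
        return {"hash": new_hash, "data": task_data_dict}
    return None

class FakeTask:
    json_data_hash: str | None = None

task = FakeTask()
print(update_json_data_on_db_model_and_return_dict_if_updated(task, {"x": 1}, "json_data_hash"))
print(update_json_data_on_db_model_and_return_dict_if_updated(task, {"x": 1}, "json_data_hash"))  # None: unchanged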