store the process instance data id on the process instance and not the other way around w/ burnettk

jasquat 2023-03-01 11:39:03 -05:00
parent ab50e7ac03
commit e2891425fc
7 changed files with 72 additions and 23 deletions
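
In short, the foreign key that used to live on process_instance_data (pointing back at process_instance) now lives on process_instance (pointing at process_instance_data). A minimal standalone sketch of the relationship after this commit, using a plain declarative_base instead of this repo's SpiffworkflowBaseDBModel:

from sqlalchemy import JSON, Column, ForeignKey, Integer
from sqlalchemy.orm import declarative_base, relationship

Base = declarative_base()

class ProcessInstanceDataModel(Base):
    __tablename__ = "process_instance_data"
    id = Column(Integer, primary_key=True)
    runtime_json = Column(JSON, nullable=False)  # no longer carries a process_instance_id column

class ProcessInstanceModel(Base):
    __tablename__ = "process_instance"
    id = Column(Integer, primary_key=True)
    # the foreign key now points from the instance to its data row
    process_instance_data_id = Column(ForeignKey("process_instance_data.id"), nullable=True)
    process_instance_data = relationship("ProcessInstanceDataModel", cascade="delete")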

View File

@ -0,0 +1,50 @@
"""empty message
Revision ID: 553ba18e4076
Revises: e494a3955ce5
Create Date: 2023-03-01 11:31:20.619328
"""
from alembic import op
import sqlalchemy as sa
from sqlalchemy.dialects import mysql
# revision identifiers, used by Alembic.
revision = '553ba18e4076'
down_revision = 'e494a3955ce5'
branch_labels = None
depends_on = None
def upgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.add_column('process_instance', sa.Column('process_instance_data_id', sa.Integer(), nullable=True))
op.create_foreign_key(None, 'process_instance', 'process_instance_data', ['process_instance_data_id'], ['id'])
op.alter_column('process_instance_data', 'runtime_json',
existing_type=mysql.JSON(),
nullable=False)
op.drop_constraint('process_instance_data_ibfk_1', 'process_instance_data', type_='foreignkey')
op.drop_column('process_instance_data', 'process_instance_id')
op.alter_column('serialized_bpmn_definition', 'static_json',
existing_type=mysql.JSON(),
nullable=False)
op.drop_index('ix_serialized_bpmn_definition_hash', table_name='serialized_bpmn_definition')
op.create_index(op.f('ix_serialized_bpmn_definition_hash'), 'serialized_bpmn_definition', ['hash'], unique=True)
# ### end Alembic commands ###
def downgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.drop_index(op.f('ix_serialized_bpmn_definition_hash'), table_name='serialized_bpmn_definition')
op.create_index('ix_serialized_bpmn_definition_hash', 'serialized_bpmn_definition', ['hash'], unique=False)
op.alter_column('serialized_bpmn_definition', 'static_json',
existing_type=mysql.JSON(),
nullable=True)
op.add_column('process_instance_data', sa.Column('process_instance_id', mysql.INTEGER(), autoincrement=False, nullable=False))
op.create_foreign_key('process_instance_data_ibfk_1', 'process_instance_data', 'process_instance', ['process_instance_id'], ['id'])
op.alter_column('process_instance_data', 'runtime_json',
existing_type=mysql.JSON(),
nullable=True)
op.drop_constraint(None, 'process_instance', type_='foreignkey')
op.drop_column('process_instance', 'process_instance_data_id')
# ### end Alembic commands ###
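
Note that upgrade() adds process_instance.process_instance_data_id and then drops process_instance_data.process_instance_id without copying the existing links across. For a database that already has rows, a backfill along these lines could run between the add_column and drop_column calls; this is a hypothetical sketch assuming MySQL, not part of the migration above:

op.execute(
    "UPDATE process_instance pi"
    " JOIN process_instance_data pid ON pid.process_instance_id = pi.id"
    " SET pi.process_instance_data_id = pid.id"
)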

View File

@ -1,6 +1,7 @@
"""Process_instance."""
from __future__ import annotations
from typing import Optional
from spiffworkflow_backend.models.process_instance_data import ProcessInstanceDataModel
from spiffworkflow_backend.models.serialized_bpmn_definition import SerializedBpmnDefinitionModel # noqa: F401
from typing import Any
@ -65,7 +66,7 @@ class ProcessInstanceModel(SpiffworkflowBaseDBModel):
serialized_bpmn_definition_id: Optional[int] = db.Column(ForeignKey(SerializedBpmnDefinitionModel.id), nullable=True) # type: ignore
serialized_bpmn_definition = relationship("SerializedBpmnDefinitionModel")
# added mostly for the cascade delete
process_instance_data_id: Optional[int] = db.Column(ForeignKey(ProcessInstanceDataModel.id), nullable=True) # type: ignore
process_instance_data = relationship("ProcessInstanceDataModel", cascade="delete") # type: ignore
active_human_tasks = relationship(
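
The cascade="delete" on the relationship means deleting a process instance also deletes its data row, which no longer references the instance from its own side. A hypothetical usage sketch assuming a Flask-SQLAlchemy session (some_process_instance_id is a placeholder):

process_instance = ProcessInstanceModel.query.filter_by(id=some_process_instance_id).first()
db.session.delete(process_instance)
db.session.commit()  # the ORM cascade also removes the related ProcessInstanceDataModel row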

View File

@ -1,9 +1,6 @@
"""Process_instance."""
from __future__ import annotations
from sqlalchemy import ForeignKey
from spiffworkflow_backend.models.process_instance import ProcessInstanceModel
from sqlalchemy.orm import deferred
from spiffworkflow_backend.models.db import db
from spiffworkflow_backend.models.db import SpiffworkflowBaseDBModel
@ -23,7 +20,5 @@ class ProcessInstanceDataModel(SpiffworkflowBaseDBModel):
__tablename__ = "process_instance_data"
id: int = db.Column(db.Integer, primary_key=True)
process_instance_id: int = db.Column(
ForeignKey(ProcessInstanceModel.id), nullable=False # type: ignore
)
runtime_json: str | None = deferred(db.Column(db.JSON)) # type: ignore
# this is not deferred because there is no reason to query this model if you do not want the runtime_json
runtime_json: str = db.Column(db.JSON, nullable=False)

View File

@ -1,8 +1,6 @@
"""Process_instance."""
from __future__ import annotations
from sqlalchemy.orm import deferred
from spiffworkflow_backend.models.db import db
from spiffworkflow_backend.models.db import SpiffworkflowBaseDBModel
@ -46,5 +44,5 @@ class SerializedBpmnDefinitionModel(SpiffworkflowBaseDBModel):
__tablename__ = "serialized_bpmn_definition"
id: int = db.Column(db.Integer, primary_key=True)
hash: str = db.Column(db.String(255), nullable=False, index=True)
static_json: str | None = deferred(db.Column(db.JSON)) # type: ignore
hash: str = db.Column(db.String(255), nullable=False, index=True, unique=True)
static_json: str = db.Column(db.JSON, nullable=False)
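
With hash now unique, the table can back a get-or-create lookup keyed on a digest of the serialized spec. A sketch of that pattern, assuming a sha256 digest (the processor change further down follows the same shape):

import hashlib
import json

new_hash = hashlib.sha256(json.dumps(bpmn_spec_dict, sort_keys=True).encode("utf8")).hexdigest()
serialized_bpmn_definition = SerializedBpmnDefinitionModel.query.filter_by(hash=new_hash).first()
if serialized_bpmn_definition is None:
    serialized_bpmn_definition = SerializedBpmnDefinitionModel(hash=new_hash, static_json=json.dumps(bpmn_spec_dict))
    db.session.add(serialized_bpmn_definition)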

View File

@ -25,7 +25,6 @@ from spiffworkflow_backend.models.process_instance import ProcessInstanceModelSc
from spiffworkflow_backend.models.process_instance import (
ProcessInstanceTaskDataCannotBeUpdatedError,
)
from spiffworkflow_backend.models.process_instance_data import ProcessInstanceDataModel
from spiffworkflow_backend.models.process_model import ProcessModelInfo
from spiffworkflow_backend.models.spec_reference import SpecReferenceCache
from spiffworkflow_backend.models.spec_reference import SpecReferenceSchema
@ -191,11 +190,11 @@ def task_data_update(
if process_instance:
if process_instance.status != "suspended":
raise ProcessInstanceTaskDataCannotBeUpdatedError(
"The process instance needs to be suspended to udpate the task-data."
"The process instance needs to be suspended to update the task-data."
f" It is currently: {process_instance.status}"
)
process_instance_data = ProcessInstanceDataModel.query.filter_by(process_instance_id=process_instance.id).first()
process_instance_data = process_instance.process_instance_data
if process_instance_data is None:
raise ApiError(
error_code="process_instance_data_not_found",

View File

@ -1,6 +1,5 @@
"""APIs for dealing with process groups, process models, and process instances."""
import json
from spiffworkflow_backend.models.process_instance_data import ProcessInstanceDataModel
from typing import Any
from typing import Dict
from typing import Optional
@ -552,7 +551,7 @@ def process_instance_task_list(
step_details = step_detail_query.all()
process_instance_data = ProcessInstanceDataModel.query.filter_by(process_instance_id=process_instance.id).first()
process_instance_data = process_instance.process_instance_data
process_instance_data_json = "{}" if process_instance_data is None else process_instance_data.runtime_json
process_instance_data_dict = json.loads(process_instance_data_json)
tasks = process_instance_data_dict["tasks"]
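
Because the foreign key now sits on process_instance, the data row can also be eager-loaded with the instance in a single query instead of the separate filter_by lookup the old code used. A hypothetical query sketch (process_instance_id is a placeholder):

from sqlalchemy.orm import joinedload

process_instance = (
    ProcessInstanceModel.query
    .options(joinedload(ProcessInstanceModel.process_instance_data))
    .filter_by(id=process_instance_id)
    .first()
)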

View File

@ -524,7 +524,7 @@ class ProcessInstanceProcessor:
if process_instance_model.serialized_bpmn_definition_id is None:
return {}
serialized_bpmn_definition = process_instance_model.serialized_bpmn_definition
process_instance_data = ProcessInstanceDataModel.query.filter_by(process_instance_id=process_instance_model.id).first()
process_instance_data = process_instance_model.process_instance_data
loaded_json: dict = json.loads(serialized_bpmn_definition.static_json or '{}')
loaded_json.update(json.loads(process_instance_data.runtime_json))
return loaded_json
@ -847,7 +847,11 @@ class ProcessInstanceProcessor:
)
return subprocesses_by_child_task_ids
def add_bpmn_json_records(self) -> None:
def _add_bpmn_json_records(self) -> None:
"""Adds serialized_bpmn_definition and process_instance_data records to the db session.
Expects the save method to commit it.
"""
bpmn_dict = json.loads(self.serialize())
bpmn_dict_keys = ('spec', 'subprocess_specs', 'serializer_version')
bpmn_spec_dict = {}
@ -866,16 +870,19 @@ class ProcessInstanceProcessor:
db.session.add(serialized_bpmn_definition)
self.process_instance_model.serialized_bpmn_definition = serialized_bpmn_definition
process_instance_data = ProcessInstanceDataModel.query.filter_by(process_instance_id=self.process_instance_model.id).first()
if process_instance_data is None:
process_instance_data = ProcessInstanceDataModel(process_instance_id=self.process_instance_model.id)
process_instance_data = None
if self.process_instance_model.process_instance_data_id is None:
process_instance_data = ProcessInstanceDataModel()
else:
process_instance_data = self.process_instance_model.process_instance_data
process_instance_data.runtime_json = json.dumps(process_instance_data_dict)
db.session.add(process_instance_data)
self.process_instance_model.process_instance_data = process_instance_data
def save(self) -> None:
"""Saves the current state of this processor to the database."""
self.add_bpmn_json_records()
self._add_bpmn_json_records()
complete_states = [TaskState.CANCELLED, TaskState.COMPLETED]
user_tasks = list(self.get_all_user_tasks())