Added a backfill for task_guid in the human_task table and added a check to ensure the task model is not None (w/ burnettk)

This commit is contained in:
jasquat 2024-02-08 10:19:57 -05:00
parent aaec9f6ffc
commit 81b6431c58
No known key found for this signature in database
3 changed files with 18 additions and 22 deletions

View File

@ -5,6 +5,7 @@ from spiffworkflow_backend import create_app
from spiffworkflow_backend.data_migrations.version_1_3 import VersionOneThree
from spiffworkflow_backend.data_migrations.version_2 import Version2
from spiffworkflow_backend.models.db import db
from spiffworkflow_backend.models.human_task import HumanTaskModel
from spiffworkflow_backend.models.process_instance import ProcessInstanceModel
from sqlalchemy import update
@ -39,6 +40,15 @@ def put_serializer_version_onto_numeric_track() -> None:
db.session.commit()
@benchmark_log_func
def backfill_task_guid_for_human_tasks() -> None:
    """Copy task_id into task_guid for human_task rows where task_guid is NULL.

    One bulk UPDATE, committed immediately. This backfill is a prerequisite for
    making the human_task.task_guid column non-nullable.
    """
    # Use SQLAlchemy's .is_(None) for the SQL "IS NULL" test instead of
    # "== None", which needs a noqa: E711 suppression.
    update_query = (
        update(HumanTaskModel)
        .where(HumanTaskModel.task_guid.is_(None))
        .values(task_guid=HumanTaskModel.task_id)
    )
    db.session.execute(update_query)
    db.session.commit()
def all_potentially_relevant_process_instances() -> list[ProcessInstanceModel]:
return ProcessInstanceModel.query.filter(
ProcessInstanceModel.spiff_serializer_version < Version2.version(),
@ -65,6 +75,7 @@ def main() -> None:
current_app.logger.debug(f"data_migrations/run_all::create_app took {end_time - start_time} seconds")
start_time = time.time()
put_serializer_version_onto_numeric_track()
backfill_task_guid_for_human_tasks()
process_instances = all_potentially_relevant_process_instances()
potentially_relevant_instance_count = len(process_instances)
current_app.logger.debug(f"Found potentially relevant process_instances: {potentially_relevant_instance_count}")

View File

@ -1,21 +0,0 @@
import time
from spiffworkflow_backend import create_app
from spiffworkflow_backend.data_migrations.version_1_3 import VersionOneThree
def main() -> None:
    """Run the VersionOneThree data migration inside a Flask app context, logging the elapsed time."""
    app = create_app()
    start_time = time.time()
    with app.app_context():
        VersionOneThree().run()
    end_time = time.time()
    # Build the debug message separately to keep the logger call on one line.
    message = f"done running data migration from ./bin/data_migrations/version_1_3.py. took {end_time - start_time} seconds"
    app.logger.debug(message)
if __name__ == "__main__":
main()

View File

@ -925,9 +925,15 @@ def _task_submit_shared(
human_task=human_task,
)
# NOTE: task_model can currently be None. This fallback should be removable once
# we backfill human_task.task_guid and make that column non-nullable.
task_model: TaskModel | None = human_task.task_model
if task_model is None:
task_model = TaskModel.query.filter_by(guid=human_task.task_id).first()
# delete draft data when we submit a task to ensure cycling back to the task contains the
# most up-to-date data
task_draft_data = TaskService.task_draft_data_from_task_model(human_task.task_model)
task_draft_data = TaskService.task_draft_data_from_task_model(task_model)
if task_draft_data is not None:
db.session.delete(task_draft_data)
db.session.commit()