Mirror of https://github.com/sartography/cr-connect-workflow.git
Synced 2025-02-23 13:18:35 +00:00

Commit 7f8ea483c9 (parent e641a660fb)

    only migrate the newest file_data model
    clean up downgrade methods
@@ -7,11 +7,9 @@ Create Date: 2022-04-11 11:34:27.392601
"""
from alembic import op
import sqlalchemy as sa
from sqlalchemy.exc import IntegrityError

from crc import app
from crc.models.data_store import DataStoreModel
from crc.models.file import OldFileModel, FileModel, FileDataModel, FileModel
from crc.models.file import OldFileModel, FileModel, FileDataModel

@@ -22,12 +20,12 @@ branch_labels = None
depends_on = None


def update_data_store(old_file_id, file_id, session):
    # update data_store with new file_ids
    data_stores = session.query(DataStoreModel).filter(DataStoreModel.file_id == old_file_id).all()
    for data_store in data_stores:
        data_store.file_id = file_id
    session.commit()
# def update_data_store(old_file_id, file_id, session):
#     # update data_store with new file_ids
#     data_stores = session.query(DataStoreModel).filter(DataStoreModel.file_id == old_file_id).all()
#     for data_store in data_stores:
#         data_store.file_id = file_id
#     session.commit()


def upgrade():
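Side note (not part of the diff): the upgrade() code below relies on a `session` object that is created outside the changed lines. In Alembic data migrations this is typically built from the connection the migration runs on; a minimal sketch, where every name except `op` is an assumption:

from alembic import op
from sqlalchemy import orm

bind = op.get_bind()              # the connection Alembic runs this migration on
session = orm.Session(bind=bind)  # ORM session used by the queries in upgrade() below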
@@ -37,17 +35,18 @@ def upgrade():

    # migrate data from old file table and file data table to new file table
    old_file_models = session.query(OldFileModel).all()
    largest_file_id = 0
    for old_file_model in old_file_models:
        if old_file_model.irb_doc_code is not None:
            largest_file_id = max(largest_file_id, old_file_model.id)
            file_data_models = session.query(FileDataModel).\
                filter(FileDataModel.file_model_id == old_file_model.id).\
                order_by(sa.desc(FileDataModel.date_created)).\
                all()
            count = 0
            for file_data_model in file_data_models:
                archived = count > 0
            if len(file_data_models) > 0:
                file_data_model = file_data_models[0]
                file_model = FileModel(
                    # id=old_file_model.id,
                    id=old_file_model.id,
                    name=old_file_model.name,
                    type=old_file_model.type.value,
                    content_type=old_file_model.content_type,
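This is the region that implements the commit message's "only migrate the newest file_data model": instead of looping over every FileDataModel row (tracking `count` and deriving `archived` from it), the migration now takes only the first element of the date-descending query. A minimal sketch of an equivalent selection, assuming the same `session`, models, and loop variables shown above:

newest_file_data = session.query(FileDataModel).\
    filter(FileDataModel.file_model_id == old_file_model.id).\
    order_by(sa.desc(FileDataModel.date_created)).\
    first()  # newest row only, or None if this file has no data rows
if newest_file_data is not None:
    pass  # build the single FileModel from newest_file_data here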
@@ -60,19 +59,20 @@ def upgrade():
                    date_modified=file_data_model.date_created,
                    date_created=file_data_model.date_created,
                    user_uid=file_data_model.user_uid,
                    archived=archived
                    archived=False
                )
                session.add(file_model)
                session.commit()
                count += 1
                # update data_store with new file_ids
                update_data_store(old_file_model.id, file_model.id, session)
    sequence = FileModel.__tablename__ + '_id_seq'
    new_start_id = largest_file_id + 1
    alter_sequence = f'ALTER SEQUENCE {sequence} RESTART WITH {new_start_id}'
    op.execute(alter_sequence)

    # Wait until data is migrated before adding the foreign key constraint
    # Otherwise, file_ids don't exist
    op.create_foreign_key('file_id_key', 'data_store', 'file', ['file_id'], ['id'])


def downgrade():
    ...
    # op.add_column('data_store', sa.Column('file_id', sa.Integer(), nullable=True))
    # op.create_foreign_key('file_id_key', 'data_store', 'file', ['file_id'], ['id'])
    # Instead of deleting the new records here, we just drop the table in revision 92d554ab6e32
    op.drop_constraint('file_id_key', 'data_store', type_='foreignkey')

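The ALTER SEQUENCE step above is needed because the new file rows are inserted with explicit ids copied from the old table, which leaves the sequence behind file.id unaware of them; without the restart, the next auto-generated id could collide with a migrated row. A sketch of an equivalent bump using PostgreSQL's setval() (illustrative only, assuming the same names as above and that at least one file was migrated):

sequence = FileModel.__tablename__ + '_id_seq'
# setval(..., largest_file_id, true) makes the next nextval() return largest_file_id + 1,
# matching ALTER SEQUENCE ... RESTART WITH largest_file_id + 1 in the migration.
op.execute(f"SELECT setval('{sequence}', {largest_file_id}, true)")

Creating the file_id_key foreign key only after the data copy, as the in-code comment notes, avoids violations while data_store rows still reference file ids that do not yet exist.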
@@ -41,5 +41,6 @@ def upgrade():


def downgrade():

    op.drop_table('file')
    op.rename_table('old_file', 'file')
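For orientation (an inference from the comment about revision 92d554ab6e32, not stated explicitly in the diff): this second changed file appears to be the earlier migration that introduced the new file table, so stepping back through both revisions would roughly perform, in order:

def combined_downgrade():  # hypothetical illustration, not an actual Alembic function
    op.drop_constraint('file_id_key', 'data_store', type_='foreignkey')  # newer revision's downgrade
    op.drop_table('file')                                                # earlier revision's downgrade
    op.rename_table('old_file', 'file')                                  # restore the original table name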