Don't put foreign key constraints on primary keys.
commit 0685b282e8
parent 24ced2e280
@@ -54,11 +54,11 @@ class FileType(enum.Enum):


 class FileDataModel(db.Model):
     __tablename__ = 'file_data'
-    id = db.Column(db.Integer, db.ForeignKey('file.id'), primary_key=True)
+    id = db.Column(db.Integer, primary_key=True)
     data = db.Column(db.LargeBinary)
+    file_model_id = db.Column(db.Integer, db.ForeignKey('file.id'))
+    file_model = db.relationship("FileModel")


 class FileModel(db.Model):
     __tablename__ = 'file'
     id = db.Column(db.Integer, primary_key=True)
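
The hunk above is the heart of the commit: file_data.id stops doubling as a foreign key and becomes a plain surrogate key, and the link to file moves into a separate nullable file_model_id column paired with a relationship(). A minimal standalone sketch of that pattern follows; the Flask app setup, the name column, and the sample values are illustrative, not taken from the project:

from flask import Flask
from flask_sqlalchemy import SQLAlchemy

app = Flask(__name__)
app.config['SQLALCHEMY_DATABASE_URI'] = 'sqlite://'      # throwaway in-memory database
db = SQLAlchemy(app)

class FileModel(db.Model):
    __tablename__ = 'file'
    id = db.Column(db.Integer, primary_key=True)
    name = db.Column(db.String)                           # illustrative column

class FileDataModel(db.Model):
    __tablename__ = 'file_data'
    id = db.Column(db.Integer, primary_key=True)          # own key, no ForeignKey on it
    data = db.Column(db.LargeBinary)
    file_model_id = db.Column(db.Integer, db.ForeignKey('file.id'))
    file_model = db.relationship("FileModel")

with app.app_context():
    db.create_all()
    f = FileModel(name="random_fact.bpmn")
    d = FileDataModel(data=b"<bpmn/>", file_model=f)      # link via the relationship
    db.session.add(d)
    db.session.commit()
    assert d.file_model_id == f.id                        # FK populated by the unit of work
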
@@ -6,49 +6,52 @@ from crc.models import StudyModel, WorkflowSpecModel, FileType, FileModel, FileD


 class ExampleDataLoader:
-    studies = [
-        StudyModel(
-            id=1,
-            title='The impact of fried pickles on beer consumption in bipedal software developers.',
-            last_updated=datetime.datetime.now(),
-            protocol_builder_status='in_process',
-            primary_investigator_id='dhf8r',
-            sponsor='Sartography Pharmaceuticals',
-            ind_number='1234'
-        ),
-        StudyModel(
-            id=2,
-            title='Requirement of hippocampal neurogenesis for the behavioral effects of soft pretzels',
-            last_updated=datetime.datetime.now(),
-            protocol_builder_status='in_process',
-            primary_investigator_id='dhf8r',
-            sponsor='Makerspace & Co.',
-            ind_number='5678'
-        ),
-    ]
+    def make_data(self):
+        studies = [
+            StudyModel(
+                id=1,
+                title='The impact of fried pickles on beer consumption in bipedal software developers.',
+                last_updated=datetime.datetime.now(),
+                protocol_builder_status='in_process',
+                primary_investigator_id='dhf8r',
+                sponsor='Sartography Pharmaceuticals',
+                ind_number='1234'
+            ),
+            StudyModel(
+                id=2,
+                title='Requirement of hippocampal neurogenesis for the behavioral effects of soft pretzels',
+                last_updated=datetime.datetime.now(),
+                protocol_builder_status='in_process',
+                primary_investigator_id='dhf8r',
+                sponsor='Makerspace & Co.',
+                ind_number='5678'
+            ),
+        ]

-    workflow_specs = [WorkflowSpecModel(
-        id="random_fact",
-        display_name="Random Fact Generator",
-        description='Displays a random fact about a topic of your choosing.',
-    )]
+        workflow_specs = [WorkflowSpecModel(
+            id="random_fact",
+            display_name="Random Fact Generator",
+            description='Displays a random fact about a topic of your choosing.',
+        )]

-    workflow_spec_files = [WorkflowSpecModel(
-        id="random_fact",
-        display_name="Random Fact Generator",
-        description='Displays a random fact about a topic of your choosing.',
-    )]
+        workflow_spec_files = [WorkflowSpecModel(
+            id="random_fact",
+            display_name="Random Fact Generator",
+            description='Displays a random fact about a topic of your choosing.',
+        )]

-    workflow_spec_files = [FileModel(name="random_fact.bpmn",
-                                     type=FileType.bpmn,
-                                     version="1",
-                                     last_updated=datetime.datetime.now(),
-                                     primary=True,
-                                     workflow_spec_id=workflow_specs[0].id)]
+        workflow_spec_files = [FileModel(name="random_fact.bpmn",
+                                         type=FileType.bpmn,
+                                         version="1",
+                                         last_updated=datetime.datetime.now(),
+                                         primary=True,
+                                         workflow_spec_id=workflow_specs[0].id)]

-    filename = os.path.join(app.root_path, 'static', 'bpmn', 'random_fact', 'random_fact.bpmn')
-    file = open(filename, "rb")
-    workflow_data = [FileDataModel(data=file.read(), file_model=workflow_spec_files[0])]
+        filename = os.path.join(app.root_path, 'static', 'bpmn', 'random_fact', 'random_fact.bpmn')
+        file = open(filename, "rb")
+        workflow_data = [FileDataModel(data=file.read(), file_model=workflow_spec_files[0])]
+        all_data = studies+workflow_specs+workflow_spec_files+workflow_data
+        return all_data

     @staticmethod
     def clean_db():
@@ -58,9 +61,6 @@ class ExampleDataLoader:
         db.session.flush()

     def load_all(self):
-        db.session.bulk_save_objects(ExampleDataLoader.studies)
-        db.session.bulk_save_objects(ExampleDataLoader.workflow_specs)
-        db.session.bulk_save_objects(ExampleDataLoader.workflow_spec_files)
-        db.session.bulk_save_objects(ExampleDataLoader.workflow_data)
+        db.session.add_all(self.make_data())
         db.session.commit()
         db.session.flush()
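
A plausible reading of this hunk (the commit message doesn't spell it out): bulk_save_objects() bypasses the normal unit of work and does not follow relationships, so it could never fill the new file_data.file_model_id column from FileDataModel.file_model, whereas add_all() on the freshly built objects from make_data() does. A sketch of how the loader would now be driven; the crc and example_data import paths are assumptions about the project layout:

from crc import app                           # assumed package layout
from example_data import ExampleDataLoader    # assumed module name for the loader

with app.app_context():
    ExampleDataLoader.clean_db()              # static helper shown above
    ExampleDataLoader().load_all()            # add_all() lets the ORM resolve file_model -> file_model_id
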
@@ -0,0 +1,32 @@
+""" Don't use a primary key as the foreign key, things blow up.
+
+Revision ID: 555d3c6c0f9a
+Revises: cb645597b71b
+Create Date: 2019-12-30 15:58:20.633838
+
+"""
+from alembic import op
+import sqlalchemy as sa
+
+
+# revision identifiers, used by Alembic.
+revision = '555d3c6c0f9a'
+down_revision = 'cb645597b71b'
+branch_labels = None
+depends_on = None
+
+
+def upgrade():
+    # ### commands auto generated by Alembic - please adjust! ###
+    op.add_column('file_data', sa.Column('file_model_id', sa.Integer(), nullable=True))
+    op.drop_constraint(None, 'file_data', type_='foreignkey')
+    op.create_foreign_key(None, 'file_data', 'file', ['file_model_id'], ['id'])
+    # ### end Alembic commands ###
+
+
+def downgrade():
+    # ### commands auto generated by Alembic - please adjust! ###
+    op.drop_constraint(None, 'file_data', type_='foreignkey')
+    op.create_foreign_key(None, 'file_data', 'file', ['id'], ['id'])
+    op.drop_column('file_data', 'file_model_id')
+    # ### end Alembic commands ###
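
One thing worth flagging in the autogenerated migration: Alembic emits None for the constraint names because it never captured what the old foreign key on file_data.id was called, and op.drop_constraint(None, ...) generally refuses to run until a real name is supplied, which is what the "please adjust" comment is asking for. A sketch of that adjustment, where both constraint names are placeholders guessed from PostgreSQL-style defaults rather than names taken from the actual database:

from alembic import op
import sqlalchemy as sa


def upgrade():
    op.add_column('file_data', sa.Column('file_model_id', sa.Integer(), nullable=True))
    # placeholder name; substitute the real constraint name from the database
    op.drop_constraint('file_data_id_fkey', 'file_data', type_='foreignkey')
    # naming the new constraint explicitly keeps the downgrade path symmetric
    op.create_foreign_key('file_data_file_model_id_fkey', 'file_data', 'file',
                          ['file_model_id'], ['id'])
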
@@ -65,14 +65,14 @@ class TestStudy(BaseTest, unittest.TestCase):
         self.assertEqual(spec.id, workflow.workflow_spec_id)

         json_data = json.loads(rv.get_data(as_text=True))
-        workflows = WorkflowSchema(many=True).load(json_data, session=db.session)
-        self.assertEqual(workflows[0].id, workflow.id)
+        workflow = WorkflowSchema().load(json_data, session=db.session)
+        self.assertEqual(workflow.id, workflow.id)

     def test_delete_workflow(self):
         self.load_example_data()
         study = db.session.query(StudyModel).first()
         spec = db.session.query(WorkflowSpecModel).first()
-        rv = self.app.post('/v1.0/study/%i/workflows' % study.id,content_type="application/json",
+        rv = self.app.post('/v1.0/study/%i/workflows' % study.id, content_type="application/json",
                            data=json.dumps(WorkflowSpecSchema().dump(spec)))
         self.assertEqual(1, db.session.query(WorkflowModel).count())
         json_data = json.loads(rv.get_data(as_text=True))