From 0c1c9dcec507c6c7b2e179abbdfdf56502b58715 Mon Sep 17 00:00:00 2001 From: jasquat Date: Fri, 20 May 2022 14:25:12 -0400 Subject: [PATCH] respect committed migration dir and run flask upgrade in nox w/ burnettk --- .gitignore | 2 +- bin/recreate_db | 13 ++-- migrations/README | 1 + migrations/alembic.ini | 50 +++++++++++++++ migrations/env.py | 91 +++++++++++++++++++++++++++ migrations/script.py.mako | 24 +++++++ migrations/versions/1f7b1ad256dc_.py | 58 +++++++++++++++++ noxfile.py | 5 +- src/spiff_workflow_webapp/db.sqlite3 | Bin 323584 -> 36864 bytes 9 files changed, 232 insertions(+), 12 deletions(-) create mode 100644 migrations/README create mode 100644 migrations/alembic.ini create mode 100644 migrations/env.py create mode 100644 migrations/script.py.mako create mode 100644 migrations/versions/1f7b1ad256dc_.py diff --git a/.gitignore b/.gitignore index c7691a80..d730b9a2 100644 --- a/.gitignore +++ b/.gitignore @@ -11,4 +11,4 @@ __pycache__/ monkeytype.sqlite3 # we will probably want this committed at some point -/migrations +# /migrations diff --git a/bin/recreate_db b/bin/recreate_db index d2ede16c..14624e33 100755 --- a/bin/recreate_db +++ b/bin/recreate_db @@ -13,14 +13,11 @@ if [[ "${1:-}" == "clean" ]]; then rm -rf migrations/ - # if [[ "${TEST_DATABASE_TYPE:-}" == "sqlite" ]]; then - # rm -f ./src/spiff_workflow_webapp/db.sqlite3 - # else - # mysql -uroot -e "DROP DATABASE IF EXISTS spiff_workflow_webapp_development" - # mysql -uroot -e "CREATE DATABASE spiff_workflow_webapp_development" - # mysql -uroot -e "DROP DATABASE IF EXISTS spiff_workflow_webapp_testing" - # mysql -uroot -e "CREATE DATABASE spiff_workflow_webapp_testing" - # fi + rm -f ./src/spiff_workflow_webapp/db.sqlite3 + mysql -uroot -e "DROP DATABASE IF EXISTS spiff_workflow_webapp_development" + mysql -uroot -e "CREATE DATABASE spiff_workflow_webapp_development" + mysql -uroot -e "DROP DATABASE IF EXISTS spiff_workflow_webapp_testing" + mysql -uroot -e "CREATE DATABASE spiff_workflow_webapp_testing" fi tasks="$tasks migrate upgrade" diff --git a/migrations/README b/migrations/README new file mode 100644 index 00000000..0e048441 --- /dev/null +++ b/migrations/README @@ -0,0 +1 @@ +Single-database configuration for Flask. diff --git a/migrations/alembic.ini b/migrations/alembic.ini new file mode 100644 index 00000000..ec9d45c2 --- /dev/null +++ b/migrations/alembic.ini @@ -0,0 +1,50 @@ +# A generic, single database configuration. 
+ +[alembic] +# template used to generate migration files +# file_template = %%(rev)s_%%(slug)s + +# set to 'true' to run the environment during +# the 'revision' command, regardless of autogenerate +# revision_environment = false + + +# Logging configuration +[loggers] +keys = root,sqlalchemy,alembic,flask_migrate + +[handlers] +keys = console + +[formatters] +keys = generic + +[logger_root] +level = WARN +handlers = console +qualname = + +[logger_sqlalchemy] +level = WARN +handlers = +qualname = sqlalchemy.engine + +[logger_alembic] +level = INFO +handlers = +qualname = alembic + +[logger_flask_migrate] +level = INFO +handlers = +qualname = flask_migrate + +[handler_console] +class = StreamHandler +args = (sys.stderr,) +level = NOTSET +formatter = generic + +[formatter_generic] +format = %(levelname)-5.5s [%(name)s] %(message)s +datefmt = %H:%M:%S diff --git a/migrations/env.py b/migrations/env.py new file mode 100644 index 00000000..68feded2 --- /dev/null +++ b/migrations/env.py @@ -0,0 +1,91 @@ +from __future__ import with_statement + +import logging +from logging.config import fileConfig + +from flask import current_app + +from alembic import context + +# this is the Alembic Config object, which provides +# access to the values within the .ini file in use. +config = context.config + +# Interpret the config file for Python logging. +# This line sets up loggers basically. +fileConfig(config.config_file_name) +logger = logging.getLogger('alembic.env') + +# add your model's MetaData object here +# for 'autogenerate' support +# from myapp import mymodel +# target_metadata = mymodel.Base.metadata +config.set_main_option( + 'sqlalchemy.url', + str(current_app.extensions['migrate'].db.get_engine().url).replace( + '%', '%%')) +target_metadata = current_app.extensions['migrate'].db.metadata + +# other values from the config, defined by the needs of env.py, +# can be acquired: +# my_important_option = config.get_main_option("my_important_option") +# ... etc. + + +def run_migrations_offline(): + """Run migrations in 'offline' mode. + + This configures the context with just a URL + and not an Engine, though an Engine is acceptable + here as well. By skipping the Engine creation + we don't even need a DBAPI to be available. + + Calls to context.execute() here emit the given string to the + script output. + + """ + url = config.get_main_option("sqlalchemy.url") + context.configure( + url=url, target_metadata=target_metadata, literal_binds=True + ) + + with context.begin_transaction(): + context.run_migrations() + + +def run_migrations_online(): + """Run migrations in 'online' mode. + + In this scenario we need to create an Engine + and associate a connection with the context. 
+ + """ + + # this callback is used to prevent an auto-migration from being generated + # when there are no changes to the schema + # reference: http://alembic.zzzcomputing.com/en/latest/cookbook.html + def process_revision_directives(context, revision, directives): + if getattr(config.cmd_opts, 'autogenerate', False): + script = directives[0] + if script.upgrade_ops.is_empty(): + directives[:] = [] + logger.info('No changes in schema detected.') + + connectable = current_app.extensions['migrate'].db.get_engine() + + with connectable.connect() as connection: + context.configure( + connection=connection, + target_metadata=target_metadata, + process_revision_directives=process_revision_directives, + **current_app.extensions['migrate'].configure_args + ) + + with context.begin_transaction(): + context.run_migrations() + + +if context.is_offline_mode(): + run_migrations_offline() +else: + run_migrations_online() diff --git a/migrations/script.py.mako b/migrations/script.py.mako new file mode 100644 index 00000000..2c015630 --- /dev/null +++ b/migrations/script.py.mako @@ -0,0 +1,24 @@ +"""${message} + +Revision ID: ${up_revision} +Revises: ${down_revision | comma,n} +Create Date: ${create_date} + +""" +from alembic import op +import sqlalchemy as sa +${imports if imports else ""} + +# revision identifiers, used by Alembic. +revision = ${repr(up_revision)} +down_revision = ${repr(down_revision)} +branch_labels = ${repr(branch_labels)} +depends_on = ${repr(depends_on)} + + +def upgrade(): + ${upgrades if upgrades else "pass"} + + +def downgrade(): + ${downgrades if downgrades else "pass"} diff --git a/migrations/versions/1f7b1ad256dc_.py b/migrations/versions/1f7b1ad256dc_.py new file mode 100644 index 00000000..7469e3c6 --- /dev/null +++ b/migrations/versions/1f7b1ad256dc_.py @@ -0,0 +1,58 @@ +"""empty message + +Revision ID: 1f7b1ad256dc +Revises: +Create Date: 2022-05-20 14:21:53.581395 + +""" +from alembic import op +import sqlalchemy as sa + + +# revision identifiers, used by Alembic. +revision = '1f7b1ad256dc' +down_revision = None +branch_labels = None +depends_on = None + + +def upgrade(): + # ### commands auto generated by Alembic - please adjust! ### + op.create_table('group', + sa.Column('id', sa.Integer(), nullable=False), + sa.Column('name', sa.String(length=255), nullable=True), + sa.Column('new_name_two', sa.String(length=255), nullable=True), + sa.PrimaryKeyConstraint('id') + ) + op.create_table('process_model', + sa.Column('id', sa.Integer(), nullable=False), + sa.Column('bpmn_json', sa.JSON(), nullable=True), + sa.PrimaryKeyConstraint('id') + ) + op.create_table('user', + sa.Column('id', sa.Integer(), nullable=False), + sa.Column('username', sa.String(length=50), nullable=False), + sa.Column('name', sa.String(length=50), nullable=True), + sa.Column('email', sa.String(length=50), nullable=True), + sa.PrimaryKeyConstraint('id'), + sa.UniqueConstraint('username') + ) + op.create_table('user_group_assignment', + sa.Column('id', sa.Integer(), nullable=False), + sa.Column('user_id', sa.Integer(), nullable=False), + sa.Column('group_id', sa.Integer(), nullable=False), + sa.ForeignKeyConstraint(['group_id'], ['group.id'], ), + sa.ForeignKeyConstraint(['user_id'], ['user.id'], ), + sa.PrimaryKeyConstraint('id'), + sa.UniqueConstraint('user_id', 'group_id', name='user_group_assignment_unique') + ) + # ### end Alembic commands ### + + +def downgrade(): + # ### commands auto generated by Alembic - please adjust! 
###
+    op.drop_table('user_group_assignment')
+    op.drop_table('user')
+    op.drop_table('process_model')
+    op.drop_table('group')
+    # ### end Alembic commands ###
diff --git a/noxfile.py b/noxfile.py
index 9471810e..ab01f89f 100644
--- a/noxfile.py
+++ b/noxfile.py
@@ -132,9 +132,8 @@ def tests(session: Session) -> None:
     session.install(".")
     session.install("coverage[toml]", "pytest", "pygments")
     try:
-        session.run("ls")
-        session.run("pwd")
-        session.run("./bin/recreate_db", "clean")
+        session.env["FLASK_ENV"] = "testing"
+        session.run("flask", "upgrade")
         session.run("coverage", "run", "--parallel", "-m", "pytest", *session.posargs)
     finally:
         if session.interactive:
diff --git a/src/spiff_workflow_webapp/db.sqlite3 b/src/spiff_workflow_webapp/db.sqlite3
index ea1f707d121b71330af432903225f1dbd6c55ba0..c92410092742e42623894b3763a51ffcc09e3a70 100644
GIT binary patch
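
A note on the noxfile.py hunk above: the tests session now exports FLASK_ENV=testing and runs a bare "flask upgrade" instead of "./bin/recreate_db clean", so tests apply the committed revision in migrations/versions/1f7b1ad256dc_.py rather than regenerating migrations from scratch. Stock Flask-Migrate exposes this operation as "flask db upgrade", so a bare "flask upgrade" suggests a project-level CLI command. The sketch below is only an illustration of how such a command could be wired up; the app object, config values, and database URI here are assumptions, not code taken from this patch.

# Hypothetical sketch -- not part of this patch. One way a bare
# `flask upgrade` command could sit on top of Flask-Migrate, assuming
# SQLAlchemy and Migrate are initialized on the app as shown here.
from flask import Flask
from flask_migrate import Migrate, upgrade
from flask_sqlalchemy import SQLAlchemy

app = Flask(__name__)
# Placeholder URI; the real app presumably selects MySQL or SQLite
# based on FLASK_ENV / its own configuration.
app.config["SQLALCHEMY_DATABASE_URI"] = "sqlite:///db.sqlite3"

db = SQLAlchemy(app)
migrate = Migrate(app, db, directory="migrations")  # the committed migrations/ dir


@app.cli.command("upgrade")
def upgrade_command() -> None:
    """Apply all committed Alembic revisions; same effect as `flask db upgrade`."""
    upgrade()

With a command like this registered, running "FLASK_ENV=testing flask upgrade" (which is what the updated tests session does) brings the configured database up to revision 1f7b1ad256dc without regenerating the migrations/ directory.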