From 32652b6fd8a62b20ce910f9e626e891ec7c205f6 Mon Sep 17 00:00:00 2001 From: Michael Milton Date: Wed, 5 Aug 2020 13:34:37 +1000 Subject: [PATCH 1/3] Add complete migrations, support unix sockets with postgres, use UnicodeText columns --- megaqc/database.py | 6 +- megaqc/migrations/env.py | 5 +- megaqc/migrations/versions/007c354223ec_.py | 112 +++++ megaqc/migrations/versions/4515f66f3690_.py | 529 ++++++++++++++++++++ megaqc/migrations/versions/dc45ba7b086d_.py | 27 - megaqc/migrations/versions/e38ef2ac89ab_.py | 75 +++ megaqc/migrations/versions/eb9fdf01bbae_.py | 225 +++++++++ megaqc/model/models.py | 59 +-- megaqc/settings.py | 26 +- megaqc/user/models.py | 18 +- 10 files changed, 1012 insertions(+), 70 deletions(-) create mode 100644 megaqc/migrations/versions/007c354223ec_.py create mode 100644 megaqc/migrations/versions/4515f66f3690_.py delete mode 100644 megaqc/migrations/versions/dc45ba7b086d_.py create mode 100644 megaqc/migrations/versions/e38ef2ac89ab_.py create mode 100644 megaqc/migrations/versions/eb9fdf01bbae_.py diff --git a/megaqc/database.py b/megaqc/database.py index 1669c14b..23af3652 100644 --- a/megaqc/database.py +++ b/megaqc/database.py @@ -11,7 +11,7 @@ from sqlalchemy.orm import scoped_session, sessionmaker from .compat import basestring -from .extensions import db +from .extensions import db, migrate # Alias common SQLAlchemy names Column = db.Column @@ -135,4 +135,8 @@ def init_db(url): """Initializes the database.""" db.metadata.bind = engine db.metadata.create_all() + + # Tell alembic that we're at the latest migration, since we just created everything from scratch + migrate.stamp() + print("Initialized the database.") diff --git a/megaqc/migrations/env.py b/megaqc/migrations/env.py index 31f3d447..fda67331 100644 --- a/megaqc/migrations/env.py +++ b/megaqc/migrations/env.py @@ -41,7 +41,9 @@ def run_migrations_offline(): script output. """ url = config.get_main_option("sqlalchemy.url") - context.configure(url=url, target_metadata=target_metadata, literal_binds=True) + context.configure( + url=url, target_metadata=target_metadata, literal_binds=True, compare_type=True + ) with context.begin_transaction(): context.run_migrations() @@ -74,6 +76,7 @@ def process_revision_directives(context, revision, directives): with connectable.connect() as connection: context.configure( connection=connection, + compare_type=True, target_metadata=target_metadata, process_revision_directives=process_revision_directives, **current_app.extensions["migrate"].configure_args diff --git a/megaqc/migrations/versions/007c354223ec_.py b/megaqc/migrations/versions/007c354223ec_.py new file mode 100644 index 00000000..6f97c31c --- /dev/null +++ b/megaqc/migrations/versions/007c354223ec_.py @@ -0,0 +1,112 @@ +""" +empty message. + +Revision ID: 007c354223ec +Revises: e38ef2ac89ab +Create Date: 2020-08-05 12:01:31.378972 +""" +import sqlalchemy as sa +from alembic import op + +# revision identifiers, used by Alembic. +revision = "007c354223ec" +down_revision = "e38ef2ac89ab" +branch_labels = None +depends_on = None + + +def upgrade(): + # ### commands auto generated by Alembic - please adjust! 
### + op.drop_index("ix_report_report_hash", table_name="report") + op.create_index( + op.f("ix_report_report_hash"), "report", ["report_hash"], unique=True + ) + op.drop_constraint("report_user_id_fkey", "report", type_="foreignkey") + op.create_foreign_key( + None, "report", "users", ["user_id"], ["user_id"], ondelete="SET NULL" + ) + op.alter_column( + "report_meta", "report_id", existing_type=sa.INTEGER(), nullable=False + ) + op.drop_constraint("report_meta_report_id_fkey", "report_meta", type_="foreignkey") + op.create_foreign_key( + None, "report_meta", "report", ["report_id"], ["report_id"], ondelete="CASCADE" + ) + op.alter_column("sample", "report_id", existing_type=sa.INTEGER(), nullable=False) + op.drop_constraint("sample_report_id_fkey", "sample", type_="foreignkey") + op.create_foreign_key( + None, "sample", "report", ["report_id"], ["report_id"], ondelete="CASCADE" + ) + op.alter_column( + "sample_data", "sample_data_type_id", existing_type=sa.INTEGER(), nullable=False + ) + op.alter_column( + "sample_data", "sample_id", existing_type=sa.INTEGER(), nullable=False + ) + op.drop_constraint("sample_data_sample_id_fkey", "sample_data", type_="foreignkey") + op.drop_constraint( + "sample_data_sample_data_type_id_fkey", "sample_data", type_="foreignkey" + ) + op.create_foreign_key( + None, "sample_data", "sample", ["sample_id"], ["sample_id"], ondelete="CASCADE" + ) + op.create_foreign_key( + None, + "sample_data", + "sample_data_type", + ["sample_data_type_id"], + ["sample_data_type_id"], + ondelete="CASCADE", + ) + op.add_column("sample_data_type", sa.Column("schema", sa.Unicode(), nullable=True)) + # ### end Alembic commands ### + + +def downgrade(): + # ### commands auto generated by Alembic - please adjust! ### + op.drop_column("sample_data_type", "schema") + op.drop_constraint(None, "sample_data", type_="foreignkey") + op.drop_constraint(None, "sample_data", type_="foreignkey") + op.create_foreign_key( + "sample_data_sample_data_type_id_fkey", + "sample_data", + "sample_data_type", + ["sample_data_type_id"], + ["sample_data_type_id"], + ) + op.create_foreign_key( + "sample_data_sample_id_fkey", + "sample_data", + "sample", + ["sample_id"], + ["sample_id"], + ) + op.alter_column( + "sample_data", "sample_id", existing_type=sa.INTEGER(), nullable=True + ) + op.alter_column( + "sample_data", "sample_data_type_id", existing_type=sa.INTEGER(), nullable=True + ) + op.drop_constraint(None, "sample", type_="foreignkey") + op.create_foreign_key( + "sample_report_id_fkey", "sample", "report", ["report_id"], ["report_id"] + ) + op.alter_column("sample", "report_id", existing_type=sa.INTEGER(), nullable=True) + op.drop_constraint(None, "report_meta", type_="foreignkey") + op.create_foreign_key( + "report_meta_report_id_fkey", + "report_meta", + "report", + ["report_id"], + ["report_id"], + ) + op.alter_column( + "report_meta", "report_id", existing_type=sa.INTEGER(), nullable=True + ) + op.drop_constraint(None, "report", type_="foreignkey") + op.create_foreign_key( + "report_user_id_fkey", "report", "users", ["user_id"], ["user_id"] + ) + op.drop_index(op.f("ix_report_report_hash"), table_name="report") + op.create_index("ix_report_report_hash", "report", ["report_hash"], unique=False) + # ### end Alembic commands ### diff --git a/megaqc/migrations/versions/4515f66f3690_.py b/megaqc/migrations/versions/4515f66f3690_.py new file mode 100644 index 00000000..7ea2805e --- /dev/null +++ b/megaqc/migrations/versions/4515f66f3690_.py @@ -0,0 +1,529 @@ +""" +empty message. 
+ +Revision ID: 4515f66f3690 +Revises: 007c354223ec +Create Date: 2020-08-05 12:11:26.802210 +""" +import sqlalchemy as sa +from alembic import op + +# revision identifiers, used by Alembic. +revision = "4515f66f3690" +down_revision = "007c354223ec" +branch_labels = None +depends_on = None + + +def upgrade(): + # ### commands auto generated by Alembic - please adjust! ### + op.alter_column( + "dashboard", + "data", + existing_type=sa.VARCHAR(length=2048), + type_=sa.UnicodeText(), + existing_nullable=False, + ) + op.alter_column( + "dashboard", + "title", + existing_type=sa.VARCHAR(length=2048), + type_=sa.UnicodeText(), + existing_nullable=False, + ) + op.alter_column( + "plot_category", + "category_name", + existing_type=sa.VARCHAR(length=128), + type_=sa.UnicodeText(), + existing_nullable=True, + ) + op.alter_column( + "plot_category", + "data", + existing_type=sa.VARCHAR(length=2048), + type_=sa.UnicodeText(), + existing_nullable=False, + ) + op.alter_column( + "plot_config", + "config_dataset", + existing_type=sa.VARCHAR(length=80), + type_=sa.UnicodeText(), + existing_nullable=True, + ) + op.alter_column( + "plot_config", + "config_name", + existing_type=sa.VARCHAR(length=80), + type_=sa.UnicodeText(), + existing_nullable=False, + ) + op.alter_column( + "plot_config", + "config_type", + existing_type=sa.VARCHAR(length=80), + type_=sa.UnicodeText(), + existing_nullable=False, + ) + op.alter_column( + "plot_config", + "data", + existing_type=sa.VARCHAR(), + type_=sa.UnicodeText(), + existing_nullable=False, + ) + op.alter_column( + "plot_data", + "data", + existing_type=sa.VARCHAR(), + type_=sa.UnicodeText(), + existing_nullable=False, + ) + op.alter_column( + "plot_favourite", + "data", + existing_type=sa.VARCHAR(length=2048), + type_=sa.UnicodeText(), + existing_nullable=False, + ) + op.alter_column( + "plot_favourite", + "description", + existing_type=sa.VARCHAR(length=2048), + type_=sa.UnicodeText(), + existing_nullable=True, + ) + op.alter_column( + "plot_favourite", + "plot_type", + existing_type=sa.VARCHAR(length=128), + type_=sa.UnicodeText(), + existing_nullable=False, + ) + op.alter_column( + "plot_favourite", + "title", + existing_type=sa.VARCHAR(length=2048), + type_=sa.UnicodeText(), + existing_nullable=False, + ) + op.alter_column( + "report", + "report_hash", + existing_type=sa.VARCHAR(), + type_=sa.UnicodeText(), + existing_nullable=True, + ) + op.alter_column( + "report_meta", + "report_meta_key", + existing_type=sa.VARCHAR(length=80), + type_=sa.UnicodeText(), + existing_nullable=False, + ) + op.alter_column( + "report_meta", + "report_meta_value", + existing_type=sa.VARCHAR(length=80), + type_=sa.UnicodeText(), + existing_nullable=False, + ) + op.alter_column( + "roles", + "name", + existing_type=sa.VARCHAR(length=80), + type_=sa.UnicodeText(), + existing_nullable=False, + ) + op.alter_column( + "sample", + "sample_name", + existing_type=sa.VARCHAR(length=80), + type_=sa.UnicodeText(), + existing_nullable=True, + ) + op.alter_column( + "sample_data", + "value", + existing_type=sa.VARCHAR(length=1024), + type_=sa.UnicodeText(), + existing_nullable=True, + ) + op.alter_column( + "sample_data_type", + "data_id", + existing_type=sa.VARCHAR(length=128), + type_=sa.UnicodeText(), + existing_nullable=True, + ) + op.alter_column( + "sample_data_type", + "data_key", + existing_type=sa.VARCHAR(length=128), + type_=sa.UnicodeText(), + existing_nullable=False, + ) + op.alter_column( + "sample_data_type", + "data_section", + existing_type=sa.VARCHAR(length=80), + 
type_=sa.UnicodeText(), + existing_nullable=True, + ) + op.alter_column( + "sample_data_type", + "schema", + existing_type=sa.VARCHAR(), + type_=sa.UnicodeText(), + existing_nullable=True, + ) + op.alter_column( + "sample_filter", + "sample_filter_data", + existing_type=sa.VARCHAR(length=2048), + type_=sa.UnicodeText(), + existing_nullable=False, + ) + op.alter_column( + "sample_filter", + "sample_filter_name", + existing_type=sa.VARCHAR(length=80), + type_=sa.UnicodeText(), + existing_nullable=True, + ) + op.alter_column( + "sample_filter", + "sample_filter_tag", + existing_type=sa.VARCHAR(length=80), + type_=sa.UnicodeText(), + existing_nullable=True, + ) + op.alter_column( + "uploads", + "message", + existing_type=sa.VARCHAR(length=2048), + type_=sa.UnicodeText(), + existing_nullable=True, + ) + op.alter_column( + "uploads", + "path", + existing_type=sa.VARCHAR(length=2048), + type_=sa.UnicodeText(), + existing_nullable=True, + ) + op.alter_column( + "uploads", + "status", + existing_type=sa.VARCHAR(length=80), + type_=sa.UnicodeText(), + existing_nullable=True, + ) + op.alter_column( + "users", + "api_token", + existing_type=sa.VARCHAR(length=80), + type_=sa.UnicodeText(), + existing_nullable=True, + ) + op.alter_column( + "users", + "email", + existing_type=sa.VARCHAR(length=80), + type_=sa.UnicodeText(), + existing_nullable=False, + ) + op.alter_column( + "users", + "first_name", + existing_type=sa.VARCHAR(length=30), + type_=sa.UnicodeText(), + existing_nullable=True, + ) + op.alter_column( + "users", + "last_name", + existing_type=sa.VARCHAR(length=30), + type_=sa.UnicodeText(), + existing_nullable=True, + ) + op.alter_column( + "users", + "password", + existing_type=sa.VARCHAR(length=128), + type_=sa.UnicodeText(), + existing_nullable=True, + ) + op.alter_column( + "users", + "salt", + existing_type=sa.VARCHAR(length=80), + type_=sa.UnicodeText(), + existing_nullable=True, + ) + op.alter_column( + "users", + "username", + existing_type=sa.VARCHAR(length=80), + type_=sa.UnicodeText(), + existing_nullable=False, + ) + # ### end Alembic commands ### + + +def downgrade(): + # ### commands auto generated by Alembic - please adjust! 
### + op.alter_column( + "users", + "username", + existing_type=sa.UnicodeText(), + type_=sa.VARCHAR(length=80), + existing_nullable=False, + ) + op.alter_column( + "users", + "salt", + existing_type=sa.UnicodeText(), + type_=sa.VARCHAR(length=80), + existing_nullable=True, + ) + op.alter_column( + "users", + "password", + existing_type=sa.UnicodeText(), + type_=sa.VARCHAR(length=128), + existing_nullable=True, + ) + op.alter_column( + "users", + "last_name", + existing_type=sa.UnicodeText(), + type_=sa.VARCHAR(length=30), + existing_nullable=True, + ) + op.alter_column( + "users", + "first_name", + existing_type=sa.UnicodeText(), + type_=sa.VARCHAR(length=30), + existing_nullable=True, + ) + op.alter_column( + "users", + "email", + existing_type=sa.UnicodeText(), + type_=sa.VARCHAR(length=80), + existing_nullable=False, + ) + op.alter_column( + "users", + "api_token", + existing_type=sa.UnicodeText(), + type_=sa.VARCHAR(length=80), + existing_nullable=True, + ) + op.alter_column( + "uploads", + "status", + existing_type=sa.UnicodeText(), + type_=sa.VARCHAR(length=80), + existing_nullable=True, + ) + op.alter_column( + "uploads", + "path", + existing_type=sa.UnicodeText(), + type_=sa.VARCHAR(length=2048), + existing_nullable=True, + ) + op.alter_column( + "uploads", + "message", + existing_type=sa.UnicodeText(), + type_=sa.VARCHAR(length=2048), + existing_nullable=True, + ) + op.alter_column( + "sample_filter", + "sample_filter_tag", + existing_type=sa.UnicodeText(), + type_=sa.VARCHAR(length=80), + existing_nullable=True, + ) + op.alter_column( + "sample_filter", + "sample_filter_name", + existing_type=sa.UnicodeText(), + type_=sa.VARCHAR(length=80), + existing_nullable=True, + ) + op.alter_column( + "sample_filter", + "sample_filter_data", + existing_type=sa.UnicodeText(), + type_=sa.VARCHAR(length=2048), + existing_nullable=False, + ) + op.alter_column( + "sample_data_type", + "schema", + existing_type=sa.UnicodeText(), + type_=sa.VARCHAR(), + existing_nullable=True, + ) + op.alter_column( + "sample_data_type", + "data_section", + existing_type=sa.UnicodeText(), + type_=sa.VARCHAR(length=80), + existing_nullable=True, + ) + op.alter_column( + "sample_data_type", + "data_key", + existing_type=sa.UnicodeText(), + type_=sa.VARCHAR(length=128), + existing_nullable=False, + ) + op.alter_column( + "sample_data_type", + "data_id", + existing_type=sa.UnicodeText(), + type_=sa.VARCHAR(length=128), + existing_nullable=True, + ) + op.alter_column( + "sample_data", + "value", + existing_type=sa.UnicodeText(), + type_=sa.VARCHAR(length=1024), + existing_nullable=True, + ) + op.alter_column( + "sample", + "sample_name", + existing_type=sa.UnicodeText(), + type_=sa.VARCHAR(length=80), + existing_nullable=True, + ) + op.alter_column( + "roles", + "name", + existing_type=sa.UnicodeText(), + type_=sa.VARCHAR(length=80), + existing_nullable=False, + ) + op.alter_column( + "report_meta", + "report_meta_value", + existing_type=sa.UnicodeText(), + type_=sa.VARCHAR(length=80), + existing_nullable=False, + ) + op.alter_column( + "report_meta", + "report_meta_key", + existing_type=sa.UnicodeText(), + type_=sa.VARCHAR(length=80), + existing_nullable=False, + ) + op.alter_column( + "report", + "report_hash", + existing_type=sa.UnicodeText(), + type_=sa.VARCHAR(), + existing_nullable=True, + ) + op.alter_column( + "plot_favourite", + "title", + existing_type=sa.UnicodeText(), + type_=sa.VARCHAR(length=2048), + existing_nullable=False, + ) + op.alter_column( + "plot_favourite", + "plot_type", + 
existing_type=sa.UnicodeText(), + type_=sa.VARCHAR(length=128), + existing_nullable=False, + ) + op.alter_column( + "plot_favourite", + "description", + existing_type=sa.UnicodeText(), + type_=sa.VARCHAR(length=2048), + existing_nullable=True, + ) + op.alter_column( + "plot_favourite", + "data", + existing_type=sa.UnicodeText(), + type_=sa.VARCHAR(length=2048), + existing_nullable=False, + ) + op.alter_column( + "plot_data", + "data", + existing_type=sa.UnicodeText(), + type_=sa.VARCHAR(), + existing_nullable=False, + ) + op.alter_column( + "plot_config", + "data", + existing_type=sa.UnicodeText(), + type_=sa.VARCHAR(), + existing_nullable=False, + ) + op.alter_column( + "plot_config", + "config_type", + existing_type=sa.UnicodeText(), + type_=sa.VARCHAR(length=80), + existing_nullable=False, + ) + op.alter_column( + "plot_config", + "config_name", + existing_type=sa.UnicodeText(), + type_=sa.VARCHAR(length=80), + existing_nullable=False, + ) + op.alter_column( + "plot_config", + "config_dataset", + existing_type=sa.UnicodeText(), + type_=sa.VARCHAR(length=80), + existing_nullable=True, + ) + op.alter_column( + "plot_category", + "data", + existing_type=sa.UnicodeText(), + type_=sa.VARCHAR(length=2048), + existing_nullable=False, + ) + op.alter_column( + "plot_category", + "category_name", + existing_type=sa.UnicodeText(), + type_=sa.VARCHAR(length=128), + existing_nullable=True, + ) + op.alter_column( + "dashboard", + "title", + existing_type=sa.UnicodeText(), + type_=sa.VARCHAR(length=2048), + existing_nullable=False, + ) + op.alter_column( + "dashboard", + "data", + existing_type=sa.UnicodeText(), + type_=sa.VARCHAR(length=2048), + existing_nullable=False, + ) + # ### end Alembic commands ### diff --git a/megaqc/migrations/versions/dc45ba7b086d_.py b/megaqc/migrations/versions/dc45ba7b086d_.py deleted file mode 100644 index 05c94d55..00000000 --- a/megaqc/migrations/versions/dc45ba7b086d_.py +++ /dev/null @@ -1,27 +0,0 @@ -""" -empty message. - -Revision ID: dc45ba7b086d -Revises: -Create Date: 2019-09-23 18:25:54.379761 -""" -import sqlalchemy as sa -from alembic import op - -# revision identifiers, used by Alembic. -revision = "dc45ba7b086d" -down_revision = None -branch_labels = None -depends_on = None - - -def upgrade(): - # ### commands auto generated by Alembic - please adjust! ### - op.add_column("sample_data_type", sa.Column("schema", sa.Unicode(), nullable=True)) - # ### end Alembic commands ### - - -def downgrade(): - # ### commands auto generated by Alembic - please adjust! ### - op.drop_column("sample_data_type", "schema") - # ### end Alembic commands ### diff --git a/megaqc/migrations/versions/e38ef2ac89ab_.py b/megaqc/migrations/versions/e38ef2ac89ab_.py new file mode 100644 index 00000000..7c7751dc --- /dev/null +++ b/megaqc/migrations/versions/e38ef2ac89ab_.py @@ -0,0 +1,75 @@ +""" +empty message. + +Revision ID: e38ef2ac89ab +Revises: eb9fdf01bbae +Create Date: 2020-08-05 12:00:54.737816 +""" +import sqlalchemy as sa +from alembic import op + +# revision identifiers, used by Alembic. +revision = "e38ef2ac89ab" +down_revision = "eb9fdf01bbae" +branch_labels = None +depends_on = None + + +def upgrade(): + # ### commands auto generated by Alembic - please adjust! 
### + op.create_index( + op.f("ix_dashboard_is_public"), "dashboard", ["is_public"], unique=False + ) + op.create_index( + op.f("ix_dashboard_user_id"), "dashboard", ["user_id"], unique=False + ) + op.create_index( + op.f("ix_plot_data_report_id"), "plot_data", ["report_id"], unique=False + ) + op.create_index( + op.f("ix_plot_data_sample_id"), "plot_data", ["sample_id"], unique=False + ) + op.create_index( + op.f("ix_plot_favourite_user_id"), "plot_favourite", ["user_id"], unique=False + ) + op.create_index( + op.f("ix_report_report_hash"), "report", ["report_hash"], unique=False + ) + op.create_index(op.f("ix_report_user_id"), "report", ["user_id"], unique=False) + op.create_index( + op.f("ix_report_meta_report_id"), "report_meta", ["report_id"], unique=False + ) + op.create_index(op.f("ix_sample_report_id"), "sample", ["report_id"], unique=False) + op.create_index( + op.f("ix_sample_data_report_id"), "sample_data", ["report_id"], unique=False + ) + op.create_index( + op.f("ix_sample_data_sample_id"), "sample_data", ["sample_id"], unique=False + ) + op.create_index( + op.f("ix_sample_filter_is_public"), "sample_filter", ["is_public"], unique=False + ) + op.create_index( + op.f("ix_sample_filter_user_id"), "sample_filter", ["user_id"], unique=False + ) + op.create_index(op.f("ix_uploads_status"), "uploads", ["status"], unique=False) + # ### end Alembic commands ### + + +def downgrade(): + # ### commands auto generated by Alembic - please adjust! ### + op.drop_index(op.f("ix_uploads_status"), table_name="uploads") + op.drop_index(op.f("ix_sample_filter_user_id"), table_name="sample_filter") + op.drop_index(op.f("ix_sample_filter_is_public"), table_name="sample_filter") + op.drop_index(op.f("ix_sample_data_sample_id"), table_name="sample_data") + op.drop_index(op.f("ix_sample_data_report_id"), table_name="sample_data") + op.drop_index(op.f("ix_sample_report_id"), table_name="sample") + op.drop_index(op.f("ix_report_meta_report_id"), table_name="report_meta") + op.drop_index(op.f("ix_report_user_id"), table_name="report") + op.drop_index(op.f("ix_report_report_hash"), table_name="report") + op.drop_index(op.f("ix_plot_favourite_user_id"), table_name="plot_favourite") + op.drop_index(op.f("ix_plot_data_sample_id"), table_name="plot_data") + op.drop_index(op.f("ix_plot_data_report_id"), table_name="plot_data") + op.drop_index(op.f("ix_dashboard_user_id"), table_name="dashboard") + op.drop_index(op.f("ix_dashboard_is_public"), table_name="dashboard") + # ### end Alembic commands ### diff --git a/megaqc/migrations/versions/eb9fdf01bbae_.py b/megaqc/migrations/versions/eb9fdf01bbae_.py new file mode 100644 index 00000000..ca7c4fbf --- /dev/null +++ b/megaqc/migrations/versions/eb9fdf01bbae_.py @@ -0,0 +1,225 @@ +""" +empty message. + +Revision ID: eb9fdf01bbae +Revises: +Create Date: 2020-08-05 11:55:10.348319 +""" +import sqlalchemy as sa +from alembic import op + +# revision identifiers, used by Alembic. +revision = "eb9fdf01bbae" +down_revision = None +branch_labels = None +depends_on = None + + +def upgrade(): + # ### commands auto generated by Alembic - please adjust! 
### + op.create_table( + "plot_config", + sa.Column("config_id", sa.Integer(), nullable=False), + sa.Column("config_type", sa.String(length=80), nullable=False), + sa.Column("config_name", sa.String(length=80), nullable=False), + sa.Column("config_dataset", sa.String(length=80), nullable=True), + sa.Column("data", sa.String(), nullable=False), + sa.PrimaryKeyConstraint("config_id"), + ) + op.create_table( + "sample_data_type", + sa.Column("sample_data_type_id", sa.Integer(), nullable=False), + sa.Column("data_id", sa.String(length=128), nullable=True), + sa.Column("data_section", sa.String(length=80), nullable=True), + sa.Column("data_key", sa.String(length=128), nullable=False), + sa.PrimaryKeyConstraint("sample_data_type_id"), + ) + op.create_table( + "users", + sa.Column("user_id", sa.Integer(), nullable=False), + sa.Column( + "username", sa.String(length=80, _expect_unicode=True), nullable=False + ), + sa.Column("email", sa.String(length=80, _expect_unicode=True), nullable=False), + sa.Column("salt", sa.String(length=80, _expect_unicode=True), nullable=True), + sa.Column( + "password", sa.String(length=128, _expect_unicode=True), nullable=True + ), + sa.Column("created_at", sa.DateTime(), nullable=False), + sa.Column( + "first_name", sa.String(length=30, _expect_unicode=True), nullable=True + ), + sa.Column( + "last_name", sa.String(length=30, _expect_unicode=True), nullable=True + ), + sa.Column("active", sa.Boolean(), nullable=True), + sa.Column("is_admin", sa.Boolean(), nullable=True), + sa.Column( + "api_token", sa.String(length=80, _expect_unicode=True), nullable=True + ), + sa.PrimaryKeyConstraint("user_id"), + sa.UniqueConstraint("email"), + sa.UniqueConstraint("username"), + ) + op.create_table( + "dashboard", + sa.Column("dashboard_id", sa.Integer(), nullable=False), + sa.Column("user_id", sa.Integer(), nullable=True), + sa.Column("title", sa.String(length=2048), nullable=False), + sa.Column("data", sa.String(length=2048), nullable=False), + sa.Column("is_public", sa.Boolean(), nullable=True), + sa.Column("modified_at", sa.DateTime(), nullable=False), + sa.Column("created_at", sa.DateTime(), nullable=False), + sa.ForeignKeyConstraint(["user_id"], ["users.user_id"],), + sa.PrimaryKeyConstraint("dashboard_id"), + ) + op.create_table( + "plot_favourite", + sa.Column("plot_favourite_id", sa.Integer(), nullable=False), + sa.Column("user_id", sa.Integer(), nullable=True), + sa.Column("title", sa.String(length=2048), nullable=False), + sa.Column("description", sa.String(length=2048), nullable=True), + sa.Column("plot_type", sa.String(length=128), nullable=False), + sa.Column("data", sa.String(length=2048), nullable=False), + sa.Column("created_at", sa.DateTime(), nullable=False), + sa.ForeignKeyConstraint(["user_id"], ["users.user_id"],), + sa.PrimaryKeyConstraint("plot_favourite_id"), + ) + op.create_table( + "report", + sa.Column("report_id", sa.Integer(), nullable=False), + sa.Column("user_id", sa.Integer(), nullable=True), + sa.Column("report_hash", sa.String(), nullable=True), + sa.Column("created_at", sa.DateTime(), nullable=False), + sa.Column("uploaded_at", sa.DateTime(), nullable=False), + sa.ForeignKeyConstraint(["user_id"], ["users.user_id"],), + sa.PrimaryKeyConstraint("report_id"), + ) + op.create_table( + "roles", + sa.Column("role_id", sa.Integer(), nullable=False), + sa.Column("name", sa.String(length=80, _expect_unicode=True), nullable=False), + sa.Column("user_id", sa.Integer(), nullable=True), + sa.ForeignKeyConstraint(["user_id"], ["users.user_id"],), + 
sa.PrimaryKeyConstraint("role_id"), + sa.UniqueConstraint("name"), + ) + op.create_table( + "sample_filter", + sa.Column("sample_filter_id", sa.Integer(), nullable=False), + sa.Column("sample_filter_name", sa.String(length=80), nullable=True), + sa.Column("sample_filter_tag", sa.String(length=80), nullable=True), + sa.Column("is_public", sa.Boolean(), nullable=True), + sa.Column("sample_filter_data", sa.String(length=2048), nullable=False), + sa.Column("user_id", sa.Integer(), nullable=True), + sa.ForeignKeyConstraint(["user_id"], ["users.user_id"],), + sa.PrimaryKeyConstraint("sample_filter_id"), + ) + op.create_table( + "uploads", + sa.Column("upload_id", sa.Integer(), nullable=False), + sa.Column("status", sa.String(length=80), nullable=True), + sa.Column("path", sa.String(length=2048), nullable=True), + sa.Column("message", sa.String(length=2048), nullable=True), + sa.Column("created_at", sa.DateTime(), nullable=False), + sa.Column("modified_at", sa.DateTime(), nullable=False), + sa.Column("user_id", sa.Integer(), nullable=True), + sa.ForeignKeyConstraint(["user_id"], ["users.user_id"],), + sa.PrimaryKeyConstraint("upload_id"), + ) + op.create_table( + "user_plotconfig_map", + sa.Column("user_id", sa.Integer(), nullable=True), + sa.Column("plot_config_id", sa.Integer(), nullable=True), + sa.ForeignKeyConstraint(["plot_config_id"], ["plot_config.config_id"],), + sa.ForeignKeyConstraint(["user_id"], ["users.user_id"],), + ) + op.create_table( + "user_sampletype_map", + sa.Column("user_id", sa.Integer(), nullable=True), + sa.Column("sample_data_type_id", sa.Integer(), nullable=True), + sa.ForeignKeyConstraint( + ["sample_data_type_id"], ["sample_data_type.sample_data_type_id"], + ), + sa.ForeignKeyConstraint(["user_id"], ["users.user_id"],), + ) + op.create_table( + "plot_category", + sa.Column("plot_category_id", sa.Integer(), nullable=False), + sa.Column("report_id", sa.Integer(), nullable=True), + sa.Column("config_id", sa.Integer(), nullable=True), + sa.Column("category_name", sa.String(length=128), nullable=True), + sa.Column("data", sa.String(length=2048), nullable=False), + sa.ForeignKeyConstraint(["config_id"], ["plot_config.config_id"],), + sa.ForeignKeyConstraint(["report_id"], ["report.report_id"],), + sa.PrimaryKeyConstraint("plot_category_id"), + ) + op.create_table( + "report_meta", + sa.Column("report_meta_id", sa.Integer(), nullable=False), + sa.Column("report_meta_key", sa.String(length=80), nullable=False), + sa.Column("report_meta_value", sa.String(length=80), nullable=False), + sa.Column("report_id", sa.Integer(), nullable=True), + sa.ForeignKeyConstraint(["report_id"], ["report.report_id"],), + sa.PrimaryKeyConstraint("report_meta_id"), + ) + op.create_table( + "sample", + sa.Column("sample_id", sa.Integer(), nullable=False), + sa.Column("sample_name", sa.String(length=80), nullable=True), + sa.Column("report_id", sa.Integer(), nullable=True), + sa.ForeignKeyConstraint(["report_id"], ["report.report_id"],), + sa.PrimaryKeyConstraint("sample_id"), + ) + op.create_table( + "plot_data", + sa.Column("plot_data_id", sa.Integer(), nullable=False), + sa.Column("report_id", sa.Integer(), nullable=True), + sa.Column("config_id", sa.Integer(), nullable=True), + sa.Column("plot_category_id", sa.Integer(), nullable=True), + sa.Column("sample_id", sa.Integer(), nullable=True), + sa.Column("data", sa.String(), nullable=False), + sa.ForeignKeyConstraint(["config_id"], ["plot_config.config_id"],), + sa.ForeignKeyConstraint( + ["plot_category_id"], ["plot_category.plot_category_id"], 
+ ), + sa.ForeignKeyConstraint(["report_id"], ["report.report_id"],), + sa.ForeignKeyConstraint(["sample_id"], ["sample.sample_id"],), + sa.PrimaryKeyConstraint("plot_data_id"), + ) + op.create_table( + "sample_data", + sa.Column("sample_data_id", sa.Integer(), nullable=False), + sa.Column("report_id", sa.Integer(), nullable=True), + sa.Column("sample_data_type_id", sa.Integer(), nullable=True), + sa.Column("sample_id", sa.Integer(), nullable=True), + sa.Column("value", sa.String(length=1024), nullable=True), + sa.ForeignKeyConstraint(["report_id"], ["report.report_id"],), + sa.ForeignKeyConstraint( + ["sample_data_type_id"], ["sample_data_type.sample_data_type_id"], + ), + sa.ForeignKeyConstraint(["sample_id"], ["sample.sample_id"],), + sa.PrimaryKeyConstraint("sample_data_id"), + ) + # ### end Alembic commands ### + + +def downgrade(): + # ### commands auto generated by Alembic - please adjust! ### + op.drop_table("sample_data") + op.drop_table("plot_data") + op.drop_table("sample") + op.drop_table("report_meta") + op.drop_table("plot_category") + op.drop_table("user_sampletype_map") + op.drop_table("user_plotconfig_map") + op.drop_table("uploads") + op.drop_table("sample_filter") + op.drop_table("roles") + op.drop_table("report") + op.drop_table("plot_favourite") + op.drop_table("dashboard") + op.drop_table("users") + op.drop_table("sample_data_type") + op.drop_table("plot_config") + # ### end Alembic commands ### diff --git a/megaqc/model/models.py b/megaqc/model/models.py index 40a70d76..acac356e 100644 --- a/megaqc/model/models.py +++ b/megaqc/model/models.py @@ -7,7 +7,7 @@ from megaqc.extensions import db from sqlalchemy import Boolean, Column, DateTime from sqlalchemy import Enum as SqlEnum -from sqlalchemy import ForeignKey, Integer, Unicode, func +from sqlalchemy import ForeignKey, Integer, UnicodeText, func from sqlalchemy.ext.hybrid import hybrid_method, hybrid_property from sqlalchemy.orm import relationship @@ -39,7 +39,7 @@ class Report(db.Model, CRUDMixin): user_id = Column( Integer, ForeignKey("users.user_id", ondelete="SET NULL"), index=True ) - report_hash = Column(Unicode, index=True, unique=True) + report_hash = Column(UnicodeText, index=True, unique=True) created_at = Column(DateTime, nullable=False, default=dt.datetime.utcnow) uploaded_at = Column(DateTime, nullable=False, default=dt.datetime.utcnow) @@ -54,8 +54,8 @@ class Report(db.Model, CRUDMixin): class ReportMeta(db.Model, CRUDMixin): __tablename__ = "report_meta" report_meta_id = Column(Integer, primary_key=True) - report_meta_key = Column(Unicode, nullable=False) - report_meta_value = Column(Unicode, nullable=False) + report_meta_key = Column(UnicodeText, nullable=False) + report_meta_value = Column(UnicodeText, nullable=False) # If the report is deleted, remove the report metadata report_id = Column( Integer, @@ -84,10 +84,10 @@ def get_keys(cls, session): class PlotConfig(db.Model, CRUDMixin): __tablename__ = "plot_config" config_id = Column(Integer, primary_key=True) - config_type = Column(Unicode, nullable=False) - config_name = Column(Unicode, nullable=False) - config_dataset = Column(Unicode, nullable=True) - data = Column(Unicode, nullable=False) + config_type = Column(UnicodeText, nullable=False) + config_name = Column(UnicodeText, nullable=False) + config_dataset = Column(UnicodeText, nullable=True) + data = Column(UnicodeText, nullable=False) fav_users = db.relationship( "User", secondary=user_plotconfig_map, backref="favourite_plotconfigs" @@ -101,7 +101,7 @@ class PlotData(db.Model, CRUDMixin): 
config_id = Column(Integer, ForeignKey("plot_config.config_id")) plot_category_id = Column(Integer(), ForeignKey("plot_category.plot_category_id")) sample_id = Column(Integer, ForeignKey("sample.sample_id"), index=True) - data = Column(Unicode, nullable=False) + data = Column(UnicodeText, nullable=False) class PlotCategory(db.Model, CRUDMixin): @@ -109,18 +109,18 @@ class PlotCategory(db.Model, CRUDMixin): plot_category_id = Column(Integer, primary_key=True) report_id = Column(Integer, ForeignKey("report.report_id")) config_id = Column(Integer, ForeignKey("plot_config.config_id")) - category_name = Column(Unicode, nullable=True) - data = Column(Unicode, nullable=False) + category_name = Column(UnicodeText, nullable=True) + data = Column(UnicodeText, nullable=False) class PlotFavourite(db.Model, CRUDMixin): __tablename__ = "plot_favourite" plot_favourite_id = Column(Integer, primary_key=True) user_id = Column(Integer, ForeignKey("users.user_id"), index=True) - title = Column(Unicode, nullable=False) - description = Column(Unicode, nullable=True) - plot_type = Column(Unicode, nullable=False) - data = Column(Unicode, nullable=False) + title = Column(UnicodeText, nullable=False) + description = Column(UnicodeText, nullable=True) + plot_type = Column(UnicodeText, nullable=False) + data = Column(UnicodeText, nullable=False) created_at = Column(DateTime, nullable=False, default=dt.datetime.utcnow) user = relationship("User", back_populates="favourite_plots") @@ -130,8 +130,8 @@ class Dashboard(db.Model, CRUDMixin): __tablename__ = "dashboard" dashboard_id = Column(Integer, primary_key=True) user_id = Column(Integer, ForeignKey("users.user_id"), index=True) - title = Column(Unicode, nullable=False) - data = Column(Unicode, nullable=False) + title = Column(UnicodeText, nullable=False) + data = Column(UnicodeText, nullable=False) is_public = Column(Boolean, default=False, index=True) modified_at = Column(DateTime, nullable=False, default=dt.datetime.utcnow) created_at = Column(DateTime, nullable=False, default=dt.datetime.utcnow) @@ -142,11 +142,12 @@ class Dashboard(db.Model, CRUDMixin): class SampleDataType(db.Model, CRUDMixin): __tablename__ = "sample_data_type" sample_data_type_id = Column(Integer, primary_key=True) - data_id = Column(Unicode) - data_section = Column(Unicode) - data_key = Column(Unicode, nullable=False) + data_id = Column(UnicodeText) + data_section = Column(UnicodeText) + data_key = Column(UnicodeText, nullable=False) schema = Column( - Unicode, doc="A JSON Schema for validating and describing the data of this type" + UnicodeText, + doc="A JSON Schema for validating and describing the data of this type", ) @property @@ -202,7 +203,7 @@ class SampleData(db.Model, CRUDMixin): index=True, nullable=False, ) - value = Column(Unicode) + value = Column(UnicodeText) sample = relationship("Sample", back_populates="data") report = relationship("Report", back_populates="sample_data") data_type = relationship("SampleDataType", back_populates="sample_data") @@ -211,7 +212,7 @@ class SampleData(db.Model, CRUDMixin): class Sample(db.Model, CRUDMixin): __tablename__ = "sample" sample_id = Column(Integer, primary_key=True) - sample_name = Column(Unicode) + sample_name = Column(UnicodeText) report_id = Column( Integer, ForeignKey("report.report_id", ondelete="CASCADE"), @@ -226,10 +227,10 @@ class Sample(db.Model, CRUDMixin): class SampleFilter(db.Model, CRUDMixin): __tablename__ = "sample_filter" sample_filter_id = Column(Integer, primary_key=True) - sample_filter_name = Column(Unicode) - 
sample_filter_tag = Column(Unicode) + sample_filter_name = Column(UnicodeText) + sample_filter_tag = Column(UnicodeText) is_public = Column(Boolean, index=True) - sample_filter_data = Column(Unicode, nullable=False) + sample_filter_data = Column(UnicodeText, nullable=False) user_id = Column(Integer, ForeignKey("users.user_id"), index=True) user = relationship("User", back_populates="filters") @@ -242,9 +243,9 @@ def filter_json(self): class Upload(db.Model, CRUDMixin): __tablename__ = "uploads" upload_id = Column(Integer, primary_key=True) - status = Column(Unicode, index=True) - path = Column(Unicode) - message = Column(Unicode) + status = Column(UnicodeText, index=True) + path = Column(UnicodeText) + message = Column(UnicodeText) created_at = Column(DateTime, nullable=False, default=dt.datetime.utcnow) modified_at = Column(DateTime, nullable=False, default=dt.datetime.utcnow) user_id = Column(Integer, ForeignKey("users.user_id")) diff --git a/megaqc/settings.py b/megaqc/settings.py index b7017bf1..3fe4b66f 100644 --- a/megaqc/settings.py +++ b/megaqc/settings.py @@ -60,6 +60,22 @@ def update_db_uri(self): self.SQLALCHEMY_DBMS, self.DB_PATH ) self.SQLALCHEMY_DATABASE_URI_SAN = self.SQLALCHEMY_DATABASE_URI + elif self.SQLALCHEMY_HOST.startswith("/"): + # If the host starts with a /, it's probably a unix socket, which has a different URL format + self.SQLALCHEMY_DATABASE_URI = "{}://{}:{}@/{}?host={}".format( + self.SQLALCHEMY_DBMS, + self.SQLALCHEMY_USER, + self.SQLALCHEMY_PASS, + self.SQLALCHEMY_DATABASE, + self.SQLALCHEMY_HOST, + ) + self.SQLALCHEMY_DATABASE_URI_SAN = "{}://{}:{}@/{}?host={}".format( + self.SQLALCHEMY_DBMS, + self.SQLALCHEMY_USER, + "***" if self.SQLALCHEMY_PASS else "", + self.SQLALCHEMY_DATABASE, + self.SQLALCHEMY_HOST, + ) else: self.SQLALCHEMY_DATABASE_URI = "{}://{}:{}@{}/{}".format( self.SQLALCHEMY_DBMS, @@ -85,9 +101,13 @@ class ProdConfig(Config): ENV = "prod" DEBUG = False SQLALCHEMY_DBMS = "postgresql" - SQLALCHEMY_HOST = "{}:{}".format( - os.environ.get("DB_HOST", "localhost"), os.environ.get("DB_PORT", "5432") - ) + if "DB_UNIX_SOCKET" in os.environ: + # Unix sockets dont have a port + SQLALCHEMY_HOST = os.environ["DB_UNIX_SOCKET"] + else: + SQLALCHEMY_HOST = "{}:{}".format( + os.environ.get("DB_HOST", "localhost"), os.environ.get("DB_PORT", "5432") + ) SQLALCHEMY_USER = os.environ.get("DB_USER", "megaqc") SQLALCHEMY_PASS = os.environ.get("DB_PASS", "megaqcpswd") SQLALCHEMY_DATABASE = os.environ.get("DB_NAME", "megaqc") diff --git a/megaqc/user/models.py b/megaqc/user/models.py index d5d11c4c..75518d0d 100644 --- a/megaqc/user/models.py +++ b/megaqc/user/models.py @@ -22,7 +22,7 @@ ForeignKey, Integer, Table, - Unicode, + UnicodeText, ) from sqlalchemy.ext.declarative import declarative_base from sqlalchemy.ext.hybrid import hybrid_method, hybrid_property @@ -51,7 +51,7 @@ class Role(db.Model, CRUDMixin): __tablename__ = "roles" role_id = Column(Integer, primary_key=True) - name = Column(Unicode, unique=True, nullable=False) + name = Column(UnicodeText, unique=True, nullable=False) user_id = Column(Integer, ForeignKey("users.user_id")) user = relationship("User", back_populates="roles") @@ -70,16 +70,16 @@ class User(db.Model, CRUDMixin, UserMixin): __tablename__ = "users" user_id = Column(Integer, primary_key=True) - username = Column(Unicode, unique=True, nullable=False) - email = Column(Unicode, unique=True, nullable=False) - salt = Column(Unicode, nullable=True) - password = Column(Unicode, nullable=True) + username = Column(UnicodeText, unique=True, 
nullable=False) + email = Column(UnicodeText, unique=True, nullable=False) + salt = Column(UnicodeText, nullable=True) + password = Column(UnicodeText, nullable=True) created_at = Column(DateTime, nullable=False, default=dt.datetime.utcnow) - first_name = Column(Unicode, nullable=True) - last_name = Column(Unicode, nullable=True) + first_name = Column(UnicodeText, nullable=True) + last_name = Column(UnicodeText, nullable=True) active = Column(Boolean(), default=False) is_admin = Column(Boolean(), default=False) - api_token = Column(Unicode, nullable=True) + api_token = Column(UnicodeText, nullable=True) reports = relationship("Report", back_populates="user") uploads = relationship("Upload", back_populates="user") From e4526f7dd064bba4e26c041f37c9ac24f171f60a Mon Sep 17 00:00:00 2001 From: Michael Milton Date: Wed, 5 Aug 2020 14:12:08 +1000 Subject: [PATCH 2/3] Delete legacy migration code, document migrations for developers and users --- docs/dev/migrations.md | 10 +++++ docs/installation/migrations.md | 26 ++++++++++- scripts/migrations/README.md | 14 ------ scripts/migrations/resync_pg_seqs.sh | 28 ------------ scripts/migrations/varchar2text.sql | 64 ---------------------------- 5 files changed, 35 insertions(+), 107 deletions(-) create mode 100644 docs/dev/migrations.md delete mode 100644 scripts/migrations/README.md delete mode 100755 scripts/migrations/resync_pg_seqs.sh delete mode 100644 scripts/migrations/varchar2text.sql diff --git a/docs/dev/migrations.md b/docs/dev/migrations.md new file mode 100644 index 00000000..28ac3f53 --- /dev/null +++ b/docs/dev/migrations.md @@ -0,0 +1,10 @@ +# Migrations (for developers) + +You need to generate a new migration whenever the database schema (ie any models class) changes. To generate a migration: + +```bash +cd megaqc +export FLASK_APP=wsgi.py +flask db upgrade # Update to the latest migration +flask db migrate +``` diff --git a/docs/installation/migrations.md b/docs/installation/migrations.md index 2b1b9e74..dc0e052f 100644 --- a/docs/installation/migrations.md +++ b/docs/installation/migrations.md @@ -1,11 +1,20 @@ # Migrations +## Introduction + Migrations are updates to a database schema. This is relevant if, for example, you set up a MegaQC database (using `initdb`), and then a new version of MegaQC is released that needs new tables or columns. +## When to migrate + Every time a new version of MegaQC is released, you should ensure your database is -up to date. Do so using the following commands: +up to date. You don't need to run the migrations the first time you install MegaQC, because the `megaqc initdb` command +replaces the need for migrations. + +## How to migrate + +To migrate, run the following commands: ```bash cd megaqc @@ -17,3 +26,18 @@ Note: when you run these migrations, you **must** have the same environment as y to run MegaQC normally, which means the same value of `FLASK_DEBUG` and `MEGAQC_PRODUCTION` environment variables. Otherwise it will migrate the wrong database (or a non-existing one). + +## Stamping your database + +The complete migration history has only recently been added. This means that, if you were using MegaQC in the past when +migrations were not included in the repo, your database won't know what version you're currently at. + +To fix this, first you need to work out which migration your database is up to. Browse through the files in +`megaqc/migrations/versions`, starting from the oldest date (at the top of each file), until you find a change that +wasn't present in your database. 
At this point, note the `revision` value at the top of the file, (e.g. `revision = "007c354223ec"`). + +Next, run the following command, replacing `` with the revision you noted above: + +```bash +flask db stamp +``` diff --git a/scripts/migrations/README.md b/scripts/migrations/README.md deleted file mode 100644 index eeaf3840..00000000 --- a/scripts/migrations/README.md +++ /dev/null @@ -1,14 +0,0 @@ -# Migrations - -In this stage of MegaQC's development, breaking changes are likely to be made. -Scripts in here are used to make your existing installs work with the new code. - -## PR #60 - -Pull request #60 changed database schemas and how table IDs are generated. - -- `varchar2text.sql` - run using psql to update table definitions in postgres -- `resync_pg_seqs.sh` - run directly (you may have to adjust the user and database - name) to ensure the primary key sequences match the values in the table. - If you are getting errors saying new records cannot be inserted with `$table_id = 1`, - then this should sort it out. diff --git a/scripts/migrations/resync_pg_seqs.sh b/scripts/migrations/resync_pg_seqs.sh deleted file mode 100755 index 94ccdfb2..00000000 --- a/scripts/migrations/resync_pg_seqs.sh +++ /dev/null @@ -1,28 +0,0 @@ -#!/bin/bash - -# SQL from: https://wiki.postgresql.org/wiki/Fixing_Sequences -DBNAME=megaqc - -read -r -d '' SQL_GEN << EOF -SELECT 'SELECT SETVAL(' || - quote_literal(quote_ident(PGT.schemaname) || '.' || quote_ident(S.relname)) || - ', COALESCE(MAX(' ||quote_ident(C.attname)|| '), 1) ) FROM ' || - quote_ident(PGT.schemaname)|| '.'||quote_ident(T.relname)|| ';' -FROM pg_class AS S, - pg_depend AS D, - pg_class AS T, - pg_attribute AS C, - pg_tables AS PGT -WHERE S.relkind = 'S' - AND S.oid = D.objid - AND D.refobjid = T.oid - AND D.refobjid = C.attrelid - AND D.refobjsubid = C.attnum - AND T.relname = PGT.tablename -ORDER BY S.relname; -EOF - -CMDS=$(sudo -u postgres psql -Atq $DBNAME -c "$SQL_GEN") -sudo -u postgres psql -Atq $DBNAME -c "begin; $CMDS commit;" - -echo "Resync complete" diff --git a/scripts/migrations/varchar2text.sql b/scripts/migrations/varchar2text.sql deleted file mode 100644 index 21fbaceb..00000000 --- a/scripts/migrations/varchar2text.sql +++ /dev/null @@ -1,64 +0,0 @@ -begin; --- roles -alter table roles alter column name type text ; - --- users -alter table users alter column "username" type text ; -alter table users alter column "email" type text ; -alter table users alter column "salt" type text ; -alter table users alter column "password" type text ; -alter table users alter column "first_name" type text ; -alter table users alter column "last_name" type text ; -alter table users alter column "api_token" type text ; - --- report -alter table report alter column "report_hash" type text ; - --- report_meta -alter table report_meta alter column "report_meta_key" type text ; -alter table report_meta alter column "report_meta_value" type text ; - --- plot_config -alter table plot_config alter column "config_type" type text ; -alter table plot_config alter column "config_name" type text ; -alter table plot_config alter column "config_dataset" type text ; -alter table plot_config alter column "data" type text ; - --- plot_data -alter table plot_data alter column "data" type text ; - --- plot_category -alter table plot_category alter column "category_name" type text ; -alter table plot_category alter column "data" type text ; - --- plot_favourite -alter table plot_favourite alter column "title" type text ; -alter table plot_favourite alter 
column "description" type text ; -alter table plot_favourite alter column "plot_type" type text ; -alter table plot_favourite alter column "data" type text ; - --- dashboard -alter table dashboard alter column "title" type text ; -alter table dashboard alter column "data" type text ; - --- sample_data_type -alter table sample_data_type alter column "data_id" type text ; -alter table sample_data_type alter column "data_section" type text ; -alter table sample_data_type alter column "data_key" type text ; - --- sample_data -alter table sample_data alter column "value" type text ; - --- sample -alter table sample alter column "sample_name" type text ; - --- sample_filter -alter table sample_filter alter column "sample_filter_name" type text ; -alter table sample_filter alter column "sample_filter_tag" type text ; -alter table sample_filter alter column "sample_filter_data" type text ; - --- uploads -alter table uploads alter column "status" type text ; -alter table uploads alter column "path" type text ; -alter table uploads alter column "message" type text ; -commit ; From f2d31925be4ac0751f2e8ffb760a6c8ba3e85796 Mon Sep 17 00:00:00 2001 From: Michael Milton Date: Wed, 5 Aug 2020 15:12:45 +1000 Subject: [PATCH 3/3] Fix tests with stamping feature --- megaqc/database.py | 5 +++-- megaqc/extensions.py | 4 +++- tests/conftest.py | 2 +- 3 files changed, 7 insertions(+), 4 deletions(-) diff --git a/megaqc/database.py b/megaqc/database.py index 3f4e1411..1ec9d1b3 100644 --- a/megaqc/database.py +++ b/megaqc/database.py @@ -6,13 +6,14 @@ from builtins import object from copy import copy +from flask_migrate import stamp from past.builtins import basestring from sqlalchemy import create_engine, inspect from sqlalchemy.engine.url import make_url from sqlalchemy.exc import OperationalError, ProgrammingError from .compat import basestring -from .extensions import db, migrate +from .extensions import db # Alias common SQLAlchemy names Column = db.Column @@ -195,6 +196,6 @@ def init_db(url): db.metadata.create_all() # Tell alembic that we're at the latest migration, since we just created everything from scratch - migrate.stamp() + stamp() print("Initialized the database.") diff --git a/megaqc/extensions.py b/megaqc/extensions.py index 1243a27c..1fc8d138 100644 --- a/megaqc/extensions.py +++ b/megaqc/extensions.py @@ -4,6 +4,8 @@ Each extension is initialized in the app factory located in app.py. """ +from pathlib import Path + from flask_caching import Cache from flask_debugtoolbar import DebugToolbarExtension from flask_login import LoginManager @@ -21,5 +23,5 @@ cache = Cache() debug_toolbar = DebugToolbarExtension() restful = Api(prefix="/rest_api/v1") -migrate = Migrate() +migrate = Migrate(directory=str(Path(__file__).parent / "migrations")) json_api = JsonApi() diff --git a/tests/conftest.py b/tests/conftest.py index 6f7b4134..78490586 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -19,10 +19,10 @@ def app(): An application for the tests. """ config = TestConfig() - init_db(config.SQLALCHEMY_DATABASE_URI) _app = create_app(config) ctx = _app.test_request_context() ctx.push() + init_db(config.SQLALCHEMY_DATABASE_URI) yield _app
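
For reference, a minimal sketch of the connection URI built by the new unix-socket branch added to `Config.update_db_uri()` in `megaqc/settings.py` above. The user, password, and database names are the defaults from this patch; the socket path is a hypothetical `DB_UNIX_SOCKET` value, not something taken from the diff:

```python
# Mirrors the format string used when SQLALCHEMY_HOST starts with "/",
# i.e. a PostgreSQL unix socket supplied via DB_UNIX_SOCKET.
dbms = "postgresql"
user = "megaqc"                  # DB_USER default
password = "megaqcpswd"          # DB_PASS default
database = "megaqc"              # DB_NAME default
host = "/var/run/postgresql"     # hypothetical DB_UNIX_SOCKET path

# The host is left out of the authority section and passed as a query
# parameter instead, which is the unix-socket URL form.
uri = "{}://{}:{}@/{}?host={}".format(dbms, user, password, database, host)
print(uri)  # postgresql://megaqc:megaqcpswd@/megaqc?host=/var/run/postgresql
```

Passing the socket directory through `?host=` rather than in the authority section is what lets the driver treat a path beginning with `/` as a socket directory instead of a TCP hostname.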