Skip to content

Commit

Permalink
Add tests for migration
Browse files Browse the repository at this point in the history
  • Loading branch information
soapy1 committed Dec 16, 2024
1 parent e50082f commit ebe9c21
Show file tree
Hide file tree
Showing 6 changed files with 229 additions and 54 deletions.
3 changes: 3 additions & 0 deletions .gitignore
Original file line number Diff line number Diff line change
Expand Up @@ -62,3 +62,6 @@ yarn-error.log*
conda-store.sqlite

*.lockb

# generated test assets
conda-store-server/tests/alembic.ini
Original file line number Diff line number Diff line change
Expand Up @@ -10,7 +10,7 @@
"""
from alembic import op
from sqlalchemy import Column, INTEGER, String, ForeignKey, table, select, inspect
from sqlalchemy import Column, INTEGER, String, ForeignKey, table, select


# revision identifiers, used by Alembic.
Expand All @@ -19,13 +19,17 @@
branch_labels = None
depends_on = None


# This function will go through all the conda_package_build entries and ensure
# that the right package_id is associated with each one.
# Due to the issue fixed in https://github.com/conda-incubator/conda-store/pull/961
# many conda_package_build entries have the wrong package entry (but the right channel).
# Because the packages are duplicated, we cannot recreate the _conda_package_build_uc
# constraint without the channel_id.
# So, this function will go through each conda_package_build and re-associate it with the
# correct conda_package based on the channel id.
def fix_misrepresented_packages(conn):
# conda_packages is a hash of channel-id_name_version to conda_package id
conda_packages = {}

# dummy tables to run queries against
conda_package_build_table = table(
"conda_package_build",
Column("id", INTEGER),
Expand Down Expand Up @@ -88,36 +92,22 @@ def get_conda_package_id(conn, channel_id, name, version):
conn.commit()

def upgrade():
    """Drop conda_package_build.channel_id and rebuild _conda_package_build_uc without it.

    NOTE(review): this span was reconstructed from interleaved diff residue
    (removed and added lines were mixed together in the source); the old
    ``inspect``-based column check and the non-sqlite guard were removed in
    this commit, as confirmed by ``inspect`` being dropped from the imports.
    """
    bind = op.get_bind()

    # Re-associate each conda_package_build with the conda_package that
    # matches its channel, so the uniqueness constraint can be recreated
    # without the channel_id column (see module-level comment).
    fix_misrepresented_packages(bind)

    # batch_alter_table is used because sqlite does not support altering
    # tables in place.
    with op.batch_alter_table("conda_package_build") as batch_op:
        # remove channel column from constraints
        batch_op.drop_constraint(
            "_conda_package_build_uc",
        )

        # re-add the constraint without the channel column
        batch_op.create_unique_constraint(
            "_conda_package_build_uc",
            [
                "package_id",
                "subdir",
                "build",
                "build_number",
                "sha256",
            ],
        )

        # remove channel column
        batch_op.drop_column(
            "channel_id",
        )


def downgrade():
    """Re-add conda_package_build.channel_id and restore the original constraint.

    NOTE(review): this span was reconstructed from interleaved diff residue —
    the old ``op.*``/``target_table`` calls and the new ``batch_op.*`` calls
    appeared mixed together; this is the post-commit (batch_alter_table) form.
    """
    # batch_alter_table keeps this working on sqlite, which cannot alter
    # tables in place.
    with op.batch_alter_table("conda_package_build") as batch_op:
        # remove channel column from constraints
        batch_op.drop_constraint(
            constraint_name="_conda_package_build_uc",
        )

        # add channel column
        batch_op.add_column(
            Column("channel_id", INTEGER)
        )
        # restore the foreign key to conda_channel
        batch_op.create_foreign_key("fk_channel_id", "conda_channel", ["channel_id"], ["id"])

        # re-add the constraint with the channel column
        batch_op.create_unique_constraint(
            constraint_name="_conda_package_build_uc",
            columns=[
                "channel_id",
                "package_id",
                "subdir",
                "build",
                "build_number",
                "sha256",
            ],
        )
3 changes: 3 additions & 0 deletions conda-store-server/tests/_internal/alembic/__init__.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,3 @@
# Copyright (c) conda-store development team. All rights reserved.
# Use of this source code is governed by a BSD-style
# license that can be found in the LICENSE file.
Original file line number Diff line number Diff line change
@@ -0,0 +1,3 @@
# Copyright (c) conda-store development team. All rights reserved.
# Use of this source code is governed by a BSD-style
# license that can be found in the LICENSE file.
Original file line number Diff line number Diff line change
@@ -0,0 +1,167 @@
# Copyright (c) conda-store development team. All rights reserved.
# Use of this source code is governed by a BSD-style
# license that can be found in the LICENSE file.

from sqlalchemy import insert, select, table, Column, INTEGER, String, ForeignKey, text

from conda_store_server import api
from conda_store_server._internal import orm


def setup_bad_data_db(conda_store):
    """Seed the database with deliberately inconsistent package data.

    Creates:
      * 2 channels
      * 2 conda packages (one per channel; channel_id == package id)
      * 5 conda package builds

    Four of the builds are then given a ``channel_id`` that disagrees with
    their package's channel, so a later migration can be checked: after it
    runs, each build's ``package_id`` should equal what its ``channel_id``
    was before the migration.
    """
    with conda_store.session_factory() as db:
        # two test channels
        for channel_name in ("test-channel-1", "test-channel-2"):
            api.create_conda_channel(db, channel_name)
        db.commit()

        # two sample conda_package rows; channel_id deliberately mirrors the
        # package id so post-migration package ids are easy to predict
        for pkg_id in (1, 2):
            db.add(
                orm.CondaPackage(
                    id=pkg_id,
                    channel_id=pkg_id,
                    name="test-package-1",
                    version="1.0.0",
                )
            )
        db.commit()

        # five conda_package_build rows: (id, build string, package_id, sha256)
        build_rows = [
            (1, "py310h06a4308_0", 1, "one"),
            (2, "py311h06a4308_0", 1, "two"),
            (3, "py38h06a4308_0", 1, "three"),
            (4, "py39h06a4308_0", 2, "four"),
            (5, "py310h06a4308_0", 2, "five"),
        ]
        # columns identical across all builds
        shared_values = {
            "build_number": 0,
            "subdir": "linux-64",
            "depends": "",
            "md5": "",
            "timestamp": 0,
            "constrains": "",
            "size": 0,
        }
        for build_id, build_str, pkg_id, digest in build_rows:
            db.add(
                orm.CondaPackageBuild(
                    id=build_id,
                    build=build_str,
                    package_id=pkg_id,
                    sha256=digest,
                    **shared_values,
                )
            )
        db.commit()

        # force in mismatched channel data: after the migration each build's
        # package_id should equal the channel_id set here
        for build_id, wrong_channel in ((1, 2), (2, 1), (3, 1), (4, 2)):
            db.execute(
                text(
                    f"UPDATE conda_package_build SET channel_id={wrong_channel} WHERE id={build_id}"
                )
            )

        # build 5 intentionally keeps no channel_id as a test case:
        # its package_id should be unchanged (2) after the migration
        db.commit()

def test_remove_conda_package_build_channel_basic(conda_store, alembic_config, alembic_engine, alembic_runner):
    """Round-trip this migration: upgrade to it, downgrade once, upgrade again."""
    # bring the schema up to the revision under test
    alembic_runner.migrate_up_to('89637f546129')

    # step back down one revision
    alembic_runner.migrate_down_one()

    # and forward again
    alembic_runner.migrate_up_one()

def test_remove_conda_package_build_bad_data(conda_store, alembic_config, alembic_engine, alembic_runner):
    """Check the migration re-associates builds whose channel/package disagree.

    Migrates down past the target revision, seeds the database with builds
    whose ``channel_id`` does not match their package's channel (via
    ``setup_bad_data_db``), runs the migration, and verifies every build
    ends up attached to the package matching its original channel.
    """
    # migrate all the way to the target revision
    alembic_runner.migrate_up_to('89637f546129')

    # downgrade so the pre-migration schema (with channel_id) exists
    alembic_runner.migrate_down_one()

    # seed db with broken data
    setup_bad_data_db(conda_store)

    # run the migration under test
    alembic_runner.migrate_up_one()

    # build id -> expected package_id after the migration; the expected value
    # is the channel_id that setup_bad_data_db forced onto each build
    # (build 5 had no channel_id set, so its package_id stays 2)
    expected_package_ids = {1: 2, 2: 1, 3: 1, 4: 2, 5: 2}

    # ensure all package builds have the right package associated
    with conda_store.session_factory() as db:
        for build_id, expected in expected_package_ids.items():
            build = (
                db.query(orm.CondaPackageBuild)
                .filter(orm.CondaPackageBuild.id == build_id)
                .first()
            )
            assert build.package_id == expected
13 changes: 13 additions & 0 deletions conda-store-server/tests/conftest.py
Original file line number Diff line number Diff line change
Expand Up @@ -13,6 +13,7 @@
import yaml
from fastapi.testclient import TestClient
from sqlalchemy.orm import Session
from pytest_alembic.config import Config

from conda_store_server import api, app, storage

Expand Down Expand Up @@ -290,6 +291,18 @@ def plugin_manager():
return pm


@pytest.fixture
def alembic_config(conda_store):
    """Write an alembic.ini pointing at the test database and return its location.

    pytest-alembic consumes the returned ``{"file": ...}`` mapping as its
    config. The generated file lands next to the tests (it is gitignored).
    """
    # ALEMBIC_DIR was previously imported here but never used — removed.
    from conda_store_server._internal.dbutil import write_alembic_ini

    ini_file = pathlib.Path(__file__).parent / "alembic.ini"
    write_alembic_ini(ini_file, conda_store.database_url)
    return {"file": ini_file}

@pytest.fixture
def alembic_engine(db):
    """Engine fixture for pytest-alembic.

    Returns the project's ``db`` fixture unchanged, so migrations run
    against the same database the tests inspect.
    """
    return db


def _seed_conda_store(
db: Session,
conda_store,
Expand Down

0 comments on commit ebe9c21

Please sign in to comment.