diff --git a/CHANGELOG.md b/CHANGELOG.md index 26bb20a7c..74f21c916 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -6,6 +6,8 @@ Dependencies are updated to the latest available version during each release. Th Find changes for the upcoming release in the project's [changelog.d directory](https://github.com/lsst-sqre/gafaelfawr/tree/main/changelog.d/). +Gafaelfawr does not support direct upgrades from versions older than 10.0.0. When upgrading from an older version, first upgrade to a version of Gafaelfawr between 10.0.0 and 12.1.0, inclusive, and complete the schema migration. Then you can safely upgrade to the latest version. + diff --git a/alembic/env.py b/alembic/env.py index dfe9fafe2..7fa63b718 100644 --- a/alembic/env.py +++ b/alembic/env.py @@ -5,7 +5,7 @@ from safir.logging import configure_alembic_logging from gafaelfawr.dependencies.config import config_dependency -from gafaelfawr.schema import Base +from gafaelfawr.schema import SchemaBase # Load the Gafaelfawr configuration, which as a side effect also configures # logging using structlog. @@ -14,10 +14,10 @@ # Run the migrations. configure_alembic_logging() if context.is_offline_mode(): - run_migrations_offline(Base.metadata, config.database_url) + run_migrations_offline(SchemaBase.metadata, config.database_url) else: run_migrations_online( - Base.metadata, + SchemaBase.metadata, config.database_url, config.database_password, ) diff --git a/alembic/versions/20240209_2309_5c28ed7092c2_initial_schema.py b/alembic/versions/20240209_2309_5c28ed7092c2_initial_schema.py new file mode 100644 index 000000000..f0a4d7490 --- /dev/null +++ b/alembic/versions/20240209_2309_5c28ed7092c2_initial_schema.py @@ -0,0 +1,231 @@ +"""Initial schema. + +Revision ID: 5c28ed7092c2 +Revises: +Create Date: 2024-11-19 22:40:16.309715+00:00 +""" + +from collections.abc import Sequence + +import sqlalchemy as sa +from alembic import op +from sqlalchemy.dialects import postgresql + +# revision identifiers, used by Alembic. 
+revision: str = "5c28ed7092c2" +down_revision: str | None = None +branch_labels: str | Sequence[str] | None = None +depends_on: str | Sequence[str] | None = None + + +def upgrade() -> None: + # ### commands auto generated by Alembic - please adjust! ### + op.create_table( + "admin", + sa.Column("username", sa.String(length=64), nullable=False), + sa.PrimaryKeyConstraint("username"), + ) + op.create_table( + "admin_history", + sa.Column("id", sa.Integer(), nullable=False), + sa.Column("username", sa.String(length=64), nullable=False), + sa.Column( + "action", + sa.Enum("add", "remove", name="adminchange"), + nullable=False, + ), + sa.Column("actor", sa.String(length=64), nullable=False), + sa.Column( + "ip_address", + sa.String(length=64).with_variant(postgresql.INET(), "postgresql"), + nullable=False, + ), + sa.Column("event_time", sa.DateTime(), nullable=False), + sa.PrimaryKeyConstraint("id"), + ) + op.create_index( + "admin_history_by_time", + "admin_history", + ["event_time", "id"], + unique=False, + ) + op.create_table( + "token", + sa.Column( + "token", sa.String(length=64, collation="C"), nullable=False + ), + sa.Column("username", sa.String(length=64), nullable=False), + sa.Column( + "token_type", + sa.Enum( + "session", + "user", + "notebook", + "internal", + "service", + name="tokentype", + ), + nullable=False, + ), + sa.Column("token_name", sa.String(length=64), nullable=True), + sa.Column("scopes", sa.String(length=512), nullable=False), + sa.Column("service", sa.String(length=64), nullable=True), + sa.Column("created", sa.DateTime(), nullable=False), + sa.Column("last_used", sa.DateTime(), nullable=True), + sa.Column("expires", sa.DateTime(), nullable=True), + sa.PrimaryKeyConstraint("token"), + sa.UniqueConstraint("username", "token_name"), + ) + op.create_index( + "token_by_username", "token", ["username", "token_type"], unique=False + ) + op.create_table( + "token_auth_history", + sa.Column("id", sa.Integer(), nullable=False), + sa.Column("token", 
sa.String(length=64), nullable=False), + sa.Column("username", sa.String(length=64), nullable=False), + sa.Column( + "token_type", + sa.Enum( + "session", + "user", + "notebook", + "internal", + "service", + name="tokentype", + ), + nullable=False, + ), + sa.Column("token_name", sa.String(length=64), nullable=True), + sa.Column("parent", sa.String(length=64), nullable=True), + sa.Column("scopes", sa.String(length=512), nullable=True), + sa.Column("service", sa.String(length=64), nullable=True), + sa.Column( + "ip_address", + sa.String(length=64).with_variant(postgresql.INET(), "postgresql"), + nullable=True, + ), + sa.Column("event_time", sa.DateTime(), nullable=False), + sa.PrimaryKeyConstraint("id"), + ) + op.create_index( + "token_auth_history_by_time", + "token_auth_history", + ["event_time", "id"], + unique=False, + ) + op.create_index( + "token_auth_history_by_token", + "token_auth_history", + ["token", "event_time", "id"], + unique=False, + ) + op.create_index( + "token_auth_history_by_username", + "token_auth_history", + ["username", "event_time", "id"], + unique=False, + ) + op.create_table( + "token_change_history", + sa.Column("id", sa.Integer(), nullable=False), + sa.Column("token", sa.String(length=64), nullable=False), + sa.Column("username", sa.String(length=64), nullable=False), + sa.Column( + "token_type", + sa.Enum( + "session", + "user", + "notebook", + "internal", + "service", + name="tokentype", + ), + nullable=False, + ), + sa.Column("token_name", sa.String(length=64), nullable=True), + sa.Column("parent", sa.String(length=64), nullable=True), + sa.Column("scopes", sa.String(length=512), nullable=False), + sa.Column("service", sa.String(length=64), nullable=True), + sa.Column("expires", sa.DateTime(), nullable=True), + sa.Column("actor", sa.String(length=64), nullable=True), + sa.Column( + "action", + sa.Enum("create", "revoke", "expire", "edit", name="tokenchange"), + nullable=False, + ), + sa.Column("old_token_name", sa.String(length=64), 
nullable=True), + sa.Column("old_scopes", sa.String(length=512), nullable=True), + sa.Column("old_expires", sa.DateTime(), nullable=True), + sa.Column( + "ip_address", + sa.String(length=64).with_variant(postgresql.INET(), "postgresql"), + nullable=True, + ), + sa.Column("event_time", sa.DateTime(), nullable=False), + sa.PrimaryKeyConstraint("id"), + ) + op.create_index( + "token_change_history_by_time", + "token_change_history", + ["event_time", "id"], + unique=False, + ) + op.create_index( + "token_change_history_by_token", + "token_change_history", + ["token", "event_time", "id"], + unique=False, + ) + op.create_index( + "token_change_history_by_username", + "token_change_history", + ["username", "event_time", "id"], + unique=False, + ) + op.create_table( + "subtoken", + sa.Column("child", sa.String(length=64), nullable=False), + sa.Column("parent", sa.String(length=64), nullable=True), + sa.ForeignKeyConstraint( + ["child"], ["token.token"], ondelete="CASCADE" + ), + sa.ForeignKeyConstraint( + ["parent"], ["token.token"], ondelete="SET NULL" + ), + sa.PrimaryKeyConstraint("child"), + ) + op.create_index("subtoken_by_parent", "subtoken", ["parent"], unique=False) + # ### end Alembic commands ### + + +def downgrade() -> None: + # ### commands auto generated by Alembic - please adjust! 
### + op.drop_index("subtoken_by_parent", table_name="subtoken") + op.drop_table("subtoken") + op.drop_index( + "token_change_history_by_username", table_name="token_change_history" + ) + op.drop_index( + "token_change_history_by_token", table_name="token_change_history" + ) + op.drop_index( + "token_change_history_by_time", table_name="token_change_history" + ) + op.drop_table("token_change_history") + op.drop_index( + "token_auth_history_by_username", table_name="token_auth_history" + ) + op.drop_index( + "token_auth_history_by_token", table_name="token_auth_history" + ) + op.drop_index( + "token_auth_history_by_time", table_name="token_auth_history" + ) + op.drop_table("token_auth_history") + op.drop_index("token_by_username", table_name="token") + op.drop_table("token") + op.drop_index("admin_history_by_time", table_name="admin_history") + op.drop_table("admin_history") + op.drop_table("admin") + # ### end Alembic commands ### diff --git a/alembic/versions/20240209_2310_2feb306dd1ee_add_oidc_token_type.py b/alembic/versions/20240209_2310_2feb306dd1ee_add_oidc_token_type.py index 72c5cd1b3..e601c152b 100644 --- a/alembic/versions/20240209_2310_2feb306dd1ee_add_oidc_token_type.py +++ b/alembic/versions/20240209_2310_2feb306dd1ee_add_oidc_token_type.py @@ -1,7 +1,7 @@ """Add oidc token type. Revision ID: 2feb306dd1ee -Revises: d2a7f04de565 +Revises: 5c28ed7092c2 Create Date: 2024-02-09 23:10:43.229238+00:00 """ @@ -11,7 +11,7 @@ # revision identifiers, used by Alembic. 
revision: str = "2feb306dd1ee" -down_revision: str | None = None +down_revision: str | None = "5c28ed7092c2" branch_labels: str | Sequence[str] | None = None depends_on: str | Sequence[str] | None = None diff --git a/changelog.d/20241119_153825_rra_DM_47646.md b/changelog.d/20241119_153825_rra_DM_47646.md new file mode 100644 index 000000000..be969211c --- /dev/null +++ b/changelog.d/20241119_153825_rra_DM_47646.md @@ -0,0 +1,3 @@ +### Other changes + +- Gafaelfawr no longer supports direct upgrades from versions older than 10.0.0. When upgrading from an older version, first upgrade to a version of Gafaelfawr between 10.0.0 and 12.1.0, inclusive, and complete the database schema migration, and then upgrade to the latest version. diff --git a/docs/dev/development.rst b/docs/dev/development.rst index f159be3a7..8acf274c1 100644 --- a/docs/dev/development.rst +++ b/docs/dev/development.rst @@ -229,69 +229,7 @@ Gafaelfawr uses Alembic_ to manage and perform database migrations. Alembic is invoked automatically when the Gafaelfawr server is started. Whenever the database schema changes, you will need to create an Alembic migration. -To do this, take the following steps. -You must have Docker running locally on your system and have the :command:`docker-compose` command installed. - -#. Start a PostgreSQL server into which the current database schema can be created. - - .. prompt:: bash - - docker-compose -f alembic/docker-compose.yaml up - -#. Install the *current* database schema into that PostgreSQL server. - This must be done with a Gafaelfawr working tree that does not contain any changes to the database schema. - If you have already made changes that would change the database schema, use :command:`git stash`, switch to another branch, or otherwise temporarily revert those changes before running this command. - - .. prompt:: bash - - tox run -e gafaelfawr -- init - -#. Apply the code changes that will change the database schema. - -#. Ask Alembic to autogenerate a database migration to the new schema. - - ..
prompt:: bash - - tox run -e alembic -- revision --autogenerate -m "" - - Replace ```` with a short human-readable summary of the change, ending in a period. - This will create a new file in :file:`alembic/versions`. - -#. Edit the created file in :file:`alembic/versions` and adjust it as necessary. - See the `Alembic documentation `__ for details about what Alembic can and cannot autodetect. - - One common change that Alembic cannot autodetect is changes to the valid values of enum types. - You will need to add Alembic code to the ``upgrade`` function of the migration such as: - - .. code-block:: python - - op.execute("ALTER TYPE tokentype ADD VALUE 'oidc' IF NOT EXISTS") - - You may want to connect to the PostgreSQL database with the :command:`psql` command-line tool so that you can examine the schema to understand what the migration needs to do. - For example, you can see a description of a table with :samp:`\d {table}`, which will tell you the name of an enum type that you may need to modify. - To do this, run: - - .. prompt:: bash - - psql - - where ```` is the URI to the local PostgreSQL database, which you can find in the ``databaseUrl`` configuration parameter in :file:`alembic/gafaelfawr.yaml`. - -#. Stop the running PostgreSQL container. - - .. prompt:: bash - - docker-compose -f alembic/docker-compose.yaml down - -#. Generate and save the new schema: - - .. prompt:: bash - - tox run -e gafaelfawr -- generate-schema -o tests/data/schemas/ - - Replace ```` with the version of the Gafaelfawr release that will contain this schema version. - Then update the version in :file:`tests/support/constants.py` to match that new schema version. - This will update the test that ensures that there are no changes to the Gafaelfawr schema definition that would affect the SQL schema. +To do this, follow the `Safir schema migration documentation `__. 
Building documentation ====================== diff --git a/src/gafaelfawr/cli.py b/src/gafaelfawr/cli.py index d5c235d0b..86fcb84c0 100644 --- a/src/gafaelfawr/cli.py +++ b/src/gafaelfawr/cli.py @@ -33,7 +33,7 @@ from .keypair import RSAKeyPair from .main import create_openapi from .models.token import Token -from .schema import Base +from .schema import SchemaBase __all__ = [ "audit", @@ -134,7 +134,7 @@ async def delete_all_data(*, config_path: Path | None) -> None: engine = create_database_engine( config.database_url, config.database_password ) - tables = (t.name for t in Base.metadata.sorted_tables) + tables = (t.name for t in SchemaBase.metadata.sorted_tables) async with Factory.standalone(config, engine) as factory: admin_service = factory.create_admin_service() async with factory.session.begin(): diff --git a/src/gafaelfawr/database.py b/src/gafaelfawr/database.py index 76cf27af2..a8a1f01a0 100644 --- a/src/gafaelfawr/database.py +++ b/src/gafaelfawr/database.py @@ -13,7 +13,7 @@ from .config import Config from .factory import Factory -from .schema import Base, Token +from .schema import SchemaBase, Token __all__ = [ "initialize_gafaelfawr_database", @@ -36,12 +36,16 @@ def dump(sql: Any, *args: Any, **kwargs: Any) -> None: result += str(sql.compile(dialect=engine.dialect)) + ";\n" engine = create_mock_engine(str(config.database_url), dump) - Base.metadata.create_all(engine, checkfirst=False) + SchemaBase.metadata.create_all(engine, checkfirst=False) return result async def initialize_gafaelfawr_database( - config: Config, logger: BoundLogger, engine: AsyncEngine | None = None + config: Config, + logger: BoundLogger, + engine: AsyncEngine | None = None, + *, + reset: bool = False, ) -> None: """Initialize the database. @@ -59,12 +63,18 @@ async def initialize_gafaelfawr_database( engine If given, database engine to use, which avoids the need to create another one. + reset + Whether to reset the database. 
""" + engine_created = False if not engine: engine = create_database_engine( config.database_url, config.database_password ) - await initialize_database(engine, logger, schema=Base.metadata) + engine_created = True + await initialize_database( + engine, logger, schema=SchemaBase.metadata, reset=reset + ) async with Factory.standalone(config, engine) as factory: admin_service = factory.create_admin_service() logger.debug("Adding initial administrators") @@ -74,7 +84,8 @@ async def initialize_gafaelfawr_database( firestore = factory.create_firestore_storage() logger.debug("Initializing Firestore") await firestore.initialize() - await engine.dispose() + if engine_created: + await engine.dispose() async def is_database_initialized( @@ -99,10 +110,12 @@ async def is_database_initialized( exist, `False` otherwise. This may misdetect partial schemas that contain some tables and not others or that are missing indices. """ + engine_created = False if not engine: engine = create_database_engine( config.database_url, config.database_password ) + engine_created = True statement = select(Token).limit(1) try: for _ in range(5): @@ -124,3 +137,6 @@ async def is_database_initialized( except ProgrammingError: logger.info("Database appears not to be initialized") return False + finally: + if engine_created: + await engine.dispose() diff --git a/src/gafaelfawr/schema/__init__.py b/src/gafaelfawr/schema/__init__.py index 0d364a0f0..222b14eee 100644 --- a/src/gafaelfawr/schema/__init__.py +++ b/src/gafaelfawr/schema/__init__.py @@ -4,7 +4,7 @@ from .admin import Admin from .admin_history import AdminHistory -from .base import Base +from .base import SchemaBase from .subtoken import Subtoken from .token import Token from .token_auth_history import TokenAuthHistory @@ -13,7 +13,7 @@ __all__ = [ "Admin", "AdminHistory", - "Base", + "SchemaBase", "Subtoken", "Token", "TokenAuthHistory", diff --git a/src/gafaelfawr/schema/admin.py b/src/gafaelfawr/schema/admin.py index 97b981969..a2aefb7c9 
100644 --- a/src/gafaelfawr/schema/admin.py +++ b/src/gafaelfawr/schema/admin.py @@ -9,12 +9,12 @@ from sqlalchemy import String from sqlalchemy.orm import Mapped, mapped_column -from .base import Base +from .base import SchemaBase __all__ = ["Admin"] -class Admin(Base): +class Admin(SchemaBase): """List of users with admin privileges.""" __tablename__ = "admin" diff --git a/src/gafaelfawr/schema/admin_history.py b/src/gafaelfawr/schema/admin_history.py index cd939f012..b364d328e 100644 --- a/src/gafaelfawr/schema/admin_history.py +++ b/src/gafaelfawr/schema/admin_history.py @@ -13,12 +13,12 @@ from sqlalchemy.orm import Mapped, mapped_column from ..models.history import AdminChange -from .base import Base +from .base import SchemaBase __all__ = ["AdminHistory"] -class AdminHistory(Base): +class AdminHistory(SchemaBase): """History of changes to the list of admins.""" __tablename__ = "admin_history" diff --git a/src/gafaelfawr/schema/base.py b/src/gafaelfawr/schema/base.py index 54adf9e5f..fd8d83a69 100644 --- a/src/gafaelfawr/schema/base.py +++ b/src/gafaelfawr/schema/base.py @@ -4,8 +4,8 @@ from sqlalchemy.orm import DeclarativeBase -__all__ = ["Base"] +__all__ = ["SchemaBase"] -class Base(DeclarativeBase): +class SchemaBase(DeclarativeBase): """Declarative base for the Gafaelfawr database schema.""" diff --git a/src/gafaelfawr/schema/subtoken.py b/src/gafaelfawr/schema/subtoken.py index 24ee49d77..d1dcf7c44 100644 --- a/src/gafaelfawr/schema/subtoken.py +++ b/src/gafaelfawr/schema/subtoken.py @@ -5,12 +5,12 @@ from sqlalchemy import ForeignKey, Index, String from sqlalchemy.orm import Mapped, mapped_column -from .base import Base +from .base import SchemaBase __all__ = ["Subtoken"] -class Subtoken(Base): +class Subtoken(SchemaBase): """Records parent/child relationships for tokens.""" __tablename__ = "subtoken" diff --git a/src/gafaelfawr/schema/token.py b/src/gafaelfawr/schema/token.py index d47590d7f..657848546 100644 --- a/src/gafaelfawr/schema/token.py +++ 
b/src/gafaelfawr/schema/token.py @@ -8,12 +8,12 @@ from sqlalchemy.orm import Mapped, mapped_column from ..models.token import TokenType -from .base import Base +from .base import SchemaBase __all__ = ["Token"] -class Token(Base): +class Token(SchemaBase): """Metadata for a token.""" __tablename__ = "token" diff --git a/src/gafaelfawr/schema/token_auth_history.py b/src/gafaelfawr/schema/token_auth_history.py index e6f82f542..c5e16165b 100644 --- a/src/gafaelfawr/schema/token_auth_history.py +++ b/src/gafaelfawr/schema/token_auth_history.py @@ -9,12 +9,12 @@ from sqlalchemy.orm import Mapped, mapped_column from ..models.token import TokenType -from .base import Base +from .base import SchemaBase __all__ = ["TokenAuthHistory"] -class TokenAuthHistory(Base): +class TokenAuthHistory(SchemaBase): """Authentication history by token.""" __tablename__ = "token_auth_history" diff --git a/src/gafaelfawr/schema/token_change_history.py b/src/gafaelfawr/schema/token_change_history.py index 0b52acb34..f3844b8bd 100644 --- a/src/gafaelfawr/schema/token_change_history.py +++ b/src/gafaelfawr/schema/token_change_history.py @@ -10,12 +10,12 @@ from ..models.history import TokenChange from ..models.token import TokenType -from .base import Base +from .base import SchemaBase __all__ = ["TokenChangeHistory"] -class TokenChangeHistory(Base): +class TokenChangeHistory(SchemaBase): """History of changes to tokens.""" __tablename__ = "token_change_history" diff --git a/tests/cli_test.py b/tests/cli_test.py index 6d2d3d48f..3c15f5ca9 100644 --- a/tests/cli_test.py +++ b/tests/cli_test.py @@ -10,6 +10,8 @@ import asyncio import json +import os +import subprocess from datetime import timedelta from pathlib import Path @@ -32,7 +34,7 @@ from gafaelfawr.models.history import TokenChange, TokenChangeHistoryEntry from gafaelfawr.models.oidc import OIDCAuthorizationCode, OIDCScope from gafaelfawr.models.token import Token, TokenData, TokenType, TokenUserInfo -from gafaelfawr.schema import Base 
+from gafaelfawr.schema import SchemaBase from gafaelfawr.storage.history import TokenChangeHistoryStore from gafaelfawr.storage.token import TokenDatabaseStore @@ -120,7 +122,7 @@ def test_delete_all_data( logger = structlog.get_logger("gafaelfawr") async def setup() -> OIDCAuthorizationCode: - await initialize_database(engine, logger, schema=Base.metadata) + await initialize_database(engine, logger, schema=SchemaBase.metadata) async with Factory.standalone(config, engine) as factory: token_service = factory.create_token_service() user_info = TokenUserInfo(username="some-user") @@ -368,8 +370,14 @@ def test_validate_schema( assert "Database has not been initialized" in result.output # Initialize the database from an old schema. This was the database schema - # before Alembic was introduced, so it should run all migrations. + # before Alembic was introduced, so it has to be stamped with the version + # of the Alembic database migration that includes the original schema. event_loop.run_until_complete(create_old_database(config, engine, "9.6.1")) + env = { + **os.environ, + "GAFAELFAWR_CONFIG_PATH": str(config_dependency.config_path), + } + subprocess.run(["alembic", "stamp", "5c28ed7092c2"], check=True, env=env) # Validating should fail with an appropriate error message. 
result = runner.invoke(main, ["validate-schema"], catch_exceptions=False) diff --git a/tests/conftest.py b/tests/conftest.py index 241209542..cb7edbddb 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -14,23 +14,19 @@ from cryptography.fernet import Fernet from fastapi import FastAPI from httpx import ASGITransport, AsyncClient -from safir.database import ( - create_database_engine, - initialize_database, - stamp_database_async, -) +from safir.database import create_database_engine, stamp_database_async from safir.testing.slack import MockSlackWebhook, mock_slack_webhook from seleniumwire import webdriver from sqlalchemy.ext.asyncio import AsyncEngine from gafaelfawr.config import Config from gafaelfawr.constants import COOKIE_NAME +from gafaelfawr.database import initialize_gafaelfawr_database from gafaelfawr.factory import Factory from gafaelfawr.keypair import RSAKeyPair from gafaelfawr.main import create_app from gafaelfawr.models.state import State from gafaelfawr.models.token import Token, TokenType -from gafaelfawr.schema import Base from .pages.tokens import TokensPage from .support.config import config_path, configure @@ -135,11 +131,8 @@ async def empty_database(engine: AsyncEngine, config: Config) -> None: required. 
""" logger = structlog.get_logger(__name__) - await initialize_database(engine, logger, schema=Base.metadata, reset=True) + await initialize_gafaelfawr_database(config, logger, engine, reset=True) async with Factory.standalone(config, engine) as factory: - admin_service = factory.create_admin_service() - async with factory.session.begin(): - await admin_service.add_initial_admins(config.initial_admins) await factory._context.redis.flushdb() await stamp_database_async(engine) diff --git a/tests/data/schemas/10.0.0 b/tests/data/schemas/10.0.0 deleted file mode 100644 index d2906d395..000000000 --- a/tests/data/schemas/10.0.0 +++ /dev/null @@ -1,95 +0,0 @@ -CREATE TYPE adminchange AS ENUM ('add', 'remove'); -CREATE TYPE tokentype AS ENUM ('session', 'user', 'notebook', 'internal', 'service', 'oidc'); -CREATE TYPE tokenchange AS ENUM ('create', 'revoke', 'expire', 'edit'); - -CREATE TABLE admin ( - username VARCHAR(64) NOT NULL, - PRIMARY KEY (username) -) - -; - -CREATE TABLE admin_history ( - id SERIAL NOT NULL, - username VARCHAR(64) NOT NULL, - action adminchange NOT NULL, - actor VARCHAR(64) NOT NULL, - ip_address INET NOT NULL, - event_time TIMESTAMP WITHOUT TIME ZONE NOT NULL, - PRIMARY KEY (id) -) - -; -CREATE INDEX admin_history_by_time ON admin_history (event_time, id); - -CREATE TABLE token ( - token VARCHAR(64) COLLATE "C" NOT NULL, - username VARCHAR(64) NOT NULL, - token_type tokentype NOT NULL, - token_name VARCHAR(64), - scopes VARCHAR(512) NOT NULL, - service VARCHAR(64), - created TIMESTAMP WITHOUT TIME ZONE NOT NULL, - last_used TIMESTAMP WITHOUT TIME ZONE, - expires TIMESTAMP WITHOUT TIME ZONE, - PRIMARY KEY (token), - UNIQUE (username, token_name) -) - -; -CREATE INDEX token_by_username ON token (username, token_type); - -CREATE TABLE token_auth_history ( - id SERIAL NOT NULL, - token VARCHAR(64) NOT NULL, - username VARCHAR(64) NOT NULL, - token_type tokentype NOT NULL, - token_name VARCHAR(64), - parent VARCHAR(64), - scopes VARCHAR(512), - 
service VARCHAR(64), - ip_address INET, - event_time TIMESTAMP WITHOUT TIME ZONE NOT NULL, - PRIMARY KEY (id) -) - -; -CREATE INDEX token_auth_history_by_username ON token_auth_history (username, event_time, id); -CREATE INDEX token_auth_history_by_time ON token_auth_history (event_time, id); -CREATE INDEX token_auth_history_by_token ON token_auth_history (token, event_time, id); - -CREATE TABLE token_change_history ( - id SERIAL NOT NULL, - token VARCHAR(64) NOT NULL, - username VARCHAR(64) NOT NULL, - token_type tokentype NOT NULL, - token_name VARCHAR(64), - parent VARCHAR(64), - scopes VARCHAR(512) NOT NULL, - service VARCHAR(64), - expires TIMESTAMP WITHOUT TIME ZONE, - actor VARCHAR(64), - action tokenchange NOT NULL, - old_token_name VARCHAR(64), - old_scopes VARCHAR(512), - old_expires TIMESTAMP WITHOUT TIME ZONE, - ip_address INET, - event_time TIMESTAMP WITHOUT TIME ZONE NOT NULL, - PRIMARY KEY (id) -) - -; -CREATE INDEX token_change_history_by_time ON token_change_history (event_time, id); -CREATE INDEX token_change_history_by_username ON token_change_history (username, event_time, id); -CREATE INDEX token_change_history_by_token ON token_change_history (token, event_time, id); - -CREATE TABLE subtoken ( - child VARCHAR(64) NOT NULL, - parent VARCHAR(64), - PRIMARY KEY (child), - FOREIGN KEY(child) REFERENCES token (token) ON DELETE CASCADE, - FOREIGN KEY(parent) REFERENCES token (token) ON DELETE SET NULL -) - -; -CREATE INDEX subtoken_by_parent ON subtoken (parent); diff --git a/tests/database_test.py b/tests/database_test.py deleted file mode 100644 index e315309d9..000000000 --- a/tests/database_test.py +++ /dev/null @@ -1,47 +0,0 @@ -"""Tests for the Gafaelfawr database schema.""" - -from __future__ import annotations - -import os -import subprocess - -import pytest -from alembic.config import Config as AlembicConfig -from alembic.runtime.migration import MigrationContext -from alembic.script import ScriptDirectory -from sqlalchemy import Connection 
-from sqlalchemy.ext.asyncio import AsyncEngine - -from gafaelfawr.config import Config -from gafaelfawr.dependencies.config import config_dependency - -from .support.constants import CURRENT_SCHEMA -from .support.database import create_old_database, drop_database - - -@pytest.mark.asyncio -async def test_schema(config: Config, engine: AsyncEngine) -> None: - """Test for any unmanaged schema changes. - - Compare the current database schema in its SQLAlchemy ORM form against a - dump of the SQL generated from the last known Alembic migration and ensure - that Alembic doesn't detect any schema changes. - """ - await drop_database(engine) - await create_old_database(config, engine, CURRENT_SCHEMA) - alembic_config = AlembicConfig("alembic.ini") - alembic_scripts = ScriptDirectory.from_config(alembic_config) - current_head = alembic_scripts.get_current_head() - assert current_head - - def set_version(connection: Connection) -> None: - context = MigrationContext.configure(connection) - context.stamp(alembic_scripts, current_head) - - async with engine.begin() as connection: - await connection.run_sync(set_version) - env = { - **os.environ, - "GAFAELFAWR_CONFIG_PATH": str(config_dependency.config_path), - } - subprocess.run(["alembic", "check"], check=True, env=env) diff --git a/tests/schema_test.py b/tests/schema_test.py new file mode 100644 index 000000000..ffa36febd --- /dev/null +++ b/tests/schema_test.py @@ -0,0 +1,25 @@ +"""Tests for the Gafaelfawr database schema.""" + +from __future__ import annotations + +import os +import subprocess + +import pytest +from sqlalchemy.ext.asyncio import AsyncEngine + +from gafaelfawr.config import Config +from gafaelfawr.dependencies.config import config_dependency + +from .support.database import drop_database + + +@pytest.mark.asyncio +async def test_schema(config: Config, engine: AsyncEngine) -> None: + await drop_database(engine) + env = { + **os.environ, + "GAFAELFAWR_CONFIG_PATH": str(config_dependency.config_path), + } + 
subprocess.run(["alembic", "upgrade", "head"], check=True, env=env) + subprocess.run(["alembic", "check"], check=True, env=env) diff --git a/tests/support/constants.py b/tests/support/constants.py index b53352813..51776332d 100644 --- a/tests/support/constants.py +++ b/tests/support/constants.py @@ -3,14 +3,10 @@ from gafaelfawr.keypair import RSAKeyPair __all__ = [ - "CURRENT_SCHEMA", "TEST_KEYPAIR", "TEST_HOSTNAME", ] -CURRENT_SCHEMA = "10.0.0" -"""Most recent saved schema version for schema compatibility tests.""" - TEST_HOSTNAME = "example.com" """The hostname used in ASGI requests to the application.""" diff --git a/tests/support/database.py b/tests/support/database.py index 2e7d6aaba..31b03c5bd 100644 --- a/tests/support/database.py +++ b/tests/support/database.py @@ -10,7 +10,7 @@ from gafaelfawr.config import Config from gafaelfawr.factory import Factory -from gafaelfawr.schema import Base +from gafaelfawr.schema import SchemaBase __all__ = [ "create_old_database", @@ -58,5 +58,5 @@ async def drop_database(engine: AsyncEngine) -> None: Engine to use to issue the SQL commands. """ async with engine.begin() as conn: - await conn.run_sync(Base.metadata.drop_all) + await conn.run_sync(SchemaBase.metadata.drop_all) await unstamp_database(engine)