From bc546ca2b77e51c10bf2b152d973d6b06a87fd27 Mon Sep 17 00:00:00 2001 From: romasku Date: Tue, 18 Aug 2020 18:08:20 +0300 Subject: [PATCH] Setup postgres and alembic (#1287) * Setup postgres and alembic * Run migrations for e2e tests Co-authored-by: Roman Skurikhin --- alembic.ini | 82 +++++++++++++++++++++++ alembic/README | 1 + alembic/env.py | 82 +++++++++++++++++++++++ alembic/script.py.mako | 24 +++++++ alembic/versions/.gitkeep | 0 platform_api/config.py | 17 +++++ platform_api/config_factory.py | 44 +++++++++++++ platform_api/postgres.py | 38 +++++++++++ run_e2e_tests.sh | 2 + setup.cfg | 9 +++ setup.py | 5 ++ tests/integration/conftest.py | 4 ++ tests/integration/postgres.py | 98 ++++++++++++++++++++++++++++ tests/integration/test_postgres.py | 9 +++ tests/k8s/platformapi.yml | 31 +++++++++ tests/k8s/platformapi_migrations.yml | 17 +++++ 16 files changed, 463 insertions(+) create mode 100644 alembic.ini create mode 100644 alembic/README create mode 100644 alembic/env.py create mode 100644 alembic/script.py.mako create mode 100644 alembic/versions/.gitkeep create mode 100644 platform_api/postgres.py create mode 100644 tests/integration/postgres.py create mode 100644 tests/integration/test_postgres.py create mode 100644 tests/k8s/platformapi_migrations.yml diff --git a/alembic.ini b/alembic.ini new file mode 100644 index 000000000..e72122405 --- /dev/null +++ b/alembic.ini @@ -0,0 +1,82 @@ +# A generic, single database configuration. + +[alembic] +# path to migration scripts +script_location = alembic + +# template used to generate migration files +# file_template = %%(rev)s_%%(slug)s + +# timezone to use when rendering the date +# within the migration file as well as the filename. 
+# string value is passed to dateutil.tz.gettz() +# leave blank for localtime +# timezone = + +# max length of characters to apply to the +# "slug" field +# truncate_slug_length = 40 + +# set to 'true' to run the environment during +# the 'revision' command, regardless of autogenerate +# revision_environment = false + +# set to 'true' to allow .pyc and .pyo files without +# a source .py file to be detected as revisions in the +# versions/ directory +# sourceless = false + +# version location specification; this defaults +# to alembic/versions. When using multiple version +# directories, initial revisions must be specified with --version-path +# version_locations = %(here)s/bar %(here)s/bat alembic/versions + +# the output encoding used when revision files +# are written from script.py.mako +# output_encoding = utf-8 + +[post_write_hooks] +# post_write_hooks defines scripts or Python functions that are run +# on newly generated revision scripts. See the documentation for further +# detail and examples + +# format using "black" - use the console_scripts runner, against the "black" entrypoint +# hooks=black +# black.type=console_scripts +# black.entrypoint=black +# black.options=-l 79 + +# Logging configuration +[loggers] +keys = root,sqlalchemy,alembic + +[handlers] +keys = console + +[formatters] +keys = generic + +[logger_root] +level = WARN +handlers = console +qualname = + +[logger_sqlalchemy] +level = WARN +handlers = +qualname = sqlalchemy.engine + +[logger_alembic] +level = INFO +handlers = +qualname = alembic + +[handler_console] +class = StreamHandler +args = (sys.stderr,) +level = NOTSET +formatter = generic + +[formatter_generic] +format = %(levelname)-5.5s [%(name)s] %(message)s +datefmt = %H:%M:%S diff --git a/alembic/README b/alembic/README new file mode 100644 index 000000000..98e4f9c44 --- /dev/null +++ b/alembic/README @@ -0,0 +1 @@ +Generic single-database configuration. 
\ No newline at end of file diff --git a/alembic/env.py b/alembic/env.py new file mode 100644 index 000000000..cf179460d --- /dev/null +++ b/alembic/env.py @@ -0,0 +1,82 @@ +import sys + +from alembic import context +from platform_logging import init_logging +from sqlalchemy import engine_from_config, pool + +from platform_api.config_factory import EnvironConfigFactory + + +# this is the Alembic Config object, which provides +# access to the values within the .ini file in use. +config = context.config + +# Interpret the config file for Python logging. +# This line sets up loggers basically. +if sys.argv[0].endswith("alembic"): + init_logging() + +# add your model's MetaData object here +# for 'autogenerate' support +# from myapp import mymodel +# target_metadata = mymodel.Base.metadata +target_metadata = None + +# other values from the config, defined by the needs of env.py, +# can be acquired: +# my_important_option = config.get_main_option("my_important_option") +# ... etc. + + +def run_migrations_offline() -> None: + """Run migrations in 'offline' mode. + + This configures the context with just a URL + and not an Engine, though an Engine is acceptable + here as well. By skipping the Engine creation + we don't even need a DBAPI to be available. + + Calls to context.execute() here emit the given string to the + script output. + + """ + url = config.get_main_option("sqlalchemy.url") + context.configure( + url=url, + target_metadata=target_metadata, + literal_binds=True, + dialect_opts={"paramstyle": "named"}, + ) + + with context.begin_transaction(): + context.run_migrations() + + +def run_migrations_online() -> None: + """Run migrations in 'online' mode. + + In this scenario we need to create an Engine + and associate a connection with the context. 
+ + """ + if not config.get_main_option("sqlalchemy.url"): + db_config = EnvironConfigFactory().create_postgres() + config.set_main_option("sqlalchemy.url", db_config.postgres_dsn) + + connectable = engine_from_config( + config.get_section(config.config_ini_section), + prefix="sqlalchemy.", + poolclass=pool.NullPool, + ) + + with connectable.connect() as connection: + context.configure(connection=connection, target_metadata=target_metadata) + + with context.begin_transaction(): + context.run_migrations() + + +if context.is_offline_mode(): + run_migrations_offline() +else: + run_migrations_online() diff --git a/alembic/script.py.mako b/alembic/script.py.mako new file mode 100644 index 000000000..2c0156303 --- /dev/null +++ b/alembic/script.py.mako @@ -0,0 +1,24 @@ +"""${message} + +Revision ID: ${up_revision} +Revises: ${down_revision | comma,n} +Create Date: ${create_date} + +""" +from alembic import op +import sqlalchemy as sa +${imports if imports else ""} + +# revision identifiers, used by Alembic. 
+revision = ${repr(up_revision)} +down_revision = ${repr(down_revision)} +branch_labels = ${repr(branch_labels)} +depends_on = ${repr(depends_on)} + + +def upgrade(): + ${upgrades if upgrades else "pass"} + + +def downgrade(): + ${downgrades if downgrades else "pass"} diff --git a/alembic/versions/.gitkeep b/alembic/versions/.gitkeep new file mode 100644 index 000000000..e69de29bb diff --git a/platform_api/config.py b/platform_api/config.py index b2828b76a..e0bbb2f03 100644 --- a/platform_api/config.py +++ b/platform_api/config.py @@ -3,6 +3,7 @@ from pathlib import Path from typing import Optional, Sequence +from alembic.config import Config as AlembicConfig from yarl import URL from .redis import RedisConfig @@ -57,6 +58,21 @@ class DatabaseConfig: redis: Optional[RedisConfig] = None +@dataclass(frozen=True) +class PostgresConfig: + postgres_dsn: str + + alembic: AlembicConfig + + # based on defaults + # https://magicstack.github.io/asyncpg/current/api/index.html#asyncpg.connection.connect + pool_min_size: int = 10 + pool_max_size: int = 10 + + connect_timeout_s: float = 60.0 + command_timeout_s: Optional[float] = 60.0 + + @dataclass(frozen=True) class JobsConfig: deletion_delay_s: int = 0 @@ -93,6 +109,7 @@ class Config: server: ServerConfig database: DatabaseConfig + postgres: PostgresConfig auth: AuthConfig zipkin: ZipkinConfig notifications: NotificationsConfig diff --git a/platform_api/config_factory.py b/platform_api/config_factory.py index 5091fc360..691f00f31 100644 --- a/platform_api/config_factory.py +++ b/platform_api/config_factory.py @@ -1,7 +1,9 @@ import os +import pathlib from pathlib import Path, PurePath from typing import Any, Dict, List, Optional, Sequence +from alembic.config import Config as AlembicConfig from yarl import URL from .cluster_config import ( @@ -21,6 +23,7 @@ NotificationsConfig, OAuthConfig, PlatformConfig, + PostgresConfig, ServerConfig, SSHAuthConfig, ZipkinConfig, @@ -50,6 +53,7 @@ def create(self) -> Config: return Config( 
server=self.create_server(), database=self.create_database(), + postgres=self.create_postgres(), auth=auth, zipkin=self.create_zipkin(), oauth=self.try_create_oauth(), @@ -300,3 +304,43 @@ def create_cors(self) -> CORSConfig: if origins_str: origins = origins_str.split(",") return CORSConfig(allowed_origins=origins) + + def create_postgres(self) -> PostgresConfig: + try: + postgres_dsn = self._environ["NP_ADMIN_POSTGRES_DSN"] + except KeyError: + # Temporary fix until postgres deployment is set + postgres_dsn = "" + pool_min_size = int( + self._environ.get("NP_DB_POSTGRES_POOL_MIN", PostgresConfig.pool_min_size) + ) + pool_max_size = int( + self._environ.get("NP_DB_POSTGRES_POOL_MAX", PostgresConfig.pool_max_size) + ) + connect_timeout_s = float( + self._environ.get( + "NP_DB_POSTGRES_CONNECT_TIMEOUT", PostgresConfig.connect_timeout_s + ) + ) + command_timeout_s = PostgresConfig.command_timeout_s + if self._environ.get("NP_ADMIN_POSTGRES_COMMAND_TIMEOUT"): + command_timeout_s = float( + self._environ["NP_ADMIN_POSTGRES_COMMAND_TIMEOUT"] + ) + return PostgresConfig( + postgres_dsn=postgres_dsn, + alembic=self.create_alembic(postgres_dsn), + pool_min_size=pool_min_size, + pool_max_size=pool_max_size, + connect_timeout_s=connect_timeout_s, + command_timeout_s=command_timeout_s, + ) + + def create_alembic(self, postgres_dsn: str) -> AlembicConfig: + parent_path = pathlib.Path(__file__).resolve().parent.parent + ini_path = str(parent_path / "alembic.ini") + script_path = str(parent_path / "alembic") + config = AlembicConfig(ini_path) + config.set_main_option("script_location", script_path) + config.set_main_option("sqlalchemy.url", postgres_dsn) + return config diff --git a/platform_api/postgres.py b/platform_api/postgres.py new file mode 100644 index 000000000..380e40148 --- /dev/null +++ b/platform_api/postgres.py @@ -0,0 +1,38 @@ +import asyncio +from contextlib import asynccontextmanager + +import alembic +from asyncpg import create_pool +from asyncpg.pool import 
Pool + +from .config import PostgresConfig + + +@asynccontextmanager +async def create_postgres_pool(db_config: PostgresConfig) -> Pool: + async with create_pool( + dsn=db_config.postgres_dsn, + min_size=db_config.pool_min_size, + max_size=db_config.pool_max_size, + timeout=db_config.connect_timeout_s, + command_timeout=db_config.command_timeout_s, + ) as pool: + yield pool + + +class MigrationRunner: + def __init__(self, db_config: PostgresConfig) -> None: + self._db_config = db_config + self._loop = asyncio.get_event_loop() + + def _upgrade(self) -> None: + alembic.command.upgrade(self._db_config.alembic, "head") + + async def upgrade(self) -> None: + await self._loop.run_in_executor(None, self._upgrade) + + def _downgrade(self) -> None: + alembic.command.downgrade(self._db_config.alembic, "base") + + async def downgrade(self) -> None: + await self._loop.run_in_executor(None, self._downgrade) diff --git a/run_e2e_tests.sh b/run_e2e_tests.sh index bda99920b..55e4c2fcf 100755 --- a/run_e2e_tests.sh +++ b/run_e2e_tests.sh @@ -13,10 +13,12 @@ kubectl config use-context minikube kubectl delete -f deploy/platformapi/templates/rb.default.gke.yml kubectl delete -f tests/k8s/platformapi.yml kubectl delete -f tests/k8s/platformconfig.yml +kubectl delete -f tests/k8s/platformapi_migrations.yml kubectl create -f deploy/platformapi/templates/rb.default.gke.yml kubectl create -f tests/k8s/platformconfig.yml kubectl create -f tests/k8s/platformapi.yml +kubectl create -f tests/k8s/platformapi_migrations.yml # wait for containers to start diff --git a/setup.cfg b/setup.cfg index 632932f10..b42b372e8 100644 --- a/setup.cfg +++ b/setup.cfg @@ -103,3 +103,12 @@ ignore_missing_imports = true [mypy-aiohttp_cors] ignore_missing_imports = true + +[mypy-asyncpg.*] +ignore_missing_imports = true + +[mypy-asyncpgsa] +ignore_missing_imports = true + +[mypy-alembic.*] +ignore_missing_imports = true diff --git a/setup.py b/setup.py index 3f5dc32a7..6e3d5fb07 100644 --- a/setup.py +++ b/setup.py
@@ -18,6 +18,11 @@ "multidict==4.7.6", "aiohttp-cors==0.7.0", "aiozipkin==0.7.0", + "asyncpg==0.21.0", + "sqlalchemy==1.3.18", + "asyncpgsa==0.26.3", + "alembic==1.4.2", + "psycopg2-binary==2.8.5", ) setup( diff --git a/tests/integration/conftest.py b/tests/integration/conftest.py index 070bee314..b0617b97e 100644 --- a/tests/integration/conftest.py +++ b/tests/integration/conftest.py @@ -30,6 +30,7 @@ JobsConfig, NotificationsConfig, OAuthConfig, + PostgresConfig, ServerConfig, ZipkinConfig, ) @@ -53,6 +54,7 @@ "tests.integration.auth", "tests.integration.secrets", "tests.integration.notifications", + "tests.integration.postgres", ] @@ -542,6 +544,7 @@ def jobs_config() -> JobsConfig: def config_factory( kube_config: KubeConfig, redis_config: RedisConfig, + postgres_config: PostgresConfig, auth_config: AuthConfig, jobs_config: JobsConfig, notifications_config: NotificationsConfig, @@ -561,6 +564,7 @@ def _factory(**kwargs: Any) -> Config: return Config( server=server_config, database=database_config, + postgres=postgres_config, auth=auth_config, jobs=jobs_config, job_policy_enforcer=job_policy_enforcer, diff --git a/tests/integration/postgres.py b/tests/integration/postgres.py new file mode 100644 index 000000000..d7ca0e67c --- /dev/null +++ b/tests/integration/postgres.py @@ -0,0 +1,98 @@ +import time +from typing import AsyncIterator + +import aiodocker +import aiodocker.containers +import asyncpg +import pytest +from asyncpg import Connection +from asyncpg.pool import Pool + +from platform_api.config import PostgresConfig +from platform_api.config_factory import EnvironConfigFactory +from platform_api.postgres import MigrationRunner, create_postgres_pool + + +@pytest.fixture(scope="session") +async def _postgres_dsn( + docker: aiodocker.Docker, reuse_docker: bool +) -> AsyncIterator[str]: + image_name = "postgres:11.3" + container_name = "postgres" + container_config = { + "Image": image_name, + "AttachStdout": False, + "AttachStderr": False, + "HostConfig": 
{"PublishAllPorts": True}, + } + + if reuse_docker: + try: + container = await docker.containers.get(container_name) + if container["State"]["Running"]: + postgres_dsn = await _make_postgres_dsn(container) + await _wait_for_postgres_server(postgres_dsn) + yield postgres_dsn + return + except aiodocker.exceptions.DockerError: + pass + + try: + await docker.images.inspect(image_name) + except aiodocker.exceptions.DockerError: + await docker.images.pull(image_name) + + container = await docker.containers.create_or_replace( + name=container_name, config=container_config + ) + await container.start() + + postgres_dsn = await _make_postgres_dsn(container) + await _wait_for_postgres_server(postgres_dsn) + yield postgres_dsn + + if not reuse_docker: + await container.kill() + await container.delete(force=True) + + +async def _make_postgres_dsn(container: aiodocker.containers.DockerContainer,) -> str: + host = "0.0.0.0" + port = int((await container.port(5432))[0]["HostPort"]) + return f"postgresql://postgres@{host}:{port}/postgres" + + +async def _wait_for_postgres_server( + postgres_dsn: str, attempts: int = 5, interval_s: float = 1 +) -> None: + attempt = 0 + while attempt < attempts: + try: + attempt = attempt + 1 + conn: Connection = await asyncpg.connect(postgres_dsn, timeout=5.0) + await conn.close() + return + except Exception: + pass + time.sleep(interval_s) + + +@pytest.fixture +async def postgres_config(_postgres_dsn: str) -> AsyncIterator[PostgresConfig]: + + db_config = PostgresConfig( + postgres_dsn=_postgres_dsn, + alembic=EnvironConfigFactory().create_alembic(_postgres_dsn), + ) + migration_runner = MigrationRunner(db_config) + await migration_runner.upgrade() + + yield db_config + + await migration_runner.downgrade() + + +@pytest.fixture +async def postgres_pool(postgres_config: PostgresConfig) -> AsyncIterator[Pool]: + async with create_postgres_pool(postgres_config) as pool: + yield pool diff --git a/tests/integration/test_postgres.py 
b/tests/integration/test_postgres.py new file mode 100644 index 000000000..c992bfc84 --- /dev/null +++ b/tests/integration/test_postgres.py @@ -0,0 +1,9 @@ +import pytest +from asyncpg.pool import Pool + + +@pytest.mark.asyncio +async def test_postgres_available(postgres_pool: Pool) -> None: + async with postgres_pool.acquire() as connection: + result = await connection.fetchrow("SELECT 2 + 2;") + assert result[0] == 4 diff --git a/tests/k8s/platformapi.yml b/tests/k8s/platformapi.yml index 665c860c9..edf7d1149 100644 --- a/tests/k8s/platformapi.yml +++ b/tests/k8s/platformapi.yml @@ -29,6 +29,35 @@ spec: --- apiVersion: apps/v1 kind: ReplicaSet +metadata: + name: platformpostgres +spec: + replicas: 1 + selector: + matchLabels: + service: platformpostgres + template: + metadata: + labels: + service: platformpostgres + spec: + containers: + - name: platformpostgres + image: postgres:11.3 +--- +apiVersion: v1 +kind: Service +metadata: + name: platformpostgres +spec: + ports: + - port: 5432 + targetPort: 5432 + selector: + service: platformpostgres +--- +apiVersion: apps/v1 +kind: ReplicaSet metadata: name: platformauthapi spec: @@ -98,6 +127,8 @@ spec: value: ssh.platform.dev.neuromation.io - name: NP_DB_REDIS_URI value: redis://platformredis:6379/0 + - name: NP_DB_POSTGRES_URI + value: postgresql://postgres@platformpostgres:5432/postgres - name: NP_OAUTH_HEADLESS_CALLBACK_URL value: https://dev.neu.ro/oauth/show-code - name: NP_AUTH_URL diff --git a/tests/k8s/platformapi_migrations.yml b/tests/k8s/platformapi_migrations.yml new file mode 100644 index 000000000..4864bae5a --- /dev/null +++ b/tests/k8s/platformapi_migrations.yml @@ -0,0 +1,17 @@ +apiVersion: batch/v1 +kind: Job +metadata: + name: platformapi-migrations +spec: + template: + spec: + restartPolicy: Never + containers: + - name: platformapi-migrations + image: platformapi:latest + imagePullPolicy: Never + command: ["alembic", "upgrade", "head"] + env: + - name: NP_DB_POSTGRES_URI + value: 
postgresql://postgres@platformpostgres:5432/postgres + backoffLimit: 0 \ No newline at end of file