Setup postgres and alembic (#1287)
* Setup postgres and alembic

* Run migrations for e2e tests

Co-authored-by: Roman Skurikhin <[email protected]>
romasku and Roman Skurikhin authored Aug 18, 2020
1 parent fdf3851 commit bc546ca
Showing 16 changed files with 463 additions and 0 deletions.
82 changes: 82 additions & 0 deletions alembic.ini
@@ -0,0 +1,82 @@
# A generic, single database configuration.

[alembic]
# path to migration scripts
script_location = alembic

# template used to generate migration files
# file_template = %%(rev)s_%%(slug)s

# timezone to use when rendering the date
# within the migration file as well as the filename.
# string value is passed to dateutil.tz.gettz()
# leave blank for localtime
# timezone =

# max length of characters to apply to the
# "slug" field
# truncate_slug_length = 40

# set to 'true' to run the environment during
# the 'revision' command, regardless of autogenerate
# revision_environment = false

# set to 'true' to allow .pyc and .pyo files without
# a source .py file to be detected as revisions in the
# versions/ directory
# sourceless = false

# version location specification; this defaults
# to alembic/versions. When using multiple version
# directories, initial revisions must be specified with --version-path
# version_locations = %(here)s/bar %(here)s/bat alembic/versions

# the output encoding used when revision files
# are written from script.py.mako
# output_encoding = utf-8

[post_write_hooks]
# post_write_hooks defines scripts or Python functions that are run
# on newly generated revision scripts. See the documentation for further
# detail and examples

# format using "black" - use the console_scripts runner, against the "black" entrypoint
# hooks=black
# black.type=console_scripts
# black.entrypoint=black
# black.options=-l 79

# Logging configuration
[loggers]
keys = root,sqlalchemy,alembic

[handlers]
keys = console

[formatters]
keys = generic

[logger_root]
level = WARN
handlers = console
qualname =

[logger_sqlalchemy]
level = WARN
handlers =
qualname = sqlalchemy.engine

[logger_alembic]
level = INFO
handlers =
qualname = alembic

[handler_console]
class = StreamHandler
args = (sys.stderr,)
level = NOTSET
formatter = generic

[formatter_generic]
format = %(levelname)-5.5s [%(name)s] %(message)s
datefmt = %H:%M:%S
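For orientation (not part of this commit), a minimal sketch of how this ini is consumed programmatically; the DSN below is a placeholder and would normally come from NP_ADMIN_POSTGRES_DSN:

from alembic import command
from alembic.config import Config

# Load the ini above; script_location points Alembic at the alembic/ directory.
cfg = Config("alembic.ini")
# Placeholder DSN; env.py falls back to EnvironConfigFactory when this is unset.
cfg.set_main_option("sqlalchemy.url", "postgresql://postgres@localhost:5432/postgres")
command.upgrade(cfg, "head")    # apply all migrations
command.downgrade(cfg, "base")  # roll them all back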
1 change: 1 addition & 0 deletions alembic/README
@@ -0,0 +1 @@
Generic single-database configuration.
82 changes: 82 additions & 0 deletions alembic/env.py
@@ -0,0 +1,82 @@
import sys

from alembic import context
from platform_logging import init_logging
from sqlalchemy import engine_from_config, pool

from platform_api.config_factory import EnvironConfigFactory


# this is the Alembic Config object, which provides
# access to the values within the .ini file in use.
config = context.config

# Interpret the config file for Python logging, but only when invoked via the
# alembic CLI, so the service can configure its own logging.
if sys.argv[0].endswith("alembic"):
    init_logging()

# add your model's MetaData object here
# for 'autogenerate' support
# from myapp import mymodel
# target_metadata = mymodel.Base.metadata
target_metadata = None

# other values from the config, defined by the needs of env.py,
# can be acquired:
# my_important_option = config.get_main_option("my_important_option")
# ... etc.


def run_migrations_offline() -> None:
    """Run migrations in 'offline' mode.

    This configures the context with just a URL and not an Engine,
    though an Engine is acceptable here as well. By skipping the Engine
    creation we don't even need a DBAPI to be available.

    Calls to context.execute() here emit the given string to the
    script output.
    """
    url = config.get_main_option("sqlalchemy.url")
    context.configure(
        url=url,
        target_metadata=target_metadata,
        literal_binds=True,
        dialect_opts={"paramstyle": "named"},
    )

    with context.begin_transaction():
        context.run_migrations()


def run_migrations_online() -> None:
    """Run migrations in 'online' mode.

    In this scenario we need to create an Engine
    and associate a connection with the context.
    """
    if not config.get_main_option("sqlalchemy.url"):
        db_config = EnvironConfigFactory().create_postgres()
        config.set_main_option("sqlalchemy.url", db_config.postgres_dsn)

    connectable = engine_from_config(
        config.get_section(config.config_ini_section),
        prefix="sqlalchemy.",
        poolclass=pool.NullPool,
    )

    with connectable.connect() as connection:
        context.configure(connection=connection, target_metadata=target_metadata)

        with context.begin_transaction():
            context.run_migrations()


if context.is_offline_mode():
    run_migrations_offline()
else:
    run_migrations_online()
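The offline branch above never opens a connection. As a rough illustration (placeholder DSN, assuming the alembic.ini from this commit), passing sql=True makes Alembic take the offline path and emit the migration SQL instead of executing it:

from alembic import command
from alembic.config import Config

cfg = Config("alembic.ini")
cfg.set_main_option("sqlalchemy.url", "postgresql://postgres@localhost:5432/postgres")  # placeholder
# Offline mode: run_migrations_offline() is taken and the SQL is printed to stdout.
command.upgrade(cfg, "head", sql=True)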
24 changes: 24 additions & 0 deletions alembic/script.py.mako
@@ -0,0 +1,24 @@
"""${message}

Revision ID: ${up_revision}
Revises: ${down_revision | comma,n}
Create Date: ${create_date}

"""
from alembic import op
import sqlalchemy as sa
${imports if imports else ""}

# revision identifiers, used by Alembic.
revision = ${repr(up_revision)}
down_revision = ${repr(down_revision)}
branch_labels = ${repr(branch_labels)}
depends_on = ${repr(depends_on)}


def upgrade():
    ${upgrades if upgrades else "pass"}


def downgrade():
    ${downgrades if downgrades else "pass"}
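A hypothetical revision rendered from this template might look like the following (revision id, date, and table are purely illustrative, not part of this commit):

"""create jobs table

Revision ID: 0001
Revises:
Create Date: 2020-08-18 12:00:00.000000

"""
from alembic import op
import sqlalchemy as sa


# revision identifiers, used by Alembic.
revision = "0001"
down_revision = None
branch_labels = None
depends_on = None


def upgrade():
    op.create_table(
        "jobs",
        sa.Column("id", sa.String(), primary_key=True),
        sa.Column("payload", sa.JSON(), nullable=False),
    )


def downgrade():
    op.drop_table("jobs")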
Empty file added alembic/versions/.gitkeep
Empty file.
17 changes: 17 additions & 0 deletions platform_api/config.py
@@ -3,6 +3,7 @@
from pathlib import Path
from typing import Optional, Sequence

from alembic.config import Config as AlembicConfig
from yarl import URL

from .redis import RedisConfig
@@ -57,6 +58,21 @@ class DatabaseConfig:
    redis: Optional[RedisConfig] = None


@dataclass(frozen=True)
class PostgresConfig:
    postgres_dsn: str

    alembic: AlembicConfig

    # based on defaults
    # https://magicstack.github.io/asyncpg/current/api/index.html#asyncpg.connection.connect
    pool_min_size: int = 10
    pool_max_size: int = 10

    connect_timeout_s: float = 60.0
    command_timeout_s: Optional[float] = 60.0


@dataclass(frozen=True)
class JobsConfig:
    deletion_delay_s: int = 0
@@ -93,6 +109,7 @@ class Config:
    server: ServerConfig

    database: DatabaseConfig
    postgres: PostgresConfig
    auth: AuthConfig
    zipkin: ZipkinConfig
    notifications: NotificationsConfig
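As a quick, hedged illustration of the new dataclass (the DSN and ini path are placeholders; in the service the instance is built by EnvironConfigFactory.create_postgres below):

from alembic.config import Config as AlembicConfig
from platform_api.config import PostgresConfig

postgres = PostgresConfig(
    postgres_dsn="postgresql://postgres@localhost:5432/postgres",  # placeholder
    alembic=AlembicConfig("alembic.ini"),                          # placeholder path
)
# The remaining fields fall back to the asyncpg-based defaults above.
assert postgres.pool_min_size == 10
assert postgres.command_timeout_s == 60.0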
44 changes: 44 additions & 0 deletions platform_api/config_factory.py
@@ -1,7 +1,9 @@
import os
import pathlib
from pathlib import Path, PurePath
from typing import Any, Dict, List, Optional, Sequence

from alembic.config import Config as AlembicConfig
from yarl import URL

from .cluster_config import (
@@ -21,6 +23,7 @@
    NotificationsConfig,
    OAuthConfig,
    PlatformConfig,
    PostgresConfig,
    ServerConfig,
    SSHAuthConfig,
    ZipkinConfig,
@@ -50,6 +53,7 @@ def create(self) -> Config:
        return Config(
            server=self.create_server(),
            database=self.create_database(),
            postgres=self.create_postgres(),
            auth=auth,
            zipkin=self.create_zipkin(),
            oauth=self.try_create_oauth(),
@@ -300,3 +304,43 @@ def create_cors(self) -> CORSConfig:
        if origins_str:
            origins = origins_str.split(",")
        return CORSConfig(allowed_origins=origins)

    def create_postgres(self) -> PostgresConfig:
        try:
            postgres_dsn = self._environ["NP_ADMIN_POSTGRES_DSN"]
        except KeyError:
            # Temporary fix until postgres deployment is set
            postgres_dsn = ""
        pool_min_size = int(
            self._environ.get("NP_DB_POSTGRES_POOL_MIN", PostgresConfig.pool_min_size)
        )
        pool_max_size = int(
            self._environ.get("NP_DB_POSTGRES_POOL_MAX", PostgresConfig.pool_max_size)
        )
        connect_timeout_s = float(
            self._environ.get(
                "NP_DB_POSTGRES_CONNECT_TIMEOUT", PostgresConfig.connect_timeout_s
            )
        )
        command_timeout_s = PostgresConfig.command_timeout_s
        if self._environ.get("NP_ADMIN_POSTGRES_COMMAND_TIMEOUT"):
            command_timeout_s = float(
                self._environ["NP_ADMIN_POSTGRES_COMMAND_TIMEOUT"]
            )
        return PostgresConfig(
            postgres_dsn=postgres_dsn,
            alembic=self.create_alembic(postgres_dsn),
            pool_min_size=pool_min_size,
            pool_max_size=pool_max_size,
            connect_timeout_s=connect_timeout_s,
            command_timeout_s=command_timeout_s,
        )

    def create_alembic(self, postgres_dsn: str) -> AlembicConfig:
        parent_path = pathlib.Path(__file__).resolve().parent.parent
        ini_path = str(parent_path / "alembic.ini")
        script_path = str(parent_path / "alembic")
        config = AlembicConfig(ini_path)
        config.set_main_option("script_location", script_path)
        config.set_main_option("sqlalchemy.url", postgres_dsn)
        return config
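A hedged sketch of the environment variables create_postgres reads (values are placeholders; this assumes the no-argument EnvironConfigFactory falls back to os.environ, as in alembic/env.py above):

import os

from platform_api.config_factory import EnvironConfigFactory

os.environ.update(
    {
        "NP_ADMIN_POSTGRES_DSN": "postgresql://postgres@localhost:5432/postgres",
        "NP_DB_POSTGRES_POOL_MIN": "5",
        "NP_DB_POSTGRES_POOL_MAX": "20",
        "NP_DB_POSTGRES_CONNECT_TIMEOUT": "30",
        "NP_ADMIN_POSTGRES_COMMAND_TIMEOUT": "120",
    }
)
config = EnvironConfigFactory().create_postgres()
assert config.pool_max_size == 20
# create_alembic() copies the DSN into the Alembic config as well.
assert config.alembic.get_main_option("sqlalchemy.url") == os.environ["NP_ADMIN_POSTGRES_DSN"]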
38 changes: 38 additions & 0 deletions platform_api/postgres.py
@@ -0,0 +1,38 @@
import asyncio
from contextlib import asynccontextmanager
from typing import AsyncIterator

import alembic.command
from asyncpg import create_pool
from asyncpg.pool import Pool

from .config import PostgresConfig


@asynccontextmanager
async def create_postgres_pool(db_config: PostgresConfig) -> AsyncIterator[Pool]:
    async with create_pool(
        dsn=db_config.postgres_dsn,
        min_size=db_config.pool_min_size,
        max_size=db_config.pool_max_size,
        timeout=db_config.connect_timeout_s,
        command_timeout=db_config.command_timeout_s,
    ) as pool:
        yield pool


class MigrationRunner:
    def __init__(self, db_config: PostgresConfig) -> None:
        self._db_config = db_config
        self._loop = asyncio.get_event_loop()

    def _upgrade(self) -> None:
        alembic.command.upgrade(self._db_config.alembic, "head")

    async def upgrade(self) -> None:
        await self._loop.run_in_executor(None, self._upgrade)

    def _downgrade(self) -> None:
        alembic.command.downgrade(self._db_config.alembic, "base")

    async def downgrade(self) -> None:
        await self._loop.run_in_executor(None, self._downgrade)
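A hedged sketch of how these helpers could be wired together at startup (illustrative only, not the service's actual entry point; the config comes from the factory above):

import asyncio

from platform_api.config_factory import EnvironConfigFactory
from platform_api.postgres import MigrationRunner, create_postgres_pool


async def main() -> None:
    db_config = EnvironConfigFactory().create_postgres()

    # Run Alembic migrations in a thread executor so the event loop is not blocked.
    migration_runner = MigrationRunner(db_config)
    await migration_runner.upgrade()

    # Open the asyncpg pool for the application itself.
    async with create_postgres_pool(db_config) as pool:
        print(await pool.fetchval("SELECT 1"))


asyncio.run(main())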
2 changes: 2 additions & 0 deletions run_e2e_tests.sh
@@ -13,10 +13,12 @@ kubectl config use-context minikube
kubectl delete -f deploy/platformapi/templates/rb.default.gke.yml
kubectl delete -f tests/k8s/platformapi.yml
kubectl delete -f tests/k8s/platformconfig.yml
kubectl delete -f tests/k8s/platformapi_migraions.yml

kubectl create -f deploy/platformapi/templates/rb.default.gke.yml
kubectl create -f tests/k8s/platformconfig.yml
kubectl create -f tests/k8s/platformapi.yml
kubectl create -f tests/k8s/platformapi_migraions.yml

# wait for containers to start

9 changes: 9 additions & 0 deletions setup.cfg
@@ -103,3 +103,12 @@ ignore_missing_imports = true

[mypy-aiohttp_cors]
ignore_missing_imports = true

[mypy-asyncpg.*]
ignore_missing_imports = true

[mypy-asyncpgsa]
ignore_missing_imports = true

[mypy-alembic.*]
ignore_missing_imports = true
5 changes: 5 additions & 0 deletions setup.py
@@ -18,6 +18,11 @@
"multidict==4.7.6",
"aiohttp-cors==0.7.0",
"aiozipkin==0.7.0",
"asyncpg==0.21.0",
"sqlalchemy==1.3.18",
"asyncpgsa==0.26.3",
"alembic==1.4.2",
"psycopg2-binary==2.8.5",
)

setup(