From fd60985d9a7c9bee1de4586e4276aa3c636a1ab7 Mon Sep 17 00:00:00 2001 From: Chris Ostrouchov Date: Wed, 10 Aug 2022 17:15:39 -0400 Subject: [PATCH 1/6] Fixing local storage option to ensure working --- conda-store-server/conda_store_server/app.py | 4 + .../conda_store_server/server/app.py | 11 +++ .../conda_store_server/storage.py | 12 +-- examples/standalone/README.md | 4 + examples/standalone/conda_store_config.py | 71 ++++++++++++++++++ examples/standalone/docker-compose.yaml | 75 +++++++++++++++++++ 6 files changed, 171 insertions(+), 6 deletions(-) create mode 100644 examples/standalone/README.md create mode 100644 examples/standalone/conda_store_config.py create mode 100644 examples/standalone/docker-compose.yaml diff --git a/conda-store-server/conda_store_server/app.py b/conda-store-server/conda_store_server/app.py index a42c04238..8ee2b54b6 100644 --- a/conda-store-server/conda_store_server/app.py +++ b/conda-store-server/conda_store_server/app.py @@ -323,6 +323,10 @@ def storage(self): if hasattr(self, "_storage"): return self._storage self._storage = self.storage_class(parent=self, log=self.log) + + if isinstance(self._storage, storage.LocalStorage): + os.makedirs(self._storage.storage_path, exist_ok=True) + return self._storage @property diff --git a/conda-store-server/conda_store_server/server/app.py b/conda-store-server/conda_store_server/server/app.py index 4da98cf0e..1713b97b1 100644 --- a/conda-store-server/conda_store_server/server/app.py +++ b/conda-store-server/conda_store_server/server/app.py @@ -6,11 +6,13 @@ from fastapi import FastAPI, Request, HTTPException from fastapi.middleware.cors import CORSMiddleware from fastapi.responses import RedirectResponse, JSONResponse +from fastapi.staticfiles import StaticFiles from starlette.middleware.sessions import SessionMiddleware from fastapi.templating import Jinja2Templates from traitlets import Bool, Unicode, Integer, Type, validate, Instance, default, Dict from traitlets.config import 
Application, catch_config_error +from conda_store_server import storage from conda_store_server.server import auth, views from conda_store_server.app import CondaStore from conda_store_server import __version__ @@ -133,6 +135,7 @@ def initialize(self, *args, **kwargs): dbutil.upgrade(self.config.CondaStore.database_url) self.authentication = self.authentication_class(parent=self, log=self.log) + # ensure checks on redis_url self.conda_store.redis_url @@ -227,6 +230,14 @@ def redirect_home(request: Request): prefix=trim_slash(self.url_prefix), ) + if isinstance(self.conda_store.storage, storage.LocalStorage): + self.conda_store.storage.storage_url = f"{trim_slash(self.url_prefix)}/storage" + app.mount( + self.conda_store.storage.storage_url, + StaticFiles(directory=self.conda_store.storage.storage_path), + name="static") + + self.conda_store.ensure_namespace() self.conda_store.ensure_conda_channels() diff --git a/conda-store-server/conda_store_server/storage.py b/conda-store-server/conda_store_server/storage.py index c81a414e0..dfb2a7d5c 100644 --- a/conda-store-server/conda_store_server/storage.py +++ b/conda-store-server/conda_store_server/storage.py @@ -199,17 +199,17 @@ class LocalStorage(Storage): ) def fset(self, db, build_id, key, filename, content_type=None, artifact_type=None): - filename = os.path.join(self.storage_path, key) - os.makedirs(os.path.dirname(filename), exist_ok=True) + destination_filename = os.path.abspath(os.path.join(self.storage_path, key)) + os.makedirs(os.path.dirname(destination_filename), exist_ok=True) - shutil.copyfile(filename, os.path.join(self.storage_path, key)) + shutil.copyfile(filename, destination_filename) super().fset(db, build_id, key, filename, artifact_type) def set(self, db, build_id, key, value, content_type=None, artifact_type=None): - filename = os.path.join(self.storage_path, key) - os.makedirs(os.path.dirname(filename), exist_ok=True) + destination_filename = os.path.join(self.storage_path, key) + 
os.makedirs(os.path.dirname(destination_filename), exist_ok=True) - with open(filename, "wb") as f: + with open(destination_filename, "wb") as f: f.write(value) super().set(db, build_id, key, value, artifact_type) diff --git a/examples/standalone/README.md b/examples/standalone/README.md new file mode 100644 index 000000000..0fb8ea198 --- /dev/null +++ b/examples/standalone/README.md @@ -0,0 +1,4 @@ +# Standalone + +A script for minimal configuration to run conda-store. The end goal is +that everything should run under a single container image. diff --git a/examples/standalone/conda_store_config.py b/examples/standalone/conda_store_config.py new file mode 100644 index 000000000..17e986b5e --- /dev/null +++ b/examples/standalone/conda_store_config.py @@ -0,0 +1,71 @@ +import logging + +from conda_store_server.storage import LocalStorage +from conda_store_server.server.auth import DummyAuthentication + +# ================================== +# conda-store settings +# ================================== +c.CondaStore.storage_class = LocalStorage +c.CondaStore.store_directory = "/var/lib/conda-store/" +c.CondaStore.environment_directory = "/opt/conda-store/envs/" +# c.CondaStore.database_url = "mysql+pymysql://admin:password@mysql/conda-store" +c.CondaStore.database_url = "postgresql+psycopg2://postgres:password@postgres/conda-store" +c.CondaStore.redis_url = "redis://:password@redis:6379/0" +c.CondaStore.default_uid = 1000 +c.CondaStore.default_gid = 1000 +c.CondaStore.default_permissions = "775" +c.CondaStore.conda_included_packages = [ + 'ipykernel' +] + +c.CondaStore.pypi_included_packages = [ + 'nothing' +] + + +# ================================== +# server settings +# ================================== +c.CondaStoreServer.log_level = logging.INFO +c.CondaStoreServer.enable_ui = True +c.CondaStoreServer.enable_api = True +c.CondaStoreServer.enable_registry = True +c.CondaStoreServer.enable_metrics = True +c.CondaStoreServer.address = "0.0.0.0" 
+c.CondaStoreServer.port = 5000 +# This MUST start with `/` +c.CondaStoreServer.url_prefix = "/conda-store" + + +# ================================== +# auth settings +# ================================== +c.CondaStoreServer.authentication_class = DummyAuthentication +c.CondaStoreServer.template_vars = { + "banner": '', + "logo": "https://quansight.com/_next/image?url=https%3A%2F%2Fa.storyblok.com%2Ff%2F147759%2F1076x520%2Fe6cd6af012%2Fquansight-logo-no-tagline.png&w=3840&q=75", +} + +# ================================== +# worker settings +# ================================== +c.CondaStoreWorker.log_level = logging.INFO +c.CondaStoreWorker.watch_paths = ["/opt/environments"] +c.CondaStoreWorker.concurrency = 4 + +# ================================== +# registry settings +# ================================== +# from python_docker.registry import Registry +# import os + +# def _configure_docker_registry(registry_url: str): +# return Registry( +# "https://registry-1.docker.io", +# username=os.environ.get('DOCKER_USERNAME'), +# password=os.environ.get('DOCKER_PASSWORD')) + +# c.ContainerRegistry.container_registries = { +# 'https://registry-1.docker.io': _configure_docker_registry +# } diff --git a/examples/standalone/docker-compose.yaml b/examples/standalone/docker-compose.yaml new file mode 100644 index 000000000..f0994b4f4 --- /dev/null +++ b/examples/standalone/docker-compose.yaml @@ -0,0 +1,75 @@ +version: "3.8" + +volumes: + conda_store_data: + +services: + initializer: + image: alpine + restart: "no" + # hack to set perimssions on volume + entrypoint: | + /bin/sh -c "chown -R 1000:1000 /var/lib/conda-store/" + volumes: + - conda_store_data:/var/lib/conda-store/ + + conda-store-worker: + build: ../../conda-store-server + user: 1000:1000 + volumes: + - conda_store_data:/var/lib/conda-store/ + - ./conda_store_config.py:/opt/conda_store/conda_store_config.py:ro + depends_on: + initializer: + condition: service_completed_successfully + conda-store-server: + 
condition: service_healthy + platform: linux/amd64 + command: ['conda-store-worker', '--config', '/opt/conda_store/conda_store_config.py'] + + conda-store-server: + build: ../../conda-store-server + user: 1000:1000 + depends_on: + initializer: + condition: service_completed_successfully + postgres: + condition: service_healthy + redis: + condition: service_healthy + volumes: + - conda_store_data:/var/lib/conda-store/ + - ./conda_store_config.py:/opt/conda_store/conda_store_config.py:ro + healthcheck: + test: ["CMD", "curl", "--fail", "http://localhost:5000/conda-store/api/v1/"] + interval: 10s + timeout: 5s + retries: 5 + platform: linux/amd64 + command: ['conda-store-server', '--config', '/opt/conda_store/conda_store_config.py'] + ports: + - "5000:5000" + + postgres: + image: postgres:13 + user: postgres + ports: + - 5432:5432 + healthcheck: + test: ["CMD-SHELL", "pg_isready"] + interval: 10s + timeout: 5s + retries: 5 + environment: + POSTGRES_PASSWORD: password + POSTGRES_DB: conda-store + + redis: + image: bitnami/redis + healthcheck: + test: ["CMD", "redis-cli","ping"] + interval: 10s + timeout: 5s + retries: 5 + environment: + REDIS_PASSWORD: password From 4c915f70422e972cae694e154f3602b6d9c0d908 Mon Sep 17 00:00:00 2001 From: Chris Ostrouchov Date: Wed, 10 Aug 2022 17:16:34 -0400 Subject: [PATCH 2/6] Black formatting --- conda-store-server/conda_store_server/server/app.py | 8 +++++--- 1 file changed, 5 insertions(+), 3 deletions(-) diff --git a/conda-store-server/conda_store_server/server/app.py b/conda-store-server/conda_store_server/server/app.py index 1713b97b1..72fa81dc3 100644 --- a/conda-store-server/conda_store_server/server/app.py +++ b/conda-store-server/conda_store_server/server/app.py @@ -231,12 +231,14 @@ def redirect_home(request: Request): ) if isinstance(self.conda_store.storage, storage.LocalStorage): - self.conda_store.storage.storage_url = f"{trim_slash(self.url_prefix)}/storage" + self.conda_store.storage.storage_url = ( + 
f"{trim_slash(self.url_prefix)}/storage" + ) app.mount( self.conda_store.storage.storage_url, StaticFiles(directory=self.conda_store.storage.storage_path), - name="static") - + name="static", + ) self.conda_store.ensure_namespace() self.conda_store.ensure_conda_channels() From b7aae383bc9eb95194bc8ec2d978900b7f4a1d50 Mon Sep 17 00:00:00 2001 From: Chris Ostrouchov Date: Thu, 11 Aug 2022 15:20:29 -0400 Subject: [PATCH 3/6] Alembic use bulk alter statements --- .../versions/48be4072fe58_initial_schema.py | 174 +++++++++--------- ...adding_container_registry_value_to_enum.py | 33 +++- ...d63a091aff8_add_environment_description.py | 10 +- conda-store-server/conda_store_server/orm.py | 2 +- .../conda_store_server/server/app.py | 40 +++- examples/standalone/conda_store_config.py | 2 +- examples/standalone/docker-compose.yaml | 32 +--- 7 files changed, 157 insertions(+), 136 deletions(-) diff --git a/conda-store-server/alembic/versions/48be4072fe58_initial_schema.py b/conda-store-server/alembic/versions/48be4072fe58_initial_schema.py index de5ec790c..262d69dc8 100644 --- a/conda-store-server/alembic/versions/48be4072fe58_initial_schema.py +++ b/conda-store-server/alembic/versions/48be4072fe58_initial_schema.py @@ -17,44 +17,40 @@ def upgrade(): - # ### commands auto generated by Alembic - please adjust! 
### op.create_table( "conda_channel", - sa.Column("id", sa.Integer(), nullable=False), - sa.Column("name", sa.Unicode(length=255), nullable=False), + sa.Column("id", sa.Integer(), nullable=False, primary_key=True), + sa.Column("name", sa.Unicode(length=255), nullable=False, unique=True), sa.Column("last_update", sa.DateTime(), nullable=True), - sa.PrimaryKeyConstraint("id"), - sa.UniqueConstraint("name"), ) + op.create_table( "conda_store_configuration", - sa.Column("id", sa.Integer(), nullable=False), + sa.Column("id", sa.Integer(), nullable=False, primary_key=True), sa.Column("disk_usage", sa.BigInteger(), nullable=True), sa.Column("free_storage", sa.BigInteger(), nullable=True), sa.Column("total_storage", sa.BigInteger(), nullable=True), - sa.PrimaryKeyConstraint("id"), ) + op.create_table( "namespace", - sa.Column("id", sa.Integer(), nullable=False), - sa.Column("name", sa.Unicode(length=255), nullable=True), - sa.Column("deleted_on", sa.DateTime(), nullable=True), - sa.PrimaryKeyConstraint("id"), - sa.UniqueConstraint("name"), + sa.Column("id", sa.Integer(), nullable=False, primary_key=True), + sa.Column("name", sa.Unicode(length=255), nullable=True, unique=True), + sa.Column("deleted_on", sa.DateTime(), nullable=True) ) + op.create_table( "specification", - sa.Column("id", sa.Integer(), nullable=False), + sa.Column("id", sa.Integer(), nullable=False, primary_key=True), sa.Column("name", sa.Unicode(length=255), nullable=False), sa.Column("spec", sa.JSON(), nullable=False), - sa.Column("sha256", sa.Unicode(length=255), nullable=False), + sa.Column("sha256", sa.Unicode(length=255), nullable=False, unique=True), sa.Column("created_on", sa.DateTime(), nullable=True), - sa.PrimaryKeyConstraint("id"), - sa.UniqueConstraint("sha256"), ) + op.create_table( "conda_package", - sa.Column("id", sa.Integer(), nullable=False), + sa.Column("id", sa.Integer(), nullable=False, primary_key=True), sa.Column("channel_id", sa.Integer(), nullable=True), sa.Column("build", 
sa.Unicode(length=64), nullable=False), sa.Column("build_number", sa.Integer(), nullable=False), @@ -70,55 +66,30 @@ def upgrade(): sa.Column("timestamp", sa.BigInteger(), nullable=True), sa.Column("version", sa.Unicode(length=64), nullable=False), sa.Column("summary", sa.Text(), nullable=True), - sa.Column("description", sa.Text(), nullable=True), - sa.ForeignKeyConstraint( - ["channel_id"], - ["conda_channel.id"], - ), - sa.PrimaryKeyConstraint("id"), - sa.UniqueConstraint( - "channel_id", - "subdir", - "name", - "version", - "build", - "build_number", - "sha256", - name="_conda_package_uc", - ), + sa.Column("description", sa.Text(), nullable=True) ) + op.create_table( "environment", - sa.Column("id", sa.Integer(), nullable=False), + sa.Column("id", sa.Integer(), nullable=False, primary_key=True), sa.Column("namespace_id", sa.Integer(), nullable=False), sa.Column("name", sa.Unicode(length=255), nullable=False), sa.Column("current_build_id", sa.Integer(), nullable=True), - sa.Column("deleted_on", sa.DateTime(), nullable=True), - # sa.ForeignKeyConstraint(['current_build_id'], ['build.id'], use_alter=True), - sa.ForeignKeyConstraint( - ["namespace_id"], - ["namespace.id"], - ), - sa.PrimaryKeyConstraint("id"), - sa.UniqueConstraint("namespace_id", "name", name="_namespace_name_uc"), + sa.Column("deleted_on", sa.DateTime(), nullable=True) ) op.create_table( "solve", - sa.Column("id", sa.Integer(), nullable=False), + sa.Column("id", sa.Integer(), nullable=False, primary_key=True), sa.Column("specification_id", sa.Integer(), nullable=False), sa.Column("scheduled_on", sa.DateTime(), nullable=True), sa.Column("started_on", sa.DateTime(), nullable=True), - sa.Column("ended_on", sa.DateTime(), nullable=True), - sa.ForeignKeyConstraint( - ["specification_id"], - ["specification.id"], - ), - sa.PrimaryKeyConstraint("id"), + sa.Column("ended_on", sa.DateTime(), nullable=True) ) + op.create_table( "build", - sa.Column("id", sa.Integer(), nullable=False), + sa.Column("id", 
sa.Integer(), nullable=False, primary_key=True), sa.Column("specification_id", sa.Integer(), nullable=False), sa.Column("environment_id", sa.Integer(), nullable=False), sa.Column( @@ -130,38 +101,18 @@ def upgrade(): sa.Column("scheduled_on", sa.DateTime(), nullable=True), sa.Column("started_on", sa.DateTime(), nullable=True), sa.Column("ended_on", sa.DateTime(), nullable=True), - sa.Column("deleted_on", sa.DateTime(), nullable=True), - sa.ForeignKeyConstraint( - ["environment_id"], - ["environment.id"], - ), - sa.ForeignKeyConstraint( - ["specification_id"], - ["specification.id"], - ), - sa.PrimaryKeyConstraint("id"), - ) - op.create_foreign_key( - constraint_name="fk_environment_current_build_id_build_id", - source_table="environment", - referent_table="build", - local_cols=["current_build_id"], - remote_cols=["id"], + sa.Column("deleted_on", sa.DateTime(), nullable=True) ) op.create_table( "solve_conda_package", - sa.Column("solve_id", sa.Integer(), nullable=False), - sa.Column("conda_package_id", sa.Integer(), nullable=False), - sa.ForeignKeyConstraint( - ["conda_package_id"], ["conda_package.id"], ondelete="CASCADE" - ), - sa.ForeignKeyConstraint(["solve_id"], ["solve.id"], ondelete="CASCADE"), - sa.PrimaryKeyConstraint("solve_id", "conda_package_id"), + sa.Column("solve_id", sa.Integer(), nullable=False, primary_key=True), + sa.Column("conda_package_id", sa.Integer(), nullable=False) ) + op.create_table( "build_artifact", - sa.Column("id", sa.Integer(), nullable=False), + sa.Column("id", sa.Integer(), nullable=False, primary_key=True), sa.Column("build_id", sa.Integer(), nullable=True), sa.Column( "artifact_type", @@ -177,24 +128,77 @@ def upgrade(): ), nullable=False, ), - sa.Column("key", sa.Unicode(length=255), nullable=True), + sa.Column("key", sa.Unicode(length=255), nullable=True) + ) + + op.create_table( + "build_conda_package", + sa.Column("build_id", sa.Integer(), nullable=False, primary_key=True), + sa.Column("conda_package_id", sa.Integer(), 
nullable=False) + ) + + with op.batch_alter_table("conda_package", table_args=[ + sa.ForeignKeyConstraint( + ["channel_id"], + ["conda_channel.id"], + ), + sa.UniqueConstraint( + "channel_id", + "subdir", + "name", + "version", + "build", + "build_number", + "sha256", + name="_conda_package_uc", + ), + ]) as batch_op: + pass + + with op.batch_alter_table("environment", table_args=[ + sa.ForeignKeyConstraint(['current_build_id'], ['build.id']), + sa.UniqueConstraint("namespace_id", "name", name="_namespace_name_uc"), + sa.ForeignKeyConstraint(["namespace_id"], ["namespace.id"]) + ]) as batch_op: + pass + + with op.batch_alter_table("solve", table_args=[ + sa.ForeignKeyConstraint( + ["specification_id"], + ["specification.id"], + ), + ]) as batch_op: + pass + + with op.batch_alter_table("build", table_args=[ + sa.ForeignKeyConstraint(["environment_id"], ["environment.id"]), + sa.ForeignKeyConstraint(["specification_id"], ["specification.id"]), + ]) as batch_op: + pass + + with op.batch_alter_table("solve_conda_package", table_args=[ + sa.ForeignKeyConstraint( + ["conda_package_id"], ["conda_package.id"], ondelete="CASCADE" + ), + sa.ForeignKeyConstraint(["solve_id"], ["solve.id"], ondelete="CASCADE"), + ]) as batch_op: + pass + + with op.batch_alter_table("build_artifact", table_args=[ sa.ForeignKeyConstraint( ["build_id"], ["build.id"], ), - sa.PrimaryKeyConstraint("id"), - ) - op.create_table( - "build_conda_package", - sa.Column("build_id", sa.Integer(), nullable=False), - sa.Column("conda_package_id", sa.Integer(), nullable=False), + ]) as batch_op: + pass + + with op.batch_alter_table("build_conda_package", table_args=[ sa.ForeignKeyConstraint(["build_id"], ["build.id"], ondelete="CASCADE"), sa.ForeignKeyConstraint( ["conda_package_id"], ["conda_package.id"], ondelete="CASCADE" ), - sa.PrimaryKeyConstraint("build_id", "conda_package_id"), - ) - # ### end Alembic commands ### + ]) as batch_op: + pass def downgrade(): diff --git 
a/conda-store-server/alembic/versions/5ad723de2abd_adding_container_registry_value_to_enum.py b/conda-store-server/alembic/versions/5ad723de2abd_adding_container_registry_value_to_enum.py index ea7ec664a..c9e6b7fd7 100644 --- a/conda-store-server/alembic/versions/5ad723de2abd_adding_container_registry_value_to_enum.py +++ b/conda-store-server/alembic/versions/5ad723de2abd_adding_container_registry_value_to_enum.py @@ -6,6 +6,7 @@ """ from alembic import op +import sqlalchemy as sa # revision identifiers, used by Alembic. @@ -16,8 +17,36 @@ def upgrade(): - with op.get_context().autocommit_block(): - op.execute("ALTER TYPE buildartifacttype ADD VALUE 'CONTAINER_REGISTRY'") + old_type = sa.Enum( + "DIRECTORY", + "LOCKFILE", + "LOGS", + "YAML", + "CONDA_PACK", + "DOCKER_BLOB", + "DOCKER_MANIFEST", + name="buildartifacttype", + ) + + new_type = sa.Enum( + "DIRECTORY", + "LOCKFILE", + "LOGS", + "YAML", + "CONDA_PACK", + "DOCKER_BLOB", + "DOCKER_MANIFEST", + "CONTAINER_REGISTRY", + name="buildartifacttype", + ) + + with op.batch_alter_table("build_artifact") as batch_op: + batch_op.alter_column('artifact_type', type_=new_type, existing_type=old_type) + + # context = op.get_context() + # if context.get_impl().bind.dialect.name == "postgresql": + # with context.autocommit_block(): + # op.execute("ALTER TYPE buildartifacttype ADD VALUE 'CONTAINER_REGISTRY'") def downgrade(): diff --git a/conda-store-server/alembic/versions/8d63a091aff8_add_environment_description.py b/conda-store-server/alembic/versions/8d63a091aff8_add_environment_description.py index f54766105..b3d8836fc 100644 --- a/conda-store-server/alembic/versions/8d63a091aff8_add_environment_description.py +++ b/conda-store-server/alembic/versions/8d63a091aff8_add_environment_description.py @@ -17,12 +17,10 @@ def upgrade(): - # ### commands auto generated by Alembic - please adjust! 
### - op.add_column('environment', sa.Column('description', sa.UnicodeText(), nullable=True)) - # ### end Alembic commands ### + with op.batch_alter_table("environment") as batch_op: + batch_op.add_column(sa.Column('description', sa.UnicodeText(), nullable=True)) def downgrade(): - # ### commands auto generated by Alembic - please adjust! ### - op.drop_column('environment', 'description') - # ### end Alembic commands ### + with op.batch_alter_table("environment") as batch_op: + batch_op.drop_column('description') diff --git a/conda-store-server/conda_store_server/orm.py b/conda-store-server/conda_store_server/orm.py index 8b126e4b9..a3868de86 100644 --- a/conda-store-server/conda_store_server/orm.py +++ b/conda-store-server/conda_store_server/orm.py @@ -270,7 +270,7 @@ class Environment(Base): name = Column(Unicode(255), nullable=False) - current_build_id = Column(Integer, ForeignKey("build.id", use_alter=True)) + current_build_id = Column(Integer, ForeignKey("build.id")) current_build = relationship( Build, foreign_keys=[current_build_id], post_update=True ) diff --git a/conda-store-server/conda_store_server/server/app.py b/conda-store-server/conda_store_server/server/app.py index 72fa81dc3..651c7402e 100644 --- a/conda-store-server/conda_store_server/server/app.py +++ b/conda-store-server/conda_store_server/server/app.py @@ -23,6 +23,7 @@ class CondaStoreServer(Application): aliases = { "config": "CondaStoreServer.config_file", + "standalone": "CondaStoreServer.standalone", } log_level = Integer( @@ -124,6 +125,12 @@ def _validate_config_file(self, proposal): 100, help="maximum number of items to return in a single page", config=True ) + standalone = Bool( + False, + help="Run application in standalone mode with workers running as subprocess", + config=True, + ) + @catch_config_error def initialize(self, *args, **kwargs): super().initialize(*args, **kwargs) @@ -248,13 +255,26 @@ def redirect_home(request: Request): from conda_store_server.worker import tasks # 
noqa - uvicorn.run( - app, - host=self.address, - port=self.port, - reload=False, - debug=(self.log_level == logging.DEBUG), - workers=1, - proxy_headers=self.behind_proxy, - forwarded_allow_ips=("*" if self.behind_proxy else None), - ) + # start worker if in standalone mode + if self.standalone: + import multiprocessing + multiprocessing.set_start_method('spawn') + + from conda_store_server.worker.app import CondaStoreWorker + process = multiprocessing.Process(target=CondaStoreWorker.launch_instance) + process.start() + + try: + uvicorn.run( + app, + host=self.address, + port=self.port, + reload=False, + debug=(self.log_level == logging.DEBUG), + workers=1, + proxy_headers=self.behind_proxy, + forwarded_allow_ips=("*" if self.behind_proxy else None), + ) + finally: + if self.standalone: + process.join() diff --git a/examples/standalone/conda_store_config.py b/examples/standalone/conda_store_config.py index 17e986b5e..c9fec7910 100644 --- a/examples/standalone/conda_store_config.py +++ b/examples/standalone/conda_store_config.py @@ -10,7 +10,7 @@ c.CondaStore.store_directory = "/var/lib/conda-store/" c.CondaStore.environment_directory = "/opt/conda-store/envs/" # c.CondaStore.database_url = "mysql+pymysql://admin:password@mysql/conda-store" -c.CondaStore.database_url = "postgresql+psycopg2://postgres:password@postgres/conda-store" +c.CondaStore.database_url = "sqlite:///conda-store.sqlite" c.CondaStore.redis_url = "redis://:password@redis:6379/0" c.CondaStore.default_uid = 1000 c.CondaStore.default_gid = 1000 diff --git a/examples/standalone/docker-compose.yaml b/examples/standalone/docker-compose.yaml index f0994b4f4..e2e16f8f0 100644 --- a/examples/standalone/docker-compose.yaml +++ b/examples/standalone/docker-compose.yaml @@ -13,28 +13,12 @@ services: volumes: - conda_store_data:/var/lib/conda-store/ - conda-store-worker: - build: ../../conda-store-server - user: 1000:1000 - volumes: - - conda_store_data:/var/lib/conda-store/ - - 
./conda_store_config.py:/opt/conda_store/conda_store_config.py:ro - depends_on: - initializer: - condition: service_completed_successfully - conda-store-server: - condition: service_healthy - platform: linux/amd64 - command: ['conda-store-worker', '--config', '/opt/conda_store/conda_store_config.py'] - conda-store-server: build: ../../conda-store-server user: 1000:1000 depends_on: initializer: condition: service_completed_successfully - postgres: - condition: service_healthy redis: condition: service_healthy volumes: @@ -46,24 +30,10 @@ services: timeout: 5s retries: 5 platform: linux/amd64 - command: ['conda-store-server', '--config', '/opt/conda_store/conda_store_config.py'] + command: ['conda-store-server', '--config', '/opt/conda_store/conda_store_config.py', '--standalone=true'] ports: - "5000:5000" - postgres: - image: postgres:13 - user: postgres - ports: - - 5432:5432 - healthcheck: - test: ["CMD-SHELL", "pg_isready"] - interval: 10s - timeout: 5s - retries: 5 - environment: - POSTGRES_PASSWORD: password - POSTGRES_DB: conda-store - redis: image: bitnami/redis healthcheck: From e8a4b03d82d3a5a0904cbec2002e3b29152ffba8 Mon Sep 17 00:00:00 2001 From: Chris Ostrouchov Date: Thu, 11 Aug 2022 15:22:03 -0400 Subject: [PATCH 4/6] Black and Flake8 formatting --- .../versions/48be4072fe58_initial_schema.py | 125 ++++++++++-------- ...adding_container_registry_value_to_enum.py | 6 +- ...d63a091aff8_add_environment_description.py | 8 +- .../conda_store_server/server/app.py | 4 +- 4 files changed, 83 insertions(+), 60 deletions(-) diff --git a/conda-store-server/alembic/versions/48be4072fe58_initial_schema.py b/conda-store-server/alembic/versions/48be4072fe58_initial_schema.py index 262d69dc8..373d704ce 100644 --- a/conda-store-server/alembic/versions/48be4072fe58_initial_schema.py +++ b/conda-store-server/alembic/versions/48be4072fe58_initial_schema.py @@ -36,7 +36,7 @@ def upgrade(): "namespace", sa.Column("id", sa.Integer(), nullable=False, primary_key=True), 
sa.Column("name", sa.Unicode(length=255), nullable=True, unique=True), - sa.Column("deleted_on", sa.DateTime(), nullable=True) + sa.Column("deleted_on", sa.DateTime(), nullable=True), ) op.create_table( @@ -66,7 +66,7 @@ def upgrade(): sa.Column("timestamp", sa.BigInteger(), nullable=True), sa.Column("version", sa.Unicode(length=64), nullable=False), sa.Column("summary", sa.Text(), nullable=True), - sa.Column("description", sa.Text(), nullable=True) + sa.Column("description", sa.Text(), nullable=True), ) op.create_table( @@ -75,7 +75,7 @@ def upgrade(): sa.Column("namespace_id", sa.Integer(), nullable=False), sa.Column("name", sa.Unicode(length=255), nullable=False), sa.Column("current_build_id", sa.Integer(), nullable=True), - sa.Column("deleted_on", sa.DateTime(), nullable=True) + sa.Column("deleted_on", sa.DateTime(), nullable=True), ) op.create_table( @@ -84,7 +84,7 @@ def upgrade(): sa.Column("specification_id", sa.Integer(), nullable=False), sa.Column("scheduled_on", sa.DateTime(), nullable=True), sa.Column("started_on", sa.DateTime(), nullable=True), - sa.Column("ended_on", sa.DateTime(), nullable=True) + sa.Column("ended_on", sa.DateTime(), nullable=True), ) op.create_table( @@ -101,13 +101,13 @@ def upgrade(): sa.Column("scheduled_on", sa.DateTime(), nullable=True), sa.Column("started_on", sa.DateTime(), nullable=True), sa.Column("ended_on", sa.DateTime(), nullable=True), - sa.Column("deleted_on", sa.DateTime(), nullable=True) + sa.Column("deleted_on", sa.DateTime(), nullable=True), ) op.create_table( "solve_conda_package", sa.Column("solve_id", sa.Integer(), nullable=False, primary_key=True), - sa.Column("conda_package_id", sa.Integer(), nullable=False) + sa.Column("conda_package_id", sa.Integer(), nullable=False, primary_key=True), ) op.create_table( @@ -128,76 +128,97 @@ def upgrade(): ), nullable=False, ), - sa.Column("key", sa.Unicode(length=255), nullable=True) + sa.Column("key", sa.Unicode(length=255), nullable=True), ) op.create_table( 
"build_conda_package", sa.Column("build_id", sa.Integer(), nullable=False, primary_key=True), - sa.Column("conda_package_id", sa.Integer(), nullable=False) + sa.Column("conda_package_id", sa.Integer(), nullable=False, primary_key=True), ) - with op.batch_alter_table("conda_package", table_args=[ - sa.ForeignKeyConstraint( - ["channel_id"], - ["conda_channel.id"], - ), - sa.UniqueConstraint( - "channel_id", - "subdir", - "name", - "version", - "build", - "build_number", - "sha256", - name="_conda_package_uc", - ), - ]) as batch_op: + with op.batch_alter_table( + "conda_package", + table_args=[ + sa.ForeignKeyConstraint( + ["channel_id"], + ["conda_channel.id"], + ), + sa.UniqueConstraint( + "channel_id", + "subdir", + "name", + "version", + "build", + "build_number", + "sha256", + name="_conda_package_uc", + ), + ], + ): pass - with op.batch_alter_table("environment", table_args=[ - sa.ForeignKeyConstraint(['current_build_id'], ['build.id']), + with op.batch_alter_table( + "environment", + table_args=[ + sa.ForeignKeyConstraint(["current_build_id"], ["build.id"]), sa.UniqueConstraint("namespace_id", "name", name="_namespace_name_uc"), - sa.ForeignKeyConstraint(["namespace_id"], ["namespace.id"]) - ]) as batch_op: + sa.ForeignKeyConstraint(["namespace_id"], ["namespace.id"]), + ], + ): pass - with op.batch_alter_table("solve", table_args=[ - sa.ForeignKeyConstraint( - ["specification_id"], - ["specification.id"], - ), - ]) as batch_op: + with op.batch_alter_table( + "solve", + table_args=[ + sa.ForeignKeyConstraint( + ["specification_id"], + ["specification.id"], + ), + ], + ): pass - with op.batch_alter_table("build", table_args=[ - sa.ForeignKeyConstraint(["environment_id"], ["environment.id"]), - sa.ForeignKeyConstraint(["specification_id"], ["specification.id"]), - ]) as batch_op: + with op.batch_alter_table( + "build", + table_args=[ + sa.ForeignKeyConstraint(["environment_id"], ["environment.id"]), + sa.ForeignKeyConstraint(["specification_id"], 
["specification.id"]), + ], + ): pass - with op.batch_alter_table("solve_conda_package", table_args=[ + with op.batch_alter_table( + "solve_conda_package", + table_args=[ sa.ForeignKeyConstraint( ["conda_package_id"], ["conda_package.id"], ondelete="CASCADE" ), sa.ForeignKeyConstraint(["solve_id"], ["solve.id"], ondelete="CASCADE"), - ]) as batch_op: + ], + ): pass - with op.batch_alter_table("build_artifact", table_args=[ - sa.ForeignKeyConstraint( - ["build_id"], - ["build.id"], - ), - ]) as batch_op: + with op.batch_alter_table( + "build_artifact", + table_args=[ + sa.ForeignKeyConstraint( + ["build_id"], + ["build.id"], + ), + ], + ): pass - with op.batch_alter_table("build_conda_package", table_args=[ - sa.ForeignKeyConstraint(["build_id"], ["build.id"], ondelete="CASCADE"), - sa.ForeignKeyConstraint( - ["conda_package_id"], ["conda_package.id"], ondelete="CASCADE" - ), - ]) as batch_op: + with op.batch_alter_table( + "build_conda_package", + table_args=[ + sa.ForeignKeyConstraint(["build_id"], ["build.id"], ondelete="CASCADE"), + sa.ForeignKeyConstraint( + ["conda_package_id"], ["conda_package.id"], ondelete="CASCADE" + ), + ], + ): pass diff --git a/conda-store-server/alembic/versions/5ad723de2abd_adding_container_registry_value_to_enum.py b/conda-store-server/alembic/versions/5ad723de2abd_adding_container_registry_value_to_enum.py index c9e6b7fd7..bec054abd 100644 --- a/conda-store-server/alembic/versions/5ad723de2abd_adding_container_registry_value_to_enum.py +++ b/conda-store-server/alembic/versions/5ad723de2abd_adding_container_registry_value_to_enum.py @@ -10,8 +10,8 @@ # revision identifiers, used by Alembic. 
-revision = '5ad723de2abd' -down_revision = '8d63a091aff8' +revision = "5ad723de2abd" +down_revision = "8d63a091aff8" branch_labels = None depends_on = None @@ -41,7 +41,7 @@ def upgrade(): ) with op.batch_alter_table("build_artifact") as batch_op: - batch_op.alter_column('artifact_type', type_=new_type, existing_type=old_type) + batch_op.alter_column("artifact_type", type_=new_type, existing_type=old_type) # context = op.get_context() # if context.get_impl().bind.dialect.name == "postgresql": diff --git a/conda-store-server/alembic/versions/8d63a091aff8_add_environment_description.py b/conda-store-server/alembic/versions/8d63a091aff8_add_environment_description.py index b3d8836fc..3f848a0d8 100644 --- a/conda-store-server/alembic/versions/8d63a091aff8_add_environment_description.py +++ b/conda-store-server/alembic/versions/8d63a091aff8_add_environment_description.py @@ -10,17 +10,17 @@ # revision identifiers, used by Alembic. -revision = '8d63a091aff8' -down_revision = '48be4072fe58' +revision = "8d63a091aff8" +down_revision = "48be4072fe58" branch_labels = None depends_on = None def upgrade(): with op.batch_alter_table("environment") as batch_op: - batch_op.add_column(sa.Column('description', sa.UnicodeText(), nullable=True)) + batch_op.add_column(sa.Column("description", sa.UnicodeText(), nullable=True)) def downgrade(): with op.batch_alter_table("environment") as batch_op: - batch_op.drop_column('description') + batch_op.drop_column("description") diff --git a/conda-store-server/conda_store_server/server/app.py b/conda-store-server/conda_store_server/server/app.py index 651c7402e..678a2cc51 100644 --- a/conda-store-server/conda_store_server/server/app.py +++ b/conda-store-server/conda_store_server/server/app.py @@ -258,9 +258,11 @@ def redirect_home(request: Request): # start worker if in standalone mode if self.standalone: import multiprocessing - multiprocessing.set_start_method('spawn') + + multiprocessing.set_start_method("spawn") from 
conda_store_server.worker.app import CondaStoreWorker + process = multiprocessing.Process(target=CondaStoreWorker.launch_instance) process.start() From cce794c4495953b4598308a6cb933bb22cf08dde Mon Sep 17 00:00:00 2001 From: Chris Ostrouchov Date: Thu, 11 Aug 2022 16:47:58 -0400 Subject: [PATCH 5/6] Removing commented out code --- .../5ad723de2abd_adding_container_registry_value_to_enum.py | 5 ----- 1 file changed, 5 deletions(-) diff --git a/conda-store-server/alembic/versions/5ad723de2abd_adding_container_registry_value_to_enum.py b/conda-store-server/alembic/versions/5ad723de2abd_adding_container_registry_value_to_enum.py index bec054abd..9ae851e0e 100644 --- a/conda-store-server/alembic/versions/5ad723de2abd_adding_container_registry_value_to_enum.py +++ b/conda-store-server/alembic/versions/5ad723de2abd_adding_container_registry_value_to_enum.py @@ -43,11 +43,6 @@ def upgrade(): with op.batch_alter_table("build_artifact") as batch_op: batch_op.alter_column("artifact_type", type_=new_type, existing_type=old_type) - # context = op.get_context() - # if context.get_impl().bind.dialect.name == "postgresql": - # with context.autocommit_block(): - # op.execute("ALTER TYPE buildartifacttype ADD VALUE 'CONTAINER_REGISTRY'") - def downgrade(): # harmless to keep extra enum around From e659b8a48d0b14f4de454553323e82fea26a4df1 Mon Sep 17 00:00:00 2001 From: Chris Ostrouchov Date: Tue, 16 Aug 2022 17:03:48 -0400 Subject: [PATCH 6/6] Adding documentation --- conda-store-server/conda_store_server/app.py | 20 ++++++++------ docs/administration.md | 20 +++++++------- docs/contributing.md | 28 ++++++++++++++++++++ docs/installation.md | 7 +++++ examples/standalone/conda_store_config.py | 2 -- examples/standalone/docker-compose.yaml | 12 --------- 6 files changed, 58 insertions(+), 31 deletions(-) diff --git a/conda-store-server/conda_store_server/app.py b/conda-store-server/conda_store_server/app.py index 8ee2b54b6..653e73dbd 100644 --- 
a/conda-store-server/conda_store_server/app.py +++ b/conda-store-server/conda_store_server/app.py @@ -1,7 +1,6 @@ import os import datetime -import redis from celery import Celery, group from traitlets import ( Type, @@ -180,18 +179,17 @@ class CondaStore(LoggingConfigurable): ) redis_url = Unicode( + None, help="Redis connection url in form 'redis://:@:/0'. Connection is used by Celery along with conda-store internally", config=True, + allow_none=True, ) - @default("redis_url") - def _default_redis(self): - raise TraitError("c.CondaStore.redis_url Redis connection url is required") - @validate("redis_url") def _check_redis(self, proposal): try: - self.redis.ping() + if self.redis_url is not None: + self.redis.ping() except Exception: raise TraitError( f'c.CondaStore.redis_url unable to connect with Redis database at "{self.redis_url}"' @@ -236,7 +234,9 @@ def _check_redis(self, proposal): @default("celery_broker_url") def _default_celery_broker_url(self): - return self.redis_url + if self.redis_url is not None: + return self.redis_url + return f"sqla+{self.database_url}" celery_results_backend = Unicode( help="backend to use for celery task results", @@ -245,7 +245,9 @@ def _default_celery_broker_url(self): @default("celery_results_backend") def _default_celery_results_backend(self): - return self.redis_url + if self.redis_url is not None: + return self.redis_url + return f"db+{self.database_url}" default_namespace = Unicode( "default", help="default namespace for conda-store", config=True @@ -309,6 +311,8 @@ def db(self): @property def redis(self): + import redis + if hasattr(self, "_redis"): return self._redis self._redis = redis.Redis.from_url(self.redis_url) diff --git a/docs/administration.md b/docs/administration.md index edbf95a5a..eedcc6f53 100644 --- a/docs/administration.md +++ b/docs/administration.md @@ -154,10 +154,9 @@ docs](https://docs.sqlalchemy.org/en/14/core/engines.html) for connecting to your specific database. 
conda-store will automatically create the tables if they do not already exist. -`CondaStore.redis_url` is a required argument to a running Redis -instance. This became a dependency as of release `0.4.1` due to the -massive advantages of features that conda-store can provide with this -dependency. See +`CondaStore.redis_url` is an optional argument to a running Redis +instance. This was removed as a dependency as of release `0.4.10` due +to the need to have a simple deployment option for conda-store. See [documentation](https://github.com/redis/redis-py/#connecting-to-redis) for proper specification. This url is used by default for the Celery broker and results backend. @@ -166,9 +165,10 @@ broker and results backend. celery. Celery supports a [wide range of brokers](https://docs.celeryproject.org/en/stable/getting-started/backends-and-brokers/index.html) each with different guarantees. By default the Redis based broker is -used. It is production ready and has worked well in practice. The url -must be provided in a format that celery understands. The default -value is `CondaStore.redis_url`. +used if a `CondaStore.redis_url` is provided, otherwise defaults to +sqlalchemy. It is production ready and has worked well in +practice. The url must be provided in a format that celery +understands. The default value is `CondaStore.redis_url`. `CondaStore.build_artifacts` is the list of artifacts for conda-store to build. By default it is all the artifacts that conda-store is @@ -187,8 +187,10 @@ the given build. `CondaStore.celery_results_backend` is the backend to use for storing all results from celery task execution. conda-store currently does not leverage the backend results but it may be needed for future work -using celery. The backend defaults to using the Redis backend. This -choice works great in production. Please consult the [celery docs on +using celery.
The backend defaults to using the Redis backend if +`CondaStore.redis_url` is specified, otherwise uses the +`CondaStore.database_url`. This choice works great in +production. Please consult the [celery docs on backend](https://docs.celeryproject.org/en/stable/getting-started/backends-and-brokers/index.html). `CondaStore.default_namespace` is the default namespace for diff --git a/docs/contributing.md b/docs/contributing.md index 0b96beba4..484dfeabd 100644 --- a/docs/contributing.md +++ b/docs/contributing.md @@ -8,6 +8,13 @@ lowercase when beginning a sentence. ## Development +Significant effort has been put into simplifying the development and +deployment process of `conda-store`. There is a docker based +development workflow along with a non-containerized workflow if you +are using Linux. + +### Containerized development + Install the following dependencies before developing on conda-store. - [docker](https://docs.docker.com/engine/install/) @@ -44,6 +51,27 @@ docker-compose down # not always necessary docker-compose up --build ``` +### Linux development + +Install the following dependencies before developing on conda-store. + + - [conda](https://docs.conda.io/projects/conda/en/latest/user-guide/install/linux.html) + +Install the development dependencies and activate the environment. + +```shell +conda env create -f conda-store-server/environment-dev.yaml +conda activate conda-store-server-dev +``` + +Running `conda-store`. `--standalone` mode launches celery as a +subprocess of the web server. +```shell +python -m conda_store_server.server --standalone tests/assets/conda_store_standalone_config.py +``` + +Visit [localhost:5000](http://localhost:5000/) + ### Changes to API The REST API is considered somewhat stable.
If any changes are made to diff --git a/docs/installation.md b/docs/installation.md index 36a93c70e..7d5bfb684 100644 --- a/docs/installation.md +++ b/docs/installation.md @@ -1,5 +1,12 @@ # Installation +## Linux + +```shell +conda install "conda-store-server>=0.4.10" +conda-store-server --standalone --config tests/assets/conda_store_standalone_config.py +``` + ## Kubernetes ![conda-store Kubernetes architecture diagram](_static/images/conda-store-installation-kubernetes.png) diff --git a/examples/standalone/conda_store_config.py b/examples/standalone/conda_store_config.py index c9fec7910..6d351b85f 100644 --- a/examples/standalone/conda_store_config.py +++ b/examples/standalone/conda_store_config.py @@ -9,9 +9,7 @@ c.CondaStore.storage_class = LocalStorage c.CondaStore.store_directory = "/var/lib/conda-store/" c.CondaStore.environment_directory = "/opt/conda-store/envs/" -# c.CondaStore.database_url = "mysql+pymysql://admin:password@mysql/conda-store" c.CondaStore.database_url = "sqlite:///conda-store.sqlite" -c.CondaStore.redis_url = "redis://:password@redis:6379/0" c.CondaStore.default_uid = 1000 c.CondaStore.default_gid = 1000 c.CondaStore.default_permissions = "775" diff --git a/examples/standalone/docker-compose.yaml b/examples/standalone/docker-compose.yaml index e2e16f8f0..13bd22000 100644 --- a/examples/standalone/docker-compose.yaml +++ b/examples/standalone/docker-compose.yaml @@ -19,8 +19,6 @@ services: depends_on: initializer: condition: service_completed_successfully - redis: - condition: service_healthy volumes: - conda_store_data:/var/lib/conda-store/ - ./conda_store_config.py:/opt/conda_store/conda_store_config.py:ro @@ -33,13 +31,3 @@ services: command: ['conda-store-server', '--config', '/opt/conda_store/conda_store_config.py', '--standalone=true'] ports: - "5000:5000" - - redis: - image: bitnami/redis - healthcheck: - test: ["CMD", "redis-cli","ping"] - interval: 10s - timeout: 5s - retries: 5 - environment: - REDIS_PASSWORD: password