diff --git a/conda-store-server/alembic/versions/48be4072fe58_initial_schema.py b/conda-store-server/alembic/versions/48be4072fe58_initial_schema.py index de5ec790c..373d704ce 100644 --- a/conda-store-server/alembic/versions/48be4072fe58_initial_schema.py +++ b/conda-store-server/alembic/versions/48be4072fe58_initial_schema.py @@ -17,44 +17,40 @@ def upgrade(): - # ### commands auto generated by Alembic - please adjust! ### op.create_table( "conda_channel", - sa.Column("id", sa.Integer(), nullable=False), - sa.Column("name", sa.Unicode(length=255), nullable=False), + sa.Column("id", sa.Integer(), nullable=False, primary_key=True), + sa.Column("name", sa.Unicode(length=255), nullable=False, unique=True), sa.Column("last_update", sa.DateTime(), nullable=True), - sa.PrimaryKeyConstraint("id"), - sa.UniqueConstraint("name"), ) + op.create_table( "conda_store_configuration", - sa.Column("id", sa.Integer(), nullable=False), + sa.Column("id", sa.Integer(), nullable=False, primary_key=True), sa.Column("disk_usage", sa.BigInteger(), nullable=True), sa.Column("free_storage", sa.BigInteger(), nullable=True), sa.Column("total_storage", sa.BigInteger(), nullable=True), - sa.PrimaryKeyConstraint("id"), ) + op.create_table( "namespace", - sa.Column("id", sa.Integer(), nullable=False), - sa.Column("name", sa.Unicode(length=255), nullable=True), + sa.Column("id", sa.Integer(), nullable=False, primary_key=True), + sa.Column("name", sa.Unicode(length=255), nullable=True, unique=True), sa.Column("deleted_on", sa.DateTime(), nullable=True), - sa.PrimaryKeyConstraint("id"), - sa.UniqueConstraint("name"), ) + op.create_table( "specification", - sa.Column("id", sa.Integer(), nullable=False), + sa.Column("id", sa.Integer(), nullable=False, primary_key=True), sa.Column("name", sa.Unicode(length=255), nullable=False), sa.Column("spec", sa.JSON(), nullable=False), - sa.Column("sha256", sa.Unicode(length=255), nullable=False), + sa.Column("sha256", sa.Unicode(length=255), nullable=False, 
unique=True), sa.Column("created_on", sa.DateTime(), nullable=True), - sa.PrimaryKeyConstraint("id"), - sa.UniqueConstraint("sha256"), ) + op.create_table( "conda_package", - sa.Column("id", sa.Integer(), nullable=False), + sa.Column("id", sa.Integer(), nullable=False, primary_key=True), sa.Column("channel_id", sa.Integer(), nullable=True), sa.Column("build", sa.Unicode(length=64), nullable=False), sa.Column("build_number", sa.Integer(), nullable=False), @@ -71,54 +67,29 @@ def upgrade(): sa.Column("version", sa.Unicode(length=64), nullable=False), sa.Column("summary", sa.Text(), nullable=True), sa.Column("description", sa.Text(), nullable=True), - sa.ForeignKeyConstraint( - ["channel_id"], - ["conda_channel.id"], - ), - sa.PrimaryKeyConstraint("id"), - sa.UniqueConstraint( - "channel_id", - "subdir", - "name", - "version", - "build", - "build_number", - "sha256", - name="_conda_package_uc", - ), ) + op.create_table( "environment", - sa.Column("id", sa.Integer(), nullable=False), + sa.Column("id", sa.Integer(), nullable=False, primary_key=True), sa.Column("namespace_id", sa.Integer(), nullable=False), sa.Column("name", sa.Unicode(length=255), nullable=False), sa.Column("current_build_id", sa.Integer(), nullable=True), sa.Column("deleted_on", sa.DateTime(), nullable=True), - # sa.ForeignKeyConstraint(['current_build_id'], ['build.id'], use_alter=True), - sa.ForeignKeyConstraint( - ["namespace_id"], - ["namespace.id"], - ), - sa.PrimaryKeyConstraint("id"), - sa.UniqueConstraint("namespace_id", "name", name="_namespace_name_uc"), ) op.create_table( "solve", - sa.Column("id", sa.Integer(), nullable=False), + sa.Column("id", sa.Integer(), nullable=False, primary_key=True), sa.Column("specification_id", sa.Integer(), nullable=False), sa.Column("scheduled_on", sa.DateTime(), nullable=True), sa.Column("started_on", sa.DateTime(), nullable=True), sa.Column("ended_on", sa.DateTime(), nullable=True), - sa.ForeignKeyConstraint( - ["specification_id"], - ["specification.id"], - 
), - sa.PrimaryKeyConstraint("id"), ) + op.create_table( "build", - sa.Column("id", sa.Integer(), nullable=False), + sa.Column("id", sa.Integer(), nullable=False, primary_key=True), sa.Column("specification_id", sa.Integer(), nullable=False), sa.Column("environment_id", sa.Integer(), nullable=False), sa.Column( @@ -131,37 +102,17 @@ def upgrade(): sa.Column("started_on", sa.DateTime(), nullable=True), sa.Column("ended_on", sa.DateTime(), nullable=True), sa.Column("deleted_on", sa.DateTime(), nullable=True), - sa.ForeignKeyConstraint( - ["environment_id"], - ["environment.id"], - ), - sa.ForeignKeyConstraint( - ["specification_id"], - ["specification.id"], - ), - sa.PrimaryKeyConstraint("id"), - ) - op.create_foreign_key( - constraint_name="fk_environment_current_build_id_build_id", - source_table="environment", - referent_table="build", - local_cols=["current_build_id"], - remote_cols=["id"], ) op.create_table( "solve_conda_package", - sa.Column("solve_id", sa.Integer(), nullable=False), - sa.Column("conda_package_id", sa.Integer(), nullable=False), - sa.ForeignKeyConstraint( - ["conda_package_id"], ["conda_package.id"], ondelete="CASCADE" - ), - sa.ForeignKeyConstraint(["solve_id"], ["solve.id"], ondelete="CASCADE"), - sa.PrimaryKeyConstraint("solve_id", "conda_package_id"), + sa.Column("solve_id", sa.Integer(), nullable=False, primary_key=True), + sa.Column("conda_package_id", sa.Integer(), nullable=False, primary_key=True), ) + op.create_table( "build_artifact", - sa.Column("id", sa.Integer(), nullable=False), + sa.Column("id", sa.Integer(), nullable=False, primary_key=True), sa.Column("build_id", sa.Integer(), nullable=True), sa.Column( "artifact_type", @@ -178,23 +129,97 @@ def upgrade(): nullable=False, ), sa.Column("key", sa.Unicode(length=255), nullable=True), - sa.ForeignKeyConstraint( - ["build_id"], - ["build.id"], - ), - sa.PrimaryKeyConstraint("id"), ) + op.create_table( "build_conda_package", - sa.Column("build_id", sa.Integer(), nullable=False), - 
sa.Column("conda_package_id", sa.Integer(), nullable=False), - sa.ForeignKeyConstraint(["build_id"], ["build.id"], ondelete="CASCADE"), - sa.ForeignKeyConstraint( - ["conda_package_id"], ["conda_package.id"], ondelete="CASCADE" - ), - sa.PrimaryKeyConstraint("build_id", "conda_package_id"), + sa.Column("build_id", sa.Integer(), nullable=False, primary_key=True), + sa.Column("conda_package_id", sa.Integer(), nullable=False, primary_key=True), ) - # ### end Alembic commands ### + + with op.batch_alter_table( + "conda_package", + table_args=[ + sa.ForeignKeyConstraint( + ["channel_id"], + ["conda_channel.id"], + ), + sa.UniqueConstraint( + "channel_id", + "subdir", + "name", + "version", + "build", + "build_number", + "sha256", + name="_conda_package_uc", + ), + ], + ): + pass + + with op.batch_alter_table( + "environment", + table_args=[ + sa.ForeignKeyConstraint(["current_build_id"], ["build.id"]), + sa.UniqueConstraint("namespace_id", "name", name="_namespace_name_uc"), + sa.ForeignKeyConstraint(["namespace_id"], ["namespace.id"]), + ], + ): + pass + + with op.batch_alter_table( + "solve", + table_args=[ + sa.ForeignKeyConstraint( + ["specification_id"], + ["specification.id"], + ), + ], + ): + pass + + with op.batch_alter_table( + "build", + table_args=[ + sa.ForeignKeyConstraint(["environment_id"], ["environment.id"]), + sa.ForeignKeyConstraint(["specification_id"], ["specification.id"]), + ], + ): + pass + + with op.batch_alter_table( + "solve_conda_package", + table_args=[ + sa.ForeignKeyConstraint( + ["conda_package_id"], ["conda_package.id"], ondelete="CASCADE" + ), + sa.ForeignKeyConstraint(["solve_id"], ["solve.id"], ondelete="CASCADE"), + ], + ): + pass + + with op.batch_alter_table( + "build_artifact", + table_args=[ + sa.ForeignKeyConstraint( + ["build_id"], + ["build.id"], + ), + ], + ): + pass + + with op.batch_alter_table( + "build_conda_package", + table_args=[ + sa.ForeignKeyConstraint(["build_id"], ["build.id"], ondelete="CASCADE"), + 
sa.ForeignKeyConstraint( + ["conda_package_id"], ["conda_package.id"], ondelete="CASCADE" + ), + ], + ): + pass def downgrade(): diff --git a/conda-store-server/alembic/versions/5ad723de2abd_adding_container_registry_value_to_enum.py b/conda-store-server/alembic/versions/5ad723de2abd_adding_container_registry_value_to_enum.py index ea7ec664a..9ae851e0e 100644 --- a/conda-store-server/alembic/versions/5ad723de2abd_adding_container_registry_value_to_enum.py +++ b/conda-store-server/alembic/versions/5ad723de2abd_adding_container_registry_value_to_enum.py @@ -6,18 +6,42 @@ """ from alembic import op +import sqlalchemy as sa # revision identifiers, used by Alembic. -revision = '5ad723de2abd' -down_revision = '8d63a091aff8' +revision = "5ad723de2abd" +down_revision = "8d63a091aff8" branch_labels = None depends_on = None def upgrade(): - with op.get_context().autocommit_block(): - op.execute("ALTER TYPE buildartifacttype ADD VALUE 'CONTAINER_REGISTRY'") + old_type = sa.Enum( + "DIRECTORY", + "LOCKFILE", + "LOGS", + "YAML", + "CONDA_PACK", + "DOCKER_BLOB", + "DOCKER_MANIFEST", + name="buildartifacttype", + ) + + new_type = sa.Enum( + "DIRECTORY", + "LOCKFILE", + "LOGS", + "YAML", + "CONDA_PACK", + "DOCKER_BLOB", + "DOCKER_MANIFEST", + "CONTAINER_REGISTRY", + name="buildartifacttype", + ) + + with op.batch_alter_table("build_artifact") as batch_op: + batch_op.alter_column("artifact_type", type_=new_type, existing_type=old_type) def downgrade(): diff --git a/conda-store-server/alembic/versions/8d63a091aff8_add_environment_description.py b/conda-store-server/alembic/versions/8d63a091aff8_add_environment_description.py index f54766105..3f848a0d8 100644 --- a/conda-store-server/alembic/versions/8d63a091aff8_add_environment_description.py +++ b/conda-store-server/alembic/versions/8d63a091aff8_add_environment_description.py @@ -10,19 +10,17 @@ # revision identifiers, used by Alembic. 
-revision = '8d63a091aff8' -down_revision = '48be4072fe58' +revision = "8d63a091aff8" +down_revision = "48be4072fe58" branch_labels = None depends_on = None def upgrade(): - # ### commands auto generated by Alembic - please adjust! ### - op.add_column('environment', sa.Column('description', sa.UnicodeText(), nullable=True)) - # ### end Alembic commands ### + with op.batch_alter_table("environment") as batch_op: + batch_op.add_column(sa.Column("description", sa.UnicodeText(), nullable=True)) def downgrade(): - # ### commands auto generated by Alembic - please adjust! ### - op.drop_column('environment', 'description') - # ### end Alembic commands ### + with op.batch_alter_table("environment") as batch_op: + batch_op.drop_column("description") diff --git a/conda-store-server/conda_store_server/app.py b/conda-store-server/conda_store_server/app.py index a42c04238..653e73dbd 100644 --- a/conda-store-server/conda_store_server/app.py +++ b/conda-store-server/conda_store_server/app.py @@ -1,7 +1,6 @@ import os import datetime -import redis from celery import Celery, group from traitlets import ( Type, @@ -180,18 +179,17 @@ class CondaStore(LoggingConfigurable): ) redis_url = Unicode( + None, help="Redis connection url in form 'redis://:@:/0'. 
Connection is used by Celery along with conda-store internally", config=True, + allow_none=True, ) - @default("redis_url") - def _default_redis(self): - raise TraitError("c.CondaStore.redis_url Redis connection url is required") - @validate("redis_url") def _check_redis(self, proposal): try: - self.redis.ping() + if self.redis_url is not None: + self.redis.ping() except Exception: raise TraitError( f'c.CondaStore.redis_url unable to connect with Redis database at "{self.redis_url}"' @@ -236,7 +234,9 @@ def _check_redis(self, proposal): @default("celery_broker_url") def _default_celery_broker_url(self): - return self.redis_url + if self.redis_url is not None: + return self.redis_url + return f"sqla+{self.database_url}" celery_results_backend = Unicode( help="backend to use for celery task results", @@ -245,7 +245,9 @@ def _default_celery_broker_url(self): @default("celery_results_backend") def _default_celery_results_backend(self): - return self.redis_url + if self.redis_url is not None: + return self.redis_url + return f"db+{self.database_url}" default_namespace = Unicode( "default", help="default namespace for conda-store", config=True @@ -309,6 +311,8 @@ def db(self): @property def redis(self): + import redis + if hasattr(self, "_redis"): return self._redis self._redis = redis.Redis.from_url(self.redis_url) @@ -323,6 +327,10 @@ def storage(self): if hasattr(self, "_storage"): return self._storage self._storage = self.storage_class(parent=self, log=self.log) + + if isinstance(self._storage, storage.LocalStorage): + os.makedirs(self._storage.storage_path, exist_ok=True) + return self._storage @property diff --git a/conda-store-server/conda_store_server/orm.py b/conda-store-server/conda_store_server/orm.py index 8b126e4b9..a3868de86 100644 --- a/conda-store-server/conda_store_server/orm.py +++ b/conda-store-server/conda_store_server/orm.py @@ -270,7 +270,7 @@ class Environment(Base): name = Column(Unicode(255), nullable=False) - current_build_id = Column(Integer, 
ForeignKey("build.id", use_alter=True)) + current_build_id = Column(Integer, ForeignKey("build.id")) current_build = relationship( Build, foreign_keys=[current_build_id], post_update=True ) diff --git a/conda-store-server/conda_store_server/server/app.py b/conda-store-server/conda_store_server/server/app.py index 4da98cf0e..678a2cc51 100644 --- a/conda-store-server/conda_store_server/server/app.py +++ b/conda-store-server/conda_store_server/server/app.py @@ -6,11 +6,13 @@ from fastapi import FastAPI, Request, HTTPException from fastapi.middleware.cors import CORSMiddleware from fastapi.responses import RedirectResponse, JSONResponse +from fastapi.staticfiles import StaticFiles from starlette.middleware.sessions import SessionMiddleware from fastapi.templating import Jinja2Templates from traitlets import Bool, Unicode, Integer, Type, validate, Instance, default, Dict from traitlets.config import Application, catch_config_error +from conda_store_server import storage from conda_store_server.server import auth, views from conda_store_server.app import CondaStore from conda_store_server import __version__ @@ -21,6 +23,7 @@ class CondaStoreServer(Application): aliases = { "config": "CondaStoreServer.config_file", + "standalone": "CondaStoreServer.standalone", } log_level = Integer( @@ -122,6 +125,12 @@ def _validate_config_file(self, proposal): 100, help="maximum number of items to return in a single page", config=True ) + standalone = Bool( + False, + help="Run application in standalone mode with workers running as subprocess", + config=True, + ) + @catch_config_error def initialize(self, *args, **kwargs): super().initialize(*args, **kwargs) @@ -133,6 +142,7 @@ def initialize(self, *args, **kwargs): dbutil.upgrade(self.config.CondaStore.database_url) self.authentication = self.authentication_class(parent=self, log=self.log) + # ensure checks on redis_url self.conda_store.redis_url @@ -227,6 +237,16 @@ def redirect_home(request: Request): 
prefix=trim_slash(self.url_prefix), ) + if isinstance(self.conda_store.storage, storage.LocalStorage): + self.conda_store.storage.storage_url = ( + f"{trim_slash(self.url_prefix)}/storage" + ) + app.mount( + self.conda_store.storage.storage_url, + StaticFiles(directory=self.conda_store.storage.storage_path), + name="static", + ) + self.conda_store.ensure_namespace() self.conda_store.ensure_conda_channels() @@ -235,13 +255,28 @@ def redirect_home(request: Request): from conda_store_server.worker import tasks # noqa - uvicorn.run( - app, - host=self.address, - port=self.port, - reload=False, - debug=(self.log_level == logging.DEBUG), - workers=1, - proxy_headers=self.behind_proxy, - forwarded_allow_ips=("*" if self.behind_proxy else None), - ) + # start worker if in standalone mode + if self.standalone: + import multiprocessing + + multiprocessing.set_start_method("spawn") + + from conda_store_server.worker.app import CondaStoreWorker + + process = multiprocessing.Process(target=CondaStoreWorker.launch_instance) + process.start() + + try: + uvicorn.run( + app, + host=self.address, + port=self.port, + reload=False, + debug=(self.log_level == logging.DEBUG), + workers=1, + proxy_headers=self.behind_proxy, + forwarded_allow_ips=("*" if self.behind_proxy else None), + ) + finally: + if self.standalone: + process.join() diff --git a/conda-store-server/conda_store_server/storage.py b/conda-store-server/conda_store_server/storage.py index c81a414e0..dfb2a7d5c 100644 --- a/conda-store-server/conda_store_server/storage.py +++ b/conda-store-server/conda_store_server/storage.py @@ -199,17 +199,17 @@ class LocalStorage(Storage): ) def fset(self, db, build_id, key, filename, content_type=None, artifact_type=None): - filename = os.path.join(self.storage_path, key) - os.makedirs(os.path.dirname(filename), exist_ok=True) + destination_filename = os.path.abspath(os.path.join(self.storage_path, key)) + os.makedirs(os.path.dirname(destination_filename), exist_ok=True) - 
shutil.copyfile(filename, os.path.join(self.storage_path, key)) + shutil.copyfile(filename, destination_filename) super().fset(db, build_id, key, filename, artifact_type) def set(self, db, build_id, key, value, content_type=None, artifact_type=None): - filename = os.path.join(self.storage_path, key) - os.makedirs(os.path.dirname(filename), exist_ok=True) + destination_filename = os.path.join(self.storage_path, key) + os.makedirs(os.path.dirname(destination_filename), exist_ok=True) - with open(filename, "wb") as f: + with open(destination_filename, "wb") as f: f.write(value) super().set(db, build_id, key, value, artifact_type) diff --git a/docs/administration.md b/docs/administration.md index edbf95a5a..eedcc6f53 100644 --- a/docs/administration.md +++ b/docs/administration.md @@ -154,10 +154,9 @@ docs](https://docs.sqlalchemy.org/en/14/core/engines.html) for connecting to your specific database. conda-store will automatically create the tables if they do not already exist. -`CondaStore.redis_url` is a required argument to a running Redis -instance. This became a dependency as of release `0.4.1` due to the -massive advantages of features that conda-store can provide with this -dependency. See +`CondaStore.redis_url` is an optional argument to a running Redis +instance. This was removed as a dependency as of release `0.4.10` due +to the need to have a simple deployment option for conda-store. See [documentation](https://github.com/redis/redis-py/#connecting-to-redis) for proper specification. This url is used by default for the Celery broker and results backend. @@ -166,9 +165,10 @@ broker and results backend. celery. Celery supports a [wide range of brokers](https://docs.celeryproject.org/en/stable/getting-started/backends-and-brokers/index.html) each with different guarantees. By default the Redis based broker is -used. It is production ready and has worked well in practice. The url -must be provided in a format that celery understands. 
The default -value is `CondaStore.redis_url`. +used if a `CondaStore.redis_url` is provided, otherwise defaults to +sqlalchemy. It is production ready and has worked well in +practice. The url must be provided in a format that celery +understands. The default value is `CondaStore.redis_url`. `CondaStore.build_artifacts` is the list of artifacts for conda-store to build. By default it is all the artifacts that conda-store is @@ -187,8 +187,10 @@ the given build. `CondaStore.celery_results_backend` is the backend to use for storing all results from celery task execution. conda-store currently does not leverage the backend results but it may be needed for future work -using celery. The backend defaults to using the Redis backend. This -choice works great in production. Please consult the [celery docs on +using celery. The backend defaults to using the Redis backend if +`CondaStore.redis_url` is specified, otherwise uses the +`CondaStore.database_url`. This choice works great in +production. Please consult the [celery docs on backend](https://docs.celeryproject.org/en/stable/getting-started/backends-and-brokers/index.html). `CondaStore.default_namespace` is the default namespace for diff --git a/docs/contributing.md b/docs/contributing.md index 0b96beba4..484dfeabd 100644 --- a/docs/contributing.md +++ b/docs/contributing.md @@ -8,6 +8,13 @@ lowercase when beginning a sentence. ## Development +Significant effort has been put into simplifying the development and +deployment process of `conda-store`. There is a docker based +development workflow along with a non-containerized workflow if you +are using Linux. + +### Containerized development + Install the following dependencies before developing on conda-store. - [docker](https://docs.docker.com/engine/install/) @@ -44,6 +51,27 @@ docker-compose down # not always necessary docker-compose up --build ``` +### Linux development + +Install the following dependencies before developing on conda-store.
+ + - [conda](https://docs.conda.io/projects/conda/en/latest/user-guide/install/linux.html) + +Install the development dependencies and activate the environment. + +```shell +conda env create -f conda-store-server/environment-dev.yaml +conda activate conda-store-server-dev +``` + +Running `conda-store`. `--standalone` mode launches celery as a +subprocess of the web server. +```shell +python -m conda_store_server.server --standalone=true --config tests/assets/conda_store_standalone_config.py +``` + +Visit [localhost:5000](http://localhost:5000/) + ### Changes to API The REST API is considered somewhat stable. If any changes are made to diff --git a/docs/installation.md b/docs/installation.md index 36a93c70e..7d5bfb684 100644 --- a/docs/installation.md +++ b/docs/installation.md @@ -1,5 +1,12 @@ # Installation +## Linux + +```shell +conda install "conda-store-server>=0.4.10" +conda-store-server --standalone=true --config tests/assets/conda_store_standalone_config.py +``` + ## Kubernetes ![conda-store Kubernetes architecture diagram](_static/images/conda-store-installation-kubernetes.png) diff --git a/examples/standalone/README.md b/examples/standalone/README.md new file mode 100644 index 000000000..0fb8ea198 --- /dev/null +++ b/examples/standalone/README.md @@ -0,0 +1,4 @@ +# Standalone + +A script for minimal configuration to run conda-store. The end goal is +that everything should run under a single container image.
diff --git a/examples/standalone/conda_store_config.py b/examples/standalone/conda_store_config.py new file mode 100644 index 000000000..6d351b85f --- /dev/null +++ b/examples/standalone/conda_store_config.py @@ -0,0 +1,69 @@ +import logging + +from conda_store_server.storage import LocalStorage +from conda_store_server.server.auth import DummyAuthentication + +# ================================== +# conda-store settings +# ================================== +c.CondaStore.storage_class = LocalStorage +c.CondaStore.store_directory = "/var/lib/conda-store/" +c.CondaStore.environment_directory = "/opt/conda-store/envs/" +c.CondaStore.database_url = "sqlite:///conda-store.sqlite" +c.CondaStore.default_uid = 1000 +c.CondaStore.default_gid = 1000 +c.CondaStore.default_permissions = "775" +c.CondaStore.conda_included_packages = [ + 'ipykernel' +] + +c.CondaStore.pypi_included_packages = [ + 'nothing' +] + + +# ================================== +# server settings +# ================================== +c.CondaStoreServer.log_level = logging.INFO +c.CondaStoreServer.enable_ui = True +c.CondaStoreServer.enable_api = True +c.CondaStoreServer.enable_registry = True +c.CondaStoreServer.enable_metrics = True +c.CondaStoreServer.address = "0.0.0.0" +c.CondaStoreServer.port = 5000 +# This MUST start with `/` +c.CondaStoreServer.url_prefix = "/conda-store" + + +# ================================== +# auth settings +# ================================== +c.CondaStoreServer.authentication_class = DummyAuthentication +c.CondaStoreServer.template_vars = { + "banner": '', + "logo": "https://quansight.com/_next/image?url=https%3A%2F%2Fa.storyblok.com%2Ff%2F147759%2F1076x520%2Fe6cd6af012%2Fquansight-logo-no-tagline.png&w=3840&q=75", +} + +# ================================== +# worker settings +# ================================== +c.CondaStoreWorker.log_level = logging.INFO +c.CondaStoreWorker.watch_paths = ["/opt/environments"] +c.CondaStoreWorker.concurrency = 4 + +# 
================================== # registry settings # ================================== # from python_docker.registry import Registry # import os # def _configure_docker_registry(registry_url: str): # return Registry( # "https://registry-1.docker.io", # username=os.environ.get('DOCKER_USERNAME'), # password=os.environ.get('DOCKER_PASSWORD')) # c.ContainerRegistry.container_registries = { # 'https://registry-1.docker.io': _configure_docker_registry # } diff --git a/examples/standalone/docker-compose.yaml b/examples/standalone/docker-compose.yaml new file mode 100644 index 000000000..13bd22000 --- /dev/null +++ b/examples/standalone/docker-compose.yaml @@ -0,0 +1,33 @@ +version: "3.8" + +volumes: + conda_store_data: + +services: + initializer: + image: alpine + restart: "no" + # hack to set permissions on volume + entrypoint: | + /bin/sh -c "chown -R 1000:1000 /var/lib/conda-store/" + volumes: + - conda_store_data:/var/lib/conda-store/ + + conda-store-server: + build: ../../conda-store-server + user: 1000:1000 + depends_on: + initializer: + condition: service_completed_successfully + volumes: + - conda_store_data:/var/lib/conda-store/ + - ./conda_store_config.py:/opt/conda_store/conda_store_config.py:ro + healthcheck: + test: ["CMD", "curl", "--fail", "http://localhost:5000/conda-store/api/v1/"] + interval: 10s + timeout: 5s + retries: 5 + platform: linux/amd64 + command: ['conda-store-server', '--config', '/opt/conda_store/conda_store_config.py', '--standalone=true'] + ports: + - "5000:5000"