diff --git a/.github/workflows/documentation.yaml b/.github/workflows/documentation.yaml index 962a883af..9f2843a2d 100644 --- a/.github/workflows/documentation.yaml +++ b/.github/workflows/documentation.yaml @@ -17,26 +17,6 @@ jobs: steps: - name: 'Checkout Infrastructure' uses: actions/checkout@v2 - - name: 'Add git safe directory' - run: | - git config --global --add safe.directory /github/workspace - - name: Documentation Quality Check - uses: errata-ai/vale-action@v1.4.0 - with: - files: '["docs", "README.md", "CHANGELOG.md"]' - onlyAnnotateModifiedLines: true - env: - GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - # # https://github.com/gaurav-nelson/github-action-markdown-link-check/issues/132 - # # Disabled until resolved - # - name: Broken link checker - # uses: gaurav-nelson/github-action-markdown-link-check@v1 - # with: - # config-file: 'tests/links-check/markdown.links.config.json' - # folder-path: 'docs/' - # check-modified-files-only: 'yes' - # file-extension: '.md' - # base-branch: 'main' - name: Set up Python uses: actions/setup-python@v2 with: diff --git a/.github/workflows/release.yaml b/.github/workflows/release.yaml index cc15deff8..981dd2d62 100644 --- a/.github/workflows/release.yaml +++ b/.github/workflows/release.yaml @@ -21,13 +21,13 @@ jobs: python-version: 3.8 - name: Install twine run: | - pip install twine + pip install twine build - name: Build and Distribute env: PYPI_USERNAME: ${{ secrets.PYPI_USERNAME }} PYPI_PASSWORD: ${{ secrets.PYPI_PASSWORD }} run: | - python setup.py sdist + python -m build --sdist twine upload -u $PYPI_USERNAME -p $PYPI_PASSWORD dist/* release-conda-store: @@ -45,14 +45,14 @@ jobs: python-version: 3.8 - name: Install twine and jupyter-packaging run: | - pip install twine + pip install twine build - name: Build and Distribute env: PYPI_USERNAME: ${{ secrets.PYPI_USERNAME }} PYPI_PASSWORD: ${{ secrets.PYPI_PASSWORD }} shell: bash -l {0} run: | - python setup.py sdist + python -m build --sdist twine upload 
-u $PYPI_USERNAME -p $PYPI_PASSWORD dist/* docker: @@ -66,7 +66,7 @@ jobs: steps: - name: 'Checkout Infrastructure' uses: actions/checkout@master - + - name: Set up Docker Buildx uses: docker/setup-buildx-action@v1 @@ -103,3 +103,81 @@ jobs: labels: ${{ steps.meta.outputs.labels }} cache-from: type=gha cache-to: type=gha,mode=max + + build-publish-helm-chart: + name: Build and publish Helm chart + runs-on: ubuntu-latest + permissions: + contents: read + packages: write + + steps: + - name: Setup the repository + uses: actions/checkout@v3 + with: + fetch-depth: 0 + + - name: Setup Python + uses: actions/setup-python@v3 + + - name: Install chart publishing dependencies + run: | + pip install chartpress pyyaml + pip list + helm version + + - name: Set up QEMU + uses: docker/setup-qemu-action@v1 + + - name: Set up Docker Buildx + uses: docker/setup-buildx-action@v1 + + - name: Login to Docker Hub + uses: docker/login-action@v1 + with: + username: ${{ secrets.DOCKER_USERNAME }} + password: ${{ secrets.DOCKER_PASSWORD }} + + - name: Configure a git user + run: | + git config --global user.email "github-actions@example.local" + git config --global user.name "GitHub Actions user" + + - name: Build and publish Helm chart with chartpress + env: + GITHUB_ACTOR: "" + GITHUB_TOKEN: "${{ secrets.GITHUB_TOKEN }}" + run: | + PUBLISH_ARGS="--push --publish-chart \ + --builder=docker-buildx \ + --platform=linux/amd64 \ + --platform=linux/arm64" + + # chartpress needs to run next to resources/helm/chartpress.yaml + cd resources/helm/ + + # chartpress use git to push to our Helm chart repository. + # Ensure that the permissions to the Docker registry are + # already configured. + + if [[ $GITHUB_REF != refs/tags/* ]]; then + # Using --extra-message, we help readers of merged PRs to know what version + # they need to bump to in order to make use of the PR. 
+ # + # ref: https://github.com/jupyterhub/chartpress#usage + # + # NOTE: GitHub merge commits contain a PR reference like #123. `sed` is used + # to extract a PR reference like #123 or a commit hash reference like + # @123abcd. + + PR_OR_HASH=$(git log -1 --pretty=%h-%B | head -n1 | sed 's/^.*\(#[0-9]*\).*/\1/' | sed 's/^\([0-9a-f]*\)-.*/@\1/') + LATEST_COMMIT_TITLE=$(git log -1 --pretty=%B | head -n1) + EXTRA_MESSAGE="${GITHUB_REPOSITORY}${PR_OR_HASH} ${LATEST_COMMIT_TITLE}" + + chartpress $PUBLISH_ARGS --extra-message "${EXTRA_MESSAGE}" + else + # Setting a tag explicitly enforces a rebuild if this tag had already been + # built and we wanted to override it. + + chartpress $PUBLISH_ARGS --tag "${GITHUB_REF:10}" + fi diff --git a/.github/workflows/tests.yaml b/.github/workflows/tests.yaml index c4c27fa59..282fd2f1d 100644 --- a/.github/workflows/tests.yaml +++ b/.github/workflows/tests.yaml @@ -36,6 +36,13 @@ jobs: run: | flake8 --version flake8 + - name: Release Check + run: | + python -m build --sdist + twine check dist/* + - name: Unit Tests + run: | + pytest - name: Deploy docker-compose run: | docker-compose up -d @@ -94,13 +101,70 @@ jobs: uses: actions/setup-python@v2 with: python-version: ${{ matrix.python-version }} - - name: Install Python build dependencies + - name: Install Dependencies run: | - pip install jupyter-packaging jupyterlab>=3.0.0 - - name: Install package + pip install .[dev] + sudo apt install wait-for-it -y + - name: Black Formatting run: | - yarn - pip install . 
+ black --version + black conda_store --diff + black --check conda_store + - name: Flake8 Formatting + run: | + flake8 --version + flake8 + - name: Release Check + run: | + python -m build --sdist + twine check dist/* + - name: Deploy docker-compose + run: | + docker-compose up -d + docker ps + + wait-for-it localhost:5432 # postgresql + wait-for-it localhost:9000 # minio + wait-for-it localhost:5000 # conda-store-server + - name: Basic tests not authenticated + run: | + sleep 20 + + export CONDA_STORE_URL=http://localhost:5000/conda-store + export CONDA_STORE_AUTH=none + conda-store --version + conda-store info + conda-store token + conda-store list namespace + conda-store list build + conda-store list environment + + - name: Basic tests authenticated + run: | + export CONDA_STORE_URL=http://localhost:5000/conda-store + export CONDA_STORE_AUTH=basic + export CONDA_STORE_USERNAME=username + export CONDA_STORE_PASSWORD=password + + conda-store --version + conda-store info + conda-store token + conda-store list build + echo "waiting for build 1 to finish" + conda-store wait 1 + conda-store wait filesystem/python-flask-env + conda-store wait filesystem/python-flask-env:1 + conda-store wait filesystem/python-flask-env:1 --artifact=archive + conda-store list namespace + conda-store list build + conda-store list environment + conda-store list environment --package python --package ipykernel --status COMPLETED --artifact CONDA_PACK + conda-store download 1 --artifact yaml + conda-store download 1 --artifact lockfile + conda-store download 1 --artifact archive + time conda-store run 1 -- python -c "print(1 + 3)" + time conda-store run 1 -- python -c "print(1 + 4)" + ../tests/scripts/shebang.py build-docker-image: name: 'Build docker images' diff --git a/.gitignore b/.gitignore index aea43a145..327bfeeeb 100644 --- a/.gitignore +++ b/.gitignore @@ -21,3 +21,8 @@ docs/_build # nix .direnv **/.DS_Store + +# vscode +.history/ +.vscode/ +*.code-workspace diff --git 
a/.hadolint.yml b/.hadolint.yml deleted file mode 100644 index 651e690aa..000000000 --- a/.hadolint.yml +++ /dev/null @@ -1,10 +0,0 @@ ---- -ignored: - - DL3029 - - DL3006 - - DL3015 - - DL3003 - - DL3008 - - DL3002 - - DL3060 - - DL3042 diff --git a/.vale.ini b/.vale.ini deleted file mode 100644 index cdcbfb10c..000000000 --- a/.vale.ini +++ /dev/null @@ -1,5 +0,0 @@ -StylesPath = tests/vale/styles -MinAlertLevel = suggestion - -[*.md] -BasedOnStyles = Vale, Google diff --git a/CHANGELOG.md b/CHANGELOG.md index 1dd3d4fff..f5dd67817 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -18,6 +18,113 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0 ### Security +## [0.4.11] - 2022-08-17 + +### Fixed + + - including package_data #379 + + +## [0.4.10] - 2022-08-16 + +### Added + + - `conda-store-server --standalone` now runs conda-store-server without any service dependencies (#378, #376) + - Initial helm chart skeleton still work to be done to have official helm chart + +### Fixed + + - Bug in LocalStorage implmentation #376 + - Bug in docker builds when pushed to container registries #377 + - CORS endpoint error on login when using POST #375 + +## [0.4.9] - 2022-08-09 + +### Added + + - push/pull container images to/from additionall registries (#370) + - adding description associated with environments #363 + +## [0.4.8] - 2022-08-04 + +### Added + + - Adding shebang support for conda-store (#362) + +### Fixed + + - Fixed example demo for docker + - Fixing docker registry implementation in conda-store (#368) + +## Security + + - Adding authentication behind docker registry (#369) + +## [0.4.7] - 2022-07-28 + +### Added + + - Adding additional query parameters environment_id, namespace, name in list api methods in build/environment #350 + - Adding ability to sort based on start/schedule/ended for list builds #352 + - Adding repo.anaconda.com to default channels #354 + - Empty list for conda_allowed_channels now will allow any channel #358 
+ +### Fixed + + - Changed docker images to no longer run as root by default #355 + +## [0.4.6] - 2022-07-08 + +### Added + + - Added `c.CondaStoreServer.template_vars` for easy customization #347 + - Consistent naming of `conda-store` throughout project #345 + - Adding token endpoint #335 + - Adding token UI create button #348 + +### Fixed + + - Bug with user being able to modify `c.RBACAuthorizationBackend.authenticated_role_bindings` #346 + +## [0.4.5] - 2022-06-29 + +### Added + + - Adding cli command `solve` to call remote conda solve api (#329) + - New filters for build and environment api calls status, artifact, package (#329) + - Adding Alembic migration integration (#314) + +## [0.4.4] - 2022-06-25 + +### Added + + - `wait` option in cli for waiting on artifacts and builds (#328) + - `list build` command (#328) + - tests for client conda-store (#328) + +### Fixed + + - issue with caching option in run command (#328) + +### Changed + + - api now exposes the build_artifacts field on `api/v1/build//` + +## [0.4.2] - 2022-06-24 + +### Fixed + + - fixed release process using build toolchain + +## [0.4.1] - 2022-06-24 + +### Added + + - Command line client for conda-store (#327) + - Adding searchbar for UI (#326) + - OpenAPI specification in documentation + - Added namespace query parameter to `/api/v1/environment/?namespace=` (#324) + ## [0.4.0] - 2022-05-04 ### Changed @@ -76,7 +183,7 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0 - `CondaStore.conda_...` options for managing channels and packages in specification #256 - Ability to modify/validate specifications server side #252 - - Concurrency option for Conda-Store celery worker #250 + - Concurrency option for conda-store celery worker #250 - Flask webserver has a `CondaStore.behind_proxy` option to properly handle X-Forward- headers #249 - Docker layer chaching to CI for docker image builds #234 @@ -171,7 +278,7 @@ and this project adheres to [Semantic 
Versioning](https://semver.org/spec/v2.0.0 ### Added - support for credentials supplied for object storage including IAM credentials #176 - - namespace UI to Conda-Store server #183 + - namespace UI to conda-store server #183 - create/read/delete methods for namespaces `/api/v1/namespace/` #181 - distinct_on query parameter to list REST API methods #164 - adding sorting query parameter to list REST API methods #162 @@ -211,7 +318,7 @@ notable things done in the past 3-6 months. - enabling rollbacks of environment builds [#93](https://github.com/Quansight/conda-store/pull/93) - adding `conda env export` for pinned YAML file [#92](https://github.com/Quansight/conda-store/pull/92) - celery integration for true task based builds [#90](https://github.com/Quansight/conda-store/pull/90) - - Conda-Store configuration is configured via Traitlets [#87](https://github.com/Quansight/conda-store/pull/87) + - conda-store configuration is configured via Traitlets [#87](https://github.com/Quansight/conda-store/pull/87) - Prometheus metrics endpoint [#84](https://github.com/Quansight/conda-store/pull/84) - help button in top right hand corner [#83](https://github.com/Quansight/conda-store/pull/83) - support for internal and external url for s3 bucket [#81](https://github.com/Quansight/conda-store/pull/81) diff --git a/README.md b/README.md index f4b079ec0..8f15b7b18 100644 --- a/README.md +++ b/README.md @@ -1,4 +1,4 @@ -# Conda Store +# conda-store Documentation Status @@ -20,11 +20,41 @@ Conda -![Conda-Store UI](docs/_static/images/conda-store-authenticated.png) +![conda-store UI](docs/_static/images/conda-store-authenticated.png) + +## Documentation + +All documentation can be found on Read the Docs including how to develop +and contribute to the +project. [conda-store.readthedocs.io](https://conda-store.readthedocs.io). 
+ +## Installation + +`conda-store` has two packages a `conda-store` (the client) and +`conda-store-server` (the server). Make sure that either `conda` or +`mamba` are in your path. + +```shell +conda install -c conda-forge conda-store-server conda-store +``` + +The `conda-store` server can be easily launched in standalone mode +that carries no dependencies on databases, object storage, redis. + +``` +conda-store-server --standalone +``` + +Standalone mode runs all components together to simplify +deployment. For more complex and scalable setups there are +[installation examples for containerized and distributed +environments](docs/installation.md). + +## Motivation End users think in terms of environments not packages. The core -philosophy of Conda-Store is to serve identical Conda environments in -as many ways as possible. Conda-Store controls the environment +philosophy of conda-store is to serve identical Conda environments in +as many ways as possible. conda-store controls the environment lifecycle: management, builds, and serving of environments. It **manages** Conda environments by: @@ -39,15 +69,9 @@ environment builds. It **serves** Conda environments via a filesystem, lockfile, tarball, and a docker registry. Tarballs and Docker images can carry a lot of -bandwidth which is why Conda-Store integrates optionally with `s3` to +bandwidth which is why conda-store integrates optionally with `s3` to actually serve the blobs. -## Documentation - -All documentation can be found on Read the Docs including how to develop -and contribute to the -project. [conda-store.readthedocs.io](https://conda-store.readthedocs.io). 
- ## Terminology - A `namespace` is a way of scoping environments @@ -66,11 +90,11 @@ This design has several advantages: - because each `environment` update is a new separate build the environment can be archived and uniquely identified -![Conda-Store terminology](docs/_static/images/conda-store-terminology.png) +![conda-store terminology](docs/_static/images/conda-store-terminology.png) ## Philosophy -We mentioned above that `Conda-Store` was influenced by +We mentioned above that `conda-store` was influenced by [nix](https://nixos.org/). While Conda is not as pure as nix (when it comes to reproducible builds) we can achieve close to the same results with many of the great benefits. Motivation @@ -95,7 +119,7 @@ caching, and rollbacks to previous environment states. ## License -Conda-Store is [BSD-3 LICENSED](./LICENSE) +conda-store is [BSD-3 LICENSED](./LICENSE) ## Contributing diff --git a/conda-store-server/Dockerfile b/conda-store-server/Dockerfile index df7dc1501..522db4d78 100644 --- a/conda-store-server/Dockerfile +++ b/conda-store-server/Dockerfile @@ -22,3 +22,13 @@ RUN cd /opt/conda-store-server && \ ENV PATH=/opt/conda/condabin:/opt/conda/envs/conda-store-server/bin:/opt/conda/bin:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin:${PATH} ENV TZ=America/New_York + +RUN chown -R 1000:1000 /opt/conda/pkgs && \ + mkdir -p /var/lib/conda-store && \ + chown 1000:1000 /var/lib/conda-store && \ + mkdir -p /opt/conda-store/envs && \ + chown 1000:1000 /opt/conda-store/envs + +USER 1000:1000 + +WORKDIR /var/lib/conda-store diff --git a/conda-store-server/README.md b/conda-store-server/README.md index 018f3fff5..fbcd9016a 100644 --- a/conda-store-server/README.md +++ b/conda-store-server/README.md @@ -1,4 +1,4 @@ -# Conda Store Server +# conda-store server [![Documentation Status](https://readthedocs.org/projects/conda-store/badge/?version=latest)](https://conda-store.readthedocs.io/en/latest/?badge=latest) diff --git 
a/conda-store-server/conda_store_server/__init__.py b/conda-store-server/conda_store_server/__init__.py index 8cb3a76a2..58ce5cd17 100644 --- a/conda-store-server/conda_store_server/__init__.py +++ b/conda-store-server/conda_store_server/__init__.py @@ -1 +1 @@ -__version__ = "0.4.0" # noqa +__version__ = "0.4.11" diff --git a/conda-store-server/conda_store_server/alembic.ini b/conda-store-server/conda_store_server/alembic.ini new file mode 100644 index 000000000..56e062121 --- /dev/null +++ b/conda-store-server/conda_store_server/alembic.ini @@ -0,0 +1,65 @@ +# A generic, single database configuration. + +[alembic] +script_location = {alembic_dir} +sqlalchemy.url = {db_url} + +# template used to generate migration files +# file_template = %%(rev)s_%%(slug)s + +# max length of characters to apply to the +# "slug" field +#truncate_slug_length = 40 + +# set to 'true' to run the environment during +# the 'revision' command, regardless of autogenerate +# revision_environment = false + +# set to 'true' to allow .pyc and .pyo files without +# a source .py file to be detected as revisions in the +# versions/ directory +# sourceless = false + +# version location specification; this defaults +# to jupyterhub/alembic/versions. 
When using multiple version +# directories, initial revisions must be specified with --version-path +# version_locations = %(here)s/bar %(here)s/bat jupyterhub/alembic/versions + +# the output encoding used when revision files +# are written from script.py.mako +# output_encoding = utf-8 + +# Logging configuration +[loggers] +keys = root,sqlalchemy,alembic + +[handlers] +keys = console + +[formatters] +keys = generic + +[logger_root] +level = WARN +handlers = console +qualname = + +[logger_sqlalchemy] +level = WARN +handlers = +qualname = sqlalchemy.engine + +[logger_alembic] +level = INFO +handlers = +qualname = alembic + +[handler_console] +class = StreamHandler +args = (sys.stderr,) +level = NOTSET +formatter = generic + +[formatter_generic] +format = %(levelname)-5.5s [%(name)s] %(message)s +datefmt = %H:%M:%S diff --git a/conda-store-server/conda_store_server/alembic/README b/conda-store-server/conda_store_server/alembic/README new file mode 100644 index 000000000..98e4f9c44 --- /dev/null +++ b/conda-store-server/conda_store_server/alembic/README @@ -0,0 +1 @@ +Generic single-database configuration. \ No newline at end of file diff --git a/conda-store-server/conda_store_server/alembic/env.py b/conda-store-server/conda_store_server/alembic/env.py new file mode 100644 index 000000000..8ac99f79b --- /dev/null +++ b/conda-store-server/conda_store_server/alembic/env.py @@ -0,0 +1,80 @@ +from logging.config import fileConfig + +from sqlalchemy import engine_from_config +from sqlalchemy import pool + +from alembic import context + + +# this is the Alembic Config object, which provides +# access to the values within the .ini file in use. +config = context.config + +# Interpret the config file for Python logging. +# This line sets up loggers basically. 
+fileConfig(config.config_file_name) + +# add your model's MetaData object here +# for 'autogenerate' support +# from myapp import mymodel +# target_metadata = mymodel.Base.metadata +from conda_store_server.orm import Base # noqa E402 + +target_metadata = Base.metadata + + +# other values from the config, defined by the needs of env.py, +# can be acquired: +# my_important_option = config.get_main_option("my_important_option") +# ... etc. + + +def run_migrations_offline(): + """Run migrations in 'offline' mode. + + This configures the context with just a URL + and not an Engine, though an Engine is acceptable + here as well. By skipping the Engine creation + we don't even need a DBAPI to be available. + + Calls to context.execute() here emit the given string to the + script output. + + """ + url = config.get_main_option("sqlalchemy.url") + context.configure( + url=url, + target_metadata=target_metadata, + literal_binds=True, + dialect_opts={"paramstyle": "named"}, + ) + + with context.begin_transaction(): + context.run_migrations() + + +def run_migrations_online(): + """Run migrations in 'online' mode. + + In this scenario we need to create an Engine + and associate a connection with the context. 
+ + """ + alembic_config = config.get_section(config.config_ini_section) + connectable = engine_from_config( + alembic_config, + prefix="sqlalchemy.", + poolclass=pool.NullPool, + ) + + with connectable.connect() as connection: + context.configure(connection=connection, target_metadata=target_metadata) + + with context.begin_transaction(): + context.run_migrations() + + +if context.is_offline_mode(): + run_migrations_offline() +else: + run_migrations_online() diff --git a/conda-store-server/conda_store_server/alembic/script.py.mako b/conda-store-server/conda_store_server/alembic/script.py.mako new file mode 100644 index 000000000..2c0156303 --- /dev/null +++ b/conda-store-server/conda_store_server/alembic/script.py.mako @@ -0,0 +1,24 @@ +"""${message} + +Revision ID: ${up_revision} +Revises: ${down_revision | comma,n} +Create Date: ${create_date} + +""" +from alembic import op +import sqlalchemy as sa +${imports if imports else ""} + +# revision identifiers, used by Alembic. +revision = ${repr(up_revision)} +down_revision = ${repr(down_revision)} +branch_labels = ${repr(branch_labels)} +depends_on = ${repr(depends_on)} + + +def upgrade(): + ${upgrades if upgrades else "pass"} + + +def downgrade(): + ${downgrades if downgrades else "pass"} diff --git a/conda-store-server/conda_store_server/alembic/versions/48be4072fe58_initial_schema.py b/conda-store-server/conda_store_server/alembic/versions/48be4072fe58_initial_schema.py new file mode 100644 index 000000000..373d704ce --- /dev/null +++ b/conda-store-server/conda_store_server/alembic/versions/48be4072fe58_initial_schema.py @@ -0,0 +1,238 @@ +"""initial schema + +Revision ID: 48be4072fe58 +Revises: +Create Date: 2022-06-01 18:37:12.396138 + +""" +from alembic import op +import sqlalchemy as sa + + +# revision identifiers, used by Alembic. 
+revision = "48be4072fe58" +down_revision = None +branch_labels = None +depends_on = None + + +def upgrade(): + op.create_table( + "conda_channel", + sa.Column("id", sa.Integer(), nullable=False, primary_key=True), + sa.Column("name", sa.Unicode(length=255), nullable=False, unique=True), + sa.Column("last_update", sa.DateTime(), nullable=True), + ) + + op.create_table( + "conda_store_configuration", + sa.Column("id", sa.Integer(), nullable=False, primary_key=True), + sa.Column("disk_usage", sa.BigInteger(), nullable=True), + sa.Column("free_storage", sa.BigInteger(), nullable=True), + sa.Column("total_storage", sa.BigInteger(), nullable=True), + ) + + op.create_table( + "namespace", + sa.Column("id", sa.Integer(), nullable=False, primary_key=True), + sa.Column("name", sa.Unicode(length=255), nullable=True, unique=True), + sa.Column("deleted_on", sa.DateTime(), nullable=True), + ) + + op.create_table( + "specification", + sa.Column("id", sa.Integer(), nullable=False, primary_key=True), + sa.Column("name", sa.Unicode(length=255), nullable=False), + sa.Column("spec", sa.JSON(), nullable=False), + sa.Column("sha256", sa.Unicode(length=255), nullable=False, unique=True), + sa.Column("created_on", sa.DateTime(), nullable=True), + ) + + op.create_table( + "conda_package", + sa.Column("id", sa.Integer(), nullable=False, primary_key=True), + sa.Column("channel_id", sa.Integer(), nullable=True), + sa.Column("build", sa.Unicode(length=64), nullable=False), + sa.Column("build_number", sa.Integer(), nullable=False), + sa.Column("constrains", sa.JSON(), nullable=True), + sa.Column("depends", sa.JSON(), nullable=False), + sa.Column("license", sa.Text(), nullable=True), + sa.Column("license_family", sa.Unicode(length=64), nullable=True), + sa.Column("md5", sa.Unicode(length=255), nullable=False), + sa.Column("name", sa.Unicode(length=255), nullable=False), + sa.Column("sha256", sa.Unicode(length=64), nullable=False), + sa.Column("size", sa.BigInteger(), nullable=False), + 
sa.Column("subdir", sa.Unicode(length=64), nullable=True), + sa.Column("timestamp", sa.BigInteger(), nullable=True), + sa.Column("version", sa.Unicode(length=64), nullable=False), + sa.Column("summary", sa.Text(), nullable=True), + sa.Column("description", sa.Text(), nullable=True), + ) + + op.create_table( + "environment", + sa.Column("id", sa.Integer(), nullable=False, primary_key=True), + sa.Column("namespace_id", sa.Integer(), nullable=False), + sa.Column("name", sa.Unicode(length=255), nullable=False), + sa.Column("current_build_id", sa.Integer(), nullable=True), + sa.Column("deleted_on", sa.DateTime(), nullable=True), + ) + + op.create_table( + "solve", + sa.Column("id", sa.Integer(), nullable=False, primary_key=True), + sa.Column("specification_id", sa.Integer(), nullable=False), + sa.Column("scheduled_on", sa.DateTime(), nullable=True), + sa.Column("started_on", sa.DateTime(), nullable=True), + sa.Column("ended_on", sa.DateTime(), nullable=True), + ) + + op.create_table( + "build", + sa.Column("id", sa.Integer(), nullable=False, primary_key=True), + sa.Column("specification_id", sa.Integer(), nullable=False), + sa.Column("environment_id", sa.Integer(), nullable=False), + sa.Column( + "status", + sa.Enum("QUEUED", "BUILDING", "COMPLETED", "FAILED", name="buildstatus"), + nullable=True, + ), + sa.Column("size", sa.BigInteger(), nullable=True), + sa.Column("scheduled_on", sa.DateTime(), nullable=True), + sa.Column("started_on", sa.DateTime(), nullable=True), + sa.Column("ended_on", sa.DateTime(), nullable=True), + sa.Column("deleted_on", sa.DateTime(), nullable=True), + ) + + op.create_table( + "solve_conda_package", + sa.Column("solve_id", sa.Integer(), nullable=False, primary_key=True), + sa.Column("conda_package_id", sa.Integer(), nullable=False, primary_key=True), + ) + + op.create_table( + "build_artifact", + sa.Column("id", sa.Integer(), nullable=False, primary_key=True), + sa.Column("build_id", sa.Integer(), nullable=True), + sa.Column( + 
"artifact_type", + sa.Enum( + "DIRECTORY", + "LOCKFILE", + "LOGS", + "YAML", + "CONDA_PACK", + "DOCKER_BLOB", + "DOCKER_MANIFEST", + name="buildartifacttype", + ), + nullable=False, + ), + sa.Column("key", sa.Unicode(length=255), nullable=True), + ) + + op.create_table( + "build_conda_package", + sa.Column("build_id", sa.Integer(), nullable=False, primary_key=True), + sa.Column("conda_package_id", sa.Integer(), nullable=False, primary_key=True), + ) + + with op.batch_alter_table( + "conda_package", + table_args=[ + sa.ForeignKeyConstraint( + ["channel_id"], + ["conda_channel.id"], + ), + sa.UniqueConstraint( + "channel_id", + "subdir", + "name", + "version", + "build", + "build_number", + "sha256", + name="_conda_package_uc", + ), + ], + ): + pass + + with op.batch_alter_table( + "environment", + table_args=[ + sa.ForeignKeyConstraint(["current_build_id"], ["build.id"]), + sa.UniqueConstraint("namespace_id", "name", name="_namespace_name_uc"), + sa.ForeignKeyConstraint(["namespace_id"], ["namespace.id"]), + ], + ): + pass + + with op.batch_alter_table( + "solve", + table_args=[ + sa.ForeignKeyConstraint( + ["specification_id"], + ["specification.id"], + ), + ], + ): + pass + + with op.batch_alter_table( + "build", + table_args=[ + sa.ForeignKeyConstraint(["environment_id"], ["environment.id"]), + sa.ForeignKeyConstraint(["specification_id"], ["specification.id"]), + ], + ): + pass + + with op.batch_alter_table( + "solve_conda_package", + table_args=[ + sa.ForeignKeyConstraint( + ["conda_package_id"], ["conda_package.id"], ondelete="CASCADE" + ), + sa.ForeignKeyConstraint(["solve_id"], ["solve.id"], ondelete="CASCADE"), + ], + ): + pass + + with op.batch_alter_table( + "build_artifact", + table_args=[ + sa.ForeignKeyConstraint( + ["build_id"], + ["build.id"], + ), + ], + ): + pass + + with op.batch_alter_table( + "build_conda_package", + table_args=[ + sa.ForeignKeyConstraint(["build_id"], ["build.id"], ondelete="CASCADE"), + sa.ForeignKeyConstraint( + 
["conda_package_id"], ["conda_package.id"], ondelete="CASCADE" + ), + ], + ): + pass + + +def downgrade(): + # ### commands auto generated by Alembic - please adjust! ### + op.drop_table("build_conda_package") + op.drop_table("build_artifact") + op.drop_table("solve_conda_package") + op.drop_table("build") + op.drop_table("solve") + op.drop_table("environment") + op.drop_table("conda_package") + op.drop_table("specification") + op.drop_table("namespace") + op.drop_table("conda_store_configuration") + op.drop_table("conda_channel") + # ### end Alembic commands ### diff --git a/conda-store-server/conda_store_server/alembic/versions/5ad723de2abd_adding_container_registry_value_to_enum.py b/conda-store-server/conda_store_server/alembic/versions/5ad723de2abd_adding_container_registry_value_to_enum.py new file mode 100644 index 000000000..9ae851e0e --- /dev/null +++ b/conda-store-server/conda_store_server/alembic/versions/5ad723de2abd_adding_container_registry_value_to_enum.py @@ -0,0 +1,49 @@ +"""Adding CONTAINER_REGISTRY value to enum + +Revision ID: 5ad723de2abd +Revises: 8d63a091aff8 +Create Date: 2022-08-05 22:14:34.110642 + +""" +from alembic import op +import sqlalchemy as sa + + +# revision identifiers, used by Alembic. 
+revision = "5ad723de2abd" +down_revision = "8d63a091aff8" +branch_labels = None +depends_on = None + + +def upgrade(): + old_type = sa.Enum( + "DIRECTORY", + "LOCKFILE", + "LOGS", + "YAML", + "CONDA_PACK", + "DOCKER_BLOB", + "DOCKER_MANIFEST", + name="buildartifacttype", + ) + + new_type = sa.Enum( + "DIRECTORY", + "LOCKFILE", + "LOGS", + "YAML", + "CONDA_PACK", + "DOCKER_BLOB", + "DOCKER_MANIFEST", + "CONTAINER_REGISTRY", + name="buildartifacttype", + ) + + with op.batch_alter_table("build_artifact") as batch_op: + batch_op.alter_column("artifact_type", type_=new_type, existing_type=old_type) + + +def downgrade(): + # harmless to keep extra enum around + pass diff --git a/conda-store-server/conda_store_server/alembic/versions/8d63a091aff8_add_environment_description.py b/conda-store-server/conda_store_server/alembic/versions/8d63a091aff8_add_environment_description.py new file mode 100644 index 000000000..3f848a0d8 --- /dev/null +++ b/conda-store-server/conda_store_server/alembic/versions/8d63a091aff8_add_environment_description.py @@ -0,0 +1,26 @@ +"""Add Environment.description + +Revision ID: 8d63a091aff8 +Revises: 48be4072fe58 +Create Date: 2022-07-15 14:22:00.351131 + +""" +from alembic import op +import sqlalchemy as sa + + +# revision identifiers, used by Alembic. 
+revision = "8d63a091aff8" +down_revision = "48be4072fe58" +branch_labels = None +depends_on = None + + +def upgrade(): + with op.batch_alter_table("environment") as batch_op: + batch_op.add_column(sa.Column("description", sa.UnicodeText(), nullable=True)) + + +def downgrade(): + with op.batch_alter_table("environment") as batch_op: + batch_op.drop_column("description") diff --git a/conda-store-server/conda_store_server/api.py b/conda-store-server/conda_store_server/api.py index eed67956e..3b8cf3562 100644 --- a/conda-store-server/conda_store_server/api.py +++ b/conda-store-server/conda_store_server/api.py @@ -1,7 +1,7 @@ from typing import List import re -from sqlalchemy import func, null +from sqlalchemy import func, null, or_ from conda_store_server import orm, schema from conda_store_server.conda import conda_platform @@ -46,20 +46,56 @@ def delete_namespace(db, name: str = None, id: int = None): def list_environments( db, namespace: str = None, + name: str = None, + status: schema.BuildStatus = None, + packages: List[str] = None, + artifact: schema.BuildArtifactType = None, search: str = None, show_soft_deleted: bool = False, ): - filters = [] + query = db.query(orm.Environment).join(orm.Environment.namespace) + if namespace: - filters.append(orm.Namespace.name == namespace) + query = query.filter(orm.Namespace.name == namespace) + + if name: + query = query.filter(orm.Environment.name == name) if search: - filters.append(orm.Environment.name.contains(search, autoescape=True)) + query = query.filter( + or_( + orm.Namespace.name.contains(search, autoescape=True), + orm.Environment.name.contains(search, autoescape=True), + ) + ) if not show_soft_deleted: - filters.append(orm.Environment.deleted_on == null()) + query = query.filter(orm.Environment.deleted_on == null()) + + if status or artifact or packages: + query = query.join(orm.Environment.current_build) - return db.query(orm.Environment).join(orm.Environment.namespace).filter(*filters) + if status: + query = 
query.filter(orm.Build.status == status) + + if artifact: + # DOCKER_BLOB can return multiple results + # use DOCKER_MANIFEST instead + if artifact == schema.BuildArtifactType.DOCKER_BLOB: + artifact = schema.BuildArtifactType.DOCKER_MANIFEST + query = query.join(orm.Build.build_artifacts).filter( + orm.BuildArtifact.artifact_type == artifact + ) + + if packages: + query = ( + query.join(orm.Build.packages) + .filter(orm.CondaPackage.name.in_(packages)) + .group_by(orm.Namespace.name, orm.Environment.name, orm.Environment.id) + .having(func.count() == len(packages)) + ) + + return query def get_environment( @@ -105,15 +141,53 @@ def get_solve(db, solve_id: int): return db.query(orm.Solve).filter(orm.Solve.id == solve_id).first() -def list_builds(db, status: schema.BuildStatus = None, show_soft_deleted: bool = False): - filters = [] +def list_builds( + db, + status: schema.BuildStatus = None, + packages: List[str] = None, + artifact: schema.BuildArtifactType = None, + environment_id: str = None, + name: str = None, + namespace: str = None, + show_soft_deleted: bool = False, +): + query = ( + db.query(orm.Build).join(orm.Build.environment).join(orm.Environment.namespace) + ) + if status: - filters.append(orm.Build.status == status) + query = query.filter(orm.Build.status == status) + + if environment_id: + query = query.filter(orm.Build.environment_id == environment_id) + + if name: + query = query.filter(orm.Environment.name == name) + + if namespace: + query = query.filter(orm.Namespace.name == namespace) if not show_soft_deleted: - filters.append(orm.Build.deleted_on == null()) + query = query.filter(orm.Build.deleted_on == null()) + + if artifact: + # DOCKER_BLOB can return multiple results + # use DOCKER_MANIFEST instead + if artifact == schema.BuildArtifactType.DOCKER_BLOB: + artifact = schema.BuildArtifactType.DOCKER_MANIFEST + query = query.join(orm.Build.build_artifacts).filter( + orm.BuildArtifact.artifact_type == artifact + ) - return 
db.query(orm.Build).filter(*filters) + if packages: + query = ( + query.join(orm.Build.packages) + .filter(orm.CondaPackage.name.in_(packages)) + .group_by(orm.Build.id) + .having(func.count() == len(packages)) + ) + + return query def get_build(db, build_id: int): @@ -195,6 +269,12 @@ def list_conda_channels(db): return db.query(orm.CondaChannel).filter(*filters) +def create_conda_channel(db, channel_name: str): + channel = orm.CondaChannel(name=channel_name, last_update=None) + db.add(channel) + return channel + + def get_conda_channel(db, channel_name: str): return ( db.query(orm.CondaChannel).filter(orm.CondaChannel.name == channel_name).first() diff --git a/conda-store-server/conda_store_server/app.py b/conda-store-server/conda_store_server/app.py index 4cb52a8bf..01f795d65 100644 --- a/conda-store-server/conda_store_server/app.py +++ b/conda-store-server/conda_store_server/app.py @@ -1,7 +1,6 @@ import os import datetime -import redis from celery import Celery, group from traitlets import ( Type, @@ -13,11 +12,21 @@ Bool, validate, TraitError, + Union, ) from traitlets.config import LoggingConfigurable from sqlalchemy.pool import NullPool -from conda_store_server import orm, utils, storage, schema, api, conda, environment +from conda_store_server import ( + orm, + utils, + storage, + schema, + api, + conda, + environment, + registry, +) def conda_store_validate_specification( @@ -49,12 +58,19 @@ def conda_store_validate_specification( class CondaStore(LoggingConfigurable): storage_class = Type( - default_value=storage.S3Storage, + default_value=storage.LocalStorage, klass=storage.Storage, allow_none=False, config=True, ) + container_registry_class = Type( + default_value=registry.ContainerRegistry, + klass=registry.ContainerRegistry, + allow_none=False, + config=True, + ) + store_directory = Unicode( "conda-store-state", help="directory for conda-store to build environments and store state", @@ -101,8 +117,9 @@ class CondaStore(LoggingConfigurable): [ "main", 
"conda-forge", + "https://repo.anaconda.com/pkgs/main", ], - help="Allowed conda channels to be used in conda environments", + help="Allowed conda channels to be used in conda environments. If set to empty list all channels are accepted. Defaults to main and conda-forge", config=True, ) @@ -154,19 +171,26 @@ class CondaStore(LoggingConfigurable): config=True, ) - redis_url = Unicode( - help="Redis connection url in form 'redis://:@:/0'. Connection is used by Celery along with Conda-Store internally", + upgrade_db = Bool( + True, + help="""Upgrade the database automatically on start. + Only safe if database is regularly backed up. + """, config=True, ) - @default("redis_url") - def _default_redis(self): - raise TraitError("c.CondaStore.redis_url Redis connection url is required") + redis_url = Unicode( + None, + help="Redis connection url in form 'redis://:@:/0'. Connection is used by Celery along with conda-store internally", + config=True, + allow_none=True, + ) @validate("redis_url") def _check_redis(self, proposal): try: - self.redis.ping() + if self.redis_url is not None: + self.redis.ping() except Exception: raise TraitError( f'c.CondaStore.redis_url unable to connect with Redis database at "{self.redis_url}"' @@ -184,6 +208,7 @@ def _check_redis(self, proposal): schema.BuildArtifactType.YAML, schema.BuildArtifactType.CONDA_PACK, schema.BuildArtifactType.DOCKER_MANIFEST, + schema.BuildArtifactType.CONTAINER_REGISTRY, ], help="artifacts to build in conda-store. 
By default all of the artifacts", config=True, @@ -194,6 +219,9 @@ def _check_redis(self, proposal): schema.BuildArtifactType.LOGS, schema.BuildArtifactType.LOCKFILE, schema.BuildArtifactType.YAML, + # no possible way to delete these artifacts + # in most container registries via api + schema.BuildArtifactType.CONTAINER_REGISTRY, ], help="artifacts to keep on build deletion", config=True, @@ -207,7 +235,9 @@ def _check_redis(self, proposal): @default("celery_broker_url") def _default_celery_broker_url(self): - return self.redis_url + if self.redis_url is not None: + return self.redis_url + return f"sqla+{self.database_url}" celery_results_backend = Unicode( help="backend to use for celery task results", @@ -216,7 +246,9 @@ def _default_celery_broker_url(self): @default("celery_results_backend") def _default_celery_results_backend(self): - return self.redis_url + if self.redis_url is not None: + return self.redis_url + return f"db+{self.database_url}" default_namespace = Unicode( "default", help="default namespace for conda-store", config=True @@ -246,12 +278,19 @@ def _default_celery_results_backend(self): config=True, ) - default_docker_base_image = Unicode( - "frolvlad/alpine-glibc:latest", - help="default base image used for the Dockerized environments", + default_docker_base_image = Union( + [Unicode(), Callable()], + help="default base image used for the Dockerized environments. Make sure to have a proper glibc within image (highly discourage alpine/musl based images). 
Can also be callable function which takes the `orm.Build` object as input which has access to all attributes about the build such as install packages, requested packages, name, namespace, etc", config=True, ) + @default("default_docker_base_image") + def _default_docker_base_image(self): + def _docker_base_image(build: orm.Build): + return "registry-1.docker.io/library/debian:sid-slim" + + return _docker_base_image + validate_specification = Callable( conda_store_validate_specification, help="callable function taking conda_store and specification as input arguments to apply for validating and modifying a given specification. If there are validation issues with the environment ValueError with message should be raised. If changed you may need to call the default function to preseve many of the trait effects e.g. `c.CondaStore.default_channels` etc", @@ -280,6 +319,8 @@ def db(self): @property def redis(self): + import redis + if hasattr(self, "_redis"): return self._redis self._redis = redis.Redis.from_url(self.redis_url) @@ -294,8 +335,21 @@ def storage(self): if hasattr(self, "_storage"): return self._storage self._storage = self.storage_class(parent=self, log=self.log) + + if isinstance(self._storage, storage.LocalStorage): + os.makedirs(self._storage.storage_path, exist_ok=True) + return self._storage + @property + def container_registry(self): + if hasattr(self, "_container_registry"): + return self._container_registry + self._container_registry = self.container_registry_class( + parent=self, log=self.log + ) + return self._container_registry + @property def celery_app(self): if hasattr(self, "_celery_app"): @@ -324,17 +378,6 @@ def celery_app(self): }, } - if self.celery_results_backend.startswith("sqla"): - # https://github.com/celery/celery/issues/4653#issuecomment-400029147 - # race condition in table construction in celery - # despite issue being closed still causes first task to fail - # in celery if tables not created - from 
celery.backends.database import SessionManager - - session = SessionManager() - engine = session.get_engine(self._celery_app.backend.url) - session.prepare_models(engine) - return self._celery_app def ensure_namespace(self): @@ -448,9 +491,13 @@ def register_environment( environment = orm.Environment( name=specification.name, namespace_id=namespace.id, + description=specification.spec["description"], ) self.db.add(environment) self.db.commit() + else: + environment.description = specification.spec["description"] + self.db.commit() build = self.create_build(environment.id, specification.sha256) @@ -478,7 +525,10 @@ def create_build(self, environment_id: int, specification_sha256: str): artifact_tasks.append(tasks.task_build_conda_env_export.si(build.id)) if schema.BuildArtifactType.CONDA_PACK in self.build_artifacts: artifact_tasks.append(tasks.task_build_conda_pack.si(build.id)) - if schema.BuildArtifactType.DOCKER_MANIFEST in self.build_artifacts: + if ( + schema.BuildArtifactType.DOCKER_MANIFEST in self.build_artifacts + or schema.BuildArtifactType.CONTAINER_REGISTRY in self.build_artifacts + ): artifact_tasks.append(tasks.task_build_conda_docker.si(build.id)) ( @@ -520,6 +570,17 @@ def update_environment_build(self, namespace, name, build_id): tasks.task_update_environment_build.si(environment.id).apply_async() + def update_environment_description(self, namespace, name, description): + + environment = api.get_environment(self.db, namespace=namespace, name=name) + if environment is None: + raise utils.CondaStoreError( + f"environment namespace={namespace} name={name} does not exist" + ) + + environment.description = description + self.db.commit() + def delete_namespace(self, namespace): namespace = api.get_namespace(self.db, name=namespace) if namespace is None: diff --git a/conda-store-server/conda_store_server/build.py b/conda-store-server/conda_store_server/build.py index ce70fdd22..fa63ee98c 100644 --- a/conda-store-server/conda_store_server/build.py +++ 
b/conda-store-server/conda_store_server/build.py @@ -1,6 +1,4 @@ import datetime -import gzip -import hashlib import os import stat import subprocess @@ -62,12 +60,16 @@ def set_build_completed(conda_store, build, logs, packages): # ignore pypi package for now continue - channel_id = api.get_conda_channel(conda_store.db, channel) - if channel_id is None: - raise ValueError( - f"channel url={channel} not recognized in conda-store channel database" - ) - package["channel_id"] = channel_id.id + channel_orm = api.get_conda_channel(conda_store.db, channel) + if channel_orm is None: + if len(conda_store.conda_allowed_channels) == 0: + channel_orm = api.create_conda_channel(conda_store.db, channel) + conda_store.db.commit() + else: + raise ValueError( + f"channel url={channel} not recognized in conda-store channel database" + ) + package["channel_id"] = channel_orm.id # Retrieve the package from the DB if it already exists @@ -249,12 +251,16 @@ def solve_conda_environment(conda_store, solve): # ignore pypi package for now continue - channel_id = api.get_conda_channel(conda_store.db, channel) - if channel_id is None: - raise ValueError( - f"channel url={channel} not recognized in conda-store channel database" - ) - package["channel_id"] = channel_id.id + channel_orm = api.get_conda_channel(conda_store.db, channel) + if channel_orm is None: + if len(conda_store.conda_allowed_channels) == 0: + channel_orm = api.create_conda_channel(conda_store.db, channel) + conda_store.db.commit() + else: + raise ValueError( + f"channel url={channel} not recognized in conda-store channel database" + ) + package["channel_id"] = channel_orm.id _package = ( conda_store.db.query(orm.CondaPackage) @@ -330,8 +336,11 @@ def build_conda_docker(conda_store, build): download_dir = info["pkgs_dirs"][0] precs = precs_from_environment_prefix(conda_prefix, download_dir, user_conda) records = fetch_precs(download_dir, precs) + base_image = conda_store.container_registry.pull_image( + 
utils.callable_or_value(conda_store.default_docker_base_image, build) + ) image = build_docker_environment_image( - base_image=conda_store.default_docker_base_image, + base_image=base_image, output_image=f"{build.specification.name}:{build.build_key}", records=records, default_prefix=info["env_vars"]["CONDA_ROOT"], @@ -341,78 +350,8 @@ def build_conda_docker(conda_store, build): layering_strategy="layered", ) - # https://docs.docker.com/registry/spec/manifest-v2-2/#example-image-manifest - docker_manifest = schema.DockerManifest.construct() - docker_config = schema.DockerConfig.construct( - config=schema.DockerConfigConfig(), - container_config=schema.DockerConfigConfig(), - rootfs=schema.DockerConfigRootFS(), - ) - - for layer in image.layers: - # https://github.com/google/nixery/pull/64#issuecomment-541019077 - # docker manifest expects compressed hash while configuration file - # expects uncompressed hash -- good luck finding this detail in docs :) - content_uncompressed_hash = hashlib.sha256(layer.content).hexdigest() - content_compressed = gzip.compress(layer.content) - content_compressed_hash = hashlib.sha256(content_compressed).hexdigest() - conda_store.storage.set( - conda_store.db, - build.id, - build.docker_blob_key(content_compressed_hash), - content_compressed, - content_type="application/gzip", - artifact_type=schema.BuildArtifactType.DOCKER_BLOB, - ) - - docker_layer = schema.DockerManifestLayer( - size=len(content_compressed), digest=f"sha256:{content_compressed_hash}" - ) - docker_manifest.layers.append(docker_layer) - - docker_config_history = schema.DockerConfigHistory() - docker_config.history.append(docker_config_history) - - docker_config.rootfs.diff_ids.append(f"sha256:{content_uncompressed_hash}") - - docker_config_content = docker_config.json().encode("utf-8") - docker_config_hash = hashlib.sha256(docker_config_content).hexdigest() - docker_manifest.config = schema.DockerManifestConfig( - size=len(docker_config_content), 
digest=f"sha256:{docker_config_hash}" - ) - docker_manifest_content = docker_manifest.json().encode("utf-8") - docker_manifest_hash = hashlib.sha256(docker_manifest_content).hexdigest() - - conda_store.storage.set( - conda_store.db, - build.id, - build.docker_blob_key(docker_config_hash), - docker_config_content, - content_type="application/vnd.docker.container.image.v1+json", - artifact_type=schema.BuildArtifactType.DOCKER_BLOB, - ) - - # docker likes to have a sha256 key version of the manifest this - # is sort of hack to avoid having to figure out which sha256 - # refers to which manifest. - conda_store.storage.set( - conda_store.db, - build.id, - f"docker/manifest/sha256:{docker_manifest_hash}", - docker_manifest_content, - content_type="application/vnd.docker.distribution.manifest.v2+json", - artifact_type=schema.BuildArtifactType.DOCKER_BLOB, - ) - - conda_store.storage.set( - conda_store.db, - build.id, - build.docker_manifest_key, - docker_manifest_content, - content_type="application/vnd.docker.distribution.manifest.v2+json", - artifact_type=schema.BuildArtifactType.DOCKER_MANIFEST, - ) + if schema.BuildArtifactType.DOCKER_MANIFEST in conda_store.build_artifacts: + conda_store.container_registry.store_image(conda_store, build, image) - conda_store.log.info( - f"built docker image: {image.name}:{image.tag} layers={len(image.layers)}" - ) + if schema.BuildArtifactType.CONTAINER_REGISTRY in conda_store.build_artifacts: + conda_store.container_registry.push_image(conda_store, build, image) diff --git a/conda-store-server/conda_store_server/conda.py b/conda-store-server/conda_store_server/conda.py index 94f61528f..70a7e1f68 100644 --- a/conda-store-server/conda_store_server/conda.py +++ b/conda-store-server/conda_store_server/conda.py @@ -1,4 +1,4 @@ -"""Interface between Conda-Store and conda +"""Interface between conda-store and conda This module provides all the functionality that is required for executing conda commands diff --git 
a/conda-store-server/conda_store_server/dbutil.py b/conda-store-server/conda_store_server/dbutil.py new file mode 100644 index 000000000..b8095546c --- /dev/null +++ b/conda-store-server/conda_store_server/dbutil.py @@ -0,0 +1,97 @@ +import os +from contextlib import contextmanager +from subprocess import check_call +from tempfile import TemporaryDirectory + +from sqlalchemy import create_engine, inspect +from alembic import command +from alembic.config import Config + +_here = os.path.abspath(os.path.dirname(__file__)) + +ALEMBIC_INI_TEMPLATE_PATH = os.path.join(_here, "alembic.ini") +ALEMBIC_DIR = os.path.join(_here, "alembic") + + +def write_alembic_ini(alembic_ini, db_url): + """Write a complete alembic.ini from a template. + + Parameters + ---------- + alembic_ini : str + path to the alembic.ini file that should be written. + db_url : str + The SQLAlchemy database url + """ + with open(ALEMBIC_INI_TEMPLATE_PATH) as f: + alembic_ini_tpl = f.read() + + with open(alembic_ini, "w") as f: + f.write( + alembic_ini_tpl.format( + alembic_dir=ALEMBIC_DIR, + # If there are any %s in the URL, they should be replaced with %%, since ConfigParser + # by default uses %() for substitution. You'll get %s in your URL when you have usernames + # with special chars (such as '@') that need to be URL encoded. URL Encoding is done with %s. + # YAY for nested templates? + db_url=str(db_url).replace("%", "%%"), + ) + ) + + +@contextmanager +def _temp_alembic_ini(db_url): + """Context manager for temporary JupyterHub alembic directory + + Temporarily write an alembic.ini file for use with alembic migration scripts. + + Context manager yields alembic.ini path. + + Parameters + ---------- + db_url : str + The SQLAlchemy database url, e.g. `sqlite:///jupyterhub.sqlite`. + + Returns + ------- + alembic_ini: str + The path to the temporary alembic.ini that we have created. + This file will be cleaned up on exit from the context manager. 
+ """ + with TemporaryDirectory() as td: + alembic_ini = os.path.join(td, "alembic.ini") + write_alembic_ini(alembic_ini, db_url) + yield alembic_ini + + +def upgrade(db_url, revision="head"): + """Upgrade the given database to revision. + + db_url: str + The SQLAlchemy database url, e.g. `sqlite:///jupyterhub.sqlite`. + revision: str [default: head] + The alembic revision to upgrade to. + """ + + engine = create_engine(db_url) + + # retrieves the names of tables in the DB + current_table_names = set(inspect(engine).get_table_names()) + + with _temp_alembic_ini(db_url) as alembic_ini: + + if ( + "alembic_version" not in current_table_names + and len(current_table_names) > 0 + ): + # If table alembic_version is missing, + # we stamp the revision at the first one, that introduces the alembic revisions. + # I chose the leave the revision number hardcoded as it's not something + # dynamic, not something we want to change, and tightly related to the codebase + command.stamp(Config(alembic_ini), "48be4072fe58") + # After this point, whatever is in the database, Alembic will + # believe it's at the first revision. If there are more upgrades/migrations + # to run, they'll be at the next step : + + # run the upgrade. 
+ check_call(["alembic", "-c", alembic_ini, "upgrade", revision]) diff --git a/conda-store-server/conda_store_server/environment.py b/conda-store-server/conda_store_server/environment.py index 6836eef51..bd25f522b 100644 --- a/conda-store-server/conda_store_server/environment.py +++ b/conda-store-server/conda_store_server/environment.py @@ -3,8 +3,6 @@ import yaml import pydantic -from conda.models.match_spec import MatchSpec -from pkg_resources import Requirement from conda_store_server import schema, conda @@ -56,7 +54,9 @@ def validate_environment_channels( conda.normalize_channel_name(conda_channel_alias, _) for _ in allowed_channels ) - if not (normalized_conda_channels <= normalized_conda_allowed_channels): + if len(allowed_channels) and not ( + normalized_conda_channels <= normalized_conda_allowed_channels + ): raise ValueError( f"Conda channels {normalized_conda_channels - normalized_conda_allowed_channels} not allowed in specification" ) @@ -71,6 +71,8 @@ def validate_environment_conda_packages( required_packages: List[str], ) -> schema.Specification: def _package_names(dependencies): + from conda.models.match_spec import MatchSpec + return {MatchSpec(_).name: _ for _ in dependencies if isinstance(_, str)} if len(specification.dependencies) == 0: @@ -101,6 +103,8 @@ def validate_environment_pypi_packages( required_packages: List[str], ) -> schema.Specification: def _package_names(packages): + from pkg_resources import Requirement + result = {} for p in packages: if isinstance(p, str): diff --git a/conda-store-server/conda_store_server/orm.py b/conda-store-server/conda_store_server/orm.py index 27889febc..c3ef33a4f 100644 --- a/conda-store-server/conda_store_server/orm.py +++ b/conda-store-server/conda_store_server/orm.py @@ -287,6 +287,8 @@ class Environment(Base): deleted_on = Column(DateTime, default=None) + description = Column(UnicodeText, default=None) + class CondaChannel(Base): __tablename__ = "conda_channel" @@ -608,10 +610,5 @@ def 
update_storage_metrics(cls, db, store_directory): def new_session_factory(url="sqlite:///:memory:", reset=False, **kwargs): engine = create_engine(url, **kwargs) - if reset: - Base.metadata.drop_all(engine) - - Base.metadata.create_all(engine) - session_factory = scoped_session(sessionmaker(bind=engine)) return session_factory diff --git a/conda-store-server/conda_store_server/registry.py b/conda-store-server/conda_store_server/registry.py new file mode 100644 index 000000000..1afdb9daa --- /dev/null +++ b/conda-store-server/conda_store_server/registry.py @@ -0,0 +1,188 @@ +import hashlib +import gzip +import urllib.parse + +from traitlets.config import LoggingConfigurable +from traitlets import Dict, Callable, default +from python_docker.registry import Image, Registry + +from conda_store_server import schema, orm, utils + + +class ContainerRegistry(LoggingConfigurable): + container_registries = Dict( + {}, + help="Registries url to upload built container images with callable function to configure registry instance with credentials", + config=True, + ) + + container_registry_image_name = Callable( + help="Image name to assign to docker image pushed for particular registry", + config=True, + ) + + @default("container_registry_image_name") + def _default_container_registry_image_name(self): + def _container_registry_image_name(registry: Registry, build: orm.Build): + return f"{registry.username}/{build.environment.namespace.name}-{build.environment.name}" + + return _container_registry_image_name + + container_registry_image_tag = Callable( + help="Image name and tag to assign to docker image pushed for particular registry", + config=True, + ) + + @default("container_registry_image_tag") + def _default_container_registry_image_tag(self): + def _container_registry_image_tag(registry: Registry, build: orm.Build): + return build.key + + return _container_registry_image_tag + + def store_image(self, conda_store, build: orm.Build, image: Image): + self.log.info("storing 
container image locally") + with utils.timer(self.log, "storing container image locally"): + # https://docs.docker.com/registry/spec/manifest-v2-2/#example-image-manifest + docker_manifest = schema.DockerManifest.construct() + docker_config = schema.DockerConfig.construct( + config=schema.DockerConfigConfig(), + container_config=schema.DockerConfigConfig(), + rootfs=schema.DockerConfigRootFS(), + ) + + for layer in image.layers: + # https://github.com/google/nixery/pull/64#issuecomment-541019077 + # docker manifest expects compressed hash while configuration file + # expects uncompressed hash -- good luck finding this detail in docs :) + content_uncompressed_hash = hashlib.sha256(layer.content).hexdigest() + content_compressed = gzip.compress(layer.content) + content_compressed_hash = hashlib.sha256(content_compressed).hexdigest() + conda_store.storage.set( + conda_store.db, + build.id, + build.docker_blob_key(content_compressed_hash), + content_compressed, + content_type="application/gzip", + artifact_type=schema.BuildArtifactType.DOCKER_BLOB, + ) + + docker_layer = schema.DockerManifestLayer( + size=len(content_compressed), + digest=f"sha256:{content_compressed_hash}", + ) + docker_manifest.layers.append(docker_layer) + + docker_config_history = schema.DockerConfigHistory() + docker_config.history.append(docker_config_history) + + docker_config.rootfs.diff_ids.append( + f"sha256:{content_uncompressed_hash}" + ) + + docker_config_content = docker_config.json().encode("utf-8") + docker_config_hash = hashlib.sha256(docker_config_content).hexdigest() + docker_manifest.config = schema.DockerManifestConfig( + size=len(docker_config_content), digest=f"sha256:{docker_config_hash}" + ) + docker_manifest_content = docker_manifest.json().encode("utf-8") + docker_manifest_hash = hashlib.sha256(docker_manifest_content).hexdigest() + + conda_store.storage.set( + conda_store.db, + build.id, + build.docker_blob_key(docker_config_hash), + docker_config_content, + 
content_type="application/vnd.docker.container.image.v1+json", + artifact_type=schema.BuildArtifactType.DOCKER_BLOB, + ) + + # docker likes to have a sha256 key version of the manifest this + # is sort of hack to avoid having to figure out which sha256 + # refers to which manifest. + conda_store.storage.set( + conda_store.db, + build.id, + f"docker/manifest/sha256:{docker_manifest_hash}", + docker_manifest_content, + content_type="application/vnd.docker.distribution.manifest.v2+json", + artifact_type=schema.BuildArtifactType.DOCKER_BLOB, + ) + + conda_store.storage.set( + conda_store.db, + build.id, + build.docker_manifest_key, + docker_manifest_content, + content_type="application/vnd.docker.distribution.manifest.v2+json", + artifact_type=schema.BuildArtifactType.DOCKER_MANIFEST, + ) + + conda_store.log.info( + f"built docker image: {image.name}:{image.tag} layers={len(image.layers)}" + ) + + @staticmethod + def parse_image_uri(image_name: str): + """Must be in fully specified format [://]/:""" + if not image_name.startswith("http"): + image_name = f"https://{image_name}" + + parsed_url = urllib.parse.urlparse(image_name) + registry_url = f"{parsed_url.scheme}://{parsed_url.netloc}" + image_name, tag_name = parsed_url.path.split(":", 1) + image_name = image_name[1:] # remove beginning "/" + return registry_url, image_name, tag_name + + def pull_image(self, image_name: str) -> Image: + """Must be in fully specified format [://]/: + + Docker is the only weird registry where you must use: + - `https://registry-1.docker.io` + """ + registry_url, name, tag = self.parse_image_uri(image_name) + + for url in self.container_registries: + if registry_url in url: + registry = self.container_registries[registry_url](url) + break + else: + self.log.warning( + f"registry {registry_url} not configured using registry without authentication" + ) + registry = Registry(hostname=registry_url) + + return registry.pull_image(name, tag) + + def push_image(self, conda_store, build, 
image: Image): + for registry_url, configure_registry in self.container_registries.items(): + self.log.info(f"beginning upload of image to registry {registry_url}") + with utils.timer(self.log, f"uploading image to registry {registry_url}"): + registry = configure_registry(registry_url) + image.name = self.container_registry_image_name(registry, build) + image.tag = self.container_registry_image_tag(registry, build) + registry.push_image(image) + + registry_build_artifact = orm.BuildArtifact( + build_id=build.id, + artifact_type=schema.BuildArtifactType.CONTAINER_REGISTRY, + key=f"{registry_url}/{image.name}:{image.tag}", + ) + conda_store.db.add(registry_build_artifact) + conda_store.db.commit() + + def delete_image(self, image_name: str): + registry_url, name, tag = self.parse_image_uri(image_name) + + for url in self.container_registries: + if registry_url in url: + registry = self.container_registries[registry_url](url) + break + else: + self.log.warning( + f"registry {registry_url} not configured using registry without authentication" + ) + registry = Registry(hostname=registry_url) + + self.log.info(f"deleting container image {image_name}") + registry.delete_image(name, tag) diff --git a/conda-store-server/conda_store_server/schema.py b/conda-store-server/conda_store_server/schema.py index aade74cac..dc71b9cf1 100644 --- a/conda-store-server/conda_store_server/schema.py +++ b/conda-store-server/conda_store_server/schema.py @@ -3,7 +3,6 @@ import enum from typing import List, Optional, Union, Dict, Any import functools -from pkg_resources import Requirement from pydantic import BaseModel, Field, constr, validator @@ -13,6 +12,12 @@ def _datetime_factory(offset: datetime.timedelta): return datetime.datetime.utcnow() + offset +# namespace and name cannot contain "*" ":" "#" " " "/" +# this is a more restrictive list +ALLOWED_CHARACTERS = "A-Za-z0-9-+_@$&?^~.=" +ARN_ALLOWED = f"^([{ALLOWED_CHARACTERS}*]+)/([{ALLOWED_CHARACTERS}*]+)$" + + ######################### 
# Authentication Schema ######################### @@ -34,7 +39,7 @@ class AuthenticationToken(BaseModel): default_factory=functools.partial(_datetime_factory, datetime.timedelta(days=1)) ) primary_namespace: str = "default" - role_bindings: Dict[str, List[str]] = {} + role_bindings: Dict[constr(regex=ARN_ALLOWED), List[str]] = {} ########################## @@ -82,11 +87,6 @@ class Config: orm_mode = True -# namespace and name cannot contain "*" ":" "#" " " "/" -# this is a more restrictive list -ALLOWED_CHARACTERS = "A-Za-z0-9-+_=@$&?^|~." - - class Namespace(BaseModel): id: int name: constr(regex=f"^[{ALLOWED_CHARACTERS}]+$") # noqa: F722 @@ -114,6 +114,7 @@ class BuildArtifactType(enum.Enum): CONDA_PACK = "CONDA_PACK" DOCKER_BLOB = "DOCKER_BLOB" DOCKER_MANIFEST = "DOCKER_MANIFEST" + CONTAINER_REGISTRY = "CONTAINER_REGISTRY" class BuildStatus(enum.Enum): @@ -123,6 +124,16 @@ class BuildStatus(enum.Enum): FAILED = "FAILED" +class BuildArtifact(BaseModel): + id: int + artifact_type: BuildArtifactType + key: str + + class Config: + orm_mode = True + use_enum_values = True + + class Build(BaseModel): id: int environment_id: int @@ -133,6 +144,7 @@ class Build(BaseModel): scheduled_on: datetime.datetime started_on: Optional[datetime.datetime] ended_on: Optional[datetime.datetime] + build_artifacts: Optional[List[BuildArtifact]] class Config: orm_mode = True @@ -146,6 +158,8 @@ class Environment(BaseModel): current_build_id: int current_build: Optional[Build] + description: str + class Config: orm_mode = True @@ -156,6 +170,7 @@ class CondaSpecificationPip(BaseModel): @validator("pip", each_item=True) def check_pip(cls, v): + from pkg_resources import Requirement allowed_pip_params = ["--index-url", "--extra-index-url", "--trusted-host"] @@ -179,6 +194,7 @@ class CondaSpecification(BaseModel): channels: List[str] = [] dependencies: List[Union[str, CondaSpecificationPip]] = [] prefix: Optional[str] + description: Optional[str] = "" @validator("dependencies", 
each_item=True) def check_dependencies(cls, v): @@ -339,14 +355,25 @@ class APIGetStatus(APIResponse): # GET /api/v1/permission class APIGetPermissionData(BaseModel): authenticated: bool - entity_permissions: Dict[str, List[str]] primary_namespace: str + entity_permissions: Dict[str, List[str]] + entity_roles: Dict[str, List[str]] + expiration: Optional[datetime.datetime] class APIGetPermission(APIResponse): data: APIGetPermissionData +# POST /api/v1/token +class APIPostTokenData(BaseModel): + token: str + + +class APIPostToken(APIResponse): + data: APIPostTokenData + + # GET /api/v1/namespace class APIListNamespace(APIPaginatedResponse): data: List[Namespace] diff --git a/conda-store-server/conda_store_server/server/app.py b/conda-store-server/conda_store_server/server/app.py index 54db39bfa..f63e3850a 100644 --- a/conda-store-server/conda_store_server/server/app.py +++ b/conda-store-server/conda_store_server/server/app.py @@ -6,21 +6,36 @@ from fastapi import FastAPI, Request, HTTPException from fastapi.middleware.cors import CORSMiddleware from fastapi.responses import RedirectResponse, JSONResponse +from fastapi.staticfiles import StaticFiles from starlette.middleware.sessions import SessionMiddleware from fastapi.templating import Jinja2Templates -from traitlets import Bool, Unicode, Integer, Type, validate, Instance, default +from traitlets import Bool, Unicode, Integer, Type, validate, Instance, default, Dict from traitlets.config import Application, catch_config_error +from conda_store_server import storage from conda_store_server.server import auth, views from conda_store_server.app import CondaStore from conda_store_server import __version__ +import conda_store_server.dbutil as dbutil + class CondaStoreServer(Application): aliases = { "config": "CondaStoreServer.config_file", } + flags = { + "standalone": ( + { + "CondaStoreServer": { + "standalone": True, + } + }, + "Run conda-store-server in standalone mode with celery worker as a subprocess of 
webserver", + ), + } + log_level = Integer( logging.INFO, help="log level to use", @@ -82,6 +97,12 @@ class CondaStoreServer(Application): config=True, ) + template_vars = Dict( + {}, + help="Extra variables to be passed into jinja templates for page rendering", + config=True, + ) + @default("templates") def _default_templates(self): import conda_store_server.server @@ -114,13 +135,24 @@ def _validate_config_file(self, proposal): 100, help="maximum number of items to return in a single page", config=True ) + standalone = Bool( + False, + help="Run application in standalone mode with workers running as subprocess", + config=True, + ) + @catch_config_error def initialize(self, *args, **kwargs): super().initialize(*args, **kwargs) self.load_config_file(self.config_file) self.conda_store = CondaStore(parent=self, log=self.log) + + if self.conda_store.upgrade_db: + dbutil.upgrade(self.conda_store.database_url) + self.authentication = self.authentication_class(parent=self, log=self.log) + # ensure checks on redis_url self.conda_store.redis_url @@ -129,7 +161,7 @@ def trim_slash(url): return url[:-1] if url.endswith("/") else url app = FastAPI( - title="Conda-Store", + title="conda-store", version=__version__, openapi_url=os.path.join(self.url_prefix, "openapi.json"), docs_url=os.path.join(self.url_prefix, "docs"), @@ -148,11 +180,16 @@ def trim_slash(url): CORSMiddleware, allow_origins=["*"], allow_credentials=True, + allow_headers=["*"], + allow_methods=["*"], ) app.add_middleware( SessionMiddleware, secret_key=self.authentication.authentication.secret ) + # ensure that template variables are inserted into templates + self.templates.env.globals.update(self.template_vars) + @app.middleware("http") async def conda_store_middleware(request: Request, call_next): try: @@ -210,6 +247,16 @@ def redirect_home(request: Request): prefix=trim_slash(self.url_prefix), ) + if isinstance(self.conda_store.storage, storage.LocalStorage): + self.conda_store.storage.storage_url = ( + 
f"{trim_slash(self.url_prefix)}/storage" + ) + app.mount( + self.conda_store.storage.storage_url, + StaticFiles(directory=self.conda_store.storage.storage_path), + name="static", + ) + self.conda_store.ensure_namespace() self.conda_store.ensure_conda_channels() @@ -218,13 +265,28 @@ def redirect_home(request: Request): from conda_store_server.worker import tasks # noqa - uvicorn.run( - app, - host=self.address, - port=self.port, - reload=False, - debug=(self.log_level == logging.DEBUG), - workers=1, - proxy_headers=self.behind_proxy, - forwarded_allow_ips=("*" if self.behind_proxy else None), - ) + # start worker if in standalone mode + if self.standalone: + import multiprocessing + + multiprocessing.set_start_method("spawn") + + from conda_store_server.worker.app import CondaStoreWorker + + process = multiprocessing.Process(target=CondaStoreWorker.launch_instance) + process.start() + + try: + uvicorn.run( + app, + host=self.address, + port=self.port, + reload=False, + debug=(self.log_level == logging.DEBUG), + workers=1, + proxy_headers=self.behind_proxy, + forwarded_allow_ips=("*" if self.behind_proxy else None), + ) + finally: + if self.standalone: + process.join() diff --git a/conda-store-server/conda_store_server/server/auth.py b/conda-store-server/conda_store_server/server/auth.py index 0ed427685..ada2cb928 100644 --- a/conda-store-server/conda_store_server/server/auth.py +++ b/conda-store-server/conda_store_server/server/auth.py @@ -2,6 +2,7 @@ import secrets import datetime from typing import Optional +import base64 import jwt import requests @@ -10,14 +11,13 @@ from fastapi import APIRouter, Request, Response, HTTPException, Depends from fastapi.responses import RedirectResponse from sqlalchemy import or_, and_ +import yarl -from conda_store_server import schema, orm +from conda_store_server import schema, orm, utils from conda_store_server.server import dependencies -ARN_ALLOWED_REGEX = re.compile( - 
f"^([{schema.ALLOWED_CHARACTERS}*]+)/([{schema.ALLOWED_CHARACTERS}*]+)$" -) +ARN_ALLOWED_REGEX = re.compile(schema.ARN_ALLOWED) class AuthenticationBackend(LoggingConfigurable): @@ -33,6 +33,12 @@ class AuthenticationBackend(LoggingConfigurable): config=True, ) + predefined_tokens = Dict( + {}, + help="Set of tokens with predefined permissions. The feature is helpful for service-accounts", + config=True, + ) + def encrypt_token(self, token: schema.AuthenticationToken): return jwt.encode(token.dict(), self.secret, algorithm=self.jwt_algorithm) @@ -41,7 +47,11 @@ def decrypt_token(self, token: str): def authenticate(self, token): try: - return schema.AuthenticationToken.parse_obj(self.decrypt_token(token)) + if token in self.predefined_tokens: + authentication_token = self.predefined_tokens[token] + else: + authentication_token = self.decrypt_token(token) + return schema.AuthenticationToken.parse_obj(authentication_token) except Exception: return None @@ -88,10 +98,11 @@ class RBACAuthorizationBackend(LoggingConfigurable): "filesystem/*": {"viewer"}, }, help="default permissions to apply to specific resources", + config=True, ) @staticmethod - def compile_arn_regex(arn): + def compile_arn_regex(arn: str) -> re.Pattern: """Take an arn of form "example-*/example-*" and compile to regular expression The expression "example-*/example-*" will match: @@ -108,14 +119,37 @@ def compile_arn_regex(arn): return re.compile(regex_arn) @staticmethod - def compile_arn_sql_like(arn): + def compile_arn_sql_like(arn: str) -> str: match = ARN_ALLOWED_REGEX.match(arn) if match is None: raise ValueError(f"invalid arn={arn}") return re.sub(r"\*", "%", match.group(1)), re.sub(r"\*", "%", match.group(2)) - def get_entity_bindings(self, entity_bindings, authenticated=False): + @staticmethod + def is_arn_subset(arn_1: str, arn_2: str): + """Return true if arn_1 is a subset of arn_2 + + conda-store allows flexible arn statements such as "a*b*/c*" + with "*" being a wildcard seen in regexes. 
This makes the + calculation of if a arn is a subset of another non + trivial. This codes solves this problem. + """ + arn_1_matches_arn_2 = ( + re.fullmatch( + re.sub(r"\*", f"[{schema.ALLOWED_CHARACTERS}*]*", arn_1), arn_2 + ) + is not None + ) + arn_2_matches_arn_1 = ( + re.fullmatch( + re.sub(r"\*", f"[{schema.ALLOWED_CHARACTERS}*]*", arn_2), arn_1 + ) + is not None + ) + return (arn_1_matches_arn_2 and arn_2_matches_arn_1) or arn_2_matches_arn_1 + + def get_entity_bindings(self, entity_bindings, authenticated: bool = False): if authenticated: return { **self.authenticated_role_bindings, @@ -133,7 +167,9 @@ def convert_roles_to_permissions(self, roles): permissions = permissions | self.role_mappings[role] return permissions - def get_entity_binding_permissions(self, entity_bindings, authenticated=False): + def get_entity_binding_permissions( + self, entity_bindings, authenticated: bool = False + ): entity_bindings = self.get_entity_bindings( entity_bindings=entity_bindings, authenticated=authenticated ) @@ -142,7 +178,17 @@ def get_entity_binding_permissions(self, entity_bindings, authenticated=False): for entity_arn, entity_roles in entity_bindings.items() } - def get_entity_permissions(self, entity_bindings, arn, authenticated=False): + def get_entity_permissions( + self, entity_bindings, arn: str, authenticated: bool = False + ): + """Get set of permissions for given ARN given AUTHENTICATION + state and entity_bindings + + ARN is a specific "/" + AUTHENTICATION is either True/False + ENTITY_BINDINGS is a mapping of ARN with regex support to ROLES + ROLES is a set of roles defined in `RBACAuthorizationBackend.role_mappings` + """ entity_binding_permissions = self.get_entity_binding_permissions( entity_bindings=entity_bindings, authenticated=authenticated ) @@ -152,6 +198,34 @@ def get_entity_permissions(self, entity_bindings, arn, authenticated=False): permissions = permissions | set(entity_permissions) return permissions + def is_subset_entity_permissions( + 
self, entity_bindings, new_entity_bindings, authenticated=False + ): + """Determine if new_entity_bindings is a strict subset of entity_bindings + + This feature is required to allow authenticated entitys to + create new permissions that are a strict subset of its + permissions. + """ + entity_binding_permissions = self.get_entity_binding_permissions( + entity_bindings=entity_bindings, authenticated=authenticated + ) + new_entity_binding_permissions = self.get_entity_binding_permissions( + entity_bindings=new_entity_bindings, authenticated=authenticated + ) + for ( + new_entity_binding, + new_permissions, + ) in new_entity_binding_permissions.items(): + _permissions = set() + for entity_binding, permissions in entity_binding_permissions.items(): + if self.is_arn_subset(new_entity_binding, entity_binding): + _permissions = _permissions | permissions + + if not new_permissions <= _permissions: + return False + return True + def authorize( self, entity_bindings, arn, required_permissions, authenticated=False ): @@ -225,7 +299,7 @@ def router(self): }); if (response.ok) { - window.location = "{{ url_for('ui_get_user') }}"; + window.location = "{{ url_for('ui_list_environments') }}"; } else { let data = await response.json(); bannerMessage(`
${data.message}
`); @@ -288,7 +362,7 @@ async def post_login_method( next: Optional[str] = None, templates=Depends(dependencies.get_templates), ): - redirect_url = next or request.url_for("ui_get_user") + redirect_url = next or request.url_for("ui_list_environments") response = RedirectResponse(redirect_url, status_code=303) authentication_token = await self.authenticate(request) if authentication_token is None: @@ -319,10 +393,16 @@ def authenticate_request(self, request: Request, require=False): # cookie based authentication token = request.cookies.get(self.cookie_name) request.state.entity = self.authentication.authenticate(token) - elif request.headers.get("Authorization"): - # auth bearer based authentication - token = request.headers.get("Authorization").split(" ")[1] - request.state.entity = self.authentication.authenticate(token) + elif "Authorization" in request.headers: + parts = request.headers["Authorization"].split(" ", 1) + if parts[0] == "Basic": + try: + username, token = base64.b64decode(parts[1]).decode().split(":", 1) + request.state.entity = self.authentication.authenticate(token) + except Exception: + pass + elif parts[0] == "Bearer": + request.state.entity = self.authentication.authenticate(parts[1]) else: request.state.entity = None @@ -494,10 +574,7 @@ def _oauth_callback_url(request: Request): return _oauth_callback_url def get_oauth_callback_url(self, request: Request): - if callable(self.oauth_callback_url): - return self.oauth_callback_url(request) - else: - return self.oauth_callback_url + return utils.callable_or_value(self.oauth_callback_url, request) login_html = Unicode( """ @@ -524,12 +601,17 @@ def get_login_html(self, request: Request, templates): @staticmethod def oauth_route(auth_url, client_id, redirect_uri, scope=None, state=None): - r = f"{auth_url}?client_id={client_id}&redirect_uri={redirect_uri}&response_type=code" + url = yarl.URL(auth_url) % { + "client_id": client_id, + "redirect_uri": redirect_uri, + "response_type": "code", + } + if 
scope is not None: - r += f"&scope={scope}" + url = url % {"scope": scope} if state is not None: - r += f"&state={state}" - return r + url = url % {"state": state} + return str(url) @property def routes(self): diff --git a/conda-store-server/conda_store_server/server/templates/build.html b/conda-store-server/conda_store_server/server/templates/build.html index 58c74348f..01ed0bd02 100644 --- a/conda-store-server/conda_store_server/server/templates/build.html +++ b/conda-store-server/conda_store_server/server/templates/build.html @@ -36,6 +36,7 @@

Build {{ build.id }}

Conda Packages {{ build.packages | length }}

+

The following build has these packages

@@ -64,19 +65,21 @@

Conda Packages

Conda Environment Artifacts

+

The following build has several methods of running the given environment

    - {% if build.has_yaml %} -
  • YAML: environment.yaml
  • +
  • $ conda-store run {{ build.environment.namespace.name }}/{{ build.environment.name }}:{{ build.id }} -- python
  • + {% if build.has_yaml %} +
  • Conda pinned environment.yaml
  • {% endif %} {% if build.has_lockfile %} -
  • Lockfile: conda-{{ platform }}.lock
  • +
  • Conda-Lock lockfile: conda-{{ platform }}.lock
  • {% endif %} {% if build.has_conda_pack %} -
  • Archive: environment.tar.gz
  • +
  • Conda-Pack archive: environment.tar.gz
  • {% endif %} {% if build.has_docker_manifest %} -
  • Docker: {{ registry_external_url }}/{{ build.environment.namespace.name }}/{{ build.environment.name }}:{{ build.build_key }}
  • +
  • Docker image registry url: {{ registry_external_url }}/{{ build.environment.namespace.name }}/{{ build.environment.name }}:{{ build.build_key }}
  • {% endif %}
diff --git a/conda-store-server/conda_store_server/server/templates/home.html b/conda-store-server/conda_store_server/server/templates/home.html index bba6d1bb5..ae4c66447 100644 --- a/conda-store-server/conda_store_server/server/templates/home.html +++ b/conda-store-server/conda_store_server/server/templates/home.html @@ -3,12 +3,30 @@ {% block title %}Home{% endblock %} {% block content %} -

Environments

+ +{{ banner | default("") | safe }} + + +
+ +
+ +
+
+ + +

+ + + + Environments +

+ {% for environment in environments %}
- {{ environment.namespace.name }}/{{ environment.name }} + {{ environment.namespace.name }} / {{ environment.name }} {{ (environment.current_build.size or 0) | filesizeformat(true) }}
{% if environment.current_build.has_yaml %} @@ -37,5 +55,13 @@
document.execCommand("copy"); document.body.removeChild(tempInput); } + + function searchHandler(event) { + event.preventDefault(); + let search = document.querySelector("#search-text"); + window.location.href = `?search=${search.value}`; + } + let form = document.querySelector("#search-form"); + form.addEventListener("submit", searchHandler); {% endblock %} diff --git a/conda-store-server/conda_store_server/server/templates/login.html b/conda-store-server/conda_store_server/server/templates/login.html index ee9a15eac..614d604fe 100644 --- a/conda-store-server/conda_store_server/server/templates/login.html +++ b/conda-store-server/conda_store_server/server/templates/login.html @@ -4,6 +4,8 @@ {% block content %} +{{ banner | default("") | safe }} + {% block login %} {{ login_html | safe }} {% endblock %} diff --git a/conda-store-server/conda_store_server/server/templates/navigation.html b/conda-store-server/conda_store_server/server/templates/navigation.html index a1068db2e..ade87dc9d 100644 --- a/conda-store-server/conda_store_server/server/templates/navigation.html +++ b/conda-store-server/conda_store_server/server/templates/navigation.html @@ -1,38 +1,45 @@ diff --git a/conda-store-server/conda_store_server/server/templates/user.html b/conda-store-server/conda_store_server/server/templates/user.html index 378f80608..5871997a9 100644 --- a/conda-store-server/conda_store_server/server/templates/user.html +++ b/conda-store-server/conda_store_server/server/templates/user.html @@ -14,6 +14,15 @@
User
+
+
+
+ + +
+
+
+
Namespace
@@ -27,7 +36,7 @@
Namespace
Permissions
-

Below are your current permissions within the Conda-Store application. The "*" will match any string of characters e.g. "na*ce" will match "namespace". This allows Conda-Store to have flexible and powerful authorization. For further information on authorization see the docs

+

Below are your current permissions within the conda-store application. The "*" will match any string of characters e.g. "na*ce" will match "namespace". This allows conda-store to have flexible and powerful authorization. For further information on authorization see the docs

@@ -52,6 +61,36 @@
Permissions
+ + {% endblock %} {% endblock %} diff --git a/conda-store-server/conda_store_server/server/views/api.py b/conda-store-server/conda_store_server/server/views/api.py index 4bb70b5d2..2bc62fbcb 100644 --- a/conda-store-server/conda_store_server/server/views/api.py +++ b/conda-store-server/conda_store_server/server/views/api.py @@ -1,9 +1,10 @@ from typing import List, Dict, Optional +import datetime import pydantic import yaml from fastapi import APIRouter, Request, Depends, HTTPException, Query, Body -from fastapi.responses import RedirectResponse +from fastapi.responses import RedirectResponse, PlainTextResponse from conda_store_server import api, orm, schema, utils, __version__ from conda_store_server.server import dependencies @@ -131,6 +132,10 @@ def api_get_permissions( entity=Depends(dependencies.get_entity), ): authenticated = entity is not None + entity_binding_roles = auth.authorization.get_entity_bindings( + entity.role_bindings if authenticated else {}, authenticated=authenticated + ) + entity_binding_permissions = auth.authorization.get_entity_binding_permissions( entity.role_bindings if authenticated else {}, authenticated=authenticated ) @@ -146,14 +151,74 @@ def api_get_permissions( "status": "ok", "data": { "authenticated": authenticated, - "entity_permissions": entity_binding_permissions, "primary_namespace": entity.primary_namespace if authenticated else conda_store.default_namespace, + "entity_permissions": entity_binding_permissions, + "entity_roles": entity_binding_roles, + "expiration": entity.exp if authenticated else None, }, } +@router_api.post( + "/token/", + response_model=schema.APIPostToken, +) +def api_post_token( + request: Request, + primary_namespace: Optional[str] = Body(None), + expiration: Optional[datetime.datetime] = Body(None), + role_bindings: Optional[Dict[str, List[str]]] = Body(None), + conda_store=Depends(dependencies.get_conda_store), + auth=Depends(dependencies.get_auth), + entity=Depends(dependencies.get_entity), +): + 
authenticated = entity is not None + current_role_bindings = auth.authorization.get_entity_bindings( + entity.role_bindings if authenticated else {}, authenticated=authenticated + ) + current_namespace = ( + entity.primary_namespace if authenticated else conda_store.default_namespace + ) + current_expiration = ( + entity.exp + if authenticated + else ( + datetime.datetime.now(tz=datetime.timezone.utc) + datetime.timedelta(days=1) + ) + ) + + new_namespace = primary_namespace or current_namespace + new_role_bindings = role_bindings or current_role_bindings + new_expiration = expiration or current_expiration + + if not auth.authorization.is_subset_entity_permissions( + current_role_bindings, new_role_bindings, authenticated + ): + raise HTTPException( + status_code=400, + detail="Requested role_bindings are not a subset of current permissions", + ) + + if new_expiration > current_expiration: + raise HTTPException( + status_code=400, + detail="Requested expiration of token is greater than current permissions", + ) + + token = schema.AuthenticationToken( + primary_namespace=new_namespace, + role_bindings=new_role_bindings, + exp=new_expiration, + ) + + return { + "status": "ok", + "data": {"token": auth.authentication.encrypt_token(token)}, + } + + @router_api.get( "/namespace/", response_model=schema.APIListNamespace, @@ -253,6 +318,11 @@ def api_delete_namespace( ) def api_list_environments( search: Optional[str] = None, + namespace: Optional[str] = None, + name: Optional[str] = None, + status: Optional[schema.BuildStatus] = None, + packages: Optional[List[str]] = Query([]), + artifact: Optional[schema.BuildArtifactType] = None, conda_store=Depends(dependencies.get_conda_store), auth=Depends(dependencies.get_auth), entity=Depends(dependencies.get_entity), @@ -260,7 +330,16 @@ def api_list_environments( ): orm_environments = auth.filter_environments( entity, - api.list_environments(conda_store.db, search=search, show_soft_deleted=False), + api.list_environments( + 
conda_store.db, + search=search, + namespace=namespace, + name=name, + status=status, + packages=packages, + artifact=artifact, + show_soft_deleted=False, + ), ) return paginated_api_response( orm_environments, @@ -317,14 +396,20 @@ def api_update_environment_build( request: Request, conda_store=Depends(dependencies.get_conda_store), auth=Depends(dependencies.get_auth), - build_id: int = Body(..., embed=True), + build_id: int = Body(None, embed=True), + description: str = Body(None, embed=True), ): auth.authorize_request( request, f"{namespace}/{name}", {Permissions.ENVIRONMENT_UPDATE}, require=True ) try: - conda_store.update_environment_build(namespace, name, build_id) + if build_id is not None: + conda_store.update_environment_build(namespace, name, build_id) + + if description is not None: + conda_store.update_environment_description(namespace, name, description) + except utils.CondaStoreError as e: raise HTTPException(status_code=400, detail=e.message) @@ -435,21 +520,40 @@ def api_post_specification( @router_api.get("/build/", response_model=schema.APIListBuild) def api_list_builds( + status: Optional[schema.BuildStatus] = None, + packages: Optional[List[str]] = Query([]), + artifact: Optional[schema.BuildArtifactType] = None, + environment_id: Optional[int] = None, + name: Optional[str] = None, + namespace: Optional[str] = None, conda_store=Depends(dependencies.get_conda_store), auth=Depends(dependencies.get_auth), entity=Depends(dependencies.get_entity), paginated_args=Depends(get_paginated_args), ): orm_builds = auth.filter_builds( - entity, api.list_builds(conda_store.db, show_soft_deleted=True) + entity, + api.list_builds( + conda_store.db, + status=status, + packages=packages, + artifact=artifact, + environment_id=environment_id, + name=name, + namespace=namespace, + show_soft_deleted=True, + ), ) return paginated_api_response( orm_builds, paginated_args, schema.Build, - exclude={"specification", "packages"}, + exclude={"specification", "packages", 
"build_artifacts"}, allowed_sort_bys={ "id": orm.Build.id, + "started_on": orm.Build.started_on, + "scheduled_on": orm.Build.scheduled_on, + "ended_on": orm.Build.ended_on, }, default_sort_by=["id"], ) @@ -676,3 +780,40 @@ def api_get_build_yaml( require=True, ) return RedirectResponse(conda_store.storage.get_url(build.conda_env_export_key)) + + +@router_api.get("/build/{build_id}/lockfile/", response_class=PlainTextResponse) +def api_get_build_lockfile( + build_id: int, + request: Request, + conda_store=Depends(dependencies.get_conda_store), + auth=Depends(dependencies.get_auth), +): + build = api.get_build(conda_store.db, build_id) + auth.authorize_request( + request, + f"{build.environment.namespace.name}/{build.environment.name}", + {Permissions.ENVIRONMENT_READ}, + require=True, + ) + + lockfile = api.get_build_lockfile(conda_store.db, build_id) + return lockfile + + +@router_api.get("/build/{build_id}/archive/") +def api_get_build_archive( + build_id: int, + request: Request, + conda_store=Depends(dependencies.get_conda_store), + auth=Depends(dependencies.get_auth), +): + build = api.get_build(conda_store.db, build_id) + auth.authorize_request( + request, + f"{build.environment.namespace.name}/{build.environment.name}", + {Permissions.ENVIRONMENT_READ}, + require=True, + ) + + return RedirectResponse(conda_store.storage.get_url(build.conda_pack_key)) diff --git a/conda-store-server/conda_store_server/server/views/registry.py b/conda-store-server/conda_store_server/server/views/registry.py index 78feb5dff..4bb476547 100644 --- a/conda-store-server/conda_store_server/server/views/registry.py +++ b/conda-store-server/conda_store_server/server/views/registry.py @@ -1,11 +1,12 @@ import json import time -from fastapi import APIRouter, Depends +from fastapi import APIRouter, Depends, Request, HTTPException from fastapi.responses import RedirectResponse, Response from conda_store_server.server import dependencies from conda_store_server import schema, api, orm +from 
conda_store_server.schema import Permissions router_registry = APIRouter(tags=["registry"]) @@ -20,7 +21,7 @@ def _json_response(data, status=200, mimetype="application/json"): def docker_error_message(docker_registry_error: schema.DockerRegistryError): - return _json_response( + response = _json_response( { "errors": [ { @@ -33,6 +34,10 @@ def docker_error_message(docker_registry_error: schema.DockerRegistryError): status=docker_registry_error.value["status"], ) + if docker_registry_error == schema.DockerRegistryError.UNAUTHORIZED: + response.headers["Www-Authenticate"] = 'Basic realm="Registry Realm"' + return response + def dynamic_conda_store_environment(conda_store, packages): def replace_words(s, words): @@ -50,8 +55,8 @@ def replace_words(s, words): # TODO: should really be doing checking on package names to # validate user input - packages = [replace_words(_, constraint_mapper) for _ in sorted(packages)] - environment_name = "|".join(packages) + package_specs = [replace_words(_, constraint_mapper) for _ in sorted(packages)] + environment_name = "-".join(packages) environment = api.get_environment( conda_store.db, environment_name, namespace="conda-store-dynamic" ) @@ -60,7 +65,7 @@ def replace_words(s, words): environment_specification = { "name": environment_name, "channels": ["conda-forge"], - "dependencies": packages, + "dependencies": package_specs, } conda_store.register_environment( environment_specification, namespace="conda-store-dynamic" @@ -124,30 +129,56 @@ def get_docker_image_blob(conda_store, image, blobsum): @router_registry.get("/v2/") -def v2(): +def v2( + request: Request, + entity=Depends(dependencies.get_entity), +): + if entity is None: + return docker_error_message(schema.DockerRegistryError.UNAUTHORIZED) + return _json_response({}) -@router_registry.get("/v2/") +@router_registry.get( + "/v2/{rest:path}", +) def list_tags( rest: str, + request: Request, conda_store=Depends(dependencies.get_conda_store), + 
auth=Depends(dependencies.get_auth), + entity=Depends(dependencies.get_entity), ): parts = rest.split("/") + if len(parts) <= 3: + return docker_error_message(schema.DockerRegistryError.UNSUPPORTED) + + if entity is None: + return docker_error_message(schema.DockerRegistryError.UNAUTHORIZED) + + image = "/".join(parts[:-2]) + + try: + auth.authorize_request( + request, + image + if parts[0] != "conda-store-dynamic" + else "conda-store-dynamic/python", + {Permissions.ENVIRONMENT_READ}, + require=True, + ) + except HTTPException as e: + if e.status_code == 403: + return docker_error_message(schema.DockerRegistryError.DENIED) # /v2//tags/list - if len(parts) > 2 and parts[-2:] == ["tags", "list"]: - image = "/".join(parts[:-2]) + if parts[-2:] == ["tags", "list"]: raise NotImplementedError() # /v2//manifests/ - elif len(parts) > 2 and parts[-2] == "manifests": - image = "/".join(parts[:-2]) + elif parts[-2] == "manifests": tag = parts[-1] return get_docker_image_manifest(conda_store, image, tag) # /v2//blobs/ - elif len(parts) > 2 and parts[-2] == "blobs": - image = "/".join(parts[:-2]) + elif parts[-2] == "blobs": blobsum = parts[-1].split(":")[1] return get_docker_image_blob(conda_store, image, blobsum) - else: - return docker_error_message(schema.DockerRegistryError.UNSUPPORTED) diff --git a/conda-store-server/conda_store_server/server/views/ui.py b/conda-store-server/conda_store_server/server/views/ui.py index 3101d6093..401087bfc 100644 --- a/conda-store-server/conda_store_server/server/views/ui.py +++ b/conda-store-server/conda_store_server/server/views/ui.py @@ -1,5 +1,7 @@ +from typing import Optional + from fastapi import APIRouter, Request, Depends -from fastapi.responses import RedirectResponse, PlainTextResponse +from fastapi.responses import RedirectResponse import yaml from conda_store_server import api @@ -44,6 +46,7 @@ def sort_namespace(n): @router_ui.get("/") def ui_list_environments( request: Request, + search: Optional[str] = None, 
templates=Depends(dependencies.get_templates), conda_store=Depends(dependencies.get_conda_store), auth=Depends(dependencies.get_auth), @@ -51,7 +54,8 @@ def ui_list_environments( entity=Depends(dependencies.get_entity), ): orm_environments = auth.filter_environments( - entity, api.list_environments(conda_store.db, show_soft_deleted=False) + entity, + api.list_environments(conda_store.db, search=search, show_soft_deleted=False), ) context = { @@ -227,58 +231,3 @@ def ui_get_user( "entity_binding_permissions": entity_binding_permissions, } return templates.TemplateResponse("user.html", context) - - -@router_ui.get("/build/{build_id}/logs/") -def api_get_build_logs( - build_id: int, - request: Request, - conda_store=Depends(dependencies.get_conda_store), - auth=Depends(dependencies.get_auth), -): - build = api.get_build(conda_store.db, build_id) - auth.authorize_request( - request, - f"{build.environment.namespace.name}/{build.environment.name}", - {Permissions.ENVIRONMENT_READ}, - require=True, - ) - - return RedirectResponse(conda_store.storage.get_url(build.log_key)) - - -@router_ui.get("/build/{build_id}/lockfile/", response_class=PlainTextResponse) -def api_get_build_lockfile( - build_id: int, - request: Request, - conda_store=Depends(dependencies.get_conda_store), - auth=Depends(dependencies.get_auth), -): - build = api.get_build(conda_store.db, build_id) - auth.authorize_request( - request, - f"{build.environment.namespace.name}/{build.environment.name}", - {Permissions.ENVIRONMENT_READ}, - require=True, - ) - - lockfile = api.get_build_lockfile(conda_store.db, build_id) - return lockfile - - -@router_ui.get("/build/{build_id}/archive/") -def api_get_build_archive( - build_id: int, - request: Request, - conda_store=Depends(dependencies.get_conda_store), - auth=Depends(dependencies.get_auth), -): - build = api.get_build(conda_store.db, build_id) - auth.authorize_request( - request, - f"{build.environment.namespace.name}/{build.environment.name}", - 
{Permissions.ENVIRONMENT_READ}, - require=True, - ) - - return RedirectResponse(conda_store.storage.get_url(build.conda_pack_key)) diff --git a/conda-store-server/conda_store_server/storage.py b/conda-store-server/conda_store_server/storage.py index c81a414e0..dfb2a7d5c 100644 --- a/conda-store-server/conda_store_server/storage.py +++ b/conda-store-server/conda_store_server/storage.py @@ -199,17 +199,17 @@ class LocalStorage(Storage): ) def fset(self, db, build_id, key, filename, content_type=None, artifact_type=None): - filename = os.path.join(self.storage_path, key) - os.makedirs(os.path.dirname(filename), exist_ok=True) + destination_filename = os.path.abspath(os.path.join(self.storage_path, key)) + os.makedirs(os.path.dirname(destination_filename), exist_ok=True) - shutil.copyfile(filename, os.path.join(self.storage_path, key)) + shutil.copyfile(filename, destination_filename) super().fset(db, build_id, key, filename, artifact_type) def set(self, db, build_id, key, value, content_type=None, artifact_type=None): - filename = os.path.join(self.storage_path, key) - os.makedirs(os.path.dirname(filename), exist_ok=True) + destination_filename = os.path.join(self.storage_path, key) + os.makedirs(os.path.dirname(destination_filename), exist_ok=True) - with open(filename, "wb") as f: + with open(destination_filename, "wb") as f: f.write(value) super().set(db, build_id, key, value, artifact_type) diff --git a/conda-store-server/conda_store_server/utils.py b/conda-store-server/conda_store_server/utils.py index 19ad41631..06c059370 100644 --- a/conda-store-server/conda_store_server/utils.py +++ b/conda-store-server/conda_store_server/utils.py @@ -72,3 +72,9 @@ def sort_key(v): def datastructure_hash(v): json_blob = json.dumps(recursive_sort(v)) return hashlib.sha256(json_blob.encode("utf-8")).hexdigest() + + +def callable_or_value(v, *args, **kwargs): + if callable(v): + return v(*args, **kwargs) + return v diff --git a/conda-store-server/conda_store_server/worker/app.py 
b/conda-store-server/conda_store_server/worker/app.py index f49d12b4c..57b36600e 100644 --- a/conda-store-server/conda_store_server/worker/app.py +++ b/conda-store-server/conda_store_server/worker/app.py @@ -13,6 +13,17 @@ class CondaStoreWorker(Application): "config": "CondaStoreWorker.config_file", } + flags = { + "standalone": ( + { + "CondaStoreServer": { + "standalone": True, + } + }, + "Run conda-store-server in standalone mode with celery worker as a subprocess of webserver", + ), + } + log_level = Integer( logging.INFO, help="log level to use", @@ -65,6 +76,7 @@ def initialize(self, *args, **kwargs): self.load_config_file(self.config_file) self.conda_store = CondaStore(parent=self, log=self.log) + # ensure checks on redis_url self.conda_store.redis_url diff --git a/conda-store-server/conda_store_server/worker/tasks.py b/conda-store-server/conda_store_server/worker/tasks.py index 097093b4b..9597f6e5d 100644 --- a/conda-store-server/conda_store_server/worker/tasks.py +++ b/conda-store-server/conda_store_server/worker/tasks.py @@ -199,6 +199,11 @@ def delete_build_artifact(conda_store, build_artifact): ): shutil.rmtree(conda_prefix) conda_store.db.delete(build_artifact) + elif build_artifact.artifact_type == schema.BuildArtifactType.CONTAINER_REGISTRY: + pass + # # container registry tag deletion is not generally implemented + # # the underlying library `python_docker` is already capable + # conda_store.container_registry.delete_image(build_artifact.key) elif build_artifact.artifact_type == schema.BuildArtifactType.LOCKFILE: pass else: diff --git a/conda-store-server/environment-dev.yaml b/conda-store-server/environment-dev.yaml index b63410ea9..8316a27dc 100644 --- a/conda-store-server/environment-dev.yaml +++ b/conda-store-server/environment-dev.yaml @@ -4,7 +4,7 @@ channels: dependencies: # conda builds - conda - - conda-docker + - conda-docker >= 0.1.2 - conda-pack - conda-lock >=1.0.5 # web server @@ -26,6 +26,7 @@ dependencies: - itsdangerous - jinja2 - 
python-multipart + - alembic # artifact storage - minio # CLI diff --git a/conda-store-server/environment.yaml b/conda-store-server/environment.yaml index 403fd0454..256521c8b 100644 --- a/conda-store-server/environment.yaml +++ b/conda-store-server/environment.yaml @@ -4,7 +4,7 @@ channels: dependencies: # conda environment builds - conda - - conda-docker + - conda-docker >= 0.1.2 - conda-pack - conda-lock >=1.0.5 # web server @@ -12,6 +12,7 @@ dependencies: - flower - redis-py - sqlalchemy + - alembic - psycopg2 - pymysql - requests diff --git a/conda-store-server/pyproject.toml b/conda-store-server/pyproject.toml index b49fdd0ec..6cdbd6b21 100644 --- a/conda-store-server/pyproject.toml +++ b/conda-store-server/pyproject.toml @@ -1,3 +1,7 @@ +[build-system] +requires = ["setuptools"] +build-backend = "setuptools.build_meta" + [tool.black] line-length = 88 target-version = ['py36', 'py37', 'py38'] diff --git a/conda-store-server/setup.cfg b/conda-store-server/setup.cfg index 15e4a0d0c..52a4fa8b2 100644 --- a/conda-store-server/setup.cfg +++ b/conda-store-server/setup.cfg @@ -3,6 +3,7 @@ name = conda-store-server version = attr: conda_store_server.__version__ description = Conda Environment Management, Builds, and Serve long_description = file: README.md, LICENSE +long_description_content_type = text/markdown author = Christopher Ostrouchov author_email = chris.ostrouchov@gmail.com url = https://github.com/Quansight/conda-store @@ -17,7 +18,11 @@ classifiers = Programming Language :: Python :: 3.7 Programming Language :: Python :: 3.8 Programming Language :: Python :: 3.9 - Programming Language :: Python :: 3 :: Only", + Programming Language :: Python :: 3 :: Only +project_urls = + Bug Reports = https://github.com/quansight/conda-store + Documentation = https://conda-store.readthedocs.io/ + Source = https://github.com/quansight/conda-store [options] zip_safe = False @@ -30,6 +35,7 @@ install_requires = requests fastapi pyyaml + redis 
pydantic minio traitlets @@ -41,13 +47,16 @@ install_requires = python-multipart # conda (which should not be included here) +[options.package_data] +conda_store_server = alembic.ini, alembic/*, alembic/versions/*, server/templates/* + [options.entry_points] console_scripts = conda-store-server=conda_store_server.server.__main__:main conda-store-worker=conda_store_server.worker.__main__:main [options.extras_require] -dev = pytest; pytest-mock; black==22.3.0; flake8; sphinx; recommonmark; pydata-sphinx-theme +dev = pytest; pytest-mock; black==22.3.0; flake8; sphinx; recommonmark; pydata-sphinx-theme; build; twine [options.packages.find] exclude = diff --git a/conda-store-server/tests/test_auth.py b/conda-store-server/tests/test_auth.py index 6d7f01d0a..d6ba51199 100644 --- a/conda-store-server/tests/test_auth.py +++ b/conda-store-server/tests/test_auth.py @@ -4,9 +4,8 @@ from conda_store_server.server.auth import ( AuthenticationBackend, RBACAuthorizationBackend, - Permissions, ) -from conda_store_server.schema import AuthenticationToken +from conda_store_server.schema import AuthenticationToken, Permissions @pytest.mark.parametrize( @@ -100,6 +99,14 @@ def test_expired_token(): }, "example-namespace/example-name", {Permissions.ENVIRONMENT_DELETE}, + False, + ), + ( + { + "example-namespace/example-name": {"developer", "admin"}, + }, + "example-namespace/example-name", + {Permissions.ENVIRONMENT_DELETE}, True, ), ( @@ -151,3 +158,98 @@ def test_end_to_end_auth_flow(): }, authenticated=True, ) + + +@pytest.mark.parametrize( + "arn_1,arn_2,value", + [ + ("ab/cd", "a*b/c*d", True), + ("a1111b/c22222d", "a*b/c*d", True), + ("a1/cd", "a*b/c*d", False), + ("abc/ed", "a*b*c/e*d", True), + ("a111bc/ed", "a*b*c/e*d", True), + ("a111b2222c/e3333d", "a*b*c/e*d", True), + ("aaabbbcccc/eeddd", "a*b*c/e*d", True), + ("aaabbbcccc1/eeddd", "a*b*c/e*d", False), + ("aaabbbcccc1c/eeddd", "a*b*c/e*d", True), + ], +) +def test_is_arn_subset(arn_1, arn_2, value): + assert 
RBACAuthorizationBackend.is_arn_subset(arn_1, arn_2) == value + + +@pytest.mark.parametrize( + "entity_bindings, new_entity_bindings, authenticated, value", + [ + # */* viewer is a subset of admin + ({"*/*": ["admin"]}, {"*/*": ["viewer"]}, False, True), + ({"*/*": ["admin"]}, {"*/*": ["viewer"]}, True, True), + # */* admin is not a subset of viewer + ({"*/*": ["viewer"]}, {"*/*": ["admin"]}, False, False), + ({"*/*": ["viewer"]}, {"*/*": ["admin"]}, True, False), + # a/b viewer is a subset of admin + ({"a/b": ["admin"]}, {"a/b": ["viewer"]}, False, True), + ({"a/b": ["admin"]}, {"a/b": ["viewer"]}, True, True), + # a/b admin is not a subset of viewer + ({"a/b": ["viewer"]}, {"a/b": ["admin"]}, False, False), + ({"a/b": ["viewer"]}, {"a/b": ["admin"]}, True, False), + # default/* vs. */* + ({"*/*": ["viewer"]}, {"default/*": ["viewer"]}, False, True), + ({"*/*": ["viewer"]}, {"default/*": ["viewer"]}, True, True), + # efault/* vs. d*/* + ({"d*/*": ["viewer"]}, {"efault/*": ["viewer"]}, False, False), + ({"d*/*": ["viewer"]}, {"efault/*": ["viewer"]}, True, False), + # multiple entities keys + ( + {"d*/*": ["viewer"], "de*/*": ["admin"]}, + {"default/*": ["developer"]}, + False, + True, + ), + ( + {"d*/*": ["viewer"], "de*/*": ["admin"]}, + {"default/*": ["developer"]}, + True, + True, + ), + # multiple entities keys + ( + {"d*/*": ["viewer"], "de*/*": ["admin"]}, + {"dcefault/*": ["developer"]}, + False, + False, + ), + ( + {"d*/*": ["viewer"], "de*/*": ["admin"]}, + {"dcefault/*": ["developer"]}, + True, + False, + ), + # multiple entities keys + ({"d*/*": ["viewer"]}, {"d*/*": ["viewer"], "dc*/*": ["viewer"]}, False, True), + ({"d*/*": ["viewer"]}, {"d*/*": ["viewer"], "dc*/*": ["viewer"]}, True, True), + # multiple entities keys + ( + {"d*/*": ["viewer"]}, + {"d*/*": ["viewer"], "dc*/*": ["developer"]}, + False, + False, + ), + ( + {"d*/*": ["viewer"]}, + {"d*/*": ["viewer"], "dc*/*": ["developer"]}, + True, + False, + ), + ], +) +def 
test_is_subset_entity_permissions( + entity_bindings, new_entity_bindings, authenticated, value +): + authorization = RBACAuthorizationBackend() + assert ( + authorization.is_subset_entity_permissions( + entity_bindings, new_entity_bindings, authenticated + ) + == value + ) diff --git a/conda-store/Dockerfile b/conda-store/Dockerfile index 128008424..55df08fca 100644 --- a/conda-store/Dockerfile +++ b/conda-store/Dockerfile @@ -2,9 +2,6 @@ FROM --platform=linux/amd64 condaforge/mambaforge USER root -ARG GATOR_GIT_URL=https://github.com/Quansight/gator.git -ARG GATOR_GIT_BRANCH=change-default-view-for-env - RUN apt-get update \ && apt-get install -y curl \ && rm -rf /var/lib/apt/lists/* @@ -13,21 +10,16 @@ COPY environment.yaml /opt/conda-store/environment.yaml RUN mamba env create -f /opt/conda-store/environment.yaml -COPY ./ /opt/conda-store/ - ENV PATH=/opt/conda/condabin:/opt/conda/envs/conda-store/bin:/opt/conda/bin:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin:${PATH} -RUN cd /opt/conda-store && \ - pip install . && \ - jupyter labextension install jupyterlab-launcher-shortcuts +COPY ./ /opt/conda-store/ -RUN git clone ${GATOR_GIT_URL} && \ - cd gator && \ - git checkout origin/${GATOR_GIT_BRANCH} && \ - pip install -e . && \ - jupyter server extension enable mamba_gator --sys-prefix +RUN cd /opt/conda-store && \ + pip install -e . RUN mkdir -p /opt/jupyterhub && \ chown -R 1000:1000 /opt/jupyterhub +USER 1000:1000 + WORKDIR /opt/jupyterhub diff --git a/conda-store/README.md b/conda-store/README.md index 554785a9f..8112cd78a 100644 --- a/conda-store/README.md +++ b/conda-store/README.md @@ -1,10 +1,27 @@ -# Conda Store +# conda-store [![Documentation Status](https://readthedocs.org/projects/conda-store/badge/?version=latest)](https://conda-store.readthedocs.io/en/latest/?badge=latest) -A client library for interacting with a Conda-Store server. See the +A client library for interacting with a conda-store server. 
See the [documentation](https://conda-store.readthedocs.io/en/latest/) for more information. The client library provides a CLI for interacting with conda-store. -Currently this part of conda-store is not well developed. +```shell +$ conda-store --help +Usage: conda-store [OPTIONS] COMMAND [ARGS]... + +Options: + --conda-store-url TEXT conda-store base url including prefix + --auth [none|token|basic] conda-store authentication to use + --no-verify-ssl Disable tls verification on API requests + --help Show this message and exit. + +Commands: + download Download artifacts for given build + info Get current permissions and default namespace + list + run Execute given environment specified as a URI with COMMAND + solve Remotely solve given environment.yaml + wait Wait for given URI to complete or fail building +``` diff --git a/conda-store/conda_store/__init__.py b/conda-store/conda_store/__init__.py index e69de29bb..58ce5cd17 100644 --- a/conda-store/conda_store/__init__.py +++ b/conda-store/conda_store/__init__.py @@ -0,0 +1 @@ +__version__ = "0.4.11" diff --git a/conda-store/conda_store/__main__.py b/conda-store/conda_store/__main__.py new file mode 100644 index 000000000..d10f11410 --- /dev/null +++ b/conda-store/conda_store/__main__.py @@ -0,0 +1,43 @@ +from conda_store.cli import cli + + +def detect_shebang(): + """Enable conda-store run within shebangs + + Feature inspired by nix-shell shebangs see: + - usage :: https://nixos.wiki/wiki/Nix-shell_shebang + - implementation :: https://github.com/nixos/nix/blob/7a9ac91a43e1e05e9df9d1b9b4a2cf322d62bb1c/src/nix-build/nix-build.cc#L108-L130 + """ + import sys + import re + import pathlib + import shlex + + filename = pathlib.Path(sys.argv[1]).resolve() + args = ["conda-store", "run"] + + try: + with filename.open() as f: + line = f.readline() + # shebangs are common within entrypoints in python scripts + # so we must be strict and the specification for shebangs + # is quite limiting + if 
re.fullmatch(r"^#!.*conda-store\s*$", line.strip()): + for line in f: + match = re.fullmatch(r"^#!\s*conda-store run (.*)$", line.strip()) + if match: + for token in shlex.split(match.group(1)): + args.append(token) + args.append(str(filename)) + sys.argv = args + except Exception: + pass + + +def main(): + detect_shebang() + cli() + + +if __name__ == "__main__": + main() diff --git a/conda-store/conda_store/api.py b/conda-store/conda_store/api.py new file mode 100644 index 000000000..5ffc52d78 --- /dev/null +++ b/conda-store/conda_store/api.py @@ -0,0 +1,192 @@ +import os +import math +import datetime +from typing import List, Dict + +import yarl + +from conda_store import auth, exception + + +class CondaStoreAPIError(exception.CondaStoreError): + pass + + +class CondaStoreAPI: + def __init__( + self, conda_store_url: str, auth_type: str = "none", verify_ssl=True, **kwargs + ): + self.conda_store_url = yarl.URL(conda_store_url) + self.api_url = self.conda_store_url / "api/v1" + self.auth_type = auth_type + self.verify_ssl = verify_ssl + + if auth_type == "token": + self.api_token = kwargs.get("api_token", os.environ["CONDA_STORE_TOKEN"]) + elif auth_type == "basic": + self.username = kwargs.get("username", os.environ["CONDA_STORE_USERNAME"]) + self.password = kwargs.get("password", os.environ["CONDA_STORE_PASSWORD"]) + + async def __aenter__(self): + if self.auth_type == "none": + self.session = await auth.none_authentication(verify_ssl=self.verify_ssl) + elif self.auth_type == "token": + self.session = await auth.token_authentication( + self.api_token, verify_ssl=self.verify_ssl + ) + elif self.auth_type == "basic": + self.session = await auth.basic_authentication( + self.conda_store_url, + self.username, + self.password, + verify_ssl=self.verify_ssl, + ) + return self + + async def __aexit__(self, exc_type, exc, tb): + await self.session.close() + + async def get_paginated_request(self, url: yarl.URL, max_pages=None, **kwargs): + data = [] + + async with 
self.session.get(url) as response: + response_data = await response.json() + num_pages = math.ceil(response_data["count"] / response_data["size"]) + data.extend(response_data["data"]) + + if max_pages is not None: + num_pages = min(max_pages, num_pages) + + for page in range(2, num_pages + 1): + async with self.session.get(url % {"page": page}) as response: + data.extend((await response.json())["data"]) + + return data + + async def get_permissions(self): + async with self.session.get(self.api_url / "permission") as response: + return (await response.json())["data"] + + async def create_token( + self, + primary_namespace: str = None, + role_bindings: Dict[str, List[str]] = None, + expiration: datetime.datetime = None, + ): + current_permissions = await self.get_permissions() + requested_permissions = { + "primary_namespace": primary_namespace + or current_permissions["primary_namespace"], + "role_bindings": role_bindings or current_permissions["entity_roles"], + "exp": expiration or current_permissions["expiration"], + } + async with self.session.post( + self.api_url / "token", json=requested_permissions + ) as response: + if response.status == 400: + raise CondaStoreAPIError((await response.json())["message"]) + + return (await response.json())["data"]["token"] + + async def list_namespaces(self): + return await self.get_paginated_request(self.api_url / "namespace") + + async def create_namespace(self, namespace: str): + async with self.session.post( + self.api_url / "namespace" / namespace + ) as response: + if response.status != 200: + raise CondaStoreAPIError(f"Error creating namespace {namespace}") + + async def delete_namespace(self, namespace: str): + async with self.session.delete( + self.api_url / "namespace" / namespace + ) as response: + if response.status != 200: + raise CondaStoreAPIError(f"Error deleting namespace {namespace}") + + async def list_environments(self, status: str, artifact: str, packages: List[str]): + url = self.api_url / "environment" 
+ if status: + url = url % {"status": status} + if artifact: + url = url % {"artifact": artifact} + if packages: + url = url % {"packages": packages} + return await self.get_paginated_request(url) + + async def delete_environment(self, namespace: str, name: str): + async with self.session.delete( + self.api_url / "environment" / namespace / name + ) as response: + if response.status != 200: + raise CondaStoreAPIError( + f"Error deleting environment {namespace}/{name}" + ) + + async def create_environment(self, namespace: str, specification: str): + async with self.session.post( + self.api_url / "specification", + json={ + "namespace": namespace, + "specification": specification, + }, + ) as response: + data = await response.json() + if response.status != 200: + message = data["message"] + raise CondaStoreAPIError( + f"Error creating environment in namespace {namespace}\nReason {message}" + ) + + return data["data"]["build_id"] + + async def get_environment(self, namespace: str, name: str): + async with self.session.get( + self.api_url / "environment" / namespace / name + ) as response: + if response.status != 200: + raise CondaStoreAPIError( + f"Error getting environment {namespace}/{name}" + ) + + return (await response.json())["data"] + + async def solve_environment( + self, channels: List[str], conda: List[str], pip: List[str] + ): + async with self.session.get( + self.api_url + / "specification" + % { + "channels": channels, + "conda": conda, + "pip": pip, + } + ) as response: + return (await response.json())["solve"] + + async def list_builds(self, status: str, artifact: str, packages: List[str]): + url = self.api_url / "build" + if status: + url = url % {"status": status} + if artifact: + url = url % {"artifact": artifact} + if packages: + url = url % {"packages": packages} + return await self.get_paginated_request(url) + + async def get_build(self, build_id: int): + async with self.session.get(self.api_url / "build" / str(build_id)) as response: + if 
response.status != 200: + raise CondaStoreAPIError(f"Error getting build {build_id}") + + return (await response.json())["data"] + + async def download(self, build_id: int, artifact: str) -> bytes: + url = self.api_url / "build" / str(build_id) / artifact + async with self.session.get(url) as response: + if response.status != 200: + raise CondaStoreAPIError(f"Error downloading build {build_id}") + + return await response.content.read() diff --git a/conda-store/conda_store/auth.py b/conda-store/conda_store/auth.py new file mode 100644 index 000000000..34d2c58e5 --- /dev/null +++ b/conda-store/conda_store/auth.py @@ -0,0 +1,33 @@ +import aiohttp +import yarl + + +async def none_authentication(verify_ssl: bool = True): + return aiohttp.ClientSession( + connector=aiohttp.TCPConnector(ssl=None if verify_ssl else False), + ) + + +async def token_authentication(api_token: str, verify_ssl: bool = True): + return aiohttp.ClientSession( + headers={"Authorization": f"token {api_token}"}, + connector=aiohttp.TCPConnector(ssl=None if verify_ssl else False), + ) + + +async def basic_authentication( + conda_store_url, username, password, verify_ssl: bool = True +): + session = aiohttp.ClientSession( + connector=aiohttp.TCPConnector(ssl=None if verify_ssl else False), + ) + + await session.post( + yarl.URL(conda_store_url) / "login", + json={ + "username": username, + "password": password, + }, + ) + + return session diff --git a/conda-store/conda_store/cli.py b/conda-store/conda_store/cli.py new file mode 100644 index 000000000..b0ffad7fc --- /dev/null +++ b/conda-store/conda_store/cli.py @@ -0,0 +1,457 @@ +import os +import re +import tempfile +import time +import asyncio +from typing import List +import datetime + +from ruamel.yaml import YAML +import click + +from conda_store import api, runner, utils, exception, __version__ + + +async def parse_build(conda_store_api: api.CondaStoreAPI, uri: str): + if re.fullmatch("(.+)/(.*):(.*)", uri): + namespace, name, build_id = 
re.fullmatch("(.+)/(.*):(.*)", uri).groups() + build = await conda_store_api.get_build(build_id) + environment = await conda_store_api.get_environment(namespace, name) + if build["environment_id"] != environment["id"]: + raise exception.CondaStoreError( + f"build {build_id} does not belong to environment {namespace}/{name}" + ) + build_id = int(build_id) + elif re.fullmatch("(.+)/(.*)", uri): + namespace, name = re.fullmatch("(.+)/(.*)", uri).groups() + environment = await conda_store_api.get_environment(namespace, name) + build_id = environment["current_build_id"] + elif re.fullmatch(r"\d+", uri): # build_id + build_id = int(uri) + else: + raise exception.CondaStoreError(f"unable to parse uri={uri}") + + return build_id + + +@click.group() +@click.option( + "--conda-store-url", + default="http://localhost:5000", + envvar="CONDA_STORE_URL", + help="conda-store base url including prefix", +) +@click.option( + "--auth", + envvar="CONDA_STORE_AUTH", + type=click.Choice(["none", "token", "basic"], case_sensitive=False), + help="conda-store authentication to use", + default="none", +) +@click.option( + "--no-verify-ssl", + envvar="CONDA_STORE_NO_VERIFY", + is_flag=True, + default=False, + help="Disable tls verification on API requests", +) +@click.version_option(__version__) +@click.pass_context +def cli(ctx, conda_store_url: str, auth: str, no_verify_ssl: bool): + ctx.ensure_object(dict) + ctx.obj["CONDA_STORE_API"] = api.CondaStoreAPI( + conda_store_url=conda_store_url, + verify_ssl=not no_verify_ssl, + auth_type=auth, + ) + + +@cli.command(name="info") +@click.pass_context +@utils.coro +async def get_permissions(ctx): + """Get current permissions and default namespace""" + async with ctx.obj["CONDA_STORE_API"] as conda_store: + data = await conda_store.get_permissions() + + utils.console.print( + f"Default namespace is [bold]{data['primary_namespace']}[/bold]" + ) + utils.console.print(f"Authenticated [bold]{data['authenticated']}[/bold]") + + columns = { + 
"Namespace": "namespace", + "Name": "name", + "Permissions": "permissions", + } + + rows = [] + for key, value in data["entity_permissions"].items(): + namespace, name = key.split("/") + rows.append( + { + "namespace": namespace, + "name": name, + "permissions": " ".join(_ for _ in value), + } + ) + + utils.output_table("Permissions", columns, rows) + + +@cli.command(name="token") +@click.option("--namespace", help="Primary namespace to assign to new token") +@click.option("--expiration", type=int, help="Seconds from now for token to expire") +@click.option( + "--permission", + help="Permission to assign to token in form /:,,...", + multiple=True, +) +@click.pass_context +@utils.coro +async def get_token(ctx, namespace: str, expiration: int, permission: List[str]): + role_bindings = {} + for p in permission: + namespace_name, roles = p.split(":") + roles = roles.split(",") + role_bindings[namespace_name] = roles + + if expiration: + expiration = str( + datetime.datetime.now(tz=datetime.timezone.utc) + + datetime.timedelta(seconds=expiration) + ) + + async with ctx.obj["CONDA_STORE_API"] as conda_store: + print( + await conda_store.create_token( + primary_namespace=namespace, + expiration=expiration, + role_bindings=role_bindings, + ), + end="", + ) + + +@cli.command(name="download") +@click.argument("uri") +@click.option( + "--artifact", + default="lockfile", + type=click.Choice(["logs", "yaml", "lockfile", "archive"], case_sensitive=False), +) +@click.option( + "--output-filename", + help="Output filename for given download. 
build-{build_id}.{extension yaml|lock|tar.gz|image}", +) +@click.pass_context +@utils.coro +async def download(ctx, uri: str, artifact: str, output_filename: str = None): + """Download artifacts for given build + + URI in format '', '/', '/:' + """ + async with ctx.obj["CONDA_STORE_API"] as conda_store: + build_id = await parse_build(conda_store, uri) + + content = await conda_store.download(build_id, artifact) + if output_filename is None: + if artifact == "yaml": + extension = "yaml" + elif artifact == "lockfile": + extension = "lock" + elif artifact == "archive": + extension = "tar.gz" + elif artifact == "docker": + extension = "image" + output_filename = f"build-{build_id}.{extension}" + + with open(output_filename, "wb") as f: + f.write(content) + + print(os.path.abspath(output_filename), end="") + + +@cli.command("wait") +@click.argument("uri") +@click.option( + "--timeout", + type=int, + default=10 * 60, + help="Time to wait for build to complete until reporting an error. Default 10 minutes", +) +@click.option( + "--interval", + type=int, + default=10, + help="Time to wait between polling for build status.Default 10 seconds", +) +@click.option( + "--artifact", + type=click.Choice( + ["build", "lockfile", "yaml", "archive", "docker"], case_sensitive=False + ), + default="build", + help="Choice of artifact to wait for. 
Default is 'build' which indicates the environment was built.", +) +@click.pass_context +@utils.coro +async def wait_environment(ctx, uri: str, timeout: int, interval: int, artifact: str): + """Wait for given URI to complete or fail building + + URI in format '', '/', '/:' + """ + async with ctx.obj["CONDA_STORE_API"] as conda_store: + build_id = await parse_build(conda_store, uri) + + start_time = time.time() + while (time.time() - start_time) < timeout: + build = await conda_store.get_build(build_id) + build_artifact_types = set( + _["artifact_type"] for _ in build["build_artifacts"] + ) + + if artifact == "build" and build["status"] == "COMPLETED": + return + elif artifact == "build" and build["status"] == "FAILED": + raise exception.CondaStoreError(f"Build {build_id} failed") + elif artifact == "lockfile" and "LOCKFILE" in build_artifact_types: + return + elif artifact == "yaml" and "YAML" in build_artifact_types: + return + elif artifact == "archive" and "CONDA_PACK" in build_artifact_types: + return + elif artifact == "docker" and "DOCKER_MANIFEST" in build_artifact_types: + return + await asyncio.sleep(interval) + + raise exception.CondaStoreError( + f"Build {build_id} failed to complete in {timeout} seconds" + ) + + +@cli.command("run") +@click.option( + "--artifact", + default="archive", + type=click.Choice(["yaml", "lockfile", "archive"], case_sensitive=False), + help="Artifact type to use for execution. 
Conda-Pack is the default format", +) +@click.option( + "--no-cache", + is_flag=True, + default=False, + help="Disable caching builds for fast execution", +) +@click.argument("uri") +@click.argument("command", nargs=-1) +@click.pass_context +@utils.coro +async def run_environment(ctx, uri: str, no_cache: bool, command: str, artifact: str): + """Execute given environment specified as a URI with COMMAND + + URI in format '', '/', '/:'\n + COMMAND is a list of arguments to execute in given environment + """ + if len(command) == 0: + command = ["python"] + + async with ctx.obj["CONDA_STORE_API"] as conda_store: + build_id = await parse_build(conda_store, uri) + + if no_cache: + with tempfile.TemporaryDirectory() as tmpdir: + await runner.run_build(conda_store, tmpdir, build_id, command, artifact) + else: + directory = os.path.join( + tempfile.gettempdir(), "conda-store", str(build_id) + ) + os.makedirs(directory, exist_ok=True) + await runner.run_build(conda_store, directory, build_id, command, artifact) + + +@cli.command("solve") +@click.argument("filename", type=click.File("r")) +@click.option( + "--output", + default="table", + type=click.Choice(["json", "table"]), + help="Output format to display solve. 
Default table.", +) +@click.pass_context +@utils.coro +async def solve_environment(ctx, filename, output: str): + """Remotely solve given environment.yaml""" + yaml = YAML(typ="safe") + data = yaml.load(filename) + + def _get_pip(data): + for _ in data.get("dependencies", []): + if isinstance(_, dict): + return _.get("pip", []) + return [] + + arguments = { + "channels": data.get("channels", []), + "conda": [_ for _ in data.get("dependencies", []) if isinstance(_, str)], + "pip": _get_pip(data), + } + + async with ctx.obj["CONDA_STORE_API"] as conda_store: + packages = await conda_store.solve_environment(**arguments) + + total_size = 0 + for package in packages: + total_size += package["size"] + package["size"] = utils.sizeof_fmt(package["size"]) + + if output == "table": + utils.output_table( + "Packages", + { + "Name": "name", + "Version": "version", + "Channel": "channel_id", + "Size": "size", + }, + packages, + ) + utils.console.print(f"Total Size: {utils.sizeof_fmt(total_size)}") + elif output == "json": + utils.output_json(packages) + + +# ================= LIST ================== +@cli.group("list") +def list_group(): + pass + + +@list_group.command("namespace") +@click.option( + "--output", + default="table", + type=click.Choice(["json", "table"]), + help="Output format to display builds. 
Default table.", +) +@click.pass_context +@utils.coro +async def list_namespace(ctx, output: str): + async with ctx.obj["CONDA_STORE_API"] as conda_store: + namespaces = await conda_store.list_namespaces() + + if output == "table": + utils.output_table("Namespaces", {"Id": "id", "Name": "name"}, namespaces) + elif output == "json": + utils.output_json(namespaces) + + +@list_group.command("build") +@click.option( + "--status", + # see conda_store_server.schema.BuildStatus + type=click.Choice(["QUEUED", "BUILDING", "COMPLETED", "FAILED"]), + help="Filter builds which have given status", +) +@click.option( + "--artifact", + # see conda_store_server.schema.BuildArtifactType + type=click.Choice( + [ + "DIRECTORY", + "LOCKFILE", + "LOGS", + "YAML", + "CONDA_PACK", + "DOCKER_BLOB", + "DOCKER_MANIFEST", + ] + ), + help="Filter builds which have given artifact", +) +@click.option( + "--package", + help="Filter builds which have given package (can be used multiple times)", + multiple=True, +) +@click.option( + "--output", + default="table", + type=click.Choice(["json", "table"]), + help="Output format to display builds. 
Default table.", +) +@click.pass_context +@utils.coro +async def list_build(ctx, output: str, status: str, artifact: str, package: List[str]): + async with ctx.obj["CONDA_STORE_API"] as conda_store: + builds = await conda_store.list_builds( + status=status, + artifact=artifact, + packages=package, + ) + + for build in builds: + build["size"] = utils.sizeof_fmt(build["size"]) + + if output == "table": + utils.output_table( + "Builds", {"Id": "id", "Size": "size", "Status": "status"}, builds + ) + elif output == "json": + utils.output_json(builds) + + +@list_group.command("environment") +@click.option( + "--status", + # see conda_store_server.schema.BuildStatus + type=click.Choice(["QUEUED", "BUILDING", "COMPLETED", "FAILED"]), + help="Filter environments which have given status", +) +@click.option( + "--artifact", + # see conda_store_server.schema.BuildArtifactType + type=click.Choice( + [ + "DIRECTORY", + "LOCKFILE", + "LOGS", + "YAML", + "CONDA_PACK", + "DOCKER_BLOB", + "DOCKER_MANIFEST", + ] + ), + help="Filter environments which have given artifact", +) +@click.option( + "--package", + help="Filter environments which have given package (can be used multiple times)", + multiple=True, +) +@click.option( + "--output", + default="table", + type=click.Choice(["json", "table"]), + help="Output format to display builds. 
Default table.", +) +@click.pass_context +@utils.coro +async def list_environment( + ctx, output: str, status: str, artifact: str, package: List[str] +): + async with ctx.obj["CONDA_STORE_API"] as conda_store: + builds = await conda_store.list_environments( + status=status, + artifact=artifact, + packages=package, + ) + + if output == "table": + utils.output_table( + "Environments", + {"Id": "id", "Namespace Id": "namespace.id", "Name": "name"}, + builds, + ) + elif output == "json": + utils.output_json(builds) diff --git a/conda-store/conda_store/exception.py b/conda-store/conda_store/exception.py new file mode 100644 index 000000000..6f1329c8a --- /dev/null +++ b/conda-store/conda_store/exception.py @@ -0,0 +1,2 @@ +class CondaStoreError(Exception): + pass diff --git a/conda-store/conda_store/runner.py b/conda-store/conda_store/runner.py new file mode 100644 index 000000000..90df44ac6 --- /dev/null +++ b/conda-store/conda_store/runner.py @@ -0,0 +1,50 @@ +from typing import List +import os +import io +import tarfile +import subprocess +import shlex + +from conda_store.exception import CondaStoreError +from conda_store import api + + +async def run_build( + conda_store_api: api.CondaStoreAPI, + directory: str, + build_id: int, + command: List[str], + artifact="archive", +): + if artifact == "archive": + await run_build_archive(conda_store_api, directory, build_id, command) + else: + raise CondaStoreError(f"Running build artifact {artifact} not supported") + + +async def run_build_archive( + conda_store_api: api.CondaStoreAPI, + conda_prefix: str, + build_id: int, + command: List[str], +): + activate = os.path.join(conda_prefix, "bin", "activate") + conda_unpack = os.path.join(conda_prefix, "bin", "conda-unpack") + + if not os.path.isfile(activate): + content = await conda_store_api.download(build_id, "archive") + content = io.BytesIO(content) + + tarfile.open(fileobj=content, mode="r:gz").extractall(path=conda_prefix) + + if os.path.exists(conda_unpack): + 
subprocess.check_output(conda_unpack) + + wrapped_command = [ + "bash", + "-c", + ". '{}' '{}' && exec {}".format( + activate, conda_prefix, " ".join(shlex.quote(c) for c in command) + ), + ] + os.execvp(wrapped_command[0], wrapped_command) diff --git a/conda-store/conda_store/utils.py b/conda-store/conda_store/utils.py new file mode 100644 index 000000000..75a3902f0 --- /dev/null +++ b/conda-store/conda_store/utils.py @@ -0,0 +1,99 @@ +from typing import List, Dict +import asyncio +import sys +import functools +import json + +from rich.console import Console +from rich.table import Table + +from conda_store import exception + + +console = Console() +error_console = Console(stderr=True, style="bold red") + + +def coro(f): + @functools.wraps(f) + def wrapper(*args, **kwargs): + try: + return asyncio.run(f(*args, **kwargs)) + except exception.CondaStoreError as e: + error_console.print(e.args[0]) + sys.exit(1) + + return wrapper + + +def flatten(d: Dict): + _d = {} + for key, value in d.items(): + if isinstance(value, dict): + for _key, _value in flatten(value).items(): + _d[f"{key}.{_key}"] = _value + else: + _d[key] = value + return _d + + +def lookup(d: Dict, key: str): + _d = d + keys = key.split(".") + for key in keys: + _d = _d[key] + return _d + + +def sizeof_fmt(num, suffix="B"): + for unit in ["", "Ki", "Mi", "Gi", "Ti", "Pi", "Ei", "Zi"]: + if abs(num) < 1024.0: + return f"{num:3.1f}{unit}{suffix}" + num /= 1024.0 + return f"{num:.1f}Yi{suffix}" + + +def timedelta_fmt(td): + """ + Returns a humanized string representing timedelta + """ + + def plural(unit, word): + if unit > 1: + return word + "s" + return word + + years = td.days // 365 + months = td.days // 30 + days = td.days + hours = td.seconds / 3600 + minutes = td.seconds % 60 + seconds = td.seconds + + if years > 0: + return f"{years} {plural(years, 'year')}" + elif months > 0: + return f"{months} {plural(months, 'month')}" + elif days > 0: + return f"{days} {plural(days, 'day')}" + elif hours > 0: 
+ return f"{hours} {plural(hours, 'hour')}" + elif minutes > 0: + return f"{minutes} {plural(minutes, 'minute')}" + elif seconds > 0: + return f"{seconds} {plural(seconds, 'second')}" + + +def output_json(data, **kwargs): + print(json.dumps(data, **kwargs), end="") + + +def output_table(title: str, columns: Dict[str, str], rows: List[Dict]): + table = Table(title=title) + for column in columns.keys(): + table.add_column(column) + + for row in rows: + table.add_row(*[str(lookup(row, key)) for key in columns.values()]) + + console.print(table) diff --git a/conda-store/environment.yaml b/conda-store/environment.yaml index d578a4319..54eb194ea 100644 --- a/conda-store/environment.yaml +++ b/conda-store/environment.yaml @@ -5,8 +5,14 @@ dependencies: - pip - jupyterhub<2.0.0 - jupyterlab>=3.0.0 - - nb_conda_kernels + - nb_conda_store_kernels >=0.1.4 - nodejs=16 - yarn + # conda-store client dependencies + - yarl + - aiohttp + - rich + - click + - ruamel.yaml - pip: - https://github.com/yuvipanda/jupyter-launcher-shortcuts/archive/refs/heads/master.zip diff --git a/conda-store/pyproject.toml b/conda-store/pyproject.toml new file mode 100644 index 000000000..6cdbd6b21 --- /dev/null +++ b/conda-store/pyproject.toml @@ -0,0 +1,7 @@ +[build-system] +requires = ["setuptools"] +build-backend = "setuptools.build_meta" + +[tool.black] +line-length = 88 +target-version = ['py36', 'py37', 'py38'] diff --git a/conda-store/setup.cfg b/conda-store/setup.cfg new file mode 100644 index 000000000..fe14646fa --- /dev/null +++ b/conda-store/setup.cfg @@ -0,0 +1,53 @@ +[metadata] +name = conda-store +version = attr: conda_store.__version__ +description = conda-store client +long_description = file: README.md, LICENSE +long_description_content_type = text/markdown +author = Christopher Ostrouchov +author_email = chris.ostrouchov@gmail.com +url = https://github.com/Quansight/conda-store +keywords = conda +license = BSD License +classifiers = + Development Status :: 3 - Alpha 
+ Intended Audience :: Developers + Topic :: Software Development :: Build Tools + License :: OSI Approved :: BSD License + Programming Language :: Python :: 3 + Programming Language :: Python :: 3.7 + Programming Language :: Python :: 3.8 + Programming Language :: Python :: 3.9 + Programming Language :: Python :: 3 :: Only +project_urls = + Bug Reports = https://github.com/quansight/conda-store + Documentation = https://conda-store.readthedocs.io/ + Source = https://github.com/quansight/conda-store + +[options] +zip_safe = True +packages = find: +install_requires = + rich + click + yarl + aiohttp + ruamel.yaml + +[options.entry_points] +console_scripts = + conda-store=conda_store.__main__:main + +[options.extras_require] +dev = pytest; pytest-mock; black==22.3.0; flake8; sphinx; recommonmark; pydata-sphinx-theme; build; twine + +[options.packages.find] +exclude = + tests + +[flake8] +ignore = E203, E266, E501, W503 +max-line-length = 89 +exclude = + .git, + __pycache__, diff --git a/conda-store/setup.py b/conda-store/setup.py deleted file mode 100644 index b89a1bc82..000000000 --- a/conda-store/setup.py +++ /dev/null @@ -1,38 +0,0 @@ -from setuptools import setup, find_packages -import pathlib - -here = pathlib.Path(__file__).parent.resolve() - -long_description = (here / "README.md").read_text(encoding="utf-8") - -setup( - name="conda-store", - version='0.4.0', - url="https://github.com/Quansight/conda-store", - author="Chris Ostrouchov", - description="A client to interface with conda-store", - long_description=long_description, - long_description_content_type="text/markdown", - packages=find_packages(), - install_requires=[], - python_requires=">=3.6", - license="BSD-3-Clause", - platforms="Linux, Mac OS X, Windows", - keywords=["conda-store"], - classifiers=[ - 'Development Status :: 3 - Alpha', - 'Intended Audience :: Developers', - 'Topic :: Software Development :: Build Tools', - 'License :: OSI Approved :: BSD License', - 
'Programming Language :: Python :: 3', - 'Programming Language :: Python :: 3.6', - 'Programming Language :: Python :: 3.7', - 'Programming Language :: Python :: 3.8', - 'Programming Language :: Python :: 3 :: Only', - ], - project_urls={ - "Bug Reports": "https://github.com/quansight/conda-store", - "Documentation": "https://conda-store.readthedocs.io/", - "Source": "https://github.com/quansight/conda-store", - }, -) diff --git a/docker-compose.yaml b/docker-compose.yaml index 5e8c0e3c5..8baa57612 100644 --- a/docker-compose.yaml +++ b/docker-compose.yaml @@ -3,6 +3,7 @@ version: "3.8" services: conda-store-worker: build: conda-store-server + user: 1000:1000 volumes: - ./tests/assets/environments:/opt/environments:ro - ./tests/assets/conda_store_config.py:/opt/conda_store/conda_store_config.py:ro @@ -14,6 +15,7 @@ services: conda-store-server: build: conda-store-server + user: 1000:1000 depends_on: postgres: condition: service_healthy diff --git a/docs/_static/openapi.json b/docs/_static/openapi.json new file mode 100644 index 000000000..60a138369 --- /dev/null +++ b/docs/_static/openapi.json @@ -0,0 +1 @@ +{"openapi":"3.0.2","info":{"title":"conda-store","contact":{"name":"Quansight","url":"https://quansight.com"},"license":{"name":"BSD 3-Clause","url":"https://opensource.org/licenses/BSD-3-Clause"},"version":"0.4.4"},"paths":{"/login/":{"get":{"tags":["auth"],"summary":"Get Login Method","operationId":"get_login_method_login__get","responses":{"200":{"description":"Successful Response","content":{"application/json":{"schema":{}}}}}},"post":{"tags":["auth"],"summary":"Post Login Method","operationId":"post_login_method_login__post","parameters":[{"required":false,"schema":{"title":"Next","type":"string"},"name":"next","in":"query"}],"responses":{"200":{"description":"Successful Response","content":{"application/json":{"schema":{}}}},"422":{"description":"Validation 
Error","content":{"application/json":{"schema":{"$ref":"#/components/schemas/HTTPValidationError"}}}}}}},"/logout/":{"post":{"tags":["auth"],"summary":"Post Logout Method","operationId":"post_logout_method_logout__post","parameters":[{"required":false,"schema":{"title":"Next","type":"string"},"name":"next","in":"query"}],"responses":{"200":{"description":"Successful Response","content":{"application/json":{"schema":{}}}},"422":{"description":"Validation Error","content":{"application/json":{"schema":{"$ref":"#/components/schemas/HTTPValidationError"}}}}}}},"/api/v1/":{"get":{"tags":["api"],"summary":"Api Status","operationId":"api_status_api_v1__get","responses":{"200":{"description":"Successful Response","content":{"application/json":{"schema":{"$ref":"#/components/schemas/APIGetStatus"}}}}}}},"/api/v1/permission/":{"get":{"tags":["api"],"summary":"Api Get Permissions","operationId":"api_get_permissions_api_v1_permission__get","responses":{"200":{"description":"Successful Response","content":{"application/json":{"schema":{"$ref":"#/components/schemas/APIGetPermission"}}}}}}},"/api/v1/namespace/":{"get":{"tags":["api"],"summary":"Api List Namespaces","operationId":"api_list_namespaces_api_v1_namespace__get","parameters":[{"required":false,"schema":{"title":"Page","type":"integer","default":1},"name":"page","in":"query"},{"required":false,"schema":{"title":"Order","type":"string"},"name":"order","in":"query"},{"required":false,"schema":{"title":"Size","type":"integer"},"name":"size","in":"query"},{"required":false,"schema":{"title":"Sort By","type":"array","items":{"type":"string"},"default":[]},"name":"sort_by","in":"query"}],"responses":{"200":{"description":"Successful Response","content":{"application/json":{"schema":{"$ref":"#/components/schemas/APIListNamespace"}}}},"422":{"description":"Validation 
Error","content":{"application/json":{"schema":{"$ref":"#/components/schemas/HTTPValidationError"}}}}}}},"/api/v1/namespace/{namespace}/":{"get":{"tags":["api"],"summary":"Api Get Namespace","operationId":"api_get_namespace_api_v1_namespace__namespace___get","parameters":[{"required":true,"schema":{"title":"Namespace","type":"string"},"name":"namespace","in":"path"}],"responses":{"200":{"description":"Successful Response","content":{"application/json":{"schema":{"$ref":"#/components/schemas/APIGetNamespace"}}}},"422":{"description":"Validation Error","content":{"application/json":{"schema":{"$ref":"#/components/schemas/HTTPValidationError"}}}}}},"post":{"tags":["api"],"summary":"Api Create Namespace","operationId":"api_create_namespace_api_v1_namespace__namespace___post","parameters":[{"required":true,"schema":{"title":"Namespace","type":"string"},"name":"namespace","in":"path"}],"responses":{"200":{"description":"Successful Response","content":{"application/json":{"schema":{"$ref":"#/components/schemas/APIAckResponse"}}}},"422":{"description":"Validation Error","content":{"application/json":{"schema":{"$ref":"#/components/schemas/HTTPValidationError"}}}}}},"delete":{"tags":["api"],"summary":"Api Delete Namespace","operationId":"api_delete_namespace_api_v1_namespace__namespace___delete","parameters":[{"required":true,"schema":{"title":"Namespace","type":"string"},"name":"namespace","in":"path"}],"responses":{"200":{"description":"Successful Response","content":{"application/json":{"schema":{"$ref":"#/components/schemas/APIAckResponse"}}}},"422":{"description":"Validation Error","content":{"application/json":{"schema":{"$ref":"#/components/schemas/HTTPValidationError"}}}}}}},"/api/v1/environment/":{"get":{"tags":["api"],"summary":"Api List 
Environments","operationId":"api_list_environments_api_v1_environment__get","parameters":[{"required":false,"schema":{"title":"Search","type":"string"},"name":"search","in":"query"},{"required":false,"schema":{"title":"Namespace","type":"string"},"name":"namespace","in":"query"},{"required":false,"schema":{"title":"Name","type":"string"},"name":"name","in":"query"},{"required":false,"schema":{"$ref":"#/components/schemas/BuildStatus"},"name":"status","in":"query"},{"required":false,"schema":{"title":"Packages","type":"array","items":{"type":"string"},"default":[]},"name":"packages","in":"query"},{"required":false,"schema":{"$ref":"#/components/schemas/BuildArtifactType"},"name":"artifact","in":"query"},{"required":false,"schema":{"title":"Page","type":"integer","default":1},"name":"page","in":"query"},{"required":false,"schema":{"title":"Order","type":"string"},"name":"order","in":"query"},{"required":false,"schema":{"title":"Size","type":"integer"},"name":"size","in":"query"},{"required":false,"schema":{"title":"Sort By","type":"array","items":{"type":"string"},"default":[]},"name":"sort_by","in":"query"}],"responses":{"200":{"description":"Successful Response","content":{"application/json":{"schema":{"$ref":"#/components/schemas/APIListEnvironment"}}}},"422":{"description":"Validation Error","content":{"application/json":{"schema":{"$ref":"#/components/schemas/HTTPValidationError"}}}}}}},"/api/v1/environment/{namespace}/{environment_name}/":{"get":{"tags":["api"],"summary":"Api Get Environment","operationId":"api_get_environment_api_v1_environment__namespace___environment_name___get","parameters":[{"required":true,"schema":{"title":"Namespace","type":"string"},"name":"namespace","in":"path"},{"required":true,"schema":{"title":"Environment Name","type":"string"},"name":"environment_name","in":"path"}],"responses":{"200":{"description":"Successful 
Response","content":{"application/json":{"schema":{"$ref":"#/components/schemas/APIGetEnvironment"}}}},"422":{"description":"Validation Error","content":{"application/json":{"schema":{"$ref":"#/components/schemas/HTTPValidationError"}}}}}}},"/api/v1/environment/{namespace}/{name}/":{"put":{"tags":["api"],"summary":"Api Update Environment Build","operationId":"api_update_environment_build_api_v1_environment__namespace___name___put","parameters":[{"required":true,"schema":{"title":"Namespace","type":"string"},"name":"namespace","in":"path"},{"required":true,"schema":{"title":"Name","type":"string"},"name":"name","in":"path"}],"requestBody":{"content":{"application/json":{"schema":{"$ref":"#/components/schemas/Body_api_update_environment_build_api_v1_environment__namespace___name___put"}}},"required":true},"responses":{"200":{"description":"Successful Response","content":{"application/json":{"schema":{"$ref":"#/components/schemas/APIAckResponse"}}}},"422":{"description":"Validation Error","content":{"application/json":{"schema":{"$ref":"#/components/schemas/HTTPValidationError"}}}}}},"delete":{"tags":["api"],"summary":"Api Delete Environment","operationId":"api_delete_environment_api_v1_environment__namespace___name___delete","parameters":[{"required":true,"schema":{"title":"Namespace","type":"string"},"name":"namespace","in":"path"},{"required":true,"schema":{"title":"Name","type":"string"},"name":"name","in":"path"}],"responses":{"200":{"description":"Successful Response","content":{"application/json":{"schema":{"$ref":"#/components/schemas/APIAckResponse"}}}},"422":{"description":"Validation Error","content":{"application/json":{"schema":{"$ref":"#/components/schemas/HTTPValidationError"}}}}}}},"/api/v1/specification/":{"get":{"tags":["api"],"summary":"Api Get 
Specification","operationId":"api_get_specification_api_v1_specification__get","parameters":[{"required":false,"schema":{"title":"Channel","type":"array","items":{"type":"string"},"default":[]},"name":"channel","in":"query"},{"required":false,"schema":{"title":"Conda","type":"array","items":{"type":"string"},"default":[]},"name":"conda","in":"query"},{"required":false,"schema":{"title":"Pip","type":"array","items":{"type":"string"},"default":[]},"name":"pip","in":"query"},{"required":false,"schema":{"allOf":[{"$ref":"#/components/schemas/APIGetSpecificationFormat"}],"default":"yaml"},"name":"format","in":"query"}],"responses":{"200":{"description":"Successful Response","content":{"application/json":{"schema":{}}}},"422":{"description":"Validation Error","content":{"application/json":{"schema":{"$ref":"#/components/schemas/HTTPValidationError"}}}}}},"post":{"tags":["api"],"summary":"Api Post Specification","operationId":"api_post_specification_api_v1_specification__post","requestBody":{"content":{"application/json":{"schema":{"$ref":"#/components/schemas/Body_api_post_specification_api_v1_specification__post"}}}},"responses":{"200":{"description":"Successful Response","content":{"application/json":{"schema":{"$ref":"#/components/schemas/APIPostSpecification"}}}},"422":{"description":"Validation Error","content":{"application/json":{"schema":{"$ref":"#/components/schemas/HTTPValidationError"}}}}}}},"/api/v1/build/":{"get":{"tags":["api"],"summary":"Api List 
Builds","operationId":"api_list_builds_api_v1_build__get","parameters":[{"required":false,"schema":{"$ref":"#/components/schemas/BuildStatus"},"name":"status","in":"query"},{"required":false,"schema":{"title":"Packages","type":"array","items":{"type":"string"},"default":[]},"name":"packages","in":"query"},{"required":false,"schema":{"$ref":"#/components/schemas/BuildArtifactType"},"name":"artifact","in":"query"},{"required":false,"schema":{"title":"Page","type":"integer","default":1},"name":"page","in":"query"},{"required":false,"schema":{"title":"Order","type":"string"},"name":"order","in":"query"},{"required":false,"schema":{"title":"Size","type":"integer"},"name":"size","in":"query"},{"required":false,"schema":{"title":"Sort By","type":"array","items":{"type":"string"},"default":[]},"name":"sort_by","in":"query"}],"responses":{"200":{"description":"Successful Response","content":{"application/json":{"schema":{"$ref":"#/components/schemas/APIListBuild"}}}},"422":{"description":"Validation Error","content":{"application/json":{"schema":{"$ref":"#/components/schemas/HTTPValidationError"}}}}}}},"/api/v1/build/{build_id}/":{"get":{"tags":["api"],"summary":"Api Get Build","operationId":"api_get_build_api_v1_build__build_id___get","parameters":[{"required":true,"schema":{"title":"Build Id","type":"integer"},"name":"build_id","in":"path"}],"responses":{"200":{"description":"Successful Response","content":{"application/json":{"schema":{"$ref":"#/components/schemas/APIGetBuild"}}}},"422":{"description":"Validation Error","content":{"application/json":{"schema":{"$ref":"#/components/schemas/HTTPValidationError"}}}}}},"put":{"tags":["api"],"summary":"Api Put Build","operationId":"api_put_build_api_v1_build__build_id___put","parameters":[{"required":true,"schema":{"title":"Build Id","type":"integer"},"name":"build_id","in":"path"}],"responses":{"200":{"description":"Successful 
Response","content":{"application/json":{"schema":{"$ref":"#/components/schemas/APIPostSpecification"}}}},"422":{"description":"Validation Error","content":{"application/json":{"schema":{"$ref":"#/components/schemas/HTTPValidationError"}}}}}},"delete":{"tags":["api"],"summary":"Api Delete Build","operationId":"api_delete_build_api_v1_build__build_id___delete","parameters":[{"required":true,"schema":{"title":"Build Id","type":"integer"},"name":"build_id","in":"path"}],"responses":{"200":{"description":"Successful Response","content":{"application/json":{"schema":{"$ref":"#/components/schemas/APIAckResponse"}}}},"422":{"description":"Validation Error","content":{"application/json":{"schema":{"$ref":"#/components/schemas/HTTPValidationError"}}}}}}},"/api/v1/build/{build_id}/packages/":{"get":{"tags":["api"],"summary":"Api Get Build Packages","operationId":"api_get_build_packages_api_v1_build__build_id__packages__get","parameters":[{"required":true,"schema":{"title":"Build Id","type":"integer"},"name":"build_id","in":"path"},{"required":false,"schema":{"title":"Search","type":"string"},"name":"search","in":"query"},{"required":false,"schema":{"title":"Exact","type":"string"},"name":"exact","in":"query"},{"required":false,"schema":{"title":"Build","type":"string"},"name":"build","in":"query"},{"required":false,"schema":{"title":"Page","type":"integer","default":1},"name":"page","in":"query"},{"required":false,"schema":{"title":"Order","type":"string"},"name":"order","in":"query"},{"required":false,"schema":{"title":"Size","type":"integer"},"name":"size","in":"query"},{"required":false,"schema":{"title":"Sort By","type":"array","items":{"type":"string"},"default":[]},"name":"sort_by","in":"query"}],"responses":{"200":{"description":"Successful Response","content":{"application/json":{"schema":{"$ref":"#/components/schemas/APIListCondaPackage"}}}},"422":{"description":"Validation 
Error","content":{"application/json":{"schema":{"$ref":"#/components/schemas/HTTPValidationError"}}}}}}},"/api/v1/build/{build_id}/logs/":{"get":{"tags":["api"],"summary":"Api Get Build Logs","operationId":"api_get_build_logs_api_v1_build__build_id__logs__get","parameters":[{"required":true,"schema":{"title":"Build Id","type":"integer"},"name":"build_id","in":"path"}],"responses":{"200":{"description":"Successful Response","content":{"application/json":{"schema":{}}}},"422":{"description":"Validation Error","content":{"application/json":{"schema":{"$ref":"#/components/schemas/HTTPValidationError"}}}}}}},"/api/v1/channel/":{"get":{"tags":["api"],"summary":"Api List Channels","operationId":"api_list_channels_api_v1_channel__get","parameters":[{"required":false,"schema":{"title":"Page","type":"integer","default":1},"name":"page","in":"query"},{"required":false,"schema":{"title":"Order","type":"string"},"name":"order","in":"query"},{"required":false,"schema":{"title":"Size","type":"integer"},"name":"size","in":"query"},{"required":false,"schema":{"title":"Sort By","type":"array","items":{"type":"string"},"default":[]},"name":"sort_by","in":"query"}],"responses":{"200":{"description":"Successful Response","content":{"application/json":{"schema":{"$ref":"#/components/schemas/APIListCondaChannel"}}}},"422":{"description":"Validation Error","content":{"application/json":{"schema":{"$ref":"#/components/schemas/HTTPValidationError"}}}}}}},"/api/v1/package/":{"get":{"tags":["api"],"summary":"Api List Packages","operationId":"api_list_packages_api_v1_package__get","parameters":[{"required":false,"schema":{"title":"Search","type":"string"},"name":"search","in":"query"},{"required":false,"schema":{"title":"Exact","type":"string"},"name":"exact","in":"query"},{"required":false,"schema":{"title":"Build","type":"string"},"name":"build","in":"query"},{"required":false,"schema":{"title":"Distinct 
On","type":"array","items":{"type":"string"},"default":[]},"name":"distinct_on","in":"query"},{"required":false,"schema":{"title":"Page","type":"integer","default":1},"name":"page","in":"query"},{"required":false,"schema":{"title":"Order","type":"string"},"name":"order","in":"query"},{"required":false,"schema":{"title":"Size","type":"integer"},"name":"size","in":"query"},{"required":false,"schema":{"title":"Sort By","type":"array","items":{"type":"string"},"default":[]},"name":"sort_by","in":"query"}],"responses":{"200":{"description":"Successful Response","content":{"application/json":{"schema":{"$ref":"#/components/schemas/APIListCondaPackage"}}}},"422":{"description":"Validation Error","content":{"application/json":{"schema":{"$ref":"#/components/schemas/HTTPValidationError"}}}}}}},"/api/v1/build/{build_id}/yaml/":{"get":{"tags":["api"],"summary":"Api Get Build Yaml","operationId":"api_get_build_yaml_api_v1_build__build_id__yaml__get","parameters":[{"required":true,"schema":{"title":"Build Id","type":"integer"},"name":"build_id","in":"path"}],"responses":{"200":{"description":"Successful Response","content":{"application/json":{"schema":{}}}},"422":{"description":"Validation Error","content":{"application/json":{"schema":{"$ref":"#/components/schemas/HTTPValidationError"}}}}}}},"/api/v1/build/{build_id}/lockfile/":{"get":{"tags":["api"],"summary":"Api Get Build Lockfile","operationId":"api_get_build_lockfile_api_v1_build__build_id__lockfile__get","parameters":[{"required":true,"schema":{"title":"Build Id","type":"integer"},"name":"build_id","in":"path"}],"responses":{"200":{"description":"Successful Response","content":{"text/plain":{"schema":{"type":"string"}}}},"422":{"description":"Validation Error","content":{"application/json":{"schema":{"$ref":"#/components/schemas/HTTPValidationError"}}}}}}},"/api/v1/build/{build_id}/archive/":{"get":{"tags":["api"],"summary":"Api Get Build 
Archive","operationId":"api_get_build_archive_api_v1_build__build_id__archive__get","parameters":[{"required":true,"schema":{"title":"Build Id","type":"integer"},"name":"build_id","in":"path"}],"responses":{"200":{"description":"Successful Response","content":{"application/json":{"schema":{}}}},"422":{"description":"Validation Error","content":{"application/json":{"schema":{"$ref":"#/components/schemas/HTTPValidationError"}}}}}}},"/v2/":{"get":{"tags":["registry"],"summary":"V2","operationId":"v2_v2__get","responses":{"200":{"description":"Successful Response","content":{"application/json":{"schema":{}}}}}}},"/v2/":{"get":{"tags":["registry"],"summary":"List Tags","operationId":"list_tags_v2__rest_path__get","parameters":[{"required":true,"schema":{"title":"Rest","type":"string"},"name":"rest","in":"query"}],"responses":{"200":{"description":"Successful Response","content":{"application/json":{"schema":{}}}},"422":{"description":"Validation Error","content":{"application/json":{"schema":{"$ref":"#/components/schemas/HTTPValidationError"}}}}}}},"/create/":{"get":{"tags":["ui"],"summary":"Ui Create Get Environment","operationId":"ui_create_get_environment_create__get","responses":{"200":{"description":"Successful Response","content":{"application/json":{"schema":{}}}}}}},"/":{"get":{"tags":["ui"],"summary":"Ui List Environments","operationId":"ui_list_environments__get","parameters":[{"required":false,"schema":{"title":"Search","type":"string"},"name":"search","in":"query"}],"responses":{"200":{"description":"Successful Response","content":{"application/json":{"schema":{}}}},"422":{"description":"Validation Error","content":{"application/json":{"schema":{"$ref":"#/components/schemas/HTTPValidationError"}}}}}}},"/namespace/":{"get":{"tags":["ui"],"summary":"Ui List Namespaces","operationId":"ui_list_namespaces_namespace__get","responses":{"200":{"description":"Successful 
Response","content":{"application/json":{"schema":{}}}}}}},"/environment/{namespace}/{environment_name}/":{"get":{"tags":["ui"],"summary":"Ui Get Environment","operationId":"ui_get_environment_environment__namespace___environment_name___get","parameters":[{"required":true,"schema":{"title":"Namespace","type":"string"},"name":"namespace","in":"path"},{"required":true,"schema":{"title":"Environment Name","type":"string"},"name":"environment_name","in":"path"}],"responses":{"200":{"description":"Successful Response","content":{"application/json":{"schema":{}}}},"422":{"description":"Validation Error","content":{"application/json":{"schema":{"$ref":"#/components/schemas/HTTPValidationError"}}}}}}},"/environment/{namespace}/{environment_name}/edit/":{"get":{"tags":["ui"],"summary":"Ui Edit Environment","operationId":"ui_edit_environment_environment__namespace___environment_name__edit__get","parameters":[{"required":true,"schema":{"title":"Namespace","type":"string"},"name":"namespace","in":"path"},{"required":true,"schema":{"title":"Environment Name","type":"string"},"name":"environment_name","in":"path"}],"responses":{"200":{"description":"Successful Response","content":{"application/json":{"schema":{}}}},"422":{"description":"Validation Error","content":{"application/json":{"schema":{"$ref":"#/components/schemas/HTTPValidationError"}}}}}}},"/build/{build_id}/":{"get":{"tags":["ui"],"summary":"Ui Get Build","operationId":"ui_get_build_build__build_id___get","parameters":[{"required":true,"schema":{"title":"Build Id","type":"integer"},"name":"build_id","in":"path"}],"responses":{"200":{"description":"Successful Response","content":{"application/json":{"schema":{}}}},"422":{"description":"Validation Error","content":{"application/json":{"schema":{"$ref":"#/components/schemas/HTTPValidationError"}}}}}}},"/user/":{"get":{"tags":["ui"],"summary":"Ui Get User","operationId":"ui_get_user_user__get","responses":{"200":{"description":"Successful 
Response","content":{"application/json":{"schema":{}}}}}}},"/metrics":{"get":{"tags":["metrics"],"summary":"Prometheus Metrics","operationId":"prometheus_metrics_metrics_get","responses":{"200":{"description":"Successful Response","content":{"text/plain":{"schema":{"type":"string"}}}}}}},"/celery":{"get":{"tags":["metrics"],"summary":"Trigger Task","operationId":"trigger_task_celery_get","responses":{"200":{"description":"Successful Response","content":{"application/json":{"schema":{}}}}}}}},"components":{"schemas":{"APIAckResponse":{"title":"APIAckResponse","required":["status"],"type":"object","properties":{"status":{"$ref":"#/components/schemas/APIStatus"},"message":{"title":"Message","type":"string"}}},"APIGetBuild":{"title":"APIGetBuild","required":["status","data"],"type":"object","properties":{"status":{"$ref":"#/components/schemas/APIStatus"},"data":{"$ref":"#/components/schemas/Build"},"message":{"title":"Message","type":"string"}}},"APIGetEnvironment":{"title":"APIGetEnvironment","required":["status","data"],"type":"object","properties":{"status":{"$ref":"#/components/schemas/APIStatus"},"data":{"$ref":"#/components/schemas/Environment"},"message":{"title":"Message","type":"string"}}},"APIGetNamespace":{"title":"APIGetNamespace","required":["status","data"],"type":"object","properties":{"status":{"$ref":"#/components/schemas/APIStatus"},"data":{"$ref":"#/components/schemas/Namespace"},"message":{"title":"Message","type":"string"}}},"APIGetPermission":{"title":"APIGetPermission","required":["status","data"],"type":"object","properties":{"status":{"$ref":"#/components/schemas/APIStatus"},"data":{"$ref":"#/components/schemas/APIGetPermissionData"},"message":{"title":"Message","type":"string"}}},"APIGetPermissionData":{"title":"APIGetPermissionData","required":["authenticated","entity_permissions","primary_namespace"],"type":"object","properties":{"authenticated":{"title":"Authenticated","type":"boolean"},"entity_permissions":{"title":"Entity 
Permissions","type":"object","additionalProperties":{"type":"array","items":{"type":"string"}}},"primary_namespace":{"title":"Primary Namespace","type":"string"}}},"APIGetSpecificationFormat":{"title":"APIGetSpecificationFormat","enum":["yaml","lockfile"],"description":"An enumeration."},"APIGetStatus":{"title":"APIGetStatus","required":["status","data"],"type":"object","properties":{"status":{"$ref":"#/components/schemas/APIStatus"},"data":{"$ref":"#/components/schemas/APIGetStatusData"},"message":{"title":"Message","type":"string"}}},"APIGetStatusData":{"title":"APIGetStatusData","required":["version"],"type":"object","properties":{"version":{"title":"Version","type":"string"}}},"APIListBuild":{"title":"APIListBuild","required":["status","data","page","size","count"],"type":"object","properties":{"status":{"$ref":"#/components/schemas/APIStatus"},"data":{"title":"Data","type":"array","items":{"$ref":"#/components/schemas/Build"}},"message":{"title":"Message","type":"string"},"page":{"title":"Page","type":"integer"},"size":{"title":"Size","type":"integer"},"count":{"title":"Count","type":"integer"}}},"APIListCondaChannel":{"title":"APIListCondaChannel","required":["status","data","page","size","count"],"type":"object","properties":{"status":{"$ref":"#/components/schemas/APIStatus"},"data":{"title":"Data","type":"array","items":{"$ref":"#/components/schemas/CondaChannel"}},"message":{"title":"Message","type":"string"},"page":{"title":"Page","type":"integer"},"size":{"title":"Size","type":"integer"},"count":{"title":"Count","type":"integer"}}},"APIListCondaPackage":{"title":"APIListCondaPackage","required":["status","data","page","size","count"],"type":"object","properties":{"status":{"$ref":"#/components/schemas/APIStatus"},"data":{"title":"Data","type":"array","items":{"$ref":"#/components/schemas/CondaPackage"}},"message":{"title":"Message","type":"string"},"page":{"title":"Page","type":"integer"},"size":{"title":"Size","type":"integer"},"count":{"title":"Count","
type":"integer"}}},"APIListEnvironment":{"title":"APIListEnvironment","required":["status","data","page","size","count"],"type":"object","properties":{"status":{"$ref":"#/components/schemas/APIStatus"},"data":{"title":"Data","type":"array","items":{"$ref":"#/components/schemas/Environment"}},"message":{"title":"Message","type":"string"},"page":{"title":"Page","type":"integer"},"size":{"title":"Size","type":"integer"},"count":{"title":"Count","type":"integer"}}},"APIListNamespace":{"title":"APIListNamespace","required":["status","data","page","size","count"],"type":"object","properties":{"status":{"$ref":"#/components/schemas/APIStatus"},"data":{"title":"Data","type":"array","items":{"$ref":"#/components/schemas/Namespace"}},"message":{"title":"Message","type":"string"},"page":{"title":"Page","type":"integer"},"size":{"title":"Size","type":"integer"},"count":{"title":"Count","type":"integer"}}},"APIPostSpecification":{"title":"APIPostSpecification","required":["status","data"],"type":"object","properties":{"status":{"$ref":"#/components/schemas/APIStatus"},"data":{"$ref":"#/components/schemas/APIPostSpecificationData"},"message":{"title":"Message","type":"string"}}},"APIPostSpecificationData":{"title":"APIPostSpecificationData","required":["build_id"],"type":"object","properties":{"build_id":{"title":"Build Id","type":"integer"}}},"APIStatus":{"title":"APIStatus","enum":["ok","error"],"description":"An enumeration."},"Body_api_post_specification_api_v1_specification__post":{"title":"Body_api_post_specification_api_v1_specification__post","type":"object","properties":{"specification":{"title":"Specification","type":"string","default":""},"namespace":{"title":"Namespace","type":"string"}}},"Body_api_update_environment_build_api_v1_environment__namespace___name___put":{"title":"Body_api_update_environment_build_api_v1_environment__namespace___name___put","required":["build_id"],"type":"object","properties":{"build_id":{"title":"Build 
Id","type":"integer"}}},"Build":{"title":"Build","required":["id","environment_id","status","size","scheduled_on"],"type":"object","properties":{"id":{"title":"Id","type":"integer"},"environment_id":{"title":"Environment Id","type":"integer"},"specification":{"$ref":"#/components/schemas/Specification"},"packages":{"title":"Packages","type":"array","items":{"$ref":"#/components/schemas/CondaPackage"}},"status":{"$ref":"#/components/schemas/BuildStatus"},"size":{"title":"Size","type":"integer"},"scheduled_on":{"title":"Scheduled On","type":"string","format":"date-time"},"started_on":{"title":"Started On","type":"string","format":"date-time"},"ended_on":{"title":"Ended On","type":"string","format":"date-time"},"build_artifacts":{"title":"Build Artifacts","type":"array","items":{"$ref":"#/components/schemas/BuildArtifact"}}}},"BuildArtifact":{"title":"BuildArtifact","required":["id","artifact_type","key"],"type":"object","properties":{"id":{"title":"Id","type":"integer"},"artifact_type":{"$ref":"#/components/schemas/BuildArtifactType"},"key":{"title":"Key","type":"string"}}},"BuildArtifactType":{"title":"BuildArtifactType","enum":["DIRECTORY","LOCKFILE","LOGS","YAML","CONDA_PACK","DOCKER_BLOB","DOCKER_MANIFEST"],"description":"An enumeration."},"BuildStatus":{"title":"BuildStatus","enum":["QUEUED","BUILDING","COMPLETED","FAILED"],"description":"An enumeration."},"CondaChannel":{"title":"CondaChannel","required":["id","name"],"type":"object","properties":{"id":{"title":"Id","type":"integer"},"name":{"title":"Name","type":"string"},"last_update":{"title":"Last 
Update","type":"string","format":"date-time"}}},"CondaPackage":{"title":"CondaPackage","required":["id","channel","build","sha256","name","version"],"type":"object","properties":{"id":{"title":"Id","type":"integer"},"channel":{"$ref":"#/components/schemas/CondaChannel"},"build":{"title":"Build","type":"string"},"license":{"title":"License","type":"string"},"sha256":{"title":"Sha256","type":"string"},"name":{"title":"Name","type":"string"},"version":{"title":"Version","type":"string"},"summary":{"title":"Summary","type":"string"}}},"Environment":{"title":"Environment","required":["id","namespace","name","current_build_id"],"type":"object","properties":{"id":{"title":"Id","type":"integer"},"namespace":{"$ref":"#/components/schemas/Namespace"},"name":{"title":"Name","type":"string"},"current_build_id":{"title":"Current Build Id","type":"integer"},"current_build":{"$ref":"#/components/schemas/Build"}}},"HTTPValidationError":{"title":"HTTPValidationError","type":"object","properties":{"detail":{"title":"Detail","type":"array","items":{"$ref":"#/components/schemas/ValidationError"}}}},"Namespace":{"title":"Namespace","required":["id","name"],"type":"object","properties":{"id":{"title":"Id","type":"integer"},"name":{"title":"Name","pattern":"^[A-Za-z0-9-+_=@$&?^|~.]+$","type":"string"}}},"Specification":{"title":"Specification","required":["id","name","spec","sha256","created_on"],"type":"object","properties":{"id":{"title":"Id","type":"integer"},"name":{"title":"Name","type":"string"},"spec":{"title":"Spec","type":"object"},"sha256":{"title":"Sha256","type":"string"},"created_on":{"title":"Created On","type":"string","format":"date-time"}}},"ValidationError":{"title":"ValidationError","required":["loc","msg","type"],"type":"object","properties":{"loc":{"title":"Location","type":"array","items":{"anyOf":[{"type":"string"},{"type":"integer"}]}},"msg":{"title":"Message","type":"string"},"type":{"title":"Error Type","type":"string"}}}}}} diff --git a/docs/administration.md 
b/docs/administration.md index efca42cb4..993bf4e35 100644 --- a/docs/administration.md +++ b/docs/administration.md @@ -11,13 +11,13 @@ ## Performance -There are several parts of Conda-Store to consider for performance. We +There are several parts of conda-store to consider for performance. We have tried to list them in order of performance impact that may be seen. ### Worker Storage -When Conda-Store builds a given environment it has to locally install +When conda-store builds a given environment it has to locally install the environment in the directory specified in the [Traitlets](https://traitlets.readthedocs.io/en/stable/using_traitlets.html) configuration `CondaStore.store_directroy`. Conda environments consist @@ -51,16 +51,16 @@ plenty fast Internet. ### S3 Storage -All build artifacts from Conda-Store are stored in object storage that +All build artifacts from conda-store are stored in object storage that behaves S3 like. S3 traditionally has great performance if you use the cloud provider implementation. ## Configuration -Conda-Store is configured via +conda-store is configured via [Traitlets](https://traitlets.readthedocs.io/en/stable/). Originally this configuration was done via command line options but as the -options grew this seems untenable. Conda-Store server and worker can +options grew this seems untenable. conda-store server and worker can be launched via configuration easily. ```shell @@ -71,17 +71,17 @@ conda-store-server --config conda-store-worker --config ``` -Below we outline the options for Conda-Store. +Below we outline the options for conda-store. ### `conda_store_server.app.CondaStore` `CondaStore.storage_class` configures the storage backend to use for storing build artifacts from -Conda-Store. [S3](https://en.wikipedia.org/wiki/Amazon_S3) storage is +conda-store. [S3](https://en.wikipedia.org/wiki/Amazon_S3) storage is the default. File based storage is also supported but not nearly as well tested. 
-`CondaStore.store_directory` is the directory used for Conda-Store to +`CondaStore.store_directory` is the directory used for conda-store to build the environments. `CondaStore.build_directory` template used to form the directory for @@ -116,9 +116,10 @@ repodata.json from. By default includes current architecture and are by default added if channels within the specification is empty. `CondaStore.conda_allowed_channels` is a list of Conda channels that -are allowed. This also tells Conda-Store which channels to prefetch -the channel `repodata` and `channeldata` from. The default is `main` and -`conda-forge`. +are allowed. This also tells conda-store which channels to prefetch +the channel `repodata` and `channeldata` from. The default is `main` +and `conda-forge`. If `conda_allowed_channels` is an empty list all +channels are accepted by users. `CondaStore.conda_default_packages` is a list of Conda packages that are included by default if none are specified within the specification @@ -150,13 +151,12 @@ added. database. Behind the scenes [SQLAlchemy](https://www.sqlalchemy.org/) is used for the connection so [consult their docs](https://docs.sqlalchemy.org/en/14/core/engines.html) for -connecting to your specific database. Conda-Store will automatically +connecting to your specific database. conda-store will automatically create the tables if they do not already exist. -`CondaStore.redis_url` is a required argument to a running Redis -instance. This became a dependency as of release `0.4.1` due to the -massive advantages of features that Conda-Store can provide with this -dependency. See +`CondaStore.redis_url` is an optional argument to a running Redis +instance. This was removed as a dependency as of release `0.4.10` due +to the need to have a simple deployment option for conda-store. See [documentation](https://github.com/redis/redis-py/#connecting-to-redis) for proper specification. This url is used by default for the Celery broker and results backend.
@@ -165,12 +165,13 @@ broker and results backend. celery. Celery supports a [wide range of brokers](https://docs.celeryproject.org/en/stable/getting-started/backends-and-brokers/index.html) each with different guarantees. By default the Redis based broker is -used. It is production ready and has worked well in practice. The url -must be provided in a format that celery understands. The default -value is `CondaStore.redis_url`. +used if a `CondaStore.redis_url` is provided, otherwise defaults to +sqlalchemy. It is production ready and has worked well in +practice. The url must be provided in a format that celery +understands. The default value is `CondaStore.redis_url`. -`CondaStore.build_artifacts` is the list of artifacts for Conda-Store -to build. By default it is all the artifacts that Conda-Store is +`CondaStore.build_artifacts` is the list of artifacts for conda-store +to build. By default it is all the artifacts that conda-store is capable of building. These are the [lockfile](https://github.com/conda-incubator/conda-lock), [YAML](https://docs.conda.io/projects/conda/en/latest/user-guide/tasks/manage-environments.html#create-env-file-manually), @@ -184,14 +185,16 @@ want to keep around the logs etc. of a build and the Conda solve for the given build. `CondaStore.celery_results_backend` is the backend to use for storing -all results from celery task execution. Conda-Store currently does not +all results from celery task execution. conda-store currently does not leverage the backend results but it may be needed for future work -using celery. The backend defaults to using the Redis backend. This -choice works great in production. Please consult the [celery docs on +using celery. The backend defaults to using the Redis backend if +`CondaStore.redis_url` is specified, otherwise uses the +`CondaStore.database_url`. This choice works great in +production.
Please consult the [celery docs on backend](https://docs.celeryproject.org/en/stable/getting-started/backends-and-brokers/index.html). `CondaStore.default_namespace` is the default namespace for -Conda-Store to use. All environments are built behind a given +conda-store to use. All environments are built behind a given namespace. `CondaStore.filesystem_namespace` is the namespace to use for @@ -213,12 +216,16 @@ assign to all files and directories in a given built environment. This setting is useful if you want to protect environments from modification from certain users and groups. -`CondaStore.default_docker_base_image` is the base image to use for -docker builds of Conda environments. This package at a minimum should -have the [following packages +`CondaStore.default_docker_base_image` default base image used for the +Dockerized environments. Make sure to have a proper glibc within image +(highly discourage alpine/musl based images). Can also be callable +function which takes the `orm.Build` object as input which has access +to all attributes about the build such as installed packages, requested +packages, name, namespace, etc. This package at a minimum should have +the [following packages installed](https://docs.anaconda.com/anaconda/install/linux/). Often times for non-graphic and non-gpu environments glibc is enough. Hence -the default docker image `frolvlad/alpine-glibc:latest`. +the default docker image `library/debian:sid-slim`. `CondaStore.serialize_builds` no longer build Conda environment in parallel. This is due to an issue in Conda/Mamba that when downloading @@ -228,11 +235,11 @@ is True until this bug is fixed. ### `conda_store_server.storage.S3Storage` -Conda-Store uses [minio-py](https://github.com/minio/minio-py) as a +conda-store uses [minio-py](https://github.com/minio/minio-py) as a client to connect to S3 "like" object stores. -`S3Storage.internal_endpoint` is the internal endpoint for Conda-Store -reaching out to s3 bucket. 
This is the url that Conda-Store use for +`S3Storage.internal_endpoint` is the internal endpoint for conda-store +reaching out to s3 bucket. This is the url that conda-store uses for get/set s3 blobs. For AWS S3 use the endpoint `s3.amazonaws.com`. `S3Storage.external_endpoint` is the external s3 endpoint for users to @@ -251,12 +258,12 @@ the S3 bucket. `S3Storage.internal_secure` Boolean to indicate if connecting via `http` (False) or `https` (True) internally. The internal connection -is the url that will be exclusively used by Conda-Store and not shared +is the url that will be exclusively used by conda-store and not shared with users. `S3Storage.external_secure` Boolean to indicate if connecting via `http` (False) or `https` (True) internally. The external connection -is the url that will be served to users of Conda-Store. +is the url that will be served to users of conda-store. `S3Storage.credentials` provider to use to get credentials for s3 access. see examples @@ -289,6 +296,12 @@ encrypting tokens. `AuthenticationBackend.jwt_algorithm` is the algorithm for encrypting the JSON Web Tokens. +`AuthenticationBackend.predefined_tokens` is a set of tokens with +predefined permission. This is useful for setting up service accounts +in a similar manner to how things are done with jupyterhub. Format for +the values is a dictionary with keys being the tokens and values being +the `schema.AuthenticationToken`; all fields are optional. + ### `conda_store_server.server.auth.AuthorizationBackend` `AuthorizationBackend.role_mappings` is a dictionary that maps `roles` @@ -404,7 +417,7 @@ metrics endpoints. Default True. `CondaStoreServer.address` is the address for the server to bind to. The default is all IP addresses `0.0.0.0`. -`CondaStoreServer.port` is the port for Conda-Store server to +`CondaStoreServer.port` is the port for conda-store server to use. Default is `5000`.
`CondaStoreServer.registry_external_url` is the external hostname and @@ -429,13 +442,19 @@ reverse proxy such as Nginx, Traefik, Apache. Will use `X-Forward-...` headers to determine scheme. Do not set to true if not behind proxy since Flask will trust any `X-Forward-...` header. +`CondaStoreServer.template` initialized +`fastapi.templating.Jinja2Templates` to use for html templates. + +`CondaStoreServer.template_vars` extra variables to be passed into +jinja templates for page rendering. + ### `conda_store_server.worker.app.CondaStoreWorker` +`CondaStoreWorker.log_level` is the level for all server logging. Default is `INFO`. Common options are `DEBUG`, `INFO`, `WARNING`, and `ERROR`. -`CondaStoreWorker.watch_paths` is a list of paths for Conda-Store to +`CondaStoreWorker.watch_paths` is a list of paths for conda-store to watch for changes to directories of `environment.yaml` files or a single filename to watch. @@ -443,9 +462,43 @@ single filename to watch. the number of threads on your given machine. If set will limit the number of concurrent celery tasks to the integer. +### `conda_store_server.registry.ContainerRegistry` + +`ContainerRegistry.container_registries` dictionary of registries_url +to upload built container images with callable function to configure +registry instance with credentials. Example configuration shown +below. Some registries are more complex to setup such as ECR, GCR, +etc. `password` is often the token generated from the AWS, GCP, Azure, +and Digital Ocean clients.
+ +```python +from python_docker.registry import Registry +import os + +def _configure_docker_registry(registry_url: str): + return Registry( + "https://registry-1.docker.io", + username=os.environ.get('DOCKER_USERNAME'), + password=os.environ.get('DOCKER_PASSWORD')) + +c.ContainerRegistry.container_registries = { + 'https://registry-1.docker.io': _configure_docker_registry +} +``` + +`ContainerRegistry.container_registry_image_name` image name +to assign to docker image pushed for particular registry via a +callable function with arguments of registry and build. + +`ContainerRegistry.container_registry_image_tag` image tag +to assign to docker image pushed for particular registry via a +callable function with arguments of registry and build. + + + ## Frequently Asked Questions -### Conda-Store fails to build Conda environment and worker is spontaneously killed (9 SIGKILL) +### conda-store fails to build Conda environment and worker is spontaneously killed (9 SIGKILL) The following error most likely indicates that you have not allocated enough memory to `conda-store-workers` for solving and building the diff --git a/docs/api.md b/docs/api.md new file mode 100644 index 000000000..fe5789d3b --- /dev/null +++ b/docs/api.md @@ -0,0 +1,27 @@ +# REST API + +Below is an interactive view of conda-store's OpenAPI specification. + + + + + + + + +
+ + diff --git a/docs/conf.py b/docs/conf.py index b836d0994..9ca156d57 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -2,9 +2,9 @@ from recommonmark.transform import AutoStructify project = 'conda-store' -copyright = '2021, Chris Ostrouchov' -author = 'Chris Ostrouchov' -release = '0.1.0' +copyright = '2022, Quansight' +author = 'Quansight' +release = '0.4.11' extensions = [ 'recommonmark', diff --git a/docs/contributing.md b/docs/contributing.md index 2fb1b6fc5..9371c8762 100644 --- a/docs/contributing.md +++ b/docs/contributing.md @@ -1,8 +1,21 @@ # Contributing +## Naming + +When referring to `conda-store` it should always be written all +lowercase with a dash in between. `conda-store` should also be +lowercase when beginning a sentence. + ## Development -Install the following dependencies before developing on Conda-Store. +Significant effort has been put into simplifying the development and +deployment process of `conda-store`. There is a docker based +development workflow along with a non-containerized workflow if you +are using Linux. + +### Containerized development + +Install the following dependencies before developing on conda-store. - [docker](https://docs.docker.com/engine/install/) - [docker-compose](https://docs.docker.com/compose/install/) @@ -21,10 +34,19 @@ docker-compose up --build Notice the `architecture: amd64` whithin the docker-compose.yaml files. ``` +```eval_rst +.. warning :: + If you're developing on a Mac and run into issues that complain about `tcp 0.0.0.0:5000: bind: address already in use` you might need to deactivate the `Airplay Receiver` service from the `Sharing` section in Control Center. + Have a look at this [discussion on Apple.com](https://developer.apple.com/forums/thread/682332) + for more details.
+``` + + The following resources will be available: - conda-store web server running at [http://localhost:5000](http://localhost:5000) - [MinIO](https://min.io/) s3 running at [http://localhost:9000](http://localhost:9000) with username `admin` and password `password` - [PostgreSQL](https://www.postgresql.org/) running at [localhost:5432](localhost:5432) with username `admin` and password `password` database `conda-store` + - [Redis](https://www.redis.com/) running at [localhost:6379](localhost:6379) with password `password` - [JupyterHub](https://jupyter.org/hub) running at [http://localhost:8000](http://localhost:8000) with any username and password `test` On a fast machine this deployment should only take 10 or so seconds @@ -37,6 +59,36 @@ docker-compose down # not always necessary docker-compose up --build ``` +### Linux development + +Install the following dependencies before developing on conda-store. + + - [conda](https://docs.conda.io/projects/conda/en/latest/user-guide/install/linux.html) + +Install the development dependencies and activate the environment. + +```shell +conda env create -f conda-store-server/environment-dev.yaml +conda activate conda-store-server-dev +``` + +Running `conda-store`. `--standalone` mode launches celery as a +subprocess of the web server. + +```shell +python -m conda_store_server.server --standalone tests/assets/conda_store_standalone_config.py +``` + +Visit [localhost:5000](http://localhost:5000/) + +### Changes to API + +The REST API is considered somewhat stable. If any changes are made to +the API make sure to update the OpenAPI/Swagger specification in +`docs/_static/openapi.json`. This may be downloaded from the `/docs` +endpoint when running conda-store. Ensure that the +`c.CondaStoreServer.url_prefix` is set to `/` when generating the +endpoints.
+ ## Documentation Install the following dependencies before contributing to the @@ -107,7 +159,7 @@ After the PyPi release a release on PR must be created that updates to the released version ``. -Conda-Store has two PyPi packages `conda-store-server` and `conda-store`. +conda-store has two PyPi packages `conda-store-server` and `conda-store`. - update `recipies/meta.yaml` with the new version `{% set version = "" %}` - update `recipies/meta.yaml` with the appropriate sha256 for each @@ -120,166 +172,17 @@ the feedstock with the following comment `@conda-forge-admin please rerender`. An example of this can be found in [PR #2](https://github.com/conda-forge/conda-store-feedstock/pull/2) -## REST API - -### Status - - - `GET /api/v1/` :: get status of conda-store - -### Permissions - - - `GET /api/v1/permission/` :: get the permissions for the currently entity. Does not require authentication and returns the permissions and authentication status for entity. - -### Namespace - - - `GET /api/v1/namespace/?page=&size=&sort_by=&order=` :: list namespaces - - allowed `sort_by` values: `name` for the name of the namespace - - - `GET /api/v1/namespace//` :: get namespace - - - `POST /api/v1/namespace//` :: create namespace - - - `DELETE /api/v1/namespace//` :: delete namespace - -### Environments - - - `GET /api/v1/environment/?search=&page=&size=&sort_by=&order=` :: list environments - - allowed `sort_by` values : `namespace` for namespace name, `name` for environment name - - - `GET /api/v1/environment///` :: get environment - - - `PUT /api/v1/environment///` :: update environment to given build id - - - `DELETE /api/v1/environment///` :: delete the environment along with all artifacts and builds - -### Specifications - - - `POST /api/v1/environment/` :: create given environment - - JSON message with optional namespace (will use `CondaStore.default_namespace` if not specified) and a specification string that's a valid environment.yaml for Conda, like so: - ``` - 
{ - "namespace": "some_namespace", - "specification": "name: some_environment_name\ndependencies:\n - python=3.10.2=h543edf9_0_cpython\n" - } - ``` - -### Builds - - - `GET /api/v1/build/?page=&size=&sort_by=&order=` :: list builds - - allowed `sort_by` values : `id` to sort by build id - - - `GET /api/v1/build//` :: get build - - - `PUT /api/v1/build//` :: trigger new build of given build specification - - - `DELETE /api/v1/build//` :: delete given build along with all artifacts that are not in `c.CondaStore.build_artifacts_kept_on_deletion` - - - `GET /api/v1/build//logs/` :: get build logs - - - `GET /api/v1/build//yaml/` :: export environment.yml specification for the given build - - - `GET /api/v1/build//packages/?search=&build=&page=&size=&sort_by=&order=` :: list packages within build - - allowed `sort_by` values : `channel` to sort by channel name, `name` to sort by package name - - `build` string to search within `build` for example strings include - `py27_0` etc which can be useful for filtering specific versions of - packages. - - `search` will search within the package names for a match. The search is fuzzy by default. To get the packages with the exact given name, add the parameter `exact=1`. - -### Conda Channels - - - `GET /api/v1/channel/?page=&size=` :: list channels - -### Conda Packages - - - `GET /api/v1/package/?search=&build=&page=&size=?sort_by=?order=&distinct_on=` :: list packages - - allowed `sort_by` values : `channel` to sort by channel name, `name` to sort by package name - - allowed `distinct_on` values : `channel` to be distinct on channel name, `name` to be distinct on package name, `version` to be distinct on version. - - `build` string to search within `build` for example strings include - `py27_0` etc which can be useful for filtering specific versions of - packages. - - `search` will search within the package names for a match. The search is fuzzy by default. 
To get the packages with the exact given name, add the parameter `exact=1`. - -### REST API query format - -For several paginated results the following query parameters are accepted. - - - `page` page numbers indexing start at 1 - - `size` the number of results to return in each page. The max size - is determined by the - [Traitlets](https://traitlets.readthedocs.io/en/stable/) parameter - `c.CondaStoreServer.max_page_size` with default of 100. - - `sort_by` (can be multiple order_by parameters) indicating a multi-column - ordering. Each route has a list of allowed sorting keys: - for example `namespace`, `name`, `channel`. All paginated routes support - this and have a default specific to the given resource. - - `distinct_on` (can be multiple distinct_on parameters) indicating a - multi-column distinct on. Each route has a list of allowed distinct - keys. - - `order` is either `desc` descending or `asc` ascending with a - default of `asc`. Only one order parameter is accepted. - -If a query requests a page that does not exist a data response of an -empty list is returned. - -### REST API Response Format - -Several Standard Error Codes are returned - - 200 :: response was processed normally - - 400 :: indicates a bad request that is invalid - - 401 :: indicates that request was unauthenticated indicates that authentication is required - - 403 :: indicates that request was not authorized to access resource - - 404 :: indicates that request for resource was not found - - 500 :: hopefully you don't see this error. If you do this is a bug - -Response Format for Errors. - -```json -{ - "status": "error", - "message": "" -} -``` - -Response Format for Success. Several of these response parts are -optional. A route may optionally return a `message` that may be -displayed to the user. - -If the route is paginated it will return a `page`, `size`, and `count` -key. 
- -``` -{ - "status": "ok", - "message": "", - "data": [...], - "page": , - "size": , - "count": , -} -``` - -If the route is not paginated the `page`, `size`, and `count` keys will -not be returned. - -``` -{ - "status": "ok", - "message": "", - "data": {}, -} -``` - ## Architecture -Conda Store was designed with the idea of scalable enterprise +conda-store was designed with the idea of scalable enterprise management of reproducible Conda environments. -![Conda Store architecture diagram](_static/images/conda-store-architecture.png) +![conda-store architecture diagram](_static/images/conda-store-architecture.png) ### Configuration [Traitlets](https://traitlets.readthedocs.io/en/stable/) is used for -all configuration of Conda-Store. In the beginning command line +all configuration of conda-store. In the beginning command line options were used but eventually we learned that there were too many options for the user. Traitlets provides a python configuration file that you can use to configure values of the applications. It is used @@ -287,9 +190,9 @@ for both the server and worker. See [`tests/assets/conda_store_config.py`](https://github.com/Quansight/conda-store/blob/main/tests/assets/conda_store_config.py) for a full example. -### Workers and Server +### Workers and server -Conda-Store can be broken into two components. The workers which have +conda-store can be broken into two components. The workers which have the following responsibilities: - build Conda environments from Conda `environment.yaml` specifications - build Conda pack archives @@ -327,13 +230,13 @@ database, Redis, and S3 compatible object storage. The S3 server is used to store all build artifacts for example logs, docker layers, and the [Conda-Pack](https://conda.github.io/conda-pack/) tarball. 
The PostgreSQL database is used for storing all states on environments and -builds along with powering the Conda-Store web server UI, REST API, +builds along with powering the conda-store web server UI, REST API, and Docker registry. Redis is used for keeping track of task state and results along with enabling locks and realtime streaming of logs. ### Terminology -![Conda Store terminology](_static/images/conda-store-terminology.png) +![conda-store terminology](_static/images/conda-store-terminology.png) `conda_environment = f(open("environment.yaml"), datatime.utcnow())` @@ -385,7 +288,7 @@ is because in general you can add There are two spots that introduce issues to reproducibility. The first issue is tracking when an `environment.yaml` file has changes. This can be easily tracked by taking a sha256 of the file -. This is what Conda-Store does but sorts the dependencies to make +. This is what conda-store does but sorts the dependencies to make sure it has a way of not triggering a rebuild if the order of two packages changes in the dependencies list. In step (2) `repodata.json` is updated regularly. When Conda solves for a user's environment it @@ -401,7 +304,7 @@ you are extending and using a form of OAuth2 use the `conda_store_server.server.auth.GenericOAuthAuthentication`. Similar to JupyterHub all configuration is modified via [Traitlets](https://traitlets.readthedocs.io/en/stable/). Below shows -an example of setting us OAuth2 via JupyterHub for Conda-Store. +an example of setting us OAuth2 via JupyterHub for conda-store. ```python c.CondaStoreServer.authentication_class = JupyterHubOAuthAuthentication @@ -427,7 +330,7 @@ of roles meaning. ### Authorization Model -Conda-Store implements role based authorization to supports a flexible +conda-store implements role based authorization to supports a flexible authorization model. A user or service is either authenticated or not. 
There are a set of default permissions assigned to authenticated and unauthenticated users via Traitlets. These can all be modified in @@ -454,7 +357,7 @@ c.RBACAuthorizationBackend.authenticated_role_bindings = { Once we have collected the role mappings that a given user has we then map `roles` to sets of permissions. Currently there are only a few -permissions but Conda-Store is capable of adapting in the future. +permissions but conda-store is capable of adapting in the future. ```python class Permissions(enum.Enum): @@ -571,7 +474,9 @@ build::update, build::delete}`. The delete environment action requires `build::delete` permissions which the user has thus the action is permitted. -### Database Model +## Database + +### Model At a high level the database model can be described in the image bellow. @@ -599,3 +504,63 @@ eralchemy -i "postgresql+psycopg2://admin:password@localhost:5432/conda-store" - ``` ![entity relationship diagram](_static/images/conda-store-entity-relationship-diagram.png) + +### Migrations + +conda-store relies on [SQLAlchemy](https://www.sqlalchemy.org/) for ORM mapping, and on [Alembic](https://alembic.sqlalchemy.org/en/latest/) for DB migrations. + +The procedure to modify the database is the following : +- First, modify [the ORM Model](https://github.com/Quansight/conda-store/blob/main/conda-store-server/conda_store_server/orm.py) according to the changes you want to make +- edit the file `conda-store-server/alembic.ini` and replace the value for entry `sqlalchemy.url` to match the connection URL of your database. + +- in your command line, run the following : +```sh +cd conda-store-server/conda_store_server +alembic revision --autogenerate -m "description of your changes" +``` +- You should have a new file in `conda-store-server/conda_store_server/alembic/versions/` . **Review it thoroughly**. 
It contains the [`alembic` operations](https://alembic.sqlalchemy.org/en/latest/ops.html) (`op`) to actually modify the database, either when upgrading (`upgrade` function) or downgrading (`downgrade`) + +- You can migrate your data within these `upgrade`/`downgrade` functions, for example : +```python +from alembic import op + +# revision identifiers, used by Alembic. +revision = 'abcdef01234567' +down_revision = '987654321f0edc' +branch_labels = None +depends_on = None + +def upgrade(): + + # operations to modify the database structure + # ... + op.create_table( + 'new_table', + Column('id', INTEGER, primary_key=True), + Column('field1', VARCHAR(50), nullable=False), + Column('field2', INTEGER), + Column('timestamp', TIMESTAMP, server_default=func.now()) + ) + # ... + + op.execute('''INSERT INTO new_table (field1, field2) + SELECT field1, field2 + FROM old_table''') + + # other operations to modify the database structure + # ... + + +def downgrade(): + + op.drop_table('new_table') + +``` + + +- Once you're sure about the changes generated, you can apply them by running : +```sh +alembic upgrade head +``` + +- Check your database : your changes should be reflected. If not, refer to [Alembic's documentation](https://alembic.sqlalchemy.org/en/latest/). diff --git a/docs/index.rst b/docs/index.rst index 1e150e0d0..f85b64d75 100644 --- a/docs/index.rst +++ b/docs/index.rst @@ -1,13 +1,13 @@ -Conda-Store +conda-store =========== End users think in terms of environments not packages. The core philosophy of conda-store is to serve identical conda environments in -as many ways as possible to users and services. Conda-store was +as many ways as possible to users and services. conda-store was developed due to a significant need in enterprise architectures. .. image:: _static/images/conda-store-authenticated.png - :alt: Conda Store Homepage + :alt: conda-store Homepage Use Cases --------- @@ -22,7 +22,7 @@ the package satisfies their constraints. 
This process may take several days and at best will not be immediate. While developers need packages in their environments as soon as possible to do interesting new research. This situation often led to a lot of frustration on both -sides for good reason. Conda-store aims to address this by allowing +sides for good reason. conda-store aims to address this by allowing users to control a set of environments in their namespace while allowing IT to having all environments under their control. @@ -36,7 +36,7 @@ notebook to run a given workflow. They will want to "submit" this notebook with the given environment and run it on a cron job. The only problem is that this creates a huge burden on IT. How is IT supposed to ensure that the environment that that notebook ran with is -preserved indefinitely? Conda-store addresses this by building all +preserved indefinitely? conda-store addresses this by building all environment separately(including updates). There is a unique key that identifies any given environment. Furthermore this environment is available in many different forms: yaml, lockfile, conda tarball, and @@ -53,7 +53,7 @@ containers are used everywhere. The burden of creating images with given packages can be cumbersome. There are tools that make this easier e.g. `repo2docker `_ however these project seem focused on bundling the data/repo with the -image. Conda-store has a feature to build on demand environments based +image. conda-store has a feature to build on demand environments based on the image name. For example the image name `localhost:5000/conda-store-dynamic/numpy/jupyterlab/scipy.gt.1.0` will create a docker image with `numpy`, `jupyterlab`, and `scipy > @@ -64,7 +64,7 @@ working test that demonstrates this. Features -------- -Conda Store controls the environment lifecycle: management, builds, +conda-store controls the environment lifecycle: management, builds, and serving of environments. 
It **manages** conda environments by: @@ -100,6 +100,7 @@ Contents user_guide administration contributing + api Indices and tables ================== diff --git a/docs/installation.md b/docs/installation.md index eaa32c15d..7d5bfb684 100644 --- a/docs/installation.md +++ b/docs/installation.md @@ -1,8 +1,15 @@ # Installation +## Linux + +```shell +conda install conda-store-server>=0.4.10 +conda-store-server --standalone --config tests/assets/conda_store_standalone_config.py +``` + ## Kubernetes -![Conda Store Kubernetes architecture diagram](_static/images/conda-store-installation-kubernetes.png) +![conda-store Kubernetes architecture diagram](_static/images/conda-store-installation-kubernetes.png) The following will describe a local [Kubernetes](https://kubernetes.io/) installation via [minikube](https://minikube.sigs.k8s.io/docs/). The @@ -12,7 +19,7 @@ files required are in `examples/kubernetes` minikube start --cpus 2 --memory 4096 --driver=docker ``` -Now we deploy the `conda-store` components. Note that Conda-Store is +Now we deploy the `conda-store` components. Note that conda-store is compatible with any general s3 like provider and any general database via SQLAlchemy. Currently the docker image is build with support for PostgreSQL and SQLite. Consult the [SQLAlchemy @@ -24,7 +31,7 @@ PostgreSQL deployments and use existing infrastructure. In the case of AWS this may mean using [Amazon RDS](https://aws.amazon.com/rds/) and [s3](https://aws.amazon.com/s3/). Consult your cloud provider for compatible services. In general if it is supported by SQLAlchemy and -there is a s3 compatible object store Conda-Store will +there is a s3 compatible object store conda-store will work. [kustomize](https://github.com/kubernetes-sigs/kustomize) is being used for the deployment which is part to the Kubernetes project itself. @@ -37,7 +44,7 @@ Make sure to change all the usernames and passwords for the deployment. 
If your installation worked you should be able to port forward the -Conda-Store web server. +conda-store web server. ```shell kubectl port-forward service/conda-store-server 5000:5000 @@ -50,7 +57,7 @@ guide](./administration.md) A good test that conda-store is functioning properly is to apply the `jupyterlab-conda-store` pod as a quick test. It will cause -Conda-Store to build an environment with JupyterLab and NumPy. This +conda-store to build an environment with JupyterLab and NumPy. This pod is not needed for running conda-store. ```shell @@ -78,7 +85,7 @@ Then visit via your web browser [https://conda-store.localhost/conda-store](http ## Local Automated systemd Install -Not all environment are containerized and Conda-Store recognizes +Not all environment are containerized and conda-store recognizes that. The goal of CONDA-STORE is to provide Conda environments in as many ways as possible so it SHOULD support non-contianerized environments. The example files required are in diff --git a/docs/user_guide.md b/docs/user_guide.md index d4e6bcd42..d89589893 100644 --- a/docs/user_guide.md +++ b/docs/user_guide.md @@ -73,7 +73,7 @@ conda-unpack ### Docker Registry -Conda-Store acts as a docker registry which allows for interesting +conda-store acts as a docker registry which allows for interesting ways to handle Conda environment. In addition this registry leverages [conda-docker](https://github.com/conda-incubator/conda-docker) which builds docker images without docker allowing for advanced caching, @@ -83,7 +83,15 @@ beginning of the url for example `localhost:5000/`. This is required to tell docker where the docker registry is located. Otherwise by default it will try and user docker hub. Your url will likely be different. +The `conda-store` docker registry requires authentication via any +username with password set to a token that is generated by visiting +the user page to generate a token. 
Alternatively in the +`conda_store_config.py` you can set +`c.AuthenticationBackend.predefined_tokens` which have environment +read permissions on the given docker images needed for pulling. + ``` +docker login -u token -p docker pull docker run -it python ``` @@ -108,9 +116,9 @@ docker run -it localhost:5000//: #### On Demand Docker Image -Conda-store has an additional feature which allow for specifying the +conda-store has an additional feature which allow for specifying the packages within the docker image name itself without requiring an -actual environment to be created on the Conda-Store UI side. +actual environment to be created on the conda-store UI side. The following convention is used `:/conda-store-dynamic/`. After @@ -120,15 +128,15 @@ for example `<=1.10` as `.lt.1.10`. As full example support we want python less than `3.8` and NumPy greater than `1.0`. This would be the following docker image -name. `:/conda-store-dynamic/python.lt.3.8/numpy.gt.1.0`. Conda-store +name. `:/conda-store-dynamic/python.lt.3.8/numpy.gt.1.0`. conda-store will then create the following environment and the docker image will download upon the docker image being built. -## Conda Store UI +## conda-store UI ### `/` Home Page -![Conda Store Homepage](_static/images/conda-store-authenticated.png) +![conda-store Homepage](_static/images/conda-store-authenticated.png) The home page shows all of the available environments in the form `/`. If you are authenticated there with @@ -147,7 +155,7 @@ lockfile](https://github.com/conda-incubator/conda-lock), ### `/login/` Login -![Conda Store Login](_static/images/conda-store-login-jupyterhub-oauth.png) +![conda-store Login](_static/images/conda-store-login-jupyterhub-oauth.png) If you are unauthenticated there is a `login` button on the top navigation bar. This will direct you to the login page. The example @@ -155,7 +163,7 @@ above shows what you will get with JupyterHub authentication. 
### `/user/` User -![Conda Store User](_static/images/conda-store-user.png) +![conda-store User](_static/images/conda-store-user.png) Once a user has completed the authentication flow they will be directed to the user page. This page gives information about the @@ -163,7 +171,7 @@ current authenticated user along with the permissions. ### `/create/` Create Environment -![Conda Store Create Environment](_static/images/conda-store-create-environment.png) +![conda-store Create Environment](_static/images/conda-store-create-environment.png) A user authenticated or unauthenticated has set permissions that allow the user to create environments in a given namespace. Currently the @@ -174,7 +182,7 @@ issues with the format of the environment file. ### `/environment///` Environments -![Conda Store Environment](_static/images/conda-store-environment.png) +![conda-store Environment](_static/images/conda-store-environment.png) The environment page contains a lot of information for the developer. First we see the environment name and namespace along with @@ -208,12 +216,12 @@ For each build several options are available to the user: ### `/build/` builds -![Conda Store Build](_static/images/conda-store-build-complete.png) +![conda-store Build](_static/images/conda-store-build-complete.png) The build page gives all the information about a given build in -Conda-Store. At the top we see high level build metadata. +conda-store. At the top we see high level build metadata. -Conda-Store downloads Conda channel data so that it fully understands +conda-store downloads Conda channel data so that it fully understands the packages that exist within a given environment. A list is provided to the user of all packages within that environment. @@ -225,9 +233,96 @@ succeeded or failed. 
### `/namespace/` manage namespaces -![Conda Store Namespace](_static/images/conda-store-namespace.png) +![conda-store Namespace](_static/images/conda-store-namespace.png) This namespace page allows a user with correct permissions to list, create, and delete namespaces. Note that the deletion of a namespace is destructive and deletes all environments and builds within that namespace. + +## conda-store cli + +The conda-store client can be easily installed via pip and conda. + +```shell +pip install conda-store +conda install -c conda-forge conda-store +``` + +The base cli is inspired by tools such as +[conda](https://docs.conda.io/en/latest/), +[kubectl](https://kubernetes.io/docs/reference/kubectl/), and +[docker](https://docs.docker.com/get-docker/). The base commands are +`download`, `info`, `list`, `run`, `wait`. + +```shell +$ conda-store --help +Usage: conda-store [OPTIONS] COMMAND [ARGS]... + +Options: + --conda-store-url TEXT conda-store base url including prefix + --auth [none|token|basic] conda-store authentication to use + --no-verify-ssl Disable tls verification on API requests + --help Show this message and exit. + +Commands: + download Download artifacts for given build + info Get current permissions and default namespace + list + run Execute given environment specified as a URI with COMMAND + solve Remotely solve given environment.yaml + wait Wait for given URI to complete or fail building +``` + +### `conda-store run` + +One of the motivating features of the `conda-store` cli is that you +can directly execute conda-store environments that exist remotely. + +```shell +conda-store run devops/datascience -- python -m "print(1)" +``` + +### `conda-store solve` + +conda-store is capable to remote solves of environment files. If +requested conda-store can perform intelligent solves with caching. 
+ +### `conda-store download` + +### `conda-store info` + +### `conda-store wait` + +### `conda-store list [namespace|environment|build]` + +## conda-store shebang + +`conda-store` can be used as a +[shebang](https://en.wikipedia.org/wiki/Shebang_(Unix)) within Linux +allowing users to embed Conda environments within scripts for +reproducibility. Basic usage is as follows. Notice that the +`conda-store run` command is just the normal usage of the command. + +```shell +#!/usr/bin/env conda-store +#! conda-store run /: -- python + +print('script running within the conda-store environnent') +``` + +The first line must begin with the shebang `#!` along with ending in +`conda-store`. You cannot put arguments on the first line due to +limits in the shebang specification. Additional lines are then added +starting with `#! conda-store run ...` with are then used as arguments +to `conda-store run`. + +The path to the script being run is always appended as the last +argument to the command so the example above is interpreted as: + +``` +conda-store run /: -- python +``` + +This feature was heavily inspired by [`nix-shell` +shebangs](https://nixos.wiki/wiki/Nix-shell_shebang). 
diff --git a/examples/docker-without-nfs/assets/conda_store_config.py b/examples/docker-without-nfs/assets/conda_store_config.py new file mode 100644 index 000000000..3df60732a --- /dev/null +++ b/examples/docker-without-nfs/assets/conda_store_config.py @@ -0,0 +1,80 @@ +import logging + +from conda_store_server.storage import S3Storage +from conda_store_server.server.auth import JupyterHubOAuthAuthentication + +# ================================== +# conda-store settings +# ================================== +c.CondaStore.storage_class = S3Storage +c.CondaStore.store_directory = "/opt/conda-store/conda-store" +# Also edit `conda-store-server/alembic.ini` accordingly for key sqlalchemy.url +c.CondaStore.database_url = "postgresql+psycopg2://postgres:password@postgres/conda-store" +c.CondaStore.upgrade_db = True +c.CondaStore.redis_url = "redis://:password@redis:6379/0" +c.CondaStore.default_uid = 1000 +c.CondaStore.default_gid = 100 +c.CondaStore.default_permissions = "775" +c.CondaStore.conda_allowed_channels = [ + "https://repo.anaconda.com/pkgs/main", + "main", + "conda-forge", +] +c.CondaStore.conda_included_packages = [ + "ipykernel" +] + +c.S3Storage.internal_endpoint = "minio:9000" +c.S3Storage.external_endpoint = "conda-store.localhost:9080" +c.S3Storage.access_key = "admin" +c.S3Storage.secret_key = "password" +c.S3Storage.region = "us-east-1" # minio region default +c.S3Storage.bucket_name = "conda-store" +c.S3Storage.internal_secure = False +c.S3Storage.external_secure = True + +# ================================== +# server settings +# ================================== +c.CondaStoreServer.log_level = logging.INFO +c.CondaStoreServer.enable_ui = True +c.CondaStoreServer.enable_api = True +c.CondaStoreServer.enable_registry = True +c.CondaStoreServer.enable_metrics = True +c.CondaStoreServer.address = "0.0.0.0" +c.CondaStoreServer.port = 5000 +# This MUST start with `/` +c.CondaStoreServer.url_prefix = "/conda-store" +c.CondaStoreServer.behind_proxy = 
True + + +# ================================== +# auth settings +# ================================== +c.CondaStoreServer.authentication_class = JupyterHubOAuthAuthentication +c.JupyterHubOAuthAuthentication.jupyterhub_url = "https://conda-store.localhost" +c.JupyterHubOAuthAuthentication.client_id = "service-this-is-a-jupyterhub-client" +c.JupyterHubOAuthAuthentication.client_secret = "this-is-a-jupyterhub-secret" +c.JupyterHubOAuthAuthentication.oauth_callback_url = "/conda-store/oauth_callback/" +c.JupyterHubOAuthAuthentication.tls_verify = False + +# ================================== +# worker settings +# ================================== +c.CondaStoreWorker.log_level = logging.INFO +c.CondaStoreWorker.watch_paths = ["/opt/environments/"] +c.CondaStoreWorker.concurrency = 4 + +# For local dev, make it so that anybody can access any endpoint +c.RBACAuthorizationBackend.unauthenticated_role_bindings = { + "*/*": {"admin"}, +} + +c.AuthenticationBackend.predefined_tokens = { + 'this-is-a-jupyterhub-secret-token': { + 'primary_namespace': "default", + 'role_bindings': { + '*/*': ['admin'], + }, + } +} diff --git a/examples/docker-without-nfs/assets/environments/jupyterlab.yaml b/examples/docker-without-nfs/assets/environments/jupyterlab.yaml new file mode 100644 index 000000000..031ef8462 --- /dev/null +++ b/examples/docker-without-nfs/assets/environments/jupyterlab.yaml @@ -0,0 +1,8 @@ +name: jupyterlab +channels: + - conda-forge +dependencies: + - jupyterhub + - jupyterlab + - nb_conda_store_kernels + - ipykernel diff --git a/examples/docker-without-nfs/assets/environments/kernel.yaml b/examples/docker-without-nfs/assets/environments/kernel.yaml new file mode 100644 index 000000000..7628d57ee --- /dev/null +++ b/examples/docker-without-nfs/assets/environments/kernel.yaml @@ -0,0 +1,5 @@ +name: kernel +channels: + - conda-forge +dependencies: + - ipykernel diff --git a/examples/docker-without-nfs/assets/jupyterhub_config.py 
b/examples/docker-without-nfs/assets/jupyterhub_config.py new file mode 100644 index 000000000..d3ae9373b --- /dev/null +++ b/examples/docker-without-nfs/assets/jupyterhub_config.py @@ -0,0 +1,84 @@ +import json +import os +import asyncio + +from jupyterhub.spawner import SimpleLocalProcessSpawner +from jupyterhub.utils import maybe_future +from jupyterhub.auth import DummyAuthenticator +from conda_store.api import CondaStoreAPI + + +c.JupyterHub.ip = "0.0.0.0" + +c.JupyterHub.authenticator_class = DummyAuthenticator +c.DummyAuthenticator.password = 'test' + + +class CondaStoreSpawner(SimpleLocalProcessSpawner): + async def options_form(self, spawner): + token = await self.generate_token(spawner.user.name) + environments = await self.list_environments(token) + options = [f'' for i,_ in enumerate(environments)] + + return f''' +conda-store environment must contain jupyterhub, jupyterlab, nb_conda_store_kernels, and jupyterlab + +Choose an environment: + +''' + + async def options_from_form(self, form_data): + return {key: value[0] for key, value in form_data.items()} + + async def start(self): + self.conda_store_token = await self.generate_token(self.user.name) + self.cmd = ['/opt/conda/envs/conda-store/bin/conda-store', 'run', self.user_options['build_id'], '--', 'jupyter-labhub'] + self.args = ['--JupyterApp.kernel_spec_manager_class', 'nb_conda_store_kernels.manager.CondaStoreKernelSpecManager'] + return await super().start() + + def user_env(self, env): + env = super().user_env(env) + env['CONDA_STORE_URL'] = "https://conda-store.localhost/conda-store" + env['CONDA_STORE_AUTH'] = "token" + env['CONDA_STORE_NO_VERIFY'] = "true" + env['CONDA_STORE_TOKEN'] = self.conda_store_token + return env + + async def generate_token(self, username): + async with CondaStoreAPI( + conda_store_url=os.environ['CONDA_STORE_URL'], + auth_type=os.environ['CONDA_STORE_AUTH'], + verify_ssl='CONDA_STORE_NO_VERIFY' not in os.environ) as conda_store: + return await 
conda_store.create_token( + primary_namespace=username, + role_bindings={ + 'default/*': ['viewer'], + f'{username}/*': ['admin'], + } + ) + + async def list_environments(self, token: str): + async with CondaStoreAPI( + conda_store_url=os.environ['CONDA_STORE_URL'], + auth_type=os.environ['CONDA_STORE_AUTH'], + verify_ssl='CONDA_STORE_NO_VERIFY' not in os.environ) as conda_store: + return await conda_store.list_environments( + status='COMPLETED', + artifact='CONDA_PACK', + packages=['jupyterhub', 'jupyterlab', 'ipykernel', 'nb_conda_store_kernels']) + +c.JupyterHub.spawner_class = CondaStoreSpawner + +c.JupyterHub.services = [ + { + 'name': "conda-store", + 'oauth_client_id': "service-this-is-a-jupyterhub-client", + 'admin': True, + 'url': 'https://conda-store.localhost/conda-store/', + 'api_token': "this-is-a-jupyterhub-secret", + 'oauth_redirect_uri': '/conda-store/oauth_callback/', + 'oauth_no_confirm': True, # allows no authorize yes/no button + } +] diff --git a/examples/docker-without-nfs/docker-compose.yaml b/examples/docker-without-nfs/docker-compose.yaml new file mode 100644 index 000000000..e370476d4 --- /dev/null +++ b/examples/docker-without-nfs/docker-compose.yaml @@ -0,0 +1,157 @@ +version: "3.8" + +volumes: + conda_store_data: + +services: + traefik: + image: "traefik:v2.6" + container_name: "traefik" + command: + - "--api.insecure=true" + - "--providers.docker=true" + - "--providers.docker.exposedbydefault=false" + - "--entrypoints.web.address=:80" + - "--entrypoints.websecure.address=:443" + - "--entrypoints.minio.address=:9080" + - "--entrypoints.websecure.http.tls=true" + - "--entrypoints.minio.http.tls=true" + - "--entrypoints.web.http.redirections.entryPoint.to=websecure" + - "--entrypoints.web.http.redirections.entryPoint.scheme=https" + - "--entrypoints.web.http.redirections.entrypoint.permanent=true" + ports: + - "80:80" + - "443:443" + - "8080:8080" + - "9080:9080" + volumes: + - "/var/run/docker.sock:/var/run/docker.sock:ro" + + 
conda-store-worker: + build: ../../conda-store-server + user: 1000:1000 + volumes: + - conda_store_data:/opt/conda-store/ + - ./assets/conda_store_config.py:/etc/conda-store/conda_store_config.py:ro + - ./assets/environments:/opt/environments:ro + depends_on: + conda-store-server: + condition: service_healthy + links: + - "traefik:conda-store.localhost" + platform: linux/amd64 + command: ['conda-store-worker', '--config', '/etc/conda-store/conda_store_config.py'] + + conda-store-server: + build: ../../conda-store-server + user: 1000:1000 + labels: + - "traefik.enable=true" + - "traefik.http.routers.conda-store.rule=Host(`conda-store.localhost`) && PathPrefix(`/conda-store`)" + - "traefik.http.routers.conda-store.entrypoints=websecure" + - "traefik.port=5000" + depends_on: + postgres: + condition: service_healthy + minio: + condition: service_healthy + links: + - "traefik:conda-store.localhost" + volumes: + - ./assets/conda_store_config.py:/etc/conda-store/conda_store_config.py:ro + healthcheck: + test: ["CMD", "curl", "--fail", "http://localhost:5000/conda-store/api/v1/"] + interval: 10s + timeout: 5s + retries: 5 + platform: linux/amd64 + command: ['conda-store-server', '--config', '/etc/conda-store/conda_store_config.py'] + ports: + - "5000:5000" + + jupyterhub: + build: ../../conda-store + labels: + - "traefik.enable=true" + - "traefik.http.routers.jupyterhub.rule=Host(`conda-store.localhost`) && (Path(`/`) || PathPrefix(`/hub`) || PathPrefix(`/user`))" + - "traefik.http.routers.jupyterhub.entrypoints=websecure" + - "traefik.port=8000" + user: 1000:1000 + environment: + CONDA_STORE_URL: https://conda-store.localhost/conda-store + CONDA_STORE_AUTH: token + CONDA_STORE_NO_VERIFY: "true" + CONDA_STORE_TOKEN: this-is-a-jupyterhub-secret-token + TMPDIR: /tmp + healthcheck: + test: ["CMD", "curl", "--fail", "http://localhost:8000/hub/api/"] + interval: 10s + timeout: 5s + retries: 5 + links: + - "traefik:conda-store.localhost" + volumes: + - 
conda_store_data:/opt/conda-store + - ./assets/jupyterhub_config.py:/opt/jupyterhub/jupyterhub_config.py:ro + platform: linux/amd64 + command: ['/opt/conda/envs/conda-store/bin/jupyterhub', '--config', '/opt/jupyterhub/jupyterhub_config.py', '--debug'] + ports: + - "8000:8000" + + minio: + image: minio/minio:RELEASE.2020-11-10T21-02-24Z + labels: + - "traefik.enable=true" + - "traefik.http.routers.minio.rule=Host(`conda-store.localhost`)" + - "traefik.http.routers.minio.entrypoints=minio" + - "traefik.port=9000" + healthcheck: + test: ["CMD", "curl", "-f", "http://localhost:9000/minio/health/live"] + interval: 10s + timeout: 5s + retries: 5 + entrypoint: sh + command: -c 'mkdir -p /data/conda-store && /usr/bin/minio server /data' + environment: + MINIO_ACCESS_KEY: admin + MINIO_SECRET_KEY: password + + postgres: + image: postgres:13 + user: postgres + ports: + - 5432:5432 + healthcheck: + test: ["CMD-SHELL", "pg_isready"] + interval: 10s + timeout: 5s + retries: 5 + environment: + POSTGRES_USER: postgres + POSTGRES_PASSWORD: password + POSTGRES_DB: conda-store + + mysql: + image: mysql:8.0 + ports: + - 3306:3306 + healthcheck: + test: ["CMD", "mysqladmin" ,"ping", "-h", "localhost"] + interval: 10s + timeout: 5s + retries: 5 + environment: + MYSQL_ROOT_PASSWORD: hbgY8bkoWPMM7EjtcD6wrlVX + MYSQL_USER: admin + MYSQL_PASSWORD: password + MYSQL_DATABASE: conda-store + + redis: + image: bitnami/redis + healthcheck: + test: ["CMD", "redis-cli","ping"] + interval: 10s + timeout: 5s + retries: 5 + environment: + REDIS_PASSWORD: password diff --git a/examples/docker/assets/conda_store_config.py b/examples/docker/assets/conda_store_config.py index 8ef26ba16..565f27876 100644 --- a/examples/docker/assets/conda_store_config.py +++ b/examples/docker/assets/conda_store_config.py @@ -8,10 +8,12 @@ # ================================== c.CondaStore.storage_class = S3Storage c.CondaStore.store_directory = "/opt/conda-store/conda-store" +# Also edit `conda-store-server/alembic.ini` 
accordingly for key sqlalchemy.url c.CondaStore.database_url = "postgresql+psycopg2://postgres:password@postgres/conda-store" +c.CondaStore.upgrade_db = True c.CondaStore.redis_url = "redis://:password@redis:6379/0" c.CondaStore.default_uid = 1000 -c.CondaStore.default_gid = 100 +c.CondaStore.default_gid = 1000 c.CondaStore.default_permissions = "775" c.CondaStore.conda_included_packages = [ "ipykernel" @@ -48,6 +50,7 @@ c.JupyterHubOAuthAuthentication.jupyterhub_url = "https://conda-store.localhost" c.JupyterHubOAuthAuthentication.client_id = "service-this-is-a-jupyterhub-client" c.JupyterHubOAuthAuthentication.client_secret = "this-is-a-jupyterhub-secret" +c.JupyterHubOAuthAuthentication.oauth_callback_url = "/conda-store/oauth_callback/" c.JupyterHubOAuthAuthentication.tls_verify = False # ================================== diff --git a/examples/docker/assets/jupyter_notebook_config.py b/examples/docker/assets/jupyter_notebook_config.py index 080654f89..332174aa2 100644 --- a/examples/docker/assets/jupyter_notebook_config.py +++ b/examples/docker/assets/jupyter_notebook_config.py @@ -1,6 +1,6 @@ c.LauncherShortcuts.shortcuts = { 'conda-store': { - 'title': 'Conda Store', + 'title': 'conda-store', 'target': 'http://conda-store.localhost/conda-store/', } } diff --git a/examples/docker/assets/jupyterhub_config.py b/examples/docker/assets/jupyterhub_config.py index 5a61877ca..4845c2ad0 100644 --- a/examples/docker/assets/jupyterhub_config.py +++ b/examples/docker/assets/jupyterhub_config.py @@ -41,7 +41,9 @@ def preexec(): 'name': "conda-store", 'oauth_client_id': "service-this-is-a-jupyterhub-client", 'admin': True, + 'url': 'https://conda-store.localhost/conda-store/', 'api_token': "this-is-a-jupyterhub-secret", - 'oauth_redirect_uri': 'https://conda-store.localhost/conda-store/oauth_callback/', + 'oauth_redirect_uri': '/conda-store/oauth_callback/', + 'oauth_no_confirm': True, # allows no authorize yes/no button } ] diff --git 
a/examples/docker/docker-compose.yaml b/examples/docker/docker-compose.yaml index 4e120acb9..afab8592c 100644 --- a/examples/docker/docker-compose.yaml +++ b/examples/docker/docker-compose.yaml @@ -29,14 +29,24 @@ services: volumes: - "/var/run/docker.sock:/var/run/docker.sock:ro" + initializer: + image: alpine + restart: "no" + # hack to set perimssions on volume + entrypoint: | + /bin/sh -c "chown -R 1000:1000 /opt/conda-store" + volumes: + - conda_store_data:/opt/conda-store/ + conda-store-worker: build: ../../conda-store-server + user: 1000:1000 volumes: - conda_store_data:/opt/conda-store/ - ./assets/conda_store_config.py:/etc/conda-store/conda_store_config.py:ro depends_on: - redis: - condition: service_healthy + initializer: + condition: service_completed_successfully conda-store-server: condition: service_healthy links: @@ -46,9 +56,10 @@ services: conda-store-server: build: ../../conda-store-server + user: 1000:1000 labels: - "traefik.enable=true" - - "traefik.http.routers.conda-store.rule=Host(`conda-store.localhost`) && PathPrefix(`/conda-store`)" + - "traefik.http.routers.conda-store.rule=Host(`conda-store.localhost`) && (PathPrefix(`/conda-store`) || PathPrefix(`/v2`))" - "traefik.http.routers.conda-store.entrypoints=websecure" - "traefik.port=5000" depends_on: @@ -88,7 +99,7 @@ services: - "traefik.http.routers.jupyterhub.rule=Host(`conda-store.localhost`) && (Path(`/`) || PathPrefix(`/hub`) || PathPrefix(`/user`))" - "traefik.http.routers.jupyterhub.entrypoints=websecure" - "traefik.port=8000" - user: "1000:1000" + user: 1000:1000 healthcheck: test: ["CMD", "curl", "--fail", "http://localhost:8000/hub/api/"] interval: 10s @@ -101,7 +112,7 @@ services: - ./assets/jupyter_notebook_config.py:/etc/jupyter/jupyter_notebook_config.py:ro - ./assets/jupyterhub_config.py:/opt/jupyterhub/jupyterhub_config.py:ro platform: linux/amd64 - command: ['/opt/conda/envs/conda-store/bin/jupyterhub', '--config', '/opt/jupyterhub/jupyterhub_config.py'] + command: 
['/opt/conda/envs/conda-store/bin/jupyterhub', '--config', '/opt/jupyterhub/jupyterhub_config.py', '--debug'] ports: - "8000:8000" @@ -134,6 +145,7 @@ services: timeout: 5s retries: 5 environment: + POSTGRES_USER: postgres POSTGRES_PASSWORD: password POSTGRES_DB: conda-store diff --git a/examples/kubernetes/conda-store-server.yaml b/examples/kubernetes/conda-store-server.yaml index 9ef31ac50..f0dabd688 100644 --- a/examples/kubernetes/conda-store-server.yaml +++ b/examples/kubernetes/conda-store-server.yaml @@ -28,7 +28,7 @@ spec: spec: containers: - name: conda-store-server - image: quansight/conda-store-server:v0.4.2 + image: quansight/conda-store-server:v0.4.5 args: - "conda-store-server" - "--config" diff --git a/examples/kubernetes/conda-store-worker.yaml b/examples/kubernetes/conda-store-worker.yaml index 72dc856d5..36ee6beaf 100644 --- a/examples/kubernetes/conda-store-worker.yaml +++ b/examples/kubernetes/conda-store-worker.yaml @@ -27,7 +27,7 @@ spec: spec: containers: - name: conda-store-server - image: quansight/conda-store-server:v0.4.2 + image: quansight/conda-store-server:v0.4.5 args: - "conda-store-worker" - "--config" diff --git a/examples/standalone/README.md b/examples/standalone/README.md new file mode 100644 index 000000000..0fb8ea198 --- /dev/null +++ b/examples/standalone/README.md @@ -0,0 +1,4 @@ +# Standalone + +A script for minimal configuration to run conda-store. The end goal is +that everything should run under a single container image. 
diff --git a/examples/standalone/docker-compose.yaml b/examples/standalone/docker-compose.yaml new file mode 100644 index 000000000..bf0a5cbc8 --- /dev/null +++ b/examples/standalone/docker-compose.yaml @@ -0,0 +1,32 @@ +version: "3.8" + +volumes: + conda_store_data: + +services: + initializer: + image: alpine + restart: "no" + # hack to set perimssions on volume + entrypoint: | + /bin/sh -c "chown -R 1000:1000 /var/lib/conda-store/" + volumes: + - conda_store_data:/var/lib/conda-store/ + + conda-store-server: + build: ../../conda-store-server + user: 1000:1000 + depends_on: + initializer: + condition: service_completed_successfully + volumes: + - conda_store_data:/var/lib/conda-store/ + healthcheck: + test: ["CMD", "curl", "--fail", "http://localhost:5000/api/v1/"] + interval: 10s + timeout: 5s + retries: 5 + platform: linux/amd64 + command: ['conda-store-server', '--standalone'] + ports: + - "5000:5000" diff --git a/examples/ubuntu2004/tasks/conda-store.yaml b/examples/ubuntu2004/tasks/conda-store.yaml index d96cefe1a..4ac71c0a4 100644 --- a/examples/ubuntu2004/tasks/conda-store.yaml +++ b/examples/ubuntu2004/tasks/conda-store.yaml @@ -30,7 +30,7 @@ copy: content: | [Unit] - Description=Conda-Store Server + Description=conda-store Server Wants=network-online.target After=network-online.target AssertFileIsExecutable=/opt/conda/envs/conda-store/bin/conda-store-server @@ -55,7 +55,7 @@ copy: content: | [Unit] - Description=Conda-Store Worker + Description=conda-store Worker Wants=network-online.target After=network-online.target AssertFileIsExecutable=/opt/conda/envs/conda-store/bin/conda-store-worker @@ -76,7 +76,7 @@ mode: 0644 register: _conda_store_worker_service - - name: Ensure Conda-Store Server is enabled on boot + - name: Ensure conda-store Server is enabled on boot become: true systemd: daemon_reload: true @@ -85,7 +85,7 @@ state: restarted when: _conda_store_server_service.changed or _conda_store_config.changed - - name: Ensure Conda-Store Worker is 
enabled on boot + - name: Ensure conda-store Worker is enabled on boot become: true systemd: daemon_reload: true diff --git a/flake.nix b/flake.nix index 1ca5ae84c..37f7977e6 100644 --- a/flake.nix +++ b/flake.nix @@ -1,5 +1,5 @@ { - description = "Conda-Store"; + description = "conda-store"; inputs = { nixpkgs = { url = "github:nixos/nixpkgs/nixpkgs-unstable"; }; @@ -20,6 +20,7 @@ pkgs.k9s pkgs.docker-compose + # conda-store-server pythonPackages.yarl pythonPackages.requests pythonPackages.pydantic @@ -31,11 +32,30 @@ pythonPackages.pyjwt pythonPackages.minio pythonPackages.filelock + pythonPackages.sqlalchemy + pythonPackages.psycopg2 + # conda-store + pythonPackages.rich + pythonPackages.click + pythonPackages.aiohttp + pythonPackages.ruamel-yaml + + # dev pythonPackages.pytest pythonPackages.black pythonPackages.flake8 + pythonPackages.build + pythonPackages.setuptools + pythonPackages.alembic ]; + + shellHook = '' + export CONDA_STORE_URL=http://localhost:5000/conda-store + export CONDA_STORE_AUTH=basic + export CONDA_STORE_USERNAME=username + export CONDA_STORE_PASSWORD=password + ''; }; }; } diff --git a/resources/helm/chartpress.yaml b/resources/helm/chartpress.yaml new file mode 100644 index 000000000..6aa2a3ce4 --- /dev/null +++ b/resources/helm/chartpress.yaml @@ -0,0 +1,27 @@ +# This is configuration for chartpress, a CLI for Helm chart management. 
+# +# chartpress is used to test, package, and publish the conda-store Helm chart +# +# chartpress is used to: +# - Build images for multiple CPU architectures +# - Update Chart.yaml (version) and values.yaml (image tags) +# - Package and publish Helm charts to a GitHub based Helm chart repository +# +# Configuration reference: +# https://github.com/jupyterhub/chartpress#configuration +# +charts: + - name: conda-store + imagePrefix: quansight/ + repo: + git: quansight/conda-store-helm-chart # Not yet published + published: https://quansight.github.io/conda-store-helm-chart # Not yet published + images: + conda-store: + imageName: quansight/conda-store + contextPath: ../../conda-store + valuesPath: {} + conda-store-server: + imageName: quansight/conda-store-server + contextPath: ../../conda-store-server + valuesPath: {} diff --git a/resources/helm/conda-store/Chart.yaml b/resources/helm/conda-store/Chart.yaml new file mode 100644 index 000000000..334b9f89a --- /dev/null +++ b/resources/helm/conda-store/Chart.yaml @@ -0,0 +1,11 @@ +# Chart.yaml v2 reference: https://helm.sh/docs/topics/charts/#the-chartyaml-file +apiVersion: v2 +name: conda-store +version: 0.4.6-n005.h04d9611 +appVersion: 0.4.7 +description: Serve identical Conda environments in as many ways as possible +home: https://conda-store.readthedocs.io/ +sources: + - https://github.com/Quansight/conda-store +icon: https://github.com/Quansight.png +kubeVersion: ">=1.20.0-0" diff --git a/resources/helm/conda-store/values.yaml b/resources/helm/conda-store/values.yaml new file mode 100644 index 000000000..1b3812698 --- /dev/null +++ b/resources/helm/conda-store/values.yaml @@ -0,0 +1,232 @@ +gateway: + # Number of instances of the conda-store-server to run + replicas: 1 + + # Annotations to apply to the conda-store-server pods + annotations: {} + + # Resource requests/limits for the conda-store-server pod + resources: {} + + # Path prefix to serve conda-store-server api requests under + 
prefix: / + + # The conda-store-server log level + loglevel: INFO + + # The image to use for the conda-store-server pod + image: + name: quansight/conda-store-server + tag: "set-by-chartpress" + pullPolicy: IfNotPresent + + imagePullSecrets: [] + + # Configuration for the conda-store-server + service: + annotations: {} + + auth: + # The auth type to use. One of {simple, kerberos, jupyterhub, custom}. + type: simple + + simple: + # A shared password to use for all users. + password: + + kerberos: + # Path to the HTTP keytab for this node. + keytab: + + jupyterhub: + apiToken: + apiUrl: + + custom: + # The full authenticator class name. + class: + + # Configuration fields to set on the authenticator class. + config: {} + + livenessProbe: + # Enables the livenessProbe. + enabled: true + # Configures the livenessProbe. + initialDelaySeconds: 5 + timeoutSeconds: 2 + periodSeconds: 10 + failureThreshold: 6 + readinessProbe: + # Enables the readinessProbe. + enabled: true + # Configures the readinessProbe. + initialDelaySeconds: 5 + timeoutSeconds: 2 + periodSeconds: 10 + failureThreshold: 3 + + # nodeSelector, affinity, and tolerations the for the `api` pod conda-store-server + nodeSelector: {} + affinity: {} + tolerations: [] + + extraConfig: {} + + backend: + image: + # The image to use for both schedulers and workers + name: quansight/conda-store + tag: "set-by-chartpress" + pullPolicy: IfNotPresent + + namespace: + + # A mapping of environment variables to set for both schedulers and workers. + environment: {} + + scheduler: + extraPodConfig: {} + + extraContainerConfig: {} + + # Cores request/limit for the scheduler. + cores: + request: + limit: + + # Memory request/limit for the scheduler. + memory: + request: + limit: + + worker: + extraPodConfig: {} + + extraContainerConfig: {} + + # Cores request/limit for each worker. + cores: + request: + limit: + + # Memory request/limit for each worker. 
+ memory: + request: + limit: + + threads: + +controller: + enabled: true + + # Any annotations to add to the controller pod + annotations: {} + + # Resource requests/limits for the controller pod + resources: {} + + # Image pull secrets for controller pod + imagePullSecrets: [] + + # The controller log level + loglevel: INFO + + # Max time (in seconds) to keep around records of completed clusters. + # Default is 24 hours. + completedClusterMaxAge: 86400 + + # Time (in seconds) between cleanup tasks removing records of completed + # clusters. Default is 5 minutes. + completedClusterCleanupPeriod: 600 + + # Base delay (in seconds) for backoff when retrying after failures. + backoffBaseDelay: 0.1 + + # Max delay (in seconds) for backoff when retrying after failures. + backoffMaxDelay: 300 + + # Limit on the average number of k8s api calls per second. + k8sApiRateLimit: 50 + + # Limit on the maximum number of k8s api calls per second. + k8sApiRateLimitBurst: 100 + + # The image to use for the controller pod. + image: + name: quansight/conda-store-server + tag: "set-by-chartpress" + pullPolicy: IfNotPresent + + # Settings for nodeSelector, affinity, and tolerations for the controller pods + nodeSelector: {} + affinity: {} + tolerations: [] + +# traefik nested config relates to the traefik Pod and Traefik running within it +# that is acting as a proxy for traffic towards the gateway +traefik: + # Number of instances of the proxy to run + replicas: 1 + + # Any annotations to add to the proxy pods + annotations: {} + + # Resource requests/limits for the proxy pods + resources: {} + + # The image to use for the proxy pod + image: + name: traefik + tag: "2.6.3" + pullPolicy: IfNotPresent + imagePullSecrets: [] + + # Any additional arguments to forward to traefik + additionalArguments: [] + + # The proxy log level + loglevel: WARN + + # Whether to expose the dashboard on port 9000 (enable for debugging only!) 
+ dashboard: false + + # Additional configuration for the traefik service + service: + type: LoadBalancer + annotations: {} + spec: {} + ports: + web: + port: 80 + nodePort: + tcp: + port: web + nodePort: + + nodeSelector: {} + affinity: {} + tolerations: [] + +# rbac nested configuration relates to the choice of creating or replacing +# resources like (Cluster)Role, (Cluster)RoleBinding, and ServiceAccount. +rbac: + enabled: true + + # Existing names to use if ClusterRoles, ClusterRoleBindings, and + # ServiceAccounts have already been created by other means (leave set to + # `null` to create all required roles at install time) + controller: + serviceAccountName: + + gateway: + serviceAccountName: + + traefik: + serviceAccountName: + +# global nested configuration is accessible by all Helm charts that may depend +# on each other, but not used by this Helm chart. An entry is created here to +# validate its use and catch YAML typos via this configurations associated JSON +# schema. +global: {} diff --git a/tests/assets/conda_store_config.py b/tests/assets/conda_store_config.py index 574bfcd4d..5a45cd3f5 100644 --- a/tests/assets/conda_store_config.py +++ b/tests/assets/conda_store_config.py @@ -13,7 +13,7 @@ c.CondaStore.database_url = "postgresql+psycopg2://postgres:password@postgres/conda-store" c.CondaStore.redis_url = "redis://:password@redis:6379/0" c.CondaStore.default_uid = 1000 -c.CondaStore.default_gid = 100 +c.CondaStore.default_gid = 1000 c.CondaStore.default_permissions = "775" c.CondaStore.conda_included_packages = [ 'ipykernel' @@ -51,6 +51,10 @@ # auth settings # ================================== c.CondaStoreServer.authentication_class = DummyAuthentication +c.CondaStoreServer.template_vars = { + "banner": '', + "logo": "https://quansight.com/_next/image?url=https%3A%2F%2Fa.storyblok.com%2Ff%2F147759%2F1076x520%2Fe6cd6af012%2Fquansight-logo-no-tagline.png&w=3840&q=75", +} # ================================== # worker settings @@ -58,3 +62,19 @@ 
c.CondaStoreWorker.log_level = logging.INFO c.CondaStoreWorker.watch_paths = ["/opt/environments"] c.CondaStoreWorker.concurrency = 4 + +# ================================== +# registry settings +# ================================== +# from python_docker.registry import Registry +# import os + +# def _configure_docker_registry(registry_url: str): +# return Registry( +# "https://registry-1.docker.io", +# username=os.environ.get('DOCKER_USERNAME'), +# password=os.environ.get('DOCKER_PASSWORD')) + +# c.ContainerRegistry.container_registries = { +# 'https://registry-1.docker.io': _configure_docker_registry +# } diff --git a/tests/e2e/cypress/integration/main.js b/tests/e2e/cypress/integration/main.js index cc6e34f44..c32d9a68d 100644 --- a/tests/e2e/cypress/integration/main.js +++ b/tests/e2e/cypress/integration/main.js @@ -33,13 +33,10 @@ describe('First Test', () => { .type('password') cy.get('form').submit() - cy.url().should('include', 'user') - - // visit home page again - cy.get('a.navbar-brand').click() + // should redirect to home page // visit environment - cy.get('h5.card-title > a').contains('filesystem/python-flask-env').click() + cy.get('h5.card-title > a').contains('filesystem / python-flask-env').click() cy.url().should('include', 'environment') // visit build @@ -48,5 +45,8 @@ describe('First Test', () => { // wait for build to complete reloadPageUntilCompleted() + + // visit user page + cy.visit('/conda-store/user/'); }) }) diff --git a/tests/e2e/package-lock.json b/tests/e2e/package-lock.json index 0252ae09a..eaa5b1c5a 100644 --- a/tests/e2e/package-lock.json +++ b/tests/e2e/package-lock.json @@ -1633,9 +1633,9 @@ } }, "node_modules/moment": { - "version": "2.29.2", - "resolved": "https://registry.npmjs.org/moment/-/moment-2.29.2.tgz", - "integrity": "sha512-UgzG4rvxYpN15jgCmVJwac49h9ly9NurikMWGPdVxm8GZD6XjkKPxDTjQQ43gtGgnV3X0cAyWDdP2Wexoquifg==", + "version": "2.29.4", + "resolved": "https://registry.npmjs.org/moment/-/moment-2.29.4.tgz", + 
"integrity": "sha512-5LC9SOxjSc2HF6vO2CyuTDNivEdoz2IvyJJGj6X8DJ0eFyfszE0QiEd+iXmBvUP3WHxSjFH/vIsA0EN00cgr8w==", "dev": true, "engines": { "node": "*" @@ -3593,9 +3593,9 @@ } }, "moment": { - "version": "2.29.2", - "resolved": "https://registry.npmjs.org/moment/-/moment-2.29.2.tgz", - "integrity": "sha512-UgzG4rvxYpN15jgCmVJwac49h9ly9NurikMWGPdVxm8GZD6XjkKPxDTjQQ43gtGgnV3X0cAyWDdP2Wexoquifg==", + "version": "2.29.4", + "resolved": "https://registry.npmjs.org/moment/-/moment-2.29.4.tgz", + "integrity": "sha512-5LC9SOxjSc2HF6vO2CyuTDNivEdoz2IvyJJGj6X8DJ0eFyfszE0QiEd+iXmBvUP3WHxSjFH/vIsA0EN00cgr8w==", "dev": true }, "ms": { diff --git a/tests/links-check/markdown.links.config.json b/tests/links-check/markdown.links.config.json deleted file mode 100644 index 6e1c2b393..000000000 --- a/tests/links-check/markdown.links.config.json +++ /dev/null @@ -1,7 +0,0 @@ -{ - "ignorePatterns": [ - { - "pattern": "localhost:\d+", - } - ] -} diff --git a/tests/scripts/shebang.py b/tests/scripts/shebang.py new file mode 100755 index 000000000..8fbad7669 --- /dev/null +++ b/tests/scripts/shebang.py @@ -0,0 +1,9 @@ +#!/usr/bin/env conda-store +#! 
conda-store run filesystem/python-flask-env:1 -- python + +# filesystem/python-flask-env:1 is populated from tests/assets/environments/python-flask-env.yaml +import sys + +assert sys.version_info[:2] == (3, 9) +import flask +print('shebang script ran') diff --git a/tests/test_api.py b/tests/test_api.py index 42405634a..9c096ab19 100644 --- a/tests/test_api.py +++ b/tests/test_api.py @@ -302,7 +302,8 @@ def test_api_list_conda_channels_unauth(testclient): api_channels = set(_.name for _ in r.data) assert api_channels == { 'https://conda.anaconda.org/main', - 'https://conda.anaconda.org/conda-forge' + 'https://repo.anaconda.com/pkgs/main', + 'https://conda.anaconda.org/conda-forge', } diff --git a/tests/vale/styles/Google/AMPM.yml b/tests/vale/styles/Google/AMPM.yml deleted file mode 100644 index fbdc6e4f8..000000000 --- a/tests/vale/styles/Google/AMPM.yml +++ /dev/null @@ -1,9 +0,0 @@ -extends: existence -message: "Use 'AM' or 'PM' (preceded by a space)." -link: 'https://developers.google.com/style/word-list' -level: error -nonword: true -tokens: - - '\d{1,2}[AP]M' - - '\d{1,2} ?[ap]m' - - '\d{1,2} ?[aApP]\.[mM]\.' diff --git a/tests/vale/styles/Google/Acronyms.yml b/tests/vale/styles/Google/Acronyms.yml deleted file mode 100644 index 18bbbf7d1..000000000 --- a/tests/vale/styles/Google/Acronyms.yml +++ /dev/null @@ -1,67 +0,0 @@ -extends: conditional -message: "Spell out '%s', if it's unfamiliar to the audience." -link: 'https://developers.google.com/style/abbreviations' -level: suggestion -ignorecase: false -# Ensures that the existence of 'first' implies the existence of 'second'. -first: '\b([A-Z]{3,5})\b' -second: '(?:\b[A-Z][a-z]+ )+\(([A-Z]{3,5})\)' -# ... 
with the exception of these: -exceptions: - - API - - AWS - - ASP - - CLI - - CNAME - - CPU - - CSS - - CSV - - DEBUG - - DOM - - DPI - - FAQ - - GCC - - GDB - - GET - - GKE - - GPU - - GTK - - GUI - - HTML - - HTTP - - HTTPS - - IDE - - JAR - - JSON - - JSX - - LESS - - LLDB - - NET - - NOTE - - NVDA - - OSS - - PATH - - PDF - - PHP - - POST - - RAM - - REPL - - RSA - - SCM - - SCSS - - SDK - - SQL - - SSH - - SSL - - SVG - - TBD - - TCP - - TODO - - URI - - URL - - USB - - UTF - - XML - - XSS - - YAML - - ZIP \ No newline at end of file diff --git a/tests/vale/styles/Google/Colons.yml b/tests/vale/styles/Google/Colons.yml deleted file mode 100644 index 99363fbd4..000000000 --- a/tests/vale/styles/Google/Colons.yml +++ /dev/null @@ -1,8 +0,0 @@ -extends: existence -message: "'%s' should be in lowercase." -link: 'https://developers.google.com/style/colons' -nonword: true -level: warning -scope: sentence -tokens: - - ':\s[A-Z]' diff --git a/tests/vale/styles/Google/Contractions.yml b/tests/vale/styles/Google/Contractions.yml deleted file mode 100644 index 95234987b..000000000 --- a/tests/vale/styles/Google/Contractions.yml +++ /dev/null @@ -1,30 +0,0 @@ -extends: substitution -message: "Feel free to use '%s' instead of '%s'." 
-link: 'https://developers.google.com/style/contractions' -level: suggestion -ignorecase: true -action: - name: replace -swap: - are not: aren't - cannot: can't - could not: couldn't - did not: didn't - do not: don't - does not: doesn't - has not: hasn't - have not: haven't - how is: how's - is not: isn't - it is: it's - should not: shouldn't - that is: that's - they are: they're - was not: wasn't - we are: we're - we have: we've - were not: weren't - what is: what's - when is: when's - where is: where's - will not: won't diff --git a/tests/vale/styles/Google/DateFormat.yml b/tests/vale/styles/Google/DateFormat.yml deleted file mode 100644 index e9d227fa1..000000000 --- a/tests/vale/styles/Google/DateFormat.yml +++ /dev/null @@ -1,9 +0,0 @@ -extends: existence -message: "Use 'July 31, 2016' format, not '%s'." -link: 'https://developers.google.com/style/dates-times' -ignorecase: true -level: error -nonword: true -tokens: - - '\d{1,2}(?:\.|/)\d{1,2}(?:\.|/)\d{4}' - - '\d{1,2} (?:Jan(?:uary)?|Feb(?:ruary)?|Mar(?:ch)?|Apr(?:il)|May|Jun(?:e)|Jul(?:y)|Aug(?:ust)|Sep(?:tember)?|Oct(?:ober)|Nov(?:ember)?|Dec(?:ember)?) \d{4}' diff --git a/tests/vale/styles/Google/Ellipses.yml b/tests/vale/styles/Google/Ellipses.yml deleted file mode 100644 index 1e070517b..000000000 --- a/tests/vale/styles/Google/Ellipses.yml +++ /dev/null @@ -1,9 +0,0 @@ -extends: existence -message: "In general, don't use an ellipsis." -link: 'https://developers.google.com/style/ellipses' -nonword: true -level: warning -action: - name: remove -tokens: - - '\.\.\.' diff --git a/tests/vale/styles/Google/EmDash.yml b/tests/vale/styles/Google/EmDash.yml deleted file mode 100644 index 1befe72aa..000000000 --- a/tests/vale/styles/Google/EmDash.yml +++ /dev/null @@ -1,12 +0,0 @@ -extends: existence -message: "Don't put a space before or after a dash." 
-link: 'https://developers.google.com/style/dashes' -nonword: true -level: error -action: - name: edit - params: - - remove - - ' ' -tokens: - - '\s[—–]\s' diff --git a/tests/vale/styles/Google/EnDash.yml b/tests/vale/styles/Google/EnDash.yml deleted file mode 100644 index b314dc4e9..000000000 --- a/tests/vale/styles/Google/EnDash.yml +++ /dev/null @@ -1,13 +0,0 @@ -extends: existence -message: "Use an em dash ('—') instead of '–'." -link: 'https://developers.google.com/style/dashes' -nonword: true -level: error -action: - name: edit - params: - - replace - - '-' - - '—' -tokens: - - '–' diff --git a/tests/vale/styles/Google/Exclamation.yml b/tests/vale/styles/Google/Exclamation.yml deleted file mode 100644 index 3e15181b2..000000000 --- a/tests/vale/styles/Google/Exclamation.yml +++ /dev/null @@ -1,7 +0,0 @@ -extends: existence -message: "Don't use exclamation points in text." -link: 'https://developers.google.com/style/exclamation-points' -nonword: true -level: error -tokens: - - '\w!(?:\s|$)' diff --git a/tests/vale/styles/Google/FirstPerson.yml b/tests/vale/styles/Google/FirstPerson.yml deleted file mode 100644 index 0b7b8828c..000000000 --- a/tests/vale/styles/Google/FirstPerson.yml +++ /dev/null @@ -1,13 +0,0 @@ -extends: existence -message: "Avoid first-person pronouns such as '%s'." -link: 'https://developers.google.com/style/pronouns#personal-pronouns' -ignorecase: true -level: warning -nonword: true -tokens: - - (?:^|\s)I\s - - (?:^|\s)I,\s - - \bI'm\b - - \bme\b - - \bmy\b - - \bmine\b diff --git a/tests/vale/styles/Google/Gender.yml b/tests/vale/styles/Google/Gender.yml deleted file mode 100644 index c8486181d..000000000 --- a/tests/vale/styles/Google/Gender.yml +++ /dev/null @@ -1,9 +0,0 @@ -extends: existence -message: "Don't use '%s' as a gender-neutral pronoun." 
-link: 'https://developers.google.com/style/pronouns#gender-neutral-pronouns' -level: error -ignorecase: true -tokens: - - he/she - - s/he - - \(s\)he diff --git a/tests/vale/styles/Google/GenderBias.yml b/tests/vale/styles/Google/GenderBias.yml deleted file mode 100644 index 261cfb666..000000000 --- a/tests/vale/styles/Google/GenderBias.yml +++ /dev/null @@ -1,45 +0,0 @@ -extends: substitution -message: "Consider using '%s' instead of '%s'." -link: 'https://developers.google.com/style/inclusive-documentation' -ignorecase: true -level: error -swap: - (?:alumna|alumnus): graduate - (?:alumnae|alumni): graduates - air(?:m[ae]n|wom[ae]n): pilot(s) - anchor(?:m[ae]n|wom[ae]n): anchor(s) - authoress: author - camera(?:m[ae]n|wom[ae]n): camera operator(s) - chair(?:m[ae]n|wom[ae]n): chair(s) - congress(?:m[ae]n|wom[ae]n): member(s) of congress - door(?:m[ae]|wom[ae]n): concierge(s) - draft(?:m[ae]n|wom[ae]n): drafter(s) - fire(?:m[ae]n|wom[ae]n): firefighter(s) - fisher(?:m[ae]n|wom[ae]n): fisher(s) - fresh(?:m[ae]n|wom[ae]n): first-year student(s) - garbage(?:m[ae]n|wom[ae]n): waste collector(s) - lady lawyer: lawyer - ladylike: courteous - landlord: building manager - mail(?:m[ae]n|wom[ae]n): mail carriers - man and wife: husband and wife - man enough: strong enough - mankind: human kind - manmade: manufactured - manpower: personnel - men and girls: men and women - middle(?:m[ae]n|wom[ae]n): intermediary - news(?:m[ae]n|wom[ae]n): journalist(s) - ombuds(?:man|woman): ombuds - oneupmanship: upstaging - poetess: poet - police(?:m[ae]n|wom[ae]n): police officer(s) - repair(?:m[ae]n|wom[ae]n): technician(s) - sales(?:m[ae]n|wom[ae]n): salesperson or sales people - service(?:m[ae]n|wom[ae]n): soldier(s) - steward(?:ess)?: flight attendant - tribes(?:m[ae]n|wom[ae]n): tribe member(s) - waitress: waiter - woman doctor: doctor - woman scientist[s]?: scientist(s) - work(?:m[ae]n|wom[ae]n): worker(s) diff --git a/tests/vale/styles/Google/HeadingPunctuation.yml 
b/tests/vale/styles/Google/HeadingPunctuation.yml deleted file mode 100644 index b538be5b4..000000000 --- a/tests/vale/styles/Google/HeadingPunctuation.yml +++ /dev/null @@ -1,13 +0,0 @@ -extends: existence -message: "Don't put a period at the end of a heading." -link: 'https://developers.google.com/style/capitalization#capitalization-in-titles-and-headings' -nonword: true -level: warning -scope: heading -action: - name: edit - params: - - remove - - '.' -tokens: - - '[a-z0-9][.]\s*$' diff --git a/tests/vale/styles/Google/Headings.yml b/tests/vale/styles/Google/Headings.yml deleted file mode 100644 index a53301338..000000000 --- a/tests/vale/styles/Google/Headings.yml +++ /dev/null @@ -1,29 +0,0 @@ -extends: capitalization -message: "'%s' should use sentence-style capitalization." -link: 'https://developers.google.com/style/capitalization#capitalization-in-titles-and-headings' -level: warning -scope: heading -match: $sentence -indicators: - - ':' -exceptions: - - Azure - - CLI - - Code - - Cosmos - - Docker - - Emmet - - gRPC - - I - - Kubernetes - - Linux - - macOS - - Marketplace - - MongoDB - - REPL - - Studio - - TypeScript - - URLs - - Visual - - VS - - Windows diff --git a/tests/vale/styles/Google/Latin.yml b/tests/vale/styles/Google/Latin.yml deleted file mode 100644 index d91700de3..000000000 --- a/tests/vale/styles/Google/Latin.yml +++ /dev/null @@ -1,11 +0,0 @@ -extends: substitution -message: "Use '%s' instead of '%s'." -link: 'https://developers.google.com/style/abbreviations' -ignorecase: true -level: error -nonword: true -action: - name: replace -swap: - '\b(?:eg|e\.g\.)[\s,]': for example - '\b(?:ie|i\.e\.)[\s,]': that is diff --git a/tests/vale/styles/Google/LyHyphens.yml b/tests/vale/styles/Google/LyHyphens.yml deleted file mode 100644 index ac8f557a4..000000000 --- a/tests/vale/styles/Google/LyHyphens.yml +++ /dev/null @@ -1,14 +0,0 @@ -extends: existence -message: "'%s' doesn't need a hyphen." 
-link: 'https://developers.google.com/style/hyphens' -level: error -ignorecase: false -nonword: true -action: - name: edit - params: - - replace - - '-' - - ' ' -tokens: - - '\s[^\s-]+ly-' diff --git a/tests/vale/styles/Google/OptionalPlurals.yml b/tests/vale/styles/Google/OptionalPlurals.yml deleted file mode 100644 index f858ea6fe..000000000 --- a/tests/vale/styles/Google/OptionalPlurals.yml +++ /dev/null @@ -1,12 +0,0 @@ -extends: existence -message: "Don't use plurals in parentheses such as in '%s'." -link: 'https://developers.google.com/style/plurals-parentheses' -level: error -nonword: true -action: - name: edit - params: - - remove - - '(s)' -tokens: - - '\b\w+\(s\)' diff --git a/tests/vale/styles/Google/Ordinal.yml b/tests/vale/styles/Google/Ordinal.yml deleted file mode 100644 index d1ac7d27e..000000000 --- a/tests/vale/styles/Google/Ordinal.yml +++ /dev/null @@ -1,7 +0,0 @@ -extends: existence -message: "Spell out all ordinal numbers ('%s') in text." -link: 'https://developers.google.com/style/numbers' -level: error -nonword: true -tokens: - - \d+(?:st|nd|rd|th) diff --git a/tests/vale/styles/Google/OxfordComma.yml b/tests/vale/styles/Google/OxfordComma.yml deleted file mode 100644 index b9ba21ebb..000000000 --- a/tests/vale/styles/Google/OxfordComma.yml +++ /dev/null @@ -1,7 +0,0 @@ -extends: existence -message: "Use the Oxford comma in '%s'." -link: 'https://developers.google.com/style/commas' -scope: sentence -level: warning -tokens: - - '(?:[^,]+,){1,}\s\w+\s(?:and|or)' diff --git a/tests/vale/styles/Google/Parens.yml b/tests/vale/styles/Google/Parens.yml deleted file mode 100644 index 3b8711d0c..000000000 --- a/tests/vale/styles/Google/Parens.yml +++ /dev/null @@ -1,7 +0,0 @@ -extends: existence -message: "Use parentheses judiciously." 
-link: 'https://developers.google.com/style/parentheses' -nonword: true -level: suggestion -tokens: - - '\(.+\)' diff --git a/tests/vale/styles/Google/Passive.yml b/tests/vale/styles/Google/Passive.yml deleted file mode 100644 index 3265890e5..000000000 --- a/tests/vale/styles/Google/Passive.yml +++ /dev/null @@ -1,184 +0,0 @@ -extends: existence -link: 'https://developers.google.com/style/voice' -message: "In general, use active voice instead of passive voice ('%s')." -ignorecase: true -level: suggestion -raw: - - \b(am|are|were|being|is|been|was|be)\b\s* -tokens: - - '[\w]+ed' - - awoken - - beat - - become - - been - - begun - - bent - - beset - - bet - - bid - - bidden - - bitten - - bled - - blown - - born - - bought - - bound - - bred - - broadcast - - broken - - brought - - built - - burnt - - burst - - cast - - caught - - chosen - - clung - - come - - cost - - crept - - cut - - dealt - - dived - - done - - drawn - - dreamt - - driven - - drunk - - dug - - eaten - - fallen - - fed - - felt - - fit - - fled - - flown - - flung - - forbidden - - foregone - - forgiven - - forgotten - - forsaken - - fought - - found - - frozen - - given - - gone - - gotten - - ground - - grown - - heard - - held - - hidden - - hit - - hung - - hurt - - kept - - knelt - - knit - - known - - laid - - lain - - leapt - - learnt - - led - - left - - lent - - let - - lighted - - lost - - made - - meant - - met - - misspelt - - mistaken - - mown - - overcome - - overdone - - overtaken - - overthrown - - paid - - pled - - proven - - put - - quit - - read - - rid - - ridden - - risen - - run - - rung - - said - - sat - - sawn - - seen - - sent - - set - - sewn - - shaken - - shaven - - shed - - shod - - shone - - shorn - - shot - - shown - - shrunk - - shut - - slain - - slept - - slid - - slit - - slung - - smitten - - sold - - sought - - sown - - sped - - spent - - spilt - - spit - - split - - spoken - - spread - - sprung - - spun - - stolen - - stood - - stridden - - striven - - 
struck - - strung - - stuck - - stung - - stunk - - sung - - sunk - - swept - - swollen - - sworn - - swum - - swung - - taken - - taught - - thought - - thrived - - thrown - - thrust - - told - - torn - - trodden - - understood - - upheld - - upset - - wed - - wept - - withheld - - withstood - - woken - - won - - worn - - wound - - woven - - written - - wrung diff --git a/tests/vale/styles/Google/Periods.yml b/tests/vale/styles/Google/Periods.yml deleted file mode 100644 index d24a6a6c0..000000000 --- a/tests/vale/styles/Google/Periods.yml +++ /dev/null @@ -1,7 +0,0 @@ -extends: existence -message: "Don't use periods with acronyms or initialisms such as '%s'." -link: 'https://developers.google.com/style/abbreviations' -level: error -nonword: true -tokens: - - '\b(?:[A-Z]\.){3,}' diff --git a/tests/vale/styles/Google/Quotes.yml b/tests/vale/styles/Google/Quotes.yml deleted file mode 100644 index 3cb6f1abd..000000000 --- a/tests/vale/styles/Google/Quotes.yml +++ /dev/null @@ -1,7 +0,0 @@ -extends: existence -message: "Commas and periods go inside quotation marks." -link: 'https://developers.google.com/style/quotation-marks' -level: error -nonword: true -tokens: - - '"[^"]+"[.,?]' diff --git a/tests/vale/styles/Google/Ranges.yml b/tests/vale/styles/Google/Ranges.yml deleted file mode 100644 index 3ec045e77..000000000 --- a/tests/vale/styles/Google/Ranges.yml +++ /dev/null @@ -1,7 +0,0 @@ -extends: existence -message: "Don't add words such as 'from' or 'between' to describe a range of numbers." -link: 'https://developers.google.com/style/hyphens' -nonword: true -level: warning -tokens: - - '(?:from|between)\s\d+\s?-\s?\d+' diff --git a/tests/vale/styles/Google/Semicolons.yml b/tests/vale/styles/Google/Semicolons.yml deleted file mode 100644 index bb8b85b42..000000000 --- a/tests/vale/styles/Google/Semicolons.yml +++ /dev/null @@ -1,8 +0,0 @@ -extends: existence -message: "Use semicolons judiciously." 
-link: 'https://developers.google.com/style/semicolons' -nonword: true -scope: sentence -level: suggestion -tokens: - - ';' diff --git a/tests/vale/styles/Google/Slang.yml b/tests/vale/styles/Google/Slang.yml deleted file mode 100644 index 63f4c248a..000000000 --- a/tests/vale/styles/Google/Slang.yml +++ /dev/null @@ -1,11 +0,0 @@ -extends: existence -message: "Don't use internet slang abbreviations such as '%s'." -link: 'https://developers.google.com/style/abbreviations' -ignorecase: true -level: error -tokens: - - 'tl;dr' - - ymmv - - rtfm - - imo - - fwiw diff --git a/tests/vale/styles/Google/Spacing.yml b/tests/vale/styles/Google/Spacing.yml deleted file mode 100644 index 27f7ca2bd..000000000 --- a/tests/vale/styles/Google/Spacing.yml +++ /dev/null @@ -1,8 +0,0 @@ -extends: existence -message: "'%s' should have one space." -link: 'https://developers.google.com/style/sentence-spacing' -level: error -nonword: true -tokens: - - '[a-z][.?!] {2,}[A-Z]' - - '[a-z][.?!][A-Z]' diff --git a/tests/vale/styles/Google/Spelling.yml b/tests/vale/styles/Google/Spelling.yml deleted file mode 100644 index 57acb8841..000000000 --- a/tests/vale/styles/Google/Spelling.yml +++ /dev/null @@ -1,8 +0,0 @@ -extends: existence -message: "In general, use American spelling instead of '%s'." -link: 'https://developers.google.com/style/spelling' -ignorecase: true -level: warning -tokens: - - '(?:\w+)nised?' - - '(?:\w+)logue' diff --git a/tests/vale/styles/Google/Units.yml b/tests/vale/styles/Google/Units.yml deleted file mode 100644 index 379fad6b8..000000000 --- a/tests/vale/styles/Google/Units.yml +++ /dev/null @@ -1,8 +0,0 @@ -extends: existence -message: "Put a nonbreaking space between the number and the unit in '%s'." 
-link: 'https://developers.google.com/style/units-of-measure' -nonword: true -level: error -tokens: - - \d+(?:B|kB|MB|GB|TB) - - \d+(?:ns|ms|s|min|h|d) diff --git a/tests/vale/styles/Google/We.yml b/tests/vale/styles/Google/We.yml deleted file mode 100644 index c7ac7d362..000000000 --- a/tests/vale/styles/Google/We.yml +++ /dev/null @@ -1,11 +0,0 @@ -extends: existence -message: "Try to avoid using first-person plural like '%s'." -link: 'https://developers.google.com/style/pronouns#personal-pronouns' -level: warning -ignorecase: true -tokens: - - we - - we'(?:ve|re) - - ours? - - us - - let's diff --git a/tests/vale/styles/Google/Will.yml b/tests/vale/styles/Google/Will.yml deleted file mode 100644 index 128a91836..000000000 --- a/tests/vale/styles/Google/Will.yml +++ /dev/null @@ -1,7 +0,0 @@ -extends: existence -message: "Avoid using '%s'." -link: 'https://developers.google.com/style/tense' -ignorecase: true -level: warning -tokens: - - will diff --git a/tests/vale/styles/Google/WordList.yml b/tests/vale/styles/Google/WordList.yml deleted file mode 100644 index bb711517e..000000000 --- a/tests/vale/styles/Google/WordList.yml +++ /dev/null @@ -1,80 +0,0 @@ -extends: substitution -message: "Use '%s' instead of '%s'." 
-link: 'https://developers.google.com/style/word-list' -level: warning -ignorecase: false -action: - name: replace -swap: - '(?:API Console|dev|developer) key': API key - '(?:cell ?phone|smart ?phone)': phone|mobile phone - '(?:dev|developer|APIs) console': API console - '(?:e-mail|Email|E-mail)': email - '(?:file ?path|path ?name)': path - '(?:kill|terminate|abort)': stop|exit|cancel|end - '(?:OAuth ?2|Oauth)': OAuth 2.0 - '(?:ok|Okay)': OK|okay - '(?:WiFi|wifi)': Wi-Fi - '[\.]+apk': APK - '3\-D': 3D - 'Google (?:I\-O|IO)': Google I/O - 'tap (?:&|and) hold': touch & hold - 'un(?:check|select)': clear - above: preceding - account name: username - action bar: app bar - admin: administrator - Ajax: AJAX - Android device: Android-powered device - android: Android - API explorer: APIs Explorer - application: app - approx\.: approximately - authN: authentication - authZ: authorization - autoupdate: automatically update - cellular data: mobile data - cellular network: mobile network - chapter: documents|pages|sections - check box: checkbox - check: select - CLI: command-line tool - click on: click|click in - Cloud: Google Cloud Platform|GCP - Container Engine: Kubernetes Engine - content type: media type - curated roles: predefined roles - data are: data is - Developers Console: Google API Console|API Console - disabled?: turn off|off - ephemeral IP address: ephemeral external IP address - fewer data: less data - file name: filename - firewalls: firewall rules - functionality: capability|feature - Google account: Google Account - Google accounts: Google Accounts - Googling: search with Google - grayed-out: unavailable - HTTPs: HTTPS - in order to: to - ingest: import|load - k8s: Kubernetes - long press: touch & hold - network IP address: internal IP address - omnibox: address bar - open-source: open source - overview screen: recents screen - regex: regular expression - SHA1: SHA-1|HAS-SHA1 - sign into: sign in to - sign-?on: single sign-on - static IP address: static 
external IP address - stylesheet: style sheet - synch: sync - tablename: table name - tablet: device - touch: tap - url: URL - vs\.: versus - World Wide Web: web diff --git a/tests/vale/styles/Google/meta.json b/tests/vale/styles/Google/meta.json deleted file mode 100644 index a5da2a848..000000000 --- a/tests/vale/styles/Google/meta.json +++ /dev/null @@ -1,4 +0,0 @@ -{ - "feed": "https://github.com/errata-ai/Google/releases.atom", - "vale_version": ">=1.0.0" -} diff --git a/tests/vale/styles/Google/vocab.txt b/tests/vale/styles/Google/vocab.txt deleted file mode 100644 index e69de29bb..000000000 diff --git a/tests/vale/styles/vocab.txt b/tests/vale/styles/vocab.txt deleted file mode 100644 index 862e748be..000000000 --- a/tests/vale/styles/vocab.txt +++ /dev/null @@ -1,52 +0,0 @@ -AWS -Conda -Conda-Pack -Conda-Store -DockerHub -EFS -GCP -GitHub -IP -Starlette -FastAPI -JupyterHub -JupyterLab -MinIO -NumPy -PostgreSQL -SQLAlchemy -SQLite -Traitlets -YAML -cpu -filestore -hardlinks -hostname -kustomize -lockfile -minikube -namespace -namespaces -nfs -noarch -systemd -url -libvirt -conda-lock -Azure -prefetch -repodata -uid -gid -glibc -presigned -OAuthenticator -RBAC -Prometheus -Micromamba -pytest -lorri -nixery -PyPi -Traefik -Nginx