diff --git a/.gitattributes b/.gitattributes new file mode 100644 index 000000000..a9a66543c --- /dev/null +++ b/.gitattributes @@ -0,0 +1,2 @@ +# Shell scripts +*.sh text eol=lf diff --git a/.github/.release-please-manifest.json b/.github/.release-please-manifest.json index 49bcca2a8..c69cd293a 100644 --- a/.github/.release-please-manifest.json +++ b/.github/.release-please-manifest.json @@ -1,3 +1,3 @@ { - ".": "4.5.0" + ".": "4.6.0" } diff --git a/.github/CONTRIBUTING.md b/.github/CONTRIBUTING.md new file mode 100644 index 000000000..12c8d19fa --- /dev/null +++ b/.github/CONTRIBUTING.md @@ -0,0 +1,82 @@ +# Contributing to `testcontainers-python` + +Welcome to the `testcontainers-python` community! +This should give you an idea about how we build, test and release `testcontainers-python`! + +We highly recommend reading this document thoroughly to understand what we're working on right now +and what our priorities are before you try to contribute something. + +This will greatly increase your chances of getting prompt replies, as the maintainers are volunteers themselves. + +## Before you Begin + +We recommend following these steps: + +1. Finish reading this document. +2. Read the [recently updated issues][1]. +3. Look for existing issues on the subject you are interested in - we do our best to label everything correctly. + + +## Local Development + +### Pre-Requisites + +You need to have the following tools available to you: - `make` - You'll need GNU Make for common developer activities - `poetry` - This is the primary package manager for the project - `pyenv` **Recommended**: For installing python versions for your system. Poetry infers the current latest version from what it can find on the `PATH` so you are still fine if you don't use `pyenv`. ### Build and test - Run `make install` to get `poetry` to install all dependencies and set up `pre-commit` - **Recommended**: Run `make` or `make help` to see other commands available to you. - After this, you should have a working virtual environment and can proceed to write code in your favourite IDE - **TIP**: You can run `make core/tests` or `make modules/<module>/tests` to run the tests for a specific package and speed up feedback cycles - You can also run `make lint` to run the `pre-commit` checks for the entire codebase. ## Adding new containers We have an [issue template](.github/ISSUE_TEMPLATE/new-container.md) for adding new containers, please refer to that for more information. Once you've talked to the maintainers (we do our best to reply!) then you can proceed with contributing the new container. > [!WARNING] > Please raise an issue before you try to contribute a new container! It helps maintainers understand your use-case and motivation. > This way we can keep pull requests focused on the "how", not the "why"! :pray: > It also gives maintainers a chance to give you last-minute guidance on caveats or expectations, particularly with > new extra dependencies and how to manage them. ## Raising Issues We have [Issue Templates][2] to cover most cases, please try to adhere to them, they will guide you through the process. Try to look through the existing issues before you raise a new one. ## Releasing Versions We have automated Semantic Versioning and release via [release-please](workflows/release-please.yml).
+ This takes care of: - Detecting the next version, based on the commits that landed on `main` - When a Release PR has been merged - Create a GitHub Release with the CHANGELOG included - Update the [CHANGELOG](../CHANGELOG.md), similar to the GitHub Release - Release to PyPI via a [trusted publisher](https://docs.pypi.org/trusted-publishers/using-a-publisher/) - Automatically script updates in files where it's needed instead of hand-crafting them (e.g. in `pyproject.toml`) > [!IMPORTANT] > Community modules are supported on a best-effort basis and, for maintenance reasons, any change to them > is only covered under minor and patch changes. > > Community module changes DO NOT contribute to major version changes! > > If your community module container was broken by a minor or patch version change, check out the change logs! # Thank you! Thanks for reading, feedback on documentation is always welcome! [1]: https://github.com/testcontainers/testcontainers-python/issues?q=is%3Aissue+is%3Aopen+sort%3Aupdated-desc "Recently Updated Issues showing you what we're focusing on" [2]: https://github.com/testcontainers/testcontainers-python/issues/new/choose "List of current issue templates, please use them" diff --git a/.github/ISSUE_TEMPLATE/new-container.md b/.github/ISSUE_TEMPLATE/new-container.md new file mode 100644 index 000000000..b089c2e18 --- /dev/null +++ b/.github/ISSUE_TEMPLATE/new-container.md @@ -0,0 +1,26 @@ +--- +name: New Container +about: Tell the Testcontainers-Python team about a container you'd like to have support for. +title: 'New Container: ' +labels: '🚀 enhancement' +assignees: '' + +--- + + + +**What is the new container you'd like to have?** + +Please link the relevant Docker images as well as documentation/arguments for the benefits of having this container. + +**Why not just use a generic container for this?** + +Please describe why the `DockerContainer("my-image:latest")` approach is not useful enough. + +Having a dedicated `TestContainer` usually means the need for some or all of these: +- complicated setup/configuration +- the wait strategy is complex for the container, usually more than just an HTTP wait + +**Other references:** + +Include any other relevant reading material about the enhancement. diff --git a/.github/ISSUE_TEMPLATE/question.md b/.github/ISSUE_TEMPLATE/question.md index 9a7af6ead..b05282d90 100644 --- a/.github/ISSUE_TEMPLATE/question.md +++ b/.github/ISSUE_TEMPLATE/question.md @@ -9,27 +9,33 @@ assignees: '' -**What are you trying to do?** +## What are you trying to do? Ask your question here -**Where are you trying to do it?** +## Where are you trying to do it? Provide a self-contained code snippet that illustrates the bug or unexpected behavior. Ideally, include a link to a public repository with a minimal project where someone from the testcontainers-python team can submit a PR with a solution to the problem you are facing with the library. -**Runtime environment** +## Runtime environment -Provide a summary of your runtime environment. Which operating system, python version, and docker version are you using? What is the version of `testcontainers-python` you are using? You can run the following commands to get the relevant information. +Provide a summary of your runtime environment. Which operating system, python version, and docker version are you using? +What is the version of `testcontainers-python` you are using? You can run the following commands to get the relevant information.
+ +Paste the results of the commands below + +```bash +uname -a +echo "------" +docker info +echo "------" +poetry run python --version +echo "------" +poetry show --tree +``` ```bash -# Get the operating system information (on a unix os). -$ uname -a -# Get the python version. -$ python --version -# Get the docker version and other docker information. -$ docker info -# Get all python packages. -$ pip freeze +paste-me-here ``` diff --git a/.github/PULL_REQUEST_TEMPLATE/new_container.md b/.github/PULL_REQUEST_TEMPLATE/new_container.md index 29b8190d4..27057310d 100644 --- a/.github/PULL_REQUEST_TEMPLATE/new_container.md +++ b/.github/PULL_REQUEST_TEMPLATE/new_container.md @@ -1,8 +1,40 @@ -You have implemented a new container and would like to contribute it? Great! Here are the necessary steps. - -- [ ] Create a new feature directory and populate it with the package structure [described in the documentation](https://testcontainers-python.readthedocs.io/en/latest/#package-structure). Copying one of the existing features is likely the best way to get started. -- [ ] Implement the new feature (typically in `__init__.py`) and corresponding tests. -- [ ] Update the feature `README.rst` and add it to the table of contents (`toctree` directive) in the top-level `README.rst`. -- [ ] Add a line `[feature name]` to the list of components in the GitHub Action workflow in `.github/workflows/main.yml` to run tests, build, and publish your package when pushed to the `main` branch. -- [ ] Rebase your development branch on `main` (or merge `main` into your development branch). -- [ ] Add a line `-e file:[feature name]` to `requirements.in` and open a pull request. Opening a pull request will automatically generate lock files to ensure reproducible builds (see the [pip-tools documentation](https://pip-tools.readthedocs.io/en/latest/) for details). Finally, run `python get_requirements.py --pr=[your PR number]` to fetch the updated requirement files (the build needs to have succeeded). +# New Container + + + +Fixes ... + + + + +# PR Checklist + +- [ ] Your PR title follows the [Conventional Commits](https://www.conventionalcommits.org/en/v1.0.0/) syntax + as we make use of this for detecting Semantic Versioning changes. +- [ ] Your PR allows maintainers to edit your branch; this will speed up resolving minor issues! +- [ ] The new container is implemented under `modules/*` + - Your module follows [PEP 420](https://peps.python.org/pep-0420/) with implicit namespace packages + (if unsure, look at other existing community modules) + - Your package namespacing follows `testcontainers.<module name>.*` + and you DO NOT have an `__init__.py` above your module's level. + - Your module has its own tests under `modules/*/tests` + - Your module has a `README.rst` and hooks in the `.. autoclass::` and `.. title::` of your container + - Implement the new feature (typically in `__init__.py`) and corresponding tests. +- [ ] Your module is added in `pyproject.toml` + - it is declared under `tool.poetry.packages` - see other community modules + - it is declared under `tool.poetry.extras` with the same name as your module name, + we still prefer adding _NO EXTRA DEPENDENCIES_, meaning `mymodule = []` is the preferred addition + (see the notes at the bottom) +- [ ] The `INDEX.rst` at the project root includes your module under the `..
toctree` directive +- [ ] Your branch is up to date (or we'll use GH's "update branch" function through the UI) + +# Preferred implementation + +- The current consensus among maintainers is to try to avoid enforcing a specific client library + for the given tool you are trying to implement. +- This means we want you to avoid adding specific libraries as dependencies to `testcontainers`. +- Therefore, you should implement the configuration and the waiting with as few extra dependencies as possible +- You may still find it useful to add your preferred client library as a dev dependency diff --git a/.github/workflows/ci-core.yml b/.github/workflows/ci-core.yml index f794f98e3..0f6a5e4e2 100644 --- a/.github/workflows/ci-core.yml +++ b/.github/workflows/ci-core.yml @@ -9,7 +9,7 @@ on: branches: [main] jobs: - test: + run-tests-and-coverage: runs-on: ubuntu-22.04 strategy: fail-fast: false @@ -27,5 +27,34 @@ run: poetry build && poetry run twine check dist/*.tar.gz - name: Run tests run: make core/tests + - name: Rename coverage file + run: mv .coverage .coverage.${{ matrix.python-version }} + - name: "Save coverage artifact" + uses: actions/upload-artifact@v4 + with: + name: "coverage-artifact-${{ matrix.python-version }}" + path: ".coverage.*" + retention-days: 1 - name: Run doctests run: make core/doctests + + coverage-compile: + needs: "run-tests-and-coverage" + runs-on: ubuntu-22.04 + steps: + - uses: actions/checkout@v4 + - name: Set up Python + uses: ./.github/actions/setup-env + - name: Install Python dependencies + run: poetry install --all-extras + - name: "Download coverage artifacts" + uses: actions/download-artifact@v4 + with: + pattern: "coverage-artifact-*" + merge-multiple: true + - name: Compile coverage + run: make coverage + - name: Upload coverage to Codecov + uses: codecov/codecov-action@v4 + with: + token: ${{ secrets.CODECOV_TOKEN }} diff --git a/CHANGELOG.md b/CHANGELOG.md index 1f0f003de..120adbeee 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,5 +1,35 @@ # Changelog +## [4.6.0](https://github.com/testcontainers/testcontainers-python/compare/testcontainers-v4.5.1...testcontainers-v4.6.0) (2024-06-18) + + +### Features + +* **core:** Added ServerContainer ([#595](https://github.com/testcontainers/testcontainers-python/issues/595)) ([0768490](https://github.com/testcontainers/testcontainers-python/commit/076849015ad3542384ecf8cf6c205d5d498e4986)) +* **core:** Image build (Dockerfile support) ([#585](https://github.com/testcontainers/testcontainers-python/issues/585)) ([54c88cf](https://github.com/testcontainers/testcontainers-python/commit/54c88cf00ad7bb08eb7894c52bed7a9010fd7786)) + + +### Bug Fixes + +* Add Cockroach DB Module to Testcontainers ([#608](https://github.com/testcontainers/testcontainers-python/issues/608)) ([4aff679](https://github.com/testcontainers/testcontainers-python/commit/4aff6793f28fbeb8358adcc728283ea9a7b94e5f)) +* Container for Milvus database ([#606](https://github.com/testcontainers/testcontainers-python/issues/606)) ([ec76df2](https://github.com/testcontainers/testcontainers-python/commit/ec76df27c3d95ac1b79df3a049b4e2c12539081d)) +* move TESTCONTAINERS_HOST_OVERRIDE to config.py ([#603](https://github.com/testcontainers/testcontainers-python/issues/603)) ([2a5a190](https://github.com/testcontainers/testcontainers-python/commit/2a5a1904391020a9da4be17b32f23b36d9385c29)), closes [#602](https://github.com/testcontainers/testcontainers-python/issues/602) +*
**mqtt:** Add mqtt.MosquittoContainer ([#568](https://github.com/testcontainers/testcontainers-python/issues/568)) ([#599](https://github.com/testcontainers/testcontainers-python/issues/599)) ([59cb6fc](https://github.com/testcontainers/testcontainers-python/commit/59cb6fc4e7d93870ff2d0d961d14ccd5142a8a05)) + + ### Documentation + +* **main:** Private registry ([#598](https://github.com/testcontainers/testcontainers-python/issues/598)) ([9045c0a](https://github.com/testcontainers/testcontainers-python/commit/9045c0aea6029283490c89aea985e625dcdfc7b9)) +* Update private registry instructions ([#604](https://github.com/testcontainers/testcontainers-python/issues/604)) ([f5a019b](https://github.com/testcontainers/testcontainers-python/commit/f5a019b6d2552788478e4a10cd17f7a2b453abb9)) + +## [4.5.1](https://github.com/testcontainers/testcontainers-python/compare/testcontainers-v4.5.0...testcontainers-v4.5.1) (2024-05-31) + + +### Bug Fixes + +* **k3s:** add configuration parameter for disabling cgroup mount to avoid "unable to apply cgroup configuration" ([#592](https://github.com/testcontainers/testcontainers-python/issues/592)) ([8917772](https://github.com/testcontainers/testcontainers-python/commit/8917772d8c90d26086af3b9606657c95928e2b9d)) +* **keycloak:** realm import ([#584](https://github.com/testcontainers/testcontainers-python/issues/584)) ([111bd09](https://github.com/testcontainers/testcontainers-python/commit/111bd094428b83233d7eca693d94e10b34ee8ae8)) ## [4.5.0](https://github.com/testcontainers/testcontainers-python/compare/testcontainers-v4.4.1...testcontainers-v4.5.0) (2024-05-25) diff --git a/Dockerfile b/Dockerfile index 4172f86fe..c86c9e2df 100644 --- a/Dockerfile +++ b/Dockerfile @@ -1,5 +1,5 @@ -ARG version=3.8 -FROM python:${version} +ARG PYTHON_VERSION +FROM python:${PYTHON_VERSION}-slim-bookworm WORKDIR /workspace RUN pip install --upgrade pip \ @@ -7,7 +7,10 @@ RUN pip install --upgrade pip \ && apt-get install -y \ freetds-dev \ && rm -rf /var/lib/apt/lists/* + +# install requirements we exported from poetry COPY build/requirements.txt requirements.txt -COPY setup.py README.rst ./ RUN pip install -r requirements.txt + +# copy project source COPY . . diff --git a/Makefile b/Makefile index 1816f64b9..9a4fd6f94 100644 --- a/Makefile +++ b/Makefile @@ -1,73 +1,78 @@ -PYTHON_VERSIONS = 3.9 3.10 3.11 +.DEFAULT_GOAL := help + + PYTHON_VERSION ?= 3.10 IMAGE = testcontainers-python:${PYTHON_VERSION} -RUN = docker run --rm -it -# Get all directories that contain a setup.py and get the directory name. PACKAGES = core $(addprefix modules/,$(notdir $(wildcard modules/*))) -# All */dist folders for each of the packages. -DISTRIBUTIONS = $(addsuffix /dist,${PACKAGES}) UPLOAD = $(addsuffix /upload,${PACKAGES}) -# All */tests folders for each of the test suites. TESTS = $(addsuffix /tests,$(filter-out meta,${PACKAGES})) TESTS_DIND = $(addsuffix -dind,${TESTS}) DOCTESTS = $(addsuffix /doctests,$(filter-out modules/README.md,${PACKAGES})) -# All linting targets. -LINT = $(addsuffix /lint,${PACKAGES}) - -# Targets to build a distribution for each package. -dist: ${DISTRIBUTIONS} -${DISTRIBUTIONS} : %/dist : %/setup.py - cd $* \ - && python setup.py bdist_wheel \ - && twine check dist/* - -# Targets to run the test suite for each package.
-tests : ${TESTS} -${TESTS} : %/tests : + + +install: ## Set up the project for development + poetry install --all-extras + poetry run pre-commit install + +build: ## Build the python package + poetry build && poetry run twine check dist/* + +tests: ${TESTS} ## Run tests for each package +${TESTS}: %/tests: poetry run pytest -v --cov=testcontainers.$* $*/tests -# Target to lint the code. -lint: - pre-commit run -a - -# Targets to publish packages. -upload : ${UPLOAD} -${UPLOAD} : %/upload : - if [ ${TWINE_REPOSITORY}-$* = testpypi-meta ]; then \ - echo "Cannot upload meta package to testpypi because of missing permissions."; \ - else \ - twine upload --non-interactive --skip-existing $*/dist/*; \ - fi - -# Targets to build docker images -image: +coverage: ## Target to combine and report coverage. + poetry run coverage combine + poetry run coverage report + poetry run coverage xml + poetry run coverage html + +lint: ## Lint all files in the project, which we also run in pre-commit + poetry run pre-commit run -a + +image: ## Make the docker image for dind tests poetry export -f requirements.txt -o build/requirements.txt - docker build --build-arg version=${PYTHON_VERSION} -t ${IMAGE} . + docker build --build-arg PYTHON_VERSION=${PYTHON_VERSION} -t ${IMAGE} . -# Targets to run tests in docker containers -tests-dind : ${TESTS_DIND} +DOCKER_RUN = docker run --rm -v /var/run/docker.sock:/var/run/docker.sock -${TESTS_DIND} : %/tests-dind : image - ${RUN} -v /var/run/docker.sock:/var/run/docker.sock ${IMAGE} \ - bash -c "make $*/lint $*/tests" +tests-dind: ${TESTS_DIND} ## Run the tests in docker containers to test `dind` +${TESTS_DIND}: %/tests-dind: image + ${DOCKER_RUN} ${IMAGE} \ + bash -c "make $*/tests" -# Target to build the documentation -docs : +docs: ## Build the docs for the project poetry run sphinx-build -nW . docs/_build -doctests : ${DOCTESTS} +# Target to build docs watching for changes as per https://stackoverflow.com/a/21389615 +docs-watch : + poetry run sphinx-autobuild . docs/_build # requires 'pip install sphinx-autobuild' + +doctests: ${DOCTESTS} ## Run doctests found across the documentation. poetry run sphinx-build -b doctest . docs/_build -${DOCTESTS} : %/doctests : +${DOCTESTS}: %/doctests: ## Run doctests found for a module. poetry run sphinx-build -b doctest -c doctests $* docs/_build -# Remove any generated files. -clean : + +clean: ## Remove generated files. rm -rf docs/_build - rm -rf */build - rm -rf */dist + rm -rf build + rm -rf dist rm -rf */*.egg-info +clean-all: clean ## Remove all generated files and reset the local virtual environment + rm -rf .venv + # Targets that do not generate file-level artifacts. -.PHONY : clean dists ${DISTRIBUTIONS} docs doctests image tests ${TESTS} +.PHONY: clean docs doctests image tests ${TESTS} + + +# Implements this pattern for autodocumenting Makefiles: +# https://marmelab.com/blog/2016/02/29/auto-documented-makefile.html +# +# Picks up all comments that start with a ## and are at the end of a target definition line. 
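+# For example, with the targets above, `make help` prints each annotated target name (e.g. `install`) padded to 30 characters, followed by its ## description.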
.PHONY: help help: ## Display command usage @grep -E '^[0-9a-zA-Z_-]+:.*?## .*$$' $(MAKEFILE_LIST) | sort | awk 'BEGIN {FS = ":.*?## "}; {printf "\033[36m%-30s\033[0m %s\n", $$1, $$2}' diff --git a/README.md b/README.md index 036723d61..cec096a47 100644 --- a/README.md +++ b/README.md @@ -1,3 +1,15 @@ +[![Poetry](https://img.shields.io/endpoint?url=https://python-poetry.org/badge/v0.json)](https://python-poetry.org/) +[![Ruff](https://img.shields.io/endpoint?url=https://raw.githubusercontent.com/astral-sh/ruff/main/assets/badge/v2.json)](https://github.com/astral-sh/ruff) +![PyPI - Version](https://img.shields.io/pypi/v/testcontainers) +[![PyPI - License](https://img.shields.io/pypi/l/testcontainers.svg)](https://github.com/testcontainers/testcontainers-python/blob/main/LICENSE) +[![PyPI - Python Version](https://img.shields.io/pypi/pyversions/testcontainers.svg)](https://pypi.python.org/pypi/testcontainers) +[![codecov](https://codecov.io/gh/testcontainers/testcontainers-python/branch/master/graph/badge.svg)](https://codecov.io/gh/testcontainers/testcontainers-python) +![Core Tests](https://github.com/testcontainers/testcontainers-python/actions/workflows/ci-core.yml/badge.svg) +![Community Tests](https://github.com/testcontainers/testcontainers-python/actions/workflows/ci-community.yml/badge.svg) +[![Docs](https://readthedocs.org/projects/testcontainers-python/badge/?version=latest)](http://testcontainers-python.readthedocs.io/en/latest/?badge=latest) + +[![Codespace](https://github.com/codespaces/badge.svg)](https://codespaces.new/testcontainers/testcontainers-python) + # Testcontainers Python `testcontainers-python` facilitates the use of Docker containers for functional and integration testing. @@ -23,6 +35,10 @@ For more information, see [the docs][readthedocs]. The snippet above will spin up a postgres database in a container. The `get_connection_url()` convenience method returns a `sqlalchemy` compatible url we use to connect to the database and retrieve the database version. +## Contributing / Development / Release + +See [CONTRIBUTING.md](.github/CONTRIBUTING.md) for more details. + ## Configuration | Env Variable | Example | Description | diff --git a/core/README.rst b/core/README.rst index 2256bd204..8cc9a2780 100644 --- a/core/README.rst +++ b/core/README.rst @@ -1,6 +1,32 @@ -testcontainers-core +Testcontainers Core =================== :code:`testcontainers-core` is the core functionality for spinning up Docker containers in test environments. .. autoclass:: testcontainers.core.container.DockerContainer + +.. autoclass:: testcontainers.core.image.DockerImage + +.. autoclass:: testcontainers.core.generic.DbContainer + +.. raw:: html + <hr>
+ +Examples +-------- + +Using `DockerContainer` and `DockerImage` to create a container: + +.. doctest:: + + >>> from testcontainers.core.container import DockerContainer + >>> from testcontainers.core.waiting_utils import wait_for_logs + >>> from testcontainers.core.image import DockerImage + + >>> with DockerImage(path="./core/tests/image_fixtures/sample/", tag="test-sample:latest") as image: + ... with DockerContainer(str(image)) as container: + ... delay = wait_for_logs(container, "Test Sample Image") + +The `DockerImage` class is used to build the image from the specified path and tag. +The `DockerContainer` class is then used to create a container from the image. diff --git a/core/testcontainers/compose/compose.py b/core/testcontainers/compose/compose.py index 951aee6d3..08dd313a4 100644 --- a/core/testcontainers/compose/compose.py +++ b/core/testcontainers/compose/compose.py @@ -236,6 +236,10 @@ def stop(self, down=True) -> None: down_cmd += ["down", "--volumes"] else: down_cmd += ["stop"] + + if self.services: + down_cmd.extend(self.services) + self._run_command(cmd=down_cmd) def get_logs(self, *services: str) -> tuple[str, str]: diff --git a/core/testcontainers/core/config.py b/core/testcontainers/core/config.py index 5e038b451..3522b91f0 100644 --- a/core/testcontainers/core/config.py +++ b/core/testcontainers/core/config.py @@ -14,6 +14,7 @@ RYUK_DISABLED: bool = environ.get("TESTCONTAINERS_RYUK_DISABLED", "false") == "true" RYUK_DOCKER_SOCKET: str = environ.get("TESTCONTAINERS_DOCKER_SOCKET_OVERRIDE", "/var/run/docker.sock") RYUK_RECONNECTION_TIMEOUT: str = environ.get("RYUK_RECONNECTION_TIMEOUT", "10s") +TC_HOST_OVERRIDE: Optional[str] = environ.get("TC_HOST", environ.get("TESTCONTAINERS_HOST_OVERRIDE")) TC_FILE = ".testcontainers.properties" TC_GLOBAL = Path.home() / TC_FILE @@ -52,12 +53,18 @@ class TestcontainersConfiguration: ryuk_reconnection_timeout: str = RYUK_RECONNECTION_TIMEOUT tc_properties: dict[str, str] = field(default_factory=read_tc_properties) _docker_auth_config: Optional[str] = field(default_factory=lambda: environ.get("DOCKER_AUTH_CONFIG")) + tc_host_override: Optional[str] = TC_HOST_OVERRIDE + """ + https://github.com/testcontainers/testcontainers-go/blob/dd76d1e39c654433a3d80429690d07abcec04424/docker.go#L644 + if os env TC_HOST is set, use it + """ @property def docker_auth_config(self): - if "DOCKER_AUTH_CONFIG" in _WARNINGS: + config = self._docker_auth_config + if config and "DOCKER_AUTH_CONFIG" in _WARNINGS: warning(_WARNINGS.pop("DOCKER_AUTH_CONFIG")) - return self._docker_auth_config + return config @docker_auth_config.setter def docker_auth_config(self, value: str): diff --git a/core/testcontainers/core/docker_client.py b/core/testcontainers/core/docker_client.py index 485adb594..9b7fe7479 100644 --- a/core/testcontainers/core/docker_client.py +++ b/core/testcontainers/core/docker_client.py @@ -16,10 +16,12 @@ import os import urllib import urllib.parse +from collections.abc import Iterable from typing import Callable, Optional, TypeVar, Union import docker from docker.models.containers import Container, ContainerCollection +from docker.models.images import Image, ImageCollection from typing_extensions import ParamSpec from testcontainers.core.config import testcontainers_config as c @@ -40,6 +42,14 @@ def wrapper(*args: _P.args, **kwargs: _P.kwargs) -> _T: return wrapper +def _wrapped_image_collection(function: Callable[_P, _T]) -> Callable[_P, _T]: + @ft.wraps(ImageCollection.build) + def wrapper(*args: _P.args, **kwargs: _P.kwargs) -> 
_T: return function(*args, **kwargs) return wrapper class DockerClient: """ Thin wrapper around :class:`docker.DockerClient` for a more functional interface. """ @@ -94,6 +104,17 @@ def run( ) return container + @_wrapped_image_collection + def build(self, path: str, tag: str, rm: bool = True, **kwargs) -> tuple[Image, Iterable[dict]]: + """ + Build a Docker image from a directory containing the Dockerfile. + + :return: A tuple containing the image object and the build logs. + """ + image_object, image_logs = self.client.images.build(path=path, tag=tag, rm=rm, **kwargs) + + return image_object, image_logs + def find_host_network(self) -> Optional[str]: """ Try to find the docker host network. @@ -166,18 +187,14 @@ def host(self) -> str: """ Get the hostname or ip address of the docker host. """ - # https://github.com/testcontainers/testcontainers-go/blob/dd76d1e39c654433a3d80429690d07abcec04424/docker.go#L644 - # if os env TC_HOST is set, use it - host = os.environ.get("TC_HOST") - if not host: - host = os.environ.get("TESTCONTAINERS_HOST_OVERRIDE") + host = c.tc_host_override if host: return host try: url = urllib.parse.urlparse(self.client.api.base_url) except ValueError: - return None + return "localhost" if "http" in url.scheme or "tcp" in url.scheme: return url.hostname if inside_container() and ("unix" in url.scheme or "npipe" in url.scheme): diff --git a/core/testcontainers/core/generic.py b/core/testcontainers/core/generic.py index 6dd635e69..b2cd3010d 100644 --- a/core/testcontainers/core/generic.py +++ b/core/testcontainers/core/generic.py @@ -29,6 +29,8 @@ class DbContainer(DockerContainer): """ + **DEPRECATED (for removal)** + Generic database container. """ diff --git a/core/testcontainers/core/image.py b/core/testcontainers/core/image.py new file mode 100644 index 000000000..6d793f83e --- /dev/null +++ b/core/testcontainers/core/image.py @@ -0,0 +1,98 @@ +from os import PathLike +from typing import TYPE_CHECKING, Optional, Union + +from typing_extensions import Self + +from testcontainers.core.docker_client import DockerClient +from testcontainers.core.utils import setup_logger + +if TYPE_CHECKING: + from docker.models.images import Image + +logger = setup_logger(__name__) + + +class DockerImage: + """ + Basic image object to build Docker images. + + .. doctest:: + + >>> from testcontainers.core.image import DockerImage + + >>> with DockerImage(path="./core/tests/image_fixtures/sample/", tag="test-image") as image: + ...
logs = image.get_logs() + + :param tag: Tag for the image to be built (default: None) + :param path: Path to the build context + :param dockerfile_path: Path to the Dockerfile within the build context path (default: Dockerfile) + :param no_cache: Bypass build cache; CLI's --no-cache + """ + + def __init__( + self, + path: Union[str, PathLike], + docker_client_kw: Optional[dict] = None, + tag: Optional[str] = None, + clean_up: bool = True, + dockerfile_path: Union[str, PathLike] = "Dockerfile", + no_cache: bool = False, + **kwargs, + ) -> None: + self.tag = tag + self.path = path + self._docker = DockerClient(**(docker_client_kw or {})) + self.clean_up = clean_up + self._kwargs = kwargs + self._image = None + self._logs = None + self._dockerfile_path = dockerfile_path + self._no_cache = no_cache + + def build(self, **kwargs) -> Self: + logger.info(f"Building image from {self.path}") + docker_client = self.get_docker_client() + self._image, self._logs = docker_client.build( + path=str(self.path), tag=self.tag, dockerfile=self._dockerfile_path, nocache=self._no_cache, **kwargs + ) + logger.info(f"Built image {self.short_id} with tag {self.tag}") + return self + + @property + def short_id(self) -> str: + """ + The ID of the image truncated to 12 characters, without the ``sha256:`` prefix. + """ + if self._image.id.startswith("sha256:"): + return self._image.id.split(":")[1][:12] + return self._image.id[:12] + + def remove(self, force=True, noprune=False) -> None: + """ + Remove the image. + + :param force: Remove the image even if it is in use + :param noprune: Do not delete untagged parent images + """ + if self._image and self.clean_up: + logger.info(f"Removing image {self.short_id}") + self._image.remove(force=force, noprune=noprune) + self.get_docker_client().client.close() + + def __str__(self) -> str: + return f"{self.tag if self.tag else self.short_id}" + + def __enter__(self) -> Self: + return self.build() + + def __exit__(self, exc_type, exc_val, exc_tb) -> None: + self.remove() + + def get_wrapped_image(self) -> "Image": + return self._image + + def get_docker_client(self) -> DockerClient: + return self._docker + + def get_logs(self) -> list[dict]: + return list(self._logs) diff --git a/core/testcontainers/core/version.py b/core/testcontainers/core/version.py new file mode 100644 index 000000000..cac51fc18 --- /dev/null +++ b/core/testcontainers/core/version.py @@ -0,0 +1,30 @@ +from typing import Callable + +from packaging.version import Version + + +class ComparableVersion: + def __init__(self, version): + self.version = Version(version) + + def __lt__(self, other: str): + return self._apply_op(other, lambda x, y: x < y) + + def __le__(self, other: str): + return self._apply_op(other, lambda x, y: x <= y) + + def __eq__(self, other: str): + return self._apply_op(other, lambda x, y: x == y) + + def __ne__(self, other: str): + return self._apply_op(other, lambda x, y: x != y) + + def __gt__(self, other: str): + return self._apply_op(other, lambda x, y: x > y) + + def __ge__(self, other: str): + return self._apply_op(other, lambda x, y: x >= y) + + def _apply_op(self, other: str, op: Callable[[Version, Version], bool]): + other = Version(other) + return op(self.version, other) diff --git a/core/tests/compose_fixtures/basic_multiple/docker-compose.yaml b/core/tests/compose_fixtures/basic_multiple/docker-compose.yaml new file mode 100644 index 000000000..38bd92b94 --- /dev/null +++ b/core/tests/compose_fixtures/basic_multiple/docker-compose.yaml @@ -0,0 +1,15 @@ +services: + alpine1: + 
image: alpine:latest init: true command: - sh - -c - 'while true; do sleep 0.1 ; date -Ins; done' alpine2: image: alpine:latest init: true command: - sh - -c - 'while true; do sleep 0.1 ; date -Ins; done' diff --git a/core/tests/conftest.py b/core/tests/conftest.py new file mode 100644 index 000000000..4f69565f4 --- /dev/null +++ b/core/tests/conftest.py @@ -0,0 +1,22 @@ +import pytest +from typing import Callable +from testcontainers.core.container import DockerClient + + +@pytest.fixture +def check_for_image() -> Callable[[str, bool], None]: + """Wrap the check_for_image function in a fixture""" + + def _check_for_image(image_short_id: str, cleaned: bool) -> None: + """ + Validates if the image is present or not. + + :param image_short_id: The short id of the image + :param cleaned: True if the image should not be present, False otherwise + """ + client = DockerClient() + images = client.client.images.list() + found = any(image.short_id.endswith(image_short_id) for image in images) + assert found is not cleaned, f'Image {image_short_id} was {"found" if cleaned else "not found"}' + + return _check_for_image diff --git a/core/tests/Dockerfile b/core/tests/image_fixtures/busybox/Dockerfile similarity index 100% rename from core/tests/Dockerfile rename to core/tests/image_fixtures/busybox/Dockerfile diff --git a/core/tests/image_fixtures/sample/Dockerfile b/core/tests/image_fixtures/sample/Dockerfile new file mode 100644 index 000000000..d7d786035 --- /dev/null +++ b/core/tests/image_fixtures/sample/Dockerfile @@ -0,0 +1,2 @@ +FROM alpine:latest +CMD echo "Test Sample Image" diff --git a/core/tests/test_compose.py b/core/tests/test_compose.py index 0a244220b..e1a42655e 100644 --- a/core/tests/test_compose.py +++ b/core/tests/test_compose.py @@ -37,6 +37,55 @@ def test_compose_start_stop(): basic.stop() +def test_start_stop_multiple(): + """Start and stop multiple containers individually.""" + + # Create two DockerCompose instances from the same file, one service each. + dc_a = DockerCompose(context=FIXTURES / "basic_multiple", services=["alpine1"]) + dc_b = DockerCompose(context=FIXTURES / "basic_multiple", services=["alpine2"]) + + # After starting the first instance, alpine1 should be running + dc_a.start() + dc_a.get_container("alpine1") # Raises if it isn't running + dc_b.get_container("alpine1") # Raises if it isn't running + + # Both instances report the same number of containers + assert len(dc_a.get_containers()) == 1 + assert len(dc_b.get_containers()) == 1 + + # Although alpine1 is running, alpine2 has not started yet.
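+ # (get_container raises ContainerIsNotRunning for a service that is defined in the compose file but not currently running)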
+ with pytest.raises(ContainerIsNotRunning): + dc_a.get_container("alpine2") + with pytest.raises(ContainerIsNotRunning): + dc_b.get_container("alpine2") + + # After starting the second instance, alpine2 should also be running + dc_b.start() + dc_a.get_container("alpine2") # No longer raises + dc_b.get_container("alpine2") # No longer raises + assert len(dc_a.get_containers()) == 2 + assert len(dc_b.get_containers()) == 2 + + # After stopping the first instance, alpine1 should no longer be running + dc_a.stop() + dc_a.get_container("alpine2") + dc_b.get_container("alpine2") + assert len(dc_a.get_containers()) == 1 + assert len(dc_b.get_containers()) == 1 + + # alpine1 no longer running + with pytest.raises(ContainerIsNotRunning): + dc_a.get_container("alpine1") + with pytest.raises(ContainerIsNotRunning): + dc_b.get_container("alpine1") + + # Stop the second instance + dc_b.stop() + + assert len(dc_a.get_containers()) == 0 + assert len(dc_b.get_containers()) == 0 + + def test_compose(): """stream-of-consciousness e2e test""" basic = DockerCompose(context=FIXTURES / "basic") diff --git a/core/tests/test_core.py b/core/tests/test_core.py index 4ebe90409..8d0c77944 100644 --- a/core/tests/test_core.py +++ b/core/tests/test_core.py @@ -1,6 +1,13 @@ import pytest +import tempfile +import random +import os + +from pathlib import Path +from typing import Optional from testcontainers.core.container import DockerContainer +from testcontainers.core.image import DockerImage from testcontainers.core.waiting_utils import wait_for_logs @@ -31,3 +38,57 @@ def test_can_get_logs(): assert isinstance(stdout, bytes) assert isinstance(stderr, bytes) assert stdout, "There should be something on stdout" + + +@pytest.mark.parametrize("test_cleanup", [True, False]) +@pytest.mark.parametrize("test_image_tag", [None, "test-image:latest"]) +def test_docker_image(test_image_tag: Optional[str], test_cleanup: bool, check_for_image): + with tempfile.TemporaryDirectory() as temp_directory: + # It's important to use a random string to avoid image caching + random_string = "Hello from Docker Image! 
" + str(random.randint(0, 1000)) + with open(f"{temp_directory}/Dockerfile", "w") as f: + f.write( + f""" + FROM alpine:latest + CMD echo "{random_string}" + """ + ) + with DockerImage(path=temp_directory, tag=test_image_tag, clean_up=test_cleanup) as image: + image_short_id = image.short_id + assert image.tag is test_image_tag, f"Expected {test_image_tag}, got {image.tag}" + assert image.short_id is not None, "Short ID should not be None" + logs = image.get_logs() + assert isinstance(logs, list), "Logs should be a list" + assert logs[0] == {"stream": "Step 1/2 : FROM alpine:latest"} + assert logs[3] == {"stream": f'Step 2/2 : CMD echo "{random_string}"'} + with DockerContainer(str(image)) as container: + assert container._container.image.short_id.endswith(image_short_id), "Image ID mismatch" + assert container.get_logs() == ((random_string + "\n").encode(), b""), "Container logs mismatch" + + check_for_image(image_short_id, test_cleanup) + + +@pytest.mark.parametrize("dockerfile_path", [None, Path("subdir/my.Dockerfile")]) +def test_docker_image_with_custom_dockerfile_path(dockerfile_path: Optional[Path]): + with tempfile.TemporaryDirectory() as temp_directory: + temp_dir_path = Path(temp_directory) + if dockerfile_path: + os.makedirs(temp_dir_path / dockerfile_path.parent, exist_ok=True) + dockerfile_rel_path = dockerfile_path + dockerfile_kwargs = {"dockerfile_path": dockerfile_path} + else: + dockerfile_rel_path = Path("Dockerfile") # default + dockerfile_kwargs = {} + + with open(temp_dir_path / dockerfile_rel_path, "x") as f: + f.write( + f""" + FROM alpine:latest + CMD echo "Hello world!" + """ + ) + with DockerImage(path=temp_directory, tag="test", clean_up=True, no_cache=True, **dockerfile_kwargs) as image: + image_short_id = image.short_id + with DockerContainer(str(image)) as container: + assert container._container.image.short_id.endswith(image_short_id), "Image ID mismatch" + assert container.get_logs() == (("Hello world!\n").encode(), b""), "Container logs mismatch" diff --git a/core/tests/test_docker_client.py b/core/tests/test_docker_client.py index cfd95be91..9234d3062 100644 --- a/core/tests/test_docker_client.py +++ b/core/tests/test_docker_client.py @@ -9,6 +9,7 @@ from testcontainers.core.container import DockerContainer from testcontainers.core.docker_client import DockerClient from testcontainers.core.utils import parse_docker_auth_config +from testcontainers.core.image import DockerImage def test_docker_client_from_env(): @@ -54,3 +55,12 @@ def test_container_docker_client_kw(): DockerContainer(image="", docker_client_kw=test_kwargs) mock_docker.from_env.assert_called_with(**test_kwargs) + + +def test_image_docker_client_kw(): + test_kwargs = {"test_kw": "test_value"} + mock_docker = MagicMock(spec=docker) + with patch("testcontainers.core.docker_client.docker", mock_docker): + DockerImage(name="", path="", docker_client_kw=test_kwargs) + + mock_docker.from_env.assert_called_with(**test_kwargs) diff --git a/core/tests/test_registry.py b/core/tests/test_registry.py new file mode 100644 index 000000000..384b06693 --- /dev/null +++ b/core/tests/test_registry.py @@ -0,0 +1,83 @@ +"""Integration test using login to a private registry. + +Note: Using the testcontainers-python library to test the Docker registry. +This could be considered a bad practice as it is not recommended to use the same library to test itself. +However, it is a very good use case for DockerRegistryContainer and allows us to test it thoroughly. 
+""" + +import json +import os +import base64 +import pytest + +from docker.errors import NotFound + +from testcontainers.core.config import testcontainers_config +from testcontainers.core.container import DockerContainer +from testcontainers.core.docker_client import DockerClient +from testcontainers.core.waiting_utils import wait_container_is_ready + +from testcontainers.registry import DockerRegistryContainer + + +def test_missing_on_private_registry(monkeypatch): + username = "user" + password = "pass" + image = "hello-world" + tag = "test" + + with DockerRegistryContainer(username=username, password=password) as registry: + registry_url = registry.get_registry() + + # prepare auth config + creds: bytes = base64.b64encode(f"{username}:{password}".encode("utf-8")) + config = {"auths": {f"{registry_url}": {"auth": creds.decode("utf-8")}}} + monkeypatch.setattr(testcontainers_config, name="docker_auth_config", value=json.dumps(config)) + assert testcontainers_config.docker_auth_config, "docker_auth_config not set" + + with pytest.raises(NotFound): + # Test a container with image from private registry + with DockerContainer(f"{registry_url}/{image}:{tag}") as test_container: + wait_container_is_ready(test_container) + + +@pytest.mark.parametrize( + "image,tag,username,password", + [ + ("nginx", "test", "user", "pass"), + ("hello-world", "latest", "new_user", "new_pass"), + ("alpine", "3.12", None, None), + ], +) +def test_with_private_registry(image, tag, username, password, monkeypatch): + client = DockerClient().client + + with DockerRegistryContainer(username=username, password=password) as registry: + registry_url = registry.get_registry() + + # prepare image + _image = client.images.pull(image) + assert _image.tag(repository=f"{registry_url}/{image}", tag=tag), "Image not tagged" + + # login to private registry + client.login(registry=registry_url, username=username, password=password) + + # push image to private registry + client.images.push(f"{registry_url}/{image}") + + # clear local image so we will pull from private registry + client.images.remove(f"{registry_url}/{image}:{tag}") + + # prepare auth config + creds: bytes = base64.b64encode(f"{username}:{password}".encode("utf-8")) + config = {"auths": {f"{registry_url}": {"auth": creds.decode("utf-8")}}} + monkeypatch.setattr(testcontainers_config, name="docker_auth_config", value=json.dumps(config)) + assert testcontainers_config.docker_auth_config, "docker_auth_config not set" + + # Test a container with image from private registry + with DockerContainer(f"{registry_url}/{image}:{tag}") as test_container: + wait_container_is_ready(test_container) + + # cleanup + client.images.remove(f"{registry_url}/{image}:{tag}") + client.close() diff --git a/core/tests/test_version.py b/core/tests/test_version.py new file mode 100644 index 000000000..397cd0523 --- /dev/null +++ b/core/tests/test_version.py @@ -0,0 +1,78 @@ +import pytest +from packaging.version import InvalidVersion + +from testcontainers.core.version import ComparableVersion + + +@pytest.fixture +def version(): + return ComparableVersion("1.0.0") + + +@pytest.mark.parametrize("other_version, expected", [("0.9.0", False), ("1.0.0", False), ("1.1.0", True)]) +def test_lt(version, other_version, expected): + assert (version < other_version) == expected + + +@pytest.mark.parametrize("other_version, expected", [("0.9.0", False), ("1.0.0", True), ("1.1.0", True)]) +def test_le(version, other_version, expected): + assert (version <= other_version) == expected + + 
+@pytest.mark.parametrize("other_version, expected", [("0.9.0", False), ("1.0.0", True), ("1.1.0", False)]) +def test_eq(version, other_version, expected): + assert (version == other_version) == expected + + +@pytest.mark.parametrize("other_version, expected", [("0.9.0", True), ("1.0.0", False), ("1.1.0", True)]) +def test_ne(version, other_version, expected): + assert (version != other_version) == expected + + +@pytest.mark.parametrize("other_version, expected", [("0.9.0", True), ("1.0.0", False), ("1.1.0", False)]) +def test_gt(version, other_version, expected): + assert (version > other_version) == expected + + +@pytest.mark.parametrize("other_version, expected", [("0.9.0", True), ("1.0.0", True), ("1.1.0", False)]) +def test_ge(version, other_version, expected): + assert (version >= other_version) == expected + + +@pytest.mark.parametrize( + "invalid_version", + [ + "invalid", + "1..0", + ], +) +def test_invalid_version_raises_error(invalid_version): + with pytest.raises(InvalidVersion): + ComparableVersion(invalid_version) + + +@pytest.mark.parametrize( + "invalid_version", + [ + "invalid", + "1..0", + ], +) +def test_comparison_with_invalid_version_raises_error(version, invalid_version): + with pytest.raises(InvalidVersion): + assert version < invalid_version + + with pytest.raises(InvalidVersion): + assert version <= invalid_version + + with pytest.raises(InvalidVersion): + assert version == invalid_version + + with pytest.raises(InvalidVersion): + assert version != invalid_version + + with pytest.raises(InvalidVersion): + assert version > invalid_version + + with pytest.raises(InvalidVersion): + assert version >= invalid_version diff --git a/index.rst b/index.rst index 3c7fcc140..8c02832fe 100644 --- a/index.rst +++ b/index.rst @@ -15,36 +15,7 @@ testcontainers-python facilitates the use of Docker containers for functional an .. toctree:: core/README - modules/arangodb/README - modules/azurite/README - modules/cassandra/README - modules/chroma/README - modules/clickhouse/README - modules/elasticsearch/README - modules/google/README - modules/influxdb/README - modules/k3s/README - modules/kafka/README - modules/keycloak/README - modules/localstack/README - modules/memcached/README - modules/minio/README - modules/mongodb/README - modules/mssql/README - modules/mysql/README - modules/nats/README - modules/neo4j/README - modules/nginx/README - modules/opensearch/README - modules/oracle-free/README - modules/postgres/README - modules/qdrant/README - modules/rabbitmq/README - modules/redis/README - modules/registry/README - modules/selenium/README - modules/vault/README - modules/weaviate/README + modules/index Getting Started --------------- @@ -95,6 +66,17 @@ Version `4.0.0` onwards we do not support the `testcontainers-*` packages as it Instead packages can be installed by specifying `extras `__, e.g., :code:`pip install testcontainers[postgres]`. +Custom Containers +----------------- + +Crafting containers that are based on custom images is supported by the `core` module. Please check the `core documentation `_ for more information. + +This allows you to create containers from images that are not part of the modules provided by testcontainers-python. + +For common use cases, you can also use the generic containers provided by the `testcontainers-generic` module. Please check the `generic documentation `_ for more information. 
+(example: `ServerContainer` for running a FastAPI server) + + Docker in Docker (DinD) ----------------------- diff --git a/modules/cockroachdb/README.rst b/modules/cockroachdb/README.rst new file mode 100644 index 000000000..7b53fc336 --- /dev/null +++ b/modules/cockroachdb/README.rst @@ -0,0 +1,2 @@ +.. autoclass:: testcontainers.cockroachdb.CockroachDBContainer +.. title:: testcontainers.cockroachdb.CockroachDBContainer diff --git a/modules/cockroachdb/testcontainers/cockroachdb/__init__.py b/modules/cockroachdb/testcontainers/cockroachdb/__init__.py new file mode 100644 index 000000000..13a17ed5c --- /dev/null +++ b/modules/cockroachdb/testcontainers/cockroachdb/__init__.py @@ -0,0 +1,100 @@ +# +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. +from os import environ +from typing import Optional +from urllib.error import HTTPError, URLError +from urllib.request import urlopen + +from testcontainers.core.generic import DbContainer +from testcontainers.core.waiting_utils import wait_container_is_ready, wait_for_logs + + +class CockroachDBContainer(DbContainer): + """ + CockroachDB database container. + + Example: + + The example will spin up a CockroachDB database to which you can connect with the credentials + passed in the constructor. Alternatively, you may use the :code:`get_connection_url()` + method which returns a sqlalchemy-compatible url in format + :code:`dialect+driver://username:password@host:port/database`. + + .. doctest:: + + >>> import sqlalchemy + >>> from testcontainers.cockroachdb import CockroachDBContainer + + >>> with CockroachDBContainer('cockroachdb/cockroach:v24.1.1') as crdb: + ... engine = sqlalchemy.create_engine(crdb.get_connection_url()) + ... with engine.begin() as connection: + ... result = connection.execute(sqlalchemy.text("select version()")) + ... 
version, = result.fetchone() + + """ + + COCKROACH_DB_PORT: int = 26257 + COCKROACH_API_PORT: int = 8080 + + def __init__( + self, + image: str = "cockroachdb/cockroach:v24.1.1", + username: Optional[str] = None, + password: Optional[str] = None, + dbname: Optional[str] = None, + dialect="cockroachdb+psycopg2", + **kwargs, + ) -> None: + super().__init__(image, **kwargs) + + self.with_exposed_ports(self.COCKROACH_DB_PORT, self.COCKROACH_API_PORT) + self.username = username or environ.get("COCKROACH_USER", "cockroach") + self.password = password or environ.get("COCKROACH_PASSWORD", "arthropod") + self.dbname = dbname or environ.get("COCKROACH_DATABASE", "roach") + self.dialect = dialect + + def _configure(self) -> None: + self.with_env("COCKROACH_DATABASE", self.dbname) + self.with_env("COCKROACH_USER", self.username) + self.with_env("COCKROACH_PASSWORD", self.password) + + cmd = "start-single-node" + if not self.password: + cmd += " --insecure" + self.with_command(cmd) + + @wait_container_is_ready(HTTPError, URLError) + def _connect(self) -> None: + host = self.get_container_host_ip() + url = f"http://{host}:{self.get_exposed_port(self.COCKROACH_API_PORT)}/health" + self._wait_for_health(url) + wait_for_logs(self, "finished creating default user*") + + @staticmethod + def _wait_for_health(url): + with urlopen(url) as response: + response.read() + + def get_connection_url(self) -> str: + conn_str = super()._create_connection_url( + dialect=self.dialect, + username=self.username, + password=self.password, + dbname=self.dbname, + port=self.COCKROACH_DB_PORT, + ) + + if self.password: + conn_str += "?sslmode=require" + + return conn_str diff --git a/modules/cockroachdb/tests/test_cockroachdb.py b/modules/cockroachdb/tests/test_cockroachdb.py new file mode 100644 index 000000000..af20fd580 --- /dev/null +++ b/modules/cockroachdb/tests/test_cockroachdb.py @@ -0,0 +1,14 @@ +import sqlalchemy + +from testcontainers.cockroachdb import CockroachDBContainer + + +def test_docker_run_cockroachdb(): + config = CockroachDBContainer("cockroachdb/cockroach:v24.1.1") + with config as crdb: + engine = sqlalchemy.create_engine(crdb.get_connection_url()) + with engine.begin() as connection: + result = connection.execute(sqlalchemy.text("select version()")) + for row in result: + assert "CockroachDB" in row[0] + assert "v24.1.1" in row[0] diff --git a/modules/cosmosdb/README.rst b/modules/cosmosdb/README.rst new file mode 100644 index 000000000..802cffa4e --- /dev/null +++ b/modules/cosmosdb/README.rst @@ -0,0 +1,5 @@ +.. autoclass:: testcontainers.cosmosdb.CosmosDBMongoEndpointContainer +.. title:: testcontainers.cosmosdb.CosmosDBMongoEndpointContainer + +.. autoclass:: testcontainers.cosmosdb.CosmosDBNoSQLEndpointContainer +..
title:: testcontainers.cosmosdb.CosmosDBNoSQLEndpointContainer diff --git a/modules/cosmosdb/testcontainers/cosmosdb/__init__.py b/modules/cosmosdb/testcontainers/cosmosdb/__init__.py new file mode 100644 index 000000000..619ddb3b4 --- /dev/null +++ b/modules/cosmosdb/testcontainers/cosmosdb/__init__.py @@ -0,0 +1,4 @@ +from .mongodb import CosmosDBMongoEndpointContainer +from .nosql import CosmosDBNoSQLEndpointContainer + +__all__ = ["CosmosDBMongoEndpointContainer", "CosmosDBNoSQLEndpointContainer"] diff --git a/modules/cosmosdb/testcontainers/cosmosdb/_emulator.py b/modules/cosmosdb/testcontainers/cosmosdb/_emulator.py new file mode 100644 index 000000000..161a01c29 --- /dev/null +++ b/modules/cosmosdb/testcontainers/cosmosdb/_emulator.py @@ -0,0 +1,110 @@ +import os +import socket +import ssl +from collections.abc import Iterable +from distutils.util import strtobool +from urllib.error import HTTPError, URLError +from urllib.request import urlopen + +from typing_extensions import Self + +from testcontainers.core.container import DockerContainer +from testcontainers.core.waiting_utils import wait_container_is_ready, wait_for_logs + +from . import _grab as grab + +__all__ = ["CosmosDBEmulatorContainer"] + +EMULATOR_PORT = 8081 + + +class CosmosDBEmulatorContainer(DockerContainer): + """ + Abstract class for CosmosDB Emulator endpoints. + + Concrete implementations for each endpoint are provided by separate classes: + CosmosDBNoSQLEndpointContainer and CosmosDBMongoEndpointContainer. + """ + + def __init__( + self, + image: str = os.getenv( + "AZURE_COSMOS_EMULATOR_IMAGE", "mcr.microsoft.com/cosmosdb/linux/azure-cosmos-emulator:latest" + ), + partition_count: int = os.getenv("AZURE_COSMOS_EMULATOR_PARTITION_COUNT", None), + enable_data_persistence: bool = strtobool(os.getenv("AZURE_COSMOS_EMULATOR_ENABLE_DATA_PERSISTENCE", "false")), + key: str = os.getenv( + "AZURE_COSMOS_EMULATOR_KEY", + "C2y6yDjf5/R+ob0N8A7Cgv30VRDJIWEHLM+4QDU5DE2nQ9nDuVTqobD4b8mGGyPMbIZnqyMsEcaGQy67XIw/Jw==", + ), + bind_ports: bool = strtobool(os.getenv("AZURE_COSMOS_EMULATOR_BIND_PORTS", "true")), + endpoint_ports: Iterable[int] = [], + **other_kwargs, + ): + super().__init__(image=image, **other_kwargs) + self.endpoint_ports = endpoint_ports + self.partition_count = partition_count + self.key = key + self.enable_data_persistence = enable_data_persistence + self.bind_ports = bind_ports + + @property + def host(self) -> str: + """ + Emulator host + """ + return self.get_container_host_ip() + + @property + def server_certificate_pem(self) -> bytes: + """ + PEM-encoded server certificate + """ + return self._cert_pem_bytes + + def start(self) -> Self: + self._configure() + super().start() + self._wait_until_ready() + self._cert_pem_bytes = self._download_cert() + return self + + def _configure(self) -> None: + all_ports = {EMULATOR_PORT, *self.endpoint_ports} + if self.bind_ports: + for port in all_ports: + self.with_bind_ports(port, port) + else: + self.with_exposed_ports(*all_ports) + + ( + self.with_env("AZURE_COSMOS_EMULATOR_PARTITION_COUNT", str(self.partition_count)) + .with_env("AZURE_COSMOS_EMULATOR_IP_ADDRESS_OVERRIDE", socket.gethostbyname(socket.gethostname())) + .with_env("AZURE_COSMOS_EMULATOR_ENABLE_DATA_PERSISTENCE", str(self.enable_data_persistence)) + .with_env("AZURE_COSMOS_EMULATOR_KEY", str(self.key)) + ) + + def _wait_until_ready(self) -> Self: + wait_for_logs(container=self, predicate="Started\\s*$") + + if self.bind_ports: + self._wait_for_url(f"https://{self.host}:{EMULATOR_PORT}/_explorer/index.html") +
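+ # a first query through the client confirms the endpoint is actually serving requests (only possible here because bind_ports makes the ports host-reachable)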
self._wait_for_query_success() + + return self + + def _download_cert(self) -> bytes: + with grab.file( + self.get_wrapped_container(), + "/tmp/cosmos/appdata/.system/profiles/Client/AppData/Local/CosmosDBEmulator/emulator.pem", + ) as cert: + return cert.read() + + @wait_container_is_ready(HTTPError, URLError) + def _wait_for_url(self, url: str) -> Self: + with urlopen(url, context=ssl._create_unverified_context()) as response: + response.read() + return self + + def _wait_for_query_success(self) -> None: + pass diff --git a/modules/cosmosdb/testcontainers/cosmosdb/_grab.py b/modules/cosmosdb/testcontainers/cosmosdb/_grab.py new file mode 100644 index 000000000..e1895019a --- /dev/null +++ b/modules/cosmosdb/testcontainers/cosmosdb/_grab.py @@ -0,0 +1,26 @@ +import tarfile +import tempfile +from contextlib import contextmanager +from os import path +from pathlib import Path + +from docker.models.containers import Container + + +@contextmanager +def file(container: Container, target: str): + target_path = Path(target) + assert target_path.is_absolute(), "target must be an absolute path" + + with tempfile.TemporaryDirectory() as tmp: + archive = Path(tmp) / "grabbed.tar" + + # download from container as tar archive + with open(archive, "wb") as f: + tar_bits, _ = container.get_archive(target) + for chunk in tar_bits: + f.write(chunk) + + # extract target file from tar archive + with tarfile.TarFile(archive) as tar: + yield tar.extractfile(path.basename(target)) diff --git a/modules/cosmosdb/testcontainers/cosmosdb/mongodb.py b/modules/cosmosdb/testcontainers/cosmosdb/mongodb.py new file mode 100644 index 000000000..82e8c096b --- /dev/null +++ b/modules/cosmosdb/testcontainers/cosmosdb/mongodb.py @@ -0,0 +1,47 @@ +import os + +from ._emulator import CosmosDBEmulatorContainer + +__all__ = ["CosmosDBMongoEndpointContainer"] + +ENDPOINT_PORT = 10255 + + +class CosmosDBMongoEndpointContainer(CosmosDBEmulatorContainer): + """ + CosmosDB MongoDB endpoint Emulator. + + Example: + + .. code-block:: python + + >>> from testcontainers.cosmosdb import CosmosDBMongoEndpointContainer + + >>> with CosmosDBMongoEndpointContainer(mongodb_version="4.0") as emulator: + ... print(f"Point your MongoDB client at {emulator.host}:{emulator.port} using key {emulator.key}") + ...
print(f"and eiher disable TLS server auth or trust the server's self signed cert (emulator.server_certificate_pem)") + + """ + + def __init__( + self, + mongodb_version: str, + image: str = os.getenv( + "AZURE_COSMOS_EMULATOR_IMAGE", "mcr.microsoft.com/cosmosdb/linux/azure-cosmos-emulator:mongodb" + ), + **other_kwargs, + ): + super().__init__(image=image, endpoint_ports=[ENDPOINT_PORT], **other_kwargs) + assert mongodb_version is not None, "A MongoDB version is required to use the MongoDB Endpoint" + self.mongodb_version = mongodb_version + + @property + def port(self) -> str: + """ + The exposed port to the MongoDB endpoint + """ + return self.get_exposed_port(ENDPOINT_PORT) + + def _configure(self) -> None: + super()._configure() + self.with_env("AZURE_COSMOS_EMULATOR_ENABLE_MONGODB_ENDPOINT", self.mongodb_version) diff --git a/modules/cosmosdb/testcontainers/cosmosdb/nosql.py b/modules/cosmosdb/testcontainers/cosmosdb/nosql.py new file mode 100644 index 000000000..f78469674 --- /dev/null +++ b/modules/cosmosdb/testcontainers/cosmosdb/nosql.py @@ -0,0 +1,69 @@ +from azure.core.exceptions import ServiceRequestError +from azure.cosmos import CosmosClient as SyncCosmosClient +from azure.cosmos.aio import CosmosClient as AsyncCosmosClient + +from testcontainers.core.waiting_utils import wait_container_is_ready + +from ._emulator import CosmosDBEmulatorContainer + +__all__ = ["CosmosDBNoSQLEndpointContainer"] + +NOSQL_PORT = 8081 + + +class CosmosDBNoSQLEndpointContainer(CosmosDBEmulatorContainer): + """ + CosmosDB NoSQL enpoint Emulator. + + Example: + + .. code-block:: python + + >>> from testcontainers.cosmosdb import CosmosDBNoSQLEndpointContainer + >>> with CosmosDBNoSQLEndpointContainer() as emulator: + ... db = emulator.insecure_sync_client().create_database_if_not_exists("test") + + .. code-block:: python + + >>> from testcontainers.cosmosdb import CosmosDBNoSQLEndpointContainer + >>> from azure.cosmos import CosmosClient + + >>> with CosmosDBNoSQLEndpointContainer() as emulator: + ... client = CosmosClient(url=emulator.url, credential=emulator.key, connection_verify=False) + ... 
+            ...   db = client.create_database_if_not_exists("test")
+
+    """
+
+    def __init__(self, **kwargs):
+        super().__init__(endpoint_ports=[NOSQL_PORT], **kwargs)
+
+    @property
+    def port(self) -> str:
+        """
+        The exposed port to the NoSQL endpoint
+        """
+        return self.get_exposed_port(NOSQL_PORT)
+
+    @property
+    def url(self) -> str:
+        """
+        The URL of the NoSQL endpoint
+        """
+        return f"https://{self.host}:{self.port}"
+
+    def insecure_async_client(self):
+        """
+        Returns an asynchronous CosmosClient instance
+        """
+        return AsyncCosmosClient(url=self.url, credential=self.key, connection_verify=False)
+
+    def insecure_sync_client(self):
+        """
+        Returns a synchronous CosmosClient instance
+        """
+        return SyncCosmosClient(url=self.url, credential=self.key, connection_verify=False)
+
+    @wait_container_is_ready(ServiceRequestError)
+    def _wait_for_query_success(self) -> None:
+        with self.insecure_sync_client() as c:
+            list(c.list_databases())
diff --git a/modules/cosmosdb/tests/test_emulator.py b/modules/cosmosdb/tests/test_emulator.py new file mode 100644 index 000000000..542ddd11c --- /dev/null +++ b/modules/cosmosdb/tests/test_emulator.py @@ -0,0 +1,8 @@
+import pytest
+from testcontainers.cosmosdb._emulator import CosmosDBEmulatorContainer
+
+
+def test_runs():
+    with CosmosDBEmulatorContainer(partition_count=1, bind_ports=False) as emulator:
+        assert emulator.server_certificate_pem is not None
+        assert emulator.get_exposed_port(8081) is not None
diff --git a/modules/cosmosdb/tests/test_mongodb.py b/modules/cosmosdb/tests/test_mongodb.py new file mode 100644 index 000000000..a50ee82ea --- /dev/null +++ b/modules/cosmosdb/tests/test_mongodb.py @@ -0,0 +1,16 @@
+import pytest
+from testcontainers.cosmosdb import CosmosDBMongoEndpointContainer
+
+
+def test_requires_a_version():
+    with pytest.raises(AssertionError, match="A MongoDB version is required"):
+        CosmosDBMongoEndpointContainer(mongodb_version=None)
+
+    # instantiates
+    CosmosDBMongoEndpointContainer(mongodb_version="4.0")
+
+
+def test_runs():
+    with CosmosDBMongoEndpointContainer(mongodb_version="4.0", partition_count=1, bind_ports=False) as emulator:
+        assert emulator.env["AZURE_COSMOS_EMULATOR_ENABLE_MONGODB_ENDPOINT"] == "4.0"
+        assert emulator.get_exposed_port(10255) is not None, "The MongoDB endpoint's port should be exposed"
diff --git a/modules/cosmosdb/tests/test_nosql.py b/modules/cosmosdb/tests/test_nosql.py new file mode 100644 index 000000000..a9460a1b0 --- /dev/null +++ b/modules/cosmosdb/tests/test_nosql.py @@ -0,0 +1,7 @@
+import pytest
+from testcontainers.cosmosdb import CosmosDBNoSQLEndpointContainer
+
+
+def test_runs():
+    with CosmosDBNoSQLEndpointContainer(partition_count=1, bind_ports=False) as emulator:
+        assert emulator.get_exposed_port(8081) is not None, "The NoSQL endpoint's port should be exposed"
diff --git a/modules/generic/README.rst b/modules/generic/README.rst new file mode 100644 index 000000000..7e12da700 --- /dev/null +++ b/modules/generic/README.rst @@ -0,0 +1,21 @@
+:code:`testcontainers-generic` is a set of generic container modules that can be used to create containers.
+
+.. autoclass:: testcontainers.generic.ServerContainer
+.. title:: testcontainers.generic.ServerContainer
+
+A FastAPI container built on :code:`ServerContainer`:
+
+.. doctest::
+
+    >>> from testcontainers.generic import ServerContainer
+    >>> from testcontainers.core.waiting_utils import wait_for_logs
+    >>> from testcontainers.core.image import DockerImage
+
+    >>> with DockerImage(path="./modules/generic/tests/samples/fastapi", tag="fastapi-test:latest") as image:
+    ...
with ServerContainer(port=80, image=image) as fastapi_server: + ... delay = wait_for_logs(fastapi_server, "Uvicorn running on http://0.0.0.0:80") + ... fastapi_server.get_api_url = lambda: fastapi_server._create_connection_url() + "/api/v1/" + ... client = fastapi_server.get_client() + ... response = client.get("/") + ... assert response.status_code == 200 + ... assert response.json() == {"Status": "Working"} diff --git a/modules/generic/testcontainers/generic/__init__.py b/modules/generic/testcontainers/generic/__init__.py new file mode 100644 index 000000000..f239a80c6 --- /dev/null +++ b/modules/generic/testcontainers/generic/__init__.py @@ -0,0 +1 @@ +from .server import ServerContainer # noqa: F401 diff --git a/modules/generic/testcontainers/generic/server.py b/modules/generic/testcontainers/generic/server.py new file mode 100644 index 000000000..03a546772 --- /dev/null +++ b/modules/generic/testcontainers/generic/server.py @@ -0,0 +1,80 @@ +from typing import Union +from urllib.error import HTTPError +from urllib.request import urlopen + +import httpx + +from testcontainers.core.container import DockerContainer +from testcontainers.core.exceptions import ContainerStartException +from testcontainers.core.image import DockerImage +from testcontainers.core.waiting_utils import wait_container_is_ready + + +class ServerContainer(DockerContainer): + """ + Container for a generic server that is based on a custom image. + + Example: + + .. doctest:: + + >>> import httpx + >>> from testcontainers.generic import ServerContainer + >>> from testcontainers.core.waiting_utils import wait_for_logs + >>> from testcontainers.core.image import DockerImage + + >>> with DockerImage(path="./modules/generic/tests/samples/python_server", tag="test-srv:latest") as image: + ... with ServerContainer(port=9000, image=image) as srv: + ... url = srv._create_connection_url() + ... response = httpx.get(f"{url}", timeout=5) + ... assert response.status_code == 200, "Response status code is not 200" + ... 
    ...         delay = wait_for_logs(srv, "GET / HTTP/1.1")
+
+
+    :param port: Container port the server listens on; it is exposed and connected to
+    :param image: Image name or :code:`DockerImage` instance to run
+    """
+
+    def __init__(self, port: int, image: Union[str, DockerImage]) -> None:
+        super().__init__(str(image))
+        self.internal_port = port
+        self.with_exposed_ports(self.internal_port)
+
+    @wait_container_is_ready(HTTPError)
+    def _connect(self) -> None:
+        # noinspection HttpUrlsUsage
+        url = self._create_connection_url()
+        try:
+            with urlopen(url) as r:
+                assert b"" in r.read()
+        except HTTPError as e:
+            # 404 is expected, as the server may not have the specific endpoint we are looking for
+            if e.code == 404:
+                pass
+            else:
+                raise
+
+    def get_api_url(self) -> str:
+        raise NotImplementedError
+
+    def _create_connection_url(self) -> str:
+        if self._container is None:
+            raise ContainerStartException("container has not been started")
+        host = self.get_container_host_ip()
+        exposed_port = self.get_exposed_port(self.internal_port)
+        url = f"http://{host}:{exposed_port}"
+        return url
+
+    def start(self) -> "ServerContainer":
+        super().start()
+        self._connect()
+        return self
+
+    def stop(self, force=True, delete_volume=True) -> None:
+        super().stop(force, delete_volume)
+
+    def get_client(self) -> httpx.Client:
+        return httpx.Client(base_url=self.get_api_url())
+
+    def get_stdout(self) -> str:
+        return self.get_logs()[0].decode("utf-8")
diff --git a/modules/generic/tests/conftest.py b/modules/generic/tests/conftest.py new file mode 100644 index 000000000..4f69565f4 --- /dev/null +++ b/modules/generic/tests/conftest.py @@ -0,0 +1,22 @@
+import pytest
+from typing import Callable
+from testcontainers.core.container import DockerClient
+
+
+@pytest.fixture
+def check_for_image() -> Callable[[str, bool], None]:
+    """Wrap the check_for_image function in a fixture"""
+
+    def _check_for_image(image_short_id: str, cleaned: bool) -> None:
+        """
+        Validates if the image is present or not.
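+        Used by the DockerImage tests to assert both that a built image exists (cleaned=False) and that it was removed again (cleaned=True).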
+ + :param image_short_id: The short id of the image + :param cleaned: True if the image should not be present, False otherwise + """ + client = DockerClient() + images = client.client.images.list() + found = any(image.short_id.endswith(image_short_id) for image in images) + assert found is not cleaned, f'Image {image_short_id} was {"found" if cleaned else "not found"}' + + return _check_for_image diff --git a/modules/generic/tests/samples/fastapi/Dockerfile b/modules/generic/tests/samples/fastapi/Dockerfile new file mode 100644 index 000000000..f56288cd5 --- /dev/null +++ b/modules/generic/tests/samples/fastapi/Dockerfile @@ -0,0 +1,11 @@ +FROM python:3.9 + +WORKDIR /app + +RUN pip install fastapi + +COPY ./app /app + +EXPOSE 80 + +CMD ["fastapi", "run", "main.py", "--port", "80"] diff --git a/modules/generic/tests/samples/fastapi/app/__init__.py b/modules/generic/tests/samples/fastapi/app/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/modules/generic/tests/samples/fastapi/app/main.py b/modules/generic/tests/samples/fastapi/app/main.py new file mode 100644 index 000000000..f96073d9f --- /dev/null +++ b/modules/generic/tests/samples/fastapi/app/main.py @@ -0,0 +1,8 @@ +from fastapi import FastAPI + +app = FastAPI() + + +@app.get("/api/v1/") +def read_root(): + return {"Status": "Working"} diff --git a/modules/generic/tests/samples/python_server/Dockerfile b/modules/generic/tests/samples/python_server/Dockerfile new file mode 100644 index 000000000..844acf2b3 --- /dev/null +++ b/modules/generic/tests/samples/python_server/Dockerfile @@ -0,0 +1,3 @@ +FROM python:3-alpine +EXPOSE 9000 +CMD ["python", "-m", "http.server", "9000"] diff --git a/modules/generic/tests/test_generic.py b/modules/generic/tests/test_generic.py new file mode 100644 index 000000000..5943b4a4d --- /dev/null +++ b/modules/generic/tests/test_generic.py @@ -0,0 +1,53 @@ +import re +from pathlib import Path +from typing import Optional + +import pytest +from httpx import get + +from testcontainers.core.waiting_utils import wait_for_logs +from testcontainers.core.image import DockerImage +from testcontainers.generic import ServerContainer + +TEST_DIR = Path(__file__).parent + + +@pytest.mark.parametrize("test_image_cleanup", [True, False]) +@pytest.mark.parametrize("test_image_tag", [None, "custom-image:test"]) +def test_server_container(test_image_tag: Optional[str], test_image_cleanup: bool, check_for_image, port=9000): + with ( + DockerImage( + path=TEST_DIR / "samples/python_server", + tag=test_image_tag, + clean_up=test_image_cleanup, + # + ) as docker_image, + ServerContainer(port=port, image=docker_image) as srv, + ): + image_short_id = docker_image.short_id + image_build_logs = docker_image.get_logs() + # check if dict is in any of the logs + assert {"stream": f"Step 2/3 : EXPOSE {port}"} in image_build_logs, "Image logs mismatch" + assert (port, None) in srv.ports.items(), "Port mismatch" + with pytest.raises(NotImplementedError): + srv.get_api_url() + test_url = srv._create_connection_url() + assert re.match(r"http://localhost:\d+", test_url), "Connection URL mismatch" + + check_for_image(image_short_id, test_image_cleanup) + + +def test_server_container_no_port(): + with pytest.raises(TypeError): + with ServerContainer(path="./modules/generic/tests/samples/python_server", tag="test-srv:latest"): + pass + + +def test_like_doctest(): + with DockerImage(path=TEST_DIR / "samples/python_server", tag="test-srv:latest") as image: + with ServerContainer(port=9000, image=image) as srv: + url = 
srv._create_connection_url()
            response = get(f"{url}", timeout=5)
            assert response.status_code == 200, "Response status code is not 200"
            delay = wait_for_logs(srv, "GET / HTTP/1.1")
            print(delay)
diff --git a/modules/index.rst b/modules/index.rst new file mode 100644 index 000000000..d2a67a3d4 --- /dev/null +++ b/modules/index.rst @@ -0,0 +1,11 @@
+Community Modules
+=================
+
+..
+   glob:
+   https://stackoverflow.com/a/44572883/4971476
+
+.. toctree::
+   :glob:
+
+   */README
diff --git a/modules/k3s/testcontainers/k3s/__init__.py b/modules/k3s/testcontainers/k3s/__init__.py index 2682df356..6e5354175 100644 --- a/modules/k3s/testcontainers/k3s/__init__.py +++ b/modules/k3s/testcontainers/k3s/__init__.py @@ -11,6 +11,8 @@ # License for the specific language governing permissions and limitations # under the License.
+import logging
+
 from testcontainers.core.config import testcontainers_config
 from testcontainers.core.container import DockerContainer
 from testcontainers.core.waiting_utils import wait_for_logs
@@ -37,13 +39,16 @@ class K3SContainer(DockerContainer):
     KUBE_SECURE_PORT = 6443
     RANCHER_WEBHOOK_PORT = 8443
-    def __init__(self, image="rancher/k3s:latest", **kwargs) -> None:
+    def __init__(self, image="rancher/k3s:latest", enable_cgroup_mount=True, **kwargs) -> None:
         super().__init__(image, **kwargs)
         self.with_exposed_ports(self.KUBE_SECURE_PORT, self.RANCHER_WEBHOOK_PORT)
         self.with_env("K3S_URL", f"https://{self.get_container_host_ip()}:{self.KUBE_SECURE_PORT}")
         self.with_command("server --disable traefik --tls-san=" + self.get_container_host_ip())
         self.with_kwargs(privileged=True, tmpfs={"/run": "", "/var/run": ""})
-        self.with_volume_mapping("/sys/fs/cgroup", "/sys/fs/cgroup", "rw")
+        if enable_cgroup_mount:
+            self.with_volume_mapping("/sys/fs/cgroup", "/sys/fs/cgroup", "rw")
+        else:
+            logging.warning("'enable_cgroup_mount' is experimental, see testcontainers/testcontainers-python#591")
     def _connect(self) -> None:
         wait_for_logs(self, predicate="Node controller sync successful", timeout=testcontainers_config.timeout)
diff --git a/modules/kafka/testcontainers/kafka/__init__.py b/modules/kafka/testcontainers/kafka/__init__.py index 7dd71b633..ea837be37 100644 --- a/modules/kafka/testcontainers/kafka/__init__.py +++ b/modules/kafka/testcontainers/kafka/__init__.py @@ -3,8 +3,11 @@ from io import BytesIO from textwrap import dedent
+from typing_extensions import Self
+
 from testcontainers.core.container import DockerContainer
 from testcontainers.core.utils import raise_for_deprecated_parameter
+from testcontainers.core.version import ComparableVersion
 from testcontainers.core.waiting_utils import wait_for_logs
 from testcontainers.kafka._redpanda import RedpandaContainer
@@ -26,18 +29,29 @@ class KafkaContainer(DockerContainer):
         >>> with KafkaContainer() as kafka:
         ...    connection = kafka.get_bootstrap_server()
+
+        # Using KRaft protocol
+        >>> with KafkaContainer().with_kraft() as kafka:
+        ...
connection = kafka.get_bootstrap_server() """ TC_START_SCRIPT = "/tc-start.sh" + MIN_KRAFT_TAG = "7.0.0" def __init__(self, image: str = "confluentinc/cp-kafka:7.6.0", port: int = 9093, **kwargs) -> None: raise_for_deprecated_parameter(kwargs, "port_to_expose", "port") super().__init__(image, **kwargs) self.port = port + self.kraft_enabled = False + self.wait_for = r".*\[KafkaServer id=\d+\] started.*" + self.boot_command = "" + self.cluster_id = "MkU3OEVBNTcwNTJENDM2Qk" + self.listeners = f"PLAINTEXT://0.0.0.0:{self.port},BROKER://0.0.0.0:9092" + self.security_protocol_map = "BROKER:PLAINTEXT,PLAINTEXT:PLAINTEXT" + self.with_exposed_ports(self.port) - listeners = f"PLAINTEXT://0.0.0.0:{self.port},BROKER://0.0.0.0:9092" - self.with_env("KAFKA_LISTENERS", listeners) - self.with_env("KAFKA_LISTENER_SECURITY_PROTOCOL_MAP", "BROKER:PLAINTEXT,PLAINTEXT:PLAINTEXT") + self.with_env("KAFKA_LISTENERS", self.listeners) + self.with_env("KAFKA_LISTENER_SECURITY_PROTOCOL_MAP", self.security_protocol_map) self.with_env("KAFKA_INTER_BROKER_LISTENER_NAME", "BROKER") self.with_env("KAFKA_BROKER_ID", "1") @@ -46,6 +60,74 @@ def __init__(self, image: str = "confluentinc/cp-kafka:7.6.0", port: int = 9093, self.with_env("KAFKA_LOG_FLUSH_INTERVAL_MESSAGES", "10000000") self.with_env("KAFKA_GROUP_INITIAL_REBALANCE_DELAY_MS", "0") + def with_kraft(self) -> Self: + self._verify_min_kraft_version() + self.kraft_enabled = True + return self + + def _verify_min_kraft_version(self): + actual_version = self.image.split(":")[-1] + + if ComparableVersion(actual_version) < self.MIN_KRAFT_TAG: + raise ValueError( + f"Provided Confluent Platform's version {actual_version} " + f"is not supported in Kraft mode" + f" (must be {self.MIN_KRAFT_TAG} or above)" + ) + + def with_cluster_id(self, cluster_id: str) -> Self: + self.cluster_id = cluster_id + return self + + def configure(self): + if self.kraft_enabled: + self._configure_kraft() + else: + self._configure_zookeeper() + + def _configure_kraft(self) -> None: + self.wait_for = r".*Kafka Server started.*" + + self.with_env("CLUSTER_ID", self.cluster_id) + self.with_env("KAFKA_NODE_ID", 1) + self.with_env( + "KAFKA_LISTENER_SECURITY_PROTOCOL_MAP", + f"{self.security_protocol_map},CONTROLLER:PLAINTEXT", + ) + self.with_env( + "KAFKA_LISTENERS", + f"{self.listeners},CONTROLLER://0.0.0.0:9094", + ) + self.with_env("KAFKA_PROCESS_ROLES", "broker,controller") + + network_alias = self._get_network_alias() + controller_quorum_voters = f"1@{network_alias}:9094" + self.with_env("KAFKA_CONTROLLER_QUORUM_VOTERS", controller_quorum_voters) + self.with_env("KAFKA_CONTROLLER_LISTENER_NAMES", "CONTROLLER") + + self.boot_command = f""" + sed -i '/KAFKA_ZOOKEEPER_CONNECT/d' /etc/confluent/docker/configure + echo 'kafka-storage format --ignore-formatted -t {self.cluster_id} -c /etc/kafka/kafka.properties' >> /etc/confluent/docker/configure + """ + + def _get_network_alias(self): + if self._network: + return next( + iter(self._network_aliases or [self._network.name or self._kwargs.get("network", [])]), + None, + ) + + return "localhost" + + def _configure_zookeeper(self) -> None: + self.boot_command = """ + echo 'clientPort=2181' > zookeeper.properties + echo 'dataDir=/var/lib/zookeeper/data' >> zookeeper.properties + echo 'dataLogDir=/var/lib/zookeeper/log' >> zookeeper.properties + zookeeper-server-start zookeeper.properties & + export KAFKA_ZOOKEEPER_CONNECT='localhost:2181' + """ + def get_bootstrap_server(self) -> str: host = self.get_container_host_ip() port = 
self.get_exposed_port(self.port) @@ -59,11 +141,7 @@ def tc_start(self) -> None: dedent( f""" #!/bin/bash - echo 'clientPort=2181' > zookeeper.properties - echo 'dataDir=/var/lib/zookeeper/data' >> zookeeper.properties - echo 'dataLogDir=/var/lib/zookeeper/log' >> zookeeper.properties - zookeeper-server-start zookeeper.properties & - export KAFKA_ZOOKEEPER_CONNECT='localhost:2181' + {self.boot_command} export KAFKA_ADVERTISED_LISTENERS={listeners} . /etc/confluent/docker/bash-config /etc/confluent/docker/configure @@ -78,10 +156,11 @@ def tc_start(self) -> None: def start(self, timeout=30) -> "KafkaContainer": script = KafkaContainer.TC_START_SCRIPT command = f'sh -c "while [ ! -f {script} ]; do sleep 0.1; done; sh {script}"' + self.configure() self.with_command(command) super().start() self.tc_start() - wait_for_logs(self, r".*\[KafkaServer id=\d+\] started.*", timeout=timeout) + wait_for_logs(self, self.wait_for, timeout=timeout) return self def create_file(self, content: bytes, path: str) -> None: diff --git a/modules/kafka/tests/test_kafka.py b/modules/kafka/tests/test_kafka.py index 1f3826adf..eb1a48127 100644 --- a/modules/kafka/tests/test_kafka.py +++ b/modules/kafka/tests/test_kafka.py @@ -8,6 +8,12 @@ def test_kafka_producer_consumer(): produce_and_consume_kafka_message(container) +def test_kafka_with_kraft_producer_consumer(): + with KafkaContainer().with_kraft() as container: + assert container.kraft_enabled + produce_and_consume_kafka_message(container) + + def test_kafka_producer_consumer_custom_port(): with KafkaContainer(port=9888) as container: assert container.port == 9888 diff --git a/modules/keycloak/testcontainers/keycloak/__init__.py b/modules/keycloak/testcontainers/keycloak/__init__.py index 27b6b20d1..6addf09ab 100644 --- a/modules/keycloak/testcontainers/keycloak/__init__.py +++ b/modules/keycloak/testcontainers/keycloak/__init__.py @@ -80,7 +80,6 @@ def _readiness_probe(self) -> None: wait_for_logs(self, "Added user .* to realm .*") def start(self) -> "KeycloakContainer": - self._configure() super().start() self._readiness_probe() return self diff --git a/modules/milvus/README.rst b/modules/milvus/README.rst new file mode 100644 index 000000000..f823d7fe9 --- /dev/null +++ b/modules/milvus/README.rst @@ -0,0 +1,2 @@ +.. autoclass:: testcontainers.milvus.MilvusContainer +.. title:: testcontainers.milvus.MilvusContainer diff --git a/modules/milvus/testcontainers/milvus/__init__.py b/modules/milvus/testcontainers/milvus/__init__.py new file mode 100644 index 000000000..39a1403e9 --- /dev/null +++ b/modules/milvus/testcontainers/milvus/__init__.py @@ -0,0 +1,85 @@ +# +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. + +import requests + +from testcontainers.core.config import testcontainers_config as c +from testcontainers.core.generic import DockerContainer +from testcontainers.core.waiting_utils import wait_container_is_ready, wait_for_logs + + +class MilvusContainer(DockerContainer): + """ + Milvus database container. 
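+    Runs Milvus in standalone mode with embedded etcd and local storage (see the environment variables set in ``__init__``).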
+
+    Read more about Milvus: https://milvus.io/docs
+
+    Example:
+
+        The example spins up a Milvus database and connects a client to it using MilvusClient.
+
+        .. doctest::
+
+            >>> from testcontainers.milvus import MilvusContainer
+            >>> with MilvusContainer("milvusdb/milvus:v2.4.4") as milvus_container:
+            ...    milvus_container.get_exposed_port(milvus_container.port) in milvus_container.get_connection_url()
+            True
+    """
+
+    def __init__(
+        self,
+        image: str = "milvusdb/milvus:latest",
+        port: int = 19530,
+        **kwargs,
+    ) -> None:
+        super().__init__(image=image, **kwargs)
+        self.port = port
+        self.healthcheck_port = 9091
+        self.with_exposed_ports(self.port, self.healthcheck_port)
+        self.cmd = "milvus run standalone"
+
+        envs = {"ETCD_USE_EMBED": "true", "ETCD_DATA_DIR": "/var/lib/milvus/etcd", "COMMON_STORAGETYPE": "local"}
+
+        for env, value in envs.items():
+            self.with_env(env, value)
+
+    def get_connection_url(self) -> str:
+        ip = self.get_container_host_ip()
+        port = self.get_exposed_port(self.port)
+        return f"http://{ip}:{port}"
+
+    @wait_container_is_ready()
+    def _connect(self) -> None:
+        msg = "Welcome to use Milvus!"
+        wait_for_logs(self, f".*{msg}.*", c.max_tries, c.sleep_time)
+        self._healthcheck()
+
+    def _get_healthcheck_url(self) -> str:
+        ip = self.get_container_host_ip()
+        port = self.get_exposed_port(self.healthcheck_port)
+        return f"http://{ip}:{port}"
+
+    @wait_container_is_ready(requests.exceptions.HTTPError)
+    def _healthcheck(self) -> None:
+        healthcheck_url = self._get_healthcheck_url()
+        response = requests.get(f"{healthcheck_url}/healthz", timeout=1)
+        response.raise_for_status()
+
+    def start(self) -> "MilvusContainer":
+        """This method starts the Milvus container and runs the healthcheck
+        to verify that the container is ready to use."""
+        self.with_command(self.cmd)
+        super().start()
+        self._connect()
+        self._healthcheck()
+        return self
diff --git a/modules/milvus/tests/test_milvus.py b/modules/milvus/tests/test_milvus.py new file mode 100644 index 000000000..12887a49b --- /dev/null +++ b/modules/milvus/tests/test_milvus.py @@ -0,0 +1,39 @@
+import pytest
+from pymilvus import MilvusClient
+
+from testcontainers.milvus import MilvusContainer
+
+VERSIONS = ["v2.4.0", "v2.4.4"]
+
+
+class ClientMilvusContainer(MilvusContainer):
+    def get_client(self, *, dbname: str = "default", token: str = "root:Milvus") -> MilvusClient:
+        connection_url = self.get_connection_url()
+        client = MilvusClient(uri=connection_url, dbname=dbname, token=token)
+        return client
+
+
+@pytest.mark.parametrize("version", VERSIONS)
+def test_run_milvus_success(version: str):
+    image = f"milvusdb/milvus:{version}"
+
+    with MilvusContainer(image=image) as milvus_container:
+        exposed_port = milvus_container.get_exposed_port(milvus_container.port)
+        url = milvus_container.get_connection_url()
+
+        assert url and exposed_port in url
+
+
+@pytest.mark.parametrize("version", VERSIONS)
+def test_milvus_client_success(version: str):
+    image = f"milvusdb/milvus:{version}"
+    test_collection = "test_collection"
+
+    with ClientMilvusContainer(image=image) as milvus_container:
+        client = milvus_container.get_client()
+        client.create_collection(test_collection, dimension=2)
+        collections = client.list_collections()
+        assert test_collection in collections
+
+        client.drop_collection(test_collection)
+        assert not client.has_collection(test_collection)
diff --git a/modules/mqtt/README.rst b/modules/mqtt/README.rst new file mode 100644 index 000000000..2e088cbbb --- /dev/null +++ b/modules/mqtt/README.rst @@ -0,0 +1,2 @@
+.. autoclass:: testcontainers.mqtt.MosquittoContainer
+.. title:: testcontainers.mqtt.MosquittoContainer
diff --git a/modules/mqtt/testcontainers/mqtt/__init__.py b/modules/mqtt/testcontainers/mqtt/__init__.py new file mode 100644 index 000000000..1382762ae --- /dev/null +++ b/modules/mqtt/testcontainers/mqtt/__init__.py @@ -0,0 +1,155 @@
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+from pathlib import Path
+from typing import TYPE_CHECKING, Optional
+
+from typing_extensions import Self
+
+from testcontainers.core.container import DockerContainer
+from testcontainers.core.waiting_utils import wait_container_is_ready, wait_for_logs
+
+if TYPE_CHECKING:
+    from paho.mqtt.client import Client
+    from paho.mqtt.enums import MQTTErrorCode
+
+
+class MosquittoContainer(DockerContainer):
+    """
+    Specialization of DockerContainer for the MQTT broker Mosquitto.
+    Example:
+
+        .. doctest::
+
+            >>> from testcontainers.mqtt import MosquittoContainer
+
+            >>> with MosquittoContainer() as mosquitto_broker:
+            ...     mqtt_client = mosquitto_broker.get_client()
+    """
+
+    TESTCONTAINERS_CLIENT_ID = "TESTCONTAINERS-CLIENT"
+    MQTT_PORT = 1883
+    CONFIG_FILE = "testcontainers-mosquitto-default-configuration.conf"
+
+    def __init__(
+        self,
+        image: str = "eclipse-mosquitto:latest",
+        # password: Optional[str] = None,
+        **kwargs,
+    ) -> None:
+        super().__init__(image, **kwargs)
+        # self.password = password
+        # reusable client context:
+        self.client: Optional["Client"] = None
+
+    @wait_container_is_ready()
+    def get_client(self) -> "Client":
+        """
+        Creates and connects a client, caching the result in `self.client`
+        and returning the cached client if it already exists.
+
+        Connection attempts are retried using `@wait_container_is_ready`.
+
+        Returns:
+            a client from the paho library
+        """
+        if self.client:
+            return self.client
+        client, err = self.new_client()
+        # 0 is a conventional "success" value in C, which is falsy in python
+        if err:
+            # retry, maybe it is not available yet
+            raise ConnectionError(f"Failed to establish a connection: {err}")
+        if not client.is_connected():
+            raise TimeoutError("The Paho MQTT secondary thread has not connected yet!")
+        self.client = client
+        return client
+
+    def new_client(self, **kwargs) -> tuple["Client", "MQTTErrorCode"]:
+        """
+        Get a paho.mqtt client connected to this container.
+        Check the returned object's is_connected() method before use.
+
+        Usage of this method is required for eclipse-mosquitto versions <2;
+        versions >=2 wait for log messages to determine container readiness.
+        There is no way to pass arguments to new_client in versions <2,
+        so please use an up-to-date version.
+
+        Args:
+            **kwargs: Keyword arguments passed to `paho.mqtt.client`.
+
+        Returns:
+            client: MQTT client to connect to the container.
+            error: an error code or MQTT_ERR_SUCCESS.
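+
+        Example (a minimal sketch; the topic name is illustrative and assumes paho-mqtt >=2 is installed):
+
+            client, err = container.new_client()
+            assert err == MQTTErrorCode.MQTT_ERR_SUCCESS
+            client.subscribe("demo/topic")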
+ """ + try: + from paho.mqtt.client import CallbackAPIVersion, Client + from paho.mqtt.enums import MQTTErrorCode + except ImportError as i: + raise ImportError("'pip install paho-mqtt' required for MosquittoContainer.new_client") from i + + err = MQTTErrorCode.MQTT_ERR_SUCCESS + if self.client is None: + self.client = Client( + client_id=MosquittoContainer.TESTCONTAINERS_CLIENT_ID, + callback_api_version=CallbackAPIVersion.VERSION2, + userdata=self, + **kwargs, + ) + self.client._connect_timeout = 1.0 + + # connect() is a blocking call: + err = self.client.connect(self.get_container_host_ip(), int(self.get_exposed_port(self.MQTT_PORT))) + self.client.loop_start() # launch a thread to call loop() and dequeue the message + + return self.client, err + + def start(self, configfile: Optional[str] = None) -> Self: + # setup container: + self.with_exposed_ports(self.MQTT_PORT) + if configfile is None: + # default config file + configfile = Path(__file__).parent / MosquittoContainer.CONFIG_FILE + self.with_volume_mapping(configfile, "/mosquitto/config/mosquitto.conf") + # if self.password: + # # TODO: add authentication + # pass + + # do container start + super().start() + + self._wait() + return self + + def _wait(self): + if self.image.split(":")[-1].startswith("1"): + import logging + + logging.warning( + "You are using version 1 of eclipse-mosquitto which is not supported for use by this module without paho-mqtt also installed" + ) + self.get_client() + else: + wait_for_logs(self, r"mosquitto version \d+.\d+.\d+ running", timeout=30) + + def stop(self, force=True, delete_volume=True) -> None: + if self.client is not None: + self.client.disconnect() + self.client = None # force recreation of the client object at next start() + super().stop(force, delete_volume) + + def publish_message(self, topic: str, payload: str, timeout: int = 2) -> None: + ret = self.get_client().publish(topic, payload) + ret.wait_for_publish(timeout=timeout) + if not ret.is_published(): + raise RuntimeError(f"Could not publish a message on topic {topic} to Mosquitto broker: {ret}") diff --git a/modules/mqtt/testcontainers/mqtt/testcontainers-mosquitto-default-configuration.conf b/modules/mqtt/testcontainers/mqtt/testcontainers-mosquitto-default-configuration.conf new file mode 100644 index 000000000..13728cec0 --- /dev/null +++ b/modules/mqtt/testcontainers/mqtt/testcontainers-mosquitto-default-configuration.conf @@ -0,0 +1,20 @@ +# see https://mosquitto.org/man/mosquitto-conf-5.html + +protocol mqtt +user root +log_dest stdout +allow_anonymous true + +log_type error +log_type warning +log_type notice +log_type information + +log_timestamp_format %Y-%m-%d %H:%M:%S +persistence true +persistence_location /data/ + +listener 1883 +protocol mqtt + +sys_interval 1 diff --git a/modules/mqtt/tests/test_mosquitto.py b/modules/mqtt/tests/test_mosquitto.py new file mode 100644 index 000000000..63ce7fcd9 --- /dev/null +++ b/modules/mqtt/tests/test_mosquitto.py @@ -0,0 +1,18 @@ +import pytest + +from testcontainers.mqtt import MosquittoContainer + +VERSIONS = ["1.6.15", "2.0.18"] + + +@pytest.mark.parametrize("version", VERSIONS) +def test_mosquitto(version): + with MosquittoContainer(image=f"eclipse-mosquitto:{version}") as container: + external_port = int(container.get_exposed_port(container.MQTT_PORT)) + print(f"listening on port: {external_port}") + + +@pytest.mark.parametrize("version", VERSIONS) +def test_mosquitto_client(version): + with MosquittoContainer(image=f"eclipse-mosquitto:{version}") as container: + 
container.get_client() diff --git a/modules/ollama/README.rst b/modules/ollama/README.rst new file mode 100644 index 000000000..dc18fe265 --- /dev/null +++ b/modules/ollama/README.rst @@ -0,0 +1,2 @@ +.. autoclass:: testcontainers.ollama.OllamaContainer +.. title:: testcontainers.ollama.OllamaContainer diff --git a/modules/ollama/testcontainers/ollama/__init__.py b/modules/ollama/testcontainers/ollama/__init__.py new file mode 100644 index 000000000..ea089f149 --- /dev/null +++ b/modules/ollama/testcontainers/ollama/__init__.py @@ -0,0 +1,160 @@ +# +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. + +from os import PathLike +from typing import Any, Optional, TypedDict, Union + +from docker.types.containers import DeviceRequest +from requests import get + +from testcontainers.core.container import DockerContainer +from testcontainers.core.waiting_utils import wait_for_logs + + +class OllamaModel(TypedDict): + name: str + model: str + modified_at: str + size: int + digest: str + details: dict[str, Any] + + +class OllamaContainer(DockerContainer): + """ + Ollama Container + + :param: image - the ollama image to use (default: :code:`ollama/ollama:0.1.44`) + :param: ollama_home - the directory to mount for model data (default: None) + + you may pass :code:`pathlib.Path.home() / ".ollama"` to re-use models + that have already been pulled with ollama running on this host outside the container. + + Examples: + + .. doctest:: + + >>> from testcontainers.ollama import OllamaContainer + >>> with OllamaContainer() as ollama: + ... ollama.list_models() + [] + + .. code-block:: python + + >>> from json import loads + >>> from pathlib import Path + >>> from requests import post + >>> from testcontainers.ollama import OllamaContainer + >>> def split_by_line(generator): + ... data = b'' + ... for each_item in generator: + ... for line in each_item.splitlines(True): + ... data += line + ... if data.endswith((b'\\r\\r', b'\\n\\n', b'\\r\\n\\r\\n', b'\\n')): + ... yield from data.splitlines() + ... data = b'' + ... if data: + ... yield from data.splitlines() + + >>> with OllamaContainer(ollama_home=Path.home() / ".ollama") as ollama: + ... if "llama3:latest" not in [e["name"] for e in ollama.list_models()]: + ... print("did not find 'llama3:latest', pulling") + ... ollama.pull_model("llama3:latest") + ... endpoint = ollama.get_endpoint() + ... for chunk in split_by_line( + ... post(url=f"{endpoint}/api/chat", stream=True, json={ + ... "model": "llama3:latest", + ... "messages": [{ + ... "role": "user", + ... "content": "what color is the sky? MAX ONE WORD" + ... }] + ... }) + ... ): + ... print(loads(chunk)["message"]["content"], end="") + Blue. 
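+
+    Note: if the Docker engine reports an ``nvidia`` runtime, the container
+    requests all available GPUs automatically (see ``_check_and_add_gpu_capabilities``).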
+ """ + + OLLAMA_PORT = 11434 + + def __init__( + self, + image: str = "ollama/ollama:0.1.44", + ollama_home: Optional[Union[str, PathLike]] = None, + **kwargs, + # + ): + super().__init__(image=image, **kwargs) + self.ollama_home = ollama_home + self.with_exposed_ports(OllamaContainer.OLLAMA_PORT) + self._check_and_add_gpu_capabilities() + + def _check_and_add_gpu_capabilities(self): + info = self.get_docker_client().client.info() + if "nvidia" in info["Runtimes"]: + self._kwargs = {**self._kwargs, "device_requests": DeviceRequest(count=-1, capabilities=[["gpu"]])} + + def start(self) -> "OllamaContainer": + """ + Start the Ollama server + """ + if self.ollama_home: + self.with_volume_mapping(self.ollama_home, "/root/.ollama", "rw") + super().start() + wait_for_logs(self, "Listening on ", timeout=30) + + return self + + def get_endpoint(self): + """ + Return the endpoint of the Ollama server + """ + host = self.get_container_host_ip() + exposed_port = self.get_exposed_port(OllamaContainer.OLLAMA_PORT) + url = f"http://{host}:{exposed_port}" + return url + + @property + def id(self) -> str: + """ + Return the container object + """ + return self._container.id + + def pull_model(self, model_name: str) -> None: + """ + Pull a model from the Ollama server + + Args: + model_name (str): Name of the model + """ + self.exec(f"ollama pull {model_name}") + + def list_models(self) -> list[OllamaModel]: + endpoint = self.get_endpoint() + response = get(url=f"{endpoint}/api/tags") + response.raise_for_status() + return response.json().get("models", []) + + def commit_to_image(self, image_name: str) -> None: + """ + Commit the current container to a new image + + Args: + image_name (str): Name of the new image + """ + docker_client = self.get_docker_client() + existing_images = docker_client.client.images.list(name=image_name) + if not existing_images and self.id: + docker_client.client.containers.get(self.id).commit( + repository=image_name, conf={"Labels": {"org.testcontainers.session-id": ""}} + ) diff --git a/modules/ollama/tests/test_ollama.py b/modules/ollama/tests/test_ollama.py new file mode 100644 index 000000000..980dac00b --- /dev/null +++ b/modules/ollama/tests/test_ollama.py @@ -0,0 +1,60 @@ +import random +import string +from pathlib import Path + +import requests +from testcontainers.ollama import OllamaContainer + + +def random_string(length=6): + return "".join(random.choices(string.ascii_lowercase, k=length)) + + +def test_ollama_container(): + with OllamaContainer() as ollama: + url = ollama.get_endpoint() + response = requests.get(url) + assert response.status_code == 200 + assert response.text == "Ollama is running" + + +def test_with_default_config(): + with OllamaContainer("ollama/ollama:0.1.26") as ollama: + ollama.start() + response = requests.get(f"{ollama.get_endpoint()}/api/version") + version = response.json().get("version") + assert version == "0.1.26" + + +def test_download_model_and_commit_to_image(): + new_image_name = f"tc-ollama-allminilm-{random_string(length=4).lower()}" + with OllamaContainer("ollama/ollama:0.1.26") as ollama: + ollama.start() + # Pull the model + ollama.pull_model("all-minilm") + + response = requests.get(f"{ollama.get_endpoint()}/api/tags") + model_name = ollama.list_models()[0].get("name") + assert "all-minilm" in model_name + + # Commit the container state to a new image + ollama.commit_to_image(new_image_name) + + # Verify the new image + with OllamaContainer(new_image_name) as ollama: + ollama.start() + response = 
requests.get(f"{ollama.get_endpoint()}/api/tags") + model_name = response.json().get("models", [])[0].get("name") + assert "all-minilm" in model_name + + +def test_models_saved_in_folder(tmp_path: Path): + with OllamaContainer("ollama/ollama:0.1.26", ollama_home=tmp_path) as ollama: + assert len(ollama.list_models()) == 0 + ollama.pull_model("all-minilm") + assert len(ollama.list_models()) == 1 + assert "all-minilm" in ollama.list_models()[0].get("name") + + with OllamaContainer("ollama/ollama:0.1.26", ollama_home=tmp_path) as ollama: + assert len(ollama.list_models()) == 1 + assert "all-minilm" in ollama.list_models()[0].get("name") diff --git a/modules/testmoduleimport/README.rst b/modules/testmoduleimport/README.rst new file mode 100644 index 000000000..ae5d5708a --- /dev/null +++ b/modules/testmoduleimport/README.rst @@ -0,0 +1,2 @@ +.. autoclass:: testcontainers.testmoduleimport.NewSubModuleContainer +.. title:: testcontainers.testmoduleimport.NewSubModuleContainer diff --git a/modules/testmoduleimport/testcontainers/testmoduleimport/__init__.py b/modules/testmoduleimport/testcontainers/testmoduleimport/__init__.py new file mode 100644 index 000000000..74074699e --- /dev/null +++ b/modules/testmoduleimport/testcontainers/testmoduleimport/__init__.py @@ -0,0 +1 @@ +from .new_sub_module import NewSubModuleContainer # noqa: F401 diff --git a/modules/testmoduleimport/testcontainers/testmoduleimport/new_sub_module.py b/modules/testmoduleimport/testcontainers/testmoduleimport/new_sub_module.py new file mode 100644 index 000000000..f45796f76 --- /dev/null +++ b/modules/testmoduleimport/testcontainers/testmoduleimport/new_sub_module.py @@ -0,0 +1,27 @@ +from testcontainers.generic.server import ServerContainer + + +class NewSubModuleContainer(ServerContainer): + """ + This class is a mock container for testing purposes. It is used to test importing from other modules. + + .. doctest:: + + >>> import httpx + >>> from testcontainers.core.image import DockerImage + >>> from testcontainers.testmoduleimport import NewSubModuleContainer + + >>> with DockerImage(path="./modules/generic/tests/samples/python_server", tag="test-mod:latest") as image: + ... with NewSubModuleContainer(port=9000, image=image) as srv: + ... url = srv._create_connection_url() + ... response = httpx.get(f"{url}", timeout=5) + ... assert response.status_code == 200, "Response status code is not 200" + ... 
assert srv.print_mock() == "NewSubModuleContainer" + + """ + + def __init__(self, port: int, image: str) -> None: + super().__init__(port, image) + + def print_mock(self) -> str: + return "NewSubModuleContainer" diff --git a/modules/testmoduleimport/tests/test_mock_one.py b/modules/testmoduleimport/tests/test_mock_one.py new file mode 100644 index 000000000..85ac6c315 --- /dev/null +++ b/modules/testmoduleimport/tests/test_mock_one.py @@ -0,0 +1,15 @@ +import httpx + +from testcontainers.core.waiting_utils import wait_for_logs +from testcontainers.core.image import DockerImage +from testcontainers.testmoduleimport import NewSubModuleContainer + + +def test_like_doctest(): + with DockerImage(path="./modules/generic/tests/samples/python_server", tag="test-srv:latest") as image: + with NewSubModuleContainer(port=9000, image=image) as srv: + assert srv.print_mock() == "NewSubModuleContainer" + url = srv._create_connection_url() + response = httpx.get(f"{url}", timeout=5) + assert response.status_code == 200, "Response status code is not 200" + _ = wait_for_logs(srv, "GET / HTTP/1.1") diff --git a/poetry.lock b/poetry.lock index 272b0b238..90a83f33f 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1,4 +1,4 @@ -# This file is automatically @generated by Poetry 1.8.2 and should not be changed by hand. +# This file is automatically @generated by Poetry 1.8.3 and should not be changed by hand. [[package]] name = "alabaster" @@ -175,6 +175,21 @@ typing-extensions = ">=4.6.0" [package.extras] aio = ["aiohttp (>=3.0)"] +[[package]] +name = "azure-cosmos" +version = "4.7.0" +description = "Microsoft Azure Cosmos Client Library for Python" +optional = true +python-versions = ">=3.8" +files = [ + {file = "azure-cosmos-4.7.0.tar.gz", hash = "sha256:72d714033134656302a2e8957c4b93590673bd288b0ca60cb123e348ae99a241"}, + {file = "azure_cosmos-4.7.0-py3-none-any.whl", hash = "sha256:03d8c7740ddc2906fb16e07b136acc0fe6a6a02656db46c5dd6f1b127b58cc96"}, +] + +[package.dependencies] +azure-core = ">=1.25.1" +typing-extensions = ">=4.6.0" + [[package]] name = "azure-storage-blob" version = "12.19.1" @@ -931,6 +946,27 @@ files = [ {file = "docutils-0.20.1.tar.gz", hash = "sha256:f08a4e276c3a1583a86dce3e34aba3fe04d02bba2dd51ed16106244e8a923e3b"}, ] +[[package]] +name = "environs" +version = "9.5.0" +description = "simplified environment variable parsing" +optional = false +python-versions = ">=3.6" +files = [ + {file = "environs-9.5.0-py2.py3-none-any.whl", hash = "sha256:1e549569a3de49c05f856f40bce86979e7d5ffbbc4398e7f338574c220189124"}, + {file = "environs-9.5.0.tar.gz", hash = "sha256:a76307b36fbe856bdca7ee9161e6c466fd7fcffc297109a118c59b54e27e30c9"}, +] + +[package.dependencies] +marshmallow = ">=3.0.0" +python-dotenv = "*" + +[package.extras] +dev = ["dj-database-url", "dj-email-url", "django-cache-url", "flake8 (==4.0.1)", "flake8-bugbear (==21.9.2)", "mypy (==0.910)", "pre-commit (>=2.4,<3.0)", "pytest", "tox"] +django = ["dj-database-url", "dj-email-url", "django-cache-url"] +lint = ["flake8 (==4.0.1)", "flake8-bugbear (==21.9.2)", "mypy (==0.910)", "pre-commit (>=2.4,<3.0)"] +tests = ["dj-database-url", "dj-email-url", "django-cache-url", "pytest"] + [[package]] name = "exceptiongroup" version = "1.2.0" @@ -1205,7 +1241,7 @@ protobuf = ">=3.19.5,<3.20.0 || >3.20.0,<3.20.1 || >3.20.1,<4.21.1 || >4.21.1,<4 name = "grpcio" version = "1.62.1" description = "HTTP/2-based RPC framework" -optional = true +optional = false python-versions = ">=3.7" files = [ {file = "grpcio-1.62.1-cp310-cp310-linux_armv7l.whl", hash = 
"sha256:179bee6f5ed7b5f618844f760b6acf7e910988de77a4f75b95bbfaa8106f3c1e"}, @@ -1370,7 +1406,7 @@ setuptools = "*" name = "h11" version = "0.14.0" description = "A pure-Python, bring-your-own-I/O implementation of HTTP/1.1" -optional = true +optional = false python-versions = ">=3.7" files = [ {file = "h11-0.14.0-py3-none-any.whl", hash = "sha256:e3fe4ac4b851c468cc8363d500db52c2ead036020723024a109d37346efaa761"}, @@ -1407,7 +1443,7 @@ files = [ name = "httpcore" version = "1.0.5" description = "A minimal low-level HTTP client." -optional = true +optional = false python-versions = ">=3.8" files = [ {file = "httpcore-1.0.5-py3-none-any.whl", hash = "sha256:421f18bac248b25d310f3cacd198d55b8e6125c107797b609ff9b7a6ba7991b5"}, @@ -1428,7 +1464,7 @@ trio = ["trio (>=0.22.0,<0.26.0)"] name = "httpx" version = "0.27.0" description = "The next generation HTTP client." -optional = true +optional = false python-versions = ">=3.8" files = [ {file = "httpx-0.27.0-py3-none-any.whl", hash = "sha256:71d5465162c13681bff01ad59b2cc68dd838ea1f10e51574bac27103f00c91a5"}, @@ -1838,6 +1874,25 @@ files = [ {file = "MarkupSafe-2.1.5.tar.gz", hash = "sha256:d283d37a890ba4c1ae73ffadf8046435c76e7bc2247bbb63c00bd1a709c6544b"}, ] +[[package]] +name = "marshmallow" +version = "3.21.3" +description = "A lightweight library for converting complex datatypes to and from native Python datatypes." +optional = false +python-versions = ">=3.8" +files = [ + {file = "marshmallow-3.21.3-py3-none-any.whl", hash = "sha256:86ce7fb914aa865001a4b2092c4c2872d13bc347f3d42673272cabfdbad386f1"}, + {file = "marshmallow-3.21.3.tar.gz", hash = "sha256:4f57c5e050a54d66361e826f94fba213eb10b67b2fdb02c3e0343ce207ba1662"}, +] + +[package.dependencies] +packaging = ">=17.0" + +[package.extras] +dev = ["marshmallow[tests]", "pre-commit (>=3.5,<4.0)", "tox"] +docs = ["alabaster (==0.7.16)", "autodocsumm (==0.2.12)", "sphinx (==7.3.7)", "sphinx-issues (==4.1.0)", "sphinx-version-warning (==1.1.2)"] +tests = ["pytest", "pytz", "simplejson"] + [[package]] name = "mdurl" version = "0.1.2" @@ -1849,6 +1904,18 @@ files = [ {file = "mdurl-0.1.2.tar.gz", hash = "sha256:bb413d29f5eea38f31dd4754dd7377d4465116fb207585f97bf925588687c1ba"}, ] +[[package]] +name = "milvus-lite" +version = "2.4.7" +description = "A lightweight version of Milvus wrapped with Python." 
+optional = false +python-versions = ">=3.7" +files = [ + {file = "milvus_lite-2.4.7-py3-none-macosx_10_9_x86_64.whl", hash = "sha256:c828190118b104b05b8c8e0b5a4147811c86b54b8fb67bc2e726ad10fc0b544e"}, + {file = "milvus_lite-2.4.7-py3-none-macosx_11_0_arm64.whl", hash = "sha256:e1537633c39879714fb15082be56a4b97f74c905a6e98e302ec01320561081af"}, + {file = "milvus_lite-2.4.7-py3-none-manylinux2014_x86_64.whl", hash = "sha256:f016474d663045787dddf1c3aad13b7d8b61fd329220318f858184918143dcbf"}, +] + [[package]] name = "minio" version = "7.2.5" @@ -2088,7 +2155,7 @@ setuptools = "*" name = "numpy" version = "1.26.4" description = "Fundamental package for array computing in Python" -optional = true +optional = false python-versions = ">=3.9" files = [ {file = "numpy-1.26.4-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:9ff0f4f29c51e2803569d7a51c2304de5554655a60c5d776e35b4a41413830d0"}, @@ -2387,6 +2454,93 @@ files = [ {file = "packaging-24.0.tar.gz", hash = "sha256:eb82c5e3e56209074766e6885bb04b8c38a0c015d0a30036ebe7ece34c9989e9"}, ] +[[package]] +name = "paho-mqtt" +version = "2.1.0" +description = "MQTT version 5.0/3.1.1 client class" +optional = false +python-versions = ">=3.7" +files = [ + {file = "paho_mqtt-2.1.0-py3-none-any.whl", hash = "sha256:6db9ba9b34ed5bc6b6e3812718c7e06e2fd7444540df2455d2c51bd58808feee"}, + {file = "paho_mqtt-2.1.0.tar.gz", hash = "sha256:12d6e7511d4137555a3f6ea167ae846af2c7357b10bc6fa4f7c3968fc1723834"}, +] + +[package.extras] +proxy = ["pysocks"] + +[[package]] +name = "pandas" +version = "2.2.2" +description = "Powerful data structures for data analysis, time series, and statistics" +optional = false +python-versions = ">=3.9" +files = [ + {file = "pandas-2.2.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:90c6fca2acf139569e74e8781709dccb6fe25940488755716d1d354d6bc58bce"}, + {file = "pandas-2.2.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:c7adfc142dac335d8c1e0dcbd37eb8617eac386596eb9e1a1b77791cf2498238"}, + {file = "pandas-2.2.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4abfe0be0d7221be4f12552995e58723c7422c80a659da13ca382697de830c08"}, + {file = "pandas-2.2.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8635c16bf3d99040fdf3ca3db669a7250ddf49c55dc4aa8fe0ae0fa8d6dcc1f0"}, + {file = "pandas-2.2.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:40ae1dffb3967a52203105a077415a86044a2bea011b5f321c6aa64b379a3f51"}, + {file = "pandas-2.2.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:8e5a0b00e1e56a842f922e7fae8ae4077aee4af0acb5ae3622bd4b4c30aedf99"}, + {file = "pandas-2.2.2-cp310-cp310-win_amd64.whl", hash = "sha256:ddf818e4e6c7c6f4f7c8a12709696d193976b591cc7dc50588d3d1a6b5dc8772"}, + {file = "pandas-2.2.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:696039430f7a562b74fa45f540aca068ea85fa34c244d0deee539cb6d70aa288"}, + {file = "pandas-2.2.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:8e90497254aacacbc4ea6ae5e7a8cd75629d6ad2b30025a4a8b09aa4faf55151"}, + {file = "pandas-2.2.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:58b84b91b0b9f4bafac2a0ac55002280c094dfc6402402332c0913a59654ab2b"}, + {file = "pandas-2.2.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6d2123dc9ad6a814bcdea0f099885276b31b24f7edf40f6cdbc0912672e22eee"}, + {file = "pandas-2.2.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:2925720037f06e89af896c70bca73459d7e6a4be96f9de79e2d440bd499fe0db"}, + {file = 
"pandas-2.2.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:0cace394b6ea70c01ca1595f839cf193df35d1575986e484ad35c4aeae7266c1"}, + {file = "pandas-2.2.2-cp311-cp311-win_amd64.whl", hash = "sha256:873d13d177501a28b2756375d59816c365e42ed8417b41665f346289adc68d24"}, + {file = "pandas-2.2.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:9dfde2a0ddef507a631dc9dc4af6a9489d5e2e740e226ad426a05cabfbd7c8ef"}, + {file = "pandas-2.2.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:e9b79011ff7a0f4b1d6da6a61aa1aa604fb312d6647de5bad20013682d1429ce"}, + {file = "pandas-2.2.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1cb51fe389360f3b5a4d57dbd2848a5f033350336ca3b340d1c53a1fad33bcad"}, + {file = "pandas-2.2.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:eee3a87076c0756de40b05c5e9a6069c035ba43e8dd71c379e68cab2c20f16ad"}, + {file = "pandas-2.2.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:3e374f59e440d4ab45ca2fffde54b81ac3834cf5ae2cdfa69c90bc03bde04d76"}, + {file = "pandas-2.2.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:43498c0bdb43d55cb162cdc8c06fac328ccb5d2eabe3cadeb3529ae6f0517c32"}, + {file = "pandas-2.2.2-cp312-cp312-win_amd64.whl", hash = "sha256:d187d355ecec3629624fccb01d104da7d7f391db0311145817525281e2804d23"}, + {file = "pandas-2.2.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:0ca6377b8fca51815f382bd0b697a0814c8bda55115678cbc94c30aacbb6eff2"}, + {file = "pandas-2.2.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:9057e6aa78a584bc93a13f0a9bf7e753a5e9770a30b4d758b8d5f2a62a9433cd"}, + {file = "pandas-2.2.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:001910ad31abc7bf06f49dcc903755d2f7f3a9186c0c040b827e522e9cef0863"}, + {file = "pandas-2.2.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:66b479b0bd07204e37583c191535505410daa8df638fd8e75ae1b383851fe921"}, + {file = "pandas-2.2.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:a77e9d1c386196879aa5eb712e77461aaee433e54c68cf253053a73b7e49c33a"}, + {file = "pandas-2.2.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:92fd6b027924a7e178ac202cfbe25e53368db90d56872d20ffae94b96c7acc57"}, + {file = "pandas-2.2.2-cp39-cp39-win_amd64.whl", hash = "sha256:640cef9aa381b60e296db324337a554aeeb883ead99dc8f6c18e81a93942f5f4"}, + {file = "pandas-2.2.2.tar.gz", hash = "sha256:9e79019aba43cb4fda9e4d983f8e88ca0373adbb697ae9c6c43093218de28b54"}, +] + +[package.dependencies] +numpy = [ + {version = ">=1.23.2", markers = "python_version == \"3.11\""}, + {version = ">=1.22.4", markers = "python_version < \"3.11\""}, + {version = ">=1.26.0", markers = "python_version >= \"3.12\""}, +] +python-dateutil = ">=2.8.2" +pytz = ">=2020.1" +tzdata = ">=2022.7" + +[package.extras] +all = ["PyQt5 (>=5.15.9)", "SQLAlchemy (>=2.0.0)", "adbc-driver-postgresql (>=0.8.0)", "adbc-driver-sqlite (>=0.8.0)", "beautifulsoup4 (>=4.11.2)", "bottleneck (>=1.3.6)", "dataframe-api-compat (>=0.1.7)", "fastparquet (>=2022.12.0)", "fsspec (>=2022.11.0)", "gcsfs (>=2022.11.0)", "html5lib (>=1.1)", "hypothesis (>=6.46.1)", "jinja2 (>=3.1.2)", "lxml (>=4.9.2)", "matplotlib (>=3.6.3)", "numba (>=0.56.4)", "numexpr (>=2.8.4)", "odfpy (>=1.4.1)", "openpyxl (>=3.1.0)", "pandas-gbq (>=0.19.0)", "psycopg2 (>=2.9.6)", "pyarrow (>=10.0.1)", "pymysql (>=1.0.2)", "pyreadstat (>=1.2.0)", "pytest (>=7.3.2)", "pytest-xdist (>=2.2.0)", "python-calamine (>=0.1.7)", "pyxlsb (>=1.0.10)", "qtpy (>=2.3.0)", "s3fs (>=2022.11.0)", "scipy (>=1.10.0)", 
"tables (>=3.8.0)", "tabulate (>=0.9.0)", "xarray (>=2022.12.0)", "xlrd (>=2.0.1)", "xlsxwriter (>=3.0.5)", "zstandard (>=0.19.0)"] +aws = ["s3fs (>=2022.11.0)"] +clipboard = ["PyQt5 (>=5.15.9)", "qtpy (>=2.3.0)"] +compression = ["zstandard (>=0.19.0)"] +computation = ["scipy (>=1.10.0)", "xarray (>=2022.12.0)"] +consortium-standard = ["dataframe-api-compat (>=0.1.7)"] +excel = ["odfpy (>=1.4.1)", "openpyxl (>=3.1.0)", "python-calamine (>=0.1.7)", "pyxlsb (>=1.0.10)", "xlrd (>=2.0.1)", "xlsxwriter (>=3.0.5)"] +feather = ["pyarrow (>=10.0.1)"] +fss = ["fsspec (>=2022.11.0)"] +gcp = ["gcsfs (>=2022.11.0)", "pandas-gbq (>=0.19.0)"] +hdf5 = ["tables (>=3.8.0)"] +html = ["beautifulsoup4 (>=4.11.2)", "html5lib (>=1.1)", "lxml (>=4.9.2)"] +mysql = ["SQLAlchemy (>=2.0.0)", "pymysql (>=1.0.2)"] +output-formatting = ["jinja2 (>=3.1.2)", "tabulate (>=0.9.0)"] +parquet = ["pyarrow (>=10.0.1)"] +performance = ["bottleneck (>=1.3.6)", "numba (>=0.56.4)", "numexpr (>=2.8.4)"] +plot = ["matplotlib (>=3.6.3)"] +postgresql = ["SQLAlchemy (>=2.0.0)", "adbc-driver-postgresql (>=0.8.0)", "psycopg2 (>=2.9.6)"] +pyarrow = ["pyarrow (>=10.0.1)"] +spss = ["pyreadstat (>=1.2.0)"] +sql-other = ["SQLAlchemy (>=2.0.0)", "adbc-driver-postgresql (>=0.8.0)", "adbc-driver-sqlite (>=0.8.0)"] +test = ["hypothesis (>=6.46.1)", "pytest (>=7.3.2)", "pytest-xdist (>=2.2.0)"] +xml = ["lxml (>=4.9.2)"] + [[package]] name = "pg8000" version = "1.30.5" @@ -2543,7 +2697,7 @@ testing = ["google-api-core[grpc] (>=1.31.5)"] name = "protobuf" version = "4.25.3" description = "" -optional = true +optional = false python-versions = ">=3.8" files = [ {file = "protobuf-4.25.3-cp310-abi3-win32.whl", hash = "sha256:d4198877797a83cbfe9bffa3803602bbe1625dc30d8a097365dbc762e5790faa"}, @@ -2882,6 +3036,31 @@ dev = ["coverage[toml] (==5.0.4)", "cryptography (>=3.4.0)", "pre-commit", "pyte docs = ["sphinx (>=4.5.0,<5.0.0)", "sphinx-rtd-theme", "zope.interface"] tests = ["coverage[toml] (==5.0.4)", "pytest (>=6.0.0,<7.0.0)"] +[[package]] +name = "pymilvus" +version = "2.4.3" +description = "Python Sdk for Milvus" +optional = false +python-versions = ">=3.8" +files = [ + {file = "pymilvus-2.4.3-py3-none-any.whl", hash = "sha256:38239e89f8d739f665141d0b80908990b5f59681e889e135c234a4a45669a5c8"}, + {file = "pymilvus-2.4.3.tar.gz", hash = "sha256:703ac29296cdce03d6dc2aaebbe959e57745c141a94150e371dc36c61c226cc1"}, +] + +[package.dependencies] +environs = "<=9.5.0" +grpcio = ">=1.49.1,<=1.63.0" +milvus-lite = ">=2.4.0,<2.5.0" +pandas = ">=1.2.4" +protobuf = ">=3.20.0" +setuptools = ">=67" +ujson = ">=2.0.0" + +[package.extras] +bulk-writer = ["azure-storage-blob", "minio (>=7.0.0)", "pyarrow (>=12.0.0)", "requests"] +dev = ["black", "grpcio (==1.62.2)", "grpcio-testing (==1.62.2)", "grpcio-tools (==1.62.2)", "pytest (>=5.3.4)", "pytest-cov (>=2.8.1)", "pytest-timeout (>=1.3.4)", "ruff (>0.4.0)"] +model = ["milvus-model (>=0.1.0)"] + [[package]] name = "pymongo" version = "4.6.2" @@ -3187,6 +3366,20 @@ files = [ [package.dependencies] six = ">=1.5" +[[package]] +name = "python-dotenv" +version = "1.0.1" +description = "Read key-value pairs from a .env file and set them as environment variables" +optional = false +python-versions = ">=3.8" +files = [ + {file = "python-dotenv-1.0.1.tar.gz", hash = "sha256:e324ee90a023d808f1959c46bcbc04446a10ced277783dc6ee09987c37ec10ca"}, + {file = "python_dotenv-1.0.1-py3-none-any.whl", hash = "sha256:f7b63ef50f1b690dddf550d03497b66d609393b40b564ed0d674909a68ebf16a"}, +] + +[package.extras] +cli = ["click (>=5.0)"] + 
[[package]] name = "python-keycloak" version = "3.9.1" @@ -3211,7 +3404,7 @@ docs = ["Sphinx (>=6.1.0,<7.0.0)", "alabaster (>=0.7.12,<0.8.0)", "commonmark (> name = "pytz" version = "2024.1" description = "World timezone definitions, modern and historical" -optional = true +optional = false python-versions = "*" files = [ {file = "pytz-2024.1-py2.py3-none-any.whl", hash = "sha256:328171f4e3623139da4983451950b28e95ac706e13f3f2630a879749e7a8b319"}, @@ -3828,6 +4021,20 @@ postgresql-psycopgbinary = ["psycopg[binary] (>=3.0.7)"] pymysql = ["pymysql"] sqlcipher = ["sqlcipher3_binary"] +[[package]] +name = "sqlalchemy-cockroachdb" +version = "2.0.2" +description = "CockroachDB dialect for SQLAlchemy" +optional = false +python-versions = "*" +files = [ + {file = "sqlalchemy-cockroachdb-2.0.2.tar.gz", hash = "sha256:119756eb905855d6a11345b99cfe853031a3fe598a9c4bf35a8ddac9f89fe8cc"}, + {file = "sqlalchemy_cockroachdb-2.0.2-py3-none-any.whl", hash = "sha256:0d5d50e805b024cb2ccd85423a5c1a367d1a56a5cd0ea47765233fd47665070d"}, +] + +[package.dependencies] +SQLAlchemy = "*" + [[package]] name = "tenacity" version = "8.2.3" @@ -3950,6 +4157,93 @@ tzdata = {version = "*", markers = "platform_system == \"Windows\""} [package.extras] devenv = ["check-manifest", "pytest (>=4.3)", "pytest-cov", "pytest-mock (>=3.3)", "zest.releaser"] +[[package]] +name = "ujson" +version = "5.10.0" +description = "Ultra fast JSON encoder and decoder for Python" +optional = false +python-versions = ">=3.8" +files = [ + {file = "ujson-5.10.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:2601aa9ecdbee1118a1c2065323bda35e2c5a2cf0797ef4522d485f9d3ef65bd"}, + {file = "ujson-5.10.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:348898dd702fc1c4f1051bc3aacbf894caa0927fe2c53e68679c073375f732cf"}, + {file = "ujson-5.10.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:22cffecf73391e8abd65ef5f4e4dd523162a3399d5e84faa6aebbf9583df86d6"}, + {file = "ujson-5.10.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:26b0e2d2366543c1bb4fbd457446f00b0187a2bddf93148ac2da07a53fe51569"}, + {file = "ujson-5.10.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:caf270c6dba1be7a41125cd1e4fc7ba384bf564650beef0df2dd21a00b7f5770"}, + {file = "ujson-5.10.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:a245d59f2ffe750446292b0094244df163c3dc96b3ce152a2c837a44e7cda9d1"}, + {file = "ujson-5.10.0-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:94a87f6e151c5f483d7d54ceef83b45d3a9cca7a9cb453dbdbb3f5a6f64033f5"}, + {file = "ujson-5.10.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:29b443c4c0a113bcbb792c88bea67b675c7ca3ca80c3474784e08bba01c18d51"}, + {file = "ujson-5.10.0-cp310-cp310-win32.whl", hash = "sha256:c18610b9ccd2874950faf474692deee4223a994251bc0a083c114671b64e6518"}, + {file = "ujson-5.10.0-cp310-cp310-win_amd64.whl", hash = "sha256:924f7318c31874d6bb44d9ee1900167ca32aa9b69389b98ecbde34c1698a250f"}, + {file = "ujson-5.10.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:a5b366812c90e69d0f379a53648be10a5db38f9d4ad212b60af00bd4048d0f00"}, + {file = "ujson-5.10.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:502bf475781e8167f0f9d0e41cd32879d120a524b22358e7f205294224c71126"}, + {file = "ujson-5.10.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5b91b5d0d9d283e085e821651184a647699430705b15bf274c7896f23fe9c9d8"}, + {file = 
"ujson-5.10.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:129e39af3a6d85b9c26d5577169c21d53821d8cf68e079060602e861c6e5da1b"}, + {file = "ujson-5.10.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f77b74475c462cb8b88680471193064d3e715c7c6074b1c8c412cb526466efe9"}, + {file = "ujson-5.10.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:7ec0ca8c415e81aa4123501fee7f761abf4b7f386aad348501a26940beb1860f"}, + {file = "ujson-5.10.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:ab13a2a9e0b2865a6c6db9271f4b46af1c7476bfd51af1f64585e919b7c07fd4"}, + {file = "ujson-5.10.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:57aaf98b92d72fc70886b5a0e1a1ca52c2320377360341715dd3933a18e827b1"}, + {file = "ujson-5.10.0-cp311-cp311-win32.whl", hash = "sha256:2987713a490ceb27edff77fb184ed09acdc565db700ee852823c3dc3cffe455f"}, + {file = "ujson-5.10.0-cp311-cp311-win_amd64.whl", hash = "sha256:f00ea7e00447918ee0eff2422c4add4c5752b1b60e88fcb3c067d4a21049a720"}, + {file = "ujson-5.10.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:98ba15d8cbc481ce55695beee9f063189dce91a4b08bc1d03e7f0152cd4bbdd5"}, + {file = "ujson-5.10.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:a9d2edbf1556e4f56e50fab7d8ff993dbad7f54bac68eacdd27a8f55f433578e"}, + {file = "ujson-5.10.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6627029ae4f52d0e1a2451768c2c37c0c814ffc04f796eb36244cf16b8e57043"}, + {file = "ujson-5.10.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f8ccb77b3e40b151e20519c6ae6d89bfe3f4c14e8e210d910287f778368bb3d1"}, + {file = "ujson-5.10.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f3caf9cd64abfeb11a3b661329085c5e167abbe15256b3b68cb5d914ba7396f3"}, + {file = "ujson-5.10.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:6e32abdce572e3a8c3d02c886c704a38a1b015a1fb858004e03d20ca7cecbb21"}, + {file = "ujson-5.10.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:a65b6af4d903103ee7b6f4f5b85f1bfd0c90ba4eeac6421aae436c9988aa64a2"}, + {file = "ujson-5.10.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:604a046d966457b6cdcacc5aa2ec5314f0e8c42bae52842c1e6fa02ea4bda42e"}, + {file = "ujson-5.10.0-cp312-cp312-win32.whl", hash = "sha256:6dea1c8b4fc921bf78a8ff00bbd2bfe166345f5536c510671bccececb187c80e"}, + {file = "ujson-5.10.0-cp312-cp312-win_amd64.whl", hash = "sha256:38665e7d8290188b1e0d57d584eb8110951a9591363316dd41cf8686ab1d0abc"}, + {file = "ujson-5.10.0-cp313-cp313-macosx_10_9_x86_64.whl", hash = "sha256:618efd84dc1acbd6bff8eaa736bb6c074bfa8b8a98f55b61c38d4ca2c1f7f287"}, + {file = "ujson-5.10.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:38d5d36b4aedfe81dfe251f76c0467399d575d1395a1755de391e58985ab1c2e"}, + {file = "ujson-5.10.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:67079b1f9fb29ed9a2914acf4ef6c02844b3153913eb735d4bf287ee1db6e557"}, + {file = "ujson-5.10.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d7d0e0ceeb8fe2468c70ec0c37b439dd554e2aa539a8a56365fd761edb418988"}, + {file = "ujson-5.10.0-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:59e02cd37bc7c44d587a0ba45347cc815fb7a5fe48de16bf05caa5f7d0d2e816"}, + {file = "ujson-5.10.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = 
"sha256:2a890b706b64e0065f02577bf6d8ca3b66c11a5e81fb75d757233a38c07a1f20"}, + {file = "ujson-5.10.0-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:621e34b4632c740ecb491efc7f1fcb4f74b48ddb55e65221995e74e2d00bbff0"}, + {file = "ujson-5.10.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:b9500e61fce0cfc86168b248104e954fead61f9be213087153d272e817ec7b4f"}, + {file = "ujson-5.10.0-cp313-cp313-win32.whl", hash = "sha256:4c4fc16f11ac1612f05b6f5781b384716719547e142cfd67b65d035bd85af165"}, + {file = "ujson-5.10.0-cp313-cp313-win_amd64.whl", hash = "sha256:4573fd1695932d4f619928fd09d5d03d917274381649ade4328091ceca175539"}, + {file = "ujson-5.10.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:a984a3131da7f07563057db1c3020b1350a3e27a8ec46ccbfbf21e5928a43050"}, + {file = "ujson-5.10.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:73814cd1b9db6fc3270e9d8fe3b19f9f89e78ee9d71e8bd6c9a626aeaeaf16bd"}, + {file = "ujson-5.10.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:61e1591ed9376e5eddda202ec229eddc56c612b61ac6ad07f96b91460bb6c2fb"}, + {file = "ujson-5.10.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d2c75269f8205b2690db4572a4a36fe47cd1338e4368bc73a7a0e48789e2e35a"}, + {file = "ujson-5.10.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7223f41e5bf1f919cd8d073e35b229295aa8e0f7b5de07ed1c8fddac63a6bc5d"}, + {file = "ujson-5.10.0-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:d4dc2fd6b3067c0782e7002ac3b38cf48608ee6366ff176bbd02cf969c9c20fe"}, + {file = "ujson-5.10.0-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:232cc85f8ee3c454c115455195a205074a56ff42608fd6b942aa4c378ac14dd7"}, + {file = "ujson-5.10.0-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:cc6139531f13148055d691e442e4bc6601f6dba1e6d521b1585d4788ab0bfad4"}, + {file = "ujson-5.10.0-cp38-cp38-win32.whl", hash = "sha256:e7ce306a42b6b93ca47ac4a3b96683ca554f6d35dd8adc5acfcd55096c8dfcb8"}, + {file = "ujson-5.10.0-cp38-cp38-win_amd64.whl", hash = "sha256:e82d4bb2138ab05e18f089a83b6564fee28048771eb63cdecf4b9b549de8a2cc"}, + {file = "ujson-5.10.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:dfef2814c6b3291c3c5f10065f745a1307d86019dbd7ea50e83504950136ed5b"}, + {file = "ujson-5.10.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:4734ee0745d5928d0ba3a213647f1c4a74a2a28edc6d27b2d6d5bd9fa4319e27"}, + {file = "ujson-5.10.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d47ebb01bd865fdea43da56254a3930a413f0c5590372a1241514abae8aa7c76"}, + {file = "ujson-5.10.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dee5e97c2496874acbf1d3e37b521dd1f307349ed955e62d1d2f05382bc36dd5"}, + {file = "ujson-5.10.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7490655a2272a2d0b072ef16b0b58ee462f4973a8f6bbe64917ce5e0a256f9c0"}, + {file = "ujson-5.10.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:ba17799fcddaddf5c1f75a4ba3fd6441f6a4f1e9173f8a786b42450851bd74f1"}, + {file = "ujson-5.10.0-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:2aff2985cef314f21d0fecc56027505804bc78802c0121343874741650a4d3d1"}, + {file = "ujson-5.10.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:ad88ac75c432674d05b61184178635d44901eb749786c8eb08c102330e6e8996"}, + {file = "ujson-5.10.0-cp39-cp39-win32.whl", hash = "sha256:2544912a71da4ff8c4f7ab5606f947d7299971bdd25a45e008e467ca638d13c9"}, + {file = 
"ujson-5.10.0-cp39-cp39-win_amd64.whl", hash = "sha256:3ff201d62b1b177a46f113bb43ad300b424b7847f9c5d38b1b4ad8f75d4a282a"}, + {file = "ujson-5.10.0-pp310-pypy310_pp73-macosx_10_9_x86_64.whl", hash = "sha256:5b6fee72fa77dc172a28f21693f64d93166534c263adb3f96c413ccc85ef6e64"}, + {file = "ujson-5.10.0-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:61d0af13a9af01d9f26d2331ce49bb5ac1fb9c814964018ac8df605b5422dcb3"}, + {file = "ujson-5.10.0-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ecb24f0bdd899d368b715c9e6664166cf694d1e57be73f17759573a6986dd95a"}, + {file = "ujson-5.10.0-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fbd8fd427f57a03cff3ad6574b5e299131585d9727c8c366da4624a9069ed746"}, + {file = "ujson-5.10.0-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:beeaf1c48e32f07d8820c705ff8e645f8afa690cca1544adba4ebfa067efdc88"}, + {file = "ujson-5.10.0-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:baed37ea46d756aca2955e99525cc02d9181de67f25515c468856c38d52b5f3b"}, + {file = "ujson-5.10.0-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:7663960f08cd5a2bb152f5ee3992e1af7690a64c0e26d31ba7b3ff5b2ee66337"}, + {file = "ujson-5.10.0-pp38-pypy38_pp73-macosx_11_0_arm64.whl", hash = "sha256:d8640fb4072d36b08e95a3a380ba65779d356b2fee8696afeb7794cf0902d0a1"}, + {file = "ujson-5.10.0-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:78778a3aa7aafb11e7ddca4e29f46bc5139131037ad628cc10936764282d6753"}, + {file = "ujson-5.10.0-pp38-pypy38_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b0111b27f2d5c820e7f2dbad7d48e3338c824e7ac4d2a12da3dc6061cc39c8e6"}, + {file = "ujson-5.10.0-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:c66962ca7565605b355a9ed478292da628b8f18c0f2793021ca4425abf8b01e5"}, + {file = "ujson-5.10.0-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:ba43cc34cce49cf2d4bc76401a754a81202d8aa926d0e2b79f0ee258cb15d3a4"}, + {file = "ujson-5.10.0-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:ac56eb983edce27e7f51d05bc8dd820586c6e6be1c5216a6809b0c668bb312b8"}, + {file = "ujson-5.10.0-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f44bd4b23a0e723bf8b10628288c2c7c335161d6840013d4d5de20e48551773b"}, + {file = "ujson-5.10.0-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7c10f4654e5326ec14a46bcdeb2b685d4ada6911050aa8baaf3501e57024b804"}, + {file = "ujson-5.10.0-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0de4971a89a762398006e844ae394bd46991f7c385d7a6a3b93ba229e6dac17e"}, + {file = "ujson-5.10.0-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:e1402f0564a97d2a52310ae10a64d25bcef94f8dd643fcf5d310219d915484f7"}, + {file = "ujson-5.10.0.tar.gz", hash = "sha256:b3cd8f3c5d8c7738257f1018880444f7b7d9b66232c64649f562d7ba86ad4bc1"}, +] + [[package]] name = "urllib3" version = "1.26.18" @@ -4182,7 +4476,10 @@ azurite = ["azure-storage-blob"] cassandra = [] chroma = ["chromadb-client"] clickhouse = ["clickhouse-driver"] +cockroachdb = [] +cosmosdb = ["azure-cosmos"] elasticsearch = [] +generic = ["httpx"] google = ["google-cloud-datastore", "google-cloud-pubsub"] influxdb = ["influxdb", "influxdb-client"] k3s = ["kubernetes", "pyyaml"] @@ -4190,13 +4487,16 @@ kafka = [] keycloak = ["python-keycloak"] localstack = ["boto3"] memcached = 
[] +milvus = [] minio = ["minio"] mongodb = ["pymongo"] +mqtt = [] mssql = ["pymssql", "sqlalchemy"] mysql = ["pymysql", "sqlalchemy"] nats = ["nats-py"] neo4j = ["neo4j"] nginx = [] +ollama = [] opensearch = ["opensearch-py"] oracle = ["oracledb", "sqlalchemy"] oracle-free = ["oracledb", "sqlalchemy"] @@ -4206,10 +4506,11 @@ rabbitmq = ["pika"] redis = ["redis"] registry = ["bcrypt"] selenium = ["selenium"] +testmoduleimport = ["httpx"] vault = [] weaviate = ["weaviate-client"] [metadata] lock-version = "2.0" python-versions = ">=3.9,<4.0" -content-hash = "95a2e0ef23d8dfb1cbc74d72f534028aeff5da8bc26cc194f464f6fe282ba38f" +content-hash = "2b87af7b69af2cc83f8198ab0fcfef7ceaf8411a8300c4ca72c0521e5d966445" diff --git a/pyproject.toml b/pyproject.toml index cbf650bea..c7a398d7b 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,6 +1,6 @@ [tool.poetry] name = "testcontainers" -version = "4.5.0" # auto-incremented by release-please +version = "4.6.0" # auto-incremented by release-please description = "Python library for throwaway instances of anything that can run in a Docker container" authors = ["Sergey Pirogov "] maintainers = [ @@ -33,7 +33,11 @@ packages = [ { include = "testcontainers", from = "modules/cassandra" }, { include = "testcontainers", from = "modules/chroma" }, { include = "testcontainers", from = "modules/clickhouse" }, + { include = "testcontainers", from = "modules/cockroachdb" }, + { include = "testcontainers", from = "modules/cosmosdb" }, { include = "testcontainers", from = "modules/elasticsearch" }, + { include = "testcontainers", from = "modules/generic" }, + { include = "testcontainers", from = "modules/testmoduleimport"}, { include = "testcontainers", from = "modules/google" }, { include = "testcontainers", from = "modules/influxdb" }, { include = "testcontainers", from = "modules/k3s" }, @@ -42,12 +46,15 @@ packages = [ { include = "testcontainers", from = "modules/localstack" }, { include = "testcontainers", from = "modules/memcached" }, { include = "testcontainers", from = "modules/minio" }, + { include = "testcontainers", from = "modules/milvus" }, { include = "testcontainers", from = "modules/mongodb" }, + { include = "testcontainers", from = "modules/mqtt" }, { include = "testcontainers", from = "modules/mssql" }, { include = "testcontainers", from = "modules/mysql" }, { include = "testcontainers", from = "modules/nats" }, { include = "testcontainers", from = "modules/neo4j" }, { include = "testcontainers", from = "modules/nginx" }, + { include = "testcontainers", from = "modules/ollama" }, { include = "testcontainers", from = "modules/opensearch" }, { include = "testcontainers", from = "modules/oracle-free" }, { include = "testcontainers", from = "modules/postgres" }, @@ -57,7 +64,7 @@ packages = [ { include = "testcontainers", from = "modules/registry" }, { include = "testcontainers", from = "modules/selenium" }, { include = "testcontainers", from = "modules/vault" }, - { include = "testcontainers", from = "modules/weaviate" } + { include = "testcontainers", from = "modules/weaviate" }, ] [tool.poetry.urls] @@ -99,13 +106,19 @@ weaviate-client = { version = "^4.5.4", optional = true } chromadb-client = { version = "*", optional = true } qdrant-client = { version = "*", optional = true } bcrypt = { version = "*", optional = true } +httpx = { version = "*", optional = true } +azure-cosmos = { version = "*", optional = true } [tool.poetry.extras] arangodb = ["python-arango"] azurite = ["azure-storage-blob"] cassandra = [] clickhouse = ["clickhouse-driver"] 
+cosmosdb = ["azure-cosmos"] +cockroachdb = [] elasticsearch = [] +generic = ["httpx"] +testmoduleimport = ["httpx"] google = ["google-cloud-pubsub", "google-cloud-datastore"] influxdb = ["influxdb", "influxdb-client"] k3s = ["kubernetes", "pyyaml"] @@ -114,13 +127,16 @@ keycloak = ["python-keycloak"] localstack = ["boto3"] memcached = [] minio = ["minio"] +milvus = [] mongodb = ["pymongo"] +mqtt = [] mssql = ["sqlalchemy", "pymssql"] mysql = ["sqlalchemy", "pymysql"] nats = ["nats-py"] neo4j = ["neo4j"] nginx = [] opensearch = ["opensearch-py"] +ollama = [] oracle = ["sqlalchemy", "oracledb"] oracle-free = ["sqlalchemy", "oracledb"] postgres = [] @@ -138,18 +154,22 @@ mypy = "1.7.1" pre-commit = "^3.6" pytest = "7.4.3" pytest-cov = "4.1.0" -sphinx = "^7.2.6" -twine = "^4.0.2" -anyio = "^4.3.0" +sphinx = "7.2.6" +twine = "4.0.2" +anyio = "4.3.0" # for tests only -psycopg2-binary = "*" -pg8000 = "*" -sqlalchemy = "*" -psycopg = "*" -cassandra-driver = "*" +psycopg2-binary = "2.9.9" +pg8000 = "1.30.5" +sqlalchemy = "2.0.28" +psycopg = "3.1.18" +cassandra-driver = "3.29.1" pytest-asyncio = "0.23.5" kafka-python-ng = "^2.2.0" -hvac = "*" +hvac = "2.1.0" +pymilvus = "2.4.3" +httpx = "0.27.0" +paho-mqtt = "2.1.0" +sqlalchemy-cockroachdb = "2.0.2" [[tool.poetry.source]] name = "PyPI" @@ -159,7 +179,7 @@ priority = "primary" line-length = 120 [tool.pytest.ini_options] -addopts = "--cov-report=term --cov-report=html --tb=short --strict-markers" +addopts = "--tb=short --strict-markers" log_cli = true log_cli_level = "INFO" @@ -262,6 +282,7 @@ mypy_path = [ # "modules/mysql", # "modules/neo4j", # "modules/nginx", +# "modules/ollama", # "modules/opensearch", # "modules/oracle", # "modules/postgres",