From d5e9116e79bad6b46817aac2ec57e4cfc4e3061f Mon Sep 17 00:00:00 2001 From: Anton Dubovik Date: Fri, 8 Nov 2024 11:18:00 +0000 Subject: [PATCH 1/9] feat: introduced ChatCompletionResponse helper classes --- .../utils/chat_completion_response.py | 43 ++ aidial_adapter_openai/utils/merge_chunks.py | 29 - aidial_adapter_openai/utils/streaming.py | 38 +- poetry.lock | 573 +----------------- pyproject.toml | 4 +- 5 files changed, 76 insertions(+), 611 deletions(-) create mode 100644 aidial_adapter_openai/utils/chat_completion_response.py delete mode 100644 aidial_adapter_openai/utils/merge_chunks.py diff --git a/aidial_adapter_openai/utils/chat_completion_response.py b/aidial_adapter_openai/utils/chat_completion_response.py new file mode 100644 index 0000000..310278c --- /dev/null +++ b/aidial_adapter_openai/utils/chat_completion_response.py @@ -0,0 +1,43 @@ +from typing import Any, Iterable, Literal, Self + +from aidial_sdk.utils.merge_chunks import merge_chat_completion_chunks +from pydantic import BaseModel + + +class ChatCompletionResponse(BaseModel): + message_key: Literal["delta", "message"] + resp: dict = {} + + @property + def usage(self) -> Any | None: + return self.resp.get("usage") + + @property + def is_empty(self) -> bool: + return bool(self.resp) + + @property + def finish_reasons(self) -> Iterable[Any]: + for choice in self.resp.get("choices") or []: + if (reason := choice.get("finish_reason")) is not None: + yield reason + + @property + def messages(self) -> Iterable[Any]: + for choice in self.resp.get("choices") or []: + if (message := choice.get(self.message_key)) is not None: + yield message + + +class ChatCompletionBlock(ChatCompletionResponse): + def __init__(self): + super().__init__(message_key="message") + + +class ChatCompletionStreamingChunk(ChatCompletionResponse): + def __init__(self): + super().__init__(message_key="delta") + + def merge(self, chunk: dict) -> Self: + self.resp = merge_chat_completion_chunks(self.resp, chunk) + return self diff --git a/aidial_adapter_openai/utils/merge_chunks.py b/aidial_adapter_openai/utils/merge_chunks.py deleted file mode 100644 index fc5119c..0000000 --- a/aidial_adapter_openai/utils/merge_chunks.py +++ /dev/null @@ -1,29 +0,0 @@ -from typing import TypeVar - -from aidial_sdk.utils.merge_chunks import merge - -_Chunk = TypeVar("_Chunk", bound=dict) - - -def merge_chunks(*chunks: _Chunk) -> _Chunk: - """ - The recursive merging procedure that avoids merging top-level atomic fields - (e.g. "id", "created", "model", "object", "system_fingerprint") and - instead chooses an _override_ merging strategy for such fields. - Non-atomic fields (e.g. "choice", "usage") are merged following - the standard recursive merging procedure. 
- """ - - assert len(chunks) > 0, "At least one chunk must be provided" - - target = chunks[0] - - for chunk in chunks[1:]: - source = chunk.copy() - for key, value in list(source.items()): - if not isinstance(value, (list, dict)) and value is not None: - target[key] = value - del source[key] - target = merge(target, source) - - return target diff --git a/aidial_adapter_openai/utils/streaming.py b/aidial_adapter_openai/utils/streaming.py index 038df0b..eea80d7 100644 --- a/aidial_adapter_openai/utils/streaming.py +++ b/aidial_adapter_openai/utils/streaming.py @@ -4,14 +4,17 @@ from uuid import uuid4 from aidial_sdk.exceptions import HTTPException as DialException +from aidial_sdk.utils.merge_chunks import merge_chat_completion_chunks from fastapi.responses import JSONResponse, Response, StreamingResponse from openai import APIError, APIStatusError from openai.types.chat.chat_completion_chunk import ChatCompletionChunk from pydantic import BaseModel from aidial_adapter_openai.env import get_eliminate_empty_choices +from aidial_adapter_openai.utils.chat_completion_response import ( + ChatCompletionStreamingChunk, +) from aidial_adapter_openai.utils.log_config import logger -from aidial_adapter_openai.utils.merge_chunks import merge_chunks from aidial_adapter_openai.utils.sse_stream import to_openai_sse_stream ELIMINATE_EMPTY_CHOICES = get_eliminate_empty_choices() @@ -91,34 +94,22 @@ def set_discarded_messages(chunk: dict | None, indices: list[int]) -> dict: chunk["statistics"] = {"discarded_messages": indices} return chunk - n_chunks = 0 last_chunk = None buffer_chunk = None + snapshot = ChatCompletionStreamingChunk() - completions: dict[int, str] = {} - found_finish_reason = False - found_usage = False error = None try: async for chunk in stream: - n_chunks += 1 + snapshot.merge(chunk) if buffer_chunk is not None: - chunk = merge_chunks(buffer_chunk, chunk) + chunk = merge_chat_completion_chunks(chunk, buffer_chunk) buffer_chunk = None choices = chunk.get("choices") or [] - for choice in choices: - index = choice["index"] - content = (choice.get("delta") or {}).get("content") or "" - - completions[index] = completions.get(index, "") + content - found_finish_reason |= bool(choice.get("finish_reason")) - - found_usage |= bool(chunk.get("usage")) - # Azure OpenAI returns an empty list of choices as a first chunk # when content filtering is enabled for a corresponding deployment. # The safety rating of the request is reported in this first chunk. 
@@ -141,16 +132,19 @@ def set_discarded_messages(chunk: dict | None, indices: list[int]) -> dict: ).json_error() if last_chunk is not None and buffer_chunk is not None: - last_chunk = merge_chunks(buffer_chunk, last_chunk) + last_chunk = merge_chat_completion_chunks(last_chunk, buffer_chunk) if discarded_messages is not None: last_chunk = set_discarded_messages(last_chunk, discarded_messages) - if not found_usage and (not error or completions): - last_chunk = set_usage(last_chunk, completions.values()) + completions = [msg.get("content") or "" for msg in snapshot.messages] + found_finish_reason = any(True for _ in snapshot.finish_reasons) + + if snapshot.usage is None and (not error or completions): + last_chunk = set_usage(last_chunk, completions) if not error: - if n_chunks == 0: + if snapshot.is_empty: logger.warning("Received 0 chunks") elif not found_finish_reason: logger.warning("Didn't receive chunk with the finish reason") @@ -158,8 +152,8 @@ def set_discarded_messages(chunk: dict | None, indices: list[int]) -> dict: if not found_finish_reason: last_chunk = set_finish_reason(last_chunk, "length") - if not found_usage: - last_chunk = set_usage(last_chunk, completions.values()) + if snapshot.usage is None: + last_chunk = set_usage(last_chunk, completions) if last_chunk: yield last_chunk diff --git a/poetry.lock b/poetry.lock index f2fc0ba..412edfa 100644 --- a/poetry.lock +++ b/poetry.lock @@ -2,41 +2,31 @@ [[package]] name = "aidial-sdk" -version = "0.13.0" +version = "0.15.0rc" description = "Framework to create applications and model adapters for AI DIAL" optional = false -python-versions = "<4.0,>=3.8.1" -files = [ - {file = "aidial_sdk-0.13.0-py3-none-any.whl", hash = "sha256:35784f12367e43f4540d67bab7b18315832e313517e02e969068d7ff2de3d69e"}, - {file = "aidial_sdk-0.13.0.tar.gz", hash = "sha256:c895c22d95d1c1954e170ebda3f5010e80cd47ed8b7225d375d1da01f67962e5"}, -] +python-versions = ">=3.8.1,<4.0" +files = [] +develop = false [package.dependencies] -aiohttp = ">=3.8.3,<4.0.0" +aiohttp = "^3.8.3" fastapi = ">=0.51,<1.0" httpx = ">=0.25.0,<1.0" -opentelemetry-api = {version = "1.20.0", optional = true, markers = "extra == \"telemetry\""} -opentelemetry-distro = {version = "0.41b0", optional = true, markers = "extra == \"telemetry\""} -opentelemetry-exporter-otlp-proto-grpc = {version = "1.20.0", optional = true, markers = "extra == \"telemetry\""} -opentelemetry-exporter-prometheus = {version = "0.41b0", optional = true, markers = "extra == \"telemetry\""} -opentelemetry-instrumentation = {version = "0.41b0", optional = true, markers = "extra == \"telemetry\""} -opentelemetry-instrumentation-aiohttp-client = {version = "0.41b0", optional = true, markers = "extra == \"telemetry\""} -opentelemetry-instrumentation-fastapi = {version = "0.41b0", optional = true, markers = "extra == \"telemetry\""} -opentelemetry-instrumentation-httpx = {version = "0.41b0", optional = true, markers = "extra == \"telemetry\""} -opentelemetry-instrumentation-logging = {version = "0.41b0", optional = true, markers = "extra == \"telemetry\""} -opentelemetry-instrumentation-requests = {version = "0.41b0", optional = true, markers = "extra == \"telemetry\""} -opentelemetry-instrumentation-system-metrics = {version = "0.41b0", optional = true, markers = "extra == \"telemetry\""} -opentelemetry-instrumentation-urllib = {version = "0.41b0", optional = true, markers = "extra == \"telemetry\""} -opentelemetry-sdk = {version = "1.20.0", optional = true, markers = "extra == \"telemetry\""} -prometheus-client = 
{version = "0.17.1", optional = true, markers = "extra == \"telemetry\""} pydantic = ">=1.10,<3" -requests = ">=2.19,<3.0" +requests = "^2.19" uvicorn = ">=0.19,<1.0" -wrapt = ">=1.14,<2.0" +wrapt = "^1.14" [package.extras] telemetry = ["opentelemetry-api (==1.20.0)", "opentelemetry-distro (==0.41b0)", "opentelemetry-exporter-otlp-proto-grpc (==1.20.0)", "opentelemetry-exporter-prometheus (==0.41b0)", "opentelemetry-instrumentation (==0.41b0)", "opentelemetry-instrumentation-aiohttp-client (==0.41b0)", "opentelemetry-instrumentation-fastapi (==0.41b0)", "opentelemetry-instrumentation-httpx (==0.41b0)", "opentelemetry-instrumentation-logging (==0.41b0)", "opentelemetry-instrumentation-requests (==0.41b0)", "opentelemetry-instrumentation-system-metrics (==0.41b0)", "opentelemetry-instrumentation-urllib (==0.41b0)", "opentelemetry-sdk (==1.20.0)", "prometheus-client (==0.17.1)"] +[package.source] +type = "git" +url = "https://github.com/epam/ai-dial-sdk.git" +reference = "fix/fixed-sharing-issues-in-merge-chunks-algo" +resolved_reference = "df41a7c26b5f8413705b1736b59e1f6e04b68eef" + [[package]] name = "aiohappyeyeballs" version = "2.3.5" @@ -192,20 +182,6 @@ files = [ [package.extras] test = ["coverage", "mypy", "pexpect", "ruff", "wheel"] -[[package]] -name = "asgiref" -version = "3.8.1" -description = "ASGI specs, helper code, and adapters" -optional = false -python-versions = ">=3.8" -files = [ - {file = "asgiref-3.8.1-py3-none-any.whl", hash = "sha256:3e1e3ecc849832fe52ccf2cb6686b7a55f82bb1d6aee72a58826471390335e47"}, - {file = "asgiref-3.8.1.tar.gz", hash = "sha256:c343bd80a0bec947a9860adb4c432ffa7db769836c64238fc34bdc3fec84d590"}, -] - -[package.extras] -tests = ["mypy (>=0.800)", "pytest", "pytest-asyncio"] - [[package]] name = "attrs" version = "23.1.0" @@ -274,17 +250,6 @@ cryptography = ">=2.5" msal = ">=1.24.0" msal-extensions = ">=0.3.0" -[[package]] -name = "backoff" -version = "2.2.1" -description = "Function decoration for backoff and retry" -optional = false -python-versions = ">=3.7,<4.0" -files = [ - {file = "backoff-2.2.1-py3-none-any.whl", hash = "sha256:63579f9a0628e06278f7e47b7d7d5b6ce20dc65c5e96a6f3ca99a6adca0396e8"}, - {file = "backoff-2.2.1.tar.gz", hash = "sha256:03f829f5bb1923180821643f8753b0502c3b682293992485b0eef2807afa5cba"}, -] - [[package]] name = "black" version = "24.3.0" @@ -594,23 +559,6 @@ ssh = ["bcrypt (>=3.1.5)"] test = ["certifi", "cryptography-vectors (==43.0.1)", "pretend", "pytest (>=6.2.0)", "pytest-benchmark", "pytest-cov", "pytest-xdist"] test-randomorder = ["pytest-randomly"] -[[package]] -name = "deprecated" -version = "1.2.14" -description = "Python @deprecated decorator to deprecate old python classes, functions or methods." 
-optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" -files = [ - {file = "Deprecated-1.2.14-py2.py3-none-any.whl", hash = "sha256:6fac8b097794a90302bdbb17b9b815e732d3c4720583ff1b198499d78470466c"}, - {file = "Deprecated-1.2.14.tar.gz", hash = "sha256:e5323eb936458dccc2582dc6f9c322c852a775a27065ff2b0c4970b9d53d01b3"}, -] - -[package.dependencies] -wrapt = ">=1.10,<2" - -[package.extras] -dev = ["PyTest", "PyTest-Cov", "bump2version (<1)", "sphinx (<2)", "tox"] - [[package]] name = "distlib" version = "0.3.7" @@ -755,81 +703,6 @@ files = [ {file = "frozenlist-1.4.0.tar.gz", hash = "sha256:09163bdf0b2907454042edb19f887c6d33806adc71fbd54afc14908bfdc22251"}, ] -[[package]] -name = "googleapis-common-protos" -version = "1.63.2" -description = "Common protobufs used in Google APIs" -optional = false -python-versions = ">=3.7" -files = [ - {file = "googleapis-common-protos-1.63.2.tar.gz", hash = "sha256:27c5abdffc4911f28101e635de1533fb4cfd2c37fbaa9174587c799fac90aa87"}, - {file = "googleapis_common_protos-1.63.2-py2.py3-none-any.whl", hash = "sha256:27a2499c7e8aff199665b22741997e485eccc8645aa9176c7c988e6fae507945"}, -] - -[package.dependencies] -protobuf = ">=3.20.2,<4.21.1 || >4.21.1,<4.21.2 || >4.21.2,<4.21.3 || >4.21.3,<4.21.4 || >4.21.4,<4.21.5 || >4.21.5,<6.0.0.dev0" - -[package.extras] -grpc = ["grpcio (>=1.44.0,<2.0.0.dev0)"] - -[[package]] -name = "grpcio" -version = "1.65.4" -description = "HTTP/2-based RPC framework" -optional = false -python-versions = ">=3.8" -files = [ - {file = "grpcio-1.65.4-cp310-cp310-linux_armv7l.whl", hash = "sha256:0e85c8766cf7f004ab01aff6a0393935a30d84388fa3c58d77849fcf27f3e98c"}, - {file = "grpcio-1.65.4-cp310-cp310-macosx_12_0_universal2.whl", hash = "sha256:e4a795c02405c7dfa8affd98c14d980f4acea16ea3b539e7404c645329460e5a"}, - {file = "grpcio-1.65.4-cp310-cp310-manylinux_2_17_aarch64.whl", hash = "sha256:d7b984a8dd975d949c2042b9b5ebcf297d6d5af57dcd47f946849ee15d3c2fb8"}, - {file = "grpcio-1.65.4-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:644a783ce604a7d7c91412bd51cf9418b942cf71896344b6dc8d55713c71ce82"}, - {file = "grpcio-1.65.4-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5764237d751d3031a36fafd57eb7d36fd2c10c658d2b4057c516ccf114849a3e"}, - {file = "grpcio-1.65.4-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:ee40d058cf20e1dd4cacec9c39e9bce13fedd38ce32f9ba00f639464fcb757de"}, - {file = "grpcio-1.65.4-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:4482a44ce7cf577a1f8082e807a5b909236bce35b3e3897f839f2fbd9ae6982d"}, - {file = "grpcio-1.65.4-cp310-cp310-win32.whl", hash = "sha256:66bb051881c84aa82e4f22d8ebc9d1704b2e35d7867757f0740c6ef7b902f9b1"}, - {file = "grpcio-1.65.4-cp310-cp310-win_amd64.whl", hash = "sha256:870370524eff3144304da4d1bbe901d39bdd24f858ce849b7197e530c8c8f2ec"}, - {file = "grpcio-1.65.4-cp311-cp311-linux_armv7l.whl", hash = "sha256:85e9c69378af02e483bc626fc19a218451b24a402bdf44c7531e4c9253fb49ef"}, - {file = "grpcio-1.65.4-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:2bd672e005afab8bf0d6aad5ad659e72a06dd713020554182a66d7c0c8f47e18"}, - {file = "grpcio-1.65.4-cp311-cp311-manylinux_2_17_aarch64.whl", hash = "sha256:abccc5d73f5988e8f512eb29341ed9ced923b586bb72e785f265131c160231d8"}, - {file = "grpcio-1.65.4-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:886b45b29f3793b0c2576201947258782d7e54a218fe15d4a0468d9a6e00ce17"}, - {file = "grpcio-1.65.4-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:be952436571dacc93ccc7796db06b7daf37b3b56bb97e3420e6503dccfe2f1b4"}, - {file = "grpcio-1.65.4-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:8dc9ddc4603ec43f6238a5c95400c9a901b6d079feb824e890623da7194ff11e"}, - {file = "grpcio-1.65.4-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:ade1256c98cba5a333ef54636095f2c09e6882c35f76acb04412f3b1aa3c29a5"}, - {file = "grpcio-1.65.4-cp311-cp311-win32.whl", hash = "sha256:280e93356fba6058cbbfc6f91a18e958062ef1bdaf5b1caf46c615ba1ae71b5b"}, - {file = "grpcio-1.65.4-cp311-cp311-win_amd64.whl", hash = "sha256:d2b819f9ee27ed4e3e737a4f3920e337e00bc53f9e254377dd26fc7027c4d558"}, - {file = "grpcio-1.65.4-cp312-cp312-linux_armv7l.whl", hash = "sha256:926a0750a5e6fb002542e80f7fa6cab8b1a2ce5513a1c24641da33e088ca4c56"}, - {file = "grpcio-1.65.4-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:2a1d4c84d9e657f72bfbab8bedf31bdfc6bfc4a1efb10b8f2d28241efabfaaf2"}, - {file = "grpcio-1.65.4-cp312-cp312-manylinux_2_17_aarch64.whl", hash = "sha256:17de4fda50967679677712eec0a5c13e8904b76ec90ac845d83386b65da0ae1e"}, - {file = "grpcio-1.65.4-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3dee50c1b69754a4228e933696408ea87f7e896e8d9797a3ed2aeed8dbd04b74"}, - {file = "grpcio-1.65.4-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:74c34fc7562bdd169b77966068434a93040bfca990e235f7a67cdf26e1bd5c63"}, - {file = "grpcio-1.65.4-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:24a2246e80a059b9eb981e4c2a6d8111b1b5e03a44421adbf2736cc1d4988a8a"}, - {file = "grpcio-1.65.4-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:18c10f0d054d2dce34dd15855fcca7cc44ec3b811139437543226776730c0f28"}, - {file = "grpcio-1.65.4-cp312-cp312-win32.whl", hash = "sha256:d72962788b6c22ddbcdb70b10c11fbb37d60ae598c51eb47ec019db66ccfdff0"}, - {file = "grpcio-1.65.4-cp312-cp312-win_amd64.whl", hash = "sha256:7656376821fed8c89e68206a522522317787a3d9ed66fb5110b1dff736a5e416"}, - {file = "grpcio-1.65.4-cp38-cp38-linux_armv7l.whl", hash = "sha256:4934077b33aa6fe0b451de8b71dabde96bf2d9b4cb2b3187be86e5adebcba021"}, - {file = "grpcio-1.65.4-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:0cef8c919a3359847c357cb4314e50ed1f0cca070f828ee8f878d362fd744d52"}, - {file = "grpcio-1.65.4-cp38-cp38-manylinux_2_17_aarch64.whl", hash = "sha256:a925446e6aa12ca37114840d8550f308e29026cdc423a73da3043fd1603a6385"}, - {file = "grpcio-1.65.4-cp38-cp38-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:cf53e6247f1e2af93657e62e240e4f12e11ee0b9cef4ddcb37eab03d501ca864"}, - {file = "grpcio-1.65.4-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cdb34278e4ceb224c89704cd23db0d902e5e3c1c9687ec9d7c5bb4c150f86816"}, - {file = "grpcio-1.65.4-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:e6cbdd107e56bde55c565da5fd16f08e1b4e9b0674851d7749e7f32d8645f524"}, - {file = "grpcio-1.65.4-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:626319a156b1f19513156a3b0dbfe977f5f93db63ca673a0703238ebd40670d7"}, - {file = "grpcio-1.65.4-cp38-cp38-win32.whl", hash = "sha256:3d1bbf7e1dd1096378bd83c83f554d3b93819b91161deaf63e03b7022a85224a"}, - {file = "grpcio-1.65.4-cp38-cp38-win_amd64.whl", hash = "sha256:a99e6dffefd3027b438116f33ed1261c8d360f0dd4f943cb44541a2782eba72f"}, - {file = "grpcio-1.65.4-cp39-cp39-linux_armv7l.whl", hash = "sha256:874acd010e60a2ec1e30d5e505b0651ab12eb968157cd244f852b27c6dbed733"}, - {file = "grpcio-1.65.4-cp39-cp39-macosx_10_9_universal2.whl", hash = 
"sha256:b07f36faf01fca5427d4aa23645e2d492157d56c91fab7e06fe5697d7e171ad4"}, - {file = "grpcio-1.65.4-cp39-cp39-manylinux_2_17_aarch64.whl", hash = "sha256:b81711bf4ec08a3710b534e8054c7dcf90f2edc22bebe11c1775a23f145595fe"}, - {file = "grpcio-1.65.4-cp39-cp39-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:88fcabc332a4aef8bcefadc34a02e9ab9407ab975d2c7d981a8e12c1aed92aa1"}, - {file = "grpcio-1.65.4-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c9ba3e63108a8749994f02c7c0e156afb39ba5bdf755337de8e75eb685be244b"}, - {file = "grpcio-1.65.4-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:8eb485801957a486bf5de15f2c792d9f9c897a86f2f18db8f3f6795a094b4bb2"}, - {file = "grpcio-1.65.4-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:075f3903bc1749ace93f2b0664f72964ee5f2da5c15d4b47e0ab68e4f442c257"}, - {file = "grpcio-1.65.4-cp39-cp39-win32.whl", hash = "sha256:0a0720299bdb2cc7306737295d56e41ce8827d5669d4a3cd870af832e3b17c4d"}, - {file = "grpcio-1.65.4-cp39-cp39-win_amd64.whl", hash = "sha256:a146bc40fa78769f22e1e9ff4f110ef36ad271b79707577bf2a31e3e931141b9"}, - {file = "grpcio-1.65.4.tar.gz", hash = "sha256:2a4f476209acffec056360d3e647ae0e14ae13dcf3dfb130c227ae1c594cbe39"}, -] - -[package.extras] -protobuf = ["grpcio-tools (>=1.65.4)"] - [[package]] name = "h11" version = "0.14.0" @@ -897,25 +770,6 @@ files = [ {file = "idna-3.7.tar.gz", hash = "sha256:028ff3aadf0609c1fd278d8ea3089299412a7a8b9bd005dd08b9f8285bcb5cfc"}, ] -[[package]] -name = "importlib-metadata" -version = "6.11.0" -description = "Read metadata from Python packages" -optional = false -python-versions = ">=3.8" -files = [ - {file = "importlib_metadata-6.11.0-py3-none-any.whl", hash = "sha256:f0afba6205ad8f8947c7d338b5342d5db2afbfd82f9cbef7879a9539cc12eb9b"}, - {file = "importlib_metadata-6.11.0.tar.gz", hash = "sha256:1231cf92d825c9e03cfc4da076a16de6422c863558229ea0b22b675657463443"}, -] - -[package.dependencies] -zipp = ">=0.5" - -[package.extras] -docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (<7.2.5)", "sphinx (>=3.5)", "sphinx-lint"] -perf = ["ipython"] -testing = ["flufl.flake8", "importlib-resources (>=1.3)", "packaging", "pyfakefs", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-mypy (>=0.9.1)", "pytest-perf (>=0.9.2)", "pytest-ruff"] - [[package]] name = "iniconfig" version = "2.0.0" @@ -1185,328 +1039,6 @@ typing-extensions = ">=4.7,<5" [package.extras] datalib = ["numpy (>=1)", "pandas (>=1.2.3)", "pandas-stubs (>=1.1.0.11)"] -[[package]] -name = "opentelemetry-api" -version = "1.20.0" -description = "OpenTelemetry Python API" -optional = false -python-versions = ">=3.7" -files = [ - {file = "opentelemetry_api-1.20.0-py3-none-any.whl", hash = "sha256:982b76036fec0fdaf490ae3dfd9f28c81442a33414f737abc687a32758cdcba5"}, - {file = "opentelemetry_api-1.20.0.tar.gz", hash = "sha256:06abe351db7572f8afdd0fb889ce53f3c992dbf6f6262507b385cc1963e06983"}, -] - -[package.dependencies] -deprecated = ">=1.2.6" -importlib-metadata = ">=6.0,<7.0" - -[[package]] -name = "opentelemetry-distro" -version = "0.41b0" -description = "OpenTelemetry Python Distro" -optional = false -python-versions = ">=3.7" -files = [ - {file = "opentelemetry_distro-0.41b0-py3-none-any.whl", hash = "sha256:61a028dc8c1418b8634a5bf71e15ad85427cb55d97a0cd6a58dd135e456cc027"}, - {file = "opentelemetry_distro-0.41b0.tar.gz", hash = "sha256:8ce05f9499a09c99d9c5f550ff2ed6d229444cae17ae36baf705b0ccb647a959"}, -] 
- -[package.dependencies] -opentelemetry-api = ">=1.12,<2.0" -opentelemetry-instrumentation = "0.41b0" -opentelemetry-sdk = ">=1.13,<2.0" - -[package.extras] -otlp = ["opentelemetry-exporter-otlp (==1.20.0)"] - -[[package]] -name = "opentelemetry-exporter-otlp-proto-common" -version = "1.20.0" -description = "OpenTelemetry Protobuf encoding" -optional = false -python-versions = ">=3.7" -files = [ - {file = "opentelemetry_exporter_otlp_proto_common-1.20.0-py3-none-any.whl", hash = "sha256:dd63209b40702636ab6ae76a06b401b646ad7b008a906ecb41222d4af24fbdef"}, - {file = "opentelemetry_exporter_otlp_proto_common-1.20.0.tar.gz", hash = "sha256:df60c681bd61812e50b3a39a7a1afeeb6d4066117583249fcc262269374e7a49"}, -] - -[package.dependencies] -backoff = {version = ">=1.10.0,<3.0.0", markers = "python_version >= \"3.7\""} -opentelemetry-proto = "1.20.0" - -[[package]] -name = "opentelemetry-exporter-otlp-proto-grpc" -version = "1.20.0" -description = "OpenTelemetry Collector Protobuf over gRPC Exporter" -optional = false -python-versions = ">=3.7" -files = [ - {file = "opentelemetry_exporter_otlp_proto_grpc-1.20.0-py3-none-any.whl", hash = "sha256:7c3f066065891b56348ba2c7f9df6ec635a712841cae0a36f2f6a81642ae7dec"}, - {file = "opentelemetry_exporter_otlp_proto_grpc-1.20.0.tar.gz", hash = "sha256:6c06d43c3771bda1795226e327722b4b980fa1ca1ec9e985f2ef3e29795bdd52"}, -] - -[package.dependencies] -backoff = {version = ">=1.10.0,<3.0.0", markers = "python_version >= \"3.7\""} -deprecated = ">=1.2.6" -googleapis-common-protos = ">=1.52,<2.0" -grpcio = ">=1.0.0,<2.0.0" -opentelemetry-api = ">=1.15,<2.0" -opentelemetry-exporter-otlp-proto-common = "1.20.0" -opentelemetry-proto = "1.20.0" -opentelemetry-sdk = ">=1.20.0,<1.21.0" - -[package.extras] -test = ["pytest-grpc"] - -[[package]] -name = "opentelemetry-exporter-prometheus" -version = "0.41b0" -description = "Prometheus Metric Exporter for OpenTelemetry" -optional = false -python-versions = ">=3.7" -files = [ - {file = "opentelemetry_exporter_prometheus-0.41b0-py3-none-any.whl", hash = "sha256:ca996f3bc15b0cbf3abd798e786095a202650202a5c0edd9e34bb9186a247b79"}, - {file = "opentelemetry_exporter_prometheus-0.41b0.tar.gz", hash = "sha256:0cc58d5d10040e69090637803b97e120f558467037c88988742c80a627e7f1ed"}, -] - -[package.dependencies] -opentelemetry-api = ">=1.12,<2.0" -opentelemetry-sdk = ">=1.12,<2.0" -prometheus-client = ">=0.5.0,<1.0.0" - -[[package]] -name = "opentelemetry-instrumentation" -version = "0.41b0" -description = "Instrumentation Tools & Auto Instrumentation for OpenTelemetry Python" -optional = false -python-versions = ">=3.7" -files = [ - {file = "opentelemetry_instrumentation-0.41b0-py3-none-any.whl", hash = "sha256:0ef9e5705ceca0205992a4a845ae4251ce6ec15a1206ca07c2b00afb0c5bd386"}, - {file = "opentelemetry_instrumentation-0.41b0.tar.gz", hash = "sha256:214382ba10dfd29d4e24898a4c7ef18b7368178a6277a1aec95cdb75cabf4612"}, -] - -[package.dependencies] -opentelemetry-api = ">=1.4,<2.0" -setuptools = ">=16.0" -wrapt = ">=1.0.0,<2.0.0" - -[[package]] -name = "opentelemetry-instrumentation-aiohttp-client" -version = "0.41b0" -description = "OpenTelemetry aiohttp client instrumentation" -optional = false -python-versions = ">=3.7" -files = [ - {file = "opentelemetry_instrumentation_aiohttp_client-0.41b0-py3-none-any.whl", hash = "sha256:a1d0d18dee5e57cf9187d1a561f9d4ce56d16433231208405458358ff6399a6f"}, - {file = "opentelemetry_instrumentation_aiohttp_client-0.41b0.tar.gz", hash = 
"sha256:56fd35e90c2534b2647e7cdd85f34383eddaa300ee51e989c3763dcdb205ca91"}, -] - -[package.dependencies] -opentelemetry-api = ">=1.12,<2.0" -opentelemetry-instrumentation = "0.41b0" -opentelemetry-semantic-conventions = "0.41b0" -opentelemetry-util-http = "0.41b0" -wrapt = ">=1.0.0,<2.0.0" - -[package.extras] -instruments = ["aiohttp (>=3.0,<4.0)"] -test = ["http-server-mock", "opentelemetry-instrumentation-aiohttp-client[instruments]"] - -[[package]] -name = "opentelemetry-instrumentation-asgi" -version = "0.41b0" -description = "ASGI instrumentation for OpenTelemetry" -optional = false -python-versions = ">=3.7" -files = [ - {file = "opentelemetry_instrumentation_asgi-0.41b0-py3-none-any.whl", hash = "sha256:46084195fb9c50507abbe1dd490ae4c31c8658c5790f1ddf7af95c417dbe6422"}, - {file = "opentelemetry_instrumentation_asgi-0.41b0.tar.gz", hash = "sha256:921244138b37a9a25edf2153f1c248f16f98610ee8d840b25fd7bf6b165e4d72"}, -] - -[package.dependencies] -asgiref = ">=3.0,<4.0" -opentelemetry-api = ">=1.12,<2.0" -opentelemetry-instrumentation = "0.41b0" -opentelemetry-semantic-conventions = "0.41b0" -opentelemetry-util-http = "0.41b0" - -[package.extras] -instruments = ["asgiref (>=3.0,<4.0)"] -test = ["opentelemetry-instrumentation-asgi[instruments]", "opentelemetry-test-utils (==0.41b0)"] - -[[package]] -name = "opentelemetry-instrumentation-fastapi" -version = "0.41b0" -description = "OpenTelemetry FastAPI Instrumentation" -optional = false -python-versions = ">=3.7" -files = [ - {file = "opentelemetry_instrumentation_fastapi-0.41b0-py3-none-any.whl", hash = "sha256:5990368e99ecc989df0a248a0b9b8e85d8b3eb7c1dbf5131c36982ba7f4a43b7"}, - {file = "opentelemetry_instrumentation_fastapi-0.41b0.tar.gz", hash = "sha256:eb4ceefe8b944fc9ea5e61fa558b99afd1285431b563f3f0104ac177cde4dfe5"}, -] - -[package.dependencies] -opentelemetry-api = ">=1.12,<2.0" -opentelemetry-instrumentation = "0.41b0" -opentelemetry-instrumentation-asgi = "0.41b0" -opentelemetry-semantic-conventions = "0.41b0" -opentelemetry-util-http = "0.41b0" - -[package.extras] -instruments = ["fastapi (>=0.58,<1.0)"] -test = ["httpx (>=0.22,<1.0)", "opentelemetry-instrumentation-fastapi[instruments]", "opentelemetry-test-utils (==0.41b0)", "requests (>=2.23,<3.0)"] - -[[package]] -name = "opentelemetry-instrumentation-httpx" -version = "0.41b0" -description = "OpenTelemetry HTTPX Instrumentation" -optional = false -python-versions = ">=3.7" -files = [ - {file = "opentelemetry_instrumentation_httpx-0.41b0-py3-none-any.whl", hash = "sha256:6ada84b7caa95a2889b2d883c089a977546b0102c815658b88f1c2dae713e9b2"}, - {file = "opentelemetry_instrumentation_httpx-0.41b0.tar.gz", hash = "sha256:96ebc54f3f41bfcd2fc043349c8cee4b11737602512383d437e24c39a1e4adff"}, -] - -[package.dependencies] -opentelemetry-api = ">=1.12,<2.0" -opentelemetry-instrumentation = "0.41b0" -opentelemetry-semantic-conventions = "0.41b0" - -[package.extras] -instruments = ["httpx (>=0.18.0)"] -test = ["opentelemetry-instrumentation-httpx[instruments]", "opentelemetry-sdk (>=1.12,<2.0)", "opentelemetry-test-utils (==0.41b0)"] - -[[package]] -name = "opentelemetry-instrumentation-logging" -version = "0.41b0" -description = "OpenTelemetry Logging instrumentation" -optional = false -python-versions = "*" -files = [ - {file = "opentelemetry_instrumentation_logging-0.41b0-py2.py3-none-any.whl", hash = "sha256:ab7117886695c32eb30d7a59199292283c5e652e2b9f2d11874fe4359eacc16a"}, - {file = "opentelemetry_instrumentation_logging-0.41b0.tar.gz", hash = 
"sha256:8ad46e011a99df726323428f0d0a09bf68159ab776b8184ba6d83a7c44f7de81"}, -] - -[package.dependencies] -opentelemetry-api = ">=1.12,<2.0" -opentelemetry-instrumentation = "0.41b0" - -[package.extras] -test = ["opentelemetry-test-utils (==0.41b0)"] - -[[package]] -name = "opentelemetry-instrumentation-requests" -version = "0.41b0" -description = "OpenTelemetry requests instrumentation" -optional = false -python-versions = ">=3.7" -files = [ - {file = "opentelemetry_instrumentation_requests-0.41b0-py3-none-any.whl", hash = "sha256:687fde31111669e729054e64d246c96b0b9d4d8702bd0e3569b7660bdb528d71"}, - {file = "opentelemetry_instrumentation_requests-0.41b0.tar.gz", hash = "sha256:bdc5515ae7533e620b312fd989941b7c2c92d492a2d4418f6ef8db5d7422fa64"}, -] - -[package.dependencies] -opentelemetry-api = ">=1.12,<2.0" -opentelemetry-instrumentation = "0.41b0" -opentelemetry-semantic-conventions = "0.41b0" -opentelemetry-util-http = "0.41b0" - -[package.extras] -instruments = ["requests (>=2.0,<3.0)"] -test = ["httpretty (>=1.0,<2.0)", "opentelemetry-instrumentation-requests[instruments]", "opentelemetry-test-utils (==0.41b0)"] - -[[package]] -name = "opentelemetry-instrumentation-system-metrics" -version = "0.41b0" -description = "OpenTelemetry System Metrics Instrumentation" -optional = false -python-versions = ">=3.7" -files = [ - {file = "opentelemetry_instrumentation_system_metrics-0.41b0-py3-none-any.whl", hash = "sha256:4f2106cf4b77664eb9096727eaba4ccffe28ebf426068b19aa7289644d4b9680"}, - {file = "opentelemetry_instrumentation_system_metrics-0.41b0.tar.gz", hash = "sha256:727193655d81d31a89e118d905a2691e80d967993ae62bac96979a373f59485a"}, -] - -[package.dependencies] -opentelemetry-api = ">=1.11,<2.0" -opentelemetry-sdk = ">=1.11,<2.0" -psutil = ">=5.9,<6.0" - -[package.extras] -instruments = ["psutil (>=5)"] -test = ["opentelemetry-instrumentation-system-metrics[instruments]", "opentelemetry-test-utils (==0.41b0)"] - -[[package]] -name = "opentelemetry-instrumentation-urllib" -version = "0.41b0" -description = "OpenTelemetry urllib instrumentation" -optional = false -python-versions = ">=3.7" -files = [ - {file = "opentelemetry_instrumentation_urllib-0.41b0-py3-none-any.whl", hash = "sha256:cee9e95f55a73480df0915358ce8668bbeda53324c9426847e2ccaea0cac1a87"}, - {file = "opentelemetry_instrumentation_urllib-0.41b0.tar.gz", hash = "sha256:113416b8bd9c2d5c890cb6f86737886e209a3776c2ecdc023887bd78634d5ef3"}, -] - -[package.dependencies] -opentelemetry-api = ">=1.12,<2.0" -opentelemetry-instrumentation = "0.41b0" -opentelemetry-semantic-conventions = "0.41b0" -opentelemetry-util-http = "0.41b0" - -[package.extras] -test = ["httpretty (>=1.0,<2.0)", "opentelemetry-test-utils (==0.41b0)"] - -[[package]] -name = "opentelemetry-proto" -version = "1.20.0" -description = "OpenTelemetry Python Proto" -optional = false -python-versions = ">=3.7" -files = [ - {file = "opentelemetry_proto-1.20.0-py3-none-any.whl", hash = "sha256:512c3d2c6864fb7547a69577c3907348e6c985b7a204533563cb4c4c5046203b"}, - {file = "opentelemetry_proto-1.20.0.tar.gz", hash = "sha256:cf01f49b3072ee57468bccb1a4f93bdb55411f4512d0ac3f97c5c04c0040b5a2"}, -] - -[package.dependencies] -protobuf = ">=3.19,<5.0" - -[[package]] -name = "opentelemetry-sdk" -version = "1.20.0" -description = "OpenTelemetry Python SDK" -optional = false -python-versions = ">=3.7" -files = [ - {file = "opentelemetry_sdk-1.20.0-py3-none-any.whl", hash = "sha256:f2230c276ff4c63ea09b3cb2e2ac6b1265f90af64e8d16bbf275c81a9ce8e804"}, - {file = 
"opentelemetry_sdk-1.20.0.tar.gz", hash = "sha256:702e432a457fa717fd2ddfd30640180e69938f85bb7fec3e479f85f61c1843f8"}, -] - -[package.dependencies] -opentelemetry-api = "1.20.0" -opentelemetry-semantic-conventions = "0.41b0" -typing-extensions = ">=3.7.4" - -[[package]] -name = "opentelemetry-semantic-conventions" -version = "0.41b0" -description = "OpenTelemetry Semantic Conventions" -optional = false -python-versions = ">=3.7" -files = [ - {file = "opentelemetry_semantic_conventions-0.41b0-py3-none-any.whl", hash = "sha256:45404391ed9e50998183a4925ad1b497c01c143f06500c3b9c3d0013492bb0f2"}, - {file = "opentelemetry_semantic_conventions-0.41b0.tar.gz", hash = "sha256:0ce5b040b8a3fc816ea5879a743b3d6fe5db61f6485e4def94c6ee4d402e1eb7"}, -] - -[[package]] -name = "opentelemetry-util-http" -version = "0.41b0" -description = "Web util for OpenTelemetry" -optional = false -python-versions = ">=3.7" -files = [ - {file = "opentelemetry_util_http-0.41b0-py3-none-any.whl", hash = "sha256:6a167fd1e0e8b0f629530d971165b5d82ed0be2154b7f29498499c3a517edee5"}, - {file = "opentelemetry_util_http-0.41b0.tar.gz", hash = "sha256:16d5bd04a380dc1079e766562d1e1626cbb47720f197f67010c45f090fffdfb3"}, -] - [[package]] name = "packaging" version = "23.2" @@ -1664,68 +1196,6 @@ docs = ["sphinx (>=1.7.1)"] redis = ["redis"] tests = ["pytest (>=5.4.1)", "pytest-cov (>=2.8.1)", "pytest-mypy (>=0.8.0)", "pytest-timeout (>=2.1.0)", "redis", "sphinx (>=6.0.0)", "types-redis"] -[[package]] -name = "prometheus-client" -version = "0.17.1" -description = "Python client for the Prometheus monitoring system." -optional = false -python-versions = ">=3.6" -files = [ - {file = "prometheus_client-0.17.1-py3-none-any.whl", hash = "sha256:e537f37160f6807b8202a6fc4764cdd19bac5480ddd3e0d463c3002b34462101"}, - {file = "prometheus_client-0.17.1.tar.gz", hash = "sha256:21e674f39831ae3f8acde238afd9a27a37d0d2fb5a28ea094f0ce25d2cbf2091"}, -] - -[package.extras] -twisted = ["twisted"] - -[[package]] -name = "protobuf" -version = "4.25.4" -description = "" -optional = false -python-versions = ">=3.8" -files = [ - {file = "protobuf-4.25.4-cp310-abi3-win32.whl", hash = "sha256:db9fd45183e1a67722cafa5c1da3e85c6492a5383f127c86c4c4aa4845867dc4"}, - {file = "protobuf-4.25.4-cp310-abi3-win_amd64.whl", hash = "sha256:ba3d8504116a921af46499471c63a85260c1a5fc23333154a427a310e015d26d"}, - {file = "protobuf-4.25.4-cp37-abi3-macosx_10_9_universal2.whl", hash = "sha256:eecd41bfc0e4b1bd3fa7909ed93dd14dd5567b98c941d6c1ad08fdcab3d6884b"}, - {file = "protobuf-4.25.4-cp37-abi3-manylinux2014_aarch64.whl", hash = "sha256:4c8a70fdcb995dcf6c8966cfa3a29101916f7225e9afe3ced4395359955d3835"}, - {file = "protobuf-4.25.4-cp37-abi3-manylinux2014_x86_64.whl", hash = "sha256:3319e073562e2515c6ddc643eb92ce20809f5d8f10fead3332f71c63be6a7040"}, - {file = "protobuf-4.25.4-cp38-cp38-win32.whl", hash = "sha256:7e372cbbda66a63ebca18f8ffaa6948455dfecc4e9c1029312f6c2edcd86c4e1"}, - {file = "protobuf-4.25.4-cp38-cp38-win_amd64.whl", hash = "sha256:051e97ce9fa6067a4546e75cb14f90cf0232dcb3e3d508c448b8d0e4265b61c1"}, - {file = "protobuf-4.25.4-cp39-cp39-win32.whl", hash = "sha256:90bf6fd378494eb698805bbbe7afe6c5d12c8e17fca817a646cd6a1818c696ca"}, - {file = "protobuf-4.25.4-cp39-cp39-win_amd64.whl", hash = "sha256:ac79a48d6b99dfed2729ccccee547b34a1d3d63289c71cef056653a846a2240f"}, - {file = "protobuf-4.25.4-py3-none-any.whl", hash = "sha256:bfbebc1c8e4793cfd58589acfb8a1026be0003e852b9da7db5a4285bde996978"}, - {file = "protobuf-4.25.4.tar.gz", hash = 
"sha256:0dc4a62cc4052a036ee2204d26fe4d835c62827c855c8a03f29fe6da146b380d"}, -] - -[[package]] -name = "psutil" -version = "5.9.8" -description = "Cross-platform lib for process and system monitoring in Python." -optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*" -files = [ - {file = "psutil-5.9.8-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:26bd09967ae00920df88e0352a91cff1a78f8d69b3ecabbfe733610c0af486c8"}, - {file = "psutil-5.9.8-cp27-cp27m-manylinux2010_i686.whl", hash = "sha256:05806de88103b25903dff19bb6692bd2e714ccf9e668d050d144012055cbca73"}, - {file = "psutil-5.9.8-cp27-cp27m-manylinux2010_x86_64.whl", hash = "sha256:611052c4bc70432ec770d5d54f64206aa7203a101ec273a0cd82418c86503bb7"}, - {file = "psutil-5.9.8-cp27-cp27mu-manylinux2010_i686.whl", hash = "sha256:50187900d73c1381ba1454cf40308c2bf6f34268518b3f36a9b663ca87e65e36"}, - {file = "psutil-5.9.8-cp27-cp27mu-manylinux2010_x86_64.whl", hash = "sha256:02615ed8c5ea222323408ceba16c60e99c3f91639b07da6373fb7e6539abc56d"}, - {file = "psutil-5.9.8-cp27-none-win32.whl", hash = "sha256:36f435891adb138ed3c9e58c6af3e2e6ca9ac2f365efe1f9cfef2794e6c93b4e"}, - {file = "psutil-5.9.8-cp27-none-win_amd64.whl", hash = "sha256:bd1184ceb3f87651a67b2708d4c3338e9b10c5df903f2e3776b62303b26cb631"}, - {file = "psutil-5.9.8-cp36-abi3-macosx_10_9_x86_64.whl", hash = "sha256:aee678c8720623dc456fa20659af736241f575d79429a0e5e9cf88ae0605cc81"}, - {file = "psutil-5.9.8-cp36-abi3-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8cb6403ce6d8e047495a701dc7c5bd788add903f8986d523e3e20b98b733e421"}, - {file = "psutil-5.9.8-cp36-abi3-manylinux_2_12_x86_64.manylinux2010_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d06016f7f8625a1825ba3732081d77c94589dca78b7a3fc072194851e88461a4"}, - {file = "psutil-5.9.8-cp36-cp36m-win32.whl", hash = "sha256:7d79560ad97af658a0f6adfef8b834b53f64746d45b403f225b85c5c2c140eee"}, - {file = "psutil-5.9.8-cp36-cp36m-win_amd64.whl", hash = "sha256:27cc40c3493bb10de1be4b3f07cae4c010ce715290a5be22b98493509c6299e2"}, - {file = "psutil-5.9.8-cp37-abi3-win32.whl", hash = "sha256:bc56c2a1b0d15aa3eaa5a60c9f3f8e3e565303b465dbf57a1b730e7a2b9844e0"}, - {file = "psutil-5.9.8-cp37-abi3-win_amd64.whl", hash = "sha256:8db4c1b57507eef143a15a6884ca10f7c73876cdf5d51e713151c1236a0e68cf"}, - {file = "psutil-5.9.8-cp38-abi3-macosx_11_0_arm64.whl", hash = "sha256:d16bbddf0693323b8c6123dd804100241da461e41d6e332fb0ba6058f630f8c8"}, - {file = "psutil-5.9.8.tar.gz", hash = "sha256:6be126e3225486dff286a8fb9a06246a5253f4c7c53b475ea5f5ac934e64194c"}, -] - -[package.extras] -test = ["enum34", "ipaddress", "mock", "pywin32", "wmi"] - [[package]] name = "pycodestyle" version = "2.10.0" @@ -2419,22 +1889,7 @@ files = [ idna = ">=2.0" multidict = ">=4.0" -[[package]] -name = "zipp" -version = "3.19.2" -description = "Backport of pathlib-compatible object wrapper for zip files" -optional = false -python-versions = ">=3.8" -files = [ - {file = "zipp-3.19.2-py3-none-any.whl", hash = "sha256:f091755f667055f2d02b32c53771a7a6c8b47e1fdbc4b72a8b9072b3eef8015c"}, - {file = "zipp-3.19.2.tar.gz", hash = "sha256:bf1dcf6450f873a13e952a29504887c89e6de7506209e5b1bcc3460135d4de19"}, -] - -[package.extras] -doc = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"] -test = ["big-O", "importlib-resources", "jaraco.functools", "jaraco.itertools", "jaraco.test", "more-itertools", "pytest (>=6,!=8.1.*)", 
"pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-ignore-flaky", "pytest-mypy", "pytest-ruff (>=0.2.1)"] - [metadata] lock-version = "2.0" python-versions = ">=3.11,<3.13" -content-hash = "b5ce5e84cdb97ccf9d9bd43fb5879a78e3da57b71403739b08c351ad85a27cf1" +content-hash = "2b92b069222db76980c6d57d157f14e5a2fbcbd0daf321f77067e55d6c2e52bc" diff --git a/pyproject.toml b/pyproject.toml index 1a5d3f6..ff79126 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -31,7 +31,9 @@ aiohttp = "^3.10.2" numpy = "^1.26.0" pillow = "^10.3.0" azure-identity = "^1.16.1" -aidial-sdk = {version = "^0.13.0", extras = ["telemetry"]} +# FIXME: migrate to the SDK once it released +# aidial-sdk = {version = "^0.13.0", extras = ["telemetry"]} +aidial-sdk = {git = "https://github.com/epam/ai-dial-sdk.git", branch = "fix/fixed-sharing-issues-in-merge-chunks-algo"} [tool.poetry.group.test.dependencies] pytest = "7.4.0" From c3ac17316abbc153a742a3f5e3bfa07c2b8cde9a Mon Sep 17 00:00:00 2001 From: Anton Dubovik Date: Fri, 8 Nov 2024 11:50:58 +0000 Subject: [PATCH 2/9] feat: supported conservative token counting for the completion message with tool and function calls --- aidial_adapter_openai/gpt.py | 8 +++--- .../gpt4_multi_modal/chat_completion.py | 2 +- .../utils/chat_completion_response.py | 2 +- aidial_adapter_openai/utils/streaming.py | 26 +++++++++---------- aidial_adapter_openai/utils/tokenizer.py | 25 ++++++++++++++++++ 5 files changed, 44 insertions(+), 19 deletions(-) diff --git a/aidial_adapter_openai/gpt.py b/aidial_adapter_openai/gpt.py index 5c141fb..d57953d 100644 --- a/aidial_adapter_openai/gpt.py +++ b/aidial_adapter_openai/gpt.py @@ -43,7 +43,7 @@ async def gpt_chat_completion( tokenizer: PlainTextTokenizer, ): discarded_messages = None - prompt_tokens = None + estimated_prompt_tokens = None if "max_prompt_tokens" in data: max_prompt_tokens = data["max_prompt_tokens"] if not isinstance(max_prompt_tokens, int): @@ -56,7 +56,7 @@ async def gpt_chat_completion( ) del data["max_prompt_tokens"] - data["messages"], discarded_messages, prompt_tokens = ( + data["messages"], discarded_messages, estimated_prompt_tokens = ( plain_text_truncate_prompt( messages=cast(List[dict], data["messages"]), max_prompt_tokens=max_prompt_tokens, @@ -73,9 +73,9 @@ async def gpt_chat_completion( if isinstance(response, AsyncIterator): return generate_stream( - get_prompt_tokens=lambda: prompt_tokens + get_prompt_tokens=lambda: estimated_prompt_tokens or tokenizer.calculate_prompt_tokens(data["messages"]), - tokenize=tokenizer.calculate_text_tokens, + tokenize_completion_tokens=tokenizer.calculate_completion_tokens, deployment=deployment_id, discarded_messages=discarded_messages, stream=map_stream(chunk_to_dict, response), diff --git a/aidial_adapter_openai/gpt4_multi_modal/chat_completion.py b/aidial_adapter_openai/gpt4_multi_modal/chat_completion.py index b6be728..12db7ce 100644 --- a/aidial_adapter_openai/gpt4_multi_modal/chat_completion.py +++ b/aidial_adapter_openai/gpt4_multi_modal/chat_completion.py @@ -255,7 +255,7 @@ def debug_print(chunk: T) -> T: debug_print, generate_stream( get_prompt_tokens=lambda: estimated_prompt_tokens, - tokenize=tokenizer.calculate_text_tokens, + tokenize_completion_tokens=tokenizer.calculate_completion_tokens, deployment=deployment, discarded_messages=discarded_messages, stream=map_stream( diff --git a/aidial_adapter_openai/utils/chat_completion_response.py b/aidial_adapter_openai/utils/chat_completion_response.py index 310278c..b27ab32 100644 --- 
a/aidial_adapter_openai/utils/chat_completion_response.py +++ b/aidial_adapter_openai/utils/chat_completion_response.py @@ -14,7 +14,7 @@ def usage(self) -> Any | None: @property def is_empty(self) -> bool: - return bool(self.resp) + return self.resp == {} @property def finish_reasons(self) -> Iterable[Any]: diff --git a/aidial_adapter_openai/utils/streaming.py b/aidial_adapter_openai/utils/streaming.py index eea80d7..a340e66 100644 --- a/aidial_adapter_openai/utils/streaming.py +++ b/aidial_adapter_openai/utils/streaming.py @@ -1,6 +1,6 @@ import logging from time import time -from typing import Any, AsyncIterator, Callable, Iterable, Optional, TypeVar +from typing import Any, AsyncIterator, Callable, List, Optional, TypeVar from uuid import uuid4 from aidial_sdk.exceptions import HTTPException as DialException @@ -57,7 +57,7 @@ def build_chunk( async def generate_stream( *, get_prompt_tokens: Callable[[], int], - tokenize: Callable[[str], int], + tokenize_completion_tokens: Callable[[Any], int], deployment: str, discarded_messages: Optional[list[int]], stream: AsyncIterator[dict], @@ -72,9 +72,9 @@ async def generate_stream( finish_reason=None, ) - def set_usage(chunk: dict | None, completions: Iterable[str]) -> dict: + def set_usage(chunk: dict | None, messages: List[Any]) -> dict: chunk = chunk or noop_chunk - completion_tokens = sum(map(tokenize, completions)) + completion_tokens = sum(map(tokenize_completion_tokens, messages)) prompt_tokens = get_prompt_tokens() chunk["usage"] = { "completion_tokens": completion_tokens, @@ -96,13 +96,13 @@ def set_discarded_messages(chunk: dict | None, indices: list[int]) -> dict: last_chunk = None buffer_chunk = None - snapshot = ChatCompletionStreamingChunk() + response_snapshot = ChatCompletionStreamingChunk() error = None try: async for chunk in stream: - snapshot.merge(chunk) + response_snapshot.merge(chunk) if buffer_chunk is not None: chunk = merge_chat_completion_chunks(chunk, buffer_chunk) @@ -137,14 +137,14 @@ def set_discarded_messages(chunk: dict | None, indices: list[int]) -> dict: if discarded_messages is not None: last_chunk = set_discarded_messages(last_chunk, discarded_messages) - completions = [msg.get("content") or "" for msg in snapshot.messages] - found_finish_reason = any(True for _ in snapshot.finish_reasons) + messages = list(response_snapshot.messages) + found_finish_reason = any(True for _ in response_snapshot.finish_reasons) - if snapshot.usage is None and (not error or completions): - last_chunk = set_usage(last_chunk, completions) + if response_snapshot.usage is None and (not error or messages): + last_chunk = set_usage(last_chunk, messages) if not error: - if snapshot.is_empty: + if response_snapshot.is_empty: logger.warning("Received 0 chunks") elif not found_finish_reason: logger.warning("Didn't receive chunk with the finish reason") @@ -152,8 +152,8 @@ def set_discarded_messages(chunk: dict | None, indices: list[int]) -> dict: if not found_finish_reason: last_chunk = set_finish_reason(last_chunk, "length") - if snapshot.usage is None: - last_chunk = set_usage(last_chunk, completions) + if response_snapshot.usage is None: + last_chunk = set_usage(last_chunk, messages) if last_chunk: yield last_chunk diff --git a/aidial_adapter_openai/utils/tokenizer.py b/aidial_adapter_openai/utils/tokenizer.py index 328e08d..1874a55 100644 --- a/aidial_adapter_openai/utils/tokenizer.py +++ b/aidial_adapter_openai/utils/tokenizer.py @@ -2,6 +2,7 @@ Implemented based on the official recipe: 
https://cookbook.openai.com/examples/how_to_count_tokens_with_tiktoken
 """
 
+import json
 from abc import abstractmethod
 from typing import Any, Callable, Generic, List, TypeVar
 
@@ -33,6 +34,30 @@ def __init__(self, model: str) -> None:
     def calculate_text_tokens(self, text: str) -> int:
         return len(self.encoding.encode(text))
 
+    def calculate_completion_tokens(self, message: Any) -> int:
+        def _calculate_tokens(obj: Any) -> int:
+            if not obj:
+                return 0
+
+            # OpenAI doesn't reveal the tokenization algorithm for tool calls and function calls.
+            # An approximation is used instead - tokens of the string repr of the objects.
+            text = (
+                obj
+                if isinstance(obj, str)
+                else json.dumps(obj, separators=(",", ":"))
+            )
+            return self.calculate_text_tokens(text)
+
+        tokens = 0
+
+        for key in ["content", "refusal", "function"]:
+            tokens += _calculate_tokens(message.get(key))
+
+        for tool_call in message.get("tool_calls") or []:
+            tokens += _calculate_tokens(tool_call.get("function"))
+
+        return tokens
+
     @property
     def tokens_per_message(self) -> int:
         """

From 1504fdb3d6b62f3596a834fc0a7c0ebc376183f2 Mon Sep 17 00:00:00 2001
From: Anton Dubovik
Date: Fri, 8 Nov 2024 12:13:19 +0000
Subject: [PATCH 3/9] fix: migrated to estimated tool and function call
 tokenization throughout the project

---
 aidial_adapter_openai/gpt.py                  |  2 +-
 .../gpt4_multi_modal/chat_completion.py       | 33 +++++++++--------
 .../utils/chat_completion_response.py         | 16 ++++++---
 aidial_adapter_openai/utils/streaming.py      | 35 ++++++++++---------
 aidial_adapter_openai/utils/tokenizer.py      | 20 ++++++++---
 5 files changed, 66 insertions(+), 40 deletions(-)

diff --git a/aidial_adapter_openai/gpt.py b/aidial_adapter_openai/gpt.py
index d57953d..1271c8c 100644
--- a/aidial_adapter_openai/gpt.py
+++ b/aidial_adapter_openai/gpt.py
@@ -75,7 +75,7 @@ async def gpt_chat_completion(
         return generate_stream(
             get_prompt_tokens=lambda: estimated_prompt_tokens
             or tokenizer.calculate_prompt_tokens(data["messages"]),
-            tokenize_completion_tokens=tokenizer.calculate_completion_tokens,
+            tokenize_chat_completion_response=tokenizer.calculate_chat_completion_response_tokens,
             deployment=deployment_id,
             discarded_messages=discarded_messages,
             stream=map_stream(chunk_to_dict, response),
diff --git a/aidial_adapter_openai/gpt4_multi_modal/chat_completion.py b/aidial_adapter_openai/gpt4_multi_modal/chat_completion.py
index 12db7ce..1cba1de 100644
--- a/aidial_adapter_openai/gpt4_multi_modal/chat_completion.py
+++ b/aidial_adapter_openai/gpt4_multi_modal/chat_completion.py
@@ -25,6 +25,9 @@
     ResourceProcessor,
 )
 from aidial_adapter_openai.utils.auth import OpenAICreds, get_auth_headers
+from aidial_adapter_openai.utils.chat_completion_response import (
+    ChatCompletionBlock,
+)
 from aidial_adapter_openai.utils.log_config import logger
 from aidial_adapter_openai.utils.multi_modal_message import MultiModalMessage
 from aidial_adapter_openai.utils.sse_stream import parse_openai_sse_stream
@@ -255,7 +258,7 @@ def debug_print(chunk: T) -> T:
             debug_print,
             generate_stream(
                 get_prompt_tokens=lambda: estimated_prompt_tokens,
-                tokenize_completion_tokens=tokenizer.calculate_completion_tokens,
+                tokenize_chat_completion_response=tokenizer.calculate_chat_completion_response_tokens,
                 deployment=deployment,
                 discarded_messages=discarded_messages,
                 stream=map_stream(
@@ -277,25 +280,27 @@ def debug_print(chunk: T) -> T:
             type="invalid_response_error",
         )
 
-    content = response["choices"][0]["message"].get("content") or ""
-    usage = response["usage"]
-
     if discarded_messages:
        response |= {
            "statistics": 
{"discarded_messages": discarded_messages} } - actual_prompt_tokens = usage["prompt_tokens"] - if actual_prompt_tokens != estimated_prompt_tokens: - logger.warning( - f"Estimated prompt tokens ({estimated_prompt_tokens}) don't match the actual ones ({actual_prompt_tokens})" - ) + if usage := response.get("usage"): + actual_prompt_tokens = usage["prompt_tokens"] + if actual_prompt_tokens != estimated_prompt_tokens: + logger.warning( + f"Estimated prompt tokens ({estimated_prompt_tokens}) don't match the actual ones ({actual_prompt_tokens})" + ) - actual_completion_tokens = usage["completion_tokens"] - estimated_completion_tokens = tokenizer.calculate_text_tokens(content) - if actual_completion_tokens != estimated_completion_tokens: - logger.warning( - f"Estimated completion tokens ({estimated_completion_tokens}) don't match the actual ones ({actual_completion_tokens})" + actual_completion_tokens = usage["completion_tokens"] + estimated_completion_tokens = ( + tokenizer.calculate_chat_completion_response_tokens( + ChatCompletionBlock(resp=response) + ) ) + if actual_completion_tokens != estimated_completion_tokens: + logger.warning( + f"Estimated completion tokens ({estimated_completion_tokens}) don't match the actual ones ({actual_completion_tokens})" + ) return response diff --git a/aidial_adapter_openai/utils/chat_completion_response.py b/aidial_adapter_openai/utils/chat_completion_response.py index b27ab32..e3d7336 100644 --- a/aidial_adapter_openai/utils/chat_completion_response.py +++ b/aidial_adapter_openai/utils/chat_completion_response.py @@ -22,21 +22,29 @@ def finish_reasons(self) -> Iterable[Any]: if (reason := choice.get("finish_reason")) is not None: yield reason + @property + def has_finish_reason(self) -> bool: + return len(list(self.finish_reasons)) > 0 + @property def messages(self) -> Iterable[Any]: for choice in self.resp.get("choices") or []: if (message := choice.get(self.message_key)) is not None: yield message + @property + def has_messages(self) -> bool: + return len(list(self.messages)) > 0 + class ChatCompletionBlock(ChatCompletionResponse): - def __init__(self): - super().__init__(message_key="message") + def __init__(self, **kwargs): + super().__init__(message_key="message", **kwargs) class ChatCompletionStreamingChunk(ChatCompletionResponse): - def __init__(self): - super().__init__(message_key="delta") + def __init__(self, **kwargs): + super().__init__(message_key="delta", **kwargs) def merge(self, chunk: dict) -> Self: self.resp = merge_chat_completion_chunks(self.resp, chunk) diff --git a/aidial_adapter_openai/utils/streaming.py b/aidial_adapter_openai/utils/streaming.py index a340e66..243266f 100644 --- a/aidial_adapter_openai/utils/streaming.py +++ b/aidial_adapter_openai/utils/streaming.py @@ -1,6 +1,6 @@ import logging from time import time -from typing import Any, AsyncIterator, Callable, List, Optional, TypeVar +from typing import Any, AsyncIterator, Callable, Optional, TypeVar from uuid import uuid4 from aidial_sdk.exceptions import HTTPException as DialException @@ -12,6 +12,7 @@ from aidial_adapter_openai.env import get_eliminate_empty_choices from aidial_adapter_openai.utils.chat_completion_response import ( + ChatCompletionResponse, ChatCompletionStreamingChunk, ) from aidial_adapter_openai.utils.log_config import logger @@ -57,13 +58,13 @@ def build_chunk( async def generate_stream( *, get_prompt_tokens: Callable[[], int], - tokenize_completion_tokens: Callable[[Any], int], + tokenize_chat_completion_response: Callable[[ChatCompletionResponse], 
int], deployment: str, discarded_messages: Optional[list[int]], stream: AsyncIterator[dict], ) -> AsyncIterator[dict]: - noop_chunk = build_chunk( + empty_chunk = build_chunk( id=generate_id(), created=generate_created(), model=deployment, @@ -72,10 +73,11 @@ async def generate_stream( finish_reason=None, ) - def set_usage(chunk: dict | None, messages: List[Any]) -> dict: - chunk = chunk or noop_chunk - completion_tokens = sum(map(tokenize_completion_tokens, messages)) + def set_usage(chunk: dict | None, resp: ChatCompletionResponse) -> dict: + completion_tokens = tokenize_chat_completion_response(resp) prompt_tokens = get_prompt_tokens() + + chunk = chunk or empty_chunk chunk["usage"] = { "completion_tokens": completion_tokens, "prompt_tokens": prompt_tokens, @@ -84,13 +86,13 @@ def set_usage(chunk: dict | None, messages: List[Any]) -> dict: return chunk def set_finish_reason(chunk: dict | None, finish_reason: str) -> dict: - chunk = chunk or noop_chunk + chunk = chunk or empty_chunk chunk["choices"] = chunk.get("choices") or [{"index": 0, "delta": {}}] chunk["choices"][0]["finish_reason"] = finish_reason return chunk def set_discarded_messages(chunk: dict | None, indices: list[int]) -> dict: - chunk = chunk or noop_chunk + chunk = chunk or empty_chunk chunk["statistics"] = {"discarded_messages": indices} return chunk @@ -137,23 +139,24 @@ def set_discarded_messages(chunk: dict | None, indices: list[int]) -> dict: if discarded_messages is not None: last_chunk = set_discarded_messages(last_chunk, discarded_messages) - messages = list(response_snapshot.messages) - found_finish_reason = any(True for _ in response_snapshot.finish_reasons) - - if response_snapshot.usage is None and (not error or messages): - last_chunk = set_usage(last_chunk, messages) + if response_snapshot.usage is None and ( + not error or response_snapshot.has_messages + ): + last_chunk = set_usage(last_chunk, response_snapshot) if not error: + has_finish_reason = response_snapshot.has_finish_reason + if response_snapshot.is_empty: logger.warning("Received 0 chunks") - elif not found_finish_reason: + elif not has_finish_reason: logger.warning("Didn't receive chunk with the finish reason") - if not found_finish_reason: + if not has_finish_reason: last_chunk = set_finish_reason(last_chunk, "length") if response_snapshot.usage is None: - last_chunk = set_usage(last_chunk, messages) + last_chunk = set_usage(last_chunk, response_snapshot) if last_chunk: yield last_chunk diff --git a/aidial_adapter_openai/utils/tokenizer.py b/aidial_adapter_openai/utils/tokenizer.py index 1874a55..8cb9d0a 100644 --- a/aidial_adapter_openai/utils/tokenizer.py +++ b/aidial_adapter_openai/utils/tokenizer.py @@ -9,6 +9,9 @@ from aidial_sdk.exceptions import InternalServerError from tiktoken import Encoding, encoding_for_model +from aidial_adapter_openai.utils.chat_completion_response import ( + ChatCompletionResponse, +) from aidial_adapter_openai.utils.image_tokenizer import ImageTokenizer from aidial_adapter_openai.utils.multi_modal_message import MultiModalMessage @@ -34,13 +37,20 @@ def __init__(self, model: str) -> None: def calculate_text_tokens(self, text: str) -> int: return len(self.encoding.encode(text)) - def calculate_completion_tokens(self, message: Any) -> int: - def _calculate_tokens(obj: Any) -> int: + def calculate_chat_completion_response_tokens( + self, resp: ChatCompletionResponse + ) -> int: + return sum( + map(self._calculate_chat_completion_message_tokens, resp.messages) + ) + + def 
_calculate_chat_completion_message_tokens(self, message: Any) -> int: + def _tokenize(obj: Any) -> int: if not obj: return 0 # OpenAI doesn't reveal tokenize algo for tools calls and function calls. - # An approximation is used instead - tokens of the string repr of the objects. + # An approximation is used instead - tokens in the string repr of the objects. text = ( obj if isinstance(obj, str) @@ -51,10 +61,10 @@ def _calculate_tokens(obj: Any) -> int: tokens = 0 for key in ["content", "refusal", "function"]: - tokens += _calculate_tokens(message.get(key)) + tokens += _tokenize(message.get(key)) for tool_call in message.get("tool_calls") or []: - tokens += _calculate_tokens(tool_call.get("function")) + tokens += _tokenize(tool_call.get("function")) return tokens From f4ab6d999f8975de1a3fb3c04b05b70133520dda Mon Sep 17 00:00:00 2001 From: Anton Dubovik Date: Fri, 8 Nov 2024 12:27:14 +0000 Subject: [PATCH 4/9] chore: renamings --- aidial_adapter_openai/gpt.py | 6 +- .../gpt4_multi_modal/chat_completion.py | 12 ++-- aidial_adapter_openai/utils/streaming.py | 4 +- aidial_adapter_openai/utils/tokenizer.py | 56 ++++++++----------- 4 files changed, 33 insertions(+), 45 deletions(-) diff --git a/aidial_adapter_openai/gpt.py b/aidial_adapter_openai/gpt.py index 1271c8c..297033f 100644 --- a/aidial_adapter_openai/gpt.py +++ b/aidial_adapter_openai/gpt.py @@ -27,7 +27,7 @@ def plain_text_truncate_prompt( ) -> Tuple[List[dict], DiscardedMessages, TruncatedTokens]: return truncate_prompt( messages=messages, - message_tokens=tokenizer.calculate_message_tokens, + message_tokens=tokenizer.tokenize_request_message, is_system_message=lambda message: message["role"] == "system", max_prompt_tokens=max_prompt_tokens, initial_prompt_tokens=tokenizer.TOKENS_PER_REQUEST, @@ -74,8 +74,8 @@ async def gpt_chat_completion( if isinstance(response, AsyncIterator): return generate_stream( get_prompt_tokens=lambda: estimated_prompt_tokens - or tokenizer.calculate_prompt_tokens(data["messages"]), - tokenize_chat_completion_response=tokenizer.calculate_chat_completion_response_tokens, + or tokenizer.tokenize_request(data["messages"]), + tokenize_response=tokenizer.tokenize_response, deployment=deployment_id, discarded_messages=discarded_messages, stream=map_stream(chunk_to_dict, response), diff --git a/aidial_adapter_openai/gpt4_multi_modal/chat_completion.py b/aidial_adapter_openai/gpt4_multi_modal/chat_completion.py index 1cba1de..9732cef 100644 --- a/aidial_adapter_openai/gpt4_multi_modal/chat_completion.py +++ b/aidial_adapter_openai/gpt4_multi_modal/chat_completion.py @@ -126,7 +126,7 @@ def multi_modal_truncate_prompt( ) -> Tuple[List[MultiModalMessage], DiscardedMessages, TruncatedTokens]: return truncate_prompt( messages=messages, - message_tokens=tokenizer.calculate_message_tokens, + message_tokens=tokenizer.tokenize_request_message, is_system_message=lambda message: message.raw_message["role"] == "system", max_prompt_tokens=max_prompt_tokens, @@ -228,7 +228,7 @@ async def chat_completion( f"prompt tokens after truncation: {estimated_prompt_tokens}" ) else: - estimated_prompt_tokens = tokenizer.calculate_prompt_tokens( + estimated_prompt_tokens = tokenizer.tokenize_request( multi_modal_messages ) logger.debug( @@ -258,7 +258,7 @@ def debug_print(chunk: T) -> T: debug_print, generate_stream( get_prompt_tokens=lambda: estimated_prompt_tokens, - tokenize_chat_completion_response=tokenizer.calculate_chat_completion_response_tokens, + tokenize_response=tokenizer.tokenize_response, deployment=deployment, 
discarded_messages=discarded_messages,
             stream=map_stream(
@@ -293,10 +293,8 @@ def debug_print(chunk: T) -> T:
             )
 
         actual_completion_tokens = usage["completion_tokens"]
-        estimated_completion_tokens = (
-            tokenizer.calculate_chat_completion_response_tokens(
-                ChatCompletionBlock(resp=response)
-            )
+        estimated_completion_tokens = tokenizer.tokenize_response(
+            ChatCompletionBlock(resp=response)
         )
         if actual_completion_tokens != estimated_completion_tokens:
             logger.warning(
diff --git a/aidial_adapter_openai/utils/streaming.py b/aidial_adapter_openai/utils/streaming.py
index 243266f..d677ef7 100644
--- a/aidial_adapter_openai/utils/streaming.py
+++ b/aidial_adapter_openai/utils/streaming.py
@@ -58,7 +58,7 @@ def build_chunk(
 async def generate_stream(
     *,
     get_prompt_tokens: Callable[[], int],
-    tokenize_chat_completion_response: Callable[[ChatCompletionResponse], int],
+    tokenize_response: Callable[[ChatCompletionResponse], int],
     deployment: str,
     discarded_messages: Optional[list[int]],
     stream: AsyncIterator[dict],
@@ -74,7 +74,7 @@ async def generate_stream(
     )
 
     def set_usage(chunk: dict | None, resp: ChatCompletionResponse) -> dict:
-        completion_tokens = tokenize_chat_completion_response(resp)
+        completion_tokens = tokenize_response(resp)
         prompt_tokens = get_prompt_tokens()
 
         chunk = chunk or empty_chunk
diff --git a/aidial_adapter_openai/utils/tokenizer.py b/aidial_adapter_openai/utils/tokenizer.py
index 8cb9d0a..23ebe4f 100644
--- a/aidial_adapter_openai/utils/tokenizer.py
+++ b/aidial_adapter_openai/utils/tokenizer.py
@@ -19,6 +19,10 @@
 
 
 class BaseTokenizer(Generic[MessageType]):
+    """
+    Tokenizer for chat completion requests and responses.
+    """
+
     model: str
     encoding: Encoding
     TOKENS_PER_REQUEST = 3
@@ -34,17 +38,13 @@ def __init__(self, model: str) -> None:
                 "or declare it as a model which doesn't require tokenization through tiktoken.",
             ) from e
 
-    def calculate_text_tokens(self, text: str) -> int:
+    def tokenize_text(self, text: str) -> int:
         return len(self.encoding.encode(text))
 
-    def calculate_chat_completion_response_tokens(
-        self, resp: ChatCompletionResponse
-    ) -> int:
-        return sum(
-            map(self._calculate_chat_completion_message_tokens, resp.messages)
-        )
+    def tokenize_response(self, resp: ChatCompletionResponse) -> int:
+        return sum(map(self._tokenize_response_message, resp.messages))
 
-    def _calculate_chat_completion_message_tokens(self, message: Any) -> int:
+    def _tokenize_response_message(self, message: Any) -> int:
         def _tokenize(obj: Any) -> int:
             if not obj:
                 return 0
@@ -56,7 +56,7 @@ def _tokenize(obj: Any) -> int:
                 if isinstance(obj, str)
                 else json.dumps(obj, separators=(",", ":"))
             )
-            return self.calculate_text_tokens(text)
+            return self.tokenize_text(text)
 
         tokens = 0
 
@@ -69,7 +69,7 @@ def _tokenize(obj: Any) -> int:
         return tokens
 
     @property
-    def tokens_per_message(self) -> int:
+    def _tokens_per_request_message(self) -> int:
         """
         Tokens that are counted for each message, regardless of its content.
         """
@@ -78,7 +78,7 @@ def tokens_per_message(self) -> int:
         return 3
 
     @property
-    def tokens_per_name(self) -> int:
+    def _tokens_per_request_message_name(self) -> int:
         """
         Tokens that are counted for the "name" field in a message, if it's present.
         """
@@ -86,23 +86,13 @@ def tokens_per_name(self) -> int:
             return -1
         return 1
 
-    def calculate_request_prompt_tokens(self, messages_tokens: int):
-        """
-        Amount of tokens, that will be counted by API
-        is greater than actual sum of tokens of all messages
-        """
-        return self.TOKENS_PER_REQUEST + messages_tokens
-
-    def calculate_prompt_tokens(self, 
messages: List[MessageType]) -> int: - return self.calculate_request_prompt_tokens( - messages_tokens=sum(map(self.calculate_message_tokens, messages)) + def tokenize_request(self, messages: List[MessageType]) -> int: + return self.TOKENS_PER_REQUEST + sum( + map(self.tokenize_request_message, messages) ) - def available_message_tokens(self, max_prompt_tokens: int): - return max_prompt_tokens - self.TOKENS_PER_REQUEST - @abstractmethod - def calculate_message_tokens(self, message: MessageType) -> int: + def tokenize_request_message(self, message: MessageType) -> int: pass @@ -156,11 +146,11 @@ def _handle_custom_content_part(self, content_part: Any): f"Use MultiModalTokenizer for messages with images" ) - def calculate_message_tokens(self, message: dict) -> int: - return self.tokens_per_message + _process_raw_message( + def tokenize_request_message(self, message: dict) -> int: + return self._tokens_per_request_message + _process_raw_message( raw_message=message, - tokens_per_name=self.tokens_per_name, - calculate_text_tokens=self.calculate_text_tokens, + tokens_per_name=self._tokens_per_request_message_name, + calculate_text_tokens=self.tokenize_text, handle_custom_content_part=self._handle_custom_content_part, ) @@ -172,14 +162,14 @@ def __init__(self, model: str, image_tokenizer: ImageTokenizer): super().__init__(model) self.image_tokenizer = image_tokenizer - def calculate_message_tokens(self, message: MultiModalMessage) -> int: - tokens = self.tokens_per_message + def tokenize_request_message(self, message: MultiModalMessage) -> int: + tokens = self._tokens_per_request_message raw_message = message.raw_message tokens += _process_raw_message( raw_message=raw_message, - tokens_per_name=self.tokens_per_name, - calculate_text_tokens=self.calculate_text_tokens, + tokens_per_name=self._tokens_per_request_message_name, + calculate_text_tokens=self.tokenize_text, handle_custom_content_part=lambda content_part: None, ) From b727ff107c56373001843c1f6181e595fa6d7f52 Mon Sep 17 00:00:00 2001 From: Anton Dubovik Date: Fri, 8 Nov 2024 12:50:50 +0000 Subject: [PATCH 5/9] feat: supported estimation of tools/functions in the chat completion request --- aidial_adapter_openai/gpt.py | 16 +++--- .../gpt4_multi_modal/chat_completion.py | 2 +- aidial_adapter_openai/utils/tokenizer.py | 51 ++++++++++++------- 3 files changed, 42 insertions(+), 27 deletions(-) diff --git a/aidial_adapter_openai/gpt.py b/aidial_adapter_openai/gpt.py index 297033f..2670e87 100644 --- a/aidial_adapter_openai/gpt.py +++ b/aidial_adapter_openai/gpt.py @@ -35,7 +35,7 @@ def plain_text_truncate_prompt( async def gpt_chat_completion( - data: dict, + request: dict, deployment_id: str, upstream_endpoint: str, creds: OpenAICreds, @@ -44,8 +44,8 @@ async def gpt_chat_completion( ): discarded_messages = None estimated_prompt_tokens = None - if "max_prompt_tokens" in data: - max_prompt_tokens = data["max_prompt_tokens"] + if "max_prompt_tokens" in request: + max_prompt_tokens = request["max_prompt_tokens"] if not isinstance(max_prompt_tokens, int): raise InvalidRequestError( f"'{max_prompt_tokens}' is not of type 'integer' - 'max_prompt_tokens'", @@ -54,11 +54,11 @@ async def gpt_chat_completion( raise InvalidRequestError( f"'{max_prompt_tokens}' is less than the minimum of 1 - 'max_prompt_tokens'", ) - del data["max_prompt_tokens"] + del request["max_prompt_tokens"] - data["messages"], discarded_messages, estimated_prompt_tokens = ( + request["messages"], discarded_messages, estimated_prompt_tokens = ( plain_text_truncate_prompt( - 
messages=cast(List[dict], data["messages"]),
+                messages=cast(List[dict], request["messages"]),
                 max_prompt_tokens=max_prompt_tokens,
                 tokenizer=tokenizer,
             )
@@ -68,13 +68,13 @@
         {**creds, "api_version": api_version}
     )
     response: AsyncStream[ChatCompletionChunk] | ChatCompletion = (
-        await call_with_extra_body(client.chat.completions.create, data)
+        await call_with_extra_body(client.chat.completions.create, request)
     )
 
     if isinstance(response, AsyncIterator):
         return generate_stream(
             get_prompt_tokens=lambda: estimated_prompt_tokens
-            or tokenizer.tokenize_request(data["messages"]),
+            or tokenizer.tokenize_request(request, request["messages"]),
             tokenize_response=tokenizer.tokenize_response,
             deployment=deployment_id,
             discarded_messages=discarded_messages,
diff --git a/aidial_adapter_openai/gpt4_multi_modal/chat_completion.py b/aidial_adapter_openai/gpt4_multi_modal/chat_completion.py
index 9732cef..ce48e14 100644
--- a/aidial_adapter_openai/gpt4_multi_modal/chat_completion.py
+++ b/aidial_adapter_openai/gpt4_multi_modal/chat_completion.py
@@ -229,7 +229,7 @@ async def chat_completion(
         )
     else:
         estimated_prompt_tokens = tokenizer.tokenize_request(
-            multi_modal_messages
+            request, multi_modal_messages
         )
         logger.debug(
             f"prompt tokens without truncation: {estimated_prompt_tokens}"
diff --git a/aidial_adapter_openai/utils/tokenizer.py b/aidial_adapter_openai/utils/tokenizer.py
index 23ebe4f..d354521 100644
--- a/aidial_adapter_openai/utils/tokenizer.py
+++ b/aidial_adapter_openai/utils/tokenizer.py
@@ -44,27 +44,28 @@ def tokenize_text(self, text: str) -> int:
     def tokenize_response(self, resp: ChatCompletionResponse) -> int:
         return sum(map(self._tokenize_response_message, resp.messages))
 
+    def _tokenize_object(self, obj: Any) -> int:
+        if not obj:
+            return 0
+
+        # OpenAI doesn't reveal the tokenization algorithm for tool calls and function calls.
+        # An approximation is used instead: the token count of the string repr of the objects.
+        text = (
+            obj
+            if isinstance(obj, str)
+            else json.dumps(obj, separators=(",", ":"))
+        )
+        return self.tokenize_text(text)
+
     def _tokenize_response_message(self, message: Any) -> int:
-        def _tokenize(obj: Any) -> int:
-            if not obj:
-                return 0
-
-            # OpenAI doesn't reveal tokenize algo for tools calls and function calls.
-            # An approximation is used instead - tokens in the string repr of the objects. 
-            text = (
-                obj
-                if isinstance(obj, str)
-                else json.dumps(obj, separators=(",", ":"))
-            )
-            return self.tokenize_text(text)
 
         tokens = 0
 
         for key in ["content", "refusal", "function"]:
-            tokens += _tokenize(message.get(key))
+            tokens += self._tokenize_object(message.get(key))
 
         for tool_call in message.get("tool_calls") or []:
-            tokens += _tokenize(tool_call.get("function"))
+            tokens += self._tokenize_object(tool_call.get("function"))
 
         return tokens
 
@@ -86,10 +87,26 @@ def _tokens_per_request_message_name(self) -> int:
             return -1
         return 1
 
-    def tokenize_request(self, messages: List[MessageType]) -> int:
-        return self.TOKENS_PER_REQUEST + sum(
-            map(self.tokenize_request_message, messages)
-        )
+    def tokenize_request(
+        self,
+        original_request: dict,
+        messages: List[MessageType],
+    ) -> int:
+        tokens = self.TOKENS_PER_REQUEST
+
+        # Function declarations (legacy function-calling API) count towards the prompt.
+        if original_request.get("function_call") != "none":
+            for func in original_request.get("functions") or []:
+                tokens += self._tokenize_object(func)
+
+        # Each tool in the tools API carries a function declaration.
+        if original_request.get("tool_choice") != "none":
+            for tool in original_request.get("tools") or []:
+                tokens += self._tokenize_object(tool.get("function"))
+
+        tokens += sum(map(self.tokenize_request_message, messages))
+
+        return tokens
 
     @abstractmethod
     def tokenize_request_message(self, message: MessageType) -> int:
         pass
 
From c68499f73f901297966c2a26ce83ce3fa82de942 Mon Sep 17 00:00:00 2001
From: Anton Dubovik
Date: Fri, 8 Nov 2024 13:30:31 +0000
Subject: [PATCH 6/9] fix: fixed initial_prompt_tokens calculation

---
 aidial_adapter_openai/gpt.py                              | 8 ++++++--
 aidial_adapter_openai/gpt4_multi_modal/chat_completion.py | 6 +++---
 aidial_adapter_openai/utils/tokenizer.py                  | 4 +---
 tests/test_discard_messages.py                            | 4 ++--
 tests/test_multimodal_truncate.py                         | 6 +++---
 5 files changed, 15 insertions(+), 13 deletions(-)

diff --git a/aidial_adapter_openai/gpt.py b/aidial_adapter_openai/gpt.py
index 2670e87..5d6610d 100644
--- a/aidial_adapter_openai/gpt.py
+++ b/aidial_adapter_openai/gpt.py
@@ -23,14 +23,17 @@
 
 
 def plain_text_truncate_prompt(
-    messages: List[dict], max_prompt_tokens: int, tokenizer: PlainTextTokenizer
+    request: dict,
+    messages: List[dict],
+    max_prompt_tokens: int,
+    tokenizer: PlainTextTokenizer,
 ) -> Tuple[List[dict], DiscardedMessages, TruncatedTokens]:
     return truncate_prompt(
         messages=messages,
         message_tokens=tokenizer.tokenize_request_message,
         is_system_message=lambda message: message["role"] == "system",
         max_prompt_tokens=max_prompt_tokens,
-        initial_prompt_tokens=tokenizer.TOKENS_PER_REQUEST,
+        initial_prompt_tokens=tokenizer.tokenize_request(request, []),
     )
 
 
@@ -58,6 +61,7 @@ async def gpt_chat_completion(
 
         request["messages"], discarded_messages, estimated_prompt_tokens = (
             plain_text_truncate_prompt(
+                request=request,
                 messages=cast(List[dict], request["messages"]),
                 max_prompt_tokens=max_prompt_tokens,
                 tokenizer=tokenizer,
             )
diff --git a/aidial_adapter_openai/gpt4_multi_modal/chat_completion.py b/aidial_adapter_openai/gpt4_multi_modal/chat_completion.py
index ce48e14..26d82c7 100644
--- a/aidial_adapter_openai/gpt4_multi_modal/chat_completion.py
+++ b/aidial_adapter_openai/gpt4_multi_modal/chat_completion.py
@@ -119,9 +119,9 @@ async def predict_non_stream(
 
 
 def multi_modal_truncate_prompt(
+    request: dict,
     messages: List[MultiModalMessage],
     max_prompt_tokens: int,
-    initial_prompt_tokens: int,
     tokenizer: MultiModalTokenizer,
 ) -> Tuple[List[MultiModalMessage], DiscardedMessages, TruncatedTokens]:
     return truncate_prompt(
         messages=messages,
         message_tokens=tokenizer.tokenize_request_message,
         is_system_message=lambda message: 
message.raw_message["role"] == "system", max_prompt_tokens=max_prompt_tokens, - initial_prompt_tokens=initial_prompt_tokens, + initial_prompt_tokens=tokenizer.tokenize_request(request, []), ) @@ -218,9 +218,9 @@ async def chat_completion( if max_prompt_tokens is not None: multi_modal_messages, discarded_messages, estimated_prompt_tokens = ( multi_modal_truncate_prompt( + request=request, messages=multi_modal_messages, max_prompt_tokens=max_prompt_tokens, - initial_prompt_tokens=tokenizer.TOKENS_PER_REQUEST, tokenizer=tokenizer, ) ) diff --git a/aidial_adapter_openai/utils/tokenizer.py b/aidial_adapter_openai/utils/tokenizer.py index d354521..5463613 100644 --- a/aidial_adapter_openai/utils/tokenizer.py +++ b/aidial_adapter_openai/utils/tokenizer.py @@ -88,9 +88,7 @@ def _tokens_per_request_message_name(self) -> int: return 1 def tokenize_request( - self, - original_request: dict, - messages: List[MessageType], + self, original_request: dict, messages: List[MessageType] ) -> int: tokens = self.TOKENS_PER_REQUEST diff --git a/tests/test_discard_messages.py b/tests/test_discard_messages.py index aa98e71..742bb57 100644 --- a/tests/test_discard_messages.py +++ b/tests/test_discard_messages.py @@ -141,7 +141,7 @@ def test_discarded_messages_without_error( ): tokenizer = PlainTextTokenizer(model="gpt-4") truncated_messages, discarded_messages, _used_tokens = ( - plain_text_truncate_prompt(messages, max_prompt_tokens, tokenizer) + plain_text_truncate_prompt({}, messages, max_prompt_tokens, tokenizer) ) assert (truncated_messages, discarded_messages) == response @@ -157,5 +157,5 @@ def test_discarded_messages_with_error( tokenizer = PlainTextTokenizer(model="gpt-4") with pytest.raises(DialException) as e_info: - plain_text_truncate_prompt(messages, max_prompt_tokens, tokenizer) + plain_text_truncate_prompt({}, messages, max_prompt_tokens, tokenizer) assert e_info.value.message == error_message diff --git a/tests/test_multimodal_truncate.py b/tests/test_multimodal_truncate.py index 031ae3c..3db888c 100644 --- a/tests/test_multimodal_truncate.py +++ b/tests/test_multimodal_truncate.py @@ -45,7 +45,7 @@ def test_multimodal_truncate_with_system_and_last_user_error(): ), ] with pytest.raises(TruncatePromptSystemAndLastUserError): - multi_modal_truncate_prompt(transformations, 15, 0, tokenizer) + multi_modal_truncate_prompt({}, transformations, 15, tokenizer) def test_multimodal_truncate_with_system_error(): @@ -57,7 +57,7 @@ def test_multimodal_truncate_with_system_error(): ), ] with pytest.raises(TruncatePromptSystemError): - multi_modal_truncate_prompt(transformations, 9, 3, tokenizer) + multi_modal_truncate_prompt({}, transformations, 9, tokenizer) @pytest.mark.parametrize( @@ -194,9 +194,9 @@ def test_multimodal_truncate( ): truncated, actual_discarded_messages, actual_used_tokens = ( multi_modal_truncate_prompt( + {}, transformations, max_prompt_tokens, - initial_prompt_tokens=3, tokenizer=tokenizer, ) ) From 73fd8f1b23bc99b3ca46d62f3432d04357b7c9f1 Mon Sep 17 00:00:00 2001 From: Anton Dubovik Date: Fri, 8 Nov 2024 13:44:48 +0000 Subject: [PATCH 7/9] fix: added telemetry extras --- poetry.lock | 2 +- pyproject.toml | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/poetry.lock b/poetry.lock index 412edfa..2c6ebb6 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1892,4 +1892,4 @@ multidict = ">=4.0" [metadata] lock-version = "2.0" python-versions = ">=3.11,<3.13" -content-hash = "2b92b069222db76980c6d57d157f14e5a2fbcbd0daf321f77067e55d6c2e52bc" +content-hash = 
"053c9c65687a429e513d52464b44355cbb33545ebcdfc76fb4745d0a1771b6ee" diff --git a/pyproject.toml b/pyproject.toml index ff79126..4685a9b 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -33,7 +33,7 @@ pillow = "^10.3.0" azure-identity = "^1.16.1" # FIXME: migrate to the SDK once it released # aidial-sdk = {version = "^0.13.0", extras = ["telemetry"]} -aidial-sdk = {git = "https://github.com/epam/ai-dial-sdk.git", branch = "fix/fixed-sharing-issues-in-merge-chunks-algo"} +aidial-sdk = {git = "https://github.com/epam/ai-dial-sdk.git", branch = "fix/fixed-sharing-issues-in-merge-chunks-algo", extras = ["telemetry"]} [tool.poetry.group.test.dependencies] pytest = "7.4.0" From a96f2e04fd99b0650dc420b33d5ac98521719b17 Mon Sep 17 00:00:00 2001 From: Anton Dubovik Date: Mon, 18 Nov 2024 10:26:35 +0000 Subject: [PATCH 8/9] chore: bump aidial-sdk from 0.13.0 to 0.15.0 --- poetry.lock | 586 +++++++++++++++++++++++++++++++++++++++++++++++-- pyproject.toml | 4 +- 2 files changed, 573 insertions(+), 17 deletions(-) diff --git a/poetry.lock b/poetry.lock index 2c6ebb6..7814cc7 100644 --- a/poetry.lock +++ b/poetry.lock @@ -2,31 +2,41 @@ [[package]] name = "aidial-sdk" -version = "0.15.0rc" +version = "0.15.0" description = "Framework to create applications and model adapters for AI DIAL" optional = false -python-versions = ">=3.8.1,<4.0" -files = [] -develop = false +python-versions = "<4.0,>=3.8.1" +files = [ + {file = "aidial_sdk-0.15.0-py3-none-any.whl", hash = "sha256:7b9b3e5ec9688be2919dcd7dd0312aac807dc7917393ee5f846332713ad2e26a"}, + {file = "aidial_sdk-0.15.0.tar.gz", hash = "sha256:6b47bb36e8c795300e0d4b61308c6a2f86b59abb97905390a02789b343460720"}, +] [package.dependencies] -aiohttp = "^3.8.3" +aiohttp = ">=3.8.3,<4.0.0" fastapi = ">=0.51,<1.0" httpx = ">=0.25.0,<1.0" +opentelemetry-api = {version = "1.20.0", optional = true, markers = "extra == \"telemetry\""} +opentelemetry-distro = {version = "0.41b0", optional = true, markers = "extra == \"telemetry\""} +opentelemetry-exporter-otlp-proto-grpc = {version = "1.20.0", optional = true, markers = "extra == \"telemetry\""} +opentelemetry-exporter-prometheus = {version = "0.41b0", optional = true, markers = "extra == \"telemetry\""} +opentelemetry-instrumentation = {version = "0.41b0", optional = true, markers = "extra == \"telemetry\""} +opentelemetry-instrumentation-aiohttp-client = {version = "0.41b0", optional = true, markers = "extra == \"telemetry\""} +opentelemetry-instrumentation-fastapi = {version = "0.41b0", optional = true, markers = "extra == \"telemetry\""} +opentelemetry-instrumentation-httpx = {version = "0.41b0", optional = true, markers = "extra == \"telemetry\""} +opentelemetry-instrumentation-logging = {version = "0.41b0", optional = true, markers = "extra == \"telemetry\""} +opentelemetry-instrumentation-requests = {version = "0.41b0", optional = true, markers = "extra == \"telemetry\""} +opentelemetry-instrumentation-system-metrics = {version = "0.41b0", optional = true, markers = "extra == \"telemetry\""} +opentelemetry-instrumentation-urllib = {version = "0.41b0", optional = true, markers = "extra == \"telemetry\""} +opentelemetry-sdk = {version = "1.20.0", optional = true, markers = "extra == \"telemetry\""} +prometheus-client = {version = "0.17.1", optional = true, markers = "extra == \"telemetry\""} pydantic = ">=1.10,<3" -requests = "^2.19" +requests = ">=2.19,<3.0" uvicorn = ">=0.19,<1.0" -wrapt = "^1.14" +wrapt = ">=1.14,<2.0" [package.extras] telemetry = ["opentelemetry-api (==1.20.0)", 
"opentelemetry-distro (==0.41b0)", "opentelemetry-exporter-otlp-proto-grpc (==1.20.0)", "opentelemetry-exporter-prometheus (==0.41b0)", "opentelemetry-instrumentation (==0.41b0)", "opentelemetry-instrumentation-aiohttp-client (==0.41b0)", "opentelemetry-instrumentation-fastapi (==0.41b0)", "opentelemetry-instrumentation-httpx (==0.41b0)", "opentelemetry-instrumentation-logging (==0.41b0)", "opentelemetry-instrumentation-requests (==0.41b0)", "opentelemetry-instrumentation-system-metrics (==0.41b0)", "opentelemetry-instrumentation-urllib (==0.41b0)", "opentelemetry-sdk (==1.20.0)", "prometheus-client (==0.17.1)"] -[package.source] -type = "git" -url = "https://github.com/epam/ai-dial-sdk.git" -reference = "fix/fixed-sharing-issues-in-merge-chunks-algo" -resolved_reference = "df41a7c26b5f8413705b1736b59e1f6e04b68eef" - [[package]] name = "aiohappyeyeballs" version = "2.3.5" @@ -182,6 +192,20 @@ files = [ [package.extras] test = ["coverage", "mypy", "pexpect", "ruff", "wheel"] +[[package]] +name = "asgiref" +version = "3.8.1" +description = "ASGI specs, helper code, and adapters" +optional = false +python-versions = ">=3.8" +files = [ + {file = "asgiref-3.8.1-py3-none-any.whl", hash = "sha256:3e1e3ecc849832fe52ccf2cb6686b7a55f82bb1d6aee72a58826471390335e47"}, + {file = "asgiref-3.8.1.tar.gz", hash = "sha256:c343bd80a0bec947a9860adb4c432ffa7db769836c64238fc34bdc3fec84d590"}, +] + +[package.extras] +tests = ["mypy (>=0.800)", "pytest", "pytest-asyncio"] + [[package]] name = "attrs" version = "23.1.0" @@ -250,6 +274,17 @@ cryptography = ">=2.5" msal = ">=1.24.0" msal-extensions = ">=0.3.0" +[[package]] +name = "backoff" +version = "2.2.1" +description = "Function decoration for backoff and retry" +optional = false +python-versions = ">=3.7,<4.0" +files = [ + {file = "backoff-2.2.1-py3-none-any.whl", hash = "sha256:63579f9a0628e06278f7e47b7d7d5b6ce20dc65c5e96a6f3ca99a6adca0396e8"}, + {file = "backoff-2.2.1.tar.gz", hash = "sha256:03f829f5bb1923180821643f8753b0502c3b682293992485b0eef2807afa5cba"}, +] + [[package]] name = "black" version = "24.3.0" @@ -559,6 +594,23 @@ ssh = ["bcrypt (>=3.1.5)"] test = ["certifi", "cryptography-vectors (==43.0.1)", "pretend", "pytest (>=6.2.0)", "pytest-benchmark", "pytest-cov", "pytest-xdist"] test-randomorder = ["pytest-randomly"] +[[package]] +name = "deprecated" +version = "1.2.15" +description = "Python @deprecated decorator to deprecate old python classes, functions or methods." 
+optional = false +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,>=2.7" +files = [ + {file = "Deprecated-1.2.15-py2.py3-none-any.whl", hash = "sha256:353bc4a8ac4bfc96800ddab349d89c25dec1079f65fd53acdcc1e0b975b21320"}, + {file = "deprecated-1.2.15.tar.gz", hash = "sha256:683e561a90de76239796e6b6feac66b99030d2dd3fcf61ef996330f14bbb9b0d"}, +] + +[package.dependencies] +wrapt = ">=1.10,<2" + +[package.extras] +dev = ["PyTest", "PyTest-Cov", "bump2version (<1)", "jinja2 (>=3.0.3,<3.1.0)", "setuptools", "sphinx (<2)", "tox"] + [[package]] name = "distlib" version = "0.3.7" @@ -703,6 +755,90 @@ files = [ {file = "frozenlist-1.4.0.tar.gz", hash = "sha256:09163bdf0b2907454042edb19f887c6d33806adc71fbd54afc14908bfdc22251"}, ] +[[package]] +name = "googleapis-common-protos" +version = "1.66.0" +description = "Common protobufs used in Google APIs" +optional = false +python-versions = ">=3.7" +files = [ + {file = "googleapis_common_protos-1.66.0-py2.py3-none-any.whl", hash = "sha256:d7abcd75fabb2e0ec9f74466401f6c119a0b498e27370e9be4c94cb7e382b8ed"}, + {file = "googleapis_common_protos-1.66.0.tar.gz", hash = "sha256:c3e7b33d15fdca5374cc0a7346dd92ffa847425cc4ea941d970f13680052ec8c"}, +] + +[package.dependencies] +protobuf = ">=3.20.2,<4.21.1 || >4.21.1,<4.21.2 || >4.21.2,<4.21.3 || >4.21.3,<4.21.4 || >4.21.4,<4.21.5 || >4.21.5,<6.0.0.dev0" + +[package.extras] +grpc = ["grpcio (>=1.44.0,<2.0.0.dev0)"] + +[[package]] +name = "grpcio" +version = "1.68.0" +description = "HTTP/2-based RPC framework" +optional = false +python-versions = ">=3.8" +files = [ + {file = "grpcio-1.68.0-cp310-cp310-linux_armv7l.whl", hash = "sha256:619b5d0f29f4f5351440e9343224c3e19912c21aeda44e0c49d0d147a8d01544"}, + {file = "grpcio-1.68.0-cp310-cp310-macosx_12_0_universal2.whl", hash = "sha256:a59f5822f9459bed098ffbceb2713abbf7c6fd13f2b9243461da5c338d0cd6c3"}, + {file = "grpcio-1.68.0-cp310-cp310-manylinux_2_17_aarch64.whl", hash = "sha256:c03d89df516128febc5a7e760d675b478ba25802447624edf7aa13b1e7b11e2a"}, + {file = "grpcio-1.68.0-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:44bcbebb24363d587472089b89e2ea0ab2e2b4df0e4856ba4c0b087c82412121"}, + {file = "grpcio-1.68.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:79f81b7fbfb136247b70465bd836fa1733043fdee539cd6031cb499e9608a110"}, + {file = "grpcio-1.68.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:88fb2925789cfe6daa20900260ef0a1d0a61283dfb2d2fffe6194396a354c618"}, + {file = "grpcio-1.68.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:99f06232b5c9138593ae6f2e355054318717d32a9c09cdc5a2885540835067a1"}, + {file = "grpcio-1.68.0-cp310-cp310-win32.whl", hash = "sha256:a6213d2f7a22c3c30a479fb5e249b6b7e648e17f364598ff64d08a5136fe488b"}, + {file = "grpcio-1.68.0-cp310-cp310-win_amd64.whl", hash = "sha256:15327ab81131ef9b94cb9f45b5bd98803a179c7c61205c8c0ac9aff9d6c4e82a"}, + {file = "grpcio-1.68.0-cp311-cp311-linux_armv7l.whl", hash = "sha256:3b2b559beb2d433129441783e5f42e3be40a9e1a89ec906efabf26591c5cd415"}, + {file = "grpcio-1.68.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:e46541de8425a4d6829ac6c5d9b16c03c292105fe9ebf78cb1c31e8d242f9155"}, + {file = "grpcio-1.68.0-cp311-cp311-manylinux_2_17_aarch64.whl", hash = "sha256:c1245651f3c9ea92a2db4f95d37b7597db6b246d5892bca6ee8c0e90d76fb73c"}, + {file = "grpcio-1.68.0-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4f1931c7aa85be0fa6cea6af388e576f3bf6baee9e5d481c586980c774debcb4"}, + {file = 
"grpcio-1.68.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8b0ff09c81e3aded7a183bc6473639b46b6caa9c1901d6f5e2cba24b95e59e30"}, + {file = "grpcio-1.68.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:8c73f9fbbaee1a132487e31585aa83987ddf626426d703ebcb9a528cf231c9b1"}, + {file = "grpcio-1.68.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:6b2f98165ea2790ea159393a2246b56f580d24d7da0d0342c18a085299c40a75"}, + {file = "grpcio-1.68.0-cp311-cp311-win32.whl", hash = "sha256:e1e7ed311afb351ff0d0e583a66fcb39675be112d61e7cfd6c8269884a98afbc"}, + {file = "grpcio-1.68.0-cp311-cp311-win_amd64.whl", hash = "sha256:e0d2f68eaa0a755edd9a47d40e50dba6df2bceda66960dee1218da81a2834d27"}, + {file = "grpcio-1.68.0-cp312-cp312-linux_armv7l.whl", hash = "sha256:8af6137cc4ae8e421690d276e7627cfc726d4293f6607acf9ea7260bd8fc3d7d"}, + {file = "grpcio-1.68.0-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:4028b8e9a3bff6f377698587d642e24bd221810c06579a18420a17688e421af7"}, + {file = "grpcio-1.68.0-cp312-cp312-manylinux_2_17_aarch64.whl", hash = "sha256:f60fa2adf281fd73ae3a50677572521edca34ba373a45b457b5ebe87c2d01e1d"}, + {file = "grpcio-1.68.0-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e18589e747c1e70b60fab6767ff99b2d0c359ea1db8a2cb524477f93cdbedf5b"}, + {file = "grpcio-1.68.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e0d30f3fee9372796f54d3100b31ee70972eaadcc87314be369360248a3dcffe"}, + {file = "grpcio-1.68.0-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:7e0a3e72c0e9a1acab77bef14a73a416630b7fd2cbd893c0a873edc47c42c8cd"}, + {file = "grpcio-1.68.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:a831dcc343440969aaa812004685ed322cdb526cd197112d0db303b0da1e8659"}, + {file = "grpcio-1.68.0-cp312-cp312-win32.whl", hash = "sha256:5a180328e92b9a0050958ced34dddcb86fec5a8b332f5a229e353dafc16cd332"}, + {file = "grpcio-1.68.0-cp312-cp312-win_amd64.whl", hash = "sha256:2bddd04a790b69f7a7385f6a112f46ea0b34c4746f361ebafe9ca0be567c78e9"}, + {file = "grpcio-1.68.0-cp313-cp313-linux_armv7l.whl", hash = "sha256:fc05759ffbd7875e0ff2bd877be1438dfe97c9312bbc558c8284a9afa1d0f40e"}, + {file = "grpcio-1.68.0-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:15fa1fe25d365a13bc6d52fcac0e3ee1f9baebdde2c9b3b2425f8a4979fccea1"}, + {file = "grpcio-1.68.0-cp313-cp313-manylinux_2_17_aarch64.whl", hash = "sha256:32a9cb4686eb2e89d97022ecb9e1606d132f85c444354c17a7dbde4a455e4a3b"}, + {file = "grpcio-1.68.0-cp313-cp313-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:dba037ff8d284c8e7ea9a510c8ae0f5b016004f13c3648f72411c464b67ff2fb"}, + {file = "grpcio-1.68.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0efbbd849867e0e569af09e165363ade75cf84f5229b2698d53cf22c7a4f9e21"}, + {file = "grpcio-1.68.0-cp313-cp313-musllinux_1_1_i686.whl", hash = "sha256:4e300e6978df0b65cc2d100c54e097c10dfc7018b9bd890bbbf08022d47f766d"}, + {file = "grpcio-1.68.0-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:6f9c7ad1a23e1047f827385f4713b5b8c6c7d325705be1dd3e31fb00dcb2f665"}, + {file = "grpcio-1.68.0-cp313-cp313-win32.whl", hash = "sha256:3ac7f10850fd0487fcce169c3c55509101c3bde2a3b454869639df2176b60a03"}, + {file = "grpcio-1.68.0-cp313-cp313-win_amd64.whl", hash = "sha256:afbf45a62ba85a720491bfe9b2642f8761ff348006f5ef67e4622621f116b04a"}, + {file = "grpcio-1.68.0-cp38-cp38-linux_armv7l.whl", hash = "sha256:f8f695d9576ce836eab27ba7401c60acaf9ef6cf2f70dfe5462055ba3df02cc3"}, + {file = 
"grpcio-1.68.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:9fe1b141cda52f2ca73e17d2d3c6a9f3f3a0c255c216b50ce616e9dca7e3441d"}, + {file = "grpcio-1.68.0-cp38-cp38-manylinux_2_17_aarch64.whl", hash = "sha256:4df81d78fd1646bf94ced4fb4cd0a7fe2e91608089c522ef17bc7db26e64effd"}, + {file = "grpcio-1.68.0-cp38-cp38-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:46a2d74d4dd8993151c6cd585594c082abe74112c8e4175ddda4106f2ceb022f"}, + {file = "grpcio-1.68.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a17278d977746472698460c63abf333e1d806bd41f2224f90dbe9460101c9796"}, + {file = "grpcio-1.68.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:15377bce516b1c861c35e18eaa1c280692bf563264836cece693c0f169b48829"}, + {file = "grpcio-1.68.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:cc5f0a4f5904b8c25729a0498886b797feb817d1fd3812554ffa39551112c161"}, + {file = "grpcio-1.68.0-cp38-cp38-win32.whl", hash = "sha256:def1a60a111d24376e4b753db39705adbe9483ef4ca4761f825639d884d5da78"}, + {file = "grpcio-1.68.0-cp38-cp38-win_amd64.whl", hash = "sha256:55d3b52fd41ec5772a953612db4e70ae741a6d6ed640c4c89a64f017a1ac02b5"}, + {file = "grpcio-1.68.0-cp39-cp39-linux_armv7l.whl", hash = "sha256:0d230852ba97654453d290e98d6aa61cb48fa5fafb474fb4c4298d8721809354"}, + {file = "grpcio-1.68.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:50992f214264e207e07222703c17d9cfdcc2c46ed5a1ea86843d440148ebbe10"}, + {file = "grpcio-1.68.0-cp39-cp39-manylinux_2_17_aarch64.whl", hash = "sha256:14331e5c27ed3545360464a139ed279aa09db088f6e9502e95ad4bfa852bb116"}, + {file = "grpcio-1.68.0-cp39-cp39-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f84890b205692ea813653ece4ac9afa2139eae136e419231b0eec7c39fdbe4c2"}, + {file = "grpcio-1.68.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b0cf343c6f4f6aa44863e13ec9ddfe299e0be68f87d68e777328bff785897b05"}, + {file = "grpcio-1.68.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:fd2c2d47969daa0e27eadaf15c13b5e92605c5e5953d23c06d0b5239a2f176d3"}, + {file = "grpcio-1.68.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:18668e36e7f4045820f069997834e94e8275910b1f03e078a6020bd464cb2363"}, + {file = "grpcio-1.68.0-cp39-cp39-win32.whl", hash = "sha256:2af76ab7c427aaa26aa9187c3e3c42f38d3771f91a20f99657d992afada2294a"}, + {file = "grpcio-1.68.0-cp39-cp39-win_amd64.whl", hash = "sha256:e694b5928b7b33ca2d3b4d5f9bf8b5888906f181daff6b406f4938f3a997a490"}, + {file = "grpcio-1.68.0.tar.gz", hash = "sha256:7e7483d39b4a4fddb9906671e9ea21aaad4f031cdfc349fec76bdfa1e404543a"}, +] + +[package.extras] +protobuf = ["grpcio-tools (>=1.68.0)"] + [[package]] name = "h11" version = "0.14.0" @@ -770,6 +906,25 @@ files = [ {file = "idna-3.7.tar.gz", hash = "sha256:028ff3aadf0609c1fd278d8ea3089299412a7a8b9bd005dd08b9f8285bcb5cfc"}, ] +[[package]] +name = "importlib-metadata" +version = "6.11.0" +description = "Read metadata from Python packages" +optional = false +python-versions = ">=3.8" +files = [ + {file = "importlib_metadata-6.11.0-py3-none-any.whl", hash = "sha256:f0afba6205ad8f8947c7d338b5342d5db2afbfd82f9cbef7879a9539cc12eb9b"}, + {file = "importlib_metadata-6.11.0.tar.gz", hash = "sha256:1231cf92d825c9e03cfc4da076a16de6422c863558229ea0b22b675657463443"}, +] + +[package.dependencies] +zipp = ">=0.5" + +[package.extras] +docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (<7.2.5)", "sphinx (>=3.5)", "sphinx-lint"] +perf = ["ipython"] +testing = ["flufl.flake8", 
"importlib-resources (>=1.3)", "packaging", "pyfakefs", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-mypy (>=0.9.1)", "pytest-perf (>=0.9.2)", "pytest-ruff"] + [[package]] name = "iniconfig" version = "2.0.0" @@ -1039,6 +1194,328 @@ typing-extensions = ">=4.7,<5" [package.extras] datalib = ["numpy (>=1)", "pandas (>=1.2.3)", "pandas-stubs (>=1.1.0.11)"] +[[package]] +name = "opentelemetry-api" +version = "1.20.0" +description = "OpenTelemetry Python API" +optional = false +python-versions = ">=3.7" +files = [ + {file = "opentelemetry_api-1.20.0-py3-none-any.whl", hash = "sha256:982b76036fec0fdaf490ae3dfd9f28c81442a33414f737abc687a32758cdcba5"}, + {file = "opentelemetry_api-1.20.0.tar.gz", hash = "sha256:06abe351db7572f8afdd0fb889ce53f3c992dbf6f6262507b385cc1963e06983"}, +] + +[package.dependencies] +deprecated = ">=1.2.6" +importlib-metadata = ">=6.0,<7.0" + +[[package]] +name = "opentelemetry-distro" +version = "0.41b0" +description = "OpenTelemetry Python Distro" +optional = false +python-versions = ">=3.7" +files = [ + {file = "opentelemetry_distro-0.41b0-py3-none-any.whl", hash = "sha256:61a028dc8c1418b8634a5bf71e15ad85427cb55d97a0cd6a58dd135e456cc027"}, + {file = "opentelemetry_distro-0.41b0.tar.gz", hash = "sha256:8ce05f9499a09c99d9c5f550ff2ed6d229444cae17ae36baf705b0ccb647a959"}, +] + +[package.dependencies] +opentelemetry-api = ">=1.12,<2.0" +opentelemetry-instrumentation = "0.41b0" +opentelemetry-sdk = ">=1.13,<2.0" + +[package.extras] +otlp = ["opentelemetry-exporter-otlp (==1.20.0)"] + +[[package]] +name = "opentelemetry-exporter-otlp-proto-common" +version = "1.20.0" +description = "OpenTelemetry Protobuf encoding" +optional = false +python-versions = ">=3.7" +files = [ + {file = "opentelemetry_exporter_otlp_proto_common-1.20.0-py3-none-any.whl", hash = "sha256:dd63209b40702636ab6ae76a06b401b646ad7b008a906ecb41222d4af24fbdef"}, + {file = "opentelemetry_exporter_otlp_proto_common-1.20.0.tar.gz", hash = "sha256:df60c681bd61812e50b3a39a7a1afeeb6d4066117583249fcc262269374e7a49"}, +] + +[package.dependencies] +backoff = {version = ">=1.10.0,<3.0.0", markers = "python_version >= \"3.7\""} +opentelemetry-proto = "1.20.0" + +[[package]] +name = "opentelemetry-exporter-otlp-proto-grpc" +version = "1.20.0" +description = "OpenTelemetry Collector Protobuf over gRPC Exporter" +optional = false +python-versions = ">=3.7" +files = [ + {file = "opentelemetry_exporter_otlp_proto_grpc-1.20.0-py3-none-any.whl", hash = "sha256:7c3f066065891b56348ba2c7f9df6ec635a712841cae0a36f2f6a81642ae7dec"}, + {file = "opentelemetry_exporter_otlp_proto_grpc-1.20.0.tar.gz", hash = "sha256:6c06d43c3771bda1795226e327722b4b980fa1ca1ec9e985f2ef3e29795bdd52"}, +] + +[package.dependencies] +backoff = {version = ">=1.10.0,<3.0.0", markers = "python_version >= \"3.7\""} +deprecated = ">=1.2.6" +googleapis-common-protos = ">=1.52,<2.0" +grpcio = ">=1.0.0,<2.0.0" +opentelemetry-api = ">=1.15,<2.0" +opentelemetry-exporter-otlp-proto-common = "1.20.0" +opentelemetry-proto = "1.20.0" +opentelemetry-sdk = ">=1.20.0,<1.21.0" + +[package.extras] +test = ["pytest-grpc"] + +[[package]] +name = "opentelemetry-exporter-prometheus" +version = "0.41b0" +description = "Prometheus Metric Exporter for OpenTelemetry" +optional = false +python-versions = ">=3.7" +files = [ + {file = "opentelemetry_exporter_prometheus-0.41b0-py3-none-any.whl", hash = "sha256:ca996f3bc15b0cbf3abd798e786095a202650202a5c0edd9e34bb9186a247b79"}, + {file = 
"opentelemetry_exporter_prometheus-0.41b0.tar.gz", hash = "sha256:0cc58d5d10040e69090637803b97e120f558467037c88988742c80a627e7f1ed"}, +] + +[package.dependencies] +opentelemetry-api = ">=1.12,<2.0" +opentelemetry-sdk = ">=1.12,<2.0" +prometheus-client = ">=0.5.0,<1.0.0" + +[[package]] +name = "opentelemetry-instrumentation" +version = "0.41b0" +description = "Instrumentation Tools & Auto Instrumentation for OpenTelemetry Python" +optional = false +python-versions = ">=3.7" +files = [ + {file = "opentelemetry_instrumentation-0.41b0-py3-none-any.whl", hash = "sha256:0ef9e5705ceca0205992a4a845ae4251ce6ec15a1206ca07c2b00afb0c5bd386"}, + {file = "opentelemetry_instrumentation-0.41b0.tar.gz", hash = "sha256:214382ba10dfd29d4e24898a4c7ef18b7368178a6277a1aec95cdb75cabf4612"}, +] + +[package.dependencies] +opentelemetry-api = ">=1.4,<2.0" +setuptools = ">=16.0" +wrapt = ">=1.0.0,<2.0.0" + +[[package]] +name = "opentelemetry-instrumentation-aiohttp-client" +version = "0.41b0" +description = "OpenTelemetry aiohttp client instrumentation" +optional = false +python-versions = ">=3.7" +files = [ + {file = "opentelemetry_instrumentation_aiohttp_client-0.41b0-py3-none-any.whl", hash = "sha256:a1d0d18dee5e57cf9187d1a561f9d4ce56d16433231208405458358ff6399a6f"}, + {file = "opentelemetry_instrumentation_aiohttp_client-0.41b0.tar.gz", hash = "sha256:56fd35e90c2534b2647e7cdd85f34383eddaa300ee51e989c3763dcdb205ca91"}, +] + +[package.dependencies] +opentelemetry-api = ">=1.12,<2.0" +opentelemetry-instrumentation = "0.41b0" +opentelemetry-semantic-conventions = "0.41b0" +opentelemetry-util-http = "0.41b0" +wrapt = ">=1.0.0,<2.0.0" + +[package.extras] +instruments = ["aiohttp (>=3.0,<4.0)"] +test = ["http-server-mock", "opentelemetry-instrumentation-aiohttp-client[instruments]"] + +[[package]] +name = "opentelemetry-instrumentation-asgi" +version = "0.41b0" +description = "ASGI instrumentation for OpenTelemetry" +optional = false +python-versions = ">=3.7" +files = [ + {file = "opentelemetry_instrumentation_asgi-0.41b0-py3-none-any.whl", hash = "sha256:46084195fb9c50507abbe1dd490ae4c31c8658c5790f1ddf7af95c417dbe6422"}, + {file = "opentelemetry_instrumentation_asgi-0.41b0.tar.gz", hash = "sha256:921244138b37a9a25edf2153f1c248f16f98610ee8d840b25fd7bf6b165e4d72"}, +] + +[package.dependencies] +asgiref = ">=3.0,<4.0" +opentelemetry-api = ">=1.12,<2.0" +opentelemetry-instrumentation = "0.41b0" +opentelemetry-semantic-conventions = "0.41b0" +opentelemetry-util-http = "0.41b0" + +[package.extras] +instruments = ["asgiref (>=3.0,<4.0)"] +test = ["opentelemetry-instrumentation-asgi[instruments]", "opentelemetry-test-utils (==0.41b0)"] + +[[package]] +name = "opentelemetry-instrumentation-fastapi" +version = "0.41b0" +description = "OpenTelemetry FastAPI Instrumentation" +optional = false +python-versions = ">=3.7" +files = [ + {file = "opentelemetry_instrumentation_fastapi-0.41b0-py3-none-any.whl", hash = "sha256:5990368e99ecc989df0a248a0b9b8e85d8b3eb7c1dbf5131c36982ba7f4a43b7"}, + {file = "opentelemetry_instrumentation_fastapi-0.41b0.tar.gz", hash = "sha256:eb4ceefe8b944fc9ea5e61fa558b99afd1285431b563f3f0104ac177cde4dfe5"}, +] + +[package.dependencies] +opentelemetry-api = ">=1.12,<2.0" +opentelemetry-instrumentation = "0.41b0" +opentelemetry-instrumentation-asgi = "0.41b0" +opentelemetry-semantic-conventions = "0.41b0" +opentelemetry-util-http = "0.41b0" + +[package.extras] +instruments = ["fastapi (>=0.58,<1.0)"] +test = ["httpx (>=0.22,<1.0)", "opentelemetry-instrumentation-fastapi[instruments]", 
"opentelemetry-test-utils (==0.41b0)", "requests (>=2.23,<3.0)"] + +[[package]] +name = "opentelemetry-instrumentation-httpx" +version = "0.41b0" +description = "OpenTelemetry HTTPX Instrumentation" +optional = false +python-versions = ">=3.7" +files = [ + {file = "opentelemetry_instrumentation_httpx-0.41b0-py3-none-any.whl", hash = "sha256:6ada84b7caa95a2889b2d883c089a977546b0102c815658b88f1c2dae713e9b2"}, + {file = "opentelemetry_instrumentation_httpx-0.41b0.tar.gz", hash = "sha256:96ebc54f3f41bfcd2fc043349c8cee4b11737602512383d437e24c39a1e4adff"}, +] + +[package.dependencies] +opentelemetry-api = ">=1.12,<2.0" +opentelemetry-instrumentation = "0.41b0" +opentelemetry-semantic-conventions = "0.41b0" + +[package.extras] +instruments = ["httpx (>=0.18.0)"] +test = ["opentelemetry-instrumentation-httpx[instruments]", "opentelemetry-sdk (>=1.12,<2.0)", "opentelemetry-test-utils (==0.41b0)"] + +[[package]] +name = "opentelemetry-instrumentation-logging" +version = "0.41b0" +description = "OpenTelemetry Logging instrumentation" +optional = false +python-versions = "*" +files = [ + {file = "opentelemetry_instrumentation_logging-0.41b0-py2.py3-none-any.whl", hash = "sha256:ab7117886695c32eb30d7a59199292283c5e652e2b9f2d11874fe4359eacc16a"}, + {file = "opentelemetry_instrumentation_logging-0.41b0.tar.gz", hash = "sha256:8ad46e011a99df726323428f0d0a09bf68159ab776b8184ba6d83a7c44f7de81"}, +] + +[package.dependencies] +opentelemetry-api = ">=1.12,<2.0" +opentelemetry-instrumentation = "0.41b0" + +[package.extras] +test = ["opentelemetry-test-utils (==0.41b0)"] + +[[package]] +name = "opentelemetry-instrumentation-requests" +version = "0.41b0" +description = "OpenTelemetry requests instrumentation" +optional = false +python-versions = ">=3.7" +files = [ + {file = "opentelemetry_instrumentation_requests-0.41b0-py3-none-any.whl", hash = "sha256:687fde31111669e729054e64d246c96b0b9d4d8702bd0e3569b7660bdb528d71"}, + {file = "opentelemetry_instrumentation_requests-0.41b0.tar.gz", hash = "sha256:bdc5515ae7533e620b312fd989941b7c2c92d492a2d4418f6ef8db5d7422fa64"}, +] + +[package.dependencies] +opentelemetry-api = ">=1.12,<2.0" +opentelemetry-instrumentation = "0.41b0" +opentelemetry-semantic-conventions = "0.41b0" +opentelemetry-util-http = "0.41b0" + +[package.extras] +instruments = ["requests (>=2.0,<3.0)"] +test = ["httpretty (>=1.0,<2.0)", "opentelemetry-instrumentation-requests[instruments]", "opentelemetry-test-utils (==0.41b0)"] + +[[package]] +name = "opentelemetry-instrumentation-system-metrics" +version = "0.41b0" +description = "OpenTelemetry System Metrics Instrumentation" +optional = false +python-versions = ">=3.7" +files = [ + {file = "opentelemetry_instrumentation_system_metrics-0.41b0-py3-none-any.whl", hash = "sha256:4f2106cf4b77664eb9096727eaba4ccffe28ebf426068b19aa7289644d4b9680"}, + {file = "opentelemetry_instrumentation_system_metrics-0.41b0.tar.gz", hash = "sha256:727193655d81d31a89e118d905a2691e80d967993ae62bac96979a373f59485a"}, +] + +[package.dependencies] +opentelemetry-api = ">=1.11,<2.0" +opentelemetry-sdk = ">=1.11,<2.0" +psutil = ">=5.9,<6.0" + +[package.extras] +instruments = ["psutil (>=5)"] +test = ["opentelemetry-instrumentation-system-metrics[instruments]", "opentelemetry-test-utils (==0.41b0)"] + +[[package]] +name = "opentelemetry-instrumentation-urllib" +version = "0.41b0" +description = "OpenTelemetry urllib instrumentation" +optional = false +python-versions = ">=3.7" +files = [ + {file = "opentelemetry_instrumentation_urllib-0.41b0-py3-none-any.whl", hash = 
"sha256:cee9e95f55a73480df0915358ce8668bbeda53324c9426847e2ccaea0cac1a87"}, + {file = "opentelemetry_instrumentation_urllib-0.41b0.tar.gz", hash = "sha256:113416b8bd9c2d5c890cb6f86737886e209a3776c2ecdc023887bd78634d5ef3"}, +] + +[package.dependencies] +opentelemetry-api = ">=1.12,<2.0" +opentelemetry-instrumentation = "0.41b0" +opentelemetry-semantic-conventions = "0.41b0" +opentelemetry-util-http = "0.41b0" + +[package.extras] +test = ["httpretty (>=1.0,<2.0)", "opentelemetry-test-utils (==0.41b0)"] + +[[package]] +name = "opentelemetry-proto" +version = "1.20.0" +description = "OpenTelemetry Python Proto" +optional = false +python-versions = ">=3.7" +files = [ + {file = "opentelemetry_proto-1.20.0-py3-none-any.whl", hash = "sha256:512c3d2c6864fb7547a69577c3907348e6c985b7a204533563cb4c4c5046203b"}, + {file = "opentelemetry_proto-1.20.0.tar.gz", hash = "sha256:cf01f49b3072ee57468bccb1a4f93bdb55411f4512d0ac3f97c5c04c0040b5a2"}, +] + +[package.dependencies] +protobuf = ">=3.19,<5.0" + +[[package]] +name = "opentelemetry-sdk" +version = "1.20.0" +description = "OpenTelemetry Python SDK" +optional = false +python-versions = ">=3.7" +files = [ + {file = "opentelemetry_sdk-1.20.0-py3-none-any.whl", hash = "sha256:f2230c276ff4c63ea09b3cb2e2ac6b1265f90af64e8d16bbf275c81a9ce8e804"}, + {file = "opentelemetry_sdk-1.20.0.tar.gz", hash = "sha256:702e432a457fa717fd2ddfd30640180e69938f85bb7fec3e479f85f61c1843f8"}, +] + +[package.dependencies] +opentelemetry-api = "1.20.0" +opentelemetry-semantic-conventions = "0.41b0" +typing-extensions = ">=3.7.4" + +[[package]] +name = "opentelemetry-semantic-conventions" +version = "0.41b0" +description = "OpenTelemetry Semantic Conventions" +optional = false +python-versions = ">=3.7" +files = [ + {file = "opentelemetry_semantic_conventions-0.41b0-py3-none-any.whl", hash = "sha256:45404391ed9e50998183a4925ad1b497c01c143f06500c3b9c3d0013492bb0f2"}, + {file = "opentelemetry_semantic_conventions-0.41b0.tar.gz", hash = "sha256:0ce5b040b8a3fc816ea5879a743b3d6fe5db61f6485e4def94c6ee4d402e1eb7"}, +] + +[[package]] +name = "opentelemetry-util-http" +version = "0.41b0" +description = "Web util for OpenTelemetry" +optional = false +python-versions = ">=3.7" +files = [ + {file = "opentelemetry_util_http-0.41b0-py3-none-any.whl", hash = "sha256:6a167fd1e0e8b0f629530d971165b5d82ed0be2154b7f29498499c3a517edee5"}, + {file = "opentelemetry_util_http-0.41b0.tar.gz", hash = "sha256:16d5bd04a380dc1079e766562d1e1626cbb47720f197f67010c45f090fffdfb3"}, +] + [[package]] name = "packaging" version = "23.2" @@ -1196,6 +1673,68 @@ docs = ["sphinx (>=1.7.1)"] redis = ["redis"] tests = ["pytest (>=5.4.1)", "pytest-cov (>=2.8.1)", "pytest-mypy (>=0.8.0)", "pytest-timeout (>=2.1.0)", "redis", "sphinx (>=6.0.0)", "types-redis"] +[[package]] +name = "prometheus-client" +version = "0.17.1" +description = "Python client for the Prometheus monitoring system." 
+optional = false +python-versions = ">=3.6" +files = [ + {file = "prometheus_client-0.17.1-py3-none-any.whl", hash = "sha256:e537f37160f6807b8202a6fc4764cdd19bac5480ddd3e0d463c3002b34462101"}, + {file = "prometheus_client-0.17.1.tar.gz", hash = "sha256:21e674f39831ae3f8acde238afd9a27a37d0d2fb5a28ea094f0ce25d2cbf2091"}, +] + +[package.extras] +twisted = ["twisted"] + +[[package]] +name = "protobuf" +version = "4.25.5" +description = "" +optional = false +python-versions = ">=3.8" +files = [ + {file = "protobuf-4.25.5-cp310-abi3-win32.whl", hash = "sha256:5e61fd921603f58d2f5acb2806a929b4675f8874ff5f330b7d6f7e2e784bbcd8"}, + {file = "protobuf-4.25.5-cp310-abi3-win_amd64.whl", hash = "sha256:4be0571adcbe712b282a330c6e89eae24281344429ae95c6d85e79e84780f5ea"}, + {file = "protobuf-4.25.5-cp37-abi3-macosx_10_9_universal2.whl", hash = "sha256:b2fde3d805354df675ea4c7c6338c1aecd254dfc9925e88c6d31a2bcb97eb173"}, + {file = "protobuf-4.25.5-cp37-abi3-manylinux2014_aarch64.whl", hash = "sha256:919ad92d9b0310070f8356c24b855c98df2b8bd207ebc1c0c6fcc9ab1e007f3d"}, + {file = "protobuf-4.25.5-cp37-abi3-manylinux2014_x86_64.whl", hash = "sha256:fe14e16c22be926d3abfcb500e60cab068baf10b542b8c858fa27e098123e331"}, + {file = "protobuf-4.25.5-cp38-cp38-win32.whl", hash = "sha256:98d8d8aa50de6a2747efd9cceba361c9034050ecce3e09136f90de37ddba66e1"}, + {file = "protobuf-4.25.5-cp38-cp38-win_amd64.whl", hash = "sha256:b0234dd5a03049e4ddd94b93400b67803c823cfc405689688f59b34e0742381a"}, + {file = "protobuf-4.25.5-cp39-cp39-win32.whl", hash = "sha256:abe32aad8561aa7cc94fc7ba4fdef646e576983edb94a73381b03c53728a626f"}, + {file = "protobuf-4.25.5-cp39-cp39-win_amd64.whl", hash = "sha256:7a183f592dc80aa7c8da7ad9e55091c4ffc9497b3054452d629bb85fa27c2a45"}, + {file = "protobuf-4.25.5-py3-none-any.whl", hash = "sha256:0aebecb809cae990f8129ada5ca273d9d670b76d9bfc9b1809f0a9c02b7dbf41"}, + {file = "protobuf-4.25.5.tar.gz", hash = "sha256:7f8249476b4a9473645db7f8ab42b02fe1488cbe5fb72fddd445e0665afd8584"}, +] + +[[package]] +name = "psutil" +version = "5.9.8" +description = "Cross-platform lib for process and system monitoring in Python." 
+optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*" +files = [ + {file = "psutil-5.9.8-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:26bd09967ae00920df88e0352a91cff1a78f8d69b3ecabbfe733610c0af486c8"}, + {file = "psutil-5.9.8-cp27-cp27m-manylinux2010_i686.whl", hash = "sha256:05806de88103b25903dff19bb6692bd2e714ccf9e668d050d144012055cbca73"}, + {file = "psutil-5.9.8-cp27-cp27m-manylinux2010_x86_64.whl", hash = "sha256:611052c4bc70432ec770d5d54f64206aa7203a101ec273a0cd82418c86503bb7"}, + {file = "psutil-5.9.8-cp27-cp27mu-manylinux2010_i686.whl", hash = "sha256:50187900d73c1381ba1454cf40308c2bf6f34268518b3f36a9b663ca87e65e36"}, + {file = "psutil-5.9.8-cp27-cp27mu-manylinux2010_x86_64.whl", hash = "sha256:02615ed8c5ea222323408ceba16c60e99c3f91639b07da6373fb7e6539abc56d"}, + {file = "psutil-5.9.8-cp27-none-win32.whl", hash = "sha256:36f435891adb138ed3c9e58c6af3e2e6ca9ac2f365efe1f9cfef2794e6c93b4e"}, + {file = "psutil-5.9.8-cp27-none-win_amd64.whl", hash = "sha256:bd1184ceb3f87651a67b2708d4c3338e9b10c5df903f2e3776b62303b26cb631"}, + {file = "psutil-5.9.8-cp36-abi3-macosx_10_9_x86_64.whl", hash = "sha256:aee678c8720623dc456fa20659af736241f575d79429a0e5e9cf88ae0605cc81"}, + {file = "psutil-5.9.8-cp36-abi3-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8cb6403ce6d8e047495a701dc7c5bd788add903f8986d523e3e20b98b733e421"}, + {file = "psutil-5.9.8-cp36-abi3-manylinux_2_12_x86_64.manylinux2010_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d06016f7f8625a1825ba3732081d77c94589dca78b7a3fc072194851e88461a4"}, + {file = "psutil-5.9.8-cp36-cp36m-win32.whl", hash = "sha256:7d79560ad97af658a0f6adfef8b834b53f64746d45b403f225b85c5c2c140eee"}, + {file = "psutil-5.9.8-cp36-cp36m-win_amd64.whl", hash = "sha256:27cc40c3493bb10de1be4b3f07cae4c010ce715290a5be22b98493509c6299e2"}, + {file = "psutil-5.9.8-cp37-abi3-win32.whl", hash = "sha256:bc56c2a1b0d15aa3eaa5a60c9f3f8e3e565303b465dbf57a1b730e7a2b9844e0"}, + {file = "psutil-5.9.8-cp37-abi3-win_amd64.whl", hash = "sha256:8db4c1b57507eef143a15a6884ca10f7c73876cdf5d51e713151c1236a0e68cf"}, + {file = "psutil-5.9.8-cp38-abi3-macosx_11_0_arm64.whl", hash = "sha256:d16bbddf0693323b8c6123dd804100241da461e41d6e332fb0ba6058f630f8c8"}, + {file = "psutil-5.9.8.tar.gz", hash = "sha256:6be126e3225486dff286a8fb9a06246a5253f4c7c53b475ea5f5ac934e64194c"}, +] + +[package.extras] +test = ["enum34", "ipaddress", "mock", "pywin32", "wmi"] + [[package]] name = "pycodestyle" version = "2.10.0" @@ -1889,7 +2428,26 @@ files = [ idna = ">=2.0" multidict = ">=4.0" +[[package]] +name = "zipp" +version = "3.21.0" +description = "Backport of pathlib-compatible object wrapper for zip files" +optional = false +python-versions = ">=3.9" +files = [ + {file = "zipp-3.21.0-py3-none-any.whl", hash = "sha256:ac1bbe05fd2991f160ebce24ffbac5f6d11d83dc90891255885223d42b3cd931"}, + {file = "zipp-3.21.0.tar.gz", hash = "sha256:2c9958f6430a2040341a52eb608ed6dd93ef4392e02ffe219417c1b28b5dd1f4"}, +] + +[package.extras] +check = ["pytest-checkdocs (>=2.4)", "pytest-ruff (>=0.2.1)"] +cover = ["pytest-cov"] +doc = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"] +enabler = ["pytest-enabler (>=2.2)"] +test = ["big-O", "importlib-resources", "jaraco.functools", "jaraco.itertools", "jaraco.test", "more-itertools", "pytest (>=6,!=8.1.*)", "pytest-ignore-flaky"] +type = ["pytest-mypy"] + [metadata] lock-version = "2.0" 
 python-versions = ">=3.11,<3.13"
-content-hash = "053c9c65687a429e513d52464b44355cbb33545ebcdfc76fb4745d0a1771b6ee"
+content-hash = "baeed7683a14f3e035c48fca98703be4dd4e143a9b834a546a515521d3f6f3bc"
diff --git a/pyproject.toml b/pyproject.toml
index 4685a9b..7f955b6 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -31,9 +31,7 @@ aiohttp = "^3.10.2"
 numpy = "^1.26.0"
 pillow = "^10.3.0"
 azure-identity = "^1.16.1"
-# FIXME: migrate to the SDK once it released
-# aidial-sdk = {version = "^0.13.0", extras = ["telemetry"]}
-aidial-sdk = {git = "https://github.com/epam/ai-dial-sdk.git", branch = "fix/fixed-sharing-issues-in-merge-chunks-algo", extras = ["telemetry"]}
+aidial-sdk = {version = "^0.15.0", extras = ["telemetry"]}
 
 [tool.poetry.group.test.dependencies]
 pytest = "7.4.0"

From e2ba08c76aa919349a6d8bfbf0323d19316fffc2 Mon Sep 17 00:00:00 2001
From: Anton Dubovik
Date: Mon, 18 Nov 2024 10:29:12 +0000
Subject: [PATCH 9/9] chore: fixes in poetry.lock

---
 poetry.lock | 159 ++++++++++++++++++++++++----------------------------
 1 file changed, 73 insertions(+), 86 deletions(-)

diff --git a/poetry.lock b/poetry.lock
index 7814cc7..092654c 100644
--- a/poetry.lock
+++ b/poetry.lock
@@ -596,20 +596,20 @@ test-randomorder = ["pytest-randomly"]
 
 [[package]]
 name = "deprecated"
-version = "1.2.15"
+version = "1.2.14"
 description = "Python @deprecated decorator to deprecate old python classes, functions or methods."
 optional = false
-python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,>=2.7"
+python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*"
 files = [
-    {file = "Deprecated-1.2.15-py2.py3-none-any.whl", hash = "sha256:353bc4a8ac4bfc96800ddab349d89c25dec1079f65fd53acdcc1e0b975b21320"},
-    {file = "deprecated-1.2.15.tar.gz", hash = "sha256:683e561a90de76239796e6b6feac66b99030d2dd3fcf61ef996330f14bbb9b0d"},
+    {file = "Deprecated-1.2.14-py2.py3-none-any.whl", hash = "sha256:6fac8b097794a90302bdbb17b9b815e732d3c4720583ff1b198499d78470466c"},
+    {file = "Deprecated-1.2.14.tar.gz", hash = "sha256:e5323eb936458dccc2582dc6f9c322c852a775a27065ff2b0c4970b9d53d01b3"},
 ]
 
 [package.dependencies]
 wrapt = ">=1.10,<2"
 
 [package.extras]
-dev = ["PyTest", "PyTest-Cov", "bump2version (<1)", "jinja2 (>=3.0.3,<3.1.0)", "setuptools", "sphinx (<2)", "tox"]
+dev = ["PyTest", "PyTest-Cov", "bump2version (<1)", "sphinx (<2)", "tox"]
 
 [[package]]
 name = "distlib"
@@ -757,13 +757,13 @@ files = [
 
 [[package]]
 name = "googleapis-common-protos"
-version = "1.66.0"
+version = "1.63.2"
 description = "Common protobufs used in Google APIs"
 optional = false
 python-versions = ">=3.7"
 files = [
-    {file = "googleapis_common_protos-1.66.0-py2.py3-none-any.whl", hash = "sha256:d7abcd75fabb2e0ec9f74466401f6c119a0b498e27370e9be4c94cb7e382b8ed"},
-    {file = "googleapis_common_protos-1.66.0.tar.gz", hash = "sha256:c3e7b33d15fdca5374cc0a7346dd92ffa847425cc4ea941d970f13680052ec8c"},
+    {file = "googleapis-common-protos-1.63.2.tar.gz", hash = "sha256:27c5abdffc4911f28101e635de1533fb4cfd2c37fbaa9174587c799fac90aa87"},
+    {file = "googleapis_common_protos-1.63.2-py2.py3-none-any.whl", hash = "sha256:27a2499c7e8aff199665b22741997e485eccc8645aa9176c7c988e6fae507945"},
 ]
 
 [package.dependencies]
@@ -774,70 +774,61 @@ grpc = ["grpcio (>=1.44.0,<2.0.0.dev0)"]
 
 [[package]]
 name = "grpcio"
-version = "1.68.0"
+version = "1.65.4"
 description = "HTTP/2-based RPC framework"
 optional = false
 python-versions = ">=3.8"
 files = [
-    {file = "grpcio-1.68.0-cp310-cp310-linux_armv7l.whl", hash = "sha256:619b5d0f29f4f5351440e9343224c3e19912c21aeda44e0c49d0d147a8d01544"},
-    {file = "grpcio-1.68.0-cp310-cp310-macosx_12_0_universal2.whl", hash = "sha256:a59f5822f9459bed098ffbceb2713abbf7c6fd13f2b9243461da5c338d0cd6c3"},
-    {file = "grpcio-1.68.0-cp310-cp310-manylinux_2_17_aarch64.whl", hash = "sha256:c03d89df516128febc5a7e760d675b478ba25802447624edf7aa13b1e7b11e2a"},
-    {file = "grpcio-1.68.0-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:44bcbebb24363d587472089b89e2ea0ab2e2b4df0e4856ba4c0b087c82412121"},
-    {file = "grpcio-1.68.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:79f81b7fbfb136247b70465bd836fa1733043fdee539cd6031cb499e9608a110"},
-    {file = "grpcio-1.68.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:88fb2925789cfe6daa20900260ef0a1d0a61283dfb2d2fffe6194396a354c618"},
-    {file = "grpcio-1.68.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:99f06232b5c9138593ae6f2e355054318717d32a9c09cdc5a2885540835067a1"},
-    {file = "grpcio-1.68.0-cp310-cp310-win32.whl", hash = "sha256:a6213d2f7a22c3c30a479fb5e249b6b7e648e17f364598ff64d08a5136fe488b"},
-    {file = "grpcio-1.68.0-cp310-cp310-win_amd64.whl", hash = "sha256:15327ab81131ef9b94cb9f45b5bd98803a179c7c61205c8c0ac9aff9d6c4e82a"},
-    {file = "grpcio-1.68.0-cp311-cp311-linux_armv7l.whl", hash = "sha256:3b2b559beb2d433129441783e5f42e3be40a9e1a89ec906efabf26591c5cd415"},
-    {file = "grpcio-1.68.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:e46541de8425a4d6829ac6c5d9b16c03c292105fe9ebf78cb1c31e8d242f9155"},
-    {file = "grpcio-1.68.0-cp311-cp311-manylinux_2_17_aarch64.whl", hash = "sha256:c1245651f3c9ea92a2db4f95d37b7597db6b246d5892bca6ee8c0e90d76fb73c"},
-    {file = "grpcio-1.68.0-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4f1931c7aa85be0fa6cea6af388e576f3bf6baee9e5d481c586980c774debcb4"},
-    {file = "grpcio-1.68.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8b0ff09c81e3aded7a183bc6473639b46b6caa9c1901d6f5e2cba24b95e59e30"},
-    {file = "grpcio-1.68.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:8c73f9fbbaee1a132487e31585aa83987ddf626426d703ebcb9a528cf231c9b1"},
-    {file = "grpcio-1.68.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:6b2f98165ea2790ea159393a2246b56f580d24d7da0d0342c18a085299c40a75"},
-    {file = "grpcio-1.68.0-cp311-cp311-win32.whl", hash = "sha256:e1e7ed311afb351ff0d0e583a66fcb39675be112d61e7cfd6c8269884a98afbc"},
-    {file = "grpcio-1.68.0-cp311-cp311-win_amd64.whl", hash = "sha256:e0d2f68eaa0a755edd9a47d40e50dba6df2bceda66960dee1218da81a2834d27"},
-    {file = "grpcio-1.68.0-cp312-cp312-linux_armv7l.whl", hash = "sha256:8af6137cc4ae8e421690d276e7627cfc726d4293f6607acf9ea7260bd8fc3d7d"},
-    {file = "grpcio-1.68.0-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:4028b8e9a3bff6f377698587d642e24bd221810c06579a18420a17688e421af7"},
-    {file = "grpcio-1.68.0-cp312-cp312-manylinux_2_17_aarch64.whl", hash = "sha256:f60fa2adf281fd73ae3a50677572521edca34ba373a45b457b5ebe87c2d01e1d"},
-    {file = "grpcio-1.68.0-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e18589e747c1e70b60fab6767ff99b2d0c359ea1db8a2cb524477f93cdbedf5b"},
-    {file = "grpcio-1.68.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e0d30f3fee9372796f54d3100b31ee70972eaadcc87314be369360248a3dcffe"},
-    {file = "grpcio-1.68.0-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:7e0a3e72c0e9a1acab77bef14a73a416630b7fd2cbd893c0a873edc47c42c8cd"},
-    {file = "grpcio-1.68.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:a831dcc343440969aaa812004685ed322cdb526cd197112d0db303b0da1e8659"},
-    {file = "grpcio-1.68.0-cp312-cp312-win32.whl", hash = "sha256:5a180328e92b9a0050958ced34dddcb86fec5a8b332f5a229e353dafc16cd332"},
-    {file = "grpcio-1.68.0-cp312-cp312-win_amd64.whl", hash = "sha256:2bddd04a790b69f7a7385f6a112f46ea0b34c4746f361ebafe9ca0be567c78e9"},
-    {file = "grpcio-1.68.0-cp313-cp313-linux_armv7l.whl", hash = "sha256:fc05759ffbd7875e0ff2bd877be1438dfe97c9312bbc558c8284a9afa1d0f40e"},
-    {file = "grpcio-1.68.0-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:15fa1fe25d365a13bc6d52fcac0e3ee1f9baebdde2c9b3b2425f8a4979fccea1"},
-    {file = "grpcio-1.68.0-cp313-cp313-manylinux_2_17_aarch64.whl", hash = "sha256:32a9cb4686eb2e89d97022ecb9e1606d132f85c444354c17a7dbde4a455e4a3b"},
-    {file = "grpcio-1.68.0-cp313-cp313-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:dba037ff8d284c8e7ea9a510c8ae0f5b016004f13c3648f72411c464b67ff2fb"},
-    {file = "grpcio-1.68.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0efbbd849867e0e569af09e165363ade75cf84f5229b2698d53cf22c7a4f9e21"},
-    {file = "grpcio-1.68.0-cp313-cp313-musllinux_1_1_i686.whl", hash = "sha256:4e300e6978df0b65cc2d100c54e097c10dfc7018b9bd890bbbf08022d47f766d"},
-    {file = "grpcio-1.68.0-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:6f9c7ad1a23e1047f827385f4713b5b8c6c7d325705be1dd3e31fb00dcb2f665"},
-    {file = "grpcio-1.68.0-cp313-cp313-win32.whl", hash = "sha256:3ac7f10850fd0487fcce169c3c55509101c3bde2a3b454869639df2176b60a03"},
-    {file = "grpcio-1.68.0-cp313-cp313-win_amd64.whl", hash = "sha256:afbf45a62ba85a720491bfe9b2642f8761ff348006f5ef67e4622621f116b04a"},
-    {file = "grpcio-1.68.0-cp38-cp38-linux_armv7l.whl", hash = "sha256:f8f695d9576ce836eab27ba7401c60acaf9ef6cf2f70dfe5462055ba3df02cc3"},
-    {file = "grpcio-1.68.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:9fe1b141cda52f2ca73e17d2d3c6a9f3f3a0c255c216b50ce616e9dca7e3441d"},
-    {file = "grpcio-1.68.0-cp38-cp38-manylinux_2_17_aarch64.whl", hash = "sha256:4df81d78fd1646bf94ced4fb4cd0a7fe2e91608089c522ef17bc7db26e64effd"},
-    {file = "grpcio-1.68.0-cp38-cp38-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:46a2d74d4dd8993151c6cd585594c082abe74112c8e4175ddda4106f2ceb022f"},
-    {file = "grpcio-1.68.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a17278d977746472698460c63abf333e1d806bd41f2224f90dbe9460101c9796"},
-    {file = "grpcio-1.68.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:15377bce516b1c861c35e18eaa1c280692bf563264836cece693c0f169b48829"},
-    {file = "grpcio-1.68.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:cc5f0a4f5904b8c25729a0498886b797feb817d1fd3812554ffa39551112c161"},
-    {file = "grpcio-1.68.0-cp38-cp38-win32.whl", hash = "sha256:def1a60a111d24376e4b753db39705adbe9483ef4ca4761f825639d884d5da78"},
-    {file = "grpcio-1.68.0-cp38-cp38-win_amd64.whl", hash = "sha256:55d3b52fd41ec5772a953612db4e70ae741a6d6ed640c4c89a64f017a1ac02b5"},
-    {file = "grpcio-1.68.0-cp39-cp39-linux_armv7l.whl", hash = "sha256:0d230852ba97654453d290e98d6aa61cb48fa5fafb474fb4c4298d8721809354"},
-    {file = "grpcio-1.68.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:50992f214264e207e07222703c17d9cfdcc2c46ed5a1ea86843d440148ebbe10"},
-    {file = "grpcio-1.68.0-cp39-cp39-manylinux_2_17_aarch64.whl", hash = "sha256:14331e5c27ed3545360464a139ed279aa09db088f6e9502e95ad4bfa852bb116"},
-    {file = "grpcio-1.68.0-cp39-cp39-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f84890b205692ea813653ece4ac9afa2139eae136e419231b0eec7c39fdbe4c2"},
-    {file = "grpcio-1.68.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b0cf343c6f4f6aa44863e13ec9ddfe299e0be68f87d68e777328bff785897b05"},
-    {file = "grpcio-1.68.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:fd2c2d47969daa0e27eadaf15c13b5e92605c5e5953d23c06d0b5239a2f176d3"},
-    {file = "grpcio-1.68.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:18668e36e7f4045820f069997834e94e8275910b1f03e078a6020bd464cb2363"},
-    {file = "grpcio-1.68.0-cp39-cp39-win32.whl", hash = "sha256:2af76ab7c427aaa26aa9187c3e3c42f38d3771f91a20f99657d992afada2294a"},
-    {file = "grpcio-1.68.0-cp39-cp39-win_amd64.whl", hash = "sha256:e694b5928b7b33ca2d3b4d5f9bf8b5888906f181daff6b406f4938f3a997a490"},
-    {file = "grpcio-1.68.0.tar.gz", hash = "sha256:7e7483d39b4a4fddb9906671e9ea21aaad4f031cdfc349fec76bdfa1e404543a"},
+    {file = "grpcio-1.65.4-cp310-cp310-linux_armv7l.whl", hash = "sha256:0e85c8766cf7f004ab01aff6a0393935a30d84388fa3c58d77849fcf27f3e98c"},
+    {file = "grpcio-1.65.4-cp310-cp310-macosx_12_0_universal2.whl", hash = "sha256:e4a795c02405c7dfa8affd98c14d980f4acea16ea3b539e7404c645329460e5a"},
+    {file = "grpcio-1.65.4-cp310-cp310-manylinux_2_17_aarch64.whl", hash = "sha256:d7b984a8dd975d949c2042b9b5ebcf297d6d5af57dcd47f946849ee15d3c2fb8"},
+    {file = "grpcio-1.65.4-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:644a783ce604a7d7c91412bd51cf9418b942cf71896344b6dc8d55713c71ce82"},
+    {file = "grpcio-1.65.4-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5764237d751d3031a36fafd57eb7d36fd2c10c658d2b4057c516ccf114849a3e"},
+    {file = "grpcio-1.65.4-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:ee40d058cf20e1dd4cacec9c39e9bce13fedd38ce32f9ba00f639464fcb757de"},
+    {file = "grpcio-1.65.4-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:4482a44ce7cf577a1f8082e807a5b909236bce35b3e3897f839f2fbd9ae6982d"},
+    {file = "grpcio-1.65.4-cp310-cp310-win32.whl", hash = "sha256:66bb051881c84aa82e4f22d8ebc9d1704b2e35d7867757f0740c6ef7b902f9b1"},
+    {file = "grpcio-1.65.4-cp310-cp310-win_amd64.whl", hash = "sha256:870370524eff3144304da4d1bbe901d39bdd24f858ce849b7197e530c8c8f2ec"},
+    {file = "grpcio-1.65.4-cp311-cp311-linux_armv7l.whl", hash = "sha256:85e9c69378af02e483bc626fc19a218451b24a402bdf44c7531e4c9253fb49ef"},
+    {file = "grpcio-1.65.4-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:2bd672e005afab8bf0d6aad5ad659e72a06dd713020554182a66d7c0c8f47e18"},
+    {file = "grpcio-1.65.4-cp311-cp311-manylinux_2_17_aarch64.whl", hash = "sha256:abccc5d73f5988e8f512eb29341ed9ced923b586bb72e785f265131c160231d8"},
+    {file = "grpcio-1.65.4-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:886b45b29f3793b0c2576201947258782d7e54a218fe15d4a0468d9a6e00ce17"},
+    {file = "grpcio-1.65.4-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:be952436571dacc93ccc7796db06b7daf37b3b56bb97e3420e6503dccfe2f1b4"},
+    {file = "grpcio-1.65.4-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:8dc9ddc4603ec43f6238a5c95400c9a901b6d079feb824e890623da7194ff11e"},
+    {file = "grpcio-1.65.4-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:ade1256c98cba5a333ef54636095f2c09e6882c35f76acb04412f3b1aa3c29a5"},
+    {file = "grpcio-1.65.4-cp311-cp311-win32.whl", hash = "sha256:280e93356fba6058cbbfc6f91a18e958062ef1bdaf5b1caf46c615ba1ae71b5b"},
+    {file = "grpcio-1.65.4-cp311-cp311-win_amd64.whl", hash = "sha256:d2b819f9ee27ed4e3e737a4f3920e337e00bc53f9e254377dd26fc7027c4d558"},
+    {file = "grpcio-1.65.4-cp312-cp312-linux_armv7l.whl", hash = "sha256:926a0750a5e6fb002542e80f7fa6cab8b1a2ce5513a1c24641da33e088ca4c56"},
+    {file = "grpcio-1.65.4-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:2a1d4c84d9e657f72bfbab8bedf31bdfc6bfc4a1efb10b8f2d28241efabfaaf2"},
+    {file = "grpcio-1.65.4-cp312-cp312-manylinux_2_17_aarch64.whl", hash = "sha256:17de4fda50967679677712eec0a5c13e8904b76ec90ac845d83386b65da0ae1e"},
+    {file = "grpcio-1.65.4-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3dee50c1b69754a4228e933696408ea87f7e896e8d9797a3ed2aeed8dbd04b74"},
+    {file = "grpcio-1.65.4-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:74c34fc7562bdd169b77966068434a93040bfca990e235f7a67cdf26e1bd5c63"},
+    {file = "grpcio-1.65.4-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:24a2246e80a059b9eb981e4c2a6d8111b1b5e03a44421adbf2736cc1d4988a8a"},
+    {file = "grpcio-1.65.4-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:18c10f0d054d2dce34dd15855fcca7cc44ec3b811139437543226776730c0f28"},
+    {file = "grpcio-1.65.4-cp312-cp312-win32.whl", hash = "sha256:d72962788b6c22ddbcdb70b10c11fbb37d60ae598c51eb47ec019db66ccfdff0"},
+    {file = "grpcio-1.65.4-cp312-cp312-win_amd64.whl", hash = "sha256:7656376821fed8c89e68206a522522317787a3d9ed66fb5110b1dff736a5e416"},
+    {file = "grpcio-1.65.4-cp38-cp38-linux_armv7l.whl", hash = "sha256:4934077b33aa6fe0b451de8b71dabde96bf2d9b4cb2b3187be86e5adebcba021"},
+    {file = "grpcio-1.65.4-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:0cef8c919a3359847c357cb4314e50ed1f0cca070f828ee8f878d362fd744d52"},
+    {file = "grpcio-1.65.4-cp38-cp38-manylinux_2_17_aarch64.whl", hash = "sha256:a925446e6aa12ca37114840d8550f308e29026cdc423a73da3043fd1603a6385"},
+    {file = "grpcio-1.65.4-cp38-cp38-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:cf53e6247f1e2af93657e62e240e4f12e11ee0b9cef4ddcb37eab03d501ca864"},
+    {file = "grpcio-1.65.4-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cdb34278e4ceb224c89704cd23db0d902e5e3c1c9687ec9d7c5bb4c150f86816"},
+    {file = "grpcio-1.65.4-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:e6cbdd107e56bde55c565da5fd16f08e1b4e9b0674851d7749e7f32d8645f524"},
+    {file = "grpcio-1.65.4-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:626319a156b1f19513156a3b0dbfe977f5f93db63ca673a0703238ebd40670d7"},
+    {file = "grpcio-1.65.4-cp38-cp38-win32.whl", hash = "sha256:3d1bbf7e1dd1096378bd83c83f554d3b93819b91161deaf63e03b7022a85224a"},
+    {file = "grpcio-1.65.4-cp38-cp38-win_amd64.whl", hash = "sha256:a99e6dffefd3027b438116f33ed1261c8d360f0dd4f943cb44541a2782eba72f"},
+    {file = "grpcio-1.65.4-cp39-cp39-linux_armv7l.whl", hash = "sha256:874acd010e60a2ec1e30d5e505b0651ab12eb968157cd244f852b27c6dbed733"},
+    {file = "grpcio-1.65.4-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:b07f36faf01fca5427d4aa23645e2d492157d56c91fab7e06fe5697d7e171ad4"},
+    {file = "grpcio-1.65.4-cp39-cp39-manylinux_2_17_aarch64.whl", hash = "sha256:b81711bf4ec08a3710b534e8054c7dcf90f2edc22bebe11c1775a23f145595fe"},
+    {file = "grpcio-1.65.4-cp39-cp39-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:88fcabc332a4aef8bcefadc34a02e9ab9407ab975d2c7d981a8e12c1aed92aa1"},
+    {file = "grpcio-1.65.4-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c9ba3e63108a8749994f02c7c0e156afb39ba5bdf755337de8e75eb685be244b"},
+    {file = "grpcio-1.65.4-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:8eb485801957a486bf5de15f2c792d9f9c897a86f2f18db8f3f6795a094b4bb2"},
+    {file = "grpcio-1.65.4-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:075f3903bc1749ace93f2b0664f72964ee5f2da5c15d4b47e0ab68e4f442c257"},
+    {file = "grpcio-1.65.4-cp39-cp39-win32.whl", hash = "sha256:0a0720299bdb2cc7306737295d56e41ce8827d5669d4a3cd870af832e3b17c4d"},
+    {file = "grpcio-1.65.4-cp39-cp39-win_amd64.whl", hash = "sha256:a146bc40fa78769f22e1e9ff4f110ef36ad271b79707577bf2a31e3e931141b9"},
+    {file = "grpcio-1.65.4.tar.gz", hash = "sha256:2a4f476209acffec056360d3e647ae0e14ae13dcf3dfb130c227ae1c594cbe39"},
 ]
 
 [package.extras]
-protobuf = ["grpcio-tools (>=1.68.0)"]
+protobuf = ["grpcio-tools (>=1.65.4)"]
 
 [[package]]
 name = "h11"
@@ -1689,22 +1680,22 @@ twisted = ["twisted"]
 
 [[package]]
 name = "protobuf"
-version = "4.25.5"
+version = "4.25.4"
 description = ""
 optional = false
 python-versions = ">=3.8"
 files = [
-    {file = "protobuf-4.25.5-cp310-abi3-win32.whl", hash = "sha256:5e61fd921603f58d2f5acb2806a929b4675f8874ff5f330b7d6f7e2e784bbcd8"},
-    {file = "protobuf-4.25.5-cp310-abi3-win_amd64.whl", hash = "sha256:4be0571adcbe712b282a330c6e89eae24281344429ae95c6d85e79e84780f5ea"},
-    {file = "protobuf-4.25.5-cp37-abi3-macosx_10_9_universal2.whl", hash = "sha256:b2fde3d805354df675ea4c7c6338c1aecd254dfc9925e88c6d31a2bcb97eb173"},
-    {file = "protobuf-4.25.5-cp37-abi3-manylinux2014_aarch64.whl", hash = "sha256:919ad92d9b0310070f8356c24b855c98df2b8bd207ebc1c0c6fcc9ab1e007f3d"},
-    {file = "protobuf-4.25.5-cp37-abi3-manylinux2014_x86_64.whl", hash = "sha256:fe14e16c22be926d3abfcb500e60cab068baf10b542b8c858fa27e098123e331"},
-    {file = "protobuf-4.25.5-cp38-cp38-win32.whl", hash = "sha256:98d8d8aa50de6a2747efd9cceba361c9034050ecce3e09136f90de37ddba66e1"},
-    {file = "protobuf-4.25.5-cp38-cp38-win_amd64.whl", hash = "sha256:b0234dd5a03049e4ddd94b93400b67803c823cfc405689688f59b34e0742381a"},
-    {file = "protobuf-4.25.5-cp39-cp39-win32.whl", hash = "sha256:abe32aad8561aa7cc94fc7ba4fdef646e576983edb94a73381b03c53728a626f"},
-    {file = "protobuf-4.25.5-cp39-cp39-win_amd64.whl", hash = "sha256:7a183f592dc80aa7c8da7ad9e55091c4ffc9497b3054452d629bb85fa27c2a45"},
-    {file = "protobuf-4.25.5-py3-none-any.whl", hash = "sha256:0aebecb809cae990f8129ada5ca273d9d670b76d9bfc9b1809f0a9c02b7dbf41"},
-    {file = "protobuf-4.25.5.tar.gz", hash = "sha256:7f8249476b4a9473645db7f8ab42b02fe1488cbe5fb72fddd445e0665afd8584"},
+    {file = "protobuf-4.25.4-cp310-abi3-win32.whl", hash = "sha256:db9fd45183e1a67722cafa5c1da3e85c6492a5383f127c86c4c4aa4845867dc4"},
+    {file = "protobuf-4.25.4-cp310-abi3-win_amd64.whl", hash = "sha256:ba3d8504116a921af46499471c63a85260c1a5fc23333154a427a310e015d26d"},
+    {file = "protobuf-4.25.4-cp37-abi3-macosx_10_9_universal2.whl", hash = "sha256:eecd41bfc0e4b1bd3fa7909ed93dd14dd5567b98c941d6c1ad08fdcab3d6884b"},
+    {file = "protobuf-4.25.4-cp37-abi3-manylinux2014_aarch64.whl", hash = "sha256:4c8a70fdcb995dcf6c8966cfa3a29101916f7225e9afe3ced4395359955d3835"},
+    {file = "protobuf-4.25.4-cp37-abi3-manylinux2014_x86_64.whl", hash = "sha256:3319e073562e2515c6ddc643eb92ce20809f5d8f10fead3332f71c63be6a7040"},
+    {file = "protobuf-4.25.4-cp38-cp38-win32.whl", hash = "sha256:7e372cbbda66a63ebca18f8ffaa6948455dfecc4e9c1029312f6c2edcd86c4e1"},
+    {file = "protobuf-4.25.4-cp38-cp38-win_amd64.whl", hash = "sha256:051e97ce9fa6067a4546e75cb14f90cf0232dcb3e3d508c448b8d0e4265b61c1"},
+    {file = "protobuf-4.25.4-cp39-cp39-win32.whl", hash = "sha256:90bf6fd378494eb698805bbbe7afe6c5d12c8e17fca817a646cd6a1818c696ca"},
+    {file = "protobuf-4.25.4-cp39-cp39-win_amd64.whl", hash = "sha256:ac79a48d6b99dfed2729ccccee547b34a1d3d63289c71cef056653a846a2240f"},
+    {file = "protobuf-4.25.4-py3-none-any.whl", hash = "sha256:bfbebc1c8e4793cfd58589acfb8a1026be0003e852b9da7db5a4285bde996978"},
+    {file = "protobuf-4.25.4.tar.gz", hash = "sha256:0dc4a62cc4052a036ee2204d26fe4d835c62827c855c8a03f29fe6da146b380d"},
 ]
 
 [[package]]
@@ -2430,22 +2421,18 @@ multidict = ">=4.0"
 
 [[package]]
 name = "zipp"
-version = "3.21.0"
+version = "3.19.2"
 description = "Backport of pathlib-compatible object wrapper for zip files"
 optional = false
-python-versions = ">=3.9"
+python-versions = ">=3.8"
 files = [
-    {file = "zipp-3.21.0-py3-none-any.whl", hash = "sha256:ac1bbe05fd2991f160ebce24ffbac5f6d11d83dc90891255885223d42b3cd931"},
-    {file = "zipp-3.21.0.tar.gz", hash = "sha256:2c9958f6430a2040341a52eb608ed6dd93ef4392e02ffe219417c1b28b5dd1f4"},
+    {file = "zipp-3.19.2-py3-none-any.whl", hash = "sha256:f091755f667055f2d02b32c53771a7a6c8b47e1fdbc4b72a8b9072b3eef8015c"},
+    {file = "zipp-3.19.2.tar.gz", hash = "sha256:bf1dcf6450f873a13e952a29504887c89e6de7506209e5b1bcc3460135d4de19"},
 ]
 
 [package.extras]
-check = ["pytest-checkdocs (>=2.4)", "pytest-ruff (>=0.2.1)"]
-cover = ["pytest-cov"]
 doc = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"]
-enabler = ["pytest-enabler (>=2.2)"]
-test = ["big-O", "importlib-resources", "jaraco.functools", "jaraco.itertools", "jaraco.test", "more-itertools", "pytest (>=6,!=8.1.*)", "pytest-ignore-flaky"]
-type = ["pytest-mypy"]
+test = ["big-O", "importlib-resources", "jaraco.functools", "jaraco.itertools", "jaraco.test", "more-itertools", "pytest (>=6,!=8.1.*)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-ignore-flaky", "pytest-mypy", "pytest-ruff (>=0.2.1)"]
 
 [metadata]
 lock-version = "2.0"